file_path (string, 21-202 chars) | content (string, 12-1.02M chars) | size (int64, 12-1.02M) | lang (string, 9 classes) | avg_line_length (float64, 3.33-100) | max_line_length (int64, 10-993) | alphanum_fraction (float64, 0.27-0.93)
---|---|---|---|---|---|---|
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/repo_test_context.py | import json
import logging
import os
logger = logging.getLogger(__name__)
class RepoTestContext: # pragma: no cover
def __init__(self):
self.context = None
repo_test_context_file = os.environ.get("REPO_TEST_CONTEXT", None)
if repo_test_context_file and os.path.exists(repo_test_context_file):
print("Found repo test context file:", repo_test_context_file)
with open(repo_test_context_file) as f:
self.context = json.load(f)
logger.info("repo test context: %s", self.context)
def get(self):
return self.context
| 609 | Python | 28.047618 | 77 | 0.627258 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/nvdf.py | import itertools
import json
import logging
import os
import re
import sys
import time
import urllib.error
import urllib.request
from collections import defaultdict
from functools import lru_cache
from pathlib import Path
from typing import Dict, List, Tuple
import carb.settings
import omni.kit.app
from .gitlab import get_gitlab_build_url, is_running_in_gitlab
from .teamcity import get_teamcity_build_url, is_running_in_teamcity
from .utils import call_git, get_global_test_output_path, is_running_on_ci
logger = logging.getLogger(__name__)
@lru_cache()
def get_nvdf_report_filepath() -> str:
return os.path.join(get_global_test_output_path(), "nvdf_data.json")
def _partition(pred, iterable):
"""Use a predicate to partition entries into false entries and true entries"""
t1, t2 = itertools.tee(iterable)
return itertools.filterfalse(pred, t1), filter(pred, t2)
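# Illustrative sketch (not part of the original module): _partition() returns two lazy
# iterators, the "false" entries first and the "true" entries second. For example, splitting
# report entries by test_type, as _get_json_data() does below:
#
#   entries = [{"test_type": "exttest"}, {"test_type": "unittest"}]
#   exttests, unittests = _partition(lambda d: d["test_type"] == "unittest", entries)
#   list(exttests)   # -> [{"test_type": "exttest"}]
#   list(unittests)  # -> [{"test_type": "unittest"}]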
def _get_json_data(report_data: List[Dict[str, str]], app_info: dict, ci_info: dict) -> Dict:
"""Transform report_data into json data
Input:
{"event": "start", "test_id": "omni.kit.viewport", "ext_name": "omni.kit.viewport", "start_time": 1664831914.002093}
{"event": "stop", "test_id": "omni.kit.viewport", "ext_name": "omni.kit.viewport", "success": true, "skipped": false, "stop_time": 1664831927.1145973, "duration": 13.113}
...
Output:
{
"omni.kit.viewport+omni.kit.viewport": {
"app": { ... },
"teamcity": { ... },
"test": {
"b_success": false,
"d_duration": 1.185,
"s_ext_name": "omni.kit.viewport",
"s_name": "omni.kit.viewport",
"s_type": "exttest",
"ts_start_time": 1664893802530,
"ts_stop_time": 1664893803715
"result": { ... },
},
},
}
"""
def _aggregate_json_data(data: dict, config=""):
test_id = data["test_id"] + config
# increment retries count - that info is not available in the report_data, start count at 0 (-1 + 1 = 0)
# by keeping all passed results we can know if all retries failed and set consecutive_failure to true
if data["event"] == "start":
retries = test_retries.get(test_id, -1) + 1
test_retries[test_id] = retries
data["retries"] = retries
else:
retries = test_retries.get(test_id, 0)
if data["event"] == "stop":
test_results[test_id].append(data.get("passed", False))
test_id += f"{CONCAT_CHAR}{retries}"
test_data = json_data.get(test_id, {}).get("test", {})
test_data.update(data)
# special case for time, convert it to nvdf ts_ format right away
if "start_time" in test_data:
test_data["ts_start_time"] = int(test_data.pop("start_time") * 1000)
if "stop_time" in test_data:
test_data["ts_stop_time"] = int(test_data.pop("stop_time") * 1000)
# event is discarded
if "event" in test_data:
test_data.pop("event")
# init passed to false if needed, it can be missing if a test crashes (no stop event)
if "passed" not in test_data:
test_data["passed"] = False
json_data.update({test_id: {"app": app_info, "ci": ci_info, "test": test_data}})
CONCAT_CHAR = "|"
MIN_CONSECUTIVE_FAILURES = 3 # this value is in sync with repo.toml testExtMaxTestRunCount=3
test_retries: Dict[str, int] = {}
test_results = defaultdict(list)
exttest, unittest = _partition(lambda data: data["test_type"] == "unittest", report_data)
# add exttests - group by name + retry count
json_data: Dict[str, Dict] = {}
for data in exttest:
_aggregate_json_data(data)
# second loop to only keep exttest with results
for key, data in list(json_data.items()):
if not data.get("test", {}).get("result"):
del json_data[key]
# add all unittests - group by name + config + retry count
for data in unittest:
config = data["ext_test_id"].rsplit(CONCAT_CHAR, maxsplit=1)
config = f"{CONCAT_CHAR}{config[1]}" if len(config) > 1 else ""
_aggregate_json_data(data, config)
# second loop to tag all consecutive failures (when all results are false and equal or above the retry count)
for key, data in json_data.items():
results = test_results.get(key.rsplit(CONCAT_CHAR, maxsplit=1)[0])
all_failures = results and not any(results) and len(results) >= MIN_CONSECUTIVE_FAILURES - 1
if all_failures:
data["test"]["consecutive_failure"] = all_failures
return json_data
def _can_post_to_nvdf() -> bool:
if omni.kit.app.get_app().is_app_external():
logger.info("nvdf is disabled for external build")
return False
if not is_running_on_ci():
logger.info("nvdf posting only enabled on CI")
return False
return True
def post_to_nvdf(report_data: List[Dict[str, str]]):
if not report_data or not _can_post_to_nvdf():
return
try:
app_info = get_app_info()
ci_info = _get_ci_info()
json_data = _get_json_data(report_data, app_info, ci_info)
with open(get_nvdf_report_filepath(), "w") as f:
json.dump(json_data, f, skipkeys=True, sort_keys=True, indent=4)
# convert json_data to nvdf form and add to list
json_array = []
for data in json_data.values():
data["ts_created"] = int(time.time() * 1000)
json_array.append(to_nvdf_form(data))
# post all results in one request
project = "omniverse-kit-tests-results-v2"
json_str = json.dumps(json_array, skipkeys=True)
_post_json(project, json_str)
# print(json_str) # uncomment to debug
except Exception as e:
logger.warning(f"Exception occurred: {e}")
def post_coverage_to_nvdf(coverage_data: Dict[str, Dict]):
if not coverage_data or not _can_post_to_nvdf():
return
try:
app_info = get_app_info()
ci_info = _get_ci_info()
# convert json_data to nvdf form and add to list
json_array = []
for data in coverage_data.values():
data["ts_created"] = int(time.time() * 1000)
data["app"] = app_info
data["ci"] = ci_info
json_array.append(to_nvdf_form(data))
# post all results in one request
project = "omniverse-kit-tests-coverage-v2"
json_str = json.dumps(json_array, skipkeys=True)
_post_json(project, json_str)
# print(json_str) # uncomment to debug
except Exception as e:
logger.warning(f"Exception occurred: {e}")
def _post_json(project: str, json_str: str):
url = f"https://gpuwa.nvidia.com/dataflow/{project}/posting"
try:
resp = None
req = urllib.request.Request(url)
req.add_header("Content-Type", "application/json; charset=utf-8")
json_data_bytes = json_str.encode("utf-8") # needs to be bytes
# use a short 10-second timeout to avoid taking too much time in case of problems
resp = urllib.request.urlopen(req, json_data_bytes, timeout=10)
except (urllib.error.URLError, json.JSONDecodeError) as e:
logger.warning(f"Error sending request to nvdf, response: {resp}, exception: {e}")
def query_nvdf(query: str) -> dict:
project = "df-omniverse-kit-tests-results-v2*"
url = f"https://gpuwa.nvidia.com:443/elasticsearch/{project}/_search"
try:
resp = None
req = urllib.request.Request(url)
req.add_header("Content-Type", "application/json; charset=utf-8")
json_data = json.dumps(query).encode("utf-8")
# use a short 10-second timeout to avoid taking too much time in case of problems
with urllib.request.urlopen(req, data=json_data, timeout=10) as resp:
return json.loads(resp.read())
except (urllib.error.URLError, json.JSONDecodeError) as e:
logger.warning(f"Request error to nvdf, response: {resp}, exception: {e}")
return {}
@lru_cache()
def _detect_kit_branch_and_mr(full_kit_version: str) -> Tuple[str, int]:
match = re.search(r"^([^\+]+)\+([^\.]+)", full_kit_version)
if match is None:
logger.warning(f"Cannot detect kit SDK branch from: {full_kit_version}")
branch = "Unknown"
else:
if match[2] == "release":
branch = f"release/{match[1]}"
else:
branch = match[2]
# merge requests will be named mr1234 with 1234 being the merge request number
if branch.startswith("mr") and branch[2:].isdigit():
mr = int(branch[2:])
branch = "" # if we have an mr we don't have the branch name
else:
mr = 0
return branch, mr
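# Illustrative examples (assumed version strings, not from the original source):
#
#   _detect_kit_branch_and_mr("103.1+release.10030.f5f9dcab.tc")  # -> ("release/103.1", 0)
#   _detect_kit_branch_and_mr("103.1+master.10030.f5f9dcab.tc")   # -> ("master", 0)
#   _detect_kit_branch_and_mr("103.1+mr1234.10030.f5f9dcab.tc")   # -> ("", 1234)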
@lru_cache()
def _find_repository_info() -> str:
"""Get repo remote origin url, fallback on yaml if not found"""
res = call_git(["config", "--get", "remote.origin.url"])
remote_url = res.stdout.strip("\n") if res and res.returncode == 0 else ""
if remote_url:
return remote_url
# Attempt to find the repository from yaml file
kit_root = Path(sys.argv[0]).parent
if kit_root.stem.lower() != "kit":
info_yaml = kit_root.joinpath("INFO.yaml")
if not info_yaml.exists():
info_yaml = kit_root.joinpath("PACKAGE-INFO.yaml")
if info_yaml.exists():
repo_re = re.compile(r"^Repository\s*:\s*(.+)$", re.MULTILINE)
content = info_yaml.read_text()
matches = repo_re.findall(content)
if len(matches) == 1:
return matches[0].strip()
return ""
@lru_cache()
def get_app_info() -> Dict:
"""This should be part of omni.kit.app.
Example response:
{
"app_name": "omni.app.full.kit",
"app_version": "1.0.1",
"kit_version_full": "103.1+release.10030.f5f9dcab.tc",
"kit_version": "103.1",
"kit_build_number": 10030,
"branch": "master"
"config": "release",
"platform": "windows-x86_64",
"python_version": "cp37"
}
"""
app = omni.kit.app.get_app()
ver = app.get_build_version() # eg 103.1+release.10030.f5f9dcab.tc
branch, mr = _detect_kit_branch_and_mr(ver)
settings = carb.settings.get_settings()
info = {
"app_name": settings.get("/app/name"),
"app_name_full": settings.get("/app/window/title") or settings.get("/app/name"),
"app_version": settings.get("/app/version"),
"branch": branch,
"merge_request": mr,
"git_hash": ver.rsplit(".", 2)[1],
"git_remote_url": _find_repository_info(),
"kit_version_full": ver,
"kit_version": ver.split("+", 1)[0],
"kit_build_number": int(ver.rsplit(".", 3)[1]),
}
info.update(app.get_platform_info())
return info
@lru_cache()
def _get_ci_info() -> Dict:
info = {
"ci_name": "local",
}
if is_running_in_teamcity():
info.update(
{
"ci_name": "teamcity",
"build_id": os.getenv("TEAMCITY_BUILD_ID") or "",
"build_config_name": os.getenv("TEAMCITY_BUILDCONF_NAME") or "",
"build_url": get_teamcity_build_url(),
"project_name": os.getenv("TEAMCITY_PROJECT_NAME") or "",
}
)
elif is_running_in_gitlab():
info.update(
{
"ci_name": "gitlab",
"build_id": os.getenv("CI_PIPELINE_ID") or "",
"build_config_name": os.getenv("CI_JOB_NAME") or "",
"build_url": get_gitlab_build_url(),
"project_name": os.getenv("CI_PROJECT_NAME") or "",
}
)
# todo : support github
return info
def to_nvdf_form(data: dict) -> Dict:
"""Convert dict to NVDF-compliant form.
https://confluence.nvidia.com/display/nvdataflow/NVDataFlow#NVDataFlow-PostingPayload
"""
reserved = {"ts_created", "_id"}
prefixes = {str: "s_", float: "d_", int: "l_", bool: "b_", list: "obj_", tuple: "obj_"}
key_illegal_pattern = "[!@#$%^&*.]+"
def _convert(d):
result = {}
try:
for key, value in d.items():
key = re.sub(key_illegal_pattern, "_", key)
if key in reserved:
result[key] = value
elif key.startswith("ts_"):
result[key] = value
elif isinstance(value, dict):
# note that the nvdf docs state this should be prefixed with 'obj_', but it also works without it.
# We choose not to prefix so that the fields match up with existing fields from kit benchmarking
result[key] = _convert(value)
elif hasattr(value, "__dict__"):
# support for Classes
result[key] = _convert(value.__dict__)
elif isinstance(value, (list, tuple)):
_type = type(value[0]) if value else str
result[prefixes[_type] + key] = value
elif isinstance(value, (str, float, int, bool)):
result[prefixes[type(value)] + key] = value
else:
raise ValueError(f"Type {type(value)} not supported in nvdf (data: {data})")
return result
except Exception as e:
raise Exception(f"Exception for {key} {value} -> {e}")
return _convert(data)
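# Minimal sketch of the conversion (hypothetical input, not from the original source):
#
#   to_nvdf_form({"name": "omni.kit.viewport", "passed": True, "duration": 1.5, "retries": 2})
#   # -> {"s_name": "omni.kit.viewport", "b_passed": True, "d_duration": 1.5, "l_retries": 2}
#
# remove_nvdf_form() below undoes the same prefixing.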
def remove_nvdf_form(data: dict):
prefixes = ["s_", "d_", "l_", "b_"]
def _convert(d):
result = {}
try:
for key, value in d.items():
if isinstance(value, dict):
# note that the nvdf docs state this should be prefixed with 'obj_', but it also works without it.
# We choose not to prefix so that the fields match up with existing fields from kit benchmarking
result[key] = _convert(value)
elif hasattr(value, "__dict__"):
# support for Classes
result[key] = _convert(value.__dict__)
elif isinstance(value, (list, tuple, str, float, int, bool)):
if key[:2] in prefixes:
key = key[2:]
result[key] = value
else:
raise ValueError(f"Type {type(value)} not supported in nvdf (data: {data})")
return result
except Exception as e:
raise Exception(f"Exception for {key} {value} -> {e}")
return _convert(data)
| 14,649 | Python | 35.901763 | 174 | 0.569117 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/__init__.py | import asyncio
import omni.kit.app
import omni.ext
from .async_unittest import AsyncTestCase
from .async_unittest import AsyncTestCaseFailOnLogError
from .async_unittest import AsyncTestSuite
from .utils import get_setting, get_global_test_output_path, get_test_output_path
from .ext_utils import decompose_test_list
from .ext_utils import extension_from_test_name
from .ext_utils import find_disabled_tests
from .ext_utils import get_module_to_extension_map
from .ext_utils import test_only_extension_dependencies
from . import unittests
from .unittests import get_tests_from_modules
from .unittests import get_tests_to_remove_from_modules
from .unittests import run_tests
from .unittests import get_tests
from .unittests import remove_from_dynamic_test_cache
from .exttests import run_ext_tests, shutdown_ext_tests
from .exttests import ExtTest, ExtTestResult
from .test_reporters import TestRunStatus
from .test_reporters import add_test_status_report_cb
from .test_reporters import remove_test_status_report_cb
from .test_coverage import PyCoverageCollector
from .test_populators import DEFAULT_POPULATOR_NAME, TestPopulator, TestPopulateAll, TestPopulateDisabled
from .reporter import generate_report
try:
from omni.kit.omni_test_registry import omni_test_registry
except ImportError:
# omni_test_registry is copied at build time into the omni.kit.test extension directory in _build
pass
async def _auto_run_tests(run_tests_and_exit: bool):
# Skip 2 updates to make sure all extensions loaded and initialized
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
# Run Extension tests?
# This part runs on the parent Kit Process that triggers all extension tests
test_exts = list(get_setting("/exts/omni.kit.test/testExts", default=[]))
if len(test_exts) > 0:
# Quit on finish:
def on_finish(result: bool):
# generate coverage report at the end?
if get_setting("/exts/omni.kit.test/testExtGenerateCoverageReport", default=False):
generate_report()
returncode = 0 if result else 21
omni.kit.app.get_app().post_quit(returncode)
exclude_exts = list(get_setting("/exts/omni.kit.test/excludeExts", default=[]))
run_ext_tests(test_exts, on_finish_fn=on_finish, exclude_exts=exclude_exts)
return
# Print tests?
# This part runs on the child Kit Process to print the number of extension tests
if len(test_exts) == 0 and get_setting("/exts/omni.kit.test/printTestsAndQuit", default=False):
unittests.print_tests()
omni.kit.app.get_app().post_quit(0)
return
# Run python tests?
# This part runs on the child Kit Process that performs the extension tests
if run_tests_and_exit:
tests_filter = get_setting("/exts/omni.kit.test/runTestsFilter", default="")
from unittest.result import TestResult
# Quit on finish:
def on_finish(result: TestResult):
returncode = 0 if result.wasSuccessful() else 13
cpp_test_res = get_setting("/exts/omni.kit.test/~cppTestResult", default=None)
if cpp_test_res is not None:
returncode += cpp_test_res
if not get_setting("/exts/omni.kit.test/doNotQuit", default=False):
omni.kit.app.get_app().post_quit(returncode)
unittests.run_tests(unittests.get_tests(tests_filter), on_finish)
class _TestAutoRunner(omni.ext.IExt):
"""Automatically run tests based on setting"""
def __init__(self):
super().__init__()
self._py_coverage = PyCoverageCollector()
def on_startup(self):
# Report generate mode?
if get_setting("/exts/omni.kit.test/testExtGenerateReport", default=False):
generate_report()
omni.kit.app.get_app().post_quit(0)
return
# Otherwise: regular test run
run_tests_and_exit = get_setting("/exts/omni.kit.test/runTestsAndQuit", default=False)
ui_mode = get_setting("/exts/omni.kit.test/testExtUIMode", default=False)
# If launching a Python test then start test coverage subsystem (might do nothing depending on the settings)
if run_tests_and_exit or ui_mode:
self._py_coverage.startup()
def on_app_ready(e):
asyncio.ensure_future(_auto_run_tests(run_tests_and_exit))
self._app_ready_sub = (
omni.kit.app.get_app()
.get_startup_event_stream()
.create_subscription_to_pop_by_type(
omni.kit.app.EVENT_APP_READY, on_app_ready, name="omni.kit.test start tests"
)
)
def on_shutdown(self):
# Stop coverage and generate report if it's started.
self._py_coverage.shutdown()
shutdown_ext_tests()
| 4,851 | Python | 39.099173 | 116 | 0.683983 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/sampling.py | import datetime
import logging
import random
from statistics import mean
from .nvdf import get_app_info, query_nvdf
from .utils import clamp, get_setting, is_running_on_ci
logger = logging.getLogger(__name__)
class SamplingFactor:
LOWER_BOUND = 0.0
UPPER_BOUND = 1.0
MID_POINT = 0.5
class Sampling:
"""Basic Tests Sampling support"""
AGG_TEST_IDS = "test_ids"
AGG_LAST_PASSED = "last_passed"
LAST_PASSED_COUNT = 3
TEST_IDS_COUNT = 1000
DAYS = 4
def __init__(self, app_info: dict):
self.tests_sample = []
self.tests_run_count = []
self.query_result = False
self.app_info = app_info
def run_query(self, extension_name: str, unittests: list, running_on_ci: bool):
# when running locally skip the nvdf query
if running_on_ci:
try:
self.query_result = self._query_nvdf(extension_name, unittests)
except Exception as e:
logger.warning(f"Exception while doing nvdf query: {e}")
else:
self.query_result = True
# populate test list if empty, can happen both locally and on CI
if self.query_result and not self.tests_sample:
self.tests_sample = unittests
self.tests_run_count = [SamplingFactor.MID_POINT] * len(self.tests_sample)
def get_tests_to_skip(self, sampling_factor: float) -> list:
if not self.query_result:
return []
weights = self._calculate_weights()
samples_count = len(self.tests_sample)
# Grab (1.0 - sampling factor) to get the list of tests to skip
sampling_factor = SamplingFactor.UPPER_BOUND - sampling_factor
sampling_count = clamp(int(sampling_factor * float(samples_count)), 0, samples_count)
# use sampling seed if available
seed = int(get_setting("/exts/omni.kit.test/testExtSamplingSeed", default=-1))
if seed >= 0:
random.seed(seed)
sampled_tests = self._random_choices_no_replace(
population=self.tests_sample,
weights=weights,
k=sampling_count,
)
return sampled_tests
def _query_nvdf(self, extension_name: str, unittests: list) -> bool: # pragma: no cover
query = self._es_query(extension_name, days=self.DAYS, hours=0)
r = query_nvdf(query)
for aggs in r.get("aggregations", {}).get(self.AGG_TEST_IDS, {}).get("buckets", {}):
key = aggs.get("key")
if key not in unittests:
continue
hits = aggs.get(self.AGG_LAST_PASSED, {}).get("hits", {}).get("hits", [])
if not hits:
continue
all_failed = False
for hit in hits:
passed = hit["_source"]["test"]["b_passed"]
all_failed = all_failed or not passed
# consecutive failed tests cannot be skipped
if all_failed:
continue
self.tests_sample.append(key)
self.tests_run_count.append(aggs.get("doc_count", 0))
return True
def _random_choices_no_replace(self, population, weights, k) -> list:
"""Similar to numpy.random.Generator.choice() with replace=False"""
weights = list(weights)
positions = range(len(population))
indices = []
while True:
needed = k - len(indices)
if not needed:
break
for i in random.choices(positions, weights, k=needed):
if weights[i]:
weights[i] = SamplingFactor.LOWER_BOUND
indices.append(i)
return [population[i] for i in indices]
def _calculate_weights(self) -> list:
"""Simple weight adjusting to make sure all tests run an equal amount of times"""
samples_min = min(self.tests_run_count)
samples_max = max(self.tests_run_count)
samples_width = samples_max - samples_min
samples_mean = mean(self.tests_run_count)
def _calculate_weight(test_count: int):
if samples_width == 0:
return SamplingFactor.MID_POINT
weight = SamplingFactor.MID_POINT + (samples_mean - float(test_count)) / float(samples_width)
# clamp is not set to [0.0, 1.0] to have better random distribution
return clamp(
weight,
SamplingFactor.LOWER_BOUND + 0.05,
SamplingFactor.UPPER_BOUND - 0.05,
)
return [_calculate_weight(c) for c in self.tests_run_count]
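# Illustrative worked example (hypothetical run counts, not from the original source):
# with tests_run_count = [1, 5, 9] the mean is 5 and the width is 8, so the weights become
#
#   count 1 -> 0.5 + (5 - 1) / 8 = 1.0  -> clamped to 0.95
#   count 5 -> 0.5 + (5 - 5) / 8 = 0.5
#   count 9 -> 0.5 + (5 - 9) / 8 = 0.0  -> clamped to 0.05
#
# i.e. tests with a below-average run count get a larger weight than those above average.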
def _es_query(self, extension_name: str, days: int, hours: int) -> dict:
target_date = datetime.datetime.utcnow() - datetime.timedelta(days=days, hours=hours)
kit_version = self.app_info["kit_version"]
platform = self.app_info["platform"]
branch = self.app_info["branch"]
merge_request = self.app_info["merge_request"]
query = {
"aggs": {
self.AGG_TEST_IDS: {
"terms": {"field": "test.s_test_id", "order": {"_count": "desc"}, "size": self.TEST_IDS_COUNT},
"aggs": {
self.AGG_LAST_PASSED: {
"top_hits": {
"_source": "test.b_passed",
"size": self.LAST_PASSED_COUNT,
"sort": [{"ts_created": {"order": "desc"}}],
}
}
},
}
},
"size": 0,
"query": {
"bool": {
"filter": [
{"match_all": {}},
{"term": {"test.s_ext_test_id": extension_name}},
{"term": {"app.s_kit_version": kit_version}},
{"term": {"app.s_platform": platform}},
{"term": {"app.s_branch": branch}},
{"term": {"app.l_merge_request": merge_request}},
{
"range": {
"ts_created": {
"gte": target_date.isoformat() + "Z",
"format": "strict_date_optional_time",
}
}
},
],
}
},
}
return query
def get_tests_sampling_to_skip(extension_name: str, sampling_factor: float, unittests: list) -> list: # pragma: no cover
"""Return a list of tests that can be skipped for a given extension based on a sampling factor
When using tests sampling we have to run:
1) all new tests (not found on nvdf)
2) all failed tests (ie: only consecutive failures, flaky tests are not considered)
3) sampling tests (sampling factor * number of tests)
By applying (1 - sampling factor) we get a list of tests to skip, which is guaranteed not to contain any test
from point 1 or 2.
"""
ts = Sampling(get_app_info())
ts.run_query(extension_name, unittests, is_running_on_ci())
return ts.get_tests_to_skip(sampling_factor)
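# Hypothetical usage sketch (names are illustrative, not from the original source):
#
#   to_skip = get_tests_sampling_to_skip("omni.foo.tests", 0.25, all_test_ids)
#   tests_to_run = [t for t in all_test_ids if t not in to_skip]
#
# With a sampling factor of 0.25 roughly 75% of the previously known, not consecutively
# failing tests land in the skip list; new and consecutively failing tests are never skipped.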
| 7,260 | Python | 37.015707 | 121 | 0.52865 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/test_populators.py | """Support for the population of a test list from various configurable sources"""
from __future__ import annotations
import abc
import unittest
from .ext_utils import find_disabled_tests
from .unittests import get_tests
__all__ = [
"DEFAULT_POPULATOR_NAME",
"TestPopulator",
"TestPopulateAll",
"TestPopulateDisabled",
]
# The name of the default populator, implemented with TestPopulateAll
DEFAULT_POPULATOR_NAME = "All Tests"
# ==============================================================================================================
class TestPopulator(abc.ABC):
"""Base class for the objects used to populate the initial list of tests, before filtering."""
def __init__(self, name: str, description: str):
"""Set up the populator with the important information it needs for getting tests from some location
Args:
name: Name of the populator, which can be used for a menu
description: Verbose description of the populator, which can be used for the tooltip of the menu item
"""
self.name: str = name
self.description: str = description
self.tests: list[unittest.TestCase] = [] # Remembers the tests it retrieves for later use
# --------------------------------------------------------------------------------------------------------------
def destroy(self):
"""Opportunity to clean up any allocated resources"""
pass
# --------------------------------------------------------------------------------------------------------------
@abc.abstractmethod
def get_tests(self, call_when_done: callable):
"""Populate the internal list of raw tests and then call the provided function when it has been done.
The callable takes one optional boolean 'canceled' that is only True if the test retrieval was not done.
"""
# ==============================================================================================================
class TestPopulateAll(TestPopulator):
"""Implementation of the TestPopulator that returns a list of all tests known to Kit"""
def __init__(self):
super().__init__(
DEFAULT_POPULATOR_NAME,
"Use all of the tests in currently enabled extensions that pass the filters",
)
def get_tests(self, call_when_done: callable):
self.tests = get_tests()
call_when_done()
# ==============================================================================================================
class TestPopulateDisabled(TestPopulator):
"""Implementation of the TestPopulator that returns a list of all tests disabled by their extension.toml file"""
def __init__(self):
super().__init__(
"Disabled Tests",
"Use all tests from enabled extensions whose extension.toml flags them as disabled",
)
def get_tests(self, call_when_done: callable):
self.tests = find_disabled_tests()
call_when_done()
| 3,004 | Python | 39.608108 | 116 | 0.542943 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/crash_process.py | def _error(stream, msg):
stream.write(f"[error] [{__file__}] {msg}\n")
def _crash_process_win(pid):
# fmt: off
import ctypes
POINTER = ctypes.POINTER
LPVOID = ctypes.c_void_p
PVOID = LPVOID
HANDLE = LPVOID
PHANDLE = POINTER(HANDLE)
ULONG = ctypes.c_ulong
SIZE_T = ctypes.c_size_t
LONG = ctypes.c_long
NTSTATUS = LONG
DWORD = ctypes.c_uint32
ACCESS_MASK = DWORD
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
BOOL = ctypes.c_int
byref = ctypes.byref
long = int
WAIT_TIMEOUT = 0x102
WAIT_FAILED = 0xFFFFFFFF
WAIT_OBJECT_0 = 0
STANDARD_RIGHTS_ALL = long(0x001F0000)
SPECIFIC_RIGHTS_ALL = long(0x0000FFFF)
SYNCHRONIZE = long(0x00100000)
STANDARD_RIGHTS_REQUIRED = long(0x000F0000)
PROCESS_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF)
THREAD_CREATE_FLAGS_SKIP_THREAD_ATTACH = long(0x00000002)
def NT_SUCCESS(x): return x >= 0
windll = ctypes.windll
# HANDLE WINAPI OpenProcess(
# IN DWORD dwDesiredAccess,
# IN BOOL bInheritHandle,
# IN DWORD dwProcessId
# );
_OpenProcess = windll.kernel32.OpenProcess
_OpenProcess.argtypes = [DWORD, BOOL, DWORD]
_OpenProcess.restype = HANDLE
# NTSTATUS NtCreateThreadEx(
# OUT PHANDLE hThread,
# IN ACCESS_MASK DesiredAccess,
# IN PVOID ObjectAttributes,
# IN HANDLE ProcessHandle,
# IN PVOID lpStartAddress,
# IN PVOID lpParameter,
# IN ULONG Flags,
# IN SIZE_T StackZeroBits,
# IN SIZE_T SizeOfStackCommit,
# IN SIZE_T SizeOfStackReserve,
# OUT PVOID lpBytesBuffer
# );
_NtCreateThreadEx = windll.ntdll.NtCreateThreadEx
_NtCreateThreadEx.argtypes = [PHANDLE, ACCESS_MASK, PVOID, HANDLE, PVOID, PVOID, ULONG, SIZE_T, SIZE_T, SIZE_T, PVOID]
_NtCreateThreadEx.restype = NTSTATUS
# DWORD WINAPI WaitForSingleObject(
# HANDLE hHandle,
# DWORD dwMilliseconds
# );
_WaitForSingleObject = windll.kernel32.WaitForSingleObject
_WaitForSingleObject.argtypes = [HANDLE, DWORD]
_WaitForSingleObject.restype = DWORD
hProcess = _OpenProcess(
PROCESS_ALL_ACCESS,
0, # bInheritHandle
pid
)
if not hProcess:
raise ctypes.WinError()
# this injects a new thread into the process running the test code. this thread starts executing at address 0,
# causing a crash.
#
# alternatives considered:
#
# DebugBreakProcess(): in order for DebugBreakProcess() to send the breakpoint, a debugger must be attached. this
# can be accomplished with DebugActiveProcess()/WaitForDebugEvent()/ContinueDebugEvent(). unfortunately, when a
# debugger is attached, UnhandledExceptionFilter() is ignored. UnhandledExceptionFilter() is where the test process
# runs the crash dump code.
#
# CreateRemoteThread(): this approach does not work if the target process is stuck waiting for the loader lock.
#
# the solution below uses NtCreateThreadEx to create the faulting thread in the test process. unlike
# CreateRemoteThread(), NtCreateThreadEx accepts the THREAD_CREATE_FLAGS_SKIP_THREAD_ATTACH flag which skips
# THREAD_ATTACH in DllMain thereby avoiding the loader lock.
hThread = HANDLE(INVALID_HANDLE_VALUE)
status = _NtCreateThreadEx(
byref(hThread),
(STANDARD_RIGHTS_ALL | SPECIFIC_RIGHTS_ALL),
0, # ObjectAttributes
hProcess,
0, # lpStartAddress (calls into null causing a crash)
0, # lpParameter
THREAD_CREATE_FLAGS_SKIP_THREAD_ATTACH,
0, # StackZeroBits
0, # StackZeroBits (must be 0 to crash)
0, # SizeOfStackReserve
0, # lpBytesBuffer
)
if not NT_SUCCESS(status):
raise OSError(None, "NtCreateThreadEx failed", None, status)
waitTimeMs = 30 * 1000
status = _WaitForSingleObject(hProcess, waitTimeMs)
if status == WAIT_TIMEOUT:
raise TimeoutError("timed out while waiting for target process to exit")
elif status == WAIT_FAILED:
raise ctypes.WinError()
elif status != WAIT_OBJECT_0:
raise OSError(None, "failed to wait for target process to exit", None, status)
# fmt: on
def crash_process(process, stream):
"""
Triggers a crash dump in the test process, terminating the process.
Returns True if the test process was terminated, False if the process is still running.
"""
import os
assert process
pid = process.pid
if os.name == "nt":
try:
_crash_process_win(pid)
except Exception as e:
_error(stream, f"Failed crashing timed out process: {pid}. Error: {e}")
else:
import signal
try:
process.send_signal(signal.SIGABRT)
process.wait(timeout=30) # seconds
except Exception as e:
_error(stream, f"Failed crashing timed out process: {pid}. Error: {e}")
return not process.is_running()
| 5,281 | Python | 34.449664 | 122 | 0.625071 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/utils.py | import glob
import hashlib
import os
import shutil
import sys
from datetime import datetime
from functools import lru_cache
from pathlib import Path
from typing import List, Tuple
import carb
import carb.settings
import carb.tokens
import omni.ext
from .gitlab import is_running_in_gitlab
from .teamcity import is_running_in_teamcity
_settings_iface = None
def get_setting(path, default=None):
global _settings_iface
if not _settings_iface:
_settings_iface = carb.settings.get_settings()
setting = _settings_iface.get(path)
return setting if setting is not None else default
def get_local_timestamp():
return (
# ':' is not path-friendly on windows
datetime.now()
.isoformat(timespec="seconds")
.replace(":", "-")
)
@lru_cache()
def _split_argv() -> Tuple[List[str], List[str]]:
"""Return list of argv before `--` and after (processed and unprocessed)"""
try:
index = sys.argv.index("--")
return list(sys.argv[:index]), list(sys.argv[index + 1 :])
except ValueError:
return list(sys.argv), []
def get_argv() -> List[str]:
return _split_argv()[0]
def get_unprocessed_argv() -> List[str]:
return _split_argv()[1]
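# Illustrative sketch (assumed argv, not from the original source):
#
#   # sys.argv == ["kit.exe", "--enable", "omni.kit.test", "--", "custom-arg"]
#   get_argv()              # -> ["kit.exe", "--enable", "omni.kit.test"]
#   get_unprocessed_argv()  # -> ["custom-arg"]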
def resolve_path(path, root) -> str:
path = carb.tokens.get_tokens_interface().resolve(path)
if not os.path.isabs(path):
path = os.path.join(root, path)
return os.path.normpath(path)
@lru_cache()
def _get_passed_test_output_path():
return get_setting("/exts/omni.kit.test/testOutputPath", default=None)
@lru_cache()
def get_global_test_output_path():
"""Get global extension test output path. It is shared for all extensions."""
# If inside test process, we have testoutput for actual extension, just go one folder up:
output_path = _get_passed_test_output_path()
if output_path:
return os.path.abspath(os.path.join(output_path, ".."))
# If inside ext test runner process, use setting:
output_path = carb.tokens.get_tokens_interface().resolve(
get_setting("/exts/omni.kit.test/testExtOutputPath", default="")
)
output_path = os.path.abspath(output_path)
return output_path
@lru_cache()
def get_test_output_path():
"""Get local extension test output path. It is unique for each extension test process."""
output_path = _get_passed_test_output_path()
# If not passed we are probably not inside a test process, default to global
if not output_path:
return get_global_test_output_path()
output_path = os.path.abspath(carb.tokens.get_tokens_interface().resolve(output_path))
return output_path
@lru_cache()
def get_ext_test_id() -> str:
return str(get_setting("/exts/omni.kit.test/extTestId", default=""))
def cleanup_folder(path):
try:
for p in glob.glob(f"{path}/*"):
if os.path.isdir(p):
if omni.ext.is_link(p):
omni.ext.destroy_link(p)
else:
shutil.rmtree(p)
else:
os.remove(p)
except Exception as exc: # pylint: disable=broad-except
carb.log_warn(f"Unable to clean up files: {path}: {exc}")
def ext_id_to_fullname(ext_id: str) -> str:
return omni.ext.get_extension_name(ext_id)
def clamp(value, min_value, max_value):
return max(min(value, max_value), min_value)
@lru_cache()
def is_running_on_ci():
return is_running_in_teamcity() or is_running_in_gitlab()
def call_git(args, cwd=None):
import subprocess
cmd = ["git"] + args
carb.log_verbose("run process: {}".format(cmd))
try:
res = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True)
if res.returncode != 0:
carb.log_warn(f"Error running process: {cmd}. Result: {res}. Stderr: {res.stderr}")
return res
except FileNotFoundError:
carb.log_warn("Failed calling git")
except PermissionError:
carb.log_warn("No permission to execute git")
def _hash_file_impl(path, hash, as_text):
mode = "r" if as_text else "rb"
encoding = "utf-8" if as_text else None
with open(path, mode, encoding=encoding) as f:
while True:
data = f.readline().encode("utf-8") if as_text else f.read(65536)
if not data:
break
hash.update(data)
def hash_file(path, hash):
# Try as text first, to avoid CRLF/LF mismatch on both platforms
try:
return _hash_file_impl(path, hash, as_text=True)
except UnicodeDecodeError:
return _hash_file_impl(path, hash, as_text=False)
def sha1_path(path, hash_length=16) -> str:
exclude_files = ["extension.gen.toml"]
hash = hashlib.sha1()
if os.path.isfile(path):
hash_file(path, hash)
else:
for p in glob.glob(f"{path}/**", recursive=True):
if not os.path.isfile(p) or os.path.basename(p) in exclude_files:
continue
hash_file(p, hash)
return hash.hexdigest()[:hash_length]
def sha1_list(strings: List[str], hash_length=16) -> str:
hash = hashlib.sha1()
for s in strings:
hash.update(s.encode("utf-8"))
return hash.hexdigest()[:hash_length]
| 5,195 | Python | 27.23913 | 95 | 0.635226 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/test_reporters.py | from enum import Enum
from typing import Callable, Any
class TestRunStatus(Enum):
UNKNOWN = 0
RUNNING = 1
PASSED = 2
FAILED = 3
_callbacks = []
def add_test_status_report_cb(callback: Callable[[str, TestRunStatus, Any], None]):
"""Add callback to be called when tests start, fail, pass."""
global _callbacks
_callbacks.append(callback)
def remove_test_status_report_cb(callback: Callable[[str, TestRunStatus, Any], None]):
"""Remove callback to be called when tests start, fail, pass."""
global _callbacks
_callbacks.remove(callback)
def _test_status_report(test_id: str, status: TestRunStatus, **kwargs):
for cb in _callbacks:
cb(test_id, status, **kwargs)
| 720 | Python | 23.033333 | 86 | 0.679167 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/async_unittest.py | """Async version of python unittest module.
AsyncTestCase, AsyncTestSuite and AsyncTextTestRunner classes were copied from python unittest source and async/await
keywords were added.
There are two ways of registering tests, which must all be in the 'tests' submodule of your python module.
1. 'from X import *' from every file containing tests
2. Add the line 'scan_for_test_modules = True' in your __init__.py file to pick up tests in every file starting
with 'test_'
"""
import asyncio
import time
import unittest
import warnings
from unittest.case import _Outcome
import carb
import omni.kit.app
from .reporter import TestReporter
from .test_reporters import TestRunStatus
from .utils import get_ext_test_id, is_running_on_ci
KEY_FAILING_TESTS = "Failing tests"
STARTED_UNITTEST = "started "
async def await_or_call(func):
"""
Awaits on function if it is a coroutine, calls it otherwise.
"""
if asyncio.iscoroutinefunction(func):
await func()
else:
func()
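# Illustrative sketch (hypothetical callables, not from the original source): await_or_call()
# lets setUp/tearDown/test methods be either plain functions or coroutines.
#
#   async def async_setup(): ...
#   def sync_setup(): ...
#
#   await await_or_call(async_setup)  # awaited
#   await await_or_call(sync_setup)   # called directly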
class LogErrorChecker:
"""Automatically subscribes to logging events and monitors if error were produced during the test."""
def __init__(self):
# Setup this test case to fail if any error is produced
self._error_count = 0
def on_log_event(e):
if e.payload["level"] >= carb.logging.LEVEL_ERROR:
self._error_count = self._error_count + 1
self._log_stream = omni.kit.app.get_app().get_log_event_stream()
self._log_sub = self._log_stream.create_subscription_to_pop(on_log_event, name="test log event")
def shutdown(self):
self._log_stream = None
self._log_sub = None
def get_error_count(self):
self._log_stream.pump()
return self._error_count
class AsyncTestCase(unittest.TestCase):
"""Base class for all async test cases.
Derive from it to make your tests auto discoverable. Test methods must start with `test_` prefix.
Test cases allow for generation and/or adaptation of tests at runtime. See testing_exts_python.md for more details.
"""
# If true test will check for Carbonite logging messages and fail if any error level or higher was produced during the test.
fail_on_log_error = False
async def run(self, result=None):
# Log error checker
self._log_error_checker = None
if self.fail_on_log_error:
carb.log_warn(
"[DEPRECATION WARNING] `AsyncTestCaseFailOnLogError` is deprecated. Replace with `AsyncTestCase`. Errors are captured from stdout by an external test runner process now."
)
# Make sure log buffer pumped:
await omni.kit.app.get_app().next_update_async()
self._log_error_checker = LogErrorChecker()
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, "startTestRun", None)
if startTestRun is not None:
startTestRun()
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if getattr(self.__class__, "__unittest_skip__", False) or getattr(testMethod, "__unittest_skip__", False):
# If the class or method was skipped.
try:
skip_why = getattr(self.__class__, "__unittest_skip_why__", "") or getattr(
testMethod, "__unittest_skip_why__", ""
)
self._addSkip(result, self, skip_why)
finally:
result.stopTest(self)
return
expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
expecting_failure = expecting_failure_class or expecting_failure_method
outcome = _Outcome(result)
try:
self._outcome = outcome
with outcome.testPartExecutor(self):
await await_or_call(self.setUp)
if outcome.success:
outcome.expecting_failure = expecting_failure
with outcome.testPartExecutor(self, isTest=True):
await await_or_call(testMethod)
outcome.expecting_failure = False
with outcome.testPartExecutor(self):
await await_or_call(self.tearDown)
# Log error checks
if self._log_error_checker:
await omni.kit.app.get_app().next_update_async()
error_count = self._log_error_checker.get_error_count()
if error_count > 0:
self.fail(f"Test failure because of {error_count} error message(s) logged during it.")
self.doCleanups()
for test, reason in outcome.skipped:
self._addSkip(result, test, reason)
self._feedErrorsToResult(result, outcome.errors)
if outcome.success:
if expecting_failure:
if outcome.expectedFailure:
self._addExpectedFailure(result, outcome.expectedFailure)
else:
self._addUnexpectedSuccess(result)
else:
result.addSuccess(self)
return result
finally:
if self._log_error_checker:
self._log_error_checker.shutdown()
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, "stopTestRun", None)
if stopTestRun is not None:
stopTestRun()
# explicitly break reference cycles:
# outcome.errors -> frame -> outcome -> outcome.errors
# outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
outcome.errors.clear()
outcome.expectedFailure = None
# clear the outcome, no more needed
self._outcome = None
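# Minimal usage sketch (hypothetical test module, not part of the original source).
# A file such as tests/test_example.py inside an extension could contain:
#
#   import omni.kit.app
#   import omni.kit.test
#
#   class MyExampleTest(omni.kit.test.AsyncTestCase):
#       async def test_waits_one_frame(self):
#           await omni.kit.app.get_app().next_update_async()
#           self.assertTrue(True)
#
# Test methods must start with "test_"; both async and regular methods work because
# run() dispatches through await_or_call() above.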
class AsyncTestCaseFailOnLogError(AsyncTestCase):
"""Test Case which automatically subscribes to logging events and fails if any error were produced during the test.
This class is for backward compatibility, you can also just change value of `fail_on_log_error`.
"""
# Enable failure on error
fail_on_log_error = True
class OmniTestResult(unittest.TextTestResult):
def __init__(self, stream, descriptions, verbosity):
# If we are running under CI we will use default unittest reporter with higher verbosity.
if not is_running_on_ci():
verbosity = 2
super(OmniTestResult, self).__init__(stream, descriptions, verbosity)
self.reporter = TestReporter(stream)
self.on_status_report_fn = None
def _report_status(self, *args, **kwargs):
if self.on_status_report_fn:
self.on_status_report_fn(*args, **kwargs)
@staticmethod
def get_tc_test_id(test):
if isinstance(test, str):
return test
# Use dash as a clear visual separator of 3 parts:
test_id = "%s - %s - %s" % (test.__class__.__module__, test.__class__.__qualname__, test._testMethodName)
# Dots have special meaning in TC, replace with /
test_id = test_id.replace(".", "/")
ext_test_id = get_ext_test_id()
if ext_test_id:
# In the context of extension test it has own test id. Convert to TC form by getting rid of dots.
ext_test_id = ext_test_id.replace(".", "+")
test_id = f"{ext_test_id}.{test_id}"
return test_id
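# Illustrative example (hypothetical names, not from the original source): for test method
# test_bar on class MyTest in module omni.foo.tests.test_foo, running under the extension
# test id "omni.foo", get_tc_test_id() yields:
#
#   "omni+foo.omni/foo/tests/test_foo - MyTest - test_bar"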
def addSuccess(self, test):
super(OmniTestResult, self).addSuccess(test)
def addError(self, test, err, *k):
super(OmniTestResult, self).addError(test, err)
fail_message = self._get_error_message(test, "Error", self.errors)
self.report_fail(test, "Error", err, fail_message)
def addFailure(self, test, err, *k):
super(OmniTestResult, self).addFailure(test, err)
fail_message = self._get_error_message(test, "Fail", self.failures)
self.report_fail(test, "Failure", err, fail_message)
def report_fail(self, test, fail_type: str, err, fail_message: str):
tc_test_id = self.get_tc_test_id(test)
test_id = test.id()
# pass the failing test info back to the ext testing framework in parent proc
self.stream.write(f"##omni.kit.test[append, {KEY_FAILING_TESTS}, {test_id}]\n")
self.reporter.unittest_fail(test_id, tc_test_id, fail_type, fail_message)
self._report_status(test_id, TestRunStatus.FAILED, fail_message=fail_message)
def _get_error_message(self, test, fail_type: str, errors: list) -> str:
# In python/Lib/unittest/result.py the failures are reported with _exc_info_to_string() that is private.
# To get the same result we grab the latest errors/failures from `self.errors[-1]` or `self.failures[-1]`
# In python/Lib/unittest/runner.py from the `printErrorList` function we also copied the logic here.
exc_info = errors[-1][1] if errors[-1] else ""
error_msg = []
error_msg.append(self.separator1)
error_msg.append(f"{fail_type.upper()}: {self.getDescription(test)}")
error_msg.append(self.separator2)
error_msg.append(exc_info)
return "\n".join(error_msg)
def startTest(self, test):
super(OmniTestResult, self).startTest(test)
tc_test_id = self.get_tc_test_id(test)
test_id = test.id()
self.stream.write("\n")
# python tests can start but never finish (crash, time out, etc)
# track it from the parent proc with a pragma message (see _extract_metadata_pragma in exttests.py)
self.stream.write(f"##omni.kit.test[set, {test_id}, {STARTED_UNITTEST}{tc_test_id}]\n")
self.reporter.unittest_start(test_id, tc_test_id, captureStandardOutput="true")
self._report_status(test_id, TestRunStatus.RUNNING)
def stopTest(self, test):
super(OmniTestResult, self).stopTest(test)
tc_test_id = self.get_tc_test_id(test)
test_id = test.id()
# test finished, delete it from the metadata
self.stream.write(f"##omni.kit.test[del, {test_id}]\n")
# test._outcome is None when test is skipped using decorator.
# When skipped using self.skipTest() it contains list of skipped test cases
skipped = test._outcome is None or bool(test._outcome.skipped)
# self.skipped last index contains the current skipped test, name is at index 0, reason at index 1
skip_reason = self.skipped[-1][1] if skipped and self.skipped else ""
# skipped tests are marked as "passed" not to confuse reporting down the line
passed = test._outcome.success if test._outcome and not skipped else True
self.reporter.unittest_stop(test_id, tc_test_id, passed=passed, skipped=skipped, skip_reason=skip_reason)
if passed:
self._report_status(test_id, TestRunStatus.PASSED)
class TeamcityTestResult(OmniTestResult):
def __init__(self, stream, descriptions, verbosity):
carb.log_warn("[DEPRECATION WARNING] `TeamcityTestResult` is deprecated. Replace with `OmniTestResult`.")
super(TeamcityTestResult, self).__init__(stream, descriptions, verbosity)
class AsyncTextTestRunner(unittest.TextTestRunner):
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
async def run(self, test, on_status_report_fn=None):
"Run the given test case or test suite."
result = self._makeResult()
unittest.signals.registerResult(result)
result.failfast = self.failfast
result.buffer = self.buffer
result.tb_locals = self.tb_locals
result.on_status_report_fn = on_status_report_fn
with warnings.catch_warnings():
if self.warnings:
# if self.warnings is set, use it to filter all the warnings
warnings.simplefilter(self.warnings)
# if the filter is 'default' or 'always', special-case the
# warnings from the deprecated unittest methods to show them
# no more than once per module, because they can be fairly
# noisy. The -Wd and -Wa flags can be used to bypass this
# only when self.warnings is None.
if self.warnings in ["default", "always"]:
warnings.filterwarnings(
"module", category=DeprecationWarning, message=r"Please use assert\w+ instead."
)
startTime = time.time()
startTestRun = getattr(result, "startTestRun", None)
if startTestRun is not None:
startTestRun()
try:
await test(result)
finally:
stopTestRun = getattr(result, "stopTestRun", None)
if stopTestRun is not None:
stopTestRun()
stopTime = time.time()
timeTaken = stopTime - startTime
result.printErrors()
if hasattr(result, "separator2"):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" % (run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
expectedFails = unexpectedSuccesses = skipped = 0
try:
results = map(len, (result.expectedFailures, result.unexpectedSuccesses, result.skipped))
except AttributeError:
pass
else:
expectedFails, unexpectedSuccesses, skipped = results
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = len(result.failures), len(result.errors)
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expectedFails:
infos.append("expected failures=%d" % expectedFails)
if unexpectedSuccesses:
infos.append("unexpected successes=%d" % unexpectedSuccesses)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
return result
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class AsyncTestSuite(unittest.TestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
async def run(self, result, debug=False):
topLevel = False
if getattr(result, "_testRunEntered", False) is False:
result._testRunEntered = topLevel = True
for index, test in enumerate(self):
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if getattr(test.__class__, "_classSetupFailed", False) or getattr(result, "_moduleSetUpFailed", False):
continue
if not debug:
await test(result)
else:
await test.debug()
if self._cleanup:
self._removeTestAtIndex(index)
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
| 16,411 | Python | 39.927681 | 186 | 0.613796 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/reporter.py | import fnmatch
import glob
import json
import os
import platform
import shutil
import sys
import time
import xml.etree.ElementTree as ET
from collections import defaultdict
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from functools import lru_cache
from pathlib import Path
from typing import Dict, List, Optional
import carb
import carb.settings
import carb.tokens
import psutil
from .nvdf import post_coverage_to_nvdf, post_to_nvdf
from .teamcity import teamcity_message, teamcity_publish_artifact, teamcity_status
from .test_coverage import generate_coverage_report
from .utils import (
ext_id_to_fullname,
get_ext_test_id,
get_global_test_output_path,
get_setting,
get_test_output_path,
is_running_on_ci,
)
CURRENT_PATH = Path(__file__).parent
HTML_PATH = CURRENT_PATH.parent.parent.parent.joinpath("html")
REPORT_FILENAME = "report.jsonl"
RESULTS_FILENAME = "results.xml"
@lru_cache()
def get_report_filepath():
return os.path.join(get_test_output_path(), REPORT_FILENAME)
@lru_cache()
def get_results_filepath():
return os.path.join(get_test_output_path(), RESULTS_FILENAME)
def _load_report_data(report_path):
data = []
with open(report_path, "r") as f:
for line in f:
data.append(json.loads(line))
return data
def _get_tc_test_id(test_id):
return test_id.replace(".", "+")
class TestReporter:
"""Combines TC reports to stdout and JSON lines report to a file"""
def __init__(self, stream=sys.stdout):
self._stream = stream
self._timers = {}
self._report_filepath = get_report_filepath()
self.unreliable_tests = get_setting("/exts/omni.kit.test/unreliableTests", default=[])
self.parallel_run = get_setting("/exts/omni.kit.test/parallelRun", default=False)
def _get_duration(self, test_id: str) -> float:
try:
duration = round(time.time() - self._timers.pop(test_id), 3)
except KeyError:
duration = 0.0
return duration
def _is_unreliable(self, test_id):
return any(fnmatch.fnmatch(test_id, p) for p in self.unreliable_tests)
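# Illustrative sketch (hypothetical setting value, not from the original source): the
# /exts/omni.kit.test/unreliableTests setting holds fnmatch-style patterns, e.g.
#
#   unreliableTests = ["omni.foo.*", "*test_flaky*"]
#
# so a test id such as "omni.foo.tests.test_bar" would be flagged as unreliable.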
def set_output_path(self, output_path: str):
self._report_filepath = os.path.join(output_path, REPORT_FILENAME)
def _write_report(self, data: dict):
if self._report_filepath:
with open(self._report_filepath, "a") as f:
f.write(json.dumps(data))
f.write("\n")
def unittest_start(self, test_id, tc_test_id, captureStandardOutput="false"):
teamcity_message(
"testStarted", stream=self._stream, name=tc_test_id, captureStandardOutput=captureStandardOutput
)
self._timers[test_id] = time.time()
self._write_report(
{
"event": "start",
"test_type": "unittest",
"test_id": test_id,
"ext_test_id": get_ext_test_id(),
"unreliable": self._is_unreliable(test_id),
"parallel_run": self.parallel_run,
"start_time": time.time(),
}
)
def unittest_stop(self, test_id, tc_test_id, passed=False, skipped=False, skip_reason=""):
if skipped:
teamcity_message("testIgnored", stream=self._stream, name=tc_test_id, message=skip_reason)
teamcity_message("testFinished", stream=self._stream, name=tc_test_id)
self._write_report(
{
"event": "stop",
"test_type": "unittest",
"test_id": test_id,
"ext_test_id": get_ext_test_id(),
"passed": passed,
"skipped": skipped,
"skip_reason": skip_reason,
"stop_time": time.time(),
"duration": self._get_duration(test_id),
}
)
def unittest_fail(self, test_id, tc_test_id, fail_type: str, fail_message: str):
teamcity_message("testFailed", stream=self._stream, name=tc_test_id, fail_type=fail_type, message=fail_message)
self._write_report(
{
"event": "fail",
"test_type": "unittest",
"test_id": test_id,
"ext_test_id": get_ext_test_id(),
"fail_type": fail_type,
"message": fail_message,
}
)
def exttest_start(self, test_id, tc_test_id, ext_id, ext_name, captureStandardOutput="false", report=True):
teamcity_message(
"testStarted", stream=self._stream, name=tc_test_id, captureStandardOutput=captureStandardOutput
)
if report:
self._timers[test_id] = time.time()
self._write_report(
{
"event": "start",
"test_type": "exttest",
"test_id": test_id,
"ext_id": ext_id,
"ext_name": ext_name,
"start_time": time.time(),
}
)
def exttest_stop(self, test_id, tc_test_id, passed=False, skipped=False, report=True):
if skipped:
teamcity_message("testIgnored", stream=self._stream, name=tc_test_id, message="skipped")
teamcity_message("testFinished", stream=self._stream, name=tc_test_id)
if report:
self._write_report(
{
"event": "stop",
"test_type": "exttest",
"test_id": test_id,
"passed": passed,
"skipped": skipped,
"stop_time": time.time(),
"duration": self._get_duration(test_id),
}
)
def exttest_fail(self, test_id, tc_test_id, fail_type: str, fail_message: str):
teamcity_message("testFailed", stream=self._stream, name=tc_test_id, fail_type=fail_type, message=fail_message)
self._write_report(
{
"event": "fail",
"test_type": "exttest",
"test_id": test_id,
"fail_type": fail_type,
"message": fail_message,
}
)
def report_result(self, test):
"""Write tests results data we want to later show on the html report and in elastic"""
res = defaultdict(dict)
res["config"] = test.config
res["retries"] = test.retries
res["timeout"] = test.timeout if test.timeout else 0
ext_info = test.ext_info
ext_dict = ext_info.get_dict()
res["state"]["enabled"] = ext_dict.get("state", {}).get("enabled", False)
res["package"]["version"] = ext_dict.get("package", {}).get("version", "")
res.update(vars(test.result))
change = {}
if test.change_analyzer_result:
change["skip"] = test.change_analyzer_result.should_skip_test
change["startup_sequence_hash"] = test.change_analyzer_result.startup_sequence_hash
change["tested_ext_hash"] = test.change_analyzer_result.tested_ext_hash
change["kernel_version"] = test.change_analyzer_result.kernel_version
self._write_report(
{
"event": "result",
"test_type": "exttest",
"test_id": test.test_id,
"ext_id": test.ext_id,
"ext_name": test.ext_name,
"test_bucket": test.bucket_name,
"unreliable": test.config.get("unreliable", False),
"parallel_run": get_setting("/exts/omni.kit.test/parallelRun", default=False),
"change_analyzer": change,
"result": res,
}
)
# TODO: this function should be rewritten to avoid any guessing
def _get_extension_name(path: str, ext_id_to_name: dict):
# if ext_id is in the path return that extension name
for k, v in ext_id_to_name.items():
if k in path:
return v
p = Path(path)
for i, e in enumerate(p.parts):
if e == "exts" or e == "extscore":
if p.parts[i + 1][0:1].isdigit():
return ext_id_to_fullname(p.parts[i + 2])
else:
return p.parts[i + 1]
elif e == "extscache" or e == "extsPhysics":
# exts from cache will be named like this: omni.ramp-103.0.10+103.1.wx64.r.cp37
# exts from physics will be named like this: omni.physx-1.5.0-5.1
return ext_id_to_fullname(p.parts[i + 1])
elif e == "extensions":
# on linux we'll have paths from source/extensions/<ext_name>
return p.parts[i + 1]
carb.log_warn(f"Could not get extension name for {path}")
return "_unsorted"
class ExtCoverage:
def __init__(self):
self.ext_id: str = ""
self.ext_name: str = ""
self.covered_lines = []
self.num_statements = []
self.test_result = {}
def mean_cov(self):
statements = self.sum_statements()
if statements == 0:
return 0
return (self.sum_covered_lines() / statements) * 100.0
def sum_covered_lines(self):
return sum(self.covered_lines)
def sum_statements(self):
return sum(self.num_statements)
# Note that the combined coverage data will 'merge' (or 'lose') the test config because the coverage is reported
# at the filename level. For example an extension with 2 configs, omni.kit.renderer.core [default, compatibility]
# will produce 2 .pycov files, but in the combined report (json) it will be merged per source file, so there is no way
# to know what the coverage was for default vs compatibility; we get the coverage for all of omni.kit.renderer.core tests
def _build_ext_coverage(coverage_data: dict, ext_id_to_name: dict) -> Dict[str, ExtCoverage]:
exts = defaultdict(ExtCoverage)
for file, info in coverage_data["files"].items():
ext_name = _get_extension_name(file, ext_id_to_name)
exts[ext_name].ext_name = ext_name
exts[ext_name].covered_lines.append(info["summary"]["covered_lines"])
exts[ext_name].num_statements.append(info["summary"]["num_statements"])
return exts
def _report_unreliable_tests(report_data):
# Dummy tests to group all "unreliable" tests and report (convenience for TC UI)
unreliable_failed = [r for r in report_data if r["event"] == "result" and r["result"]["unreliable_fail"] == 1]
reporter = TestReporter()
total = len(unreliable_failed)
if total > 0:
dummy_test_id = "UNRELIABLE_TESTS"
summary = ""
for r in unreliable_failed:
test_result = r["result"]
summary += " [{0:5.1f}s] {1} (Count: {2})\n".format(
test_result["duration"], r["test_id"], test_result["test_count"]
)
reporter.unittest_start(dummy_test_id, dummy_test_id)
message = f"There are {total} tests that fail, but marked as unreliable:\n{summary}"
reporter.unittest_fail(dummy_test_id, dummy_test_id, "Error", message)
print(message)
reporter.unittest_stop(dummy_test_id, dummy_test_id)
def _build_test_data_html(report_data):
    # consider retries: start, fail, start, (nothing) -> success
    # for each fail, look back and mark the started test so the code below knows ahead of time whether the test will pass.
started_tests = {}
for e in report_data:
if e["event"] == "start":
started_tests[e["test_id"]] = e
elif e["event"] == "fail":
started_tests[e["test_id"]]["will_pass"] = False
results = {item["test_id"]: item["result"] for item in report_data if item["event"] == "result"}
RESULT_EMOJI = {True: "✅", False: "❌"}
COLOR_CLASS = {True: "add-green-color", False: "add-red-color"}
unreliable = False
html_data = '<ul class="test_list">\n'
depth = 0
for e in report_data:
if e["event"] == "start":
# reset depth if needed (missing stop event)
if e["test_type"] == "exttest" and depth > 0:
depth -= 1
while depth > 0:
html_data += "</ul>\n"
depth -= 1
depth += 1
if depth > 1:
html_data += "<ul>\n"
test_id = e["test_id"]
passed = e.get("will_pass", True)
extra = ""
attr = ""
# Root test ([[test]] entry)
# Reset unreliable marker
if depth == 1:
unreliable = False
# Get more stats about the whole [[test]] run
if test_id in results:
test_result = results[test_id]
extra += " [{0:5.1f}s]".format(test_result["duration"])
unreliable = bool(test_result["unreliable"])
passed = test_result["passed"]
style_class = COLOR_CLASS[passed]
if unreliable:
extra += " <b>[unreliable]</b>"
style_class = "add-yellow-color unreliable"
html_data += '<li class="{0}" {4}>{3} {1} {2}</li>\n'.format(
style_class, extra, test_id, RESULT_EMOJI[passed], attr
)
if e["event"] == "stop":
depth -= 1
if depth > 0:
html_data += "</ul>\n"
html_data += "</ul>\n"
return html_data
def _post_build_status(report_data: list):
exts = {item["ext_id"] for item in report_data if item["event"] == "result"}
# there could be retry events, so only count unique tests:
tests_started = {item["test_id"] for item in report_data if item["event"] == "start"}
tests_passed = {item["test_id"] for item in report_data if item["event"] == "stop" and item["passed"]}
total_count = len(tests_started)
fail_count = total_count - len(tests_passed)
if fail_count:
status = "failure"
text = f"{fail_count} tests failed out of {total_count}"
else:
status = "success"
text = f"All {total_count} tests passed"
text += " (extensions tested: {}).".format(len(exts))
teamcity_status(text=text, status=status)
def _calculate_durations(report_data: list):
"""
Calculate startup time of each extension and time taken by each individual test
We count the time between the extension start_time to the start_time of the first test
"""
ext_startup_time = {}
ext_startup_time_found = {}
ext_tests_time = {}
for d in report_data:
test_id = d["test_id"]
test_type = d["test_type"]
ext_test_id = d.get("ext_test_id", None)
if d["event"] == "start":
if test_type == "exttest":
if not ext_startup_time_found.get(test_id):
start_time = d["start_time"]
ext_startup_time[test_id] = start_time
else:
if not ext_startup_time_found.get(ext_test_id):
t = ext_startup_time.get(ext_test_id, 0.0)
ext_startup_time[ext_test_id] = round(d["start_time"] - t, 2)
ext_startup_time_found[ext_test_id] = True
elif d["event"] == "stop":
if test_type == "unittest":
t = ext_tests_time.get(ext_test_id, 0.0)
t += d.get("duration", 0.0)
ext_tests_time[ext_test_id] = t
elif d["event"] == "result":
test_result = d.get("result", None)
if test_result:
# it's possible an extension has no tests, so we set startup_duration = duration
if ext_startup_time_found.get(test_id, False) is True:
t = ext_startup_time.get(test_id, 0.0)
test_result["startup_duration"] = t
else:
test_result["startup_duration"] = test_result.get("duration", 0.0)
# update duration of all tests
test_result["tests_duration"] = ext_tests_time.get(test_id, 0.0)
# ratios
test_result["startup_ratio"] = 0.0
test_result["tests_ratio"] = 0.0
if test_result["tests_duration"] != 0.0:
test_result["startup_ratio"] = (test_result["startup_duration"] / test_result["duration"]) * 100.0
test_result["tests_ratio"] = (test_result["tests_duration"] / test_result["duration"]) * 100.0
def generate_report():
"""After running tests this function will generate html report / post to nvdf / publish artifacts"""
# at this point all kit processes should be finished
if is_running_on_ci():
_kill_kit_processes()
try:
print("\nGenerating a Test Report...")
_generate_report_internal()
except Exception as e:
import traceback
print(f"Exception while running generate_report(): {e}, callstack: {traceback.format_exc()}")
def _kill_kit_processes():
"""Kill all Kit processes except self"""
kit_process_name = carb.tokens.get_tokens_interface().resolve("${exe-filename}")
for proc in psutil.process_iter():
if proc.pid == os.getpid():
continue
try:
if proc.name() == kit_process_name:
carb.log_warn(
"Killing a Kit process that is still running:\n"
f" PID: {proc.pid}\n"
f" Command line: {proc.cmdline()}"
)
proc.terminate()
except psutil.AccessDenied as e:
carb.log_warn(f"Access denied: {e}")
except psutil.ZombieProcess as e:
carb.log_warn(f"Encountered a zombie process: {e}")
except psutil.NoSuchProcess as e:
carb.log_warn(f"Process no longer exists: {e}")
except (psutil.Error, Exception) as e:
carb.log_warn(f"An error occurred: {str(e)}")
def _generate_report_internal():
# Get Test report and publish it
report_data = []
# combine report from various test runs (each process has own file, for parallel run)
for report_file in glob.glob(get_global_test_output_path() + "/*/" + REPORT_FILENAME):
report_data.extend(_load_report_data(report_file))
if not report_data:
return
# generate combined file
combined_report_path = get_global_test_output_path() + "/report_combined.jsonl"
with open(combined_report_path, "w") as f:
f.write(json.dumps(report_data))
teamcity_publish_artifact(combined_report_path)
# TC Build status
_post_build_status(report_data)
# Dummy test report
_report_unreliable_tests(report_data)
# Prepare output path
output_path = get_global_test_output_path()
os.makedirs(output_path, exist_ok=True)
# calculate durations (startup, total, etc)
_calculate_durations(report_data)
# post to elasticsearch
post_to_nvdf(report_data)
# write junit xml
_write_junit_results(report_data)
# get coverage results and generate html report
merged_results, coverage_results = _load_coverage_results(report_data)
html = _generate_html_report(report_data, merged_results)
# post coverage results
post_coverage_to_nvdf(_get_coverage_for_nvdf(merged_results, coverage_results))
# write and publish html report
_write_html_report(html, output_path)
# publish all test output to TC in the end:
teamcity_publish_artifact(f"{output_path}/**/*")
def _load_coverage_results(report_data, read_coverage=True) -> tuple[dict, dict]:
# build a map of extension id to extension name
ext_id_to_name = {}
for item in report_data:
if item["event"] == "result":
ext_id_to_name[item["ext_id"]] = item["ext_name"]
# Get data coverage per extension (configs are merged)
coverage_results = defaultdict(ExtCoverage)
if read_coverage:
coverage_result = generate_coverage_report()
if coverage_result and coverage_result.json_path:
coverage_data = json.load(open(coverage_result.json_path))
coverage_results = _build_ext_coverage(coverage_data, ext_id_to_name)
# combine test results and coverage data, key is the test_id (separates extensions per config)
merged_results = defaultdict(ExtCoverage)
for item in report_data:
if item["event"] == "result":
test_id = item["test_id"]
ext_id = item["ext_id"]
ext_name = item["ext_name"]
merged_results[test_id].ext_id = ext_id
merged_results[test_id].ext_name = ext_name
merged_results[test_id].test_result = item["result"]
cov = coverage_results.get(ext_name)
if cov:
merged_results[test_id].covered_lines = cov.covered_lines
merged_results[test_id].num_statements = cov.num_statements
return merged_results, coverage_results
def _get_coverage_for_nvdf(merged_results: dict, coverage_results: dict) -> dict:
json_data = {}
for ext_name, _ in coverage_results.items():
# grab the matching result
result: ExtCoverage = merged_results.get(ext_name)
if not result:
            # in rare cases the default name of a test config can be different, search for the extension name instead
res: ExtCoverage
for res in merged_results.values():
if res.ext_name == ext_name:
result = res
break
test_result = result.test_result if result else None
if not test_result:
continue
test_data = {
"ext_id": result.ext_id,
"ext_name": ext_name,
}
test_data.update(test_result)
json_data.update({ext_name: {"test": test_data}})
return json_data
def _generate_html_report(report_data, merged_results):
html = ""
with open(os.path.join(HTML_PATH, "template.html"), "r") as f:
html = f.read()
class Color(Enum):
RED = 0
GREEN = 1
YELLOW = 2
def get_color(var, threshold: tuple, inverse=False, warning_only=False) -> Color:
if var == "":
return None
if inverse is True:
if float(var) >= threshold[0]:
return Color.RED
elif float(var) >= threshold[1]:
return Color.YELLOW
elif not warning_only:
return Color.GREEN
else:
if float(var) <= threshold[0]:
return Color.RED
elif float(var) <= threshold[1]:
return Color.YELLOW
elif not warning_only:
return Color.GREEN
def get_td(var, color: Color = None):
if color is Color.RED:
return f"<td ov-red>{var}</td>\n"
elif color is Color.GREEN:
return f"<td ov-green>{var}</td>\n"
elif color is Color.YELLOW:
return f"<td ov-yellow>{var}</td>\n"
else:
return f"<td>{var}</td>\n"
coverage_enabled = get_setting("/exts/omni.kit.test/pyCoverageEnabled", default=False)
coverage_threshold = get_setting("/exts/omni.kit.test/pyCoverageThreshold", default=75)
# disable coverage button when not needed
if not coverage_enabled:
html = html.replace(
"""<button class="tablinks" onclick="openTab(event, 'Coverage')">Coverage</button>""",
"""<button disabled class="tablinks" onclick="openTab(event, 'Coverage')">Coverage</button>""",
)
# Build test run data
html = html.replace("%%test_data%%", _build_test_data_html(report_data))
# Build extension table
html_data = ""
for test_id, info in sorted(merged_results.items()):
r = info.test_result
waiver = True if r.get("config", {}).get("waiver") else False
passed = r.get("passed", False)
test_count = r.get("test_count", 0)
duration = round(r.get("duration", 0.0), 1)
startup_duration = round(r.get("startup_duration", 0.0), 1)
startup_ratio = round(r.get("startup_ratio", 0.0), 1)
tests_duration = round(r.get("tests_duration", 0.0), 1)
tests_ratio = round(r.get("tests_ratio", 0.0), 1)
timeout = round(r.get("timeout", 0), 0)
timeout_ratio = 0
if timeout != 0:
timeout_ratio = round((duration / timeout) * 100.0, 0)
# an extension can override pyCoverageEnabled / pyCoverageThreshold
ext_coverage_enabled = bool(r.get("config", {}).get("pyCoverageEnabled", coverage_enabled))
ext_coverage_threshold = int(r.get("config", {}).get("pyCoverageThreshold", coverage_threshold))
ext_coverage_threshold_low = int(ext_coverage_threshold * (2 / 3))
# coverage data
num_statements = info.sum_statements()
num_covered_lines = info.sum_covered_lines()
cov_percent = round(info.mean_cov(), 2)
# add those calculated values to our results
py_coverage = {
"lines_total": num_statements,
"lines_tested": num_covered_lines,
"cov_percent": float(cov_percent),
"cov_threshold": ext_coverage_threshold,
"enabled": bool(coverage_enabled and ext_coverage_enabled),
}
info.test_result["pyCoverage"] = py_coverage
html_data += "<tr>\n"
html_data += get_td(test_id)
html_data += get_td(r.get("package", {}).get("version", ""))
html_data += get_td(waiver, Color.GREEN if waiver is True else None)
html_data += get_td(passed, Color.GREEN if bool(passed) is True else Color.RED)
html_data += get_td(test_count, Color.GREEN if waiver is True else get_color(test_count, (0, 5)))
# color code tests duration: >=60 seconds is red and >=30 seconds is yellow
html_data += get_td(str(duration), get_color(duration, (60, 30), inverse=True, warning_only=True))
html_data += get_td(str(startup_duration))
html_data += get_td(str(startup_ratio))
html_data += get_td(str(tests_duration))
html_data += get_td(str(tests_ratio))
html_data += get_td(str(timeout))
html_data += get_td(str(timeout_ratio), get_color(timeout_ratio, (90, 75), inverse=True, warning_only=True))
html_data += get_td(bool(coverage_enabled and ext_coverage_enabled))
if coverage_enabled and ext_coverage_enabled:
html_data += get_td(ext_coverage_threshold)
html_data += get_td(num_statements)
html_data += get_td(num_covered_lines)
html_data += get_td(
cov_percent,
Color.GREEN
if waiver is True
else get_color(cov_percent, (ext_coverage_threshold_low, ext_coverage_threshold)),
)
print(f" > Coverage for {test_id} is {cov_percent}%")
else:
for _ in range(4):
html_data += get_td("-")
html_data += "</tr>\n"
html = html.replace("%%table_data%%", html_data)
return html
def _write_html_report(html, output_path):
REPORT_NAME = "index.html"
REPORT_FOLDER_NAME = "test_report"
report_dir = os.path.join(output_path, REPORT_FOLDER_NAME)
os.makedirs(report_dir, exist_ok=True)
with open(os.path.join(report_dir, REPORT_NAME), "w") as f:
f.write(html)
print(f" > Full report available here {f.name}")
if not is_running_on_ci():
import webbrowser
webbrowser.open(f.name)
# copy javascript/css files
shutil.copyfile(os.path.join(HTML_PATH, "script.js"), os.path.join(report_dir, "script.js"))
shutil.copyfile(os.path.join(HTML_PATH, "style.css"), os.path.join(report_dir, "style.css"))
shutil.make_archive(os.path.join(output_path, REPORT_FOLDER_NAME), "zip", report_dir)
teamcity_publish_artifact(os.path.join(output_path, "*.zip"))
@dataclass
class Stats:
passed: int = 0
failure: int = 0
error: int = 0
skipped: int = 0
def get_total(self):
return self.passed + self.failure + self.error + self.skipped
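# Illustrative sketch (not from the original sources) of the JUnit XML shape produced by _write_junit_results below;
# element and attribute names follow the code, the values are made up:
#
#   <testsuites>
#     <testsuite name="omni.foo.tests" failures="0" errors="1" skipped="0" tests="2" time="12.345"
#                timestamp="2024-01-01T12:00:00" hostname="host">
#       <testcase name="omni.foo.tests.TestBar.test_baz" classname="omni.foo.tests" time="1.234" />
#       <testcase name="omni.foo.tests" classname="omni.foo.tests" time="12.345"><error>...</error></testcase>
#     </testsuite>
#   </testsuites>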
def _write_junit_results(report_data: list):
"""Write a JUnit XML from our report data"""
testcases = []
testsuites = ET.Element("testsuites")
start_time = datetime.now()
last_failure = {"message": "", "fail_type": ""}
stats = Stats()
for data in report_data:
test_id = data["test_id"]
test_type = data["test_type"]
ext_test_id = data.get("ext_test_id", test_id)
if data["event"] == "start":
if test_type == "exttest":
start_time = datetime.fromtimestamp(data["start_time"])
elif data["event"] == "fail":
last_failure = data
elif data["event"] == "stop":
# create a testcase for each stop event (for both exttest and unittest)
testcase = ET.Element("testcase", name=test_id, classname=ext_test_id, time=f"{data['duration']:.3f}")
if data.get("skipped"):
stats.skipped += 1
node = ET.SubElement(testcase, "skipped")
node.text = data.get("skip_reason", "")
elif data.get("passed"):
stats.passed += 1
else:
# extension tests failures are of type Error
if test_type == "exttest":
stats.error += 1
node = ET.SubElement(testcase, "error")
elif last_failure["fail_type"] == "Failure":
stats.failure += 1
node = ET.SubElement(testcase, "failure")
else:
stats.error += 1
node = ET.SubElement(testcase, "error")
node.text = last_failure["message"]
testcases.append(testcase)
# extension test stop - gather all testcases and add test suite
if test_type == "exttest":
testsuite = ET.Element(
"testsuite",
name=test_id,
failures=str(stats.failure),
errors=str(stats.error),
skipped=str(stats.skipped),
tests=str(stats.get_total()),
time=f"{data['duration']:.3f}",
timestamp=start_time.isoformat(),
hostname=platform.node(),
)
testsuite.extend(testcases)
testsuites.append(testsuite)
# reset things between test suites
testcases = []
last_failure = {"message": "", "fail_type": ""}
stats = Stats()
# write our file
ET.indent(testsuites)
with open(get_results_filepath(), "w", encoding="utf-8") as f:
f.write(ET.tostring(testsuites, encoding="unicode", xml_declaration=True))
| 30,977 | Python | 37.386617 | 119 | 0.568971 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/test_coverage.py | import os
import shutil
from datetime import datetime
from functools import lru_cache
from pathlib import Path
import carb.settings
import coverage
import omni.kit.app
from .teamcity import teamcity_publish_artifact
from .utils import get_global_test_output_path, get_setting
# For Coverage.py to be able to combine data it's required that combined reports have the same prefix in the filenames.
# From https://coverage.readthedocs.io/en/coverage-6.1.2/api_coverage.htm:
# "All coverage data files whose name starts with data_file (from the coverage() constructor) will be read,
# and combined together into the current measurements."
COV_OUTPUT_DATAFILE_PREFIX = "py_cov"
COV_OUTPUT_DATAFILE_EXTENSION = ".pycov"
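# Illustrative example (not part of the original file): with the collector settings read below, a run of an app named
# "exttest_omni_foo" would write an internal data file roughly like
#   <test_output>/pycov/py_cov.exttest_omni_foo_2024-01-01_12-00-00-000000.pycov
# (the "py_cov" prefix, then the app name and timestamp suffix); coverage combine() later picks up every file in that
# directory sharing the "py_cov" prefix.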
CURRENT_PATH = Path(__file__).parent
HTML_LOCAL_PATH = CURRENT_PATH.parent.parent.parent.joinpath("html", "coverage")
class PyCoverageCollectorSettings:
def __init__(self):
self.enabled = False
self.output_dir = None
self.filter: list = None
self.omit: list = None
self.include_modules = False
self.include_dependencies = False
self.include_test_dependencies = False
@lru_cache()
def _get_coverage_output_dir():
return os.path.join(get_global_test_output_path(), "pycov")
def read_coverage_collector_settings() -> PyCoverageCollectorSettings:
result = PyCoverageCollectorSettings()
result.enabled = get_setting("/exts/omni.kit.test/pyCoverageEnabled", True)
result.output_dir = _get_coverage_output_dir()
result.filter = get_setting("/exts/omni.kit.test/pyCoverageFilter", None)
result.omit = get_setting("/exts/omni.kit.test/pyCoverageOmit", None)
result.include_modules = get_setting("/exts/omni.kit.test/pyCoverageIncludeModules", False)
result.include_dependencies = get_setting("/exts/omni.kit.test/pyCoverageIncludeDependencies", False)
result.include_test_dependencies = get_setting("/exts/omni.kit.test/pyCoverageIncludeTestDependencies", False)
return result
class PyCoverageCollector:
"""Initializes code coverage collections and saves collected data at Python interpreter exit"""
class PyCoverageSettings:
def __init__(self):
self.filter = None
self.omit = None
self.output_data_path_prefix = None
self.output_datafile_suffix = None
def __init__(self):
self._coverage = None
def _read_collector_settings(self) -> PyCoverageSettings:
"""
        Reads coverage settings and returns a non-None PyCoverageSettings if Python coverage is required
"""
app_name = get_setting("/app/name")
collector_settings = read_coverage_collector_settings()
if not collector_settings.enabled:
print(f"'{app_name}' has disabled Python coverage in settings")
return None
if collector_settings.output_dir is None:
print(f"Output directory for Python coverage isn't set. Skipping Python coverage for '{app_name}'.")
return None
result = self.PyCoverageSettings()
result.filter = collector_settings.filter
result.omit = collector_settings.omit
filename_timestamp = app_name + f"_{datetime.now():%Y-%m-%d_%H-%M-%S-%f}"
        # PyCoverage combines report files that have the same prefix, so add the same prefix to every created report
result.output_data_path_prefix = os.path.normpath(
os.path.join(collector_settings.output_dir, COV_OUTPUT_DATAFILE_PREFIX)
)
result.output_datafile_suffix = filename_timestamp + COV_OUTPUT_DATAFILE_EXTENSION
return result
def startup(self):
        # Read settings to check whether Python coverage needs to be started.
        # This must be done as soon as possible to properly collect data.
self._settings = self._read_collector_settings()
if self._settings is not None:
self._coverage = coverage.Coverage(
source=self._settings.filter,
omit=self._settings.omit,
data_file=self._settings.output_data_path_prefix,
data_suffix=self._settings.output_datafile_suffix,
)
self._coverage.config.disable_warnings = [
"module-not-measured",
"module-not-imported",
"no-data-collected",
"couldnt-parse",
]
self._coverage.start()
# Register for app shutdown to finalize the coverage.
# For fast shutdown, shutdown function of ext will not be called.
# The following subscription will give a chance to collect coverage report.
if carb.settings.get_settings().get("/app/fastShutdown"):
self._shutdown_subs = (
omni.kit.app.get_app()
.get_shutdown_event_stream()
.create_subscription_to_pop_by_type(
omni.kit.app.POST_QUIT_EVENT_TYPE, self.shutdown, name="omni.kit.test::coverage", order=1000
)
)
else:
self._shutdown_subs = None
def shutdown(self, _=None):
if self._coverage is not None:
self._coverage.stop()
try:
                # Note: trying to save the report in a non-internal format in the "atexit" handler will result in an error
self._coverage.save()
except coverage.misc.CoverageException as err:
print(f"Couldn't save Coverage report in internal format: {err}")
self._coverage = None
self._settings = None
self._shutdown_subs = None
class PyCoverageReporterSettings:
def __init__(self):
self.source_dir = None
self.output_to_std = False
self.output_to_json = False
self.output_to_html = False
self.combine_previous_data = False
def read_coverage_reporter_settings() -> PyCoverageReporterSettings:
coverage_enabled = get_setting("/exts/omni.kit.test/pyCoverageEnabled", True)
if not coverage_enabled:
return None
pyCoverageFormats = [s.lower() for s in get_setting("/exts/omni.kit.test/pyCoverageFormats", ["json"])]
output_to_std = "stdout" in pyCoverageFormats
output_to_json = "json" in pyCoverageFormats
output_to_html = "html" in pyCoverageFormats
# Check if no Python coverage report required
if not output_to_std and not output_to_json:
return None
source_dir = _get_coverage_output_dir()
if not os.path.exists(source_dir):
return None
result = PyCoverageReporterSettings()
result.source_dir = source_dir
result.output_to_std = output_to_std
result.output_to_json = output_to_json
result.output_to_html = output_to_html
result.combine_previous_data = get_setting("/exts/omni.kit.test/pyCoverageCombinedReport", False)
return result
def _report_single_coverage_result(
cov: coverage,
src_path: str,
std_output: bool = True,
json_output_file: str = None,
title: str = None,
html_output_path: str = None,
):
"""
Creates single report and returns path for created json file (or None if it wasn't created)
"""
try:
        # Note: parameter 'keep' controls whether the read data files are kept afterwards;
        # it is set to True as they might be used to regenerate the overall coverage report
cov.combine(data_paths=[src_path], keep=True)
# Note: ignore errors is needed to ignore some of the errors when coverage fails to process
# .../PythonExtension.cpp::shutdown() or some other file
if std_output:
print()
print("=" * 60)
title = title if title is not None else "Python coverage report"
print(title)
print()
cov.report(ignore_errors=True)
print("=" * 60)
if json_output_file is not None:
cov.json_report(outfile=json_output_file, ignore_errors=True)
if html_output_path is not None:
cov.html_report(directory=html_output_path, ignore_errors=True)
except coverage.misc.CoverageException as err:
print(f"Couldn't create coverage report for '{src_path}': {err}")
def _modify_html_report(output_path: str):
# modify coverage html file to have a larger and clearer filter for extensions
html = ""
with open(os.path.join(output_path, "index.html"), 'r') as file:
html = file.read()
with open(os.path.join(HTML_LOCAL_PATH, "modify.html"), 'r') as file:
# find_replace [0] is the line to find, [1] the line to replace and [2] the line to add
find_replace = file.read().splitlines()
html = html.replace(find_replace[0], find_replace[1] + '\n' + find_replace[2])
with open(os.path.join(output_path, "index.html"), 'w') as file:
file.write(html)
# overwrite coverage css/js files
shutil.copyfile(os.path.join(HTML_LOCAL_PATH, "new_style.css"), os.path.join(output_path, "style.css"))
shutil.copyfile(os.path.join(HTML_LOCAL_PATH, "new_script.js"), os.path.join(output_path, "coverage_html.js"))
class PyCoverageReporterResult:
def __init__(self):
self.html_path = None
self.json_path = None
def report_coverage_results(reporter_settings: PyCoverageReporterSettings = None) -> PyCoverageReporterResult:
"""
Processes previously collected coverage data according to settings in the 'reporter_settings'
"""
result = PyCoverageReporterResult()
if reporter_settings is None:
return result
if (
not reporter_settings.output_to_std
and not reporter_settings.output_to_json
and not reporter_settings.output_to_html
):
print("No output report options selected for the coverage results. No result report generated.")
return result
# use global configuration file
config_file = str(CURRENT_PATH.joinpath(".coveragerc"))
# A helper file required by coverage for combining already existing reports
cov_internal_file = os.path.join(reporter_settings.source_dir, COV_OUTPUT_DATAFILE_PREFIX)
cov = coverage.Coverage(source=None, data_file=cov_internal_file, config_file=config_file)
cov.config.disable_warnings = ["module-not-measured", "module-not-imported", "no-data-collected", "couldnt-parse"]
if reporter_settings.combine_previous_data:
result.json_path = (
os.path.join(reporter_settings.source_dir, "combined_py_coverage" + COV_OUTPUT_DATAFILE_EXTENSION + ".json")
if reporter_settings.output_to_json
else None
)
result.html_path = (
os.path.join(reporter_settings.source_dir, "combined_py_coverage_html")
if reporter_settings.output_to_html
else None
)
_report_single_coverage_result(
cov,
reporter_settings.source_dir,
reporter_settings.output_to_std,
result.json_path,
html_output_path=result.html_path,
)
if result.html_path and os.path.exists(result.html_path):
# slightly modify the html report for our needs
_modify_html_report(result.html_path)
            # add the folder to a zip file, which will be used on TeamCity
shutil.make_archive(os.path.join(reporter_settings.source_dir, "coverage"), "zip", result.html_path)
if not os.path.exists(result.json_path):
result.json_path = None
else:
internal_reports = [
file for file in os.listdir(reporter_settings.source_dir) if file.endswith(COV_OUTPUT_DATAFILE_EXTENSION)
]
for cur_file in internal_reports:
cov.erase()
processed_filename = (
cur_file[len(COV_OUTPUT_DATAFILE_PREFIX) + 1 :]
if cur_file.startswith(COV_OUTPUT_DATAFILE_PREFIX)
else cur_file
)
json_path = None
if reporter_settings.output_to_json:
json_path = os.path.join(reporter_settings.source_dir, processed_filename + ".json")
title = None
if reporter_settings.output_to_std:
title, _ = os.path.splitext(processed_filename)
title = f"Python coverage report for '{title}'"
_report_single_coverage_result(
cov,
os.path.join(reporter_settings.source_dir, cur_file),
reporter_settings.output_to_std,
json_path,
title,
)
# Cleanup of intermediate data
cov.erase()
return result
def generate_coverage_report() -> PyCoverageReporterResult:
# processing coverage data
result = PyCoverageReporterResult()
coverage_collector_settings = read_coverage_collector_settings()
# automatically enable coverage if we detect a pycov directory present when generating a report
if os.path.exists(coverage_collector_settings.output_dir):
carb.settings.get_settings().set("/exts/omni.kit.test/pyCoverageEnabled", True)
coverage_collector_settings.enabled = True
if coverage_collector_settings.enabled:
coverage_reporter_settings = read_coverage_reporter_settings()
result = report_coverage_results(coverage_reporter_settings)
teamcity_publish_artifact(os.path.join(coverage_collector_settings.output_dir, "*.zip"))
return result
| 13,448 | Python | 38.09593 | 120 | 0.645895 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/flaky.py | import datetime
import logging
import os
from collections import defaultdict
import carb
from .utils import get_test_output_path
from .nvdf import get_app_info, query_nvdf
logger = logging.getLogger(__name__)
FLAKY_TESTS_QUERY_DAYS = 30
class FlakyTestAnalyzer:
"""Basic Flaky Tests Analyzer"""
AGG_TEST_IDS = "ids"
AGG_LAST_EXT_CONFIG = "config"
BUCKET_PASSED = "passed"
BUCKET_FAILED = "failed"
tests_failed = set()
ext_failed = defaultdict(list)
def __init__(
self, ext_test_id: str = "*", query_days=FLAKY_TESTS_QUERY_DAYS, exclude_consecutive_failure: bool = True
):
self.ext_test_id = ext_test_id
self.query_days = query_days
self.exclude_consecutive_failure = exclude_consecutive_failure
self.app_info = get_app_info()
self.query_result = self._query_nvdf()
def should_skip_test(self) -> bool:
if not self.query_result:
carb.log_info(f"{self.ext_test_id} query error - skipping test")
return True
if len(self.tests_failed) == 0:
carb.log_info(f"{self.ext_test_id} has no failed tests in last {self.query_days} days - skipping test")
return True
return False
def get_flaky_tests(self, ext_id: str) -> list:
return self.ext_failed.get(ext_id, [])
def generate_playlist(self) -> str:
test_output_path = get_test_output_path()
os.makedirs(test_output_path, exist_ok=True)
filename = "flakytest_" + self.ext_test_id.replace(".", "_").replace(":", "-")
filepath = os.path.join(test_output_path, f"{filename}_playlist.log")
if self._write_playlist(filepath):
return filepath
def _write_playlist(self, filepath: str) -> bool:
try:
with open(filepath, "w") as f:
f.write("\n".join(self.tests_failed))
return True
except IOError as e:
carb.log_warn(f"Error writing to {filepath} -> {e}")
return False
def _query_nvdf(self) -> bool:
query = self._es_query(days=self.query_days, hours=0)
r = query_nvdf(query)
for aggs in r.get("aggregations", {}).get(self.AGG_TEST_IDS, {}).get("buckets", {}):
test_id = aggs.get("key")
test_config = aggs.get("config", {}).get("hits", {}).get("hits")
if not test_config or not test_config[0]:
continue
test_config = test_config[0]
ext_test_id = test_config.get("fields", {}).get("test.s_ext_test_id")
if not ext_test_id or not ext_test_id[0]:
continue
ext_test_id = ext_test_id[0]
passed = aggs.get(self.BUCKET_PASSED, {}).get("doc_count", 0)
failed = aggs.get(self.BUCKET_FAILED, {}).get("doc_count", 0)
ratio = 0
if passed != 0 and failed != 0:
ratio = failed / (passed + failed)
carb.log_info(
f"{test_id} passed: {passed} failed: {failed} ({ratio * 100:.2f}% fail rate) in last {self.query_days} days"
)
if failed == 0:
continue
self.ext_failed[ext_test_id].append(
{"test_id": test_id, "passed": passed, "failed": failed, "ratio": ratio}
)
self.tests_failed.add(test_id)
return True
def _es_query(self, days: int, hours: int) -> dict:
target_date = datetime.datetime.utcnow() - datetime.timedelta(days=days, hours=hours)
kit_version = self.app_info["kit_version"]
carb.log_info(f"NVDF query for {self.ext_test_id} on Kit {kit_version}, last {days} days")
query = {
"aggs": {
self.AGG_TEST_IDS: {
"terms": {"field": "test.s_test_id", "order": {self.BUCKET_FAILED: "desc"}, "size": 1000},
"aggs": {
self.AGG_LAST_EXT_CONFIG: {
"top_hits": {
"fields": [{"field": "test.s_ext_test_id"}],
"_source": False,
"size": 1,
"sort": [{"ts_created": {"order": "desc"}}],
}
},
self.BUCKET_PASSED: {
"filter": {
"bool": {
"filter": [{"term": {"test.b_passed": True}}],
}
}
},
self.BUCKET_FAILED: {
"filter": {
"bool": {
"filter": [{"term": {"test.b_passed": False}}],
}
}
},
},
}
},
"size": 0,
"query": {
"bool": {
# filter out consecutive failure
# not (test.b_consecutive_failure : * and test.b_consecutive_failure : true)
"must_not": {
"bool": {
"filter": [
{
"bool": {
"should": [{"exists": {"field": "test.b_consecutive_failure"}}],
"minimum_should_match": 1,
}
},
{
"bool": {
"should": [
{"term": {"test.b_consecutive_failure": self.exclude_consecutive_failure}}
],
"minimum_should_match": 1,
}
},
]
}
},
"filter": [
{"term": {"test.s_ext_test_id": self.ext_test_id}},
{"term": {"test.s_test_type": "unittest"}},
{"term": {"test.b_skipped": False}},
{"term": {"test.b_unreliable": False}},
{"term": {"test.b_parallel_run": False}}, # Exclude parallel_run results
{"term": {"app.s_kit_version": kit_version}},
{"term": {"app.l_merge_request": 0}}, # Should we enable flaky tests from MR? For now excluded.
{
"range": {
"ts_created": {
"gte": target_date.isoformat() + "Z",
"format": "strict_date_optional_time",
}
}
},
],
}
},
}
return query
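# Illustrative usage (mirrors how exttests.py drives this class; the extension test id is made up):
#
#   analyzer = FlakyTestAnalyzer("omni.foo.tests", query_days=30)
#   if analyzer.should_skip_test():
#       ...  # nothing failed recently (or the query failed) - skip the flaky run
#   else:
#       playlist = analyzer.generate_playlist()  # file with one failed test id per line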
| 7,157 | Python | 38.988827 | 124 | 0.415398 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/unittests.py | import asyncio
import fnmatch
import os
import random
import sys
import traceback
import unittest
from contextlib import suppress
from glob import glob
from importlib import import_module
from itertools import islice
from os.path import basename, dirname, isfile, join, splitext
from types import ModuleType
from typing import Callable, List
import carb
import carb.tokens
import omni.kit.app
from .async_unittest import AsyncTestSuite, AsyncTextTestRunner, OmniTestResult
from .exttests import RunExtTests
from .reporter import TestReporter
from .sampling import SamplingFactor, get_tests_sampling_to_skip
from .teamcity import teamcity_message
from .test_reporters import _test_status_report
from .utils import get_ext_test_id, get_setting, get_test_output_path
def _import_if_exist(module: str):
try:
return import_module(module)
except ModuleNotFoundError as e:
        # the module doesn't exist if it is the one we are trying to import, or one of its parent namespaces
if e.name == module or module.startswith(e.name + "."):
return None
carb.log_error(
f"Failed to import python module with tests: {module}. Error: {e}. Traceback:\n{traceback.format_exc()}"
)
except Exception as e:
carb.log_error(
f"Failed to import python module with tests: {module}. Error: {e}. Traceback:\n{traceback.format_exc()}"
)
def _get_enabled_extension_modules(filter_fn: Callable[[str], bool] = None):
manager = omni.kit.app.get_app().get_extension_manager()
# For each extension get each python module it declares
module_names = manager.get_enabled_extension_module_names()
sys_modules = set()
for name in module_names:
if name in sys.modules:
if filter_fn and not filter_fn(name):
continue
sys_modules.add(sys.modules[name])
# Automatically look for and import '[some_module].tests' and '[some_module].ogn.tests' so that extensions
# don't have to put tests into config files and import them all the time.
for test_submodule in [f"{name}.tests", f"{name}.ogn.tests"]:
if filter_fn and not filter_fn(test_submodule):
continue
if test_submodule in sys.modules:
sys_modules.add(sys.modules[test_submodule])
else:
test_module = _import_if_exist(test_submodule)
if test_module:
sys_modules.add(test_module)
return sys_modules
# ----------------------------------------------------------------------
SCANNED_TEST_MODULES = {} # Dictionary of moduleName: [dynamicTestModules]
_EXTENSION_DISABLED_HOOK = None # Hook for monitoring extension state changes, to keep the auto-populated list synced
_LOG = bool(os.getenv("TESTS_DEBUG")) # Environment variable to enable debugging of the test registration
# ----------------------------------------------------------------------
def remove_from_dynamic_test_cache(module_root):
"""Get the list of tests dynamically added to the given module directory (via "scan_for_test_modules")"""
global SCANNED_TEST_MODULES
for module_suffix in ["", ".tests", ".ogn.tests"]:
module_name = module_root + module_suffix
tests_to_remove = SCANNED_TEST_MODULES.get(module_name, [])
if tests_to_remove:
if _LOG:
print(f"Removing {len(tests_to_remove)} tests from {module_name}")
del SCANNED_TEST_MODULES[module_name]
# ----------------------------------------------------------------------
def _on_ext_disabled(ext_id, *_):
"""Callback executed when an extension has been disabled - scan for tests to remove"""
config = omni.kit.app.get_app().get_extension_manager().get_extension_dict(ext_id)
for node in ("module", "modules"):
with suppress(KeyError):
for module in config["python"][node]:
remove_from_dynamic_test_cache(module["name"])
# ----------------------------------------------------------------------
def dynamic_test_modules(module_root: str, module_file: str) -> List[ModuleType]:
"""Import all of the test modules and return a list of the imports so that automatic test recognition works
The normal test recognition mechanism relies on knowing all of the file names at build time. This function is
used to support automatic recognition of all test files in a certain directory at run time.
Args:
module_root: Name of the module for which tests are being imported, usually just __name__ of the caller
module_file: File from which the import is happening, usually just __file__ of the caller
Usage:
In the directory containing your tests add this line to the __init__.py file (creating the file if necessary):
scan_for_test_modules = True
        It will pick up any Python file named testXXX.py or TestXXX.py and scan it for tests when the extension
is loaded.
Important:
The __init__.py file must be imported with the extension. If you have a .tests module or .ogn.tests module
underneath your main module this will happen automatically for you.
Returns:
List of modules that were added, each pointing to a file in which tests are contained
"""
global _EXTENSION_DISABLED_HOOK
global SCANNED_TEST_MODULES
if module_root in SCANNED_TEST_MODULES:
return SCANNED_TEST_MODULES[module_root]
modules_imported = []
for module_name in [basename(f) for f in glob(join(dirname(module_file), "*.py")) if isfile(f)]:
if module_name != "__init__" and module_name.lower().startswith("test"):
imported_module = f"{module_root}.{splitext(module_name)[0]}"
modules_imported.append(import_module(imported_module))
SCANNED_TEST_MODULES[module_root] = modules_imported
# This is a singleton initialization. If ever any test modules are scanned then from then on monitor for an
# extension being disabled so that the cached list can be cleared for rebuilding on the next run.
if _EXTENSION_DISABLED_HOOK is None:
hooks = omni.kit.app.get_app().get_extension_manager().get_hooks()
_EXTENSION_DISABLED_HOOK = hooks.create_extension_state_change_hook(
_on_ext_disabled,
omni.ext.ExtensionStateChangeType.BEFORE_EXTENSION_DISABLE,
ext_dict_path="python",
hook_name="python.unit_tests",
)
return modules_imported
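# Illustrative usage (a sketch of the pattern described in the docstring above; module and file names are made up):
# in my_ext/tests/__init__.py:
#
#   scan_for_test_modules = True
#
# Any sibling file such as my_ext/tests/test_something.py is then imported automatically and its test cases are
# picked up when the extension loads.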
# ==============================================================================================================
def get_tests_to_remove_from_modules(modules, log=_LOG):
"""Return the list of tests to be removed when a module is unloaded.
This includes all tests registered or dynamically discovered from the list of modules and their .tests or
.ogn.tests submodules. Keeping this separate from get_tests_from_modules() allows the import of all three related
modules, while preventing duplication of their tests when all extension module tests are requested.
Args:
        modules: List of modules to gather removable tests from
"""
all_modules = modules
all_modules += [module.tests for module in modules if hasattr(module, "tests")]
all_modules += [module.ogn.tests for module in modules if hasattr(module, "ogn") and hasattr(module.ogn, "tests")]
return get_tests_from_modules(all_modules, log)
# ==============================================================================================================
def get_tests_from_modules(modules, log=_LOG):
"""Return the list of tests registered or dynamically discovered from the list of modules"""
loader = unittest.TestLoader()
loader.suiteClass = AsyncTestSuite
tests = []
for module in modules:
if log:
carb.log_warn(f"Getting tests from module {module.__name__}")
suite = loader.loadTestsFromModule(module)
test_count = suite.countTestCases()
if test_count > 0:
if log:
carb.log_warn(f"Found {test_count} tests in {module.__name__}")
for t in suite:
tests += t._tests
if "scan_for_test_modules" in module.__dict__:
if log:
carb.log_warn(f"Scanning for test modules in {module.__name__} loaded from {module.__file__}")
for extra_module in dynamic_test_modules(module.__name__, module.__file__):
if log:
carb.log_warn(f" Processing additional module {extra_module}")
extra_suite = loader.loadTestsFromModule(extra_module)
extra_count = extra_suite.countTestCases()
if extra_count > 0:
if log:
carb.log_warn(f"Found {extra_count} additional tests added through {extra_module.__name__}")
for extra_test in extra_suite:
tests += extra_test._tests
# Some tests can be generated at runtime out of discovered ones. For example, we can leverage that to duplicate
# tests for different configurations.
for t in islice(tests, 0, len(tests)):
generate_extra = getattr(t, "generate_extra_tests", None)
if callable(generate_extra):
generated = generate_extra()
if generated:
tests += generated
return tests
def get_tests_from_enabled_extensions():
include_tests = get_setting("/exts/omni.kit.test/includeTests", default=[])
exclude_tests = get_setting("/exts/omni.kit.test/excludeTests", default=[])
def include_test(test_id: str) -> bool:
return any(fnmatch.fnmatch(test_id, p) for p in include_tests) and not any(
fnmatch.fnmatch(test_id, p) for p in exclude_tests
)
    # Filter modules before importing. That allows having test-only modules and dependencies; they will fail to import
    # in a non-test environment. The tricky part is the filtering itself. For includeTests = "omni.foo.test_abc_def_*"
    # we want to match the `omni.foo` module, but not the `omni.foo_test_abc` test id. Thus module filtering is more
    # permissive and checks "starts with" too.
def include_module(module: str) -> bool:
def match_module(module, pattern):
return fnmatch.fnmatch(module, pattern) or pattern.startswith(module)
return any(match_module(module, p) for p in include_tests)
modules = _get_enabled_extension_modules(filter_fn=include_module)
return (t for t in get_tests_from_modules(modules) if include_test(t.id()))
def _get_tests_from_file(filepath: str) -> list:
test_list = []
try:
with open(filepath) as f:
test_list = f.read().splitlines()
except IOError as e:
carb.log_warn(f"Error opening file {filepath} -> {e}")
return test_list
def _get_tests_override(tests: list) -> list:
"""Apply some override/modifiers to get the proper list of tests in that order:
1. Add/Remove unreliable tests depending on testExtRunUnreliableTests value
2. Get list of failed tests if present (if enabled, used with retry-on-failure)
3. Get list of tests from a file (if enabled, generated when running tests)
4. Get list of tests from sampling (if enabled)
5. Shuffle (random order) is applied last
"""
def is_unreliable_test(test_id: str) -> bool:
return any(fnmatch.fnmatch(test_id, p) for p in unreliable_tests)
unreliable_tests = get_setting("/exts/omni.kit.test/unreliableTests", default=[])
run_unreliable_tests = get_setting("/exts/omni.kit.test/testExtRunUnreliableTests", default=0)
if run_unreliable_tests == RunExtTests.RELIABLE_ONLY:
tests = [t for t in tests if not is_unreliable_test(t.id())]
elif run_unreliable_tests == RunExtTests.UNRELIABLE_ONLY:
tests = [t for t in tests if is_unreliable_test(t.id())]
failed_tests = get_setting("/exts/omni.kit.test/retryFailedTests", default=[])
tests_filepath = get_setting("/exts/omni.kit.test/runTestsFromFile", default="")
sampling_factor = float(get_setting("/exts/omni.kit.test/samplingFactor", default=SamplingFactor.UPPER_BOUND))
shuffle_tests = bool(get_setting("/exts/omni.kit.test/testExtRandomOrder", default=False))
if failed_tests:
tests = [t for t in tests if t.id() in failed_tests]
elif tests_filepath:
tests_from_file = _get_tests_from_file(tests_filepath)
tests = [t for t in tests if t.id() in tests_from_file]
tests.sort(key=lambda x: tests_from_file.index(x.id()))
elif tests and sampling_factor != SamplingFactor.UPPER_BOUND:
sampling = get_tests_sampling_to_skip(get_ext_test_id(), sampling_factor, [t.id() for t in tests])
skipped_tests = [t for t in tests if t.id() in sampling]
print(
"----------------------------------------\n"
f"Tests Sampling Factor set to {int(sampling_factor * 100)}% "
f"(each test should run every ~{int(1.0 / sampling_factor)} runs)\n"
)
teamcity_message("message", text=f"Tests Sampling Factor set to {int(sampling_factor * 100)}%")
# Add unittest.skip function (decorator) to all skipped tests if not skipped already.
# It will provide an explicit reason why the test was skipped.
for t in skipped_tests:
test_method = getattr(t, t._testMethodName)
if not getattr(test_method, "__unittest_skip__", False):
setattr(t, t._testMethodName, unittest.skip("Skipped by Sampling")(test_method))
if shuffle_tests:
seed = int(get_setting("/exts/omni.kit.test/testExtSamplingSeed", default=-1))
if seed >= 0:
random.seed(seed)
random.shuffle(tests)
return tests
def get_tests(tests_filter="") -> List:
"""Default function to get all current tests.
It gets tests from all enabled extensions, but also included include and exclude settings to filter them
Args:
tests_filter(str): Additional filter string to apply on list of tests.
Returns:
List of tests.
"""
if "*" not in tests_filter:
tests_filter = f"*{tests_filter}*"
# Find all tests in loaded extensions and filter with patterns using settings above:
tests = [t for t in get_tests_from_enabled_extensions() if fnmatch.fnmatch(t.id(), tests_filter)]
tests = _get_tests_override(tests)
return tests
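# Illustrative usage (not part of the original file): list every discovered test whose id mentions "startup".
#
#   for t in get_tests("startup"):
#       print(t.id())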
def _setup_output_path(test_output_path: str):
tokens = carb.tokens.get_tokens_interface()
os.makedirs(test_output_path, exist_ok=True)
tokens.set_value("test_output", test_output_path)
def _write_tests_playlist(test_output_path: str, tests: list):
n = 1
filepath = test_output_path
app_name = get_setting("/app/name", "exttest")
while os.path.exists(filepath):
filepath = os.path.join(test_output_path, f"{app_name}_playlist_{n}.log")
n += 1
try:
with open(filepath, "w") as f:
for t in tests:
f.write(f"{t.id()}\n")
except IOError as e:
carb.log_warn(f"Error writing to {filepath} -> {e}")
def run_tests_in_modules(modules, on_finish_fn=None):
run_tests(get_tests_from_modules(modules, True), on_finish_fn)
def run_tests(tests=None, on_finish_fn=None, on_status_report_fn=None):
if tests is None:
tests = get_tests()
test_output_path = get_test_output_path()
_setup_output_path(test_output_path)
_write_tests_playlist(test_output_path, tests)
loader = unittest.TestLoader()
loader.suiteClass = AsyncTestSuite
suite = AsyncTestSuite()
suite.addTests(tests)
def on_status_report(*args, **kwargs):
if on_status_report_fn:
on_status_report_fn(*args, **kwargs)
_test_status_report(*args, **kwargs)
# Use our own TC reporter:
AsyncTextTestRunner.resultclass = OmniTestResult
runner = AsyncTextTestRunner(verbosity=2, stream=sys.stdout)
async def run():
result = await runner.run(suite, on_status_report)
if on_finish_fn:
on_finish_fn(result)
print("========================================")
print("========================================")
print(f"Running Tests (count: {len(tests)}):")
print("========================================")
asyncio.ensure_future(run())
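# Illustrative usage (a sketch; the callback name is made up): run all discovered tests and report when done.
# Note that run_tests() only schedules the run on the asyncio loop; the callback fires once the run finishes.
#
#   def _on_finish(result):
#       print("test run finished:", result)
#
#   run_tests(get_tests(), on_finish_fn=_on_finish)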
def print_tests():
tests = get_tests()
print("========================================")
print(f"Printing All Tests (count: {len(tests)}):")
print("========================================")
reporter = TestReporter()
for t in tests:
reporter.unittest_start(t.id(), t.id())
print(t.id())
reporter.unittest_stop(t.id(), t.id())
print("========================================")
| 16,769 | Python | 41.890025 | 119 | 0.62705 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/exttests.py | from __future__ import annotations
import asyncio
import fnmatch
import io
import multiprocessing
import os
import pprint
import random
import re
import subprocess
import sys
import time
from collections import defaultdict
from enum import IntEnum
from typing import Dict, List, Set, Tuple
import carb.dictionary
import carb.settings
import carb.tokens
import omni.kit.app
import psutil
from .async_unittest import KEY_FAILING_TESTS, STARTED_UNITTEST
from .code_change_analyzer import CodeChangeAnalyzer
from .crash_process import crash_process
from .flaky import FLAKY_TESTS_QUERY_DAYS, FlakyTestAnalyzer
from .repo_test_context import RepoTestContext
from .reporter import TestReporter
from .sampling import SamplingFactor
from .teamcity import is_running_in_teamcity, teamcity_message, teamcity_test_retry_support
from .test_coverage import read_coverage_collector_settings
from .test_reporters import TestRunStatus, _test_status_report
from .utils import (
clamp,
cleanup_folder,
ext_id_to_fullname,
get_argv,
get_global_test_output_path,
get_local_timestamp,
get_setting,
get_unprocessed_argv,
is_running_on_ci,
resolve_path,
)
BEGIN_SEPARATOR = "\n{0} [EXTENSION TEST START: {{0}}] {0}\n".format("|" * 30)
END_SEPARATOR = "\n{0} [EXTENSION TEST {{0}}: {{1}}] {0}\n".format("|" * 30)
DEFAULT_TEST_NAME = "default"
def _error(stream, msg):
stream.write(f"[error] [{__file__}] {msg}\n")
_debug_log = bool(os.getenv("OMNI_KIT_TEST_DEBUG", default=False))
_asyncio_process_was_terminated = False
def _debug(stream, msg):
if _debug_log:
stream.write(f"[info] [{__file__}] {msg}\n")
def matched_patterns(s: str, patterns: List[str]) -> List[str]:
return [p for p in patterns if fnmatch.fnmatch(s, p)]
def match(s: str, patterns: List[str]) -> bool:
return len(matched_patterns(s, patterns)) > 0
def escape_for_fnmatch(s: str) -> str:
return s.replace("[", "[[]")
def unescape_fnmatch(s: str) -> str:
return s.replace("[[]", "[")
class FailPatterns:
def __init__(self, include=[], exclude=[]):
self.include = [escape_for_fnmatch(s.lower()) for s in include]
self.exclude = [escape_for_fnmatch(s.lower()) for s in exclude]
def merge(self, patterns: FailPatterns):
self.include += patterns.include
self.exclude += patterns.exclude
def match_line(self, line: str) -> Tuple[str, str, bool]:
line_lower = line.lower()
include_matched = match(line_lower, self.include)
exclude_matched = match(line_lower, self.exclude)
if include_matched and not exclude_matched:
patterns = matched_patterns(line_lower, self.include)
patterns = [unescape_fnmatch(p) for p in patterns]
return ", ".join(patterns), line.strip(), exclude_matched
return "", "", exclude_matched
def __str__(self):
return pprint.pformat(vars(self))
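# Illustrative example (not in the original source; patterns and log lines are made up):
#
#   p = FailPatterns(include=["*[error]*"], exclude=["*expected error*"])
#   p.match_line("2024-01-01 [Error] something broke")  # -> ("*[error]*", "2024-01-01 [Error] something broke", False)
#   p.match_line("[Error] this is an expected error")   # -> ("", "", True)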
class RunExtTests(IntEnum):
RELIABLE_ONLY = 0
UNRELIABLE_ONLY = 1
BOTH = 2
class RetryStrategy:
NO_RETRY = "no-retry"
RETRY_ON_FAILURE = "retry-on-failure"
ITERATIONS = "iterations"
RERUN_UNTIL_FAILURE = "rerun-until-failure"
# CI strategy, default to no-retry when testing locally
RETRY_ON_FAILURE_CI_ONLY = "retry-on-failure-ci-only"
class SamplingContext:
ANY = "any"
LOCAL = "local"
CI = "ci"
class TestRunContext:
def __init__(self):
# Setup output path for test data
self.output_path = get_global_test_output_path()
os.makedirs(self.output_path, exist_ok=True)
print("Test output path: {}".format(self.output_path))
self.coverage_mode = get_setting("/exts/omni.kit.test/testExtGenerateCoverageReport", default=False) or (
"--coverage" in get_argv()
)
# clean output folder?
clean_output = get_setting("/exts/omni.kit.test/testExtCleanOutputPath", default=False)
if clean_output:
cleanup_folder(self.output_path)
self.shared_patterns = FailPatterns(
get_setting("/exts/omni.kit.test/stdoutFailPatterns/include", default=[]),
get_setting("/exts/omni.kit.test/stdoutFailPatterns/exclude", default=[]),
)
self.trim_stdout_on_success = bool(get_setting("/exts/omni.kit.test/testExtTrimStdoutOnSuccess", default=False))
self.trim_excluded_messages = bool(
get_setting("/exts/omni.kit.test/stdoutFailPatterns/trimExcludedMessages", default=False)
)
self.retry_strategy = get_setting("/exts/omni.kit.test/testExtRetryStrategy", default=RetryStrategy.NO_RETRY)
self.max_test_run = int(get_setting("/exts/omni.kit.test/testExtMaxTestRunCount", default=1))
if self.retry_strategy == RetryStrategy.RETRY_ON_FAILURE_CI_ONLY:
if is_running_on_ci():
self.retry_strategy = RetryStrategy.RETRY_ON_FAILURE
self.max_test_run = 3
else:
self.retry_strategy = RetryStrategy.NO_RETRY
self.max_test_run = 1
self.run_unreliable_tests = RunExtTests(get_setting("/exts/omni.kit.test/testExtRunUnreliableTests", default=0))
self.run_flaky_tests = get_setting("/exts/omni.kit.test/testExtRunFlakyTests", default=False)
self.start_ts = get_local_timestamp()
self.repo_test_context = RepoTestContext()
self.change_analyzer = None
if get_setting("/exts/omni.kit.test/testExtCodeChangeAnalyzerEnabled", default=False) and is_running_on_ci():
self.change_analyzer = CodeChangeAnalyzer(self.repo_test_context)
def _prepare_ext_for_testing(ext_name, stream=sys.stdout):
manager = omni.kit.app.get_app().get_extension_manager()
ext_id = None
ext_info_local = manager.get_extension_dict(ext_name)
if ext_info_local:
return ext_info_local
ext_info_remote = manager.get_registry_extension_dict(ext_name)
if ext_info_remote:
ext_id = ext_info_remote["package/id"]
else:
versions = manager.fetch_extension_versions(ext_name)
if len(versions) > 0:
ext_id = versions[0]["id"]
else:
_error(stream, f"Can't find extension: {ext_name} to run extension test on.")
return None
ext_info_local = manager.get_extension_dict(ext_id)
is_local = ext_info_local is not None
if not is_local:
if not manager.pull_extension(ext_id):
_error(stream, f"Failed to pull extension: {ext_id} to run extension test on.")
return None
ext_info_local = manager.get_extension_dict(ext_id)
if not ext_info_local:
_error(stream, f"Failed to get extension dict: {ext_id} while preparing extension for testing.")
return ext_info_local
def _prepare_app_for_testing(stream) -> Tuple[str, str]:
"""Returns path to app (kit file) and short name of an app."""
test_app = get_setting("/exts/omni.kit.test/testExtApp", default=None)
test_app = carb.tokens.get_tokens_interface().resolve(test_app)
# Test app can be either path to kit file or extension id (to optionally download and use extension as an app)
if test_app.endswith(".kit") or "/" in test_app:
return (test_app, "")
app_ext_info = _prepare_ext_for_testing(test_app, stream)
if app_ext_info:
return (app_ext_info["path"], test_app)
return (None, test_app)
class ExtTestResult:
def __init__(self):
self.duration = 0.0
self.test_count = 0
self.unreliable = 0
self.unreliable_fail = 0
self.fail = 0
self.passed = True
class TestApp:
def __init__(self, stream):
self.path, self.name = _prepare_app_for_testing(stream)
self.is_empty = not self.name
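# Illustrative sketch (not from the original sources) of the kind of [[test]] entry in an extension's extension.toml
# that ExtTest._fill_ext_test below reads; the key names match the config lookups in this file, the values are made up
# and the exact TOML layout may differ per extension:
#
#   [[test]]
#   timeout = 300
#   unreliable = false
#   profiling = false
#   pythonTests.include = ["omni.foo.tests.*"]
#   pythonTests.exclude = []
#   pythonTests.unreliable = []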
class ExtTest:
def __init__(
self,
ext_id: str,
ext_info: carb.dictionary.Item,
test_config: Dict,
test_id: str,
is_parallel_run: bool,
run_context: TestRunContext,
test_app: TestApp,
valid=True,
):
self.context = run_context
self.ext_id = ext_id
self.ext_name = ext_id_to_fullname(ext_id)
self.test_id = test_id
self.app_name = ""
        # TC treats dots as separators when filtering tests in the UI, so replace them.
self.tc_test_id = test_id.replace(".", "+") + ".[PROCESS CHECK]"
self.bucket_name = get_setting("/exts/omni.kit.test/testExtTestBucket", default="")
self.unreliable = False
self.skip = False
self.allow_sampling = True
self.args: List[str] = []
self.patterns = FailPatterns()
self.timeout = -1
self.result = ExtTestResult()
self.retries = 0
self.buffer_stdout = bool(is_parallel_run) or bool(self.context.trim_stdout_on_success)
self.stdout = io.StringIO() if self.buffer_stdout else sys.stdout
self.log_file = ""
self.parallelizable = True
self.reporter = TestReporter(self.stdout)
self.test_app = test_app
self.config = test_config
self.ext_info = ext_info
self.output_path = ""
self.valid = bool(valid and self.ext_info)
self.change_analyzer_result = None
self.failed_tests = []
if self.valid:
self._fill_ext_test()
def _fill_ext_test(self):
self.args = [get_argv()[0]]
self.app_name = "exttest_" + self.test_id.replace(".", "_").replace(":", "-")
ui_mode = get_setting("/exts/omni.kit.test/testExtUIMode", default=False) or ("--dev" in get_argv())
print_mode = get_setting("/exts/omni.kit.test/printTestsAndQuit", default=False)
use_kit_file_as_app = get_setting("/exts/omni.kit.test/testExtUseKitFileAsApp", default=True)
coverage_mode = self.context.coverage_mode
self.ext_id = self.ext_info["package/id"]
self.ext_name = self.ext_info["package/name"]
is_kit_file = self.ext_info.get("isKitFile", False)
        # If the extension is a kit file, just run the startup test without using a test app
ext_path = self.ext_info.get("path", "")
if is_kit_file and use_kit_file_as_app:
self.args += [ext_path]
else:
self.args += [self.test_app.path, "--enable", self.ext_id]
# test output dir
self.output_path = f"{self.context.output_path}/{self.app_name}"
if not os.path.exists(self.output_path):
os.makedirs(self.output_path)
self.reporter.set_output_path(self.output_path)
# current ts (not precise as test run can be delayed relative to this moment)
ts = get_local_timestamp()
self.log_file = f"{self.output_path}/{self.app_name}_{ts}_0.log"
self.args += [
"--/log/flushStandardStreamOutput=1",
"--/app/name=" + self.app_name,
f"--/log/file='{self.log_file}'",
f"--/exts/omni.kit.test/testOutputPath='{self.output_path}'",
f"--/exts/omni.kit.test/extTestId='{self.test_id}'",
f"--/crashreporter/dumpDir='{self.output_path}'",
"--/crashreporter/preserveDump=1",
"--/crashreporter/gatherUserStory=0", # don't pop up the GUI on crash
"--/rtx-transient/dlssg/enabled=false", # OM-97205: Disable DLSS-G for now globally, so L40 tests will all pass. DLSS-G tests will have to enable it
]
# also set extTestId on the parent process - needed when calling unittest_* functions from exttests.py
carb.settings.get_settings().set_string("/exts/omni.kit.test/extTestId", self.test_id)
# Pass all exts folders
ext_folders = list(get_setting("/app/exts/folders", default=[]))
ext_folders += list(get_setting("/persistent/app/exts/userFolders", default=[]))
for folder in ext_folders:
self.args += ["--ext-folder", folder]
# Profiler trace enabled ?
default_profiling = get_setting("/exts/omni.kit.test/testExtEnableProfiler", default=False)
profiling = self.config.get("profiling", default_profiling)
if profiling:
self.args += [
"--/plugins/carb.profiler-cpu.plugin/saveProfile=1",
"--/plugins/carb.profiler-cpu.plugin/compressProfile=1",
"--/app/profileFromStart=1",
f"--/plugins/carb.profiler-cpu.plugin/filePath='{self.output_path}/ct_{self.app_name}_{ts}.gz'",
]
# Timeout for the process
default_timeout = int(get_setting("/exts/omni.kit.test/testExtDefaultTimeout", default=300))
max_timeout = int(get_setting("/exts/omni.kit.test/testExtMaxTimeout", default=0))
self.timeout = self.config.get("timeout", default_timeout)
# Clamp timeout if needed
if max_timeout > 0 and self.timeout > max_timeout:
self.timeout = max_timeout
# [[test]] can be marked as unreliable - meaning it will not run any of its tests unless unreliable tests are run
self.unreliable = self.config.get("unreliable", self.config.get("flaky", False))
# python tests to include
include_tests = list(self.config.get("pythonTests", {}).get("include", []))
exclude_tests = list(self.config.get("pythonTests", {}).get("exclude", []))
unreliable_tests = list(self.config.get("pythonTests", {}).get("unreliable", []))
# When running unreliable tests:
# 1. if the [[test]] is set as unreliable run all python tests (override the `unreliable_tests` list)
# 2. if running unreliable tests - set unreliable to true and disable sampling
if self.unreliable:
unreliable_tests = ["*"]
self.allow_sampling = False
elif unreliable_tests and self.context.run_unreliable_tests != RunExtTests.RELIABLE_ONLY:
self.unreliable = True
self.allow_sampling = False
# Check if we run flaky tests - if we do grab the test list as a playlist
if self.context.run_flaky_tests:
self.allow_sampling = False
query_days = int(get_setting("/exts/omni.kit.test/flakyTestsQueryDays", default=FLAKY_TESTS_QUERY_DAYS))
flaky_test_analyzer = FlakyTestAnalyzer(self.test_id, query_days)
if flaky_test_analyzer.should_skip_test():
self.skip = True
elif self.config.get("samplingFactor") == SamplingFactor.UPPER_BOUND:
pass # if an extension has disabled tests sampling we run all tests
else:
file = flaky_test_analyzer.generate_playlist()
if file:
self.args += [f"--/exts/omni.kit.test/runTestsFromFile='{file}'"]
def get_python_modules(ext_info: carb.dictionary.Item):
python_dict = ext_info.get("python", {})
if isinstance(python_dict, dict):
python_modules = python_dict.get("module", []) + python_dict.get("modules", [])
for m in python_modules:
module = m.get("name")
if module:
yield module
        # By default, if the extension has python modules, use them to fill in the tests mask. Can be overridden with an explicit tests list.
        # Do that only for pure extension tests, so that for tests inside an app, extensions can opt in and add more tests gradually.
python_modules_names = []
python_modules_names.extend(get_python_modules(self.ext_info))
if len(include_tests) == 0 and self.test_app.is_empty:
include_tests.extend(["{}.*".format(e) for e in python_modules_names])
# Cpp test libraries
test_libraries = self.config.get("cppTests", {}).get("libraries", [])
test_libraries = [resolve_path(library, ext_path) for library in test_libraries]
# If extension has tests -> run python (or cpp) tests, otherwise just do startup test
if len(include_tests) > 0 or len(test_libraries) > 0:
# We need kit.test as a test runner then
self.args += ["--enable", "omni.kit.test"]
if ui_mode:
self.args += [
"--enable",
"omni.kit.window.tests",
"--enable",
"omni.kit.window.extensions",
"--enable",
"omni.kit.renderer.core",
"--/exts/omni.kit.window.tests/openWindow=1",
"--/exts/omni.kit.test/testExtUIMode=1",
]
self.timeout = None # No timeout in that case
elif print_mode:
self.args += ["--/exts/omni.kit.test/printTestsAndQuit=true"]
else:
self.args += ["--/exts/omni.kit.test/runTestsAndQuit=true"]
for i, test_mask in enumerate(include_tests):
self.args += [f"--/exts/omni.kit.test/includeTests/{i}='{test_mask}'"]
for i, test_mask in enumerate(exclude_tests):
self.args += [f"--/exts/omni.kit.test/excludeTests/{i}='{test_mask}'"]
for i, test_mask in enumerate(unreliable_tests):
self.args += [f"--/exts/omni.kit.test/unreliableTests/{i}='{test_mask}'"]
for i, test_library in enumerate(test_libraries):
self.args += [f"--/exts/omni.kit.test/testLibraries/{i}='{test_library}'"]
else:
self.args += ["--/app/quitAfter=10", "--/crashreporter/gatherUserStory=0"]
# Reduce output on TC to make log shorter. Mostly that removes long extension startup/shutdown lists. We have
# that information in log files attached to artifacts anyway.
if is_running_on_ci():
self.args += ["--/app/enableStdoutOutput=0"]
# Test filtering (support shorter version)
argv = get_argv()
filter_value = _parse_arg_shortcut(argv, "-f")
if filter_value:
self.args += [f"--/exts/omni.kit.test/runTestsFilter='{filter_value}'"]
# Pass some args down the line:
self.args += _propagate_args(argv, "--portable")
self.args += _propagate_args(argv, "--portable-root", True)
self.args += _propagate_args(argv, "--allow-root")
self.args += _propagate_args(argv, "-d")
self.args += _propagate_args(argv, "-v")
self.args += _propagate_args(argv, "-vv")
self.args += _propagate_args(argv, "--wait-debugger")
self.args += _propagate_args(argv, "--/exts/omni.kit.test/runTestsFilter", starts_with=True)
self.args += _propagate_args(argv, "--/exts/omni.kit.test/runTestsFromFile", starts_with=True)
self.args += _propagate_args(argv, "--/exts/omni.kit.test/testExtRunUnreliableTests", starts_with=True)
self.args += _propagate_args(argv, "--/exts/omni.kit.test/doNotQuit", starts_with=True)
self.args += _propagate_args(argv, "--/exts/omni.kit.test/parallelRun", starts_with=True)
self.args += _propagate_args(argv, "--/telemetry/mode", starts_with=True)
self.args += _propagate_args(argv, "--/crashreporter/data/testName", starts_with=True)
def is_arg_prefix_present(args, prefix: str):
for arg in args:
if arg.startswith(prefix):
return True
return False
# make sure to set the telemetry mode to 'test' if it hasn't explicitly been overridden
# to something else. This prevents structured log events generated from tests from
# unintentionally polluting the telemetry analysis data.
if not is_arg_prefix_present(self.args, "--/telemetry/mode"):
self.args += ["--/telemetry/mode=test"]
# make sure to pass on the test name that was given in the settings if it was not
# explicitly given on the command line.
if not is_arg_prefix_present(self.args, "--/crashreporter/data/testName"):
test_name_setting = get_setting("/crashreporter/data/testName")
            if test_name_setting is not None:
self.args += [f"--/crashreporter/data/testName=\"{test_name_setting}\""]
# Read default coverage settings
default_coverage_settings = read_coverage_collector_settings()
# Sets if python test coverage enabled or disabled
py_coverage_enabled = self.config.get("pyCoverageEnabled", default_coverage_settings.enabled or coverage_mode)
        # This must be set explicitly for the child test process:
        # if the main process gets this setting from the command line and it differs from the
        # values in the configuration files, it must be passed on to the child process. There is
        # no way to know whether the value came from the command line, so always set it
        # explicitly for the child process.
self.args += [f"--/exts/omni.kit.test/pyCoverageEnabled={py_coverage_enabled}"]
if py_coverage_enabled:
self.allow_sampling = False
py_coverage_filter = default_coverage_settings.filter or []
py_coverage_deps_omit = []
# If custom filter is specified, only use that list
custom_filter = self.config.get("pyCoverageFilter", None)
if custom_filter:
py_coverage_filter = custom_filter
else:
# Append all python modules
if self.config.get("pyCoverageIncludeModules", default_coverage_settings.include_modules):
for m in python_modules_names:
py_coverage_filter.append(m)
# Append all python modules from the dependencies
dependencies = [
{
"setting": "pyCoverageIncludeDependencies",
"default": default_coverage_settings.include_dependencies,
"config": self.ext_info,
},
{
"setting": "pyCoverageIncludeTestDependencies",
"default": default_coverage_settings.include_test_dependencies,
"config": self.config,
},
]
for d in dependencies:
if not self.config.get(d["setting"], d["default"]):
continue
deps = d["config"].get("dependencies", [])
manager = omni.kit.app.get_app().get_extension_manager()
for ext_d in manager.get_extensions():
if ext_d["name"] not in deps:
continue
ext_info = manager.get_extension_dict(ext_d["id"])
py_coverage_filter.extend(get_python_modules(ext_info))
# also look for omit in dependencies
test_info = ext_info.get("test", None)
if isinstance(test_info, list) or isinstance(test_info, tuple):
for t in test_info:
for cov_omit in t.get("pyCoverageOmit", []):
cov_omit = cov_omit.replace("\\", "/")
if not os.path.isabs(cov_omit) and not cov_omit.startswith("*/"):
cov_omit = "*/" + cov_omit
py_coverage_deps_omit.append(cov_omit)
if len(py_coverage_filter) > 0:
for i, cov_filter in enumerate(py_coverage_filter):
self.args += [f"--/exts/omni.kit.test/pyCoverageFilter/{i}='{cov_filter}'"]
# omit files/path for coverage
default_py_coverage_omit = default_coverage_settings.omit or []
py_coverage_omit = list(self.config.get("pyCoverageOmit", default_py_coverage_omit))
py_coverage_omit.extend(py_coverage_deps_omit)
if len(py_coverage_omit) > 0:
for i, cov_omit in enumerate(py_coverage_omit):
cov_omit = cov_omit.replace("\\", "/")
if not os.path.isabs(cov_omit) and not cov_omit.startswith("*/"):
cov_omit = "*/" + cov_omit
self.args += [f"--/exts/omni.kit.test/pyCoverageOmit/{i}='{cov_omit}'"]
# in coverage mode we generate a report at the end, need to set the settings on the parent process
if coverage_mode:
carb.settings.get_settings().set("/exts/omni.kit.test/pyCoverageEnabled", py_coverage_enabled)
carb.settings.get_settings().set("/exts/omni.kit.test/testExtGenerateCoverageReport", True)
# Extra extensions to run
exts_to_enable = [self.ext_id]
for ext in self.config.get("dependencies", []):
self.args += ["--enable", ext]
exts_to_enable.append(ext)
# Check if skipped by code change analyzer based on extensions it is about to enable
if self.context.change_analyzer:
self.change_analyzer_result = self.context.change_analyzer.analyze(
self.test_id, self.ext_name, exts_to_enable
)
if self.change_analyzer_result.should_skip_test:
self.skip = True
if not self.context.change_analyzer.allow_sampling():
self.allow_sampling = False
# Tests Sampling per extension
default_sampling = float(
get_setting("/exts/omni.kit.test/testExtSamplingFactor", default=SamplingFactor.UPPER_BOUND)
)
sampling_factor = clamp(
self.config.get("samplingFactor", default_sampling), SamplingFactor.LOWER_BOUND, SamplingFactor.UPPER_BOUND
)
if sampling_factor == SamplingFactor.UPPER_BOUND:
self.allow_sampling = False
if self.allow_sampling and self._use_tests_sampling():
self.args += [f"--/exts/omni.kit.test/samplingFactor={sampling_factor}"]
# tests random order
random_order = get_setting("/exts/omni.kit.test/testExtRandomOrder", default=False)
if random_order:
self.args += ["--/exts/omni.kit.test/testExtRandomOrder=1"]
# Test Sampling Seed
seed = int(get_setting("/exts/omni.kit.test/testExtSamplingSeed", default=-1))
if seed >= 0:
self.args += [f"--/exts/omni.kit.test/testExtSamplingSeed={seed}"]
# Extra args
self.args += list(get_setting("/exts/omni.kit.test/testExtArgs", default=[]))
        # Extra args from the test config
self.args += self.config.get("args", [])
# if in ui mode we need to remove --no-window
if ui_mode:
self.args = [a for a in self.args if a != "--no-window"]
# Build fail patterns
self.patterns = FailPatterns(
self.config.get("stdoutFailPatterns", {}).get("include", []),
self.config.get("stdoutFailPatterns", {}).get("exclude", []),
)
self.patterns.merge(self.context.shared_patterns)
# Pass all unprocessed argv down the line at the very end. They can also have another `--` potentially.
unprocessed_argv = get_unprocessed_argv()
if unprocessed_argv:
self.args += unprocessed_argv
# Other settings
self.parallelizable = self.config.get("parallelizable", True)
def _pre_test_run(self, test_run: int, retry_strategy: RetryStrategy):
"""Update arguments that must change between each test run"""
if test_run > 0:
for index, arg in enumerate(self.args):
# make sure to use a different log file if we run tests multiple times
if arg.startswith("--/log/file="):
ts = get_local_timestamp()
self.log_file = f"{self.output_path}/{self.app_name}_{ts}_{test_run}.log"
self.args[index] = f"--/log/file='{self.log_file}'"
# make sure to use a different random seed if present, only valid on some retry strategies
if retry_strategy == RetryStrategy.ITERATIONS or retry_strategy == RetryStrategy.RERUN_UNTIL_FAILURE:
if arg.startswith("--/exts/omni.kit.test/testExtSamplingSeed="):
random_seed = random.randint(0, 2**16)
self.args[index] = f"--/exts/omni.kit.test/testExtSamplingSeed={random_seed}"
def _use_tests_sampling(self) -> bool:
external_build = get_setting("/privacy/externalBuild")
if external_build:
return False
use_sampling = get_setting("/exts/omni.kit.test/useSampling", default=True)
if not use_sampling:
return False
use_sampling = bool(os.getenv("OMNI_KIT_TEST_USE_SAMPLING", default=True))
if not use_sampling:
return False
sampling_context = get_setting("/exts/omni.kit.test/testExtSamplingContext")
if sampling_context == SamplingContext.CI and is_running_on_ci():
return True
elif sampling_context == SamplingContext.LOCAL and not is_running_on_ci():
return True
return sampling_context == SamplingContext.ANY
def get_cmd(self) -> str:
return " ".join(self.args)
def on_start(self):
self.result = ExtTestResult()
self.reporter.exttest_start(self.test_id, self.tc_test_id, self.ext_id, self.ext_name)
self.stdout.write(BEGIN_SEPARATOR.format(self.test_id))
def on_finish(self, test_result):
self.stdout.write(END_SEPARATOR.format("PASSED" if test_result else "FAILED", self.test_id))
self.reporter.exttest_stop(self.test_id, self.tc_test_id, passed=test_result)
def on_fail(self, fail_message):
        # TC service messages can't match a failure with a test start message when there are other tests in between.
        # As a workaround, in that case stop the test and start it again (send those messages). That results in 2 block
        # entries in the log, but the test gets reported as failed correctly.
if is_running_in_teamcity():
self.reporter.exttest_stop(self.test_id, self.tc_test_id, report=False)
self.reporter.exttest_start(self.test_id, self.tc_test_id, self.ext_id, self.ext_name, report=False)
self.reporter.exttest_fail(self.test_id, self.tc_test_id, "Error", fail_message)
self.stdout.write(f"{fail_message}\n")
def _kill_process_recursive(pid, stream):
def _output(msg: str):
teamcity_message("message", text=msg)
stream.write(msg)
def _terminate(proc: psutil.Process):
try:
proc.terminate()
except psutil.AccessDenied as e:
_error(stream, f"Access denied: {e}")
except psutil.ZombieProcess as e:
_error(stream, f"Encountered a zombie process: {e}")
except psutil.NoSuchProcess as e:
_error(stream, f"Process no longer exists: {e}")
except (psutil.Error, Exception) as e:
_error(stream, f"An error occurred: {str(e)}")
try:
process = psutil.Process(pid)
# kill all children of test process (if any)
for proc in process.children(recursive=True):
if crash_process(proc, stream):
_output(f"\nTest Process Timed out, crashing child test process to collect callstack, PID: {proc.pid}\n\n")
else:
_output(
f"\nAttempt to crash child test process to collect callstack failed. Killing child test process, PID: {proc.pid}\n\n"
)
_terminate(proc)
# kill the test process itself
if crash_process(process, stream):
_output(f"\nTest Process Timed out, crashing test process to collect callstack, PID: {process.pid}\n\n")
else:
_output(
f"\nAttempt to crash test process to collect callstack failed. Killing test process, PID: {process.pid}\n\n"
)
_terminate(process)
except psutil.NoSuchProcess as e:
_error(stream, f"Process no longer exists: {e}")
global _asyncio_process_was_terminated
_asyncio_process_was_terminated = True
PRAGMA_REGEX = re.compile(r"^##omni\.kit\.test\[(.*)\]")
def _extract_metadata_pragma(line, metadata):
"""
    Test subprocs can print specially formatted pragmas that get picked up here as extra fields
that get printed into the status report. Pragmas must be at the start of the line, and should
be the only thing on that line.
Format:
##omni.kit.test[op, key, value]
op = operation type, either "set", "append" or "del" (str)
key = name of the key (str)
value = string value (str)
Examples:
# set a value
##omni.kit.test[set, foo, this is a message and spaces are allowed]
# append a value to a list
##omni.kit.test[append, bah, test-13]
"""
match = PRAGMA_REGEX.match(line)
if not match:
return False
body = match.groups()[0]
args = body.split(",")
args = [x.strip() for x in args]
if not args:
return False
op = args[0]
args = args[1:]
if op in ("set", "append"):
if len(args) != 2:
return False
key, value = args
if op == "set":
metadata[key] = value
elif op == "append":
metadata.setdefault(key, []).append(value)
elif op == "del":
if len(args) != 1:
return False
key = args[0]
del metadata[key]
else:
return False # unsupported pragma op
return True
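# Illustrative sketch (comments only, not executed): a matching pragma line mutates the metadata dict in place.
#   metadata = {}
#   _extract_metadata_pragma("##omni.kit.test[set, foo, some value]", metadata)    # -> True, metadata == {"foo": "some value"}
#   _extract_metadata_pragma("##omni.kit.test[append, bah, test-13]", metadata)    # -> True, metadata["bah"] == ["test-13"]
#   _extract_metadata_pragma("regular stdout line", metadata)                      # -> False, metadata unchanged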
async def _run_test_process(test: ExtTest) -> Tuple[int, List[str], Dict]:
"""Run test process and read stdout (use PIPE)."""
returncode = 0
fail_messages = []
fail_patterns = defaultdict(list)
test_run_metadata = {}
proc = None
try:
test.stdout.write(f">>> running process: {test.get_cmd()}\n")
_debug(test.stdout, f"fail patterns: {test.patterns}")
async def run_proc():
nonlocal proc
proc = await asyncio.create_subprocess_exec(
*test.args,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=0,
)
async for line in proc.stdout:
suppress_line = False
line = line.decode(errors="replace").replace("\r\n", "\n").replace("\r", "\n")
# Check for failure on certain stdout message (like log errors)
nonlocal fail_messages
pattern_str, messages, exclude_matched = test.patterns.match_line(line)
if pattern_str and messages:
fail_patterns[pattern_str].append(messages)
# Check for special pragmas printed by the child proc that tell us to add custom
# fields to the formatted status report
try:
if _extract_metadata_pragma(line, test_run_metadata):
suppress_line = True
except: # noqa
pass
# grab the number of tests
m = re.match(r"(?:Running|Printing All) Tests \(count: (\d+)\)", line, re.M)
if m:
try:
test.result.test_count = int(m.group(1))
except: # noqa
pass
# replace with some generic message to avoid confusion when people search for [error] etc.
if exclude_matched and test.context.trim_excluded_messages:
line = "[...line contained error that was excluded by omni.kit.test...]\n"
if not suppress_line:
test.stdout.write("|| " + line)
await proc.wait()
nonlocal returncode
returncode = proc.returncode
proc = None
await asyncio.wait_for(run_proc(), timeout=test.timeout)
except subprocess.CalledProcessError as e:
returncode = e.returncode
fail_messages.append(f"subprocess.CalledProcessError was raised: {e.output}")
except asyncio.TimeoutError:
returncode = 15
fail_messages.append(
f"Process timed out (timeout: {test.timeout} seconds), terminating. Check artifacts for .dmp files."
)
if proc:
_kill_process_recursive(proc.pid, test.stdout)
except NotImplementedError as e:
fail_messages.append(
f"The asyncio loop does not implement subprocess. This is known to happen when using SelectorEventLoop on Windows, exception {e}"
)
# loop all pattern matches and put them on top of the fail messages
pattern_messages = []
for pattern, messages in fail_patterns.items():
pattern_messages.append(f"Matched {len(messages)} fail pattern '{pattern}' in stdout: ")
for msg in messages:
pattern_messages.append(f" '{msg}'")
fail_messages = pattern_messages + fail_messages
# return code failure check.
if returncode == 13:
# 13 - is code we return when python test fails
failing_tests_cnt = max(len(test_run_metadata.get(KEY_FAILING_TESTS, [])), 1)
fail_messages.append(f"{failing_tests_cnt} test(s) failed.")
elif returncode == 15:
# 15 - is code we return when a test process timeout, fail_message already added
pass
elif returncode != 0:
# other return codes usually mean crash
fail_messages.append("Process might have crashed or timed out.")
# Check if any unittests were started but never completed (crashed/timed out/etc.)
    # When a test crashes the 'stop' message is missing, making test results harder to read; add it manually.
for key, value in test_run_metadata.items():
        if isinstance(value, str) and value.startswith(STARTED_UNITTEST):
test_id = key
tc_test_id = value.replace(STARTED_UNITTEST, "", 1)
test.reporter.unittest_fail(
test_id,
tc_test_id,
"Error",
f"Test started but never finished, test: {tc_test_id}. Test likely crashed or timed out.",
)
test.reporter.unittest_stop(test_id, tc_test_id)
return (returncode, fail_messages, test_run_metadata)
def _propagate_args(argv, arg_name, has_value=False, starts_with=False):
args = []
for i, arg in enumerate(argv):
if arg == arg_name or (starts_with and arg.startswith(arg_name)):
args += [arg]
if has_value:
args += [argv[i + 1]]
return args
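# Illustrative sketch (comments only, not executed): only matching args, plus their value when requested, are forwarded.
#   _propagate_args(["-d", "--portable-root", "C:/kit", "-v"], "--portable-root", has_value=True)  # -> ["--portable-root", "C:/kit"]
#   _propagate_args(["--/telemetry/mode=test"], "--/telemetry/mode", starts_with=True)             # -> ["--/telemetry/mode=test"]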
def _parse_arg_shortcut(argv, arg_name):
for i, arg in enumerate(argv):
if arg == arg_name:
return argv[i + 1]
return None
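# Illustrative sketch (comments only, not executed): returns the value following the shortcut flag, or None when absent.
#   _parse_arg_shortcut(["kit.exe", "-f", "*test_camera*"], "-f")  # -> "*test_camera*"
#   _parse_arg_shortcut(["kit.exe"], "-f")                         # -> None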
def _get_test_configs_for_ext(ext_info, name_filter=None) -> List[Dict]:
test_config = ext_info.get("test", None)
configs = []
if not test_config:
# no [[test]] entry
configs.append({})
elif isinstance(test_config, dict):
# [test] entry
configs.append(test_config)
elif isinstance(test_config, list) or isinstance(test_config, tuple):
# [[test]] entry
if len(test_config) == 0:
configs.append({})
else:
configs.extend(test_config)
# Filter those matching the name filter
configs = [t for t in configs if not name_filter or match(t.get("name", DEFAULT_TEST_NAME), [name_filter])]
# Filter out disabled
configs = [t for t in configs if t.get("enabled", True)]
return configs
def is_matching_list(ext_id, ext_name, ext_list):
return any(fnmatch.fnmatch(ext_id, p) or fnmatch.fnmatch(ext_name, p) for p in ext_list)
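# Illustrative sketch (comments only, not executed): wildcards are matched against both the versioned id and the bare name.
#   is_matching_list("omni.foo-1.2.3", "omni.foo", ["omni.foo"])   # -> True (name matches)
#   is_matching_list("omni.foo-1.2.3", "omni.foo", ["omni.bar*"])  # -> False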
def _build_exts_set(exts: List[str], exclude: List[str], use_registry: bool, match_version_as_string: bool) -> Set[str]:
manager = omni.kit.app.get_app().get_extension_manager()
all_exts = manager.get_extensions()
if use_registry:
manager.sync_registry()
all_exts += manager.get_registry_extensions()
def is_match_ext(ext_id, ext_name, ext_def):
return (fnmatch.fnmatch(ext_id, ext_def) or fnmatch.fnmatch(ext_name, ext_def)) and not is_matching_list(
ext_id, ext_name, exclude
)
exts_to_test = set()
for ext_def in exts:
# Empty string is same as "all"
if ext_def == "":
ext_def = "*"
# If wildcard is used, match all
if "*" in ext_def:
exts_to_test.update([e["id"] for e in all_exts if is_match_ext(e["id"], e["name"], ext_def)])
else:
# Otherwise use extension manager to get matching version and pick highest one (they are sorted)
ext_ids = [v["id"] for v in manager.fetch_extension_versions(ext_def)]
if match_version_as_string:
ext_ids = [v for v in ext_ids if v.startswith(ext_def)]
            # Take the highest version; when not using the registry, skip versions that are only in the remote registry (keep local ones):
for ext_id in ext_ids:
if use_registry or manager.get_extension_dict(ext_id) is not None:
exts_to_test.add(ext_id)
break
return sorted(exts_to_test)
def _format_cmdline(cmdline: str) -> str:
"""Format commandline printed from CI so that we can run it locally"""
cmdline = cmdline.replace("\\", "/").replace("//", "/")
if is_running_on_ci():
exe_path = cmdline.split(" ")[0]
index = exe_path.find("/_build/")
if index != -1:
path_to_remove = exe_path[:index]
cmdline = (
cmdline.replace(path_to_remove, ".")
.replace(path_to_remove.lower(), ".")
.replace(path_to_remove.replace("/", "\\"), ".")
)
return cmdline
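# Illustrative sketch (comments only, not executed), assuming a hypothetical CI checkout prefix:
#   "C:/agent/work/_build/windows-x86_64/release/kit.exe --enable omni.foo"
#       -> "./_build/windows-x86_64/release/kit.exe --enable omni.foo" (the prefix before /_build/ is replaced with ".")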
def _get_test_cmdline(ext_name: str, failed_tests: list = []) -> list:
"""Return an example cmdline to run extension tests or a single unittest"""
cmdline = []
try:
shell_ext = carb.tokens.get_tokens_interface().resolve("${shell_ext}")
kit_exe = carb.tokens.get_tokens_interface().resolve("${kit}")
path_to_kit = _format_cmdline(os.path.relpath(kit_exe, os.getcwd()))
if not path_to_kit.startswith("./"):
path_to_kit = f"./{path_to_kit}"
test_file = f"{path_to_kit}/tests-{ext_name}{shell_ext}"
if failed_tests:
test_name = failed_tests[0].rsplit(".")[-1]
cmdline.append(f" Cmdline to run a single unittest: {test_file} -f *{test_name}")
cmdline.append(f" Cmdline to run the extension tests: {test_file}")
except: # noqa
pass
return cmdline
async def gather_with_concurrency(n, *tasks):
semaphore = asyncio.Semaphore(n)
async def sem_task(task):
async with semaphore:
return await task
return await asyncio.gather(*(sem_task(task) for task in tasks))
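# Illustrative sketch (comments only, not executed): run the given coroutines with at most `n` in flight at a time.
#   results = await gather_with_concurrency(2, coro_a(), coro_b(), coro_c())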
async def run_serial_and_parallel_tasks(parallel_tasks, serial_tasks, max_parallel_tasks: int):
for r in serial_tasks:
yield await r
for r in await gather_with_concurrency(max_parallel_tasks, *parallel_tasks):
yield r
async def _run_ext_test(run_context: TestRunContext, test: ExtTest, on_status_report_fn):
def _print_info(mode: str, result_str: str = None):
if result_str is None:
result_str = "succeeded" if test.result.passed else "failed"
print(f"{test.test_id} test {result_str} ({mode} {test_run + 1} out of {run_context.max_test_run})")
teamcity_test_retry_support(run_context.retry_strategy == RetryStrategy.RETRY_ON_FAILURE)
# Allow retrying tests multiple times:
for test_run in range(run_context.max_test_run):
is_last_try = (test_run == run_context.max_test_run - 1) or (
run_context.retry_strategy == RetryStrategy.NO_RETRY
)
retry_failed_tests = run_context.retry_strategy == RetryStrategy.RETRY_ON_FAILURE
test._pre_test_run(test_run, run_context.retry_strategy)
test = await _run_ext_test_once(test, on_status_report_fn, is_last_try, retry_failed_tests)
# depending on the retry strategy we might continue or exit the loop
if run_context.retry_strategy == RetryStrategy.NO_RETRY:
# max_test_run is ignored in no-retry strategy
break
elif run_context.retry_strategy == RetryStrategy.RETRY_ON_FAILURE:
# retry on failure - stop at first success otherwise continue
result_str = "succeeded"
if not test.result.passed:
result_str = "failed" if is_last_try else "failed, retrying..."
_print_info("attempt", result_str)
if test.result.passed:
break
else:
test.retries += 1
elif run_context.retry_strategy == RetryStrategy.ITERATIONS:
# iterations - continue until the end
_print_info("iteration")
elif run_context.retry_strategy == RetryStrategy.RERUN_UNTIL_FAILURE:
# rerun until failure - stop at first failure otherwise continue
_print_info("rerun")
if not test.result.passed:
break
else:
_error(sys.stderr, f"Invalid retry strategy '{run_context.retry_strategy}'")
return test
async def _run_ext_test_once(test: ExtTest, on_status_report_fn, is_last_try: bool, retry_failed_tests: bool):
ext = test.ext_id
if on_status_report_fn:
on_status_report_fn(test.test_id, TestRunStatus.RUNNING)
# Starting test
test.on_start()
err_messages = []
metadata = {}
cmd = ""
returncode = 0
if test.valid:
cmd = test.get_cmd()
# Run process
start_time = time.time()
returncode, err_messages, metadata = await _run_test_process(test)
test.result.duration = round(time.time() - start_time, 2)
else:
err_messages.append(f"Failed to run process for extension testing (ext: {ext}).")
if test.unreliable:
test.result.unreliable = 1
# Grab failed tests
test.failed_tests = list(metadata.pop(KEY_FAILING_TESTS, []))
for key, value in list(metadata.items()):
        if isinstance(value, str) and value.startswith(STARTED_UNITTEST):
test_id = key
test.failed_tests.append(test_id + " (started but never finished)")
del metadata[key]
if retry_failed_tests:
# remove failed tests from previous run if any
test.args = [item for item in test.args if not item.startswith("--/exts/omni.kit.test/retryFailedTests")]
# Only retry failed tests if all conditions are met:
# - retry-on-failure strategy selected
# - metadata with failing tests is present
# - extension tests reported failures but no crash (return code 13)
        # - at least one retry left to do (i.e. not the last try)
if test.failed_tests and returncode == 13 and not is_last_try:
# add new failed tests as args for the next run
for i, test_id in enumerate(test.failed_tests):
test.args.append(f"--/exts/omni.kit.test/retryFailedTests/{i}='{test_id}'")
    # Assume success; report failure and mark the overall run as failed below if any error messages were collected
test.result.passed = True
if len(err_messages) > 0:
spaces_8 = " " * 8
spaces_12 = " " * 12
messages_str = f"\n{spaces_8}".join([""] + err_messages)
fail_message_lines = [
"",
"[fail] Extension Test failed. Details:",
f" Cmdline: {_format_cmdline(cmd)}",
]
fail_message_lines += _get_test_cmdline(test.ext_name, test.failed_tests)
fail_message_lines += [
f" Return code: {returncode} ({returncode & (2**31-1):#010x})",
f" Failure reason(s): {messages_str}",
]
details_message_lines = [" Details:"]
if metadata:
details_message_lines.append(f"{spaces_8}Metadata:")
for key, value in sorted(metadata.items()):
details_message_lines.append(f"{spaces_12}{key}: {value}")
if test.failed_tests:
messages_str = f"\n{spaces_12}".join([""] + test.failed_tests)
details_message_lines.append(f"{spaces_8}{KEY_FAILING_TESTS}: {messages_str}")
if not omni.kit.app.get_app().is_app_external():
url = f"http://omnitests.nvidia.com/?query={test.test_id}"
details_message_lines.append(f"{spaces_8}Test history:")
details_message_lines.append(f"{spaces_12}{url}")
fail_message = "\n".join(fail_message_lines + details_message_lines)
test.result.passed = False
if test.unreliable:
test.result.unreliable_fail = 1
test.stdout.write("[fail] Extension test failed, but marked as unreliable.\n")
else:
test.result.fail = 1
test.stdout.write("[fail] Extension test failed.\n")
if is_last_try:
test.on_fail(fail_message)
if on_status_report_fn:
on_status_report_fn(test.test_id, TestRunStatus.FAILED, fail_message=fail_message, ext_test=test)
else:
test.stdout.write("[ ok ] Extension test passed.\n")
test.on_finish(test.result.passed)
if test.result.passed and on_status_report_fn:
on_status_report_fn(test.test_id, TestRunStatus.PASSED, ext_test=test)
# dump stdout, acts as stdout sync point for parallel run
if test.stdout != sys.stdout:
if test.context.trim_stdout_on_success and test.result.passed:
for line in test.stdout.getvalue().splitlines():
# We still want to print all service messages to correctly output number of tests on TC and all that.
if "##teamcity[" in line:
sys.stdout.write(line)
sys.stdout.write("\n")
sys.stdout.write(
f"[omni.kit.test] Stdout was trimmed. Look for the Kit log file '{test.log_file}' in TC artifacts for the full output.\n"
)
else:
sys.stdout.write(test.stdout.getvalue())
sys.stdout.flush()
# reset test.stdout (io.StringIO)
test.stdout.truncate(0)
test.stdout.seek(0)
return test
def _build_test_id(test_type: str, ext: str, app: str = "", test_name: str = "") -> str:
s = ""
if test_type:
s += f"{test_type}:"
s += ext_id_to_fullname(ext)
if test_name and test_name != DEFAULT_TEST_NAME:
s += f"-{test_name}"
if app:
s += f"_app:{app}"
return s
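# Illustrative sketch (comments only, not executed), assuming ext_id_to_fullname("omni.foo-1.2.3") returns "omni.foo":
#   _build_test_id("exttest", "omni.foo-1.2.3", app="editor", test_name="ui")  # -> "exttest:omni.foo-ui_app:editor"
#   _build_test_id("exttest", "omni.foo-1.2.3")                                # -> "exttest:omni.foo"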
async def _run_ext_tests(exts, on_status_report_fn, exclude_exts, only_list=False) -> bool:
run_context = TestRunContext()
use_registry = get_setting("/exts/omni.kit.test/testExtUseRegistry", default=False)
match_version_as_string = get_setting("/exts/omni.kit.test/testExtMatchVersionAsString", default=False)
test_type = get_setting("/exts/omni.kit.test/testExtTestType", default="exttest")
# Test Name filtering (support shorter version)
test_name_filter = _parse_arg_shortcut(get_argv(), "-n")
if not test_name_filter:
test_name_filter = get_setting("/exts/omni.kit.test/testExtTestNameFilter", default="")
max_parallel_procs = int(get_setting("/exts/omni.kit.test/testExtMaxParallelProcesses", default=-1))
if max_parallel_procs <= 0:
max_parallel_procs = multiprocessing.cpu_count()
exts_to_test = _build_exts_set(exts, exclude_exts, use_registry, match_version_as_string)
# Prepare an app:
test_app = TestApp(sys.stdout)
def fail_all(fail_message):
reporter = TestReporter(sys.stdout)
for ext in exts:
message = fail_message.format(ext)
test_id = _build_test_id(test_type, ext, test_app.name)
tc_test_id = test_id.replace(".", "+") + ".[PROCESS CHECK]"
_error(sys.stderr, message)
# add start / fail / stop messages for TC + our own reporter
reporter.exttest_start(test_id, tc_test_id, ext, ext)
reporter.exttest_fail(test_id, tc_test_id, fail_type="Error", fail_message=message)
reporter.exttest_stop(test_id, tc_test_id, passed=False)
if on_status_report_fn:
on_status_report_fn(test_id, TestRunStatus.FAILED, fail_message=message)
# If no extensions found report query entries as failures
if len(exts_to_test) == 0:
fail_all("Can't find any extension matching: '{0}'.")
# If no app found report query entries as failures
if not test_app.path:
fail_all(f"Can't find app: {test_app.name}")
exts_to_test = []
# Prepare test run tasks, put into separate serial and parallel queues
parallel_tasks = []
serial_tasks = []
is_parallel_run = max_parallel_procs > 1 and len(exts_to_test) > 1
exts_issues = []
total = 0
for ext in exts_to_test:
ext_info = _prepare_ext_for_testing(ext)
if ext_info:
test_configs = _get_test_configs_for_ext(ext_info, test_name_filter)
unique_test_names = set()
for test_config in test_configs:
valid = True
test_name = test_config.get("name", DEFAULT_TEST_NAME)
if test_name in unique_test_names:
_error(
sys.stderr,
f"Extension {ext} has multiple [[test]] entry with the same 'name' attribute. It should be unique, default is '{DEFAULT_TEST_NAME}'",
)
valid = False
else:
unique_test_names.add(test_name)
total += 1
# Build test id.
test_id = _build_test_id(test_type, ext, test_app.name, test_name)
if only_list:
print(f"test_id: '{test_id}'")
continue
test = ExtTest(
ext,
ext_info,
test_config,
test_id,
is_parallel_run,
run_context=run_context,
test_app=test_app,
valid=valid,
)
# fmt: off
# both means we run all tests (reliable and unreliable)
# otherwise we either run reliable tests only or unreliable tests only, so we skip accordingly
if run_context.run_unreliable_tests != RunExtTests.BOTH and int(run_context.run_unreliable_tests) != int(test.unreliable):
test_unreliable = "unreliable" if test.unreliable else "reliable"
run_unreliable = "unreliable" if run_context.run_unreliable_tests == RunExtTests.UNRELIABLE_ONLY else "reliable"
print(f"[INFO] {test_id} skipped because it's marked as {test_unreliable} and we currently run all {run_unreliable} tests")
total -= 1
continue
# fmt: on
# Test skipped itself? (it should have explained it already by now)
if test.skip:
total -= 1
continue
# A single test may be invoked in more than one way, gather them all
from .ext_test_generator import get_tests_to_run
for test_instance in get_tests_to_run(test, ExtTest, run_context, is_parallel_run, valid):
task = _run_ext_test(run_context, test_instance, on_status_report_fn)
if test_instance.parallelizable:
parallel_tasks.append(task)
else:
serial_tasks.append(task)
else:
exts_issues.append(ext)
intro = f"Running {total} Extension Test Process(es)."
if run_context.run_unreliable_tests == RunExtTests.UNRELIABLE_ONLY:
intro = "[Unreliable Tests Run] " + intro
print(intro)
# Actual test run:
finished_tests: List[ExtTest] = []
fail_count = 0
unreliable_fail_count = 0
unreliable_total = 0
async for test in run_serial_and_parallel_tasks(parallel_tasks, serial_tasks, max_parallel_procs):
unreliable_total += test.result.unreliable
unreliable_fail_count += test.result.unreliable_fail
fail_count += test.result.fail
finished_tests.append(test)
if only_list:
print(f"Found {total} tests processes to run.")
return True
return_result = True
def generate_summary():
for test in finished_tests:
if test.result.passed:
if test.retries > 0:
res_str = "[retry ok]"
else:
res_str = "[ ok ]"
else:
res_str = "[ fail ]"
if test.result.unreliable:
res_str += " [unreliable]"
res_str += f" [{test.result.duration:5.1f}s]"
res_str += f" {test.test_id}"
res_str += f" (Count: {test.result.test_count})"
yield f"{res_str}"
for ext in exts_issues:
res_str = f"[ fail ] {ext} (extension registry issue)"
yield f"{res_str}"
def get_failed_tests():
all_failed_tests = [t for test in finished_tests for t in test.failed_tests]
if all_failed_tests:
yield f"\nFailing tests (Count: {len(all_failed_tests)}) :"
for test_name in all_failed_tests:
yield f" - {test_name}"
# Print summary
test_results_file = os.path.join(run_context.output_path, "ext_test_results.txt")
with open(test_results_file, "a") as f:
def report(line):
print(line)
f.write(line + "\n")
report("\n")
report("=" * 60)
report(f"Extension Tests Run Summary (Date: {run_context.start_ts})")
report("=" * 60)
report(" app: {}".format(test_app.name if not test_app.is_empty else "[empty]"))
report(f" retry strategy: {run_context.retry_strategy}," f" max test run: {run_context.max_test_run}")
report("=" * 60)
for line in generate_summary():
report(line)
for line in get_failed_tests():
report(line)
report("=" * 60)
report("=" * 60)
if unreliable_total > 0:
report(
f"UNRELIABLE TESTS REPORT: {unreliable_fail_count} unreliable tests processes failed out of {unreliable_total}."
)
# Exit with non-zero code on failure
if fail_count > 0 or len(exts_issues) > 0:
if fail_count > 0:
report(f"[ERROR] {fail_count} tests processes failed out of {total}.")
if len(exts_issues) > 0:
report(f"[ERROR] {len(exts_issues)} extension registry issue.")
return_result = False
else:
report(f"[OK] All {total} tests processes returned 0.")
# Report all results
for test in finished_tests:
test.reporter.report_result(test)
return return_result
def run_ext_tests(test_exts, on_finish_fn=None, on_status_report_fn=None, exclude_exts=[]):
def on_status_report(*args, **kwargs):
if on_status_report_fn:
on_status_report_fn(*args, **kwargs)
_test_status_report(*args, **kwargs)
async def run():
result = await _run_ext_tests(test_exts, on_status_report, exclude_exts)
if on_finish_fn:
on_finish_fn(result)
return asyncio.ensure_future(run())
def shutdown_ext_tests():
# When running extension tests and killing the process after timeout, asyncio hangs somewhere in python shutdown.
# Explicitly closing event loop here helps with that.
if _asyncio_process_was_terminated:
def exception_handler(_, exc):
print(f"Asyncio exception on shutdown: {exc}")
asyncio.get_event_loop().set_exception_handler(exception_handler)
asyncio.get_event_loop().close()
| 60,071 | Python | 40.572318 | 161 | 0.594397 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/code_change_analyzer.py | import json
import os
import omni.kit.app
import logging
from typing import List
from .repo_test_context import RepoTestContext
from .utils import sha1_path, sha1_list, get_global_test_output_path
logger = logging.getLogger(__name__)
KNOWN_EXT_SOURCE_PATH = ["kit/source/extensions/", "source/extensions/"]
# We know for sure that this hash changes all the time and used in many many tests, don't want it to mess with our logic for now
STARTUP_SEQUENCE_EXCLUDE = ["omni.rtx.shadercache.d3d12", "omni.rtx.shadercache.vulkan"]
def _get_extension_hash(path):
path = os.path.normpath(path)
hash_cache_file = f"{get_global_test_output_path()}/exts_hash.json"
ext_hashes = {}
# cache hash calculation in a file to speed up things (it's slow)
try:
with open(hash_cache_file, "r") as f:
ext_hashes = json.load(f)
except FileNotFoundError:
pass
except Exception as e:
logger.warn(f"Failed to load extension hashes from {hash_cache_file}, error: {e}")
ext_hash = ext_hashes.get(path, None)
if ext_hash:
return ext_hash
ext_hash = sha1_path(path)
    # re-read the file in case it changed while the hash was being calculated (parallel run) before updating it
try:
with open(hash_cache_file, "r") as f:
ext_hashes = json.load(f)
except FileNotFoundError:
pass
except Exception as e:
logger.warn(f"Failed to load extension hashes from {hash_cache_file}, error: {e}")
ext_hashes[path] = ext_hash
with open(hash_cache_file, "w") as f:
json.dump(ext_hashes, f)
return ext_hash
def _get_extension_name_for_file(file):
for path in KNOWN_EXT_SOURCE_PATH:
if file.startswith(path):
ext = file[len(path) :].split("/")[0]
return ext
return None
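# Illustrative sketch (comments only, not executed): only files under known extension source roots map to an extension name.
#   _get_extension_name_for_file("source/extensions/omni.foo/omni/foo/utils.py")  # -> "omni.foo"
#   _get_extension_name_for_file("source/plugins/carb.tasking/Tasking.cpp")       # -> None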
def _print(str, *argv):
print(f"[omni.kit.test.code_change_analyzer] {str}", *argv)
class ChangeAnalyzerResult:
def __init__(self):
self.should_skip_test = False
self.startup_sequence = []
self.startup_sequence_hash = ""
self.tested_ext_hash = ""
self.kernel_version = ""
class CodeChangeAnalyzer:
"""repo_test can provide (if in MR and on TC) with a list of changed files using env var.
Check if changed ONLY extensions. If any change is not in `source/extensions` -> run all tests
If changed ONLY extensions than for each test solve list of ALL enabled extensions and check against that list.
"""
def __init__(self, repo_test_context: RepoTestContext):
self._allow_sampling = True
self._allow_skipping = False
self._changed_extensions = self._gather_changed_extensions(repo_test_context)
def _gather_changed_extensions(self, repo_test_context: RepoTestContext):
data = repo_test_context.get()
if data:
changed_files = data.get("changed_files", [])
if changed_files:
self._allow_skipping = True
changed_extensions = set()
for file in changed_files:
ext = _get_extension_name_for_file(file)
if ext:
logger.info(f"Changed path: {file} is an extension: {ext}")
changed_extensions.add(ext)
elif self._allow_skipping:
_print("All tests will run. At least one changed file is not in an extension:", file)
self._allow_skipping = False
self._allow_sampling = False
if self._allow_skipping:
ext_list_str = "\n".join(("\t - " + e for e in changed_extensions))
_print(f"Only tests that use those extensions will run. Changed extensions:\n{ext_list_str}")
return changed_extensions
logger.info("No changed files provided")
return set()
def get_changed_extensions(self) -> List[str]:
return list(self._changed_extensions)
def allow_sampling(self) -> bool:
return self._allow_sampling
def _build_startup_sequence(self, result: ChangeAnalyzerResult, ext_name: str, exts: List):
result.kernel_version = omni.kit.app.get_app().get_kernel_version()
result.startup_sequence = [("kernel", result.kernel_version)]
for ext in exts:
if ext["name"] in STARTUP_SEQUENCE_EXCLUDE:
continue
path = ext.get("path", None)
if path:
hash = _get_extension_hash(path)
result.startup_sequence.append((ext["name"], hash))
if ext["name"] == ext_name:
result.tested_ext_hash = hash
# Hash whole startup sequence
result.startup_sequence_hash = sha1_list([hash for ext, hash in result.startup_sequence])
def analyze(self, test_id: str, ext_name: str, exts_to_enable: List[str]) -> ChangeAnalyzerResult:
result = ChangeAnalyzerResult()
result.should_skip_test = False
# Ask manager for extension startup sequence
manager = omni.kit.app.get_app().get_extension_manager()
solve_result, exts, err = manager.solve_extensions(
exts_to_enable, add_enabled=False, return_only_disabled=False
)
if not solve_result:
logger.warn(f"Failed to solve dependencies for extension(s): {exts_to_enable}, error: {err}")
return result
# Build hashes for a startup sequence
self._build_startup_sequence(result, ext_name, exts)
if not self._allow_skipping:
return result
if not self._changed_extensions:
return result
for ext in exts:
if ext["name"] in self._changed_extensions:
_print(f"{test_id} test will run because it uses the changed extension:", ext["name"])
self._allow_sampling = False
return result
_print(
f"{test_id} skipped by code change analyzer. Extensions enabled in this tests were not changed in this MR."
)
result.should_skip_test = True
return result
| 6,153 | Python | 34.572254 | 128 | 0.609946 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/gitlab.py | import os
from functools import lru_cache
# GitLab CI/CD variables :
# https://docs.gitlab.com/ee/ci/variables/predefined_variables.html
@lru_cache()
def is_running_in_gitlab():
return bool(os.getenv("GITLAB_CI"))
@lru_cache()
def get_gitlab_build_url() -> str:
return os.getenv("CI_PIPELINE_URL") or ""
| 317 | Python | 18.874999 | 67 | 0.700315 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/test_reporter.py | from pathlib import Path
import omni.kit.test
from ..reporter import _calculate_durations, _load_report_data, _load_coverage_results, _generate_html_report
CURRENT_PATH = Path(__file__).parent
DATA_TESTS_PATH = CURRENT_PATH.parent.parent.parent.parent.joinpath("data/tests")
class TestReporter(omni.kit.test.AsyncTestCase):
async def test_success_report_data(self):
"""
omni_kit_test_success_report.jsonl contains the report.jsonl of a successful run testing omni.kit.test
"""
path = DATA_TESTS_PATH.joinpath("omni_kit_test_success_report.jsonl")
report_data = _load_report_data(path)
self.assertEqual(len(report_data), 11)
result = report_data[10]
test_result = result.get("result", None)
self.assertNotEqual(test_result, None)
# make sure durations are good
_calculate_durations(report_data)
startup_duration = test_result["startup_duration"]
tests_duration = test_result["tests_duration"]
self.assertAlmostEqual(startup_duration, 1.040, places=3)
self.assertAlmostEqual(tests_duration, 0.007, places=3)
# make sure our ratio are good
duration = test_result["duration"]
startup_ratio = test_result["startup_ratio"]
tests_ratio = test_result["tests_ratio"]
self.assertAlmostEqual(startup_ratio, 100 * (startup_duration / duration), places=3)
self.assertAlmostEqual(tests_ratio, 100 * (tests_duration / duration), places=3)
async def test_fail_report_data(self):
"""
omni_kit_test_fail_report.jsonl contains the report.jsonl of a failed run of testing omni.kit.test
with a few failed tests and also a test that crash
"""
path = DATA_TESTS_PATH.joinpath("omni_kit_test_fail_report.jsonl")
report_data = _load_report_data(path)
self.assertEqual(len(report_data), 18)
result = report_data[17]
test_result = result.get("result", None)
self.assertNotEqual(test_result, None)
# make sure durations are good
_calculate_durations(report_data)
startup_duration = test_result["startup_duration"]
tests_duration = test_result["tests_duration"]
self.assertAlmostEqual(startup_duration, 0.950, places=3)
self.assertAlmostEqual(tests_duration, 0.006, places=3)
# make sure our ratio are good
duration = test_result["duration"]
startup_ratio = test_result["startup_ratio"]
tests_ratio = test_result["tests_ratio"]
self.assertAlmostEqual(startup_ratio, 100 * (startup_duration / duration), places=3)
self.assertAlmostEqual(tests_ratio, 100 * (tests_duration / duration), places=3)
async def test_html_report(self):
path = DATA_TESTS_PATH.joinpath("omni_kit_test_success_report.jsonl")
report_data = _load_report_data(path)
_calculate_durations(report_data)
merged_results, _ = _load_coverage_results(report_data, read_coverage=False)
html = _generate_html_report(report_data, merged_results)
        # total duration is 1.32 seconds, in the html report we keep 1 decimal so it will be shown as 1.3
self.assertTrue(html.find("<td>1.3</td>") != -1)
# startup duration will be 78.8 %
self.assertTrue(html.find("<td>78.8</td>") != -1)
| 3,342 | Python | 47.449275 | 110 | 0.664572 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/test_kit_test.py | import unittest
import carb
import omni.kit.app
import omni.kit.test
# uncomment for dev work
# import unittest
class TestKitTest(omni.kit.test.AsyncTestCase):
async def test_test_settings(self):
# See [[test]] section
carb.log_error("This message will not fail the test because it is excluded in [[test]]")
self.assertEqual(carb.settings.get_settings().get("/extra_arg_passed/param"), 123)
async def test_test_other_settings(self):
self.assertEqual(carb.settings.get_settings().get("/extra_arg_passed/param"), 456)
async def test_that_is_excluded(self):
self.fail("Should not be called")
async def test_get_test(self):
if any("test_that_is_unreliable" in t.id() for t in omni.kit.test.get_tests()):
self.skipTest("Skipping if test_that_is_unreliable ran")
self.assertSetEqual(
{t.id() for t in omni.kit.test.get_tests()},
set(
[
"omni.kit.test.tests.test_kit_test.TestKitTest.test_are_async",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_can_be_skipped_1",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_can_be_skipped_2",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_can_be_sync",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_get_test",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_test_settings",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_with_metadata",
"omni.kit.test.tests.test_kit_test.TestKitTest.test_with_subtest",
"omni.kit.test.tests.test_lookups.TestLookups.test_lookups",
"omni.kit.test.tests.test_nvdf.TestNVDF.test_convert_advanced_types",
"omni.kit.test.tests.test_nvdf.TestNVDF.test_convert_basic_types",
"omni.kit.test.tests.test_nvdf.TestNVDF.test_convert_reserved_types",
"omni.kit.test.tests.test_reporter.TestReporter.test_fail_report_data",
"omni.kit.test.tests.test_reporter.TestReporter.test_html_report",
"omni.kit.test.tests.test_reporter.TestReporter.test_success_report_data",
"omni.kit.test.tests.test_sampling.TestSampling.test_sampling_factor_one",
"omni.kit.test.tests.test_sampling.TestSampling.test_sampling_factor_point_five",
"omni.kit.test.tests.test_sampling.TestSampling.test_sampling_factor_zero",
"omni.kit.test.tests.test_sampling.TestSampling.test_with_fake_nvdf_query",
]
),
)
self.assertListEqual(
[t.id() for t in omni.kit.test.get_tests(tests_filter="test_settings")],
[
"omni.kit.test.tests.test_kit_test.TestKitTest.test_test_settings",
],
)
async def test_are_async(self):
app = omni.kit.app.get_app()
update = app.get_update_number()
await app.next_update_async()
self.assertEqual(app.get_update_number(), update + 1)
def test_can_be_sync(self):
self.assertTrue(True)
@unittest.skip("Skip test with @unittest.skip")
async def test_can_be_skipped_1(self):
self.assertTrue(False)
async def test_can_be_skipped_2(self):
self.skipTest("Skip test with self.skipTest")
self.assertTrue(False)
# subTest will get fixes in python 3.11, see https://bugs.python.org/issue25894
async def test_with_subtest(self):
with self.subTest(msg="subtest example"):
self.assertTrue(True)
async def test_with_metadata(self):
"""This is an example to use metadata"""
print("##omni.kit.test[set, my_key, This line will be printed if the test fails]")
self.assertTrue(True)
async def test_that_is_unreliable(self):
"""This test will not run unless we run unreliable tests"""
self.assertTrue(True) # we don't make it fail when running unreliable tests
# Development tests - uncomment when doing dev work to test all ways a test can succeed / fail
# async def test_success(self):
# self.assertTrue(True)
# async def test_fail_1(self):
# self.assertTrue(False)
# async def test_fail_2(self):
# raise Exception("fuff")
# self.assertTrue(False)
# will crash with stack overflow
# async def test_fail_3(self):
# __import__("sys").setrecursionlimit(100000000)
# def crash():
# crash()
# crash()
| 4,638 | Python | 41.559633 | 101 | 0.617076 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/test_nvdf.py | import omni.kit.test
from ..nvdf import remove_nvdf_form, to_nvdf_form
class TestNVDF(omni.kit.test.AsyncTestCase):
async def test_convert_basic_types(self):
d = {
"some_boolean": True,
"some_int": -123,
"some_float": 0.001,
"array_of_string": ["a", "b"],
}
nv = to_nvdf_form(d)
self.assertDictEqual(
nv, {"b_some_boolean": True, "l_some_int": -123, "d_some_float": 0.001, "s_array_of_string": ["a", "b"]}
)
r = remove_nvdf_form(nv)
self.assertDictEqual(d, r)
async def test_convert_advanced_types(self):
class myClass:
def __init__(self, int_value: int, float_value: float) -> None:
self.cl_int: int = int_value
self.cl_float: float = float_value
m = myClass(12, 0.1)
d = {
"some_list": [3, 4],
"some_tuple": (1, 2),
"some_class": m,
}
nv = to_nvdf_form(d)
self.assertDictEqual(
nv, {"l_some_list": [3, 4], "l_some_tuple": (1, 2), "some_class": {"l_cl_int": 12, "d_cl_float": 0.1}}
)
d["some_class"] = m.__dict__
r = remove_nvdf_form(nv)
self.assertDictEqual(d, r)
async def test_convert_reserved_types(self):
d = {
"ts_anything": 2992929,
"ts_created": 56555,
"_id": 69988,
}
nv = to_nvdf_form(d)
self.assertDictEqual(
nv, {"ts_anything": 2992929, "ts_created": 56555, "_id": 69988}
)
r = remove_nvdf_form(nv)
self.assertDictEqual(d, r)
| 1,649 | Python | 30.132075 | 116 | 0.4906 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/__init__.py | from .test_kit_test import *
from .test_lookups import *
from .test_nvdf import *
from .test_reporter import *
from .test_sampling import *
| 140 | Python | 22.499996 | 28 | 0.742857 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/test_lookups.py | """Test the functionality used by the test runner."""
import omni.kit.app
import omni.kit.test
class TestLookups(omni.kit.test.AsyncTestCase):
async def test_lookups(self):
"""Oddly self-referencing test that uses the test runner test lookup utility to confirm that the utility
finds this test.
"""
manager = omni.kit.app.get_app().get_extension_manager()
my_extension_id = manager.get_enabled_extension_id("omni.kit.test")
module_map = omni.kit.test.get_module_to_extension_map()
self.assertTrue("omni.kit.test" in module_map)
extension_info = module_map["omni.kit.test"]
self.assertEqual((my_extension_id, True), extension_info)
this_test_info = omni.kit.test.extension_from_test_name("omni.kit.test.TestLookups.test_lookups", module_map)
self.assertIsNotNone(this_test_info)
this_test_info_no_module = tuple(e for i, e in enumerate(this_test_info) if i != 2)
self.assertEqual((my_extension_id, True, False), this_test_info_no_module)
| 1,049 | Python | 44.652172 | 117 | 0.682555 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/test/tests/test_sampling.py | import urllib.error
from contextlib import suppress
import omni.kit.test
from ..nvdf import get_app_info
from ..sampling import Sampling
class TestSampling(omni.kit.test.AsyncTestCase):
def setUp(self):
self.sampling = Sampling(get_app_info())
self.unittests = ["test_one", "test_two", "test_three", "test_four"]
async def test_sampling_factor_zero(self):
self.sampling.run_query("omni.foo", self.unittests, running_on_ci=False)
samples = self.sampling.get_tests_to_skip(0.0)
# will return the same list but with a different order
self.assertEqual(len(samples), len(self.unittests))
async def test_sampling_factor_one(self):
self.sampling.run_query("omni.foo", self.unittests, running_on_ci=False)
samples = self.sampling.get_tests_to_skip(1.0)
self.assertListEqual(samples, [])
async def test_sampling_factor_point_five(self):
self.sampling.run_query("omni.foo", self.unittests, running_on_ci=False)
samples = self.sampling.get_tests_to_skip(0.5)
self.assertEqual(len(samples), len(self.unittests) / 2)
async def test_with_fake_nvdf_query(self):
with suppress(urllib.error.URLError):
self.sampling.run_query("omni.foo", self.unittests, running_on_ci=True)
samples = self.sampling.get_tests_to_skip(0.5)
if self.sampling.query_result is True:
self.assertEqual(len(samples), len(self.unittests) / 2)
else:
self.assertListEqual(samples, [])
| 1,551 | Python | 38.794871 | 83 | 0.662153 |
omniverse-code/kit/exts/omni.kit.test/omni/kit/omni_test_registry/omni_test_registry.py | def omni_test_registry(*args, **kwargs):
"""
The decorator for Python tests.
NOTE: currently passing in the test uuid as a kwarg 'guid'
"""
def decorator(func):
func.guid = kwargs.get("guid", None)
return func
return decorator
| 269 | Python | 21.499998 | 62 | 0.6171 |
omniverse-code/kit/exts/omni.kit.test/docs/omni_test_registry.rst | :orphan:
.. _omni.kit.omni_test_registry:
omni.kit.omni_test_registry
###########################
This extension pulls in the `repo_test GUID decorator <https://gitlab-master.nvidia.com/omniverse/repo/repo_test/-/tree/main/omni/repo/test/guid>`_ via the `omniverse_test packman package <http://packman.ov.nvidia.com/packages/omniverse_test>`_ that enables the tagging of tests with GUID metadata. This GUID is then used for tracking tests through renames and relocations.
It is imported in all Python unittest test modules that use omni.kit.test, and the decorator is applied to test methods/functions with a GUID:
.. code:: python
import omni.kit.test
def test_itelemetry_generic_events():
"""Test name + GUID pulled from omni.kit.telemetry for example.
"""
pass
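The snippet below is a hedged sketch of how the decorator is typically applied; the import path (assuming the decorator is re-exported through ``omni.kit.test``) and the GUID value are illustrative assumptions, not taken from the telemetry extension:
.. code:: python
    import omni.kit.test
    @omni.kit.test.omni_test_registry(guid="00000000-0000-0000-0000-000000000000")
    def test_itelemetry_generic_events():
        pass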
**Issues?**
Please reach out to @rafal karp or @chris morrell on Slack, or visit the #ct-omni-repoman Slack channel.
| 922 | reStructuredText | 35.919999 | 372 | 0.715835 |
omniverse-code/kit/exts/omni.kit.test/docs/index.rst | omni.kit.test
###########################
Python asyncio-centric testing system.
To create a test, derive from :class:`omni.kit.test.AsyncTestCase` and add a method that starts with ``test_``, like in :mod:`unittest`. The method can be either an async or a regular one.
.. code:: python
import omni.kit.test
class MyTest(omni.kit.test.AsyncTestCase):
async def setUp(self):
pass
async def tearDown(self):
pass
# Actual test, notice it is "async" function, so "await" can be used if needed
async def test_hello(self):
self.assertEqual(10, 10)
The test class must be defined in the "tests" submodule of your public extension module. For example, if your ``extension.toml`` defines:
.. code:: toml
[[python.module]]
name = "omni.foo"
``omni.foo.tests.MyTest`` should then be the path to your test. The test system will automatically discover and import the ``omni.foo.tests`` module. Using the ``tests`` submodule of your extension module is the recommended way to organize tests. It keeps tests together with the extension, yet not too coupled to the actual module they test, so that they can import the module by its absolute path (e.g. ``import omni.foo``) and test it the way a user will see it.
Refer to the ``omni.example.hello`` extension for the simplest example of an extension with a Python test.
Settings
**********
For the settings, refer to the ``extension.toml`` file:
.. literalinclude:: ../config/extension.toml
:language: toml
They can be used to filter tests, run them automatically on startup, and quit when done.
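For example, a minimal sketch of overriding a couple of these settings from Python via ``carb.settings`` (the setting names shown here are assumptions for illustration; the authoritative list is the ``extension.toml`` included above):
.. code:: python
    import carb.settings
    settings = carb.settings.get_settings()
    # Assumed setting name: include only tests whose name matches a wildcard pattern
    settings.set("/exts/omni.kit.test/includeTests", ["*test_hello*"])
    # Assumed setting name: whether all tests are run automatically on startup before quitting
    print(settings.get("/exts/omni.kit.test/runTestsAndQuit"))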
API Reference
***************
.. automodule:: omni.kit.test
:platform: Windows-x86_64, Linux-x86_64
:members:
:undoc-members:
:imported-members:
:exclude-members: contextlib, suppress
| 1,752 | reStructuredText | 30.872727 | 438 | 0.68379 |
omniverse-code/kit/exts/omni.kit.test_suite.menu/omni/kit/test_suite/menu/tests/__init__.py | from .context_menu_bind_material_listview import *
| 51 | Python | 24.999988 | 50 | 0.803922 |
omniverse-code/kit/exts/omni.kit.test_suite.menu/omni/kit/test_suite/menu/tests/context_menu_bind_material_listview.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import omni.kit.app
import omni.usd
import omni.kit.commands
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, Usd, UsdShade
from omni.kit.material.library.test_helper import MaterialLibraryTestHelper
from omni.kit.test_suite.helpers import (
open_stage,
get_test_data_path,
select_prims,
delete_prim_path_children,
arrange_windows
)
from omni.kit.window.content_browser.test_helper import ContentBrowserTestHelper
class ContextMenuBindMaterialListview(AsyncTestCase):
# Before running each test
async def setUp(self):
await arrange_windows("Stage", 300.0)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
async def test_l1_context_menu_bind_material_listview(self):
await ui_test.find("Stage").focus()
await ui_test.find("Content").focus()
# grid_view_enabled = True doesn't work with item_offset
to_select = ["/World/Cube", "/World/Sphere", "/World/Cylinder"]
stage = omni.usd.get_context().get_stage()
material_test_helper = MaterialLibraryTestHelper()
content_browser_helper = ContentBrowserTestHelper()
await content_browser_helper.toggle_grid_view_async(False)
mdl_list = await omni.kit.material.library.get_mdl_list_async()
for mtl_name, mdl_path, submenu in mdl_list:
# delete any materials in looks
await delete_prim_path_children("/World/Looks")
# get content browser file
await content_browser_helper.navigate_to_async(mdl_path)
await ui_test.human_delay(10)
item = await content_browser_helper.get_treeview_item_async(os.path.basename(mdl_path))
            self.assertIsNotNone(item)
# get content browser treeview
content_treeview = ui_test.find("Content//Frame/**/TreeView[*].identifier=='content_browser_treeview'")
# select prims
await select_prims(to_select)
# right click content browser
await content_treeview.right_click(item.center)
# click on context menu item
await ui_test.select_context_menu("Bind material to selected prim(s)")
# use create material dialog
await material_test_helper.handle_create_material_dialog(mdl_path, mtl_name)
# verify item(s)
for prim_path in to_select:
prim = stage.GetPrimAtPath(prim_path)
bound_material, _ = UsdShade.MaterialBindingAPI(prim).ComputeBoundMaterial()
                self.assertTrue(bound_material.GetPrim().IsValid())
                self.assertEqual(bound_material.GetPrim().GetPrimPath().pathString, f"/World/Looks/{mtl_name}")
| 3,328 | Python | 40.098765 | 115 | 0.683894 |
omniverse-code/kit/exts/omni.kit.test_suite.menu/docs/README.md | # omni.kit.test_suite.menu
## omni.kit.test_suite.menu
Test Suite
| 70 | Markdown | 7.874999 | 27 | 0.7 |
omniverse-code/kit/exts/omni.kit.test_suite.menu/docs/index.rst | omni.kit.test_suite.menu
########################
menu tests
.. toctree::
:maxdepth: 1
CHANGELOG
| 106 | reStructuredText | 9.699999 | 24 | 0.509434 |
omniverse-code/kit/exts/omni.kit.viewport.rtx/config/extension.toml | [package]
# Semantic Versioning is used: https://semver.org/
version = "104.0.0"
# Lists people or organizations that are considered the "authors" of the package.
authors = ["NVIDIA"]
# The title and description fields are primarily for displaying extension info in UI
title = "Viewport RTX Bundle"
description="Extension to make the RTX Realtime and Pathtraced renderer and settings available for the Viewport."
# URL of the extension source repository.
repository = ""
# Keywords for the extension
keywords = ["kit", "ui", "viewport", "hydra", "rtx", "render"]
# Location of change log file in target (final) folder of extension, relative to the root.
# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
changelog="docs/CHANGELOG.md"
# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"
# Icon is shown in the Extensions window; it is recommended to be square, of size 256x256.
icon = "data/icon.png"
preview_image = "data/preview.png"
category = "Viewport"
[dependencies]
# Load the RTX renderer extension
"omni.hydra.rtx" = {}
# Load the RTX renderer settings extension
"omni.rtx.window.settings" = { }
# Main python module this extension provides, it will be publicly available as "import omni.kit.viewport.rtx".
# [[python.module]]
# name = "omni.kit.viewport.rtx"
[settings]
# When including multiple omni.kit.viewport.XXX renderer extensions,
# the app should set this to a comma-delimited list of all renderers to enable and which one to start up with
# renderer.enabled "rtx,iray,pxr"
# renderer.active = "rtx"
# Make sure the renderer is enabled and active
renderer.enabled = "rtx"
renderer.active = "rtx"
[[test]]
# This is just a collection of extensions; they should be tested individually for now
waiver = ""
| 1,788 | TOML | 31.527272 | 113 | 0.744407 |
omniverse-code/kit/exts/omni.kit.viewport.rtx/docs/CHANGELOG.md | # CHANGELOG
This document records all notable changes to ``omni.kit.viewport.rtx`` extension.
This project adheres to `Semantic Versioning <https://semver.org/>`_.
## [104.0.0] - 2022-05-04
### Added
- Initial version
| 220 | Markdown | 23.555553 | 81 | 0.718182 |
omniverse-code/kit/exts/omni.kit.viewport.rtx/docs/README.md | # Viewport RTX Extension [omni.kit.viewport.rtx]
Extension to make the RTX Realtime and Pathtraced renderer and settings available for the Viewport.
| 149 | Markdown | 48.999984 | 99 | 0.818792 |
omniverse-code/kit/exts/omni.kit.viewport.pxr/config/extension.toml | [package]
# Semantic Versioning is used: https://semver.org/
version = "104.0.1"
# Lists people or organizations that are considered the "authors" of the package.
authors = ["NVIDIA"]
# The title and description fields are primarily for displaying extension info in UI
title = "Viewport External Renderers Bundle"
description="Extension to make external HydraDelegate renderers and settings available for the Viewport."
# URL of the extension source repository.
repository = ""
# Keywords for the extension
keywords = ["kit", "ui", "viewport", "hydra", "storm", "render", "pxr", "pixar", "render delegate"]
# Location of change log file in target (final) folder of extension, relative to the root.
# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
changelog="docs/CHANGELOG.md"
# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"
# Icon is shown in the Extensions window; it is recommended to be square, of size 256x256.
icon = "data/icon.png"
preview_image = "data/preview.png"
category = "Viewport"
[dependencies]
# Load the Pixar render delegate extension
"omni.hydra.pxr" = {}
# Load the settings extension for those renderers
"omni.hydra.pxr.settings" = { }
# Main python module this extension provides, it will be publicly available as "import omni.kit.viewport.pxr".
# [[python.module]]
# name = "omni.kit.viewport.pxr"
[settings]
# When including multiple omni.kit.viewport.XXX renderer extensions,
# the app should set this to a comma-delimited list of all renderers to enable and which one to start up with
# renderer.enabled "rtx,iray,pxr"
# renderer.active = "rtx"
# Make sure the renderer is enabled and active
renderer.enabled = "pxr"
renderer.active = "pxr"
# External renderer extensions might append to this list, so put Storm in as valid and enabled.
# The final application can always override this to explicitly disable Storm if desired.
pxr.renderers="HdStormRendererPlugin:GL"
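# For example (illustrative only, assuming the same comma-separated "PluginId:DisplayName" form used by renderer.enabled above):
# pxr.renderers="HdStormRendererPlugin:GL,HdEmbreeRendererPlugin:Embree"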
[[test]]
# This is just a collection of extensions; they should be tested individually for now
waiver = ""
| 2,066 | TOML | 35.263157 | 110 | 0.75363 |
omniverse-code/kit/exts/omni.kit.viewport.pxr/docs/CHANGELOG.md | # CHANGELOG
This document records all notable changes to ``omni.kit.viewport.pxr`` extension.
This project adheres to `Semantic Versioning <https://semver.org/>`_.
## [104.0.1] - 2022-08-11
### Changed
- Default to Storm being available and enabled if other renderers start enabling themselves during startup.
## [104.0.0] - 2022-05-04
### Added
- Initial version
| 367 | Markdown | 27.30769 | 107 | 0.730245 |
omniverse-code/kit/exts/omni.kit.viewport.pxr/docs/README.md | # Viewport Hydra Delegate Extension [omni.kit.viewport.pxr]
Extension to make external HydraDelegate renderers and settings available for the Viewport.
| 152 | Markdown | 49.999983 | 91 | 0.835526 |
omniverse-code/kit/exts/omni.ujitso.python/omni/ujitso/_ujitso.pyi | from __future__ import annotations
import omni.ujitso._ujitso
import typing
import numpy
_Shape = typing.Tuple[int, ...]
__all__ = [
"Agent",
"AgentConfigFlags",
"BuildContext",
"BuildHandle",
"BuildJob",
"DataGrid",
"DataStoreUtils",
"Default",
"DependencyContext",
"DependencyHandle",
"DependencyJob",
"DynamicRequest",
"ExternalStorage",
"ForceRemoteTasks",
"IAgent",
"IDataGrid",
"IDataStore",
"IFactory",
"IHTTPFactory",
"IInProcessFactory",
"ILocalDataStore",
"INucleusDataStore",
"IRegistry",
"IService",
"ITCPFactory",
"ITaskAgent",
"ITaskService",
"KeyToken",
"KeyTokenEx",
"MatchContext",
"MatchResult",
"None",
"OperationResult",
"Processor",
"ProcessorInformation",
"Request",
"RequestCallbackData",
"RequestFilter",
"RequestHandle",
"RequestTokenType",
"RequestType",
"ResultHandle",
"TCPAgentConfigFlags",
"TIME_OUT_INFINITE",
"UjitsoUtils",
"UseRoundRobinServerScheduling",
"ValidationType",
"WaitForConnectionsBeforeLaunch",
"acquire_agent_interface",
"acquire_data_grid_interface",
"acquire_factory_interface",
"acquire_http_factory_interface",
"acquire_in_progress_factory_interface",
"acquire_local_data_store_interface",
"acquire_nucleus_data_store_interface",
"acquire_registry_interface",
"acquire_service_interface",
"acquire_tcp_factory_interface",
"release_agent_interface",
"release_data_grid_interface",
"release_factory_interface",
"release_http_factory_interface",
"release_in_progress_factory_interface",
"release_local_data_store_interface",
"release_nucleus_data_store_interface",
"release_registry_interface",
"release_service_interface",
"release_tcp_factory_interface"
]
class Agent():
@property
def agent(self) -> carb::ujitso::IAgent:
"""
:type: carb::ujitso::IAgent
"""
@property
def factory(self) -> carb::ujitso::IFactory:
"""
:type: carb::ujitso::IFactory
"""
@property
def registry(self) -> IRegistry:
"""
:type: IRegistry
"""
@property
def service(self) -> carb::ujitso::IService:
"""
:type: carb::ujitso::IService
"""
@property
def store(self) -> IDataStore:
"""
:type: IDataStore
"""
@property
def taskAgent(self) -> ITaskAgent:
"""
:type: ITaskAgent
"""
@property
def taskService(self) -> ITaskService:
"""
:type: ITaskService
"""
pass
class AgentConfigFlags():
"""
Members:
None
ForceRemoteTasks
Default
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
Default: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.None: 1>
ForceRemoteTasks: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.ForceRemoteTasks: 2>
None: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.None: 1>
__members__: dict # value = {'None': <AgentConfigFlags.None: 1>, 'ForceRemoteTasks': <AgentConfigFlags.ForceRemoteTasks: 2>, 'Default': <AgentConfigFlags.None: 1>}
pass
class BuildContext():
@property
def agent(self) -> Agent:
"""
:type: Agent
"""
@property
def processor(self) -> carb::ujitso::Processor:
"""
:type: carb::ujitso::Processor
"""
pass
class BuildHandle():
def __init__(self, value: int = 0) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
pass
class BuildJob():
@staticmethod
def __init__(*args, **kwargs) -> typing.Any: ...
@property
def request(self) -> Request:
"""
:type: Request
"""
@request.setter
def request(self, arg0: Request) -> None:
pass
pass
class DataGrid():
@property
def iface(self) -> carb::dad::IDataGrid:
"""
:type: carb::dad::IDataGrid
"""
pass
class DataStoreUtils():
@staticmethod
def copy_data_block(*args, **kwargs) -> typing.Any: ...
pass
class DependencyContext():
@property
def agent(self) -> Agent:
"""
:type: Agent
"""
@property
def processor(self) -> carb::ujitso::Processor:
"""
:type: carb::ujitso::Processor
"""
pass
class DependencyHandle():
def __init__(self, value: int = 0) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
pass
class DependencyJob():
@staticmethod
def __init__(*args, **kwargs) -> typing.Any: ...
@property
def request(self) -> Request:
"""
:type: Request
"""
@request.setter
def request(self, arg0: Request) -> None:
pass
pass
class DynamicRequest():
@typing.overload
def __init__(self) -> None: ...
@typing.overload
def __init__(self, arg0: Request) -> None: ...
def add(self, arg0: KeyTokenEx) -> int: ...
def add_buffer(self, arg0: KeyTokenEx, arg1: RequestTokenType, arg2: numpy.ndarray[numpy.uint8]) -> int: ...
def add_double(self, arg0: KeyTokenEx, arg1: float) -> int: ...
def add_float(self, arg0: KeyTokenEx, arg1: float) -> int: ...
def add_int(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_int16(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_int64(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_key_token(self, arg0: KeyTokenEx, arg1: KeyToken) -> int: ...
def add_string(self, arg0: KeyTokenEx, arg1: str) -> int: ...
def add_uint(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_uint16(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_uint64(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def add_uint8(self, arg0: KeyTokenEx, arg1: int) -> int: ...
def copy(self, arg0: KeyTokenEx, arg1: DynamicRequest) -> bool: ...
def find_key(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_double(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_float(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_int16(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_int32(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_int64(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_key_token(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_string(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_uint16(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_uint32(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_uint64(self, arg0: KeyTokenEx) -> tuple: ...
def get_as_uint8(self, arg0: KeyTokenEx) -> tuple: ...
def get_key(self, arg0: int) -> KeyToken: ...
def get_request(self) -> Request: ...
def get_type(self, arg0: int) -> RequestTokenType: ...
def remove_key(self, arg0: KeyTokenEx) -> bool: ...
def replace_key(self, arg0: KeyTokenEx, arg1: KeyTokenEx) -> bool: ...
def replace_value_double(self, arg0: KeyTokenEx, arg1: float) -> bool: ...
def replace_value_float(self, arg0: KeyTokenEx, arg1: float) -> bool: ...
def replace_value_int(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_int16(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_int64(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_key_token(self, arg0: KeyTokenEx, arg1: KeyToken) -> bool: ...
def replace_value_string(self, arg0: KeyTokenEx, arg1: str) -> bool: ...
def replace_value_uint(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_uint16(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_uint64(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def replace_value_uint8(self, arg0: KeyTokenEx, arg1: int) -> bool: ...
def reserve(self, arg0: int) -> None: ...
def size(self) -> int: ...
pass
class ExternalStorage():
def __init__(self, arg0: numpy.ndarray[numpy.uint64]) -> None: ...
@property
def values(self) -> numpy.ndarray[numpy.uint64]:
"""
:type: numpy.ndarray[numpy.uint64]
"""
@values.setter
def values(self, arg1: numpy.ndarray[numpy.uint64]) -> None:
pass
pass
class IAgent():
def destroy_request(self, arg0: RequestHandle) -> OperationResult: ...
def get_request_external_data(self, arg0: ResultHandle) -> tuple: ...
def get_request_meta_data(self, arg0: ResultHandle) -> tuple: ...
def get_request_result(self, arg0: RequestHandle) -> tuple: ...
def get_request_storage_context(self, arg0: RequestHandle) -> tuple: ...
def request_build(self, arg0: Agent, arg1: Request, arg2: RequestCallbackData) -> tuple: ...
def validate_request_external_data(self, arg0: RequestHandle, arg1: ResultHandle, arg2: numpy.ndarray[bool], arg3: bool) -> OperationResult: ...
def wait_all(self, arg0: Agent, arg1: int) -> OperationResult: ...
def wait_request(self, arg0: RequestHandle, arg1: int) -> OperationResult: ...
pass
class IDataGrid():
def create_data_grid(self) -> DataGrid: ...
def destroy_data_grid(self, arg0: DataGrid) -> None: ...
pass
class IDataStore():
class RetrieveFlags():
"""
Members:
EN_RETRIEVEFLAG_NONE
EN_RETRIEVEFLAG_SYNC
EN_RETRIEVEFLAG_SIZE_ONLY
EN_RETRIEVEFLAG_EXISTENCE_ONLY
EN_RETRIEVEFLAG_LOCAL
EN_RETRIEVEFLAG_CLUSTER
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
EN_RETRIEVEFLAG_CLUSTER: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_CLUSTER: 16>
EN_RETRIEVEFLAG_EXISTENCE_ONLY: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_EXISTENCE_ONLY: 4>
EN_RETRIEVEFLAG_LOCAL: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_LOCAL: 8>
EN_RETRIEVEFLAG_NONE: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_NONE: 0>
EN_RETRIEVEFLAG_SIZE_ONLY: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_SIZE_ONLY: 2>
EN_RETRIEVEFLAG_SYNC: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_SYNC: 1>
__members__: dict # value = {'EN_RETRIEVEFLAG_NONE': <RetrieveFlags.EN_RETRIEVEFLAG_NONE: 0>, 'EN_RETRIEVEFLAG_SYNC': <RetrieveFlags.EN_RETRIEVEFLAG_SYNC: 1>, 'EN_RETRIEVEFLAG_SIZE_ONLY': <RetrieveFlags.EN_RETRIEVEFLAG_SIZE_ONLY: 2>, 'EN_RETRIEVEFLAG_EXISTENCE_ONLY': <RetrieveFlags.EN_RETRIEVEFLAG_EXISTENCE_ONLY: 4>, 'EN_RETRIEVEFLAG_LOCAL': <RetrieveFlags.EN_RETRIEVEFLAG_LOCAL: 8>, 'EN_RETRIEVEFLAG_CLUSTER': <RetrieveFlags.EN_RETRIEVEFLAG_CLUSTER: 16>}
pass
class StoreFlags():
"""
Members:
EN_STOREFLAG_DEFAULT
EN_STOREFLAG_NO_LOCAL
EN_STOREFLAG_LOCAL_INCONSISTENT
EN_STOREFLAG_NO_CLUSTER
EN_STOREFLAG_CLUSTER_INCONSISTENT
EN_STOREFLAG_NO_REMOTE
EN_STOREFLAG_REMOTE_INCONSISTENT
EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW
EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM
EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
EN_STOREFLAG_CLUSTER_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_CLUSTER_INCONSISTENT: 8>
EN_STOREFLAG_DEFAULT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_DEFAULT: 0>
EN_STOREFLAG_LOCAL_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_LOCAL_INCONSISTENT: 2>
EN_STOREFLAG_NO_CLUSTER: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_CLUSTER: 4>
EN_STOREFLAG_NO_LOCAL: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_LOCAL: 1>
EN_STOREFLAG_NO_REMOTE: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_REMOTE: 16>
EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH: 192>
EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW: 64>
EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM: 128>
EN_STOREFLAG_REMOTE_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_REMOTE_INCONSISTENT: 32>
__members__: dict # value = {'EN_STOREFLAG_DEFAULT': <StoreFlags.EN_STOREFLAG_DEFAULT: 0>, 'EN_STOREFLAG_NO_LOCAL': <StoreFlags.EN_STOREFLAG_NO_LOCAL: 1>, 'EN_STOREFLAG_LOCAL_INCONSISTENT': <StoreFlags.EN_STOREFLAG_LOCAL_INCONSISTENT: 2>, 'EN_STOREFLAG_NO_CLUSTER': <StoreFlags.EN_STOREFLAG_NO_CLUSTER: 4>, 'EN_STOREFLAG_CLUSTER_INCONSISTENT': <StoreFlags.EN_STOREFLAG_CLUSTER_INCONSISTENT: 8>, 'EN_STOREFLAG_NO_REMOTE': <StoreFlags.EN_STOREFLAG_NO_REMOTE: 16>, 'EN_STOREFLAG_REMOTE_INCONSISTENT': <StoreFlags.EN_STOREFLAG_REMOTE_INCONSISTENT: 32>, 'EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW': <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW: 64>, 'EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM': <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM: 128>, 'EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH': <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH: 192>}
pass
EN_RETRIEVEFLAG_CLUSTER: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_CLUSTER: 16>
EN_RETRIEVEFLAG_EXISTENCE_ONLY: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_EXISTENCE_ONLY: 4>
EN_RETRIEVEFLAG_LOCAL: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_LOCAL: 8>
EN_RETRIEVEFLAG_NONE: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_NONE: 0>
EN_RETRIEVEFLAG_SIZE_ONLY: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_SIZE_ONLY: 2>
EN_RETRIEVEFLAG_SYNC: omni.ujitso._ujitso.IDataStore.RetrieveFlags # value = <RetrieveFlags.EN_RETRIEVEFLAG_SYNC: 1>
EN_STOREFLAG_CLUSTER_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_CLUSTER_INCONSISTENT: 8>
EN_STOREFLAG_DEFAULT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_DEFAULT: 0>
EN_STOREFLAG_LOCAL_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_LOCAL_INCONSISTENT: 2>
EN_STOREFLAG_NO_CLUSTER: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_CLUSTER: 4>
EN_STOREFLAG_NO_LOCAL: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_LOCAL: 1>
EN_STOREFLAG_NO_REMOTE: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_NO_REMOTE: 16>
EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH: 192>
EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW: 64>
EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM: 128>
EN_STOREFLAG_REMOTE_INCONSISTENT: omni.ujitso._ujitso.IDataStore.StoreFlags # value = <StoreFlags.EN_STOREFLAG_REMOTE_INCONSISTENT: 32>
pass
class IFactory():
def create_agent(self, arg0: DataGrid, arg1: IDataStore, arg2: ITaskAgent, arg3: ITaskService, arg4: AgentConfigFlags) -> Agent: ...
def destroy_agent(self, arg0: Agent) -> None: ...
pass
class IHTTPFactory():
def create_agent(self, arg0: str) -> ITaskAgent: ...
def create_service(self) -> ITaskService: ...
def run_http_jobs(self, arg0: ITaskService, arg1: str, arg2: str) -> str: ...
pass
class IInProcessFactory():
def create_agent(self) -> ITaskAgent: ...
def get_service(self, arg0: ITaskAgent) -> ITaskService: ...
pass
class ILocalDataStore():
def create(self, arg0: str, arg1: int) -> IDataStore: ...
def destroy(self, arg0: IDataStore) -> None: ...
pass
class INucleusDataStore():
def create(self, remote_cache_path: str, remote_cache_discovery_path: str, use_cache_discovery_for_writes: bool = True) -> IDataStore: ...
def destroy(self, arg0: IDataStore) -> None: ...
pass
class IRegistry():
class GlobalKeyToken():
"""
Members:
PATH
VERSION
TIME
FLAGS
PARAM0
PARAM1
PARAM2
PARAM3
CUSTOM_START
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
CUSTOM_START: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.CUSTOM_START: 65536>
FLAGS: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.FLAGS: 3>
PARAM0: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM0: 4>
PARAM1: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM1: 5>
PARAM2: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM2: 6>
PARAM3: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM3: 7>
PATH: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PATH: 0>
TIME: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.TIME: 2>
VERSION: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.VERSION: 1>
__members__: dict # value = {'PATH': <GlobalKeyToken.PATH: 0>, 'VERSION': <GlobalKeyToken.VERSION: 1>, 'TIME': <GlobalKeyToken.TIME: 2>, 'FLAGS': <GlobalKeyToken.FLAGS: 3>, 'PARAM0': <GlobalKeyToken.PARAM0: 4>, 'PARAM1': <GlobalKeyToken.PARAM1: 5>, 'PARAM2': <GlobalKeyToken.PARAM2: 6>, 'PARAM3': <GlobalKeyToken.PARAM3: 7>, 'CUSTOM_START': <GlobalKeyToken.CUSTOM_START: 65536>}
pass
@staticmethod
def register_processor(*args, **kwargs) -> None: ...
@staticmethod
def unregister_processor(*args, **kwargs) -> None: ...
CUSTOM_START: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.CUSTOM_START: 65536>
FLAGS: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.FLAGS: 3>
PARAM0: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM0: 4>
PARAM1: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM1: 5>
PARAM2: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM2: 6>
PARAM3: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PARAM3: 7>
PATH: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.PATH: 0>
TIME: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.TIME: 2>
VERSION: omni.ujitso._ujitso.IRegistry.GlobalKeyToken # value = <GlobalKeyToken.VERSION: 1>
pass
class IService():
def add_dependency(self, arg0: Agent, arg1: DependencyHandle, arg2: Request) -> tuple: ...
def add_request_tuple_input(self, arg0: Agent, arg1: DependencyHandle, arg2: Request, arg3: bool, arg4: bool) -> OperationResult: ...
def allocate_meta_data_storage(self, arg0: Agent, arg1: BuildHandle, arg2: int) -> tuple: ...
def get_dependencies(self, arg0: Agent, arg1: BuildHandle) -> tuple: ...
def get_external_data(self, arg0: Agent, arg1: BuildHandle) -> tuple: ...
def get_meta_data(self, arg0: Agent, arg1: BuildHandle) -> tuple: ...
def set_storage_context(self, arg0: Agent, arg1: DependencyHandle, arg2: str) -> OperationResult: ...
def store_external_data(self, arg0: Agent, arg1: BuildHandle, arg2: typing.List[numpy.ndarray[numpy.uint8]], arg3: typing.List[ValidationType]) -> OperationResult: ...
pass
class ITCPFactory():
def create_agent(self, addressesAndPorts: typing.List[typing.Tuple[str, int]] = [], flags: TCPAgentConfigFlags = TCPAgentConfigFlags.UseRoundRobinServerScheduling) -> ITaskAgent: ...
def create_service(self, port: int = 0) -> ITaskService: ...
def get_service_ip(self, arg0: ITaskService) -> str: ...
pass
class ITaskAgent():
def destroy(self) -> None: ...
pass
class ITaskService():
def destroy(self) -> None: ...
pass
class KeyToken():
def __eq__(self, arg0: KeyToken) -> bool: ...
def __init__(self, value: int = 0) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
STATIC_STRING_HASH_MARKER = 2147483648
__hash__ = None
pass
class KeyTokenEx():
@typing.overload
def __eq__(self, arg0: KeyTokenEx) -> bool: ...
@typing.overload
def __eq__(self, arg0: KeyToken) -> bool: ...
@typing.overload
def __init__(self, arg0: IRegistry.GlobalKeyToken) -> None: ...
@typing.overload
def __init__(self, arg0: str) -> None: ...
@typing.overload
def __init__(self, arg0: KeyToken) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
__hash__ = None
pass
class MatchContext():
@property
def agent(self) -> Agent:
"""
:type: Agent
"""
@property
def processor(self) -> carb::ujitso::Processor:
"""
:type: carb::ujitso::Processor
"""
pass
class MatchResult():
"""
Members:
FAILURE
LOWEST_PRIORITY
NORMAL_PRIORITY
HIGHEST_PRIORITY
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
FAILURE: omni.ujitso._ujitso.MatchResult # value = <MatchResult.FAILURE: 0>
HIGHEST_PRIORITY: omni.ujitso._ujitso.MatchResult # value = <MatchResult.HIGHEST_PRIORITY: 2000>
LOWEST_PRIORITY: omni.ujitso._ujitso.MatchResult # value = <MatchResult.LOWEST_PRIORITY: 1>
NORMAL_PRIORITY: omni.ujitso._ujitso.MatchResult # value = <MatchResult.NORMAL_PRIORITY: 1000>
__members__: dict # value = {'FAILURE': <MatchResult.FAILURE: 0>, 'LOWEST_PRIORITY': <MatchResult.LOWEST_PRIORITY: 1>, 'NORMAL_PRIORITY': <MatchResult.NORMAL_PRIORITY: 1000>, 'HIGHEST_PRIORITY': <MatchResult.HIGHEST_PRIORITY: 2000>}
pass
class OperationResult():
"""
Members:
SUCCESS
FAILURE
OVERFLOW_ERROR
INVALIDHANDLE_ERROR
NOPROCESSOR_ERROR
NOTFOUND_ERROR
NOTBUILT_ERROR
INVALIDMETADATA_ERROR
OUTOFMEMORY_ERROR
DATAVALIDATION_ERROR
INTERNAL
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
DATAVALIDATION_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.DATAVALIDATION_ERROR: 10>
FAILURE: omni.ujitso._ujitso.OperationResult # value = <OperationResult.FAILURE: 1>
INTERNAL: omni.ujitso._ujitso.OperationResult # value = <OperationResult.INTERNAL: 65535>
INVALIDHANDLE_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.INVALIDHANDLE_ERROR: 3>
INVALIDMETADATA_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.INVALIDMETADATA_ERROR: 7>
NOPROCESSOR_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.NOPROCESSOR_ERROR: 4>
NOTBUILT_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.NOTBUILT_ERROR: 6>
NOTFOUND_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.NOTFOUND_ERROR: 5>
OUTOFMEMORY_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.OUTOFMEMORY_ERROR: 8>
OVERFLOW_ERROR: omni.ujitso._ujitso.OperationResult # value = <OperationResult.OVERFLOW_ERROR: 2>
SUCCESS: omni.ujitso._ujitso.OperationResult # value = <OperationResult.SUCCESS: 0>
__members__: dict # value = {'SUCCESS': <OperationResult.SUCCESS: 0>, 'FAILURE': <OperationResult.FAILURE: 1>, 'OVERFLOW_ERROR': <OperationResult.OVERFLOW_ERROR: 2>, 'INVALIDHANDLE_ERROR': <OperationResult.INVALIDHANDLE_ERROR: 3>, 'NOPROCESSOR_ERROR': <OperationResult.NOPROCESSOR_ERROR: 4>, 'NOTFOUND_ERROR': <OperationResult.NOTFOUND_ERROR: 5>, 'NOTBUILT_ERROR': <OperationResult.NOTBUILT_ERROR: 6>, 'INVALIDMETADATA_ERROR': <OperationResult.INVALIDMETADATA_ERROR: 7>, 'OUTOFMEMORY_ERROR': <OperationResult.OUTOFMEMORY_ERROR: 8>, 'DATAVALIDATION_ERROR': <OperationResult.DATAVALIDATION_ERROR: 10>, 'INTERNAL': <OperationResult.INTERNAL: 65535>}
pass
class Processor():
@staticmethod
def __init__(*args, **kwargs) -> typing.Any: ...
pass
class ProcessorInformation():
def __init__(self, arg0: str, arg1: int, arg2: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@name.setter
def name(self, arg1: str) -> None:
pass
@property
def remoteExecutionBatchHint(self) -> int:
"""
:type: int
"""
@remoteExecutionBatchHint.setter
def remoteExecutionBatchHint(self, arg0: int) -> None:
pass
@property
def version(self) -> int:
"""
:type: int
"""
@version.setter
def version(self, arg0: int) -> None:
pass
pass
class Request():
def __init__(self, type: RequestType = ..., token_values: bytes = b'') -> None: ...
@property
def tokenValues(self) -> bytes:
"""
:type: bytes
"""
@property
def type(self) -> RequestType:
"""
:type: RequestType
"""
pass
class RequestCallbackData():
def __init__(self, callback: typing.Callable[[object, object, RequestHandle, ResultHandle, OperationResult], None] = None, callback_context_0: object = None, callback_context_1: object = None) -> None: ...
@property
def callbackContext0(self) -> object:
"""
:type: object
"""
@property
def callbackContext1(self) -> object:
"""
:type: object
"""
pass
class RequestFilter():
def __init__(self, keys: numpy.ndarray[KeyToken] = array([], dtype=[('value', '<u4')])) -> None: ...
@property
def count(self) -> int:
"""
:type: int
"""
@property
def keys(self) -> numpy.ndarray[KeyToken]:
"""
:type: numpy.ndarray[KeyToken]
"""
pass
class RequestHandle():
def __init__(self, value: int = 0) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
pass
class RequestTokenType():
def __eq__(self, arg0: RequestTokenType) -> bool: ...
def __init__(self, size: int = 0, type_name_hash: int = 0) -> None: ...
@property
def size(self) -> int:
"""
:type: int
"""
@size.setter
def size(self, arg0: int) -> None:
pass
@property
def typeNameHash(self) -> int:
"""
:type: int
"""
@typeNameHash.setter
def typeNameHash(self, arg0: int) -> None:
pass
__hash__ = None
pass
class RequestType():
def __init__(self, keys: numpy.ndarray[KeyToken] = array([], dtype=[('value', '<u4')]), types: numpy.ndarray[RequestTokenType] = array([], dtype=[('size', '<u4'), ('typeNameHash', '<u4')])) -> None: ...
@property
def count(self) -> int:
"""
:type: int
"""
@property
def keys(self) -> numpy.ndarray[KeyToken]:
"""
:type: numpy.ndarray[KeyToken]
"""
@property
def types(self) -> numpy.ndarray[RequestTokenType]:
"""
:type: numpy.ndarray[RequestTokenType]
"""
pass
class ResultHandle():
def __init__(self, value: int = 0) -> None: ...
@property
def value(self) -> int:
"""
:type: int
"""
@value.setter
def value(self, arg0: int) -> None:
pass
pass
class TCPAgentConfigFlags():
"""
Members:
None
WaitForConnectionsBeforeLaunch
UseRoundRobinServerScheduling
Default
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
Default: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.UseRoundRobinServerScheduling: 2>
None: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.None: 1>
UseRoundRobinServerScheduling: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.UseRoundRobinServerScheduling: 2>
WaitForConnectionsBeforeLaunch: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.WaitForConnectionsBeforeLaunch: 4>
__members__: dict # value = {'None': <TCPAgentConfigFlags.None: 1>, 'WaitForConnectionsBeforeLaunch': <TCPAgentConfigFlags.WaitForConnectionsBeforeLaunch: 4>, 'UseRoundRobinServerScheduling': <TCPAgentConfigFlags.UseRoundRobinServerScheduling: 2>, 'Default': <TCPAgentConfigFlags.UseRoundRobinServerScheduling: 2>}
pass
class UjitsoUtils():
@staticmethod
def get_request_value_double(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: float) -> float: ...
@staticmethod
def get_request_value_float(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: float) -> float: ...
@staticmethod
def get_request_value_int(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_int16(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_int64(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_key_token(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: KeyToken) -> KeyToken: ...
@staticmethod
def get_request_value_string(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: str) -> str: ...
@staticmethod
def get_request_value_uint(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_uint16(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_uint64(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def get_request_value_uint8(arg0: Agent, arg1: Request, arg2: KeyTokenEx, arg3: int) -> int: ...
@staticmethod
def make_double_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_float_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_int16_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_int32_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_int64_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_key_token_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_uint16_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_uint32_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_uint64_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_uint8_request_token_type() -> RequestTokenType: ...
@staticmethod
def make_void_request_token_type() -> RequestTokenType: ...
pass
class ValidationType():
"""
Members:
MANDATORY
DEFERRED
NONE
"""
def __eq__(self, other: object) -> bool: ...
def __getstate__(self) -> int: ...
def __hash__(self) -> int: ...
def __index__(self) -> int: ...
def __init__(self, value: int) -> None: ...
def __int__(self) -> int: ...
def __ne__(self, other: object) -> bool: ...
def __repr__(self) -> str: ...
def __setstate__(self, state: int) -> None: ...
@property
def name(self) -> str:
"""
:type: str
"""
@property
def value(self) -> int:
"""
:type: int
"""
DEFERRED: omni.ujitso._ujitso.ValidationType # value = <ValidationType.DEFERRED: 1>
MANDATORY: omni.ujitso._ujitso.ValidationType # value = <ValidationType.MANDATORY: 0>
NONE: omni.ujitso._ujitso.ValidationType # value = <ValidationType.NONE: 2>
__members__: dict # value = {'MANDATORY': <ValidationType.MANDATORY: 0>, 'DEFERRED': <ValidationType.DEFERRED: 1>, 'NONE': <ValidationType.NONE: 2>}
pass
def acquire_agent_interface(plugin_name: str = None, library_path: str = None) -> IAgent:
pass
def acquire_data_grid_interface(plugin_name: str = None, library_path: str = None) -> IDataGrid:
pass
def acquire_factory_interface(plugin_name: str = None, library_path: str = None) -> IFactory:
pass
def acquire_http_factory_interface(plugin_name: str = None, library_path: str = None) -> IHTTPFactory:
pass
def acquire_in_progress_factory_interface(plugin_name: str = None, library_path: str = None) -> IInProcessFactory:
pass
def acquire_local_data_store_interface(plugin_name: str = None, library_path: str = None) -> ILocalDataStore:
pass
def acquire_nucleus_data_store_interface(plugin_name: str = None, library_path: str = None) -> INucleusDataStore:
pass
def acquire_registry_interface(plugin_name: str = None, library_path: str = None) -> IRegistry:
pass
def acquire_service_interface(plugin_name: str = None, library_path: str = None) -> IService:
pass
def acquire_tcp_factory_interface(plugin_name: str = None, library_path: str = None) -> ITCPFactory:
pass
def release_agent_interface(arg0: IAgent) -> None:
pass
def release_data_grid_interface(arg0: IDataGrid) -> None:
pass
def release_factory_interface(arg0: IFactory) -> None:
pass
def release_http_factory_interface(arg0: IHTTPFactory) -> None:
pass
def release_in_progress_factory_interface(arg0: IInProcessFactory) -> None:
pass
def release_local_data_store_interface(arg0: ILocalDataStore) -> None:
pass
def release_nucleus_data_store_interface(arg0: INucleusDataStore) -> None:
pass
def release_registry_interface(arg0: IRegistry) -> None:
pass
def release_service_interface(arg0: IService) -> None:
pass
def release_tcp_factory_interface(arg0: ITCPFactory) -> None:
pass
Default: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.None: 1>
ForceRemoteTasks: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.ForceRemoteTasks: 2>
None: omni.ujitso._ujitso.AgentConfigFlags # value = <AgentConfigFlags.None: 1>
TIME_OUT_INFINITE = 4294967295
UseRoundRobinServerScheduling: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.UseRoundRobinServerScheduling: 2>
WaitForConnectionsBeforeLaunch: omni.ujitso._ujitso.TCPAgentConfigFlags # value = <TCPAgentConfigFlags.WaitForConnectionsBeforeLaunch: 4>
| 38,861 | unknown | 39.146694 | 857 | 0.629603 |
omniverse-code/kit/exts/omni.ujitso.python/omni/ujitso/__init__.py | from ._ujitso import *
| 23 | Python | 10.999995 | 22 | 0.695652 |
omniverse-code/kit/exts/omni.ujitso.python/omni/ujitso/tests/test_bindings.py | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import numpy as np
import omni.kit.test
from omni.ujitso import *
class TestBindings(omni.kit.test.AsyncTestCase):
"""Test bindings in this extension"""
async def test_operation_result(self):
"""validate binding of OperationResult"""
self.assertEqual(len(OperationResult.__members__), 11)
self.assertEqual(int(OperationResult.SUCCESS), 0)
self.assertEqual(int(OperationResult.FAILURE), 1)
self.assertEqual(int(OperationResult.OVERFLOW_ERROR), 2)
self.assertEqual(int(OperationResult.INVALIDHANDLE_ERROR), 3)
self.assertEqual(int(OperationResult.NOPROCESSOR_ERROR), 4)
self.assertEqual(int(OperationResult.NOTFOUND_ERROR), 5)
self.assertEqual(int(OperationResult.NOTBUILT_ERROR), 6)
self.assertEqual(int(OperationResult.INVALIDMETADATA_ERROR), 7)
self.assertEqual(int(OperationResult.OUTOFMEMORY_ERROR), 8)
self.assertEqual(int(OperationResult.DATAVALIDATION_ERROR), 10)
self.assertEqual(int(OperationResult.INTERNAL), 0xffff)
async def test_validation_type(self):
"""validate binding of ValidationType"""
self.assertEqual(len(ValidationType.__members__), 3)
self.assertEqual(int(ValidationType.MANDATORY), 0)
self.assertEqual(int(ValidationType.DEFERRED), 1)
self.assertEqual(int(ValidationType.NONE), 2)
async def test_match_result(self):
"""validate binding of MatchResult"""
self.assertEqual(len(MatchResult.__members__), 4)
self.assertEqual(int(MatchResult.FAILURE), 0)
self.assertEqual(int(MatchResult.LOWEST_PRIORITY), 1)
self.assertEqual(int(MatchResult.NORMAL_PRIORITY), 1000)
self.assertEqual(int(MatchResult.HIGHEST_PRIORITY), 2000)
async def test_global_key_token(self):
"""validate binding of GlobalKeyToken"""
self.assertEqual(len(IRegistry.GlobalKeyToken.__members__), 9)
self.assertEqual(int(IRegistry.PATH), 0)
self.assertEqual(int(IRegistry.VERSION), 1)
self.assertEqual(int(IRegistry.TIME), 2)
self.assertEqual(int(IRegistry.FLAGS), 3)
self.assertEqual(int(IRegistry.PARAM0), 4)
self.assertEqual(int(IRegistry.PARAM1), 5)
self.assertEqual(int(IRegistry.PARAM2), 6)
self.assertEqual(int(IRegistry.PARAM3), 7)
self.assertEqual(int(IRegistry.CUSTOM_START), 1 << 16)
self.assertEqual(int(IRegistry.GlobalKeyToken.PATH), 0)
self.assertEqual(int(IRegistry.GlobalKeyToken.VERSION), 1)
self.assertEqual(int(IRegistry.GlobalKeyToken.TIME), 2)
self.assertEqual(int(IRegistry.GlobalKeyToken.FLAGS), 3)
self.assertEqual(int(IRegistry.GlobalKeyToken.PARAM0), 4)
self.assertEqual(int(IRegistry.GlobalKeyToken.PARAM1), 5)
self.assertEqual(int(IRegistry.GlobalKeyToken.PARAM2), 6)
self.assertEqual(int(IRegistry.GlobalKeyToken.PARAM3), 7)
self.assertEqual(int(IRegistry.GlobalKeyToken.CUSTOM_START), 1 << 16)
async def test_retrieve_flags(self):
"""validate binding of RetrieveFlags"""
self.assertEqual(len(IDataStore.RetrieveFlags.__members__), 6)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_NONE), 0)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_SYNC), 1 << 0)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_SIZE_ONLY), 1 << 1)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_EXISTENCE_ONLY), 1 << 2)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_LOCAL), 1 << 3)
self.assertEqual(int(IDataStore.EN_RETRIEVEFLAG_CLUSTER), 1 << 4)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_NONE), 0)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_SYNC), 1 << 0)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_SIZE_ONLY), 1 << 1)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_EXISTENCE_ONLY), 1 << 2)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_LOCAL), 1 << 3)
self.assertEqual(int(IDataStore.RetrieveFlags.EN_RETRIEVEFLAG_CLUSTER), 1 << 4)
async def test_store_flags(self):
"""validate binding of StoreFlags"""
self.assertEqual(len(IDataStore.StoreFlags.__members__), 10)
self.assertEqual(int(IDataStore.EN_STOREFLAG_DEFAULT), 0)
self.assertEqual(int(IDataStore.EN_STOREFLAG_NO_LOCAL), 1 << 0)
self.assertEqual(int(IDataStore.EN_STOREFLAG_LOCAL_INCONSISTENT), 1 << 1)
self.assertEqual(int(IDataStore.EN_STOREFLAG_NO_CLUSTER), 1 << 2)
self.assertEqual(int(IDataStore.EN_STOREFLAG_CLUSTER_INCONSISTENT), 1 << 3)
self.assertEqual(int(IDataStore.EN_STOREFLAG_NO_REMOTE), 1 << 4)
self.assertEqual(int(IDataStore.EN_STOREFLAG_REMOTE_INCONSISTENT), 1 << 5)
self.assertEqual(int(IDataStore.EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW), 1 << 6)
self.assertEqual(int(IDataStore.EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM), 2 << 6)
self.assertEqual(int(IDataStore.EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH), 3 << 6)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_DEFAULT), 0)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_NO_LOCAL), 1 << 0)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_LOCAL_INCONSISTENT), 1 << 1)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_NO_CLUSTER), 1 << 2)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_CLUSTER_INCONSISTENT), 1 << 3)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_NO_REMOTE), 1 << 4)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_REMOTE_INCONSISTENT), 1 << 5)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_LOW), 1 << 6)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_MEDIUM), 2 << 6)
self.assertEqual(int(IDataStore.StoreFlags.EN_STOREFLAG_PERSISTENCE_PRIORITY_HIGH), 3 << 6)
async def test_request_handle(self):
"""validate binding of RequestHandle"""
# validate the default value
handle = RequestHandle()
self.assertEqual(handle.value, 0)
        # only non-negative integers that fit in 64 bits are accepted
        valid_values = [1, (1 << 64) - 1, 0]
invalid_values = [-1, -1.0, 1.0, 1<<64]
# validate setter with valid values
for val in valid_values:
handle.value = val
self.assertEqual(handle.value, val)
# validate setter with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle.value = val
        # only non-negative integers are accepted
        valid_indices = [1, (1 << 32) - 1, 0]
# validate initialization with valid values
for val in valid_values:
handle = RequestHandle(val)
self.assertEqual(handle.value, val)
handle = RequestHandle(value = val)
self.assertEqual(handle.value, val)
# validate initialization with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle = RequestHandle(val)
with self.assertRaises(TypeError):
handle = RequestHandle(value = val)
async def test_key_token(self):
"""validate binding of KeyToken"""
# validate the default value
token = KeyToken()
self.assertEqual(token.value, 0)
        # only non-negative integers that fit in 32 bits are accepted
        valid_values = [1, (1 << 32) - 1, 0]
invalid_values = [-1, -1.0, 1.0, 1<<32]
# validate setter with valid values
for val in valid_values:
token.value = val
self.assertEqual(token.value, val)
# validate setter with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
token.value = val
# the value shouldn't change
self.assertEqual(token.value, valid_values[-1])
# validate initialization with valid values
for val in valid_values:
token = KeyToken(val)
self.assertEqual(token.value, val)
token = KeyToken(value = val)
self.assertEqual(token.value, val)
# validate initialization with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
token = KeyToken(val)
with self.assertRaises(TypeError):
token = KeyToken(value = val)
# validate the value of STATIC_STRING_HASH_MARKER
self.assertEqual(KeyToken.STATIC_STRING_HASH_MARKER, 0x80000000)
# can't set attribute
with self.assertRaises(AttributeError):
KeyToken.STATIC_STRING_HASH_MARKER = 0
# validate __eq__
for val in valid_values:
self.assertEqual(KeyToken(val), KeyToken(val))
self.assertNotEqual(KeyToken(val), KeyToken(2))
async def test_key_token_ex(self):
"""validate binding of KeyTokenEx"""
key_token_ex = KeyTokenEx(IRegistry.GlobalKeyToken.PATH)
self.assertEqual(key_token_ex.value, IRegistry.GlobalKeyToken.PATH)
key_token_ex = KeyTokenEx(IRegistry.PATH)
self.assertEqual(key_token_ex.value, IRegistry.PATH)
key_token_ex = KeyTokenEx("")
key_token_ex = KeyTokenEx("test")
key_token_ex = KeyTokenEx(KeyToken())
self.assertEqual(key_token_ex.value, 0)
valid_values = [0, 1, 1<<32 - 1]
for val in valid_values:
key_token_ex_a = KeyTokenEx(KeyToken(val))
self.assertEqual(key_token_ex_a.value, val)
key_token_ex_b = KeyTokenEx(KeyToken())
key_token_ex_b.value = val
self.assertEqual(key_token_ex_b.value, val)
self.assertTrue(key_token_ex_a == key_token_ex_b)
self.assertTrue(key_token_ex_b == key_token_ex_a)
self.assertFalse(key_token_ex_a != key_token_ex_b)
self.assertFalse(key_token_ex_b != key_token_ex_a)
key_token = KeyToken(val)
self.assertTrue(key_token_ex_a == key_token)
self.assertTrue(key_token == key_token_ex_a)
self.assertFalse(key_token_ex_a != key_token)
self.assertFalse(key_token != key_token_ex_a)
key_token_100 = KeyToken(100)
self.assertFalse(key_token_ex_a == key_token_100)
self.assertFalse(key_token_100 == key_token_ex_a)
self.assertTrue(key_token_ex_a != key_token_100)
self.assertTrue(key_token_100 != key_token_ex_a)
key_token_ex_100 = KeyTokenEx(key_token_100)
self.assertFalse(key_token_ex_a == key_token_ex_100)
self.assertFalse(key_token_ex_100 == key_token_ex_a)
self.assertTrue(key_token_ex_a != key_token_ex_100)
self.assertTrue(key_token_ex_100 != key_token_ex_a)
with self.assertRaises(TypeError):
key_token_ex = KeyTokenEx()
with self.assertRaises(TypeError):
key_token_ex = KeyTokenEx(0)
# This following line will end up calling C++ KeyTokenEx::KeyTokenEx(const char* str) with `str` being nullptr.
# It will lead to crash but it is a problem of the C++ implementation rather than the Python binding code.
#
# key_token_ex = KeyTokenEx(None)
async def test_dynamic_request(self):
"""validate binding of DynamicRequest"""
# validate the default constructor
dynamic_request = DynamicRequest()
self.assertEqual(dynamic_request.size(), 0)
request = dynamic_request.get_request()
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
# validate addition of different data types
buf = [12, 13, 14]
buf_token_type = UjitsoUtils.make_uint8_request_token_type()
buf_token_type.size = len(buf)
key_value_pairs = [
(KeyTokenEx("void"),),
(KeyTokenEx("key token"), KeyToken(1)),
(KeyTokenEx("8-bit unsigned interger"), 2),
(KeyTokenEx("16-bit signed integer"), 3),
(KeyTokenEx("16-bit unsigned integer"), 4),
(KeyTokenEx("32-bit signed integer"), 5),
(KeyTokenEx("32-bit unsigned integer"), 6),
(KeyTokenEx("64-bit signed integer"), 7),
(KeyTokenEx("64-bit unsigned integer"), 8),
(KeyTokenEx("double precision floating point"), 9.0),
(KeyTokenEx("single precision floating point"), 10.0),
(KeyTokenEx("string"), "11"),
(KeyTokenEx("buffer"), buf_token_type, buf)]
token_type_size_list = [0, 4, 1, 2, 2, 4, 4, 8, 8, 8, 4, 3, len(buf)]
self.assertEqual(len(key_value_pairs), len(token_type_size_list))
dynamic_request.reserve(len(key_value_pairs))
counter = 0
index = dynamic_request.add(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_key_token(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_uint8(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_int16(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_uint16(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_int(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_uint(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_int64(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_uint64(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_double(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_float(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_string(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
counter += 1
index = dynamic_request.add_buffer(*(key_value_pairs[counter]))
self.assertEqual(index, counter)
self.assertEqual(dynamic_request.size(), counter + 1)
for i in range(dynamic_request.size()):
key_value_pair = key_value_pairs[i]
token_type_size = token_type_size_list[i]
key = key_value_pair[0]
found, request_token_type = dynamic_request.find_key(key)
self.assertTrue(found)
self.assertEqual(request_token_type.size, token_type_size)
self.assertEqual(dynamic_request.get_key(i), key)
self.assertEqual(dynamic_request.get_type(i), request_token_type)
# validate reading of existent requests through matching template.
counter = 1
found, value = dynamic_request.get_as_key_token(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_uint8(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_int16(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_uint16(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_int32(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_uint32(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_int64(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_uint64(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_double(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_float(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
counter += 1
found, value = dynamic_request.get_as_string(key_value_pairs[counter][0])
self.assertTrue(found)
self.assertEqual(value, key_value_pairs[counter][1])
# validate reading of an existent request through mismatching template.
found, value = dynamic_request.get_as_uint8(key_value_pairs[counter][0])
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_uint8(key_value_pairs[0][0])
self.assertFalse(found)
self.assertEqual(value, 0)
        # validate reading of a nonexistent request.
nonexistent_key_token = KeyTokenEx("nonexistent")
found, value = dynamic_request.get_as_key_token(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, KeyToken())
found, value = dynamic_request.get_as_uint8(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_int16(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_uint16(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_int32(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_uint32(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_int64(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_uint64(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0)
found, value = dynamic_request.get_as_double(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0.0)
found, value = dynamic_request.get_as_float(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, 0.0)
found, value = dynamic_request.get_as_string(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(value, None)
# validate the constructor with arguments
new_dynamic_request = DynamicRequest(Request())
self.assertEqual(new_dynamic_request.size(), 0)
request = new_dynamic_request.get_request()
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
# validate copying of nonexistent requests between DynamicRequest instances
found = new_dynamic_request.copy(nonexistent_key_token, dynamic_request)
self.assertFalse(found)
self.assertEqual(new_dynamic_request.size(), 0)
# validate copying of existent requests between DynamicRequest instances
        existent_key_token = KeyTokenEx("8-bit unsigned integer")
found = new_dynamic_request.copy(existent_key_token, dynamic_request)
self.assertTrue(found)
self.assertEqual(new_dynamic_request.size(), 1)
# validate key replacement of a nonexistent request
new_key_token = KeyTokenEx("unsigned char")
found = new_dynamic_request.replace_key(nonexistent_key_token, new_key_token)
self.assertFalse(found)
# validate key replacement of an existent request
found = new_dynamic_request.replace_key(existent_key_token, new_key_token)
self.assertTrue(found)
found, value = new_dynamic_request.get_as_uint8(new_key_token)
self.assertTrue(found)
self.assertEqual(value, 2)
# validate value replacement
found = new_dynamic_request.replace_value_uint8(new_key_token, 100)
self.assertTrue(found)
# validate removal of a nonexistent request
found = new_dynamic_request.remove_key(nonexistent_key_token)
self.assertFalse(found)
self.assertEqual(new_dynamic_request.size(), 1)
# validate removal of an existent request
found = new_dynamic_request.remove_key(new_key_token)
self.assertTrue(found)
self.assertEqual(new_dynamic_request.size(), 0)
async def test_request_token_type(self):
"""validate binding of RequestTokenType"""
# validate the default value
request_token_type = RequestTokenType()
self.assertEqual(request_token_type.size, 0)
self.assertEqual(request_token_type.typeNameHash, 0)
        # only non-negative integers are accepted
        valid_values = [1, (1 << 32) - 1, 0]
invalid_values = [-1, -1.0, 1.0, 1<<32]
# validate setter with valid values
for val in valid_values:
request_token_type.size = val
self.assertEqual(request_token_type.size, val)
request_token_type.typeNameHash = val
self.assertEqual(request_token_type.typeNameHash, val)
# validate setter with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
request_token_type.size = val
with self.assertRaises(TypeError):
request_token_type.typeNameHash = val
# the value shouldn't change
self.assertEqual(request_token_type.size, valid_values[-1])
self.assertEqual(request_token_type.typeNameHash, valid_values[-1])
# validate initialization with valid values
for val in valid_values:
request_token_type = RequestTokenType(val, val)
self.assertEqual(request_token_type.size, val)
self.assertEqual(request_token_type.typeNameHash, val)
            request_token_type = RequestTokenType(size = val, type_name_hash = val)
self.assertEqual(request_token_type.size, val)
self.assertEqual(request_token_type.typeNameHash, val)
# validate initialization with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
                request_token_type = RequestTokenType(val, val)
with self.assertRaises(TypeError):
                request_token_type = RequestTokenType(size = val, type_name_hash = val)
async def test_make_request_token_type(self):
"""validate bindings of template function makeRequestTokenType()"""
request_token_type = UjitsoUtils.make_key_token_request_token_type()
self.assertEqual(request_token_type.size, 4)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_uint8_request_token_type()
self.assertEqual(request_token_type.size, 1)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_int16_request_token_type()
self.assertEqual(request_token_type.size, 2)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_uint16_request_token_type()
self.assertEqual(request_token_type.size, 2)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_int32_request_token_type()
self.assertEqual(request_token_type.size, 4)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_uint32_request_token_type()
self.assertEqual(request_token_type.size, 4)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_int64_request_token_type()
self.assertEqual(request_token_type.size, 8)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_uint64_request_token_type()
self.assertEqual(request_token_type.size, 8)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_double_request_token_type()
self.assertEqual(request_token_type.size, 8)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_float_request_token_type()
self.assertEqual(request_token_type.size, 4)
self.assertNotEqual(request_token_type.typeNameHash, 0)
request_token_type = UjitsoUtils.make_void_request_token_type()
self.assertEqual(request_token_type.size, 0)
self.assertEqual(request_token_type.typeNameHash, 0)
async def test_get_request_value(self):
"""validate bindings of template function getRequestValue()"""
# validate whether these functions are available in UjitsoUtils.
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_key_token"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_uint8"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_int16"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_uint16"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_int"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_uint"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_int64"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_uint64"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_double"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_float"))
self.assertTrue(hasattr(UjitsoUtils, "get_request_value_string"))
async def test_request_type(self):
"""validate binding of RequestType"""
# validate the default constructor
request_type = RequestType()
self.assertEqual(request_type.count, 0)
self.assertEqual(request_type.keys.tolist(), [])
self.assertEqual(request_type.types.tolist(), [])
# can't set attribute
with self.assertRaises(AttributeError):
request_type.count = 1
with self.assertRaises(AttributeError):
request_type.keys = []
with self.assertRaises(AttributeError):
request_type.types = []
valid_args_list = [
([], []),
([(0,), (1,), (2,)], [(1, 2), (11, 12), (21, 22)]),
]
for keys, types in valid_args_list:
count = len(keys)
self.assertEqual(count, len(types))
request_type = RequestType(keys, types)
self.assertEqual(request_type.count, count)
self.assertEqual(request_type.keys.tolist(), keys)
self.assertEqual(request_type.types.tolist(), types)
        # The array sizes of keys and types don't match
with self.assertRaises(ValueError):
request_type = RequestType([(0,), (1,), (2,)], [])
async def test_request_filter(self):
"""validate binding of RequestFilter"""
# validate the default constructor
request_filter = RequestFilter()
self.assertEqual(request_filter.count, 0)
self.assertEqual(request_filter.keys.tolist(), [])
# can't set attribute
with self.assertRaises(AttributeError):
request_filter.count = 1
with self.assertRaises(AttributeError):
request_filter.keys = []
valid_args_list = [
[],
[(0,), (1,), (2,)],
]
for keys in valid_args_list:
request_filter = RequestFilter(keys)
self.assertEqual(request_filter.count, len(keys))
self.assertEqual(request_filter.keys.tolist(), keys)
async def test_request(self):
"""validate binding of Request"""
# validate the default constructor
request = Request()
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
# can't set attribute
with self.assertRaises(AttributeError):
request.type = RequestType()
with self.assertRaises(AttributeError):
request.tokenValues = b''
request = Request(RequestType(), b'')
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
# validate non-default arguments
keys = [(0,), (1,), (2,)]
types = [(1, 2), (2, 12), (3, 22)]
request_type = RequestType(keys, types)
token_values = b'\x01\x02\x03\x04\x05\x06'
request_size = sum([t[0] for t in types])
self.assertEqual(request_size, len(token_values))
request = Request(request_type, token_values)
self.assertEqual(request.type.count, 3)
self.assertEqual(request.type.keys.tolist(), keys)
self.assertEqual(request.type.types.tolist(), types)
self.assertEqual(request.tokenValues, token_values)
async def test_agent(self):
"""validate binding of Agent"""
with self.assertRaises(TypeError):
agent = Agent()
async def test_request_callback_data(self):
"""validate binding of RequestCallbackData"""
# validate the default constructor
request_callback_data = RequestCallbackData()
self.assertEqual(request_callback_data.callbackContext0, None)
self.assertEqual(request_callback_data.callbackContext1, None)
        # can't read or write the callback attribute
with self.assertRaises(AttributeError):
callback = request_callback_data.callback
with self.assertRaises(AttributeError):
request_callback_data.callback = None
class _Context:
def __init__(self, name):
self.name = name
def print_name(self):
print(self.name)
def _callback(callback_context0, callback_context1, request_handle, request_index, operation_result):
callback_context0.print_name()
callback_context1.print_name()
context0 = _Context("context0")
context1 = _Context("context1")
request_callback_data = RequestCallbackData(_callback, context0, context1)
self.assertEqual(request_callback_data.callbackContext0, context0)
self.assertEqual(request_callback_data.callbackContext1, context1)
async def test_external_storage(self):
"""validate binding of ExternalStorage"""
values = list(range(4))
external_storage = ExternalStorage(values)
self.assertEqual(external_storage.values.tolist(), values)
external_storage = ExternalStorage(tuple(values))
self.assertEqual(external_storage.values.tolist(), values)
values = list(range(100, 104))
external_storage.values = values
self.assertEqual(external_storage.values.tolist(), values)
external_storage.values = tuple(values)
self.assertEqual(external_storage.values.tolist(), values)
with self.assertRaises(TypeError):
external_storage.values = None
with self.assertRaises(ValueError):
external_storage.values = list(range(3))
with self.assertRaises(ValueError):
external_storage.values = list(range(5))
with self.assertRaises(TypeError):
external_storage = ExternalStorage()
with self.assertRaises(TypeError):
external_storage = ExternalStorage(*values)
with self.assertRaises(ValueError):
external_storage = ExternalStorage(list(range(3)))
with self.assertRaises(ValueError):
external_storage = ExternalStorage(list(range(5)))
async def test_service_interface(self):
"""validate binding of IService"""
service_interface = acquire_service_interface()
self.assertIsNotNone(service_interface)
release_service_interface(service_interface)
async def test_local_data_store_interface(self):
""" validate binding of ILocalDataStore"""
local_data_store_interface = acquire_local_data_store_interface()
self.assertIsNotNone(local_data_store_interface)
data_store = local_data_store_interface.create("test", 1024)
self.assertIsNotNone(data_store)
local_data_store_interface.destroy(data_store)
release_local_data_store_interface(local_data_store_interface)
async def test_nucleus_data_store_interface(self):
""" validate binding of INucleusDataStore"""
nucleus_data_store_interface = acquire_nucleus_data_store_interface()
self.assertIsNotNone(nucleus_data_store_interface)
data_store = nucleus_data_store_interface.create("test", "test", False)
self.assertIsNotNone(data_store)
nucleus_data_store_interface.destroy(data_store)
release_nucleus_data_store_interface(nucleus_data_store_interface)
async def test_processor_information(self):
""" validate binding of ProcessorInformation"""
valid_args_list = [(None, 0, 0), ("", 0, 0), ("test1", 0, 0), ("test2", 1, 1)]
for name, version, remote_execution_batch_hint in valid_args_list:
processor_information = ProcessorInformation(name, version, remote_execution_batch_hint)
self.assertEqual(processor_information.name, name)
self.assertEqual(processor_information.version, version)
self.assertEqual(processor_information.remoteExecutionBatchHint, remote_execution_batch_hint)
with self.assertRaises(TypeError):
processor_information = ProcessorInformation()
invalid_args_list = [("test1", -1, 0), ("test2", 0, -1), ("test3", 0.5, 0), ("test4", 0, 0.5)]
for name, version, remote_execution_batch_hint in invalid_args_list:
with self.assertRaises(TypeError):
processor_information = ProcessorInformation(name, version, remote_execution_batch_hint)
processor_information = ProcessorInformation(None, 0, 0)
for name, version, remote_execution_batch_hint in valid_args_list:
processor_information.name = name
processor_information.version = version
processor_information.remoteExecutionBatchHint = remote_execution_batch_hint
self.assertEqual(processor_information.name, name)
self.assertEqual(processor_information.version, version)
self.assertEqual(processor_information.remoteExecutionBatchHint, remote_execution_batch_hint)
processor_information = ProcessorInformation(None, 0, 0)
for name, version, remote_execution_batch_hint in invalid_args_list:
with self.assertRaises(TypeError):
processor_information.name = name
processor_information.version = version
processor_information.remoteExecutionBatchHint = remote_execution_batch_hint
async def test_processor(self):
""" validate binding of Processor"""
processor = Processor(None, None, None, None)
        # It is fine to initialize with functions that have mismatched signatures; an exception will be raised later when these callbacks are triggered.
processor = Processor(lambda : None, lambda : None, lambda : None, lambda : None)
with self.assertRaises(TypeError):
processor = Processor()
with self.assertRaises(TypeError):
processor = Processor(None, None, None)
with self.assertRaises(TypeError):
processor = Processor("", "", "", "")
with self.assertRaises(TypeError):
processor = Processor(0, 0, 0, 0)
async def test_match_context(self):
""" validate binding of MatchContext"""
with self.assertRaises(TypeError):
match_context = MatchContext()
async def test_dependency_context(self):
""" validate binding of DependencyContext"""
with self.assertRaises(TypeError):
dependency_context = DependencyContext()
async def test_build_context(self):
""" validate binding of BuildContext"""
with self.assertRaises(TypeError):
build_context = BuildContext()
async def test_dependency_handle(self):
"""validate binding of DependencyHandle"""
# validate the default value
handle = DependencyHandle()
self.assertEqual(handle.value, 0)
        # only non-negative integers are accepted
        valid_values = [1, (1 << 64) - 1, 0]
invalid_values = [-1, -1.0, 1.0, 1<<64]
# validate setter with valid values
for val in valid_values:
handle.value = val
self.assertEqual(handle.value, val)
# validate setter with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle.value = val
# the value shouldn't change
self.assertEqual(handle.value, valid_values[-1])
# validate initialization with valid values
for val in valid_values:
handle = DependencyHandle(val)
self.assertEqual(handle.value, val)
handle = DependencyHandle(value = val)
self.assertEqual(handle.value, val)
# validate initialization with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle = DependencyHandle(val)
with self.assertRaises(TypeError):
handle = DependencyHandle(value = val)
async def test_build_handle(self):
"""validate binding of BuildHandle"""
# validate the default value
handle = BuildHandle()
self.assertEqual(handle.value, 0)
        # only non-negative integers are accepted
        valid_values = [1, (1 << 64) - 1, 0]
invalid_values = [-1, -1.0, 1.0, 1<<64]
# validate setter with valid values
for val in valid_values:
handle.value = val
self.assertEqual(handle.value, val)
# validate setter with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle.value = val
# the value shouldn't change
self.assertEqual(handle.value, valid_values[-1])
# validate initialization with valid values
for val in valid_values:
handle = BuildHandle(val)
self.assertEqual(handle.value, val)
handle = BuildHandle(value = val)
self.assertEqual(handle.value, val)
# validate initialization with invalid values
for val in invalid_values:
with self.assertRaises(TypeError):
handle = BuildHandle(val)
with self.assertRaises(TypeError):
handle = BuildHandle(value = val)
async def test_dependency_job(self):
""" validate binding of DependencyJob"""
# validate default values
dependency_job = DependencyJob()
request = dependency_job.request
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
keys = [(0,), (1,), (2,)]
types = [(1, 2), (2, 12), (3, 22)]
request_type = RequestType(keys, types)
token_values = b'\x11\x12\x13\x14\x15\x16'
request_size = sum([t[0] for t in types])
self.assertEqual(request_size, len(token_values))
# validate setter and getter
dependency_job.request = Request(request_type, token_values)
request = dependency_job.request
self.assertEqual(request.type.count, len(keys))
self.assertEqual(request.type.keys.tolist(), keys)
self.assertEqual(request.type.types.tolist(), types)
self.assertEqual(request.tokenValues, token_values)
# validate resetting of requests
dependency_job.request = Request()
request = dependency_job.request
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
with self.assertRaises(TypeError):
dependency_job.request = None
async def test_build_job(self):
""" validate binding of BuildJob"""
# validate default values
build_job = BuildJob()
request = build_job.request
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
keys = [(0,), (1,), (2,)]
types = [(1, 2), (2, 12), (3, 22)]
request_type = RequestType(keys, types)
token_values = b'\x11\x12\x13\x14\x15\x16'
request_size = sum([t[0] for t in types])
self.assertEqual(request_size, len(token_values))
# validate setter and getter
request = Request(request_type, token_values)
build_job.request = request
request = build_job.request
self.assertEqual(request.type.count, len(keys))
self.assertEqual(request.type.keys.tolist(), keys)
self.assertEqual(request.type.types.tolist(), types)
self.assertEqual(request.tokenValues, token_values)
# validate resetting of requests
build_job.request = Request()
request = build_job.request
self.assertEqual(request.type.count, 0)
self.assertEqual(request.type.keys.tolist(), [])
self.assertEqual(request.type.types.tolist(), [])
self.assertEqual(request.tokenValues, b'')
with self.assertRaises(TypeError):
build_job.request = None
async def test_data_grid(self):
""" validate binding of DataGrid"""
with self.assertRaises(TypeError):
data_grid = DataGrid()
async def test_data_grid_interface(self):
"""validate binding of IDataGrid"""
data_grid_interface = acquire_data_grid_interface()
self.assertIsNotNone(data_grid_interface)
data_grid = data_grid_interface.create_data_grid()
self.assertIsNotNone(data_grid)
self.assertIsNotNone(data_grid.iface)
# can't set attribute
with self.assertRaises(AttributeError):
data_grid.iface = None
data_grid_interface.destroy_data_grid(data_grid)
release_data_grid_interface(data_grid_interface)
async def test_in_progress_factory_interface(self):
""" validate binding of IInProgressFactory"""
factory_interface = acquire_in_progress_factory_interface()
self.assertIsNotNone(factory_interface)
task_agent = factory_interface.create_agent()
self.assertIsNotNone(task_agent)
task_service = factory_interface.get_service(task_agent)
self.assertIsNotNone(task_service)
task_service.destroy()
task_agent.destroy()
release_in_progress_factory_interface(factory_interface)
async def test_tcp_factory_interface(self):
""" validate binding of ITCPFactory"""
factory_interface = acquire_tcp_factory_interface()
self.assertIsNotNone(factory_interface)
port = 1113
task_service = factory_interface.create_service(port)
self.assertIsNotNone(task_service)
address_and_port = ("127.0.0.1", port)
addresses = [address_and_port]
task_agent = factory_interface.create_agent(addresses)
self.assertIsNotNone(task_agent)
service_ip = factory_interface.get_service_ip(task_service)
self.assertIsNotNone(service_ip)
self.assertTrue(isinstance(service_ip, str))
task_service.destroy()
task_agent.destroy()
release_tcp_factory_interface(factory_interface)
async def test_http_factory_interface(self):
""" validate binding of IHTTPFactory"""
factory_interface = acquire_http_factory_interface()
self.assertIsNotNone(factory_interface)
task_agent = factory_interface.create_agent("test")
self.assertIsNotNone(task_agent)
task_service = factory_interface.create_service()
self.assertIsNotNone(task_service)
result = factory_interface.run_http_jobs(task_service, "desc", "store_path")
self.assertIsNotNone(result)
self.assertTrue(isinstance(result, str))
task_service.destroy()
task_agent.destroy()
release_http_factory_interface(factory_interface)
| 47,293 | Python | 40.053819 | 139 | 0.646058 |
omniverse-code/kit/exts/omni.ujitso.python/omni/ujitso/tests/__init__.py | scan_for_test_modules = True
"""The presence of this object causes the test runner to automatically scan the directory for unit test cases"""
| 142 | Python | 46.666651 | 112 | 0.78169 |
omniverse-code/kit/exts/omni.ujitso.python/omni/ujitso/tests/test_UJITSO.py | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import numpy as np
from omni.kit.test import AsyncTestCase
from omni.ujitso import *
kTokenTypeUSDPath = KeyToken(11)
kTokenTypeRTXPackedMeshLODs = KeyToken(12)
kTokenTypeRTXPackedMeshLOD = KeyToken(13)
kTokenTypeMeshLOD = KeyToken(14)
kTokenTypeTriangulatedMesh = KeyToken(15)
kTokenTypeMesh = KeyToken(16)
kTokenMeshReduceFactor = KeyToken(17)
class ProcessorImpl(Processor):
def __init__(self, agent, key_token, get_info_impl, match_impl, gather_dependencies_impl, build_impl):
super().__init__(get_info_impl, match_impl, gather_dependencies_impl, build_impl)
self._agent = agent
request_filter = RequestFilter([(key_token.value,)])
agent.registry.register_processor(self, request_filter)
def __del__(self):
self._agent.registry.unregister_processor(self)
@staticmethod
def _match_impl(match_context, request_array):
return OperationResult.SUCCESS, MatchResult.NORMAL_PRIORITY
@staticmethod
def _build_impl(build_context, build_job, build_handle):
return OperationResult.SUCCESS
class ProcessorRTXPackedMeshLODs(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorRTXPackedMeshLODs._get_info_impl, ProcessorRTXPackedMeshLODs._match_impl, ProcessorRTXPackedMeshLODs._gather_dependencies_impl, ProcessorRTXPackedMeshLODs._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorRTXPackedMeshLODs", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
dynamic_request = DynamicRequest(dependency_job.request)
dynamic_request.replace_key(KeyTokenEx(kTokenTypeRTXPackedMeshLODs), KeyTokenEx(kTokenTypeRTXPackedMeshLOD))
dynamic_request.add_float(KeyTokenEx(kTokenMeshReduceFactor), 1.0)
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
dynamic_request.replace_value_float(KeyTokenEx(kTokenMeshReduceFactor), 0.3)
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
dynamic_request.replace_value_float(KeyTokenEx(kTokenMeshReduceFactor), 0.1)
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
return OperationResult.SUCCESS
class ProcessorRTXPackedMeshLOD(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorRTXPackedMeshLOD._get_info_impl, ProcessorRTXPackedMeshLOD._match_impl, ProcessorRTXPackedMeshLOD._gather_dependencies_impl, ProcessorRTXPackedMeshLOD._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorRTXPackedMeshLOD", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
dynamic_request = DynamicRequest(dependency_job.request)
dynamic_request.replace_key(KeyTokenEx(kTokenTypeRTXPackedMeshLOD), KeyTokenEx(kTokenTypeMeshLOD))
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
return OperationResult.SUCCESS
class ProcessorMeshLOD(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorMeshLOD._get_info_impl, ProcessorMeshLOD._match_impl, ProcessorMeshLOD._gather_dependencies_impl, ProcessorMeshLOD._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorMeshLOD", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
dynamic_request = DynamicRequest(dependency_job.request)
dynamic_request.replace_key(KeyTokenEx(kTokenTypeMeshLOD), KeyTokenEx(kTokenTypeTriangulatedMesh))
dynamic_request.remove_key(KeyTokenEx(kTokenMeshReduceFactor))
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
return OperationResult.SUCCESS
class ProcessorTriangulatedMesh(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorTriangulatedMesh._get_info_impl, ProcessorTriangulatedMesh._match_impl, ProcessorTriangulatedMesh._gather_dependencies_impl, ProcessorTriangulatedMesh._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorTriangulatedMesh", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
dynamic_request = DynamicRequest(dependency_job.request)
dynamic_request.replace_key(KeyTokenEx(kTokenTypeTriangulatedMesh), KeyTokenEx(kTokenTypeMesh))
dependency_context.agent.service.add_dependency(dependency_context.agent, dependency_handle, dynamic_request.get_request())
return OperationResult.SUCCESS
class ProcessorMesh(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorMesh._get_info_impl, ProcessorMesh._match_impl, ProcessorMesh._gather_dependencies_impl, ProcessorMesh._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorMesh", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
return OperationResult.SUCCESS
class TestUJITSO(AsyncTestCase):
"""Test UJITSO (ported from TestUJITSO.cpp)"""
async def test_ujitso_agent(self):
"""UJITSO agent test"""
self._test_UJITSO()
def _test_UJITSO(self, task_agent = None, task_service = None):
factory_interface = acquire_factory_interface()
self.assertIsNotNone(factory_interface)
data_grid_interface = acquire_data_grid_interface()
self.assertIsNotNone(data_grid_interface)
data_grid = data_grid_interface.create_data_grid()
self.assertIsNotNone(data_grid)
local_data_store_interface = acquire_local_data_store_interface()
self.assertIsNotNone(local_data_store_interface)
data_store = local_data_store_interface.create(None, 0)
self.assertIsNotNone(data_store)
agent = factory_interface.create_agent(data_grid, data_store, task_agent, task_service, AgentConfigFlags.Default)
self.assertIsNotNone(agent)
self.assertIsNotNone(agent.agent)
self._test_registry(agent)
self._test_request(agent)
self._test_lod_mockup(agent)
self._test_build(agent, data_grid, data_store)
factory_interface.destroy_agent(agent)
data_grid_interface.destroy_data_grid(data_grid)
local_data_store_interface.destroy(data_store)
release_factory_interface(factory_interface)
release_local_data_store_interface(local_data_store_interface)
release_data_grid_interface(data_grid_interface)
def _test_registry(self, agent):
string1 = KeyTokenEx("Test1")
string2 = KeyTokenEx("Test2")
string3 = KeyTokenEx("Test1")
self.assertTrue(string1 != string2)
self.assertTrue(string2 != string3)
self.assertTrue(string1 == string3)
def _test_request(self, agent):
agent_interface = agent.agent
keys = [(123,)]
key_token_rtype = UjitsoUtils.make_key_token_request_token_type()
types = [(key_token_rtype.size, key_token_rtype.typeNameHash)]
request_type = RequestType(keys, types)
key_token_dtype = np.dtype([('value', np.uint32)])
key_token_value = [1]
request_values = np.array(key_token_value, key_token_dtype).tobytes()
request = Request(request_type, request_values)
request_callback_data = RequestCallbackData()
operation_result, request_handle = agent_interface.request_build(agent, request, request_callback_data)
self.assertEqual(operation_result, OperationResult.SUCCESS)
self.assertIsNotNone(request_handle)
self.assertNotEqual(request_handle.value, 0)
operation_result = agent_interface.wait_all(agent, TIME_OUT_INFINITE)
self.assertEqual(operation_result, OperationResult.SUCCESS)
operation_result, result_handle = agent_interface.get_request_result(request_handle)
self.assertEqual(operation_result, OperationResult.NOPROCESSOR_ERROR)
operation_result = agent_interface.destroy_request(request_handle)
self.assertEqual(operation_result, OperationResult.SUCCESS)
operation_result = agent_interface.destroy_request(request_handle)
self.assertEqual(operation_result, OperationResult.INVALIDHANDLE_ERROR)
operation_result, result_handle = agent_interface.get_request_result(request_handle)
self.assertEqual(operation_result, OperationResult.INVALIDHANDLE_ERROR)
def _test_lod_mockup(self, agent):
agent_interface = agent.agent
procPackedLODs = ProcessorRTXPackedMeshLODs(agent, kTokenTypeRTXPackedMeshLODs)
procPackedLOD = ProcessorRTXPackedMeshLOD(agent, kTokenTypeRTXPackedMeshLOD)
procMeshLOD = ProcessorMeshLOD(agent, kTokenTypeMeshLOD)
procTriangulatedMesh = ProcessorTriangulatedMesh(agent, kTokenTypeTriangulatedMesh)
procMesh = ProcessorMesh(agent, kTokenTypeMesh)
dynamic_request = DynamicRequest()
dynamic_request.add(KeyTokenEx(kTokenTypeRTXPackedMeshLODs))
dynamic_request.add_string(KeyTokenEx(kTokenTypeUSDPath), "/Meshes/TestMesh")
request = dynamic_request.get_request()
self.assertIsNotNone(request)
request_callback_data = RequestCallbackData()
operation_result, request_handle = agent_interface.request_build(agent, request, request_callback_data)
self.assertEqual(operation_result, OperationResult.SUCCESS)
self.assertIsNotNone(request_handle)
self.assertNotEqual(request_handle.value, 0)
operation_result = agent_interface.wait_all(agent, TIME_OUT_INFINITE)
self.assertEqual(operation_result, OperationResult.SUCCESS)
agent_interface.destroy_request(request_handle)
def _test_build(self, agent, data_grid, data_store):
class ProcessorBuild(ProcessorImpl):
def __init__(self, agent, key_token):
super().__init__(agent, key_token, ProcessorBuild._get_info_impl, ProcessorBuild._match_impl, ProcessorBuild._gather_dependencies_impl, ProcessorBuild._build_impl)
@staticmethod
def _get_info_impl(processor):
return OperationResult.SUCCESS, ProcessorInformation("ProcessorBuild", 1, 1)
@staticmethod
def _gather_dependencies_impl(dependency_context, dependency_job, dependency_handle):
agent = dependency_context.agent
service = agent.service
dynamic_request = DynamicRequest(dependency_job.request)
service.add_request_tuple_input(agent, dependency_handle, dynamic_request.get_request(), False, False)
service.set_storage_context(agent, dependency_handle, "TestStorageContext")
return OperationResult.SUCCESS
@staticmethod
def _build_impl(build_context, build_job, build_handle):
agent = build_context.agent
string_value = UjitsoUtils.get_request_value_string(agent, build_job.request, KeyTokenEx("String"), None)
int_value = UjitsoUtils.get_request_value_int(agent, build_job.request, KeyTokenEx("IntParam"), 0)
dtype = np.dtype('uint32')
elem_size = dtype.itemsize
operation_result, metadata = agent.service.allocate_meta_data_storage(agent, build_handle, elem_size * 255)
self.assertEqual(operation_result, OperationResult.SUCCESS)
# reinterpret data type
metadata_array = np.frombuffer(metadata, dtype)
for i in range(len(metadata_array)):
metadata_array[i] = int_value
external_data = [np.frombuffer(string_value.encode(), dtype=np.uint8)]
validation_data = [ValidationType.MANDATORY]
operation_result = agent.service.store_external_data(agent, build_handle, external_data, validation_data)
return OperationResult.SUCCESS
kIntValue = 0x102
kStringValue = "MyTestStringForFun"
kTokenRequest = KeyTokenEx("String")
test_processor = ProcessorBuild(agent, kTokenRequest)
dynamic_request = DynamicRequest()
dynamic_request.add_int(KeyTokenEx("IntParam"), kIntValue)
dynamic_request.add_string(KeyTokenEx("String"), kStringValue)
request = dynamic_request.get_request()
request_callback_data = RequestCallbackData()
agent_interface = agent.agent
operation_result, request_handle = agent_interface.request_build(agent, request, request_callback_data)
self.assertEqual(operation_result, OperationResult.SUCCESS)
self.assertIsNotNone(request_handle)
self.assertNotEqual(request_handle.value, 0)
operation_result = agent_interface.wait_request(request_handle, TIME_OUT_INFINITE)
self.assertEqual(operation_result, OperationResult.SUCCESS)
operation_result, result_handle = agent_interface.get_request_result(request_handle)
operation_result, metadata = agent_interface.get_request_meta_data(result_handle)
self.assertEqual(operation_result, OperationResult.SUCCESS)
# reinterpret data type
metadata_array = np.frombuffer(metadata, dtype='uint32')
for i in range(len(metadata_array)):
self.assertEqual(metadata_array[i], kIntValue)
operation_result, storages = agent_interface.get_request_external_data(result_handle)
self.assertEqual(operation_result, OperationResult.SUCCESS)
self.assertEqual(len(storages), 1)
operation_result = agent_interface.validate_request_external_data(request_handle, result_handle, [True], True)
self.assertEqual(operation_result, OperationResult.SUCCESS)
storage = storages[0]
operation_result, data_block = DataStoreUtils.copy_data_block(data_store, "", storage)
string_value = ''.join(chr(v) for v in data_block)
self.assertEqual(string_value, kStringValue)
operation_result, storage_context = agent_interface.get_request_storage_context(request_handle)
self.assertEqual(operation_result, OperationResult.SUCCESS)
self.assertEqual(storage_context, "TestStorageContext")
agent_interface.destroy_request(request_handle)
| 15,517 | Python | 46.895062 | 219 | 0.7147 |
omniverse-code/kit/exts/omni.kit.autocapture/omni/kit/autocapture/scripts/extension.py | import os
import importlib
import carb
import carb.settings
try:
import omni.renderer_capture
omni_renderer_capture_present = True
except ImportError:
omni_renderer_capture_present = False
import omni.ext
import omni.kit.app
from omni.hydra.engine.stats import HydraEngineStats
class Extension(omni.ext.IExt):
def __init__(self):
super().__init__()
pass
def _set_default_settings(self):
self._settings.set_default_int("/app/captureFrame/startFrame", -1)
self._settings.set_default("/app/captureFrame/startMultipleFrame/0", -1)
self._settings.set_default_bool("/app/captureFrame/closeApplication", False)
self._settings.set_default_string("/app/captureFrame/fileName", "no-filename-specified")
self._settings.set_default_string("/app/captureFrame/outputPath", "")
self._settings.set_default_bool("/app/captureFrame/setAlphaTo1", True)
self._settings.set_default_bool("/app/captureFrame/saveFps", False)
self._settings.set_default_bool("/app/captureFrame/hdr", False)
self._settings.set_default_int("/app/captureFrame/asyncBufferSizeMB", 2048)
self._settings.set_default_bool("/renderer/gpuProfiler/record", False)
self._settings.set_default_int("/renderer/gpuProfiler/maxIndent", 1)
def on_startup(self):
self._settings = carb.settings.get_settings()
self._set_default_settings()
self._app = omni.kit.app.get_app()
try:
module_omni_usd = importlib.import_module('omni.usd')
self._usd_context = module_omni_usd.get_context()
self._opened_state = module_omni_usd.StageState.OPENED
except ImportError:
self._usd_context = None
self._opened_state = None
if omni_renderer_capture_present:
self._renderer_capture = omni.renderer_capture.acquire_renderer_capture_interface()
self._renderer_capture.start_frame_updates()
else:
self._renderer_capture = None
carb.log_error("Autocapture initialization failed: renderer.capture extension should be present!")
return
# Initial configuration
self._frame_no = 0
        # The app may exit before the last image has been saved; wait _quitFrameCounter extra frames before quitting
self._quitFrameCounter = 10
self._multiple_frame_no = 0
self._start_frame = self._settings.get("/app/captureFrame/startFrame")
self._start_multiple_frame = self._settings.get("/app/captureFrame/startMultipleFrame")
self._close_app = self._settings.get("/app/captureFrame/closeApplication")
self._file_name = self._settings.get("/app/captureFrame/fileName")
self._output_path = self._settings.get("/app/captureFrame/outputPath")
if len(self._output_path) == 0:
module_carb_tokens = importlib.import_module('carb.tokens')
self._output_path = module_carb_tokens.get_tokens_interface().resolve("${kit}") + "/../../../outputs/"
self._record_gpu_performance = self._settings.get("/renderer/gpuProfiler/record")
self._recording_max_indent = self._settings.get("/renderer/gpuProfiler/maxIndent")
self._gpu_perf = []
# viewport_api = get_active_viewport()
# self.__stats = HydraEngineStats(viewport_api.usd_context_name, viewport_api.hydra_engine)
self.__stats = HydraEngineStats()
self._count_loading_frames = False
self._next_frame_exit = False
if self._start_frame > 0 or self._start_multiple_frame[0] > 0:
def on_post_update(e: carb.events.IEvent):
if not self._app.is_app_ready():
return
if self._next_frame_exit:
if self._quitFrameCounter <= 0:
self._app.post_quit()
self._quitFrameCounter = self._quitFrameCounter - 1
return None
count_frame = True
if not self._count_loading_frames and self._usd_context is not None:
if self._usd_context.get_stage_state() != self._opened_state:
                        count_frame = False
if count_frame:
if self._record_gpu_performance and self.__stats:
frame_perf = self.__stats.get_nested_gpu_profiler_result(self._recording_max_indent)
dev_count = len(frame_perf)
has_data = False
for dev_idx in range(dev_count):
if len(frame_perf[dev_idx]) > 0:
has_data = True
break
if has_data:
if len(self._gpu_perf) == 0:
self._gpu_perf.extend(frame_perf)
for dev_idx in range(dev_count):
self._gpu_perf[dev_idx] = {}
for dev_idx in range(dev_count):
self._gpu_perf[dev_idx]["frame %d" % (self._frame_no)] = frame_perf[dev_idx]
self._frame_no += 1
if self._start_frame > 0:
if self._frame_no >= self._start_frame:
self._renderer_capture.capture_next_frame_swapchain(self._output_path + self._file_name)
self._next_frame_exit = self._close_app
if self._start_multiple_frame[0] > 0 and self._multiple_frame_no < len(self._start_multiple_frame):
if self._frame_no >= self._start_multiple_frame[self._multiple_frame_no] :
self._renderer_capture.capture_next_frame_swapchain(self._output_path + self._file_name + "_" +str(self._start_multiple_frame[self._multiple_frame_no]))
self._multiple_frame_no += 1
if self._multiple_frame_no >= len(self._start_multiple_frame):
self._next_frame_exit = self._close_app
self._post_update_subs = self._app.get_post_update_event_stream().create_subscription_to_pop(on_post_update, name="Autocapture post-update")
def on_shutdown(self):
if self._record_gpu_performance:
json_filename = self._output_path + self._file_name + ".json"
dump_json = {}
dev_count = len(self._gpu_perf)
for dev_idx in range(dev_count):
dump_json["GPU-%d" % (dev_idx)] = self._gpu_perf[dev_idx]
import json
with open(json_filename, 'w', encoding='utf-8') as json_file:
json.dump(dump_json, json_file, ensure_ascii=False, indent=4)
self._gpu_perf = None
self._settings = None
self._usd_context = None
self._opened_state = None
self._renderer_capture = None
self._post_update_subs = None
| 7,035 | Python | 44.393548 | 180 | 0.572139 |
omniverse-code/kit/exts/omni.kit.autocapture/docs/index.rst | omni.kit.autocapture
#########################
.. automodule:: omni.kit.autocapture
:platform: Windows-x86_64, Linux-x86_64, Linux-aarch64
:members:
:undoc-members:
:imported-members:
| 201 | reStructuredText | 21.444442 | 58 | 0.606965 |
omniverse-code/kit/exts/omni.kit.property.render/omni/kit/property/render/extension.py | # Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import omni.ext
# Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be
# instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled
# on_shutdown() is called.
class RenderPropertiesExtension(omni.ext.IExt):
# ext_id is current extension id. It can be used with extension manager to query additional information, like where
# this extension is located on filesystem.
def on_startup(self, ext_id):
self.__wg_registered = False
# Register custom UI for Property widget
self.__register_widgets()
def on_shutdown(self):
self.__unregister_widgets()
def __register_widgets(self):
import omni.kit.window.property as p
from omni.kit.property.usd.usd_property_widget import MultiSchemaPropertiesWidget
from .product_schema import ProductSchemaAttributesWidget
from pxr import UsdRender
w = p.get_window()
if w:
self.__wg_registered = True
w.register_widget(
'prim',
'rendersettings_base',
MultiSchemaPropertiesWidget('Render Settings', UsdRender.Settings, [UsdRender.SettingsBase], group_api_schemas = True),
)
w.register_widget(
'prim',
'renderproduct_base',
ProductSchemaAttributesWidget('Render Product',
UsdRender.Product,
[UsdRender.SettingsBase],
include_list=["camera", "orderedVars"],
exclude_list=["aspectRatioConformPolicy", "dataWindowNDC", "instantaneousShutter", "pixelAspectRatio", "productName", "productType"],
group_api_schemas = True),
)
w.register_widget(
'prim',
'rendervar_base',
MultiSchemaPropertiesWidget('Render Var', UsdRender.Var, [UsdRender.Var], group_api_schemas = True),
)
def __unregister_widgets(self):
if self.__wg_registered:
import omni.kit.window.property as p
w = p.get_window()
if w:
w.unregister_widget('prim', 'rendersettings_base')
w.unregister_widget('prim', 'renderproduct_base')
w.unregister_widget('prim', 'rendervar_base')
self.__wg_registered = False
| 3,015 | Python | 45.399999 | 179 | 0.605307 |
omniverse-code/kit/exts/omni.kit.property.render/omni/kit/property/render/__init__.py | from .extension import RenderPropertiesExtension
| 49 | Python | 23.999988 | 48 | 0.897959 |
omniverse-code/kit/exts/omni.kit.property.render/omni/kit/property/render/product_schema.py | import carb
import omni.ext
from typing import List, Sequence
from pxr import Kind, Sdf, Usd, UsdGeom, Vt, UsdRender
from omni.kit.property.usd.usd_property_widget import MultiSchemaPropertiesWidget, UsdPropertyUiEntry
class ProductSchemaAttributesWidget(MultiSchemaPropertiesWidget):
def __init__(self, title: str, schema, schema_subclasses: list, include_list: list = [], exclude_list: list = [], api_schemas: Sequence[str] = None, group_api_schemas: bool = False):
super().__init__(title, schema, schema_subclasses, include_list, exclude_list, api_schemas, group_api_schemas)
def on_new_payload(self, payload):
"""
See PropertyWidget.on_new_payload
"""
if not super().on_new_payload(payload):
return False
if not self._payload or len(self._payload) == 0:
return False
used = []
for prim_path in self._payload:
prim = self._get_prim(prim_path)
if not prim or not prim.IsA(self._schema):
return False
used += [attr for attr in prim.GetAttributes() if attr.GetName() in self._schema_attr_names and not attr.IsHidden()]
return used
def _customize_props_layout(self, attrs):
from omni.kit.property.usd.custom_layout_helper import (
CustomLayoutFrame,
CustomLayoutGroup,
CustomLayoutProperty,
)
from omni.kit.window.property.templates import (
SimplePropertyWidget,
LABEL_WIDTH,
LABEL_HEIGHT,
HORIZONTAL_SPACING,
)
frame = CustomLayoutFrame(hide_extra=False)
anchor_prim = self._get_prim(self._payload[-1])
with frame:
with CustomLayoutGroup("Render Product"):
CustomLayoutProperty("resolution", "Resolution")
CustomLayoutProperty("camera", "Camera")
CustomLayoutProperty("orderedVars", "Ordered Vars")
# https://github.com/PixarAnimationStudios/USD/commit/dbbe38b94e6bf113acbb9db4c85622fe12a344a5
if hasattr(UsdRender.Tokens, 'disableMotionBlur'):
CustomLayoutProperty("disableMotionBlur", "Disable Motion Blur")
return frame.apply(attrs)
| 2,268 | Python | 38.120689 | 186 | 0.632275 |
omniverse-code/kit/exts/omni.kit.property.render/omni/kit/property/render/tests/test_render_properties.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import pathlib
import omni.kit.app
import omni.ui as ui
from omni.ui.tests.test_base import OmniUiTest
from omni.kit import ui_test
from omni.kit.test_suite.helpers import wait_stage_loading
class TestRenderPropertiesWidget(OmniUiTest):
# Before running each test
async def setUp(self):
await super().setUp()
from omni.kit.property.usd.usd_attribute_widget import UsdPropertiesWidget
import omni.kit.window.property as p
self._w = p.get_window()
extension_path = omni.kit.app.get_app().get_extension_manager().get_extension_path_by_module(__name__)
test_data_path = pathlib.Path(extension_path).joinpath("data").joinpath("tests")
self.__golden_img_dir = test_data_path.absolute().joinpath('golden_img').absolute()
self.__usd_path = str(test_data_path.joinpath('render_prim_test.usda').absolute())
# After running each test
async def tearDown(self):
await super().tearDown()
# Test(s)
async def __test_render_prim_ui(self, prim_name):
usd_context = omni.usd.get_context()
await self.docked_test_window(
window=self._w._window,
width=450,
height=650,
restore_window = ui.Workspace.get_window('Layer') or ui.Workspace.get_window('Stage'),
restore_position = ui.DockPosition.BOTTOM)
await usd_context.open_stage_async(self.__usd_path)
await wait_stage_loading()
# NOTE: cannot do DomeLight as it contains a file path which is build specific
# Select the prim.
usd_context.get_selection().set_selected_prim_paths([f'/World/RenderTest/{prim_name}'], True)
# Need to wait for an additional frames for omni.ui rebuild to take effect
await ui_test.human_delay(10)
await self.finalize_test(golden_img_dir=self.__golden_img_dir, golden_img_name=f'test_{prim_name}_ui.png')
# Test(s)
async def test_rendersettings_ui(self):
await self.__test_render_prim_ui('rendersettings1')
async def test_renderproduct_ui(self):
await self.__test_render_prim_ui('renderproduct1')
async def test_rendervar_ui(self):
await self.__test_render_prim_ui('rendervar1')
| 2,685 | Python | 37.371428 | 114 | 0.686406 |
omniverse-code/kit/exts/omni.kit.property.render/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.1.0] - 2022-02-02
### Changes
- Updated to new AOV design
## [1.0.0] - 2021-10-12
### Initial Version
- Initial Version
| 222 | Markdown | 17.583332 | 80 | 0.657658 |
omniverse-code/kit/exts/omni.kit.property.render/docs/README.md | # omni.kit.property.render
## Introduction
Property window extensions are for viewing and editing Usd Prim Attributes.
## This extension supports editing of these Usd Types:
- UsdRenderSettings
- UsdRenderProduct
- UsdRenderVar
## Also groups applied APIs on the above types when available
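## Example
A minimal sketch of registering a property widget for a render prim type (assumes the `omni.kit.window.property` and `omni.kit.property.usd` extensions are enabled; the widget name `"my_rendersettings"` is illustrative):
```python
import omni.kit.window.property as p
from omni.kit.property.usd.usd_property_widget import MultiSchemaPropertiesWidget
from pxr import UsdRender

w = p.get_window()
if w:
    # Show a "Render Settings" group in the Property window for UsdRender.Settings prims
    w.register_widget("prim", "my_rendersettings", MultiSchemaPropertiesWidget("Render Settings", UsdRender.Settings, [UsdRender.SettingsBase]))
```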
| 296 | Markdown | 18.799999 | 74 | 0.790541 |
omniverse-code/kit/exts/omni.kit.property.render/docs/index.rst | omni.kit.property.render
###########################
Property Render Settings Values
.. toctree::
:maxdepth: 1
CHANGELOG
| 132 | reStructuredText | 10.083332 | 31 | 0.560606 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/config/extension.toml | [package]
# Semantic Versionning is used: https://semver.org/
version = "1.0.0"
# Lists people or organizations that are considered the "authors" of the package.
authors = ["NVIDIA"]
# The title and description fields are primarly for displaying extension info in UI
title = "Highlight widgets"
description="A label widget to show highlight word."
# URL of the extension source repository.
repository = ""
# Keywords for the extension
keywords = ["kit", "ui", "widget", "label", "hightlight"]
# Location of change log file in target (final) folder of extension, relative to the root.
# More info on writing changelog: https://keepachangelog.com/en/1.0.0/
changelog="docs/CHANGELOG.rst"
# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"
# Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file).
# Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image.
preview_image = "data/preview.png"
# Icon is shown in Extensions window, it is recommended to be square, of size 256x256.
icon = "data/icon.png"
category = "Internal"
# We only depend on testing framework currently:
[dependencies]
"omni.ui" = {}
# Main python module this extension provides, it will be publicly available as "import omni.kit.widget.highlight_label".
[[python.module]]
name = "omni.kit.widget.highlight_label"
[settings]
[[test]]
args = [
"--no-window",
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false",
]
dependencies = [
"omni.kit.renderer.core",
"omni.kit.renderer.capture",
"omni.kit.ui_test",
]
| 1,683 | TOML | 29.071428 | 118 | 0.726679 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/omni/kit/widget/highlight_label/style.py | from omni.ui import color as cl
cl.highlight_default = cl.shade(cl('#848484'))
cl.highlight_highlight = cl.shade(cl('#DFCB4A'))
cl.highlight_selected = cl.shade(cl("#1F2123"))
UI_STYLE = {
"HighlightLabel": {"color": cl.highlight_default},
"HighlightLabel:selected": {"color": cl.highlight_selected},
"HighlightLabel::highlight": {"color": cl.highlight_highlight},
}
| 381 | Python | 30.833331 | 67 | 0.695538 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/omni/kit/widget/highlight_label/__init__.py | from .highlight_label import HighlightLabel
| 44 | Python | 21.499989 | 43 | 0.863636 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/omni/kit/widget/highlight_label/highlight_label.py | import carb
import math
from omni import ui
from typing import Optional, Dict
from .style import UI_STYLE
def split_selection(text, selection, match_case: bool = False):
"""
    Split the given text into substrings to draw the selected text. The result starts with unselected text.
Example: "helloworld" "o" -> ["hell", "o", "w", "o", "rld"]
Example: "helloworld" "helloworld" -> ["", "helloworld"]
"""
if not selection:
return [text, ""]
else:
origin_text = text
if not match_case:
selection = selection.lower()
text = text.lower()
elif text == selection:
return ["", text]
selection_len = len(selection)
result = []
while True:
found = text.find(selection)
result.append(origin_text if found < 0 else origin_text[:found])
if found < 0:
break
else:
result.append(origin_text[found : found + selection_len])
text = text[found + selection_len :]
origin_text = origin_text[found + selection_len :]
return result
class HighlightLabel:
"""
    Represents a label widget that can show a highlighted word.
Args:
text (str): String of label.
Keyword args:
        highlight (Optional[str]): Word to highlight.
        match_case (bool): Match the highlight word case-sensitively. Default False.
width (ui.Length): Widget length. Default ui.Fraction(1)
height (ui.Length): Widget height. Default 0
style (Dict): Custom style
"""
def __init__(
self,
text: str,
highlight: Optional[str] = None,
match_case: bool = False,
width: ui.Length=ui.Fraction(1),
height: ui.Length=0,
style: Dict = {}
):
self._container: Optional[ui.HStack] = None
self.__text = text
self.__hightlight = highlight
self.__match_case = match_case
self.__width = width
self.__height = height
self.__style = UI_STYLE.copy()
self.__style.update(style)
self._build_ui()
def _build_ui(self):
if not self._container:
self._container = ui.HStack(width=self.__width, height=self.__height, style=self.__style)
else:
self._container.clear()
if not self.__hightlight:
with self._container:
ui.Label(
self.__text,
width=0,
name="",
style_type_name_override="HighlightLabel",
)
else:
selection_chain = split_selection(self.__text, self.__hightlight, match_case=self.__match_case)
labelnames_chain = ["", "highlight"]
# Extend the label names depending on the size of the selection chain. Example, if it was [a, b]
# and selection_chain is [z,y,x,w], it will become [a, b, a, b].
labelnames_chain *= int(math.ceil(len(selection_chain) / len(labelnames_chain)))
with self._container:
for current_text, current_name in zip(selection_chain, labelnames_chain):
if not current_text:
continue
ui.Label(
current_text,
width=0,
name=current_name,
style_type_name_override="HighlightLabel",
)
@property
def widget(self) -> Optional[ui.HStack]:
return self._container
@property
    def visible(self) -> bool:
"""
Widget visibility
"""
return self._container.visible
@visible.setter
def visible(self, value: bool) -> None:
self._container.visible = value
@property
def text(self) -> str:
return self.__text
@text.setter
def text(self, value: str) -> None:
self.__text = value
self._build_ui()
@property
    def highlight(self) -> Optional[str]:
return self.__hightlight
    @highlight.setter
def highlight(self, value: Optional[str]) -> None:
self.__hightlight = value
self._build_ui()
 | 4,346 | Python | 28.773972 | 108 | 0.546019 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/omni/kit/widget/highlight_label/tests/test_ui.py | ## Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.ui as ui
from omni.ui.tests.test_base import OmniUiTest
from .. import HighlightLabel
from pathlib import Path
CURRENT_PATH = Path(__file__).parent
TEST_DATA_PATH = CURRENT_PATH.parent.parent.parent.parent.parent.joinpath("data").joinpath("tests")
TEST_WIDTH = 400
TEST_HEIGHT = 200
CUSTOM_UI_STYLE = {
"HighlightLabel": {"color": 0xFFFFFFFF},
"HighlightLabel::highlight": {"color": 0xFF0000FF},
}
class HighlightLabelTestCase(OmniUiTest):
# Before running each test
async def setUp(self):
await super().setUp()
self._golden_img_dir = TEST_DATA_PATH.absolute().joinpath("golden_img").absolute()
# After running each test
async def tearDown(self):
await super().tearDown()
async def test_general(self):
"""Testing general look of SearchField"""
window = await self.create_test_window(width=TEST_WIDTH, height=TEST_HEIGHT)
with window.frame:
with ui.VStack(spacing=10):
HighlightLabel("No highlight")
HighlightLabel("Highlight All", highlight="Highlight All")
HighlightLabel("Highlight 'gh'", highlight="gh")
label = HighlightLabel("Highlight 't' via property")
label.highlight = "t"
HighlightLabel("Highlight 'H' MATCH Case", highlight="H", match_case=True)
HighlightLabel("Match Case All", highlight="Match Case All", match_case=True)
HighlightLabel("Highlight style CUSTOM", highlight="style", style=CUSTOM_UI_STYLE)
await self.docked_test_window(window=window, width=TEST_WIDTH, height=TEST_HEIGHT)
await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name="highlight_label.png")
| 2,189 | Python | 42.799999 | 108 | 0.687985 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/docs/CHANGELOG.rst | # CHANGELOG
This document records all notable changes to the ``omni.kit.widget.highlight_label`` extension.
This project adheres to `Semantic Versioning <https://semver.org/>`_.
## [1.0.0] - 2022-10-10
### Added
- Initial version implementation
| 241 | reStructuredText | 20.999998 | 87 | 0.73029 |
omniverse-code/kit/exts/omni.kit.widget.highlight_label/docs/README.md | # Highlight label widget
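A minimal usage sketch (the `ui.Window` setup below is illustrative; only `HighlightLabel` comes from this extension):

```python
import omni.ui as ui
from omni.kit.widget.highlight_label import HighlightLabel

window = ui.Window("Highlight demo", width=300, height=80)
with window.frame:
    # Every occurrence of "light" in the text is drawn with the highlight style.
    label = HighlightLabel("Highlight label widget", highlight="light")

# The highlight term can be changed later; the widget rebuilds itself.
label.highlight = "widget"
```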
| 25 | Markdown | 11.999994 | 24 | 0.8 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/visibility_toggle.py | ## Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import math
import omni.usd
import omni.kit.app
from pxr import UsdGeom
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, wait_stage_loading, get_prims, arrange_windows
class VisibilityToggleUsdStage(AsyncTestCase):
# Before running each test
async def setUp(self):
await arrange_windows("Stage", 512)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
async def test_l1_eye_visibility_icon(self):
await ui_test.find("Content").focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
def verify_prim_state(expected):
stage = omni.usd.get_context().get_stage()
for prim_path in [prim.GetPath().pathString for prim in stage.TraverseAll() if not omni.usd.is_hidden_type(prim)]:
if not "Looks" in prim_path:
self.assertEqual(UsdGeom.Imageable(stage.GetPrimAtPath(prim_path)).ComputeVisibility(), expected[prim_path])
        # verify default state
verify_prim_state({"/World": "inherited", "/World/defaultLight": "inherited", "/World/Cone": "inherited", "/World/Cube": "inherited", "/World/Sphere": "inherited", "/World/Cylinder": "inherited"})
# build table of eye buttons
stage_widget = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
widgets = {}
for w in stage_widget.find_all("**/Label[*]"):
widget_name = w.widget.text if w.widget.text != "World (defaultPrim)" else "World"
for wtb in stage_widget.find_all(f"**/ToolButton[*]"):
if math.isclose(wtb.widget.screen_position_y, w.widget.screen_position_y):
widgets[widget_name] = wtb
break
# click world eye & verify
await widgets["World"].click()
verify_prim_state({"/World": "invisible", "/World/defaultLight": "invisible", "/World/Cone": "invisible", "/World/Cube": "invisible", "/World/Sphere": "invisible", "/World/Cylinder": "invisible"})
await widgets["World"].click()
verify_prim_state({"/World": "inherited", "/World/defaultLight": "inherited", "/World/Cone": "inherited", "/World/Cube": "inherited", "/World/Sphere": "inherited", "/World/Cylinder": "inherited"})
        # click individual prim eye & verify
for prim_name in ["defaultLight", "Cone", "Cube", "Sphere", "Cylinder"]:
expected = {"/World": "inherited", "/World/defaultLight": "inherited", "/World/Cone": "inherited", "/World/Cube": "inherited", "/World/Sphere": "inherited", "/World/Cylinder": "inherited"}
verify_prim_state(expected)
await widgets[prim_name].click()
expected[f"/World/{prim_name}"] = "invisible"
verify_prim_state(expected)
await widgets[prim_name].click()
| 3,523 | Python | 50.823529 | 204 | 0.652853 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/stage_menu_create_custom_materials.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import omni.kit.app
import omni.usd
from omni.ui.tests.test_base import OmniUiTest
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, UsdShade
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, select_prims, wait_stage_loading, delete_prim_path_children, arrange_windows
from omni.kit.material.library.test_helper import MaterialLibraryTestHelper
class TestCreateMenuContextMenu(OmniUiTest):
# Before running each test
async def setUp(self):
await arrange_windows()
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
DATA = ["/World/Cube", "/World/Cone", "/World/Sphere", "/World/Cylinder"]
async def test_l1_stage_menu_create_custom_materials(self):
stage_window = ui_test.find("Stage")
await stage_window.focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
to_select = self.DATA
# test custom materials
material_test_helper = MaterialLibraryTestHelper()
for material_url, mtl_name in [("mahogany_floorboards.mdl", "mahogany_floorboards"), ("multi_hair.mdl", "OmniHair_Green"), ("multi_hair.mdl", "OmniHair_Brown")]:
# delete any materials in looks
await delete_prim_path_children("/World/Looks")
# select prims
await select_prims(to_select)
# right click on Cube
stage_widget = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
await stage_widget.find(f"**/StringField[*].model.path=='{to_select[0]}'").right_click()
# click on context menu item
await ui_test.select_context_menu("Create/Material/Add MDL File", offset=ui_test.Vec2(50, 10))
# use add material dialog
mdl_path = get_test_data_path(__name__, f"mtl/{material_url}")
await material_test_helper.handle_add_material_dialog(mdl_path, mtl_name)
# wait for material to load & UI to refresh
await wait_stage_loading()
# verify item(s)
for prim_path in to_select:
prim = stage.GetPrimAtPath(prim_path)
bound_material, _ = UsdShade.MaterialBindingAPI(prim).ComputeBoundMaterial()
self.assertTrue(bound_material.GetPrim().IsValid())
self.assertEqual(bound_material.GetPrim().GetPrimPath().pathString, f"/World/Looks/{mtl_name}")
| 3,042 | Python | 43.101449 | 169 | 0.672584 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/select_bound_objects_stage_window.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import platform
import unittest
import omni.kit.app
import omni.usd
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, select_prims, wait_stage_loading, arrange_windows
class SelectBoundObjectsStageWindow(AsyncTestCase):
# Before running each test
async def setUp(self):
await arrange_windows()
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
DATA = [
("/World/Looks/OmniPBR", {"/World/Cone", "/World/Cylinder"}),
("/World/Looks/OmniGlass", {"/World/Cube", "/World/Sphere"}),
]
async def test_l1_select_bound_objects_stage_window(self):
usd_context = omni.usd.get_context()
for to_select, to_verify in self.DATA:
# select prims
await select_prims([to_select])
# right click on Cube
stage_widget = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
await stage_widget.find(f"**/StringField[*].model.path=='{to_select}'").right_click()
# click on context menu item
await ui_test.select_context_menu("Select Bound Objects")
# verify
selected = usd_context.get_selection().get_selected_prim_paths()
self.assertSetEqual(set(selected), to_verify)
| 1,937 | Python | 36.999999 | 121 | 0.681982 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/__init__.py | from .select_bound_objects_stage_window import *
from .stage_menu_create_materials import *
from .stage_menu_create_custom_materials import *
from .stage_assign_material_single import *
from .stage_assign_material_multi import *
from .drag_drop_material_stage_item import *
from .drag_drop_material_stage import *
from .drag_drop_usd_stage_item import *
from .drag_drop_usd_external_stage_item import *
from .drag_drop_external_audio_stage import *
from .visibility_toggle import *
| 482 | Python | 39.249997 | 49 | 0.784232 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/drag_drop_usd_external_stage_item.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import carb
import omni.usd
import omni.kit.app
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, UsdShade
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, wait_stage_loading, arrange_windows
class ExternalDragDropUsdStageItem(AsyncTestCase):
# Before running each test
async def setUp(self):
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
await arrange_windows("Stage", 512)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
async def test_l1_external_drag_drop_usd_stage_item_reference(self):
await ui_test.find("Content").focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
# set dragDropImport
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
# get file
usd_path = get_test_data_path(__name__, f"shapes/basic_sphere.usda").replace("\\", "/")
# position mouse
await ui_test.find("Stage").click()
await ui_test.human_delay()
# simulate drag/drop
omni.appwindow.get_default_app_window().get_window_drop_event_stream().push(0, 0, {'paths': [usd_path]})
await ui_test.human_delay(50)
# verify
prim = stage.GetPrimAtPath("/World/basic_sphere")
payloads = omni.usd.get_composed_payloads_from_prim(prim)
references = omni.usd.get_composed_references_from_prim(prim)
self.assertEqual(payloads, [])
self.assertEqual(len(references), 1)
for (ref, layer) in references:
absolute_path = layer.ComputeAbsolutePath(ref.assetPath)
self.assertEqual(os.path.normpath(absolute_path).lower(), os.path.normpath(usd_path).lower())
async def test_l1_external_drag_drop_usd_stage_item_payload(self):
await ui_test.find("Content").focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
# set dragDropImport
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "payload")
# get file
usd_path = get_test_data_path(__name__, f"shapes/basic_cone.usda").replace("\\", "/")
# position mouse
await ui_test.find("Stage").click()
await ui_test.human_delay()
# simulate drag/drop
omni.appwindow.get_default_app_window().get_window_drop_event_stream().push(0, 0, {'paths': [usd_path]})
await ui_test.human_delay(50)
# verify
prim = stage.GetPrimAtPath("/World/basic_cone")
payloads = omni.usd.get_composed_payloads_from_prim(prim)
references = omni.usd.get_composed_references_from_prim(prim)
self.assertEqual(references, [])
self.assertEqual(len(payloads), 1)
for (ref, layer) in payloads:
absolute_path = layer.ComputeAbsolutePath(ref.assetPath)
self.assertEqual(os.path.normpath(absolute_path).lower(), os.path.normpath(usd_path).lower())
| 3,800 | Python | 39.870967 | 112 | 0.670263 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/stage_menu_create_materials.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import omni.kit.app
import omni.usd
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, UsdShade
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, select_prims, wait_stage_loading, delete_prim_path_children, arrange_windows
class StageMenuCreateMaterials(AsyncTestCase):
# Before running each test
async def setUp(self):
await arrange_windows(topleft_window="Stage", topleft_height=512, topleft_width=768)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
DATA = ["/World/Cube", "/World/Cone", "/World/Sphere", "/World/Cylinder"]
async def test_l1_stage_menu_create_materials(self):
stage_window = ui_test.find("Stage")
await stage_window.focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
to_select = self.DATA
        # don't write to the list returned by get_mdl_list as it's a pointer to the class's list
mdl_list = await omni.kit.material.library.get_mdl_list_async(use_hidden=True)
items_to_test = mdl_list.copy()
items_to_test.append(("USD_Preview_Surface", "", None))
items_to_test.append(("USD_Preview_Surface_Texture", "", None))
for mtl_name, _, submenu in items_to_test:
# delete any materials in looks
await delete_prim_path_children("/World/Looks")
# select prims
await select_prims(to_select)
# right click on Cube
stage_widget = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
await stage_widget.find(f"**/StringField[*].model.path=='{to_select[0]}'").right_click()
# click on context menu item
menu_name = f"Create/Material/{submenu}/{mtl_name.replace('_', ' ')}" if submenu else f"Create/Material/{mtl_name.replace('_', ' ')}"
await ui_test.select_context_menu(menu_name)
# wait for material to load & UI to refresh
await wait_stage_loading()
# verify item(s)
for prim_path in to_select:
prim = stage.GetPrimAtPath(prim_path)
bound_material, _ = UsdShade.MaterialBindingAPI(prim).ComputeBoundMaterial()
self.assertTrue(bound_material.GetPrim().IsValid() == True)
if mtl_name in ["USD_Preview_Surface", "USD_Preview_Surface_Texture"]:
cmp_name = mtl_name.replace('USD_', '').replace('_', '')
self.assertTrue(bound_material.GetPrim().GetPrimPath().pathString == f"/World/Looks/{cmp_name}")
else:
self.assertTrue(bound_material.GetPrim().GetPrimPath().pathString == f"/World/Looks/{mtl_name}")
prim = stage.GetPrimAtPath(f"/World/Looks/{mtl_name}/Shader")
asset_path = prim.GetAttribute("info:mdl:sourceAsset").Get()
self.assertFalse(os.path.isabs(asset_path.path))
self.assertTrue(os.path.isabs(asset_path.resolvedPath))
| 3,639 | Python | 46.272727 | 148 | 0.642209 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/drag_drop_external_audio_stage.py | ## Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import carb
import omni.usd
import omni.kit.app
from os import listdir
from os.path import isfile, join
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, UsdShade
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, wait_stage_loading, get_prims, arrange_windows
class ExternalDragDropUsdStageAudio(AsyncTestCase):
# Before running each test
async def setUp(self):
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
await arrange_windows("Stage", 512)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
async def test_l1_external_drag_drop_audio_viewport_item(self):
await ui_test.find("Content").focus()
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
audio_list = []
audio_path = get_test_data_path(__name__, f"media/")
prims = get_prims(stage)
for item_path in [join(audio_path, f) for f in listdir(audio_path) if isfile(join(audio_path, f))]:
# position mouse
await ui_test.find("Stage").click()
await ui_test.human_delay()
# simulate drag/drop
omni.appwindow.get_default_app_window().get_window_drop_event_stream().push(0, 0, {'paths': [item_path]})
await ui_test.human_delay(50)
prim_list = get_prims(stage, prims)
self.assertEqual(len(prim_list), 1)
audio_list.append([prim_list[0], os.path.relpath(item_path, audio_path)])
prims.append(prim_list[0])
# verify
self.assertEqual(len(audio_list), 5)
for prim, audio_file in audio_list:
asset_path = prim.GetAttribute('filePath').Get()
self.assertTrue(asset_path.resolvedPath.endswith(audio_file))
| 2,559 | Python | 38.999999 | 118 | 0.675655 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/stage_assign_material_multi.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import omni.kit.app
import omni.usd
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from pxr import Sdf, UsdShade
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, select_prims, wait_stage_loading, wait_for_window, arrange_windows
from omni.kit.material.library.test_helper import MaterialLibraryTestHelper
class StageAssignMaterialMulti(AsyncTestCase):
# Before running each test
async def setUp(self):
await arrange_windows()
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
DATA = [["/World/Cube", "/World/Cone", "/World/Sphere", "/World/Cylinder"],
["", "/World/Looks/OmniGlass", "/World/Looks/OmniPBR", "/World/Looks/OmniSurface_Plastic"]]
async def test_l1_stage_assign_material_multi(self):
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
to_select = self.DATA[0]
for index, mtl_path in enumerate(self.DATA[1]):
# select prims
await select_prims(to_select)
# right click on prim
stage_widget = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
await stage_widget.find(f"**/StringField[*].model.path=='{to_select[0]}'").right_click()
# click on context menu item
await ui_test.select_context_menu("Assign Material")
            # use assign material dialog
async with MaterialLibraryTestHelper() as material_test_helper:
await material_test_helper.handle_assign_material_dialog(index)
# verify
for prim_path in to_select:
prim = stage.GetPrimAtPath(prim_path)
bound_material, _ = UsdShade.MaterialBindingAPI(prim).ComputeBoundMaterial()
self.assertTrue(bound_material.GetPrim().GetPrimPath().pathString == mtl_path)
| 2,463 | Python | 42.999999 | 138 | 0.681283 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/omni/kit/test_suite/stage_window/tests/drag_drop_usd_stage_item.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
import os
import carb
import omni.usd
import omni.kit.app
from omni.kit.test.async_unittest import AsyncTestCase
from omni.kit import ui_test
from omni.kit.test_suite.helpers import open_stage, get_test_data_path, wait_stage_loading, arrange_windows
from omni.kit.window.content_browser.test_helper import ContentBrowserTestHelper
class DragDropUsdStageItem(AsyncTestCase):
# Before running each test
async def setUp(self):
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
await arrange_windows("Stage", 512)
await open_stage(get_test_data_path(__name__, "bound_shapes.usda"))
# After running each test
async def tearDown(self):
await wait_stage_loading()
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
async def test_l1_drag_drop_usd_stage_item_reference(self):
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
# set dragDropImport
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "reference")
# drag/drop
async with ContentBrowserTestHelper() as content_browser_helper:
await content_browser_helper.toggle_grid_view_async(show_grid_view=False)
await ui_test.human_delay(50)
usd_path = get_test_data_path(__name__, f"shapes/basic_sphere.usda").replace("\\", "/")
stage_window = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
drag_target = stage_window.position + ui_test.Vec2(stage_window.size.x / 2, stage_window.size.y - 32)
await content_browser_helper.drag_and_drop_tree_view(usd_path, drag_target=drag_target)
await wait_stage_loading()
# verify
prim = stage.GetPrimAtPath("/World/basic_sphere")
payloads = omni.usd.get_composed_payloads_from_prim(prim)
references = omni.usd.get_composed_references_from_prim(prim)
self.assertEqual(payloads, [])
self.assertEqual(len(references), 1)
for (ref, layer) in references:
absolute_path = layer.ComputeAbsolutePath(ref.assetPath)
self.assertEqual(os.path.normpath(absolute_path).lower(), os.path.normpath(usd_path).lower())
async def test_l1_drag_drop_usd_stage_item_payload(self):
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
await wait_stage_loading()
# set dragDropImport
carb.settings.get_settings().set("/persistent/app/stage/dragDropImport", "payload")
# drag/drop
async with ContentBrowserTestHelper() as content_browser_helper:
await content_browser_helper.toggle_grid_view_async(show_grid_view=False)
await ui_test.human_delay(50)
usd_path = get_test_data_path(__name__, f"shapes/basic_cone.usda").replace("\\", "/")
stage_window = ui_test.find("Stage//Frame/**/ScrollingFrame/TreeView[*].visible==True")
drag_target = stage_window.position + ui_test.Vec2(stage_window.size.x / 2, stage_window.size.y - 32)
await content_browser_helper.drag_and_drop_tree_view(usd_path, drag_target=drag_target)
await wait_stage_loading()
# verify
prim = stage.GetPrimAtPath("/World/basic_cone")
payloads = omni.usd.get_composed_payloads_from_prim(prim)
references = omni.usd.get_composed_references_from_prim(prim)
self.assertEqual(references, [])
self.assertEqual(len(payloads), 1)
for (ref, layer) in payloads:
absolute_path = layer.ComputeAbsolutePath(ref.assetPath)
self.assertEqual(os.path.normpath(absolute_path).lower(), os.path.normpath(usd_path).lower())
| 4,277 | Python | 47.067415 | 113 | 0.680851 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.0.4] - 2022-11-10
- Create/Materials changed to Create/Material
## [1.0.3] - 2022-08-03
### Changes
- Added external drag/drop audio file tests
## [1.0.2] - 2022-07-25
### Changes
- Refactored unittests to make use of content_browser test helpers
## [1.0.1] - 2022-07-01
### Changes
- Updated calls to material handlers in unittests
## [1.0.0] - 2022-02-09
### Changes
- Created
| 484 | Markdown | 20.086956 | 80 | 0.67562 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/docs/README.md | # omni.kit.test_suite.stage_window
## omni.kit.test_suite.stage_window
Test Suite
| 86 | Markdown | 9.874999 | 35 | 0.732558 |
omniverse-code/kit/exts/omni.kit.test_suite.stage_window/docs/index.rst | omni.kit.test_suite.stage_window
################################
stage window tests
.. toctree::
:maxdepth: 1
CHANGELOG
| 130 | reStructuredText | 12.099999 | 32 | 0.523077 |
omniverse-code/kit/exts/omni.kit.livestream.webrtc/config/extension.toml | [package]
version = "0.1.0"
title = "Livestream WebRTC Backend"
category = "Rendering"
[[python.module]]
name = "omni.kit.livestream.webrtc"
[dependencies]
"omni.kit.livestream.core" = {}
"omni.kit.streamsdk.plugins" = {}
[[native.plugin]]
path = "${omni.kit.streamsdk.plugins}/bin/carb.livestream-rtc.plugin"
[settings]
app.livestream.port = 49100
app.livestream.proto = "websocket"
app.livestream.ipversion = "ipv4"
[[test]]
parallelizable = false
unreliable = true # OM-36759
waiver = "Validation of end-to-end workflow covered via the productized, User-facing omni.kit.livestream.webrtc extension." # OM-48092
| 619 | TOML | 23.799999 | 134 | 0.736672 |
omniverse-code/kit/exts/omni.kit.livestream.webrtc/omni/kit/livestream/webrtc/scripts/extension.py | import omni.ext
import omni.kit.livestream.bind
class Extension(omni.ext.IExt):
def __init__(self):
pass
def on_startup(self):
self._kit_livestream = omni.kit.livestream.bind.acquire_livestream_interface()
self._kit_livestream.startup()
def on_shutdown(self):
self._kit_livestream.shutdown()
self._kit_livestream = None
| 376 | Python | 22.562499 | 86 | 0.656915 |
omniverse-code/kit/exts/omni.mtlx/omni/mtlx/extension.py | import omni.ext
import os
from pathlib import Path
# path to set to the environment variable `PXR_MTLX_STDLIB_SEARCH_PATHS` while usd is initialized
# i.e., called by omni.usd.config
def get_mtlx_stdlib_search_path() -> str:
# compute the real path without symlinks in order to get it working with usdmtlx
current_dir = os.path.dirname(__file__)
mtlx_libraries_dir = os.path.join(current_dir, '..', '..', 'libraries')
mtlx_libraries_dir = os.path.realpath(mtlx_libraries_dir)
return str(Path(mtlx_libraries_dir).resolve())
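# Usage sketch (hypothetical caller, not part of this extension): the host that initializes USD,
# e.g. omni.usd.config as noted above, is expected to export this path before stages are opened:
#   os.environ["PXR_MTLX_STDLIB_SEARCH_PATHS"] = get_mtlx_stdlib_search_path()
# so that usdMtlx can locate the MaterialX standard library modules.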
class MtlxExtension(omni.ext.IExt):
def on_startup(self):
pass
def on_shutdown(self):
pass | 670 | Python | 32.549998 | 97 | 0.68806 |
omniverse-code/kit/exts/omni.mtlx/omni/mtlx/__init__.py | from .extension import * | 24 | Python | 23.999976 | 24 | 0.791667 |
omniverse-code/kit/exts/omni.mtlx/omni/mtlx/tests/__init__.py | from .setup import *
from .render import *
| 43 | Python | 13.666662 | 21 | 0.72093 |
omniverse-code/kit/exts/omni.mtlx/omni/mtlx/tests/setup.py | import omni.kit.test
import omni.mtlx
import os
import carb
class SetupTest(omni.kit.test.AsyncTestCase):
def test_verify_mtlx_search_path(self):
path:str = omni.mtlx.get_mtlx_stdlib_search_path()
# check the library folder that contains all mtlx standard library modules
exists = os.path.isdir(path)
if not exists:
carb.log_info(f"MaterialX Standard Library Search Path is not existing: {path}")
self.assertTrue(exists)
carb.log_info(f"MaterialX Standard Library Search Path found: {path}")
# test some important libraries as secondary check
self.assertTrue(os.path.exists(os.path.join(path, "bxdf", "standard_surface.mtlx")))
self.assertTrue(os.path.exists(os.path.join(path, "pbrlib", "genmdl", "pbrlib_genmdl_impl.mtlx")))
| 834 | Python | 40.749998 | 106 | 0.677458 |
omniverse-code/kit/exts/omni.mtlx/omni/mtlx/tests/render.py | #!/usr/bin/env python3
import omni.kit.commands
import omni.kit.test
import omni.usd
from omni.rtx.tests import RtxTest, testSettings, postLoadTestSettings
from omni.rtx.tests.test_common import wait_for_update
from omni.kit.test_helpers_gfx.compare_utils import ComparisonMetric
import carb
from pathlib import Path
import os
EXTENSION_DIR = Path(omni.kit.app.get_app().get_extension_manager().get_extension_path_by_module(__name__))
TESTS_DIR = EXTENSION_DIR.joinpath('data', 'tests')
USD_DIR = TESTS_DIR.joinpath('usd')
GOLDEN_IMAGES_DIR = TESTS_DIR.joinpath('golden')
OUTPUTS_DIR = Path(omni.kit.test.get_test_output_path())
# This class is auto-discoverable by omni.kit.test
class MtlxRenderTest(RtxTest):
WINDOW_SIZE = (1280, 768)
THRESHOLD = 5e-5
# common settings used for all renderers
async def setUp(self):
await super().setUp()
self.set_settings(testSettings)
# Overridden with custom paths
async def capture_and_compare(self,
renderer: str,
img_subdir: Path = "",
test_name=None,
threshold=THRESHOLD,
metric: ComparisonMetric = ComparisonMetric.MEAN_ERROR_SQUARED):
golden_img_dir = GOLDEN_IMAGES_DIR.joinpath(img_subdir)
output_img_dir = OUTPUTS_DIR.joinpath(img_subdir)
golden_img_name = f"{test_name}_{renderer}.png"
return await self._capture_and_compare(golden_img_name, threshold, output_img_dir, golden_img_dir, metric)
# Open the scene, overridden with custom paths
async def open_usd(self, usdSubpath: Path):
path = USD_DIR.joinpath(usdSubpath)
# This settings should be set before stage opening/creation
self.set_settings(testSettings)
# Actually open the stage
self.ctx.open_stage(str(path))
await omni.kit.app.get_app().next_update_async()
# Settings that should override settings that was set on stage opening/creation
self.set_settings(postLoadTestSettings)
# Close the scene
def close_usd(self):
self.ctx.close_stage()
# do string replacements and save a new file
def prepare_temp_file(self, srcFilename, destFilename, replacementMap):
with open(srcFilename, 'r') as srcFile, \
open(destFilename, 'w') as destFile:
src = srcFile.read()
for key, value in replacementMap.items():
src = src.replace(key, value)
destFile.write(src)
# base test script
async def run_image_test(self, usd_file: str, test_name: str = None):
# load the scene
if usd_file:
await self.open_usd(usd_file)
# wait for rendering and capture
await wait_for_update(wait_frames=128+10)
# golden image path need the renderer
settings = carb.settings.get_settings()
renderer:str = settings.get("/renderer/active")
await self.capture_and_compare(renderer = renderer, test_name = test_name)
# close the scene to also test clean-up
# self.close_usd() # getting `[Error] [omni.usd] Stage opening or closing already in progress!!`
await wait_for_update()
# ==============================================================================================
# The tests
# ==============================================================================================
# StandardSurface presets that ship with the MaterialX SDK
# ----------------------------------------------------------------------------------------------
async def test_standardsurface_composition(self):
await self.run_image_test('StandardSurface/Composition.usda', 'mtlx_standardsurface_composition')
# A selection of the materials published by AMD on https://matlib.gpuopen.com
# ----------------------------------------------------------------------------------------------
async def test_amd_composition(self):
await self.run_image_test('AMD/Composition.usda', 'mtlx_amd_composition')
# Open Chess Set as released by the USD Work Group
# ----------------------------------------------------------------------------------------------
async def test_open_chess_set(self):
await self.run_image_test('OpenChessSet/chess_set_light_camera.usda', 'mtlx_open_chess_set')
# MaterialX tests published in the USD workgroup on https://github.com/usd-wg/assets
# Added a single root prim to each test scene and recomposed them into one test case.
# ----------------------------------------------------------------------------------------------
async def test_usd_wg_assets_composition(self):
try:
# replace the token by an absolute file path in order to test absolute paths
sceneDir = USD_DIR.joinpath('usd-wg-assets')
tempSceneFile = sceneDir.joinpath('basicTextured_flatten.usda')
self.prepare_temp_file(
srcFilename = sceneDir.joinpath('basicTextured_flatten_template.usda'),
destFilename = tempSceneFile,
replacementMap = {"${USD_DIR}":f"{sceneDir}"})
tempMtlxFile = sceneDir.joinpath('standard_surface_brass_tiled_absolute_paths.mtlx')
self.prepare_temp_file(
srcFilename = sceneDir.joinpath('standard_surface_brass_tiled_absolute_paths_template.mtlx'),
destFilename = tempMtlxFile,
replacementMap = {"${USD_DIR}":f"{sceneDir}"})
# run the test
await self.run_image_test('usd-wg-assets/Composition.usda', 'mtlx_usd-wg-assets_composition')
finally:
# remove the temp files
if tempSceneFile.exists():
tempSceneFile.unlink()
if tempMtlxFile.exists():
tempMtlxFile.unlink()
| 5,928 | Python | 44.259542 | 114 | 0.579622 |
omniverse-code/kit/exts/omni.kit.test_suite.helpers/omni/kit/test_suite/helpers/helpers.py | import os
import carb
from pkgutil import iter_modules
import omni.usd
import omni.kit.app
from typing import List
from pxr import Usd
from functools import lru_cache
from pathlib import Path
from omni.kit import ui_test
from omni.kit.ui_test import Vec2
from omni.kit.ui_test import WidgetRef
@lru_cache()
def get_test_data_path(module: str, subpath: str = "") -> str:
if not subpath:
ext_path = omni.kit.app.get_app().get_extension_manager().get_extension_path_by_module(module)
return str(Path(ext_path) / "data" / "tests")
return str(Path(get_test_data_path(module)) / subpath)
async def wait():
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
async def wait_stage_loading(usd_context=omni.usd.get_context()):
while True:
_, files_loaded, total_files = usd_context.get_stage_loading_status()
if files_loaded or total_files:
await omni.kit.app.get_app().next_update_async()
continue
break
await wait()
async def open_stage(path: str, usd_context=omni.usd.get_context()):
await usd_context.open_stage_async(path)
await wait_stage_loading(usd_context)
async def select_prims(paths, usd_context=omni.usd.get_context()):
usd_context.get_selection().set_selected_prim_paths(paths, True)
await wait()
    # wait for any materials to load
await wait_stage_loading()
def get_prims(stage, exclude_list=[]):
prims = []
for p in stage.Traverse(Usd.TraverseInstanceProxies(Usd.PrimIsActive and Usd.PrimIsDefined and Usd.PrimIsLoaded)):
if p not in exclude_list:
prims.append(p)
return prims
async def wait_for_window(window_name: str):
MAX_WAIT = 100
# Find active window
for _ in range(MAX_WAIT):
window_root = ui_test.find(f"{window_name}")
if window_root and window_root.widget.visible:
await ui_test.human_delay()
break
await ui_test.human_delay(1)
if not window_root:
        raise Exception(f"Can't find window {window_name}, wait time exceeded.")
async def handle_assign_material_dialog(index, strength_index=0):
carb.log_warn("WARNING: 'handle_assign_material_dialog' is being DEPRECATED. Please use the function of the same name from 'omni.kit.material.library.test_helper.MaterialLibraryTestHelper'")
from pxr import Sdf
# handle assign dialog
prims = omni.usd.get_context().get_selection().get_selected_prim_paths()
if len(prims) == 1:
shape = Sdf.Path(prims[0]).name
window_name = f"Bind material to {shape}###context_menu_bind"
else:
window_name = f"Bind material to {len(prims)} selected models###context_menu_bind"
await wait_for_window(window_name)
# open listbox
widget = ui_test.find(f"{window_name}//Frame/**/Button[*].identifier=='combo_open_button'")
await ui_test.emulate_mouse_move_and_click(widget.center, human_delay_speed=4)
# select material item on listbox
await wait_for_window("MaterialPropertyPopupWindow")
widget = ui_test.find(f"MaterialPropertyPopupWindow//Frame/**/TreeView[*]")
# FIXME - can't use widget.click as open combobox has no readable size and clicks goto stage window
item_name = widget.model.get_item_children(None)[index].name_model.as_string if index else "None"
await ui_test.find(f"MaterialPropertyPopupWindow//Frame/**/Label[*].text=='{item_name}'").click(human_delay_speed=4)
# select strength item on listbox
widget = ui_test.find(f"{window_name}//Frame/**/ComboBox[*]")
if widget:
widget.model.set_value(strength_index)
# click ok
widget = ui_test.find(f"{window_name}//Frame/**/Button[*].identifier=='assign_material_ok_button'")
await ui_test.emulate_mouse_move_and_click(widget.center, human_delay_speed=4)
# wait for materials to load
await ui_test.human_delay()
await wait_stage_loading()
async def handle_create_material_dialog(mdl_path: str, mtl_name: str):
carb.log_warn("WARNING: 'handle_create_material_dialog' is being DEPRECATED. Please use the function of the same name from 'omni.kit.material.library.test_helper.MaterialLibraryTestHelper'")
subid_list = []
def have_subids(id_list):
nonlocal subid_list
subid_list = id_list
await omni.kit.material.library.get_subidentifier_from_mdl(mdl_file=mdl_path, on_complete_fn=have_subids)
if len(subid_list)> 1:
# material has subid and dialog is shown
await wait_for_window("Create Material")
create_widget = ui_test.find("Create Material//Frame/**/Button[*].identifier=='create_material_ok_button'")
subid_widget = ui_test.find("Create Material//Frame/**/ComboBox[*].identifier=='create_material_subid_combo'")
subid_list = subid_widget.model.get_item_list()
subid_index = 0
for index, subid in enumerate(subid_list):
if subid.name == mtl_name:
subid_index = index
subid_widget.model.set_current_index(subid_index)
await ui_test.human_delay()
create_widget.widget.call_clicked_fn()
await ui_test.human_delay(4)
await wait_stage_loading()
async def delete_prim_path_children(prim_path: str):
stage = omni.usd.get_context().get_stage()
root_prim = stage.GetPrimAtPath(prim_path)
purge_list = []
for prim in Usd.PrimRange(root_prim):
if prim.GetPath().pathString != root_prim.GetPath().pathString:
purge_list.append(prim)
for prim in purge_list:
stage.RemovePrim(prim.GetPath())
# wait for refresh after deleting prims
await ui_test.human_delay()
async def build_sdf_asset_frame_dictonary():
widget_table = {}
for frame in ui_test.find_all("Property//Frame/**/CollapsableFrame[*]"):
if frame.widget.title != "Raw USD Properties":
for widget in frame.find_all("Property//Frame/**/StringField[*].identifier!=''"):
if widget.widget.identifier.startswith('sdf_asset_'):
if not frame.widget.title in widget_table:
widget_table[frame.widget.title] = {}
if not widget.widget.identifier in widget_table[frame.widget.title]:
widget_table[frame.widget.title][widget.widget.identifier] = 0
widget_table[frame.widget.title][widget.widget.identifier] += 1
return widget_table
def push_window_height(cls, window_name, new_height=None):
window = ui_test.find(window_name)
if window:
if not hasattr(cls, "_original_window_height"):
cls._original_window_height = {}
cls._original_window_height[window_name] = window.widget.height
if new_height is not None:
window.widget.height = new_height
def pop_window_height(cls, window_name):
window = ui_test.find(window_name)
if window:
if hasattr(cls, "_original_window_height"):
window.widget.height = cls._original_window_height[window_name]
del cls._original_window_height[window_name]
async def handle_multiple_descendents_dialog(stage, prim_path: str, target_prim: str):
root_prim = stage.GetPrimAtPath(prim_path)
if not root_prim:
return
descendents = omni.usd.get_prim_descendents(root_prim)
# skip if only root_prim
if descendents == [root_prim]:
return
await ui_test.human_delay(10)
await wait_for_window("Target prim has multiple descendents")
await ui_test.human_delay(10)
# need to select target_prim in combo_widget
combo_widget = ui_test.find("Target prim has multiple descendents//Frame/**/ComboBox[*].identifier=='multi_descendents_combo'")
combo_list = combo_widget.model.get_item_children(None)
combo_index = 0
for index, item in enumerate(combo_list):
if item.prim.GetPrimPath().pathString == target_prim:
combo_index = index
combo_widget.model.set_current_index(combo_index)
await ui_test.human_delay()
ok_widget = ui_test.find("Target prim has multiple descendents//Frame/**/Button[*].identifier=='multi_descendents_ok_button'")
await ok_widget.click()
await wait_stage_loading()
async def arrange_windows(topleft_window="Stage", topleft_height=421.0, topleft_width=436.0, hide_viewport=False):
from omni.kit.viewport.utility import get_active_viewport_window
viewport_window = get_active_viewport_window()
# omni.ui & legacy viewport synch
await wait()
if viewport_window:
vp_width = int(1436 - topleft_width)
viewport_window.position_x = 0
viewport_window.position_y = 0
viewport_window.width = vp_width
viewport_window.height = 425
viewport_window.visible = (not hide_viewport)
# # XXX: force the legacy API
# if hasattr(viewport_window, 'legacy_window'):
# legacy_window = viewport_window.legacy_window
# legacy_window.set_window_pos(0, 0)
# legacy_window.set_window_size(vp_width, 425)
# viewport.show_hide_window(not hide_viewport)
import omni.ui as ui
content_window = ui.Workspace.get_window("Content")
if content_window:
content_window.position_x = 0.0
content_window.position_y = 448.0
content_window.width = 1436.0 - topleft_width
content_window.height = 421.0
await ui_test.human_delay()
stage_window = ui.Workspace.get_window("Stage")
if stage_window:
stage_window.position_x = 1436.0 - topleft_width
stage_window.position_y = 0.0
stage_window.width = topleft_width
stage_window.height = topleft_height
await ui_test.human_delay()
layer_window = ui.Workspace.get_window("Layer")
if layer_window:
layer_window.position_x = 1436.0 - topleft_width
layer_window.position_y = 0.0
layer_window.width = topleft_width
layer_window.height = topleft_height
await ui_test.human_delay()
tl_window = ui.Workspace.get_window(topleft_window)
if tl_window:
tl_window.focus()
property_window = ui.Workspace.get_window("Property")
if property_window:
property_window.position_x = 1436.0 - topleft_width
property_window.position_y = topleft_height + 27.0
property_window.width = topleft_width
property_window.height = 846.0 - topleft_height
await ui_test.human_delay()
# Wait for the layout to complete
await wait()
return viewport_window
| 10,555 | Python | 37.246377 | 194 | 0.665656 |
omniverse-code/kit/exts/omni.kit.test_suite.helpers/omni/kit/test_suite/helpers/__init__.py | from .helpers import *
| 23 | Python | 10.999995 | 22 | 0.73913 |
omniverse-code/kit/exts/omni.kit.test_suite.helpers/omni/kit/test_suite/helpers/test_populators.py | """Support for utility classes that populate a list of tests from various locations"""
| 87 | Python | 42.999979 | 86 | 0.781609 |
omniverse-code/kit/exts/omni.kit.test_suite.helpers/docs/index.rst | omni.kit.test_suite.helpers
###########################
test suite helpers
.. toctree::
:maxdepth: 1
CHANGELOG
| 120 | reStructuredText | 11.099999 | 27 | 0.533333 |
omniverse-code/kit/exts/omni.kit.window.file_importer/scripts/demo_file_importer.py | # Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
import os
import asyncio
import omni.ui as ui
import omni.usd as usd
from typing import List
from omni.kit.window.file_importer import get_file_importer, ImportOptionsDelegate
# BEGIN-DOC-import_options
class MyImportOptionsDelegate(ImportOptionsDelegate):
def __init__(self):
super().__init__(build_fn=self._build_ui_impl, destroy_fn=self._destroy_impl)
self._widget = None
def _build_ui_impl(self):
self._widget = ui.Frame()
with self._widget:
with ui.VStack():
with ui.HStack(height=24, spacing=2, style={"background_color": 0xFF23211F}):
ui.Label("Prim Path", width=0)
ui.StringField().model = ui.SimpleStringModel()
ui.Spacer(height=8)
def _destroy_impl(self, _):
if self._widget:
self._widget.destroy()
self._widget = None
# END-DOC-import_options
# BEGIN-DOC-tagging_options
class MyTaggingOptionsDelegate(ImportOptionsDelegate):
def __init__(self):
super().__init__(
build_fn=self._build_ui_impl,
filename_changed_fn=self._filename_changed_impl,
selection_changed_fn=self._selection_changed_impl,
destroy_fn=self._destroy_impl
)
self._widget = None
def _build_ui_impl(self, file_type: str=''):
self._widget = ui.Frame()
with self._widget:
with ui.VStack():
ui.Button(f"Tags for {file_type or 'unknown'} type", height=24)
def _filename_changed_impl(self, filename: str):
if filename:
_, ext = os.path.splitext(filename)
self._build_ui_impl(file_type=ext)
def _selection_changed_impl(self, selections: List[str]):
if len(selections) == 1:
_, ext = os.path.splitext(selections[0])
self._build_ui_impl(file_type=ext)
def _destroy_impl(self, _):
if self._widget:
self._widget.destroy()
self._widget = None
# END-DOC-tagging_options
class DemoFileImporterDialog:
"""
Example that demonstrates how to invoke the file importer dialog.
"""
def __init__(self):
self._app_window: ui.Window = None
self._import_options: ImportOptionsDelegate = None
self._tagging_options: ImportOptionsDelegate = None
self.build_ui()
def build_ui(self):
""" """
window_flags = ui.WINDOW_FLAGS_NO_SCROLLBAR
self._app_window = ui.Window("File Importer", width=1000, height=500, flags=window_flags)
with self._app_window.frame:
with ui.VStack(spacing=10):
with ui.HStack(height=30):
ui.Spacer()
button = ui.Button(text="Import File", width=120)
button.set_clicked_fn(self._show_dialog)
ui.Spacer()
asyncio.ensure_future(self._dock_window("File Importer", ui.DockPosition.TOP))
def _show_dialog(self):
# BEGIN-DOC-get_instance
# Get the singleton extension.
file_importer = get_file_importer()
if not file_importer:
return
# END-DOC-get_instance
# BEGIN-DOC-show_window
file_importer.show_window(
title="Import File",
import_handler=self.import_handler,
#filename_url="omniverse://ov-rc/NVIDIA/Samples/Marbles/Marbles_Assets.usd",
)
# END-DOC-show_window
# BEGIN-DOC-add_tagging_options
self._tagging_options = MyTaggingOptionsDelegate()
file_importer.add_import_options_frame("Tagging Options", self._tagging_options)
# END-DOC-add_tagging_options
# BEGIN-DOC-add_import_options
self._import_options = MyImportOptionsDelegate()
file_importer.add_import_options_frame("Import Options", self._import_options)
# END-DOC-add_import_options
def _hide_dialog(self):
# Get the File Importer extension.
file_importer = get_file_importer()
if file_importer:
file_importer.hide()
# BEGIN-DOC-import_handler
def import_handler(self, filename: str, dirname: str, selections: List[str] = []):
# NOTE: Get user inputs from self._import_options, if needed.
print(f"> Import '{filename}' from '{dirname}' or selected files '{selections}'")
# END-DOC-import_handler
async def _dock_window(self, window_title: str, position: ui.DockPosition, ratio: float = 1.0):
frames = 3
while frames > 0:
if ui.Workspace.get_window(window_title):
break
frames = frames - 1
await omni.kit.app.get_app().next_update_async()
window = ui.Workspace.get_window(window_title)
dockspace = ui.Workspace.get_window("DockSpace")
if window and dockspace:
window.dock_in(dockspace, position, ratio=ratio)
window.dock_tab_bar_visible = False
def destroy(self):
if self._app_window:
self._app_window.destroy()
self._app_window = None
if __name__ == "__main__":
view = DemoFileImporterDialog()
| 5,583 | Python | 34.119497 | 99 | 0.615081 |
omniverse-code/kit/exts/omni.kit.window.file_importer/omni/kit/window/file_importer/__init__.py | # Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
"""A standardized dialog for importing files"""
__all__ = ['FileImporterExtension', 'get_file_importer']
from carb import log_warn
from omni.kit.window.filepicker import DetailFrameController as ImportOptionsDelegate
from .extension import FileImporterExtension, get_instance
def get_file_importer() -> FileImporterExtension:
"""Returns the singleton file_importer extension instance"""
instance = get_instance()
if instance is None:
log_warn("File importer extension is no longer alive.")
return instance | 965 | Python | 42.909089 | 85 | 0.780311 |
omniverse-code/kit/exts/omni.kit.window.file_importer/omni/kit/window/file_importer/tests/test_extension.py | ## Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import os
import omni.kit.test
import asyncio
import omni.kit.ui_test as ui_test
import omni.appwindow
from unittest.mock import Mock, patch, ANY
from carb.settings import ISettings
from omni.kit.window.filepicker import FilePickerDialog
from omni.kit.test_suite.helpers import get_test_data_path
from .. import get_file_importer
from ..extension import FileImporterExtension
from ..test_helper import FileImporterTestHelper
class TestFileImporter(omni.kit.test.AsyncTestCase):
"""
Testing omni.kit.window.file_importer extension. NOTE that since the dialog is a singleton, we use an async
    lock to ensure that only one test runs at a time. In practice, this is not an issue because only one extension
is accessing the dialog at any given time.
"""
__lock = asyncio.Lock()
async def setUp(self):
self._settings_path = "my_settings"
self._test_settings = {
"/exts/omni.kit.window.file_importer/appSettings": self._settings_path,
f"{self._settings_path}/directory": "C:/temp/folder",
}
async def tearDown(self):
pass
def _mock_settings_get_string_impl(self, name: str) -> str:
return self._test_settings.get(name)
def _mock_settings_set_string_impl(self, name: str, value: str):
self._test_settings[name] = value
async def test_show_window_destroys_previous(self):
"""Testing show_window destroys previously allocated dialog"""
async with self.__lock:
under_test = get_file_importer()
with patch.object(FilePickerDialog, "destroy", autospec=True) as mock_destroy_dialog,\
patch("carb.windowing.IWindowing.hide_window"):
under_test.show_window(title="first")
under_test.show_window(title="second")
mock_destroy_dialog.assert_called()
dialog = mock_destroy_dialog.call_args[0][0]
self.assertEqual(str(dialog._window), "first")
async def test_hide_window_destroys_it(self):
"""Testing that hiding the window destroys it"""
async with self.__lock:
under_test = get_file_importer()
with patch.object(FilePickerDialog, "destroy", autospec=True) as mock_destroy_dialog:
under_test.show_window(title="test")
under_test._dialog.hide()
# Dialog is destroyed after a couple frames
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
mock_destroy_dialog.assert_called()
dialog = mock_destroy_dialog.call_args[0][0]
self.assertEqual(str(dialog._window), "test")
async def test_hide_window_destroys_detached_window(self):
"""Testing that hiding the window destroys detached window."""
async with self.__lock:
under_test = get_file_importer()
with patch.object(FilePickerDialog, "destroy", autospec=True) as mock_destroy_dialog:
under_test.show_window(title="test_detached")
await omni.kit.app.get_app().next_update_async()
under_test.detach_from_main_window()
main_window = omni.appwindow.get_default_app_window().get_window()
self.assertFalse(main_window is under_test._dialog._window.app_window.get_window())
under_test.hide_window()
# Dialog is destroyed after a couple frames
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
mock_destroy_dialog.assert_called()
dialog = mock_destroy_dialog.call_args[0][0]
self.assertEqual(str(dialog._window), "test_detached")
async def test_load_default_settings(self):
"""Testing that dialog applies saved settings"""
async with self.__lock:
under_test = get_file_importer()
with patch('omni.kit.window.file_importer.extension.FilePickerDialog') as mock_dialog,\
patch.object(ISettings, "get_as_string", side_effect=self._mock_settings_get_string_impl):
under_test.show_window(title="test_dialog")
# Retrieve keyword args for the constructor (first call), and confirm called with expected values
constructor_kwargs = mock_dialog.call_args_list[0][1]
self.assertEqual(constructor_kwargs['current_directory'], self._test_settings[f"{self._settings_path}/directory"])
async def test_override_default_settings(self):
"""Testing that user values override default settings"""
async with self.__lock:
test_url = "Omniverse://ov-test/my-folder/my-file.usd"
under_test = get_file_importer()
with patch('omni.kit.window.file_importer.extension.FilePickerDialog') as mock_dialog,\
patch.object(ISettings, "get_as_string", side_effect=self._mock_settings_get_string_impl),\
patch("carb.windowing.IWindowing.hide_window"):
under_test.show_window(title="test_dialog", filename_url=test_url)
# Retrieve keyword args for the constructor (first call), and confirm called with expected values
constructor_kwargs = mock_dialog.call_args_list[0][1]
dirname, filename = os.path.split(test_url)
self.assertEqual(constructor_kwargs['current_directory'], dirname)
self.assertEqual(constructor_kwargs['current_filename'], filename)
async def test_save_settings_on_import(self):
"""Testing that settings are saved on import"""
from ..extension import on_import
my_settings = {
'filename': "my-file.anim.usd",
'directory': "Omniverse://ov-test/my-folder",
}
mock_dialog = Mock()
with patch.object(ISettings, "get_as_string", side_effect=self._mock_settings_get_string_impl),\
patch.object(ISettings, "set_string", side_effect=self._mock_settings_set_string_impl):
on_import(None, mock_dialog, my_settings['filename'], my_settings['directory'])
# Retrieve keyword args for the constructor (first call), and confirm called with expected values
self.assertEqual(my_settings['directory'], self._test_settings[f"{self._settings_path}/directory"])
async def test_show_only_folders(self):
"""Testing show only folders option."""
mock_handler = Mock()
async with self.__lock:
under_test = get_file_importer()
test_path = get_test_data_path(__name__).replace("\\", '/')
async with FileImporterTestHelper() as helper:
with patch("carb.windowing.IWindowing.hide_window"):
# under normal circumstance, files will be shown and could be selected.
under_test.show_window(title="test", filename_url=test_path + "/")
await ui_test.human_delay(10)
item = await helper.get_item_async(None, "dummy.usd")
self.assertIsNotNone(item)
# apply button should be disabled
self.assertFalse(under_test._dialog._widget.file_bar._apply_button.enabled)
await helper.click_cancel_async()
# if shown with show_only_folders, files will not be shown
under_test.show_window(title="test", show_only_folders=True, import_handler=mock_handler)
await ui_test.human_delay(10)
item = await helper.get_item_async(None, "dummy.usd")
self.assertIsNone(item)
# try selecting a folder
selections = await under_test.select_items_async(test_path, filenames=['folder'])
self.assertEqual(len(selections), 1)
selected = selections[0]
# apply button should not be disabled
self.assertTrue(under_test._dialog._widget.file_bar._apply_button.enabled)
await ui_test.human_delay()
await helper.click_apply_async()
mock_handler.assert_called_once_with('', selected.path + "/", selections=[selected.path])
async def test_cancel_handler(self):
"""Testing cancel handler."""
mock_handler = Mock()
async with self.__lock:
under_test = get_file_importer()
async with FileImporterTestHelper() as helper:
under_test.show_window("test_cancel_handler")
await helper.wait_for_popup()
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
await helper.click_cancel_async(cancel_handler=mock_handler)
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
mock_handler.assert_called_once()
class TestFileFilterHandler(omni.kit.test.AsyncTestCase):
async def setUp(self):
ext_type = {
'usd': "*.usd",
'usda': "*.usda",
'usdc': "*.usdc",
'usdz': "*.usdz",
'multiusd': "*.usd, *.usda, *.usdc, *.usdz",
'all': "*.*",
'bad_formatting_type': "*.usd, .usda, *.usdc,*.usdz, , ,"
}
self.test_filenames = [
("test.anim.usd", "anim", ext_type['all'], True),
("test.anim.usd", "anim", ext_type['usd'], True),
("test.anim.usdz", "anim", ext_type['usdz'], True),
("test.anim.usdc", "anim", ext_type['usdz'], False),
("test.anim.usda", None, ext_type['usda'], True),
("test.material.", None, ext_type['usda'], False),
("test.materials.usd", "material", ext_type['usd'], False),
("test.material.", None, ext_type['all'], True),
("test.material.", "anim", ext_type['all'], False),
("test.material.", "material", ext_type['all'], True),
]
for t in ["multiusd", 'bad_formatting_type']:
self.test_filenames.extend([
("test.anim.usd", "anim", ext_type[t], True),
("test.anim.usdz", "anim", ext_type[t], True),
("test.anim.usdc", "anim", ext_type[t], True),
("test.anim.usda", None, ext_type[t], True),
("test.anim.bbb", None, ext_type[t], False),
("test.material.", None, ext_type[t], False),
("test.anim.usdc", "cache", ext_type[t], False),
("test.anim.usdc", None, ext_type[t], True),
])
async def tearDown(self):
pass
async def test_file_filter_handler(self):
"""Testing default file filter handler"""
from ..extension import default_filter_handler
for test_filename in self.test_filenames:
filename, postfix, ext, expected = test_filename
result = default_filter_handler(filename, postfix, ext)
self.assertEqual(result, expected)
| 11,636 | Python | 48.519149 | 126 | 0.601152 |
omniverse-code/kit/exts/omni.kit.window.file_importer/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.0.10] - 2023-01-11
### Changed
- Added `get_config_menu_settings` & `set_config_menu_settings`
## [1.0.9] - 2022-12-20
### Changed
- Improve the file extension filter.
## [1.0.8] - 2022-11-08
### Added
- Updated the docs.
## [1.0.7] - 2022-10-13
### Added
- Added `select_items_async` to file_importer ext and test helper.
## [1.0.6] - 2022-07-07
### Added
- Added test helper function that waits for UI to be ready.
## [1.0.5] - 2022-06-15
### Added
- Adds test helper.
## [1.0.4] - 2022-04-06
### Added
- Disable unittests from loading at startup.
## [1.0.3] - 2022-03-17
### Added
- Selected file extension defaults to USD type.
## [1.0.2] - 2022-02-09
### Added
- Adds click_apply and click_cancel functions.
## [1.0.1] - 2022-02-03
### Added
- Adds postfix and extension options.
## [1.0.0] - 2021-11-16
### Added
- Initial add.
| 945 | Markdown | 18.708333 | 80 | 0.635979 |
omniverse-code/kit/exts/omni.kit.window.file_importer/docs/README.md | # Kit File Importer Extension [omni.kit.window.file_importer]
The File Importer extension provides a standardized dialog for importing files.
| 91 | Markdown | 21.999995 | 61 | 0.802198 |
omniverse-code/kit/exts/omni.kit.window.file_importer/docs/Overview.md | # Overview
The file_importer extension provides a standardized dialog for importing files. It is a wrapper around the {obj}`FilePickerDialog`,
but with reasonable defaults for common settings, so it's a higher-level entry point to that interface.
Nevertheless, users still have the ability to customize some parts, but these have been boiled down to just the essential ones.
Why you should use this extension:
* Present a consistent file import experience across the app.
* Customize only the essential parts while inheriting sensible defaults elsewhere.
* Reduce boilerplate code.
* Inherit future improvements.
* Checkpoints fully supported if available on the server.
```{image} ../../../../source/extensions/omni.kit.window.file_importer/data/preview.png
---
align: center
---
```
## Quickstart
You can pop-up a dialog in just 2 steps. First, retrieve the extension.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/scripts/demo_file_importer.py
---
language: python
start-after: BEGIN-DOC-get_instance
end-before: END-DOC-get_instance
dedent: 8
---
```
Then, invoke its show_window method.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/scripts/demo_file_importer.py
---
language: python
start-after: BEGIN-DOC-show_window
end-before: END-DOC-show_window
dedent: 8
---
```
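Putting the two steps together, a minimal end-to-end call looks roughly like the sketch below. The `import_handler` here is only a stand-in for your own callback; see the Import handler section for details.
```python
from omni.kit.window.file_importer import get_file_importer

def import_handler(filename: str, dirname: str, selections=None):
    # Stand-in callback: receives the typed filename, its directory, and any selected paths.
    print(f"Importing '{filename}' from '{dirname}', selections: {selections}")

file_importer = get_file_importer()
if file_importer:
    file_importer.show_window(title="Import File", import_handler=import_handler)
```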
Note that the extension is a singleton, meaning there's only one instance of it throughout the app. Basically, we are assuming that you'd
never open more than one instance of the dialog at any one time. The advantage is that we can channel any development through this
single extension and all users will inherit the same changes.
## Customizing the Dialog
You can customize these parts of the dialog.
* Title - The title of the dialog.
* Collections - Which of these collections, ["bookmarks", "omniverse", "my-computer"] to display.
* Filename Url - Url of the file to import.
* Postfix options - Show only files of these content types.
* Extension options - Show only files with these filename extensions.
* Import label - Label for the import button.
* Import handler - User provided callback to handle the import process.
Note that these settings are applied when you show the window. Therefore, each time it's displayed, the dialog can be tailored to
the use case.
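For example, a dialog tailored for importing animation files might be shown roughly as follows. The keyword names below (`import_button_label`, `filename_url`, `file_postfix_options`, `file_extension_types`) mirror the option list above but are a sketch; check `show_window`'s signature for the exact names it accepts.
```python
file_importer = get_file_importer()
if file_importer:
    file_importer.show_window(
        title="Import Animation",
        import_button_label="Import",                   # label for the import button
        import_handler=self.import_handler,             # your import callback
        filename_url="omniverse://server/project/",     # hypothetical starting location
        file_postfix_options=["anim"],                  # show only *.anim.* content
        file_extension_types=[(".usd", "USD files")],   # show only these extensions
    )
```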
## Filter files by type
The user has the option to filter what files get shown in the list view.
One challenge of working in Omniverse is that everything is a USD file. An expected use case is to show only files of a
particular content type. To facilitate this workflow, we suggest adding a postfix to the filename, e.g. "file.animation.usd".
The file bar contains a dropdown that lists the default postfix labels, so you can filter by these. You have the option to
override this list.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/omni/kit/window/file_importer/extension.py
---
language: python
start-after: BEGIN-DOC-file_postfix_options
end-before: END-DOC-file_postfix_options
dedent: 0
---
```
You can also filter by filename extension. By default, we provide the option to show only USD files.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/omni/kit/window/file_importer/extension.py
---
language: python
start-after: BEGIN-DOC-file_extension_types
end-before: END-DOC-file_extension_types
dedent: 0
---
```
If you override either of the lists above, then you'll also need to provide a filter handler. The handler is called to decide whether
or not to display a given file. The default handler is shown below as an example.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/omni/kit/window/file_importer/extension.py
---
language: python
start-after: BEGIN-DOC-file_filter_handler
end-before: END-DOC-file_filter_handler
dedent: 0
---
```
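For example, a custom handler wired up together with overridden postfix and extension lists might look roughly like the sketch below. The handler signature mirrors the default handler above; the keyword names (`file_postfix_options`, `file_extension_types`, `file_filter_handler`) are assumptions and should be checked against `show_window`'s signature.
```python
def my_filter_handler(filename: str, filter_postfix: str, filter_ext: str) -> bool:
    # Keep everything when "*.*" is selected; otherwise require a matching extension
    # and, if a postfix is selected, require it in the filename (e.g. "file.anim.usd").
    if filter_ext not in ("", "*.*"):
        extensions = [e.strip().lstrip("*") for e in filter_ext.split(",") if e.strip()]
        if not any(filename.endswith(e) for e in extensions):
            return False
    return not filter_postfix or f".{filter_postfix}." in filename

file_importer = get_file_importer()
if file_importer:
    file_importer.show_window(
        title="Import File",
        file_postfix_options=["anim", "material"],
        file_extension_types=[(".usd", "USD files"), (".usda", "ASCII USD files")],
        file_filter_handler=my_filter_handler,
    )
```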
## Import options
A common need is to provide user options for the import process. You create the widget for accepting those inputs,
then add it to the details pane of the dialog. Do this by subclassing from {obj}`ImportOptionsDelegate`
and overriding the methods, {meth}`ImportOptionsDelegate._build_ui_impl` and (optionally) {meth}`ImportOptionsDelegate._destroy_impl`.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/scripts/demo_file_importer.py
---
language: python
start-after: BEGIN-DOC-import_options
end-before: END-DOC-import_options
dedent: 0
---
```
Then provide the controller to the file picker for display.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/scripts/demo_file_importer.py
---
language: python
start-after: BEGIN-DOC-add_import_options
end-before: END-DOC-add_import_options
dedent: 8
---
```
## Import handler
Provide a handler for when the Import button is clicked. The handler should expect a list of {attr}`selections` made from the UI.
```{literalinclude} ../../../../source/extensions/omni.kit.window.file_importer/scripts/demo_file_importer.py
---
language: python
start-after: BEGIN-DOC-import_handler
end-before: END-DOC-import_handler
dedent: 4
---
```
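If the import depends on values from your options widget, the handler is a natural place to read them back. A minimal sketch, assuming a hypothetical `get_values()` accessor on your options delegate:
```python
def import_handler(self, filename: str, dirname: str, selections=None):
    # get_values() is hypothetical; use whatever accessor your ImportOptionsDelegate exposes.
    options = self._import_options.get_values() if self._import_options else {}
    print(f"Importing '{filename}' from '{dirname}' with options {options}")
    for path in selections or []:
        print(f"  selected: {path}")
```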
## Demo app
A complete demo, which includes the code snippets above, is included with this extension at "scripts/demo_file_importer.py".
| 5,291 | Markdown | 35 | 139 | 0.756379 |
omniverse-code/kit/exts/omni.kit.window.console/config/extension.toml | [package]
version = "0.2.3"
category = "Internal"
[dependencies]
"omni.ui" = {}
"omni.usd" = {}
"omni.kit.menu.utils" = {}
[[native.library]]
path = "bin/${lib_prefix}omni.kit.window.console.plugin${lib_ext}"
[settings.exts."omni.kit.window.console"]
# Commands to execute on startup:
startupCommands = []
logFilter.verbose = false
logFilter.info = false
logFilter.warning = true
logFilter.error = true
logFilter.fatal = true
# Show input field to input commands
enableInput = true
[[python.module]]
name = "omni.kit.window.console"
[[test]]
args = [
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false",
"--no-window"
]
dependencies = [
"omni.kit.renderer.capture",
"omni.kit.mainwindow",
]
| 741 | TOML | 18.025641 | 66 | 0.681511 |
omniverse-code/kit/exts/omni.kit.window.console/omni/kit/window/console/__init__.py | from ._console import *
from .scripts import * | 47 | Python | 14.999995 | 23 | 0.723404 |
omniverse-code/kit/exts/omni.kit.window.console/omni/kit/window/console/scripts/console_window.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
__all__ = ["ConsoleWindow"]
import omni.kit
import omni.ui as ui
from .._console import ConsoleWidget
class ConsoleWindow(ui.Window):
"""The Console window"""
def __init__(self):
self._title = "Console"
super().__init__(
self._title,
width=1000,
height=600,
dockPreference=ui.DockPreference.LEFT_BOTTOM,
raster_policy=ui.RasterPolicy.NEVER
)
# Dock it to the same space where Content is docked, make it the second tab and the active tab.
self.deferred_dock_in("Content", ui.DockPolicy.CURRENT_WINDOW_IS_ACTIVE)
self.dock_order = 1
        # Initialize the listener slot before wiring up the callback so a visibility
        # change arriving before set_visibility_changed_listener() is called doesn't fail.
        self._visiblity_changed_listener = None
        self.set_visibility_changed_fn(self._visibility_changed_fn)
with self.frame:
self._console_widget = ConsoleWidget()
def _visibility_changed_fn(self, visible):
if self._visiblity_changed_listener:
self._visiblity_changed_listener(visible)
def set_visibility_changed_listener(self, listener):
self._visiblity_changed_listener = listener
def destroy(self):
"""
Called by extension before destroying this object. It doesn't happen automatically.
Without this hot reloading doesn't work.
"""
if self._console_widget:
self._console_widget.destroy()
self._console_widget = None
self._visiblity_changed_listener = None
super().destroy()
| 1,868 | Python | 32.374999 | 103 | 0.663812 |
omniverse-code/kit/exts/omni.kit.window.console/omni/kit/window/console/scripts/__init__.py | from .console_extension import ConsoleExtension | 47 | Python | 46.999953 | 47 | 0.893617 |
omniverse-code/kit/exts/omni.kit.window.console/omni/kit/window/console/tests/__init__.py | from .console_tests import *
| 29 | Python | 13.999993 | 28 | 0.758621 |
omniverse-code/kit/exts/omni.kit.window.console/omni/kit/window/console/tests/console_tests.py | ## Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
##
## NVIDIA CORPORATION and its licensors retain all intellectual property
## and proprietary rights in and to this software, related documentation
## and any modifications thereto. Any use, reproduction, disclosure or
## distribution of this software and related documentation without an express
## license agreement from NVIDIA CORPORATION is strictly prohibited.
##
import omni.kit.test
from omni.ui.tests.test_base import OmniUiTest
from omni.kit.window.console import ConsoleWidget
from pathlib import Path
import omni.kit.app
import omni.kit.ui_test as ui_test
class TestConsoleWindow(OmniUiTest):
# Before running each test
async def setUp(self):
await super().setUp()
extension_path = omni.kit.app.get_app().get_extension_manager().get_extension_path_by_module(__name__)
self._golden_img_dir = Path(extension_path).joinpath("data").joinpath("tests").absolute()
# After running each test
async def tearDown(self):
await super().tearDown()
async def test_console(self):
window = await self.create_test_window(1000, 600)
with window.frame:
console_widget = ConsoleWidget()
console_widget.exec_command("clear")
console_window = ui_test.find("Console")
console_window._widget._console_widget.exec_command("clear")
for i in range(5):
await omni.kit.app.get_app().next_update_async()
await self.finalize_test(golden_img_dir=self._golden_img_dir, golden_img_name="test_console.png", use_log=False)
        console_widget.visible = False
        console_widget.destroy()
        console_window._widget.visible = False
        console_window._widget.destroy()
| 1,768 | Python | 36.638297 | 120 | 0.701357 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Sdf/__init__.py | #
# Copyright 2016 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
#
from pxr import Tf
Tf.PreparePythonModule()
del Tf
def Find(layerFileName, scenePath=None):
'''Find(layerFileName, scenePath) -> object
layerFileName: string
scenePath: Path
If given a single string argument, returns the menv layer with
the given filename. If given two arguments (a string and a Path), finds
the menv layer with the given filename and returns the scene object
within it at the given path.'''
layer = Layer.Find(layerFileName)
if (scenePath is None): return layer
return layer.GetObjectAtPath(scenePath)
# Test utilities
def _PathElemsToPrefixes(absolute, elements):
if absolute:
string = "/"
else:
string = ""
lastElemWasDotDot = False
didFirst = False
for elem in elements:
if elem == Path.parentPathElement:
# dotdot
if didFirst:
string = string + "/"
else:
didFirst = True
string = string + elem
lastElemWasDotDot = True
elif elem[0] == ".":
# property
if lastElemWasDotDot:
string = string + "/"
string = string + elem
lastElemWasDotDot = False
elif elem[0] == "[":
# rel attr or sub-attr indices, don't care which
string = string + elem
lastElemWasDotDot = False
else:
if didFirst:
string = string + "/"
else:
didFirst = True
string = string + elem
lastElemWasDotDot = False
if not string:
return []
path = Path(string)
return path.GetPrefixes()
| 2,722 | Python | 31.807229 | 74 | 0.646583 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Sdf/__DOC.py | def Execute(result):
result["AssetPath"].__doc__ = """
Contains an asset path and an optional resolved path. Asset paths may
contain non-control UTF-8 encoded characters. Specifically,
U+0000\\.\\.U+001F (C0 controls), U+007F (delete), and
U+0080\\.\\.U+009F (C1 controls) are disallowed. Attempts to construct
asset paths with such characters will issue a TfError and produce the
default-constructed empty asset path.
"""
result["AssetPath"].__init__.func_doc = """__init__()
Construct an empty asset path.
----------------------------------------------------------------------
__init__(path)
Construct an asset path with ``path`` and no associated resolved path.
If the passed ``path`` is not valid UTF-8 or contains C0 or C1 control
characters, raise a TfError and return the default-constructed empty
asset path.
Parameters
----------
path : str
----------------------------------------------------------------------
__init__(path, resolvedPath)
Construct an asset path with ``path`` and an associated
``resolvedPath`` .
If either the passed \path or ``resolvedPath`` are not valid UTF-8 or
either contain C0 or C1 control characters, raise a TfError and return
the default-constructed empty asset path.
Parameters
----------
path : str
resolvedPath : str
"""
result["AttributeSpec"].__doc__ = """
A subclass of SdfPropertySpec that holds typed data.
Attributes are typed data containers that can optionally hold any and
all of the following:
- A single default value.
- An array of knot values describing how the value varies over
time.
- A dictionary of posed values, indexed by name.
The values contained in an attribute must all be of the same type. In
the Python API the ``typeName`` property holds the attribute type. In
the C++ API, you can get the attribute type using the GetTypeName()
method. In addition, all values, including all knot values, must be
the same shape. For information on shapes, see the VtShape class
reference in the C++ documentation.
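A minimal sketch of creating an attribute spec with a default value in Python:
.. code-block:: python

    from pxr import Sdf

    layer = Sdf.Layer.CreateAnonymous()
    prim = Sdf.CreatePrimInLayer(layer, '/World')
    attr = Sdf.AttributeSpec(prim, 'radius', Sdf.ValueTypeNames.Double)
    attr.default = 1.0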
"""
result["AttributeSpec"].HasColorSpace.func_doc = """HasColorSpace() -> bool
Returns true if this attribute has a colorSpace value authored.
"""
result["AttributeSpec"].ClearColorSpace.func_doc = """ClearColorSpace() -> None
Clears the colorSpace metadata value set on this attribute.
"""
result["BatchNamespaceEdit"].__doc__ = """
A description of an arbitrarily complex namespace edit.
A ``SdfBatchNamespaceEdit`` object describes zero or more namespace
edits. Various types providing a namespace will allow the edits to be
applied in a single operation and also allow testing if this will
work.
Clients are encouraged to group several edits into one object because
that may allow more efficient processing of the edits. If, for
example, you need to reparent several prims it may be faster to add
all of the reparents to a single ``SdfBatchNamespaceEdit`` and apply
them at once than to apply each separately.
Objects that allow applying edits are free to apply the edits in any
way and any order they see fit but they should guarantee that the
resulting namespace will be as if each edit was applied one at a time
in the order they were added.
Note that the above rule permits skipping edits that have no effect or
generate a non-final state. For example, if renaming A to B then to C
we could just rename A to C. This means notices may be elided.
However, implementations must not elide notices that contain
information about any edit that clients must be able to know but
otherwise cannot determine.
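A minimal sketch of batching a rename in Python (the layer and prim paths are hypothetical):
.. code-block:: python

    from pxr import Sdf

    layer = Sdf.Layer.CreateAnonymous()
    Sdf.CreatePrimInLayer(layer, '/World/OldName')

    edit = Sdf.BatchNamespaceEdit()
    edit.Add('/World/OldName', '/World/NewName')
    if layer.Apply(edit):
        assert layer.GetPrimAtPath('/World/NewName')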
"""
result["BatchNamespaceEdit"].__init__.func_doc = """__init__()
Create an empty sequence of edits.
----------------------------------------------------------------------
__init__(arg1)
Parameters
----------
arg1 : BatchNamespaceEdit
----------------------------------------------------------------------
__init__(arg1)
Parameters
----------
arg1 : list[SdfNamespaceEdit]
"""
result["BatchNamespaceEdit"].Add.func_doc = """Add(edit) -> None
Add a namespace edit.
Parameters
----------
edit : NamespaceEdit
----------------------------------------------------------------------
Add(currentPath, newPath, index) -> None
Add a namespace edit.
Parameters
----------
currentPath : NamespaceEdit.Path
newPath : NamespaceEdit.Path
index : NamespaceEdit.Index
"""
result["BatchNamespaceEdit"].Process.func_doc = """Process(processedEdits, hasObjectAtPath, canEdit, details, fixBackpointers) -> bool
Validate the edits and generate a possibly more efficient edit
sequence.
Edits are treated as if they were performed one at time in sequence,
therefore each edit occurs in the namespace resulting from all
previous edits.
Editing the descendants of the object in each edit is implied. If an
object is removed then the new path will be empty. If an object is
removed after being otherwise edited, the other edits will be
processed and included in ``processedEdits`` followed by the removal.
This allows clients to fixup references to point to the object's final
location prior to removal.
This function needs help to determine if edits are allowed. The
callbacks provide that help. ``hasObjectAtPath`` returns ``true`` iff
there's an object at the given path. This path will be in the original
namespace not any intermediate or final namespace. ``canEdit`` returns
``true`` iff the object at the current path can be namespace edited to
the new path, ignoring whether an object already exists at the new
path. Both paths are in the original namespace. If it returns
``false`` it should set the string to the reason why the edit isn't
allowed. It should not write either path to the string.
If ``hasObjectAtPath`` is invalid then this assumes objects exist
where they should and don't exist where they shouldn't. Use this with
care. If ``canEdit`` is invalid then it's assumed all edits are valid.
If ``fixBackpointers`` is ``true`` then target/connection paths are
expected to be in the intermediate namespace resulting from all
previous edits. If ``false`` and any current or new path contains a
target or connection path that has been edited then this will generate
an error.
This method returns ``true`` if the edits are allowed and sets
``processedEdits`` to a new edit sequence at least as efficient as the
input sequence. If not allowed it returns ``false`` and appends
reasons why not to ``details`` .
Parameters
----------
processedEdits : list[SdfNamespaceEdit]
hasObjectAtPath : HasObjectAtPath
canEdit : CanEdit
details : list[SdfNamespaceEditDetail]
fixBackpointers : bool
"""
result["ChangeBlock"].__doc__ = """
**DANGER DANGER DANGER**
Please make sure you have read and fully understand the issues below
before using a changeblock! They are very easy to use in an unsafe way
that could make the system crash or corrupt data. If you have any
questions, please contact the USD team, who would be happy to help!
SdfChangeBlock provides a way to group a round of related changes to
scene description in order to process them more efficiently.
Normally, Sdf sends notification immediately as changes are made so
that downstream representations like UsdStage can update accordingly.
However, sometimes it can be advantageous to group a series of Sdf
changes into a batch so that they can be processed more efficiently,
with a single round of change processing. An example might be when
setting many avar values on a model at the same time.
Opening a changeblock tells Sdf to delay sending notification about
changes until the outermost changeblock is exited. Until then, Sdf
internally queues up the notification it needs to send.
It is *not* safe to use Usd or other downstream API while a
changeblock is open!! This is because those derived representations
will not have had a chance to update while the changeblock is open.
Not only will their view of the world be stale, it could be unsafe to
even make queries from, since they may be holding onto expired handles
to Sdf objects that no longer exist. If you need to make a bunch of
changes to scene description, the best approach is to build a list of
necessary changes that can be performed directly via the Sdf API, then
submit those all inside a changeblock without talking to any
downstream modules. For example, this is how many mutators in Usd
that operate on more than one field or Spec work.
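A minimal usage sketch in Python:
.. code-block:: python

    from pxr import Sdf

    layer = Sdf.Layer.CreateAnonymous()
    with Sdf.ChangeBlock():
        # Only Sdf-level authoring inside the block; notification is sent
        # once, when the outermost block exits.
        for i in range(100):
            Sdf.CreatePrimInLayer(layer, '/World/Prim_%d' % i)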
"""
result["ChangeBlock"].__init__.func_doc = """__init__()
"""
result["CleanupEnabler"].__doc__ = """
An RAII class which, when an instance is alive, enables scheduling of
automatic cleanup of SdfLayers.
Any affected specs which no longer contribute to the scene will be
removed when the last SdfCleanupEnabler instance goes out of scope.
Note that for this purpose, SdfPropertySpecs are removed if they have
only required fields (see SdfPropertySpecs::HasOnlyRequiredFields),
but only if the property spec itself was affected by an edit that left
it with only required fields. This will have the effect of
uninstantiating on-demand attributes. For example, if its parent prim
was affected by an edit that left it otherwise inert, it will not be
removed if it contains an SdfPropertySpec with only required fields,
but if the property spec itself is edited leaving it with only
required fields, it will be removed, potentially uninstantiating it if
it's an on-demand property.
SdfCleanupEnablers are accessible in both C++ and Python.
SdfCleanupEnabler can be used in the following manner:
.. code-block:: text
{
SdfCleanupEnabler enabler;
// Perform any action that might otherwise leave inert specs around,
// such as removing info from properties or prims, or removing name
// children. i.e:
primSpec->ClearInfo(SdfFieldKeys->Default);
// When enabler goes out of scope on the next line, primSpec will
// be removed if it has been left as an empty over.
}
"""
result["CleanupEnabler"].__init__.func_doc = """__init__()
"""
result["FastUpdateList"].__doc__ = """"""
result["FileFormat"].__doc__ = """
Base class for file format implementations.
"""
result["FileFormat"].GetFileExtensions.func_doc = """GetFileExtensions() -> list[str]
Returns a list of extensions that this format supports.
"""
result["FileFormat"].IsSupportedExtension.func_doc = """IsSupportedExtension(extension) -> bool
Returns true if ``extension`` matches one of the extensions returned
by GetFileExtensions.
Parameters
----------
extension : str
"""
result["FileFormat"].IsPackage.func_doc = """IsPackage() -> bool
Returns true if this file format is a package containing other assets.
"""
result["FileFormat"].CanRead.func_doc = """CanRead(file) -> bool
Returns true if ``file`` can be read by this format.
Parameters
----------
file : str
"""
result["FileFormat"].GetFileExtension.func_doc = """**classmethod** GetFileExtension(s) -> str
Returns the file extension for path or file name ``s`` , without the
leading dot character.
Parameters
----------
s : str
"""
result["FileFormat"].FindAllFileFormatExtensions.func_doc = """**classmethod** FindAllFileFormatExtensions() -> set[str]
Returns a set containing the extension(s) corresponding to all
registered file formats.
"""
result["FileFormat"].FindById.func_doc = """**classmethod** FindById(formatId) -> FileFormat
Returns the file format instance with the specified ``formatId``
identifier.
If a format with a matching identifier is not found, this returns a
null file format pointer.
Parameters
----------
formatId : str
"""
result["FileFormat"].FindByExtension.func_doc = """**classmethod** FindByExtension(path, target) -> FileFormat
Returns the file format instance that supports the extension for
``path`` .
If a format with a matching extension is not found, this returns a
null file format pointer.
An extension may be handled by multiple file formats, but each with a
different target. In such cases, if no ``target`` is specified, the
file format that is registered as the primary plugin will be returned.
Otherwise, the file format whose target matches ``target`` will be
returned.
Parameters
----------
path : str
target : str
----------------------------------------------------------------------
FindByExtension(path, args) -> FileFormat
Returns a file format instance that supports the extension for
``path`` and whose target matches one of those specified by the given
``args`` .
If the ``args`` specify no target, then the file format that is
registered as the primary plugin will be returned. If a format with a
matching extension is not found, this returns a null file format
pointer.
Parameters
----------
path : str
args : FileFormatArguments
"""
result["Layer"].__doc__ = """
A scene description container that can combine with other such
containers to form simple component assets, and successively larger
aggregates. The contents of an SdfLayer adhere to the SdfData data
model. A layer can be ephemeral, or be an asset accessed and
serialized through the ArAsset and ArResolver interfaces.
The SdfLayer class provides a consistent API for accessing and
serializing scene description, using any data store provided by Ar
plugins. Sdf itself provides a UTF-8 text format for layers identified
by the".sdf"identifier extension, but via the SdfFileFormat
abstraction, allows downstream modules and plugins to adapt arbitrary
data formats to the SdfData/SdfLayer model.
The FindOrOpen() method returns a new SdfLayer object with scene
description from any supported asset format. Once read, a layer
remembers which asset it was read from. The Save() method saves the
layer back out to the original asset. You can use the Export() method
to write the layer to a different location. You can use the
GetIdentifier() method to get the layer's Id or GetRealPath() to get
the resolved, full URI.
Layers can have a timeCode range (startTimeCode and endTimeCode). This
range represents the suggested playback range, but has no impact on
the extent of the animation data that may be stored in the layer. The
metadatum"timeCodesPerSecond"is used to annotate how the time ordinate
for samples contained in the file scales to seconds. For example, if
timeCodesPerSecond is 24, then a sample at time ordinate 24 should be
viewed exactly one second after the sample at time ordinate 0.
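A minimal sketch of opening a layer and exporting a copy in Python (the asset paths are hypothetical):
.. code-block:: python

    from pxr import Sdf

    layer = Sdf.Layer.FindOrOpen('shot.usda')
    if layer:
        print(layer.identifier, layer.realPath)
        layer.Export('/tmp/shot_copy.usda')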
"""
result["Layer"].GetFileFormat.func_doc = """GetFileFormat() -> FileFormat
Returns the file format used by this layer.
"""
result["Layer"].GetFileFormatArguments.func_doc = """GetFileFormatArguments() -> FileFormatArguments
Returns the file format-specific arguments used during the
construction of this layer.
"""
result["Layer"].StreamsData.func_doc = """StreamsData() -> bool
Returns true if this layer streams data from its serialized data store
on demand, false otherwise.
Layers with streaming data are treated differently to avoid pulling in
data unnecessarily. For example, reloading a streaming layer will not
perform fine-grained change notification, since doing so would require
the full contents of the layer to be loaded.
"""
result["Layer"].IsDetached.func_doc = """IsDetached() -> bool
Returns true if this layer is detached from its serialized data store,
false otherwise.
Detached layers are isolated from external changes to their serialized
data.
"""
result["Layer"].TransferContent.func_doc = """TransferContent(layer) -> None
Copies the content of the given layer into this layer.
Source layer is unmodified.
Parameters
----------
layer : Layer
"""
result["Layer"].CreateNew.func_doc = """**classmethod** CreateNew(identifier, args) -> Layer
Creates a new empty layer with the given identifier.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
identifier : str
args : FileFormatArguments
----------------------------------------------------------------------
CreateNew(fileFormat, identifier, args) -> Layer
Creates a new empty layer with the given identifier for a given file
format class.
This function has the same behavior as the other CreateNew function,
but uses the explicitly-specified ``fileFormat`` instead of attempting
to discern the format from ``identifier`` .
Parameters
----------
fileFormat : FileFormat
identifier : str
args : FileFormatArguments
"""
result["Layer"].New.func_doc = """**classmethod** New(fileFormat, identifier, args) -> Layer
Creates a new empty layer with the given identifier for a given file
format class.
The new layer will not be dirty and will not be saved.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
fileFormat : FileFormat
identifier : str
args : FileFormatArguments
"""
result["Layer"].FindOrOpen.func_doc = """**classmethod** FindOrOpen(identifier, args) -> Layer
Return an existing layer with the given ``identifier`` and ``args`` ,
or else load it.
If the layer can't be found or loaded, an error is posted and a null
layer is returned.
Arguments in ``args`` will override any arguments specified in
``identifier`` .
Parameters
----------
identifier : str
args : FileFormatArguments
"""
result["Layer"].FindOrOpenRelativeToLayer.func_doc = """**classmethod** FindOrOpenRelativeToLayer(anchor, identifier, args) -> Layer
Return an existing layer with the given ``identifier`` and ``args`` ,
or else load it.
The given ``identifier`` will be resolved relative to the ``anchor``
layer. If the layer can't be found or loaded, an error is posted and a
null layer is returned.
If the ``anchor`` layer is invalid, issues a coding error and returns
a null handle.
Arguments in ``args`` will override any arguments specified in
``identifier`` .
Parameters
----------
anchor : Layer
identifier : str
args : FileFormatArguments
"""
result["Layer"].OpenAsAnonymous.func_doc = """**classmethod** OpenAsAnonymous(layerPath, metadataOnly, tag) -> Layer
Load the given layer from disk as a new anonymous layer.
If the layer can't be found or loaded, an error is posted and a null
layer is returned.
The anonymous layer does not retain any knowledge of the backing file
on the filesystem.
``metadataOnly`` is a flag that asks for only the layer metadata to be
read in, which can be much faster if that is all that is required.
Note that this is just a hint: some FileFormat readers may disregard
this flag and still fully populate the layer contents.
An optional ``tag`` may be specified. See CreateAnonymous for details.
Parameters
----------
layerPath : str
metadataOnly : bool
tag : str
"""
result["Layer"].CreateAnonymous.func_doc = """**classmethod** CreateAnonymous(tag, args) -> Layer
Creates a new *anonymous* layer with an optional ``tag`` .
An anonymous layer is a layer with a system assigned identifier, that
cannot be saved to disk via Save() . Anonymous layers have an
identifier, but no real path or other asset information fields.
Anonymous layers may be tagged, which can be done to aid debugging
subsystems that make use of anonymous layers. The tag becomes the
display name of an anonymous layer, and is also included in the
generated identifier. Untagged anonymous layers have an empty display
name.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
tag : str
args : FileFormatArguments
----------------------------------------------------------------------
CreateAnonymous(tag, format, args) -> Layer
Create an anonymous layer with a specific ``format`` .
Parameters
----------
tag : str
format : FileFormat
args : FileFormatArguments
"""
result["Layer"].IsAnonymousLayerIdentifier.func_doc = """**classmethod** IsAnonymousLayerIdentifier(identifier) -> bool
Returns true if the ``identifier`` is an anonymous layer unique
identifier.
Parameters
----------
identifier : str
"""
result["Layer"].GetDisplayNameFromIdentifier.func_doc = """**classmethod** GetDisplayNameFromIdentifier(identifier) -> str
Returns the display name for the given ``identifier`` , using the same
rules as GetDisplayName.
Parameters
----------
identifier : str
"""
result["Layer"].Save.func_doc = """Save(force) -> bool
Returns ``true`` if successful, ``false`` if an error occurred.
Returns ``false`` if the layer has no remembered file name or the
layer type cannot be saved. The layer will not be overwritten if the
file exists and the layer is not dirty unless ``force`` is true.
Parameters
----------
force : bool
"""
result["Layer"].Export.func_doc = """Export(filename, comment, args) -> bool
Exports this layer to a file.
Returns ``true`` if successful, ``false`` if an error occurred.
If ``comment`` is not empty, the layer gets exported with the given
comment. Additional arguments may be supplied via the ``args``
parameter. These arguments may control behavior specific to the
exported layer's file format.
Note that the file name or comment of the original layer is not
updated. This only saves a copy of the layer to the given filename.
Subsequent calls to Save() will still save the layer to it's
previously remembered file name.
Parameters
----------
filename : str
comment : str
args : FileFormatArguments
"""
result["Layer"].ImportFromString.func_doc = """ImportFromString(string) -> bool
Reads this layer from the given string.
Returns ``true`` if successful, otherwise returns ``false`` .
Parameters
----------
string : str
"""
result["Layer"].Clear.func_doc = """Clear() -> None
Clears the layer of all content.
This restores the layer to a state as if it had just been created with
CreateNew() . This operation is Undo-able.
The fileName and whether journaling is enabled are not affected by
this method.
"""
result["Layer"].Reload.func_doc = """Reload(force) -> bool
Reloads the layer from its persistent representation.
This restores the layer to a state as if it had just been created with
FindOrOpen() . This operation is Undo-able.
The fileName and whether journaling is enabled are not affected by
this method.
When called with force = false (the default), Reload attempts to avoid
reloading layers that have not changed on disk. It does so by
comparing the file's modification time (mtime) to when the file was
loaded. If the layer has unsaved modifications, this mechanism is not
used, and the layer is reloaded from disk. If the layer has any
external asset dependencies their modification state will also be
consulted when determining if the layer needs to be reloaded.
Passing true to the ``force`` parameter overrides this behavior,
forcing the layer to be reloaded from disk regardless of whether it
has changed.
Parameters
----------
force : bool
"""
result["Layer"].Import.func_doc = """Import(layerPath) -> bool
Imports the content of the given layer path, replacing the content of
the current layer.
Note: If the layer path is the same as the current layer's real path,
no action is taken (and a warning occurs). For this case use Reload()
.
Parameters
----------
layerPath : str
"""
result["Layer"].ReloadLayers.func_doc = """**classmethod** ReloadLayers(layers, force) -> bool
Reloads the specified layers.
Returns ``false`` if one or more layers failed to reload.
See ``Reload()`` for a description of the ``force`` flag.
Parameters
----------
layers : set[Layer]
force : bool
"""
result["Layer"].UpdateExternalReference.func_doc = """UpdateExternalReference(oldAssetPath, newAssetPath) -> bool
Deprecated
Use UpdateCompositionAssetDependency instead.
Parameters
----------
oldAssetPath : str
newAssetPath : str
"""
result["Layer"].GetCompositionAssetDependencies.func_doc = """GetCompositionAssetDependencies() -> set[str]
Return paths of all assets this layer depends on due to composition
fields.
This includes the paths of all layers referred to by reference,
payload, and sublayer fields in this layer. This function only returns
direct composition dependencies of this layer, i.e. it does not
recurse to find composition dependencies from its dependent layer
assets.
"""
result["Layer"].UpdateCompositionAssetDependency.func_doc = """UpdateCompositionAssetDependency(oldAssetPath, newAssetPath) -> bool
Updates the asset path of a composition dependency in this layer.
If ``newAssetPath`` is supplied, the update works as a "rename", updating
any occurrence of ``oldAssetPath`` to ``newAssetPath`` in all
reference, payload, and sublayer fields.
If ``newAssetPath`` is not given, this update behaves as a "delete",
removing all occurrences of ``oldAssetPath`` from all reference,
payload, and sublayer fields.
Parameters
----------
oldAssetPath : str
newAssetPath : str
"""
result["Layer"].GetExternalAssetDependencies.func_doc = """GetExternalAssetDependencies() -> set[str]
Returns a set of resolved paths to all external asset dependencies the
layer needs to generate its contents.
These are additional asset dependencies that are determined by the
layer's file format and will be consulted during Reload() when
determining if the layer needs to be reloaded. This specifically does
not include dependencies related to composition, i.e. this will not
include assets from references, payloads, and sublayers.
"""
result["Layer"].UpdateAssetInfo.func_doc = """UpdateAssetInfo() -> None
Update layer asset information.
Calling this method re-resolves the layer identifier, which updates
asset information such as the layer's resolved path and other asset
info. This may be used to update the layer after external changes to
the underlying asset system.
"""
result["Layer"].GetDisplayName.func_doc = """GetDisplayName() -> str
Returns the layer's display name.
The display name is the base filename of the identifier.
"""
result["Layer"].GetAssetName.func_doc = """GetAssetName() -> str
Returns the asset name associated with this layer.
"""
result["Layer"].GetAssetInfo.func_doc = """GetAssetInfo() -> VtValue
Returns resolve information from the last time the layer identifier
was resolved.
"""
result["Layer"].ComputeAbsolutePath.func_doc = """ComputeAbsolutePath(assetPath) -> str
Returns the path to the asset specified by ``assetPath`` using this
layer to anchor the path if necessary.
Returns ``assetPath`` if it's empty or an anonymous layer identifier.
This method can be used on asset paths that are authored in this layer
to create new asset paths that can be copied to other layers. These
new asset paths should refer to the same assets as the original asset
paths. For example, if the underlying ArResolver is filesystem-based
and ``assetPath`` is a relative filesystem path, this method might
return the absolute filesystem path using this layer's location as the
anchor.
The returned path should in general not be assumed to be an absolute
filesystem path or any other specific form. It is "absolute" in that it
should resolve to the same asset regardless of what layer it's
authored in.
Parameters
----------
assetPath : str
"""
result["Layer"].SplitIdentifier.func_doc = """**classmethod** SplitIdentifier(identifier, layerPath, arguments) -> bool
Splits the given layer identifier into its constituent layer path and
arguments.
Parameters
----------
identifier : str
layerPath : str
arguments : FileFormatArguments
"""
result["Layer"].CreateIdentifier.func_doc = """**classmethod** CreateIdentifier(layerPath, arguments) -> str
Joins the given layer path and arguments into an identifier.
Parameters
----------
layerPath : str
arguments : FileFormatArguments
"""
result["Layer"].Traverse.func_doc = """Traverse(path, func) -> None
Parameters
----------
path : Path
func : TraversalFunction
"""
result["Layer"].HasColorConfiguration.func_doc = """HasColorConfiguration() -> bool
Returns true if color configuration metadata is set in this layer.
GetColorConfiguration() , SetColorConfiguration()
"""
result["Layer"].ClearColorConfiguration.func_doc = """ClearColorConfiguration() -> None
Clears the color configuration metadata authored in this layer.
HasColorConfiguration() , SetColorConfiguration()
"""
result["Layer"].HasColorManagementSystem.func_doc = """HasColorManagementSystem() -> bool
Returns true if colorManagementSystem metadata is set in this layer.
GetColorManagementSystem() , SetColorManagementSystem()
"""
result["Layer"].ClearColorManagementSystem.func_doc = """ClearColorManagementSystem() -> None
Clears the 'colorManagementSystem' metadata authored in this layer.
HasColorManagementSystem(), SetColorManagementSystem()
"""
result["Layer"].ClearDefaultPrim.func_doc = """ClearDefaultPrim() -> None
Clear the default prim metadata for this layer.
See GetDefaultPrim() and SetDefaultPrim() .
"""
result["Layer"].HasDefaultPrim.func_doc = """HasDefaultPrim() -> bool
Return true if the default prim metadata is set in this layer.
See GetDefaultPrim() and SetDefaultPrim() .
"""
result["Layer"].HasStartTimeCode.func_doc = """HasStartTimeCode() -> bool
Returns true if the layer has a startTimeCode opinion.
"""
result["Layer"].ClearStartTimeCode.func_doc = """ClearStartTimeCode() -> None
Clear the startTimeCode opinion.
"""
result["Layer"].HasEndTimeCode.func_doc = """HasEndTimeCode() -> bool
Returns true if the layer has an endTimeCode opinion.
"""
result["Layer"].ClearEndTimeCode.func_doc = """ClearEndTimeCode() -> None
Clear the endTimeCode opinion.
"""
result["Layer"].HasTimeCodesPerSecond.func_doc = """HasTimeCodesPerSecond() -> bool
Returns true if the layer has a timeCodesPerSecond opinion.
"""
result["Layer"].ClearTimeCodesPerSecond.func_doc = """ClearTimeCodesPerSecond() -> None
Clear the timeCodesPerSecond opinion.
"""
result["Layer"].HasFramesPerSecond.func_doc = """HasFramesPerSecond() -> bool
Returns true if the layer has a frames per second opinion.
"""
result["Layer"].ClearFramesPerSecond.func_doc = """ClearFramesPerSecond() -> None
Clear the framesPerSecond opinion.
"""
result["Layer"].HasFramePrecision.func_doc = """HasFramePrecision() -> bool
Returns true if the layer has a frames precision opinion.
"""
result["Layer"].ClearFramePrecision.func_doc = """ClearFramePrecision() -> None
Clear the framePrecision opinion.
"""
result["Layer"].HasOwner.func_doc = """HasOwner() -> bool
Returns true if the layer has an owner opinion.
"""
result["Layer"].ClearOwner.func_doc = """ClearOwner() -> None
Clear the owner opinion.
"""
result["Layer"].HasSessionOwner.func_doc = """HasSessionOwner() -> bool
Returns true if the layer has a session owner opinion.
"""
result["Layer"].ClearSessionOwner.func_doc = """ClearSessionOwner() -> None
"""
result["Layer"].HasCustomLayerData.func_doc = """HasCustomLayerData() -> bool
Returns true if CustomLayerData is authored on the layer.
"""
result["Layer"].ClearCustomLayerData.func_doc = """ClearCustomLayerData() -> None
Clears out the CustomLayerData dictionary associated with this layer.
"""
result["Layer"].ScheduleRemoveIfInert.func_doc = """ScheduleRemoveIfInert(spec) -> None
Cause ``spec`` to be removed if it no longer affects the scene when
the last change block is closed, or now if there are no change blocks.
Parameters
----------
spec : Spec
"""
result["Layer"].RemoveInertSceneDescription.func_doc = """RemoveInertSceneDescription() -> None
Removes all scene description in this layer that does not affect the
scene.
This method walks the layer namespace hierarchy and removes any prims
and that are not contributing any opinions.
"""
result["Layer"].ApplyRootPrimOrder.func_doc = """ApplyRootPrimOrder(vec) -> None
Reorders the given list of prim names according to the reorder
rootPrims statement for this layer.
This routine employs the standard list editing operations for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
result["Layer"].SetDetachedLayerRules.func_doc = """**classmethod** SetDetachedLayerRules(mask) -> None
Sets the rules specifying detached layers.
Newly-created or opened layers whose identifiers are included in
``rules`` will be opened as detached layers. Existing layers that are
now included or no longer included will be reloaded. Any unsaved
modifications to those layers will be lost.
This function is not thread-safe. It may not be run concurrently with
any other functions that open, close, or read from any layers.
The detached layer rules are initially set to exclude all layers. This
may be overridden by setting the environment variables
SDF_LAYER_INCLUDE_DETACHED and SDF_LAYER_EXCLUDE_DETACHED to specify
the initial set of include and exclude patterns in the rules. These
variables can be set to a comma-delimited list of patterns.
SDF_LAYER_INCLUDE_DETACHED may also be set to "\\*" to include all
layers. Note that these environment variables only set the initial
state of the detached layer rules; these values may be overwritten by
subsequent calls to this function.
See SdfLayer::DetachedLayerRules::IsIncluded for details on how the
rules are applied to layer identifiers.
Parameters
----------
mask : DetachedLayerRules
"""
result["Layer"].GetDetachedLayerRules.func_doc = """**classmethod** GetDetachedLayerRules() -> DetachedLayerRules
Returns the current rules for the detached layer set.
"""
result["Layer"].IsIncludedByDetachedLayerRules.func_doc = """**classmethod** IsIncludedByDetachedLayerRules(identifier) -> bool
Returns whether the given layer identifier is included in the current
rules for the detached layer set.
This is equivalent to GetDetachedLayerRules() .IsIncluded(identifier).
Parameters
----------
identifier : str
"""
result["Layer"].IsMuted.func_doc = """**classmethod** IsMuted() -> bool
Returns ``true`` if the current layer is muted.
----------------------------------------------------------------------
IsMuted(path) -> bool
Returns ``true`` if the specified layer path is muted.
Parameters
----------
path : str
"""
result["Layer"].SetMuted.func_doc = """SetMuted(muted) -> None
Mutes the current layer if ``muted`` is ``true`` , and unmutes it
otherwise.
Parameters
----------
muted : bool
"""
result["Layer"].AddToMutedLayers.func_doc = """**classmethod** AddToMutedLayers(mutedPath) -> None
Add the specified path to the muted layers set.
Parameters
----------
mutedPath : str
"""
result["Layer"].RemoveFromMutedLayers.func_doc = """**classmethod** RemoveFromMutedLayers(mutedPath) -> None
Remove the specified path from the muted layers set.
Parameters
----------
mutedPath : str
"""
result["Layer"].GetObjectAtPath.func_doc = """GetObjectAtPath(path) -> Spec
Returns the object at the given ``path`` .
There is no distinction between an absolute and relative path at the
SdfLayer level.
Returns ``None`` if there is no object at ``path`` .
Parameters
----------
path : Path
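For illustration, a small sketch using a hypothetical in-memory layer;
the typed variants (GetPrimAtPath, GetAttributeAtPath, and so on)
behave the same way but return more specific spec types:
.. code-block:: python
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    prim = Sdf.CreatePrimInLayer(layer, "/World/Cube")
    attr = Sdf.AttributeSpec(prim, "size", Sdf.ValueTypeNames.Double)
    assert layer.GetObjectAtPath("/World/Cube.size") is not None
    assert layer.GetPrimAtPath("/World/Cube") == prim
    assert layer.GetAttributeAtPath("/World/Cube.size") == attr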
"""
result["Layer"].GetPrimAtPath.func_doc = """GetPrimAtPath(path) -> PrimSpec
Returns the prim at the given ``path`` .
Returns ``None`` if there is no prim at ``path`` . This is simply a
more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
result["Layer"].GetPropertyAtPath.func_doc = """GetPropertyAtPath(path) -> PropertySpec
Returns a property at the given ``path`` .
Returns ``None`` if there is no property at ``path`` . This is simply
a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
result["Layer"].GetAttributeAtPath.func_doc = """GetAttributeAtPath(path) -> AttributeSpec
Returns an attribute at the given ``path`` .
Returns ``None`` if there is no attribute at ``path`` . This is simply
a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
result["Layer"].GetRelationshipAtPath.func_doc = """GetRelationshipAtPath(path) -> RelationshipSpec
Returns a relationship at the given ``path`` .
Returns ``None`` if there is no relationship at ``path`` . This is
simply a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
result["Layer"].SetPermissionToEdit.func_doc = """SetPermissionToEdit(allow) -> None
Sets permission to edit.
Parameters
----------
allow : bool
"""
result["Layer"].SetPermissionToSave.func_doc = """SetPermissionToSave(allow) -> None
Sets permission to save.
Parameters
----------
allow : bool
"""
result["Layer"].CanApply.func_doc = """CanApply(arg1, details) -> NamespaceEditDetail.Result
Check if a batch of namespace edits will succeed.
This returns ``SdfNamespaceEditDetail::Okay`` if they will succeed as
a batch, ``SdfNamespaceEditDetail::Unbatched`` if the edits will
succeed but will be applied unbatched, and
``SdfNamespaceEditDetail::Error`` if they will not succeed. No edits
will be performed in any case.
If ``details`` is not ``None`` and the method does not return ``Okay``
then details about the problems will be appended to ``details`` . A
problem may cause the method to return early, so ``details`` may not
list every problem.
Note that Sdf does not track backpointers so it's unable to fix up
targets/connections to namespace edited objects. Clients must fix
those to prevent them from falling off. In addition, this method will
report failure if any relational attribute with a target to a
namespace edited object is subsequently edited (in the same batch).
Clients should perform edits on relational attributes first.
Clients may wish to report unbatch details to the user to confirm that
the edits should be applied unbatched. This will give the user a
chance to correct any problems that cause batching to fail and try
again.
Parameters
----------
arg1 : BatchNamespaceEdit
details : list[SdfNamespaceEditDetail]
"""
result["Layer"].Apply.func_doc = """Apply(arg1) -> bool
Performs a batch of namespace edits.
Returns ``true`` on success and ``false`` on failure. On failure, no
namespace edits will have occurred.
Parameters
----------
arg1 : BatchNamespaceEdit
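A brief sketch of a batched rename on a hypothetical layer (the prim
names are placeholders):
.. code-block:: python
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    Sdf.CreatePrimInLayer(layer, "/Old")
    edit = Sdf.BatchNamespaceEdit()
    edit.Add(Sdf.NamespaceEdit.Rename(Sdf.Path("/Old"), "New"))
    if layer.Apply(edit):
        assert layer.GetPrimAtPath("/New") is not None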
"""
result["Layer"].ListAllTimeSamples.func_doc = """ListAllTimeSamples() -> set[float]
"""
result["Layer"].ListTimeSamplesForPath.func_doc = """ListTimeSamplesForPath(path) -> set[float]
Parameters
----------
path : Path
"""
result["Layer"].GetBracketingTimeSamples.func_doc = """GetBracketingTimeSamples(time, tLower, tUpper) -> bool
Parameters
----------
time : float
tLower : float
tUpper : float
"""
result["Layer"].GetNumTimeSamplesForPath.func_doc = """GetNumTimeSamplesForPath(path) -> int
Parameters
----------
path : Path
"""
result["Layer"].GetBracketingTimeSamplesForPath.func_doc = """GetBracketingTimeSamplesForPath(path, time, tLower, tUpper) -> bool
Parameters
----------
path : Path
time : float
tLower : float
tUpper : float
"""
result["Layer"].QueryTimeSample.func_doc = """QueryTimeSample(path, time, value) -> bool
Parameters
----------
path : Path
time : float
value : VtValue
----------------------------------------------------------------------
QueryTimeSample(path, time, value) -> bool
Parameters
----------
path : Path
time : float
value : SdfAbstractDataValue
----------------------------------------------------------------------
QueryTimeSample(path, time, data) -> bool
Parameters
----------
path : Path
time : float
data : T
"""
result["Layer"].SetTimeSample.func_doc = """SetTimeSample(path, time, value) -> None
Parameters
----------
path : Path
time : float
value : VtValue
----------------------------------------------------------------------
SetTimeSample(path, time, value) -> None
Parameters
----------
path : Path
time : float
value : SdfAbstractDataConstValue
----------------------------------------------------------------------
SetTimeSample(path, time, value) -> None
Parameters
----------
path : Path
time : float
value : T
"""
result["Layer"].EraseTimeSample.func_doc = """EraseTimeSample(path, time) -> None
Parameters
----------
path : Path
time : float
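A short sketch of the time sample accessors on a hypothetical
attribute (the names and values are placeholders):
.. code-block:: python
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    prim = Sdf.CreatePrimInLayer(layer, "/World")
    attr = Sdf.AttributeSpec(prim, "height", Sdf.ValueTypeNames.Double)
    layer.SetTimeSample(attr.path, 1.0, 5.0)
    layer.SetTimeSample(attr.path, 10.0, 25.0)
    assert 1.0 in layer.ListTimeSamplesForPath(attr.path)
    layer.EraseTimeSample(attr.path, 10.0)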
"""
result["LayerOffset"].__doc__ = """
Represents a time offset and scale between layers.
The SdfLayerOffset class is an affine transform, providing both a
scale and a translate. It supports vector algebra semantics for
composing SdfLayerOffsets together via multiplication. The
SdfLayerOffset class is unitless: it does not refer to seconds or
frames.
For example, suppose layer A uses layer B, with an offset of X: when
bringing animation from B into A, you first apply the scale of X, and
then the offset. Suppose you have a scale of 2 and an offset of 24:
first multiply B's frame numbers by 2, and then add 24. The animation
from B as seen in A will take twice as long and start 24 frames later.
Offsets are typically used in either sublayers or prim references. For
more information, see the SetSubLayerOffset() method of the SdfLayer
class (the subLayerOffsets property in Python), as well as the
SetReference() and GetReferenceLayerOffset() methods (the latter is
the referenceLayerOffset property in Python) of the SdfPrimSpec class.
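As a small sketch of these semantics (the values are arbitrary):
.. code-block:: python
    from pxr import Sdf
    off = Sdf.LayerOffset(24.0, 2.0)   # offset 24, scale 2
    inv = off.GetInverse()
    assert not off.IsIdentity()
    assert (off * inv).IsIdentity()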
"""
result["LayerOffset"].__init__.func_doc = """__init__(offset, scale)
Constructs a new SdfLayerOffset instance.
Parameters
----------
offset : float
scale : float
"""
result["LayerOffset"].IsIdentity.func_doc = """IsIdentity() -> bool
Returns ``true`` if this is an identity transformation, with an offset
of 0.0 and a scale of 1.0.
"""
result["LayerOffset"].GetInverse.func_doc = """GetInverse() -> LayerOffset
Gets the inverse offset, which performs the opposite transformation.
"""
result["LayerTree"].__doc__ = """
A SdfLayerTree is an immutable tree structure representing a sublayer
stack and its recursive structure.
Layers can have sublayers, which can in turn have sublayers of their
own. Clients that want to represent that hierarchical structure in
memory can build a SdfLayerTree for that purpose.
We use TfRefPtr<SdfLayerTree> as handles to LayerTrees, as a simple
way to pass them around as immutable trees without worrying about
lifetime.
"""
result["LayerTree"].__init__.func_doc = """__init__(layer, childTrees, cumulativeOffset)
Parameters
----------
layer : Layer
childTrees : list[SdfLayerTreeHandle]
cumulativeOffset : LayerOffset
"""
result["NamespaceEdit"].__doc__ = """
A single namespace edit. It supports renaming, reparenting,
reparenting with a rename, reordering, and removal.
"""
result["NamespaceEdit"].__init__.func_doc = """__init__()
The default edit maps the empty path to the empty path.
----------------------------------------------------------------------
__init__(currentPath_, newPath_, index_)
The fully general edit.
Parameters
----------
currentPath_ : Path
newPath_ : Path
index_ : Index
"""
result["NamespaceEdit"].Remove.func_doc = """**classmethod** Remove(currentPath) -> This
Returns a namespace edit that removes the object at ``currentPath`` .
Parameters
----------
currentPath : Path
"""
result["NamespaceEdit"].Rename.func_doc = """**classmethod** Rename(currentPath, name) -> This
Returns a namespace edit that renames the prim or property at
``currentPath`` to ``name`` .
Parameters
----------
currentPath : Path
name : str
"""
result["NamespaceEdit"].Reorder.func_doc = """**classmethod** Reorder(currentPath, index) -> This
Returns a namespace edit to reorder the prim or property at
``currentPath`` to index ``index`` .
Parameters
----------
currentPath : Path
index : Index
"""
result["NamespaceEdit"].Reparent.func_doc = """**classmethod** Reparent(currentPath, newParentPath, index) -> This
Returns a namespace edit to reparent the prim or property at
``currentPath`` to be under ``newParentPath`` at index ``index`` .
Parameters
----------
currentPath : Path
newParentPath : Path
index : Index
"""
result["NamespaceEdit"].ReparentAndRename.func_doc = """**classmethod** ReparentAndRename(currentPath, newParentPath, name, index) -> This
Returns a namespace edit to reparent the prim or property at
``currentPath`` to be under ``newParentPath`` at index ``index`` with
the name ``name`` .
Parameters
----------
currentPath : Path
newParentPath : Path
name : str
index : Index
"""
result["NamespaceEditDetail"].__doc__ = """
Detailed information about a namespace edit.
"""
result["NamespaceEditDetail"].Result.__doc__ = """
Validity of an edit.
"""
result["NamespaceEditDetail"].__init__.func_doc = """__init__()
----------------------------------------------------------------------
__init__(arg1, edit, reason)
Parameters
----------
arg1 : Result
edit : NamespaceEdit
reason : str
"""
result["Notice"].__doc__ = """
Wrapper class for Sdf notices.
"""
result["Path"].__doc__ = """
A path value used to locate objects in layers or scenegraphs.
Overview
========
SdfPath is used in several ways:
- As a storage key for addressing and accessing values held in a
SdfLayer
- As a namespace identity for scenegraph objects
- As a way to refer to other scenegraph objects through relative
paths
The paths represented by an SdfPath class may be either relative or
absolute. Relative paths are relative to the prim object that contains
them (that is, if an SdfRelationshipSpec target is relative, it is
relative to the SdfPrimSpec object that owns the SdfRelationshipSpec
object).
SdfPath objects can be readily created from and converted back to
strings, but as SdfPath objects, they have behaviors that make it easy
and efficient to work with them. The SdfPath class provides a full
range of methods for manipulating scene paths by appending a namespace
child, appending a relationship target, getting the parent path, and
so on. Since the SdfPath class uses a node-based representation
internally, you should use the editing functions rather than
converting to and from strings if possible.
Path Syntax
===========
Like a filesystem path, an SdfPath is conceptually just a sequence of
path components. Unlike a filesystem path, each component has a type,
and the type is indicated by the syntax.
Two separators are used between parts of a path. A slash ("/")
following an identifier is used to introduce a namespace child. A
period (".") following an identifier is used to introduce a property.
A property may also have several non-sequential colons (':') in its
name to provide a rudimentary namespace within properties but may not
end or begin with a colon.
A leading slash in the string representation of an SdfPath object
indicates an absolute path. Two adjacent periods indicate the parent
namespace.
Brackets ("[" and "]") are used to indicate relationship target paths
for relational attributes.
The first part in a path is assumed to be a namespace child unless it
is preceded by a period. That means:
- ``/Foo`` is an absolute path specifying the root prim Foo.
- ``/Foo/Bar`` is an absolute path specifying namespace child Bar
of root prim Foo.
- ``/Foo/Bar.baz`` is an absolute path specifying property ``baz``
of namespace child Bar of root prim Foo.
- ``Foo`` is a relative path specifying namespace child Foo of the
current prim.
- ``Foo/Bar`` is a relative path specifying namespace child Bar of
namespace child Foo of the current prim.
- ``Foo/Bar.baz`` is a relative path specifying property ``baz`` of
namespace child Bar of namespace child Foo of the current prim.
- ``.foo`` is a relative path specifying the property ``foo`` of
the current prim.
- ``/Foo.bar[/Foo.baz].attrib`` is a relational attribute path. The
relationship ``/Foo.bar`` has a target ``/Foo.baz`` . There is a
relational attribute ``attrib`` on that relationship->target pair.
A Note on Thread-Safety
=======================
SdfPath is strongly thread-safe, in the sense that zero additional
synchronization is required between threads creating or using SdfPath
values. Just like TfToken, SdfPath values are immutable. Internally,
SdfPath uses a global prefix tree to efficiently share representations
of paths, and provide fast equality/hashing operations, but
modifications to this table are internally synchronized. Consequently,
as with TfToken, for best performance it is important to minimize the
number of values created (since it requires synchronized access to
this table) or copied (since it requires atomic ref-counting
operations).
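A brief sketch of constructing and classifying paths (the names are
placeholders):
.. code-block:: python
    from pxr import Sdf
    prim = Sdf.Path("/World/Cube")
    attr = Sdf.Path("/World/Cube.size")
    assert prim.IsPrimPath()
    assert attr.IsPrimPropertyPath()
    assert Sdf.Path() == Sdf.Path.emptyPath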
"""
result["Path"].__init__.func_doc = """__init__()
Constructs the default, empty path.
----------------------------------------------------------------------
__init__(path)
Creates a path from the given string.
If the given string is not a well-formed path, this will raise a Tf
error. Note that passing an empty std::string() will also raise an
error; the correct way to get the empty path is SdfPath() .
Internal dot-dots will be resolved by removing the first dot-dot, the
element preceding it, and repeating until no internal dot-dots remain.
Note that most often new paths are expected to be created by asking
existing paths to return modified versions of themselves.
Parameters
----------
path : str
----------------------------------------------------------------------
__init__(primNode)
Parameters
----------
primNode : Sdf_PathPrimNode
----------------------------------------------------------------------
__init__(primPart, propPart)
Parameters
----------
primPart : Sdf_PathPrimNode
propPart : Sdf_PathPropNode
----------------------------------------------------------------------
__init__(primPart, propPart)
Parameters
----------
primPart : Sdf_PathNode
propPart : Sdf_PathNode
"""
result["Path"].IsAbsolutePath.func_doc = """IsAbsolutePath() -> bool
Returns whether the path is absolute.
"""
result["Path"].IsAbsoluteRootPath.func_doc = """IsAbsoluteRootPath() -> bool
Return true if this path is the AbsoluteRootPath() .
"""
result["Path"].IsPrimPath.func_doc = """IsPrimPath() -> bool
Returns whether the path identifies a prim.
"""
result["Path"].IsAbsoluteRootOrPrimPath.func_doc = """IsAbsoluteRootOrPrimPath() -> bool
Returns whether the path identifies a prim or the absolute root.
"""
result["Path"].IsRootPrimPath.func_doc = """IsRootPrimPath() -> bool
Returns whether the path identifies a root prim.
The path must be absolute and have a single element (for example
``/foo`` ).
"""
result["Path"].IsPropertyPath.func_doc = """IsPropertyPath() -> bool
Returns whether the path identifies a property.
A relational attribute is considered to be a property, so this method
will return true for relational attributes as well as properties of
prims.
"""
result["Path"].IsPrimPropertyPath.func_doc = """IsPrimPropertyPath() -> bool
Returns whether the path identifies a prim's property.
A relational attribute is not a prim property.
"""
result["Path"].IsNamespacedPropertyPath.func_doc = """IsNamespacedPropertyPath() -> bool
Returns whether the path identifies a namespaced property.
A namespaced property has a colon embedded in its name.
"""
result["Path"].IsPrimVariantSelectionPath.func_doc = """IsPrimVariantSelectionPath() -> bool
Returns whether the path identifies a variant selection for a prim.
"""
result["Path"].ContainsPrimVariantSelection.func_doc = """ContainsPrimVariantSelection() -> bool
Returns whether the path or any of its parent paths identifies a
variant selection for a prim.
"""
result["Path"].ContainsPropertyElements.func_doc = """ContainsPropertyElements() -> bool
Return true if this path contains any property elements, false
otherwise.
A false return indicates a prim-like path, specifically a root path, a
prim path, or a prim variant selection path. A true return indicates a
property-like path: a prim property path, a target path, a relational
attribute path, etc.
"""
result["Path"].ContainsTargetPath.func_doc = """ContainsTargetPath() -> bool
Return true if this path is or has a prefix that's a target path or a
mapper path.
"""
result["Path"].IsRelationalAttributePath.func_doc = """IsRelationalAttributePath() -> bool
Returns whether the path identifies a relational attribute.
If this is true, IsPropertyPath() will also be true.
"""
result["Path"].IsTargetPath.func_doc = """IsTargetPath() -> bool
Returns whether the path identifies a relationship or connection
target.
"""
result["Path"].IsMapperPath.func_doc = """IsMapperPath() -> bool
Returns whether the path identifies a connection mapper.
"""
result["Path"].IsMapperArgPath.func_doc = """IsMapperArgPath() -> bool
Returns whether the path identifies a connection mapper arg.
"""
result["Path"].IsExpressionPath.func_doc = """IsExpressionPath() -> bool
Returns whether the path identifies a connection expression.
"""
result["Path"].GetAncestorsRange.func_doc = """GetAncestorsRange() -> SdfPathAncestorsRange
Return a range for iterating over the ancestors of this path.
The range provides iteration over the prefixes of a path, ordered from
longest to shortest (the opposite of the order of the prefixes
returned by GetPrefixes).
"""
result["Path"].ReplaceName.func_doc = """ReplaceName(newName) -> Path
Return a copy of this path with its final component changed to
*newName*.
This path must be a prim or property path.
This method is shorthand for path.GetParentPath().AppendChild(newName)
for prim paths, path.GetParentPath().AppendProperty(newName) for prim
property paths, and
path.GetParentPath().AppendRelationalAttribute(newName) for relational
attribute paths.
Note that only the final path component is ever changed. If the name
of the final path component appears elsewhere in the path, it will not
be modified.
Some examples:
ReplaceName('/chars/MeridaGroup', 'AngusGroup') -> '/chars/AngusGroup'
ReplaceName('/Merida.tx', 'ty') -> '/Merida.ty'
ReplaceName('/Merida.tx[targ].tx', 'ty') -> '/Merida.tx[targ].ty'
Parameters
----------
newName : str
"""
result["Path"].GetAllTargetPathsRecursively.func_doc = """GetAllTargetPathsRecursively(result) -> None
Returns all the relationship target or connection target paths
contained in this path, and recursively all the target paths contained
in those target paths in reverse depth-first order.
For example, given the path '/A/B.a[/C/D.a[/E/F.a]].a[/A/B.a[/C/D.a]]', this
method produces: '/A/B.a[/C/D.a]', '/C/D.a', '/C/D.a[/E/F.a]', '/E/F.a'
Parameters
----------
result : list[SdfPath]
"""
result["Path"].GetVariantSelection.func_doc = """GetVariantSelection() -> tuple[str, str]
Returns the variant selection for this path, if this is a variant
selection path.
Returns a pair of empty strings if this path is not a variant
selection path.
"""
result["Path"].HasPrefix.func_doc = """HasPrefix(prefix) -> bool
Return true if both this path and *prefix* are not the empty path and
this path has *prefix* as a prefix.
Return false otherwise.
Parameters
----------
prefix : Path
"""
result["Path"].GetParentPath.func_doc = """GetParentPath() -> Path
Return the path that identifies this path's namespace parent.
For a prim path (like '/foo/bar'), return the prim's parent's path
('/foo'). For a prim property path (like '/foo/bar.property'), return
the prim's path ('/foo/bar'). For a target path
(like '/foo/bar.property[/target]') return the property path
('/foo/bar.property'). For a mapper path
(like '/foo/bar.property.mapper[/target]') return the property path
('/foo/bar.property'). For a relational attribute path
(like '/foo/bar.property[/target].relAttr') return the relationship
target's path ('/foo/bar.property[/target]'). For a prim variant
selection path (like '/foo/bar{var=sel}') return the prim path
('/foo/bar'). For a root prim path (like '/rootPrim'), return
AbsoluteRootPath() ('/'). For a single element relative prim path
(like 'relativePrim'), return ReflexiveRelativePath() ('.'). For
ReflexiveRelativePath(), return the relative parent path ('\\.\\.').
Note that the parent path of a relative parent path ('\\.\\.') is a
relative grandparent path ('\\.\\./\\.\\.'). Use caution writing loops
that walk to parent paths since relative paths have infinitely many
ancestors. To more safely traverse ancestor paths, consider iterating
over an SdfPathAncestorsRange instead, as returned by
GetAncestorsRange().
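For illustration (the paths are placeholders):
.. code-block:: python
    from pxr import Sdf
    assert Sdf.Path("/foo/bar.property").GetParentPath() == Sdf.Path("/foo/bar")
    assert Sdf.Path("/foo/bar").GetParentPath() == Sdf.Path("/foo")
    assert Sdf.Path("/foo").GetParentPath() == Sdf.Path.absoluteRootPath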
"""
result["Path"].GetPrimPath.func_doc = """GetPrimPath() -> Path
Creates a path by stripping all relational attributes, targets,
properties, and variant selections from the leafmost prim path,
leaving the nearest path for which *IsPrimPath()* returns true.
See *GetPrimOrPrimVariantSelectionPath* also.
If the path is already a prim path, the same path is returned.
"""
result["Path"].GetPrimOrPrimVariantSelectionPath.func_doc = """GetPrimOrPrimVariantSelectionPath() -> Path
Creates a path by stripping all relational attributes, targets, and
properties, leaving the nearest path for which
*IsPrimOrPrimVariantSelectionPath()* returns true.
See *GetPrimPath* also.
If the path is already a prim or a prim variant selection path, the
same path is returned.
"""
result["Path"].GetAbsoluteRootOrPrimPath.func_doc = """GetAbsoluteRootOrPrimPath() -> Path
Creates a path by stripping all properties and relational attributes
from this path, leaving the path to the containing prim.
If the path is already a prim or absolute root path, the same path is
returned.
"""
result["Path"].StripAllVariantSelections.func_doc = """StripAllVariantSelections() -> Path
Create a path by stripping all variant selections from all components
of this path, leaving a path with no embedded variant selections.
"""
result["Path"].AppendPath.func_doc = """AppendPath(newSuffix) -> Path
Creates a path by appending a given relative path to this path.
If the newSuffix is a prim path, then this path must be a prim path or
a root path.
If the newSuffix is a prim property path, then this path must be a
prim path or the ReflexiveRelativePath.
Parameters
----------
newSuffix : Path
"""
result["Path"].AppendChild.func_doc = """AppendChild(childName) -> Path
Creates a path by appending an element for ``childName`` to this path.
This path must be a prim path, the AbsoluteRootPath or the
ReflexiveRelativePath.
Parameters
----------
childName : str
"""
result["Path"].AppendProperty.func_doc = """AppendProperty(propName) -> Path
Creates a path by appending an element for ``propName`` to this path.
This path must be a prim path or the ReflexiveRelativePath.
Parameters
----------
propName : str
"""
result["Path"].AppendVariantSelection.func_doc = """AppendVariantSelection(variantSet, variant) -> Path
Creates a path by appending an element for ``variantSet`` and
``variant`` to this path.
This path must be a prim path.
Parameters
----------
variantSet : str
variant : str
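A small sketch combining the Append methods on hypothetical names:
.. code-block:: python
    from pxr import Sdf
    prim = Sdf.Path("/World").AppendChild("Cube")            # /World/Cube
    attr = prim.AppendProperty("size")                       # /World/Cube.size
    var = prim.AppendVariantSelection("shading", "full")     # /World/Cube{shading=full}
    assert attr.IsPrimPropertyPath()
    assert var.ContainsPrimVariantSelection()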
"""
result["Path"].AppendTarget.func_doc = """AppendTarget(targetPath) -> Path
Creates a path by appending an element for ``targetPath`` .
This path must be a prim property or relational attribute path.
Parameters
----------
targetPath : Path
"""
result["Path"].AppendRelationalAttribute.func_doc = """AppendRelationalAttribute(attrName) -> Path
Creates a path by appending an element for ``attrName`` to this path.
This path must be a target path.
Parameters
----------
attrName : str
"""
result["Path"].ReplaceTargetPath.func_doc = """ReplaceTargetPath(newTargetPath) -> Path
Replaces the relational attribute's target path.
The path must be a relational attribute path.
Parameters
----------
newTargetPath : Path
"""
result["Path"].AppendMapper.func_doc = """AppendMapper(targetPath) -> Path
Creates a path by appending a mapper element for ``targetPath`` .
This path must be a prim property or relational attribute path.
Parameters
----------
targetPath : Path
"""
result["Path"].AppendMapperArg.func_doc = """AppendMapperArg(argName) -> Path
Creates a path by appending an element for ``argName`` .
This path must be a mapper path.
Parameters
----------
argName : str
"""
result["Path"].AppendExpression.func_doc = """AppendExpression() -> Path
Creates a path by appending an expression element.
This path must be a prim property or relational attribute path.
"""
result["Path"].AppendElementString.func_doc = """AppendElementString(element) -> Path
Creates a path by extracting and appending an element from the given
ascii element encoding.
Attempting to append a root or empty path (or malformed path) or
attempting to append *to* the EmptyPath will raise an error and return
the EmptyPath.
May also fail and return EmptyPath if this path's type cannot possess
a child of the type encoded in ``element`` .
Parameters
----------
element : str
"""
result["Path"].ReplacePrefix.func_doc = """ReplacePrefix(oldPrefix, newPrefix, fixTargetPaths) -> Path
Returns a path with all occurrences of the prefix path ``oldPrefix``
replaced with the prefix path ``newPrefix`` .
If fixTargetPaths is true, any embedded target paths will also have
their paths replaced. This is the default.
If this is not a target, relational attribute or mapper path, this will
do zero or one path prefix replacements; otherwise the number of
replacements can be greater than one.
Parameters
----------
oldPrefix : Path
newPrefix : Path
fixTargetPaths : bool
"""
result["Path"].GetCommonPrefix.func_doc = """GetCommonPrefix(path) -> Path
Returns a path with maximal length that is a prefix path of both this
path and ``path`` .
Parameters
----------
path : Path
"""
result["Path"].RemoveCommonSuffix.func_doc = """RemoveCommonSuffix(otherPath, stopAtRootPrim) -> tuple[Path, Path]
Find and remove the longest common suffix from two paths.
Returns this path and ``otherPath`` with the longest common suffix
removed (first and second, respectively). If the two paths have no
common suffix then the paths are returned as-is. If the paths are
equal then this returns empty paths for relative paths and absolute
roots for absolute paths. The paths need not be the same length.
If ``stopAtRootPrim`` is ``true`` then neither returned path will be
the root path. That, in turn, means that some common suffixes will not
be removed. For example, if ``stopAtRootPrim`` is ``true`` then the
paths /A/B and /B will be returned as is. Were it ``false`` then the
result would be /A and /. Similarly paths /A/B/C and /B/C would return
/A/B and /B if ``stopAtRootPrim`` is ``true`` but /A and / if it's
``false`` .
Parameters
----------
otherPath : Path
stopAtRootPrim : bool
"""
result["Path"].MakeAbsolutePath.func_doc = """MakeAbsolutePath(anchor) -> Path
Returns the absolute form of this path using ``anchor`` as the
relative basis.
``anchor`` must be an absolute prim path.
If this path is a relative path, resolve it using ``anchor`` as the
relative basis.
If this path is already an absolute path, just return a copy.
Parameters
----------
anchor : Path
"""
result["Path"].MakeRelativePath.func_doc = """MakeRelativePath(anchor) -> Path
Returns the relative form of this path using ``anchor`` as the
relative basis.
``anchor`` must be an absolute prim path.
If this path is an absolute path, return the corresponding relative
path that is relative to the absolute path given by ``anchor`` .
If this path is a relative path, return the optimal relative path to
the absolute path given by ``anchor`` . (The optimal relative path
from a given prim path is the relative path with the least leading
dot-dots.)
Parameters
----------
anchor : Path
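For illustration, using a hypothetical anchor prim path:
.. code-block:: python
    from pxr import Sdf
    anchor = Sdf.Path("/World/Group")
    assert Sdf.Path("Cube").MakeAbsolutePath(anchor) == Sdf.Path("/World/Group/Cube")
    assert Sdf.Path("/World/Group/Cube").MakeRelativePath(anchor) == Sdf.Path("Cube")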
"""
result["Path"].IsValidIdentifier.func_doc = """**classmethod** IsValidIdentifier(name) -> bool
Returns whether ``name`` is a legal identifier for any path component.
Parameters
----------
name : str
"""
result["Path"].IsValidNamespacedIdentifier.func_doc = """**classmethod** IsValidNamespacedIdentifier(name) -> bool
Returns whether ``name`` is a legal namespaced identifier.
This returns ``true`` if IsValidIdentifier() does.
Parameters
----------
name : str
"""
result["Path"].TokenizeIdentifier.func_doc = """**classmethod** TokenizeIdentifier(name) -> list[str]
Tokenizes ``name`` by the namespace delimiter.
Returns the empty vector if ``name`` is not a valid namespaced
identifier.
Parameters
----------
name : str
"""
result["Path"].JoinIdentifier.func_doc = """**classmethod** JoinIdentifier(names) -> str
Join ``names`` into a single identifier using the namespace delimiter.
Any empty strings present in ``names`` are ignored when joining.
Parameters
----------
names : list[str]
----------------------------------------------------------------------
JoinIdentifier(names) -> str
Join ``names`` into a single identifier using the namespace delimiter.
Any empty strings present in ``names`` are ignored when joining.
Parameters
----------
names : list[TfToken]
----------------------------------------------------------------------
JoinIdentifier(lhs, rhs) -> str
Join ``lhs`` and ``rhs`` into a single identifier using the namespace
delimiter.
Returns ``lhs`` if ``rhs`` is empty and vice versa. Returns an empty
string if both ``lhs`` and ``rhs`` are empty.
Parameters
----------
lhs : str
rhs : str
----------------------------------------------------------------------
JoinIdentifier(lhs, rhs) -> str
Join ``lhs`` and ``rhs`` into a single identifier using the namespace
delimiter.
Returns ``lhs`` if ``rhs`` is empty and vice versa. Returns an empty
string if both ``lhs`` and ``rhs`` are empty.
Parameters
----------
lhs : str
rhs : str
"""
result["Path"].StripNamespace.func_doc = """**classmethod** StripNamespace(name) -> str
Returns ``name`` stripped of any namespaces.
This does not check the validity of the name; it just attempts to
remove anything that looks like a namespace.
Parameters
----------
name : str
----------------------------------------------------------------------
StripNamespace(name) -> str
Returns ``name`` stripped of any namespaces.
This does not check the validity of the name; it just attempts to
remove anything that looks like a namespace.
Parameters
----------
name : str
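A brief sketch of the namespaced-identifier helpers on a hypothetical
property name:
.. code-block:: python
    from pxr import Sdf
    assert Sdf.Path.IsValidNamespacedIdentifier("ri:surface")
    assert Sdf.Path.JoinIdentifier(["ri", "surface"]) == "ri:surface"
    assert Sdf.Path.TokenizeIdentifier("ri:surface") == ["ri", "surface"]
    assert Sdf.Path.StripNamespace("ri:surface") == "surface"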
"""
result["Path"].StripPrefixNamespace.func_doc = """**classmethod** StripPrefixNamespace(name, matchNamespace) -> tuple[str, bool]
Returns ( ``name`` , ``true`` ) where ``name`` is stripped of the
prefix specified by ``matchNamespace`` if ``name`` indeed starts with
``matchNamespace`` .
Returns ( ``name`` , ``false`` ) otherwise, with ``name`` unmodified.
This function deals with both the case where ``matchNamespace``
contains the trailing namespace delimiter ':' or not.
Parameters
----------
name : str
matchNamespace : str
"""
result["Path"].IsValidPathString.func_doc = """**classmethod** IsValidPathString(pathString, errMsg) -> bool
Return true if ``pathString`` is a valid path string, meaning that
passing the string to the *SdfPath* constructor will result in a
valid, non-empty SdfPath.
Otherwise, return false and if ``errMsg`` is not None, set the
pointed-to string to the parse error.
Parameters
----------
pathString : str
errMsg : str
"""
result["Path"].GetConciseRelativePaths.func_doc = """**classmethod** GetConciseRelativePaths(paths) -> list[SdfPath]
Given some vector of paths, get a vector of concise unambiguous
relative paths.
GetConciseRelativePaths requires a vector of absolute paths. It finds
a set of relative paths such that each relative path is unique.
Parameters
----------
paths : list[SdfPath]
"""
result["Path"].RemoveDescendentPaths.func_doc = """**classmethod** RemoveDescendentPaths(paths) -> None
Remove all elements of *paths* that are prefixed by other elements in
*paths*.
As a side-effect, the result is left in sorted order.
Parameters
----------
paths : list[SdfPath]
"""
result["Path"].RemoveAncestorPaths.func_doc = """**classmethod** RemoveAncestorPaths(paths) -> None
Remove all elements of *paths* that prefix other elements in *paths*.
As a side-effect, the result is left in sorted order.
Parameters
----------
paths : list[SdfPath]
"""
result["Payload"].__doc__ = """
Represents a payload and all its meta data.
A payload represents a prim reference to an external layer. A payload
is similar to a prim reference (see SdfReference) with the major
difference that payloads are explicitly loaded by the user.
Unloaded payloads represent a boundary that lazy composition and
system behaviors will not traverse across, providing a user-visible
way to manage the working set of the scene.
"""
result["Payload"].__init__.func_doc = """__init__(assetPath, primPath, layerOffset)
Create a payload.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
payload's asset path to the empty asset path.
Parameters
----------
assetPath : str
primPath : Path
layerOffset : LayerOffset
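For illustration, a payload built from a hypothetical asset path and
prim path:
.. code-block:: python
    from pxr import Sdf
    payload = Sdf.Payload("./asset.usda", Sdf.Path("/AssetRoot"))
    assert payload.assetPath == "./asset.usda"
    assert payload.primPath == Sdf.Path("/AssetRoot")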
"""
result["PrimSpec"].__doc__ = """
Represents a prim description in an SdfLayer object.
Every SdfPrimSpec object is defined in a layer. It is identified by
its path (SdfPath class) in the namespace hierarchy of its layer.
SdfPrimSpecs can be created using the New() method as children of
either the containing SdfLayer itself (for "root level" prims), or as
children of other SdfPrimSpec objects to extend a hierarchy. The
helper function SdfCreatePrimInLayer() can be used to quickly create a
hierarchy of primSpecs.
SdfPrimSpec objects have properties of two general types: attributes
(containing values) and relationships (different types of connections
to other prims and attributes). Attributes are represented by the
SdfAttributeSpec class and relationships by the SdfRelationshipSpec
class. Each prim has its own namespace of properties. Properties are
stored and accessed by their name.
SdfPrimSpec objects have a typeName, permission restriction, and they
reference and inherit prim paths. Permission restrictions control
which other layers may refer to, or express opinions about a prim. See
the SdfPermission class for more information.
- Insert doc about references and inherits here.
- Should have validate\\.\\.\\. methods for name, children,
properties
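A minimal sketch of authoring a small prim hierarchy in a hypothetical
anonymous layer:
.. code-block:: python
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    world = Sdf.PrimSpec(layer, "World", Sdf.SpecifierDef, "Xform")
    cube = Sdf.PrimSpec(world, "Cube", Sdf.SpecifierDef, "Cube")
    assert layer.GetPrimAtPath("/World/Cube") == cube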
"""
result["PrimSpec"].CanSetName.func_doc = """CanSetName(newName, whyNot) -> bool
Returns true if setting the prim spec's name to ``newName`` will
succeed.
Returns false if it won't, and sets ``whyNot`` with a string
describing why not.
Parameters
----------
newName : str
whyNot : str
"""
result["PrimSpec"].ApplyNameChildrenOrder.func_doc = """ApplyNameChildrenOrder(vec) -> None
Reorders the given list of child names according to the reorder
nameChildren statement for this prim.
This routine employs the standard list editing operation for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
result["PrimSpec"].RemoveProperty.func_doc = """RemoveProperty(property) -> None
Removes the property.
Parameters
----------
property : PropertySpec
"""
result["PrimSpec"].ApplyPropertyOrder.func_doc = """ApplyPropertyOrder(vec) -> None
Reorders the given list of property names according to the reorder
properties statement for this prim.
This routine employs the standard list editing operation for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
result["PrimSpec"].GetPrimAtPath.func_doc = """GetPrimAtPath(path) -> PrimSpec
Returns a prim given its ``path`` .
Returns invalid handle if there is no prim at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
result["PrimSpec"].GetPropertyAtPath.func_doc = """GetPropertyAtPath(path) -> PropertySpec
Returns a property given its ``path`` .
Returns invalid handle if there is no property at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
result["PrimSpec"].GetAttributeAtPath.func_doc = """GetAttributeAtPath(path) -> AttributeSpec
Returns an attribute given its ``path`` .
Returns invalid handle if there is no attribute at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
result["PrimSpec"].GetRelationshipAtPath.func_doc = """GetRelationshipAtPath(path) -> RelationshipSpec
Returns a relationship given its ``path`` .
Returns invalid handle if there is no relationship at ``path`` . This
is simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
result["PrimSpec"].HasActive.func_doc = """HasActive() -> bool
Returns true if this prim spec has an opinion about active.
"""
result["PrimSpec"].ClearActive.func_doc = """ClearActive() -> None
Removes the active opinion in this prim spec if there is one.
"""
result["PrimSpec"].HasKind.func_doc = """HasKind() -> bool
Returns true if this prim spec has an opinion about kind.
"""
result["PrimSpec"].ClearKind.func_doc = """ClearKind() -> None
Remove the kind opinion from this prim spec if there is one.
"""
result["PrimSpec"].HasInstanceable.func_doc = """HasInstanceable() -> bool
Returns true if this prim spec has a value authored for its
instanceable flag, false otherwise.
"""
result["PrimSpec"].ClearInstanceable.func_doc = """ClearInstanceable() -> None
Clears the value for the prim's instanceable flag.
"""
result["PrimSpec"].GetVariantNames.func_doc = """GetVariantNames(name) -> list[str]
Returns list of variant names for the given variant set.
Parameters
----------
name : str
"""
result["PrimSpec"].BlockVariantSelection.func_doc = """BlockVariantSelection(variantSetName) -> None
Blocks the variant selected for the given variant set by setting the
variant selection to empty.
Parameters
----------
variantSetName : str
"""
result["PropertySpec"].__doc__ = """
Base class for SdfAttributeSpec and SdfRelationshipSpec.
Scene Spec Attributes (SdfAttributeSpec) and Relationships
(SdfRelationshipSpec) are the basic properties that make up Scene Spec
Prims (SdfPrimSpec). They share many qualities and can sometimes be
treated uniformly. The common qualities are provided by this base
class.
NOTE: Do not use Python reserved words and keywords as attribute
names. This will cause attribute resolution to fail.
"""
result["PropertySpec"].HasDefaultValue.func_doc = """HasDefaultValue() -> bool
Returns true if a default value is set for this attribute.
"""
result["PropertySpec"].ClearDefaultValue.func_doc = """ClearDefaultValue() -> None
Clear the attribute's default value.
"""
result["PseudoRootSpec"].__doc__ = """"""
result["Reference"].__doc__ = """
Represents a reference and all its meta data.
A reference is expressed on a prim in a given layer and it identifies
a prim in a layer stack. All opinions in the namespace hierarchy under
the referenced prim will be composed with the opinions in the
namespace hierarchy under the referencing prim.
The asset path specifies the layer stack being referenced. If this
asset path is non-empty, this reference is considered
an 'external' reference to the layer stack rooted at the specified
layer. If this is empty, this reference is considered
an 'internal' reference to the layer stack containing (but not
necessarily rooted at) the layer where the reference is authored.
The prim path specifies the prim in the referenced layer stack from
which opinions will be composed. If this prim path is empty, it will
be considered a reference to the default prim specified in the root
layer of the referenced layer stack; see SdfLayer::GetDefaultPrim.
The meta data for a reference is its layer offset and custom data. The
layer offset is an affine transformation applied to all anim splines
in the referenced prim's namespace hierarchy, see SdfLayerOffset for
details. Custom data is for use by plugins or other non-tools supplied
extensions that need to be able to store data associated with
references.
"""
result["Reference"].__init__.func_doc = """__init__(assetPath, primPath, layerOffset, customData)
Creates a reference with all its meta data.
The default reference is an internal reference to the default prim.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
reference's asset path to the empty asset path.
Parameters
----------
assetPath : str
primPath : Path
layerOffset : LayerOffset
customData : VtDictionary
"""
result["Reference"].IsInternal.func_doc = """IsInternal() -> bool
Returns ``true`` in the case of an internal reference.
An internal reference is a reference with an empty asset path.
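A short sketch contrasting external and internal references (the asset
and prim paths are placeholders):
.. code-block:: python
    from pxr import Sdf
    ref = Sdf.Reference("./asset.usda", Sdf.Path("/AssetRoot"))
    assert not ref.IsInternal()
    assert Sdf.Reference().IsInternal()   # empty asset path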
"""
result["RelationshipSpec"].__doc__ = """
A property that contains a reference to one or more SdfPrimSpec
instances.
A relationship may refer to one or more target prims or attributes.
All targets of a single relationship are considered to be playing the
same role. Note that ``role`` does not imply that the target prims or
attributes are of the same ``type`` .
Relationships may be annotated with relational attributes. Relational
attributes are named SdfAttributeSpec objects containing values that
describe the relationship. For example, point weights are commonly
expressed as relational attributes.
"""
result["RelationshipSpec"].ReplaceTargetPath.func_doc = """ReplaceTargetPath(oldPath, newPath) -> None
Updates the specified target path.
Replaces the path given by ``oldPath`` with the one specified by
``newPath`` . Relational attributes are updated if necessary.
Parameters
----------
oldPath : Path
newPath : Path
"""
result["RelationshipSpec"].RemoveTargetPath.func_doc = """RemoveTargetPath(path, preserveTargetOrder) -> None
Removes the specified target path.
Removes the given target path and any relational attributes for the
given target path. If ``preserveTargetOrder`` is ``true`` , Erase() is
called on the list editor instead of RemoveItemEdits(). This preserves
the ordered items list.
Parameters
----------
path : Path
preserveTargetOrder : bool
"""
result["Spec"].__doc__ = """
Base class for all Sdf spec classes.
"""
result["Spec"].ListInfoKeys.func_doc = """ListInfoKeys() -> list[str]
Returns the full list of info keys currently set on this object.
This does not include fields that represent names of children.
"""
result["Spec"].GetMetaDataInfoKeys.func_doc = """GetMetaDataInfoKeys() -> list[str]
Returns the list of metadata info keys for this object.
This is not the complete list of keys, it is only those that should be
considered to be metadata by inspectors or other presentation UI.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
"""
result["Spec"].GetMetaDataDisplayGroup.func_doc = """GetMetaDataDisplayGroup(key) -> str
Returns this metadata key's displayGroup.
Parameters
----------
key : str
"""
result["Spec"].GetInfo.func_doc = """GetInfo(key) -> VtValue
Gets the value for the given metadata key.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
Parameters
----------
key : str
"""
result["Spec"].SetInfo.func_doc = """SetInfo(key, value) -> None
Sets the value for the given metadata key.
It is an error to pass a value that is not the correct type for that
given key.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
Parameters
----------
key : str
value : VtValue
"""
result["Spec"].SetInfoDictionaryValue.func_doc = """SetInfoDictionaryValue(dictionaryKey, entryKey, value) -> None
Sets the value for ``entryKey`` to ``value`` within the dictionary
with the given metadata key ``dictionaryKey`` .
Parameters
----------
dictionaryKey : str
entryKey : str
value : VtValue
"""
result["TimeCode"].__doc__ = """
Value type that represents a time code. It's equivalent to a double
type value but is used to indicate that this value should be resolved
by any time based value resolution.
"""
result["TimeCode"].__init__.func_doc = """__init__(time)
Construct a time code with the given time.
A default constructed SdfTimeCode has a time of 0.0. A double value
can implicitly cast to SdfTimeCode.
Parameters
----------
time : float
"""
result["TimeCode"].GetValue.func_doc = """GetValue() -> float
Return the time value.
"""
result["UnregisteredValue"].__doc__ = """
Stores a representation of the value for an unregistered metadata
field encountered during text layer parsing.
This provides the ability to serialize this data to a layer, as well
as limited inspection and editing capabilities (e.g., moving this data
to a different spec or field) even when the data type of the value
isn't known.
"""
result["UnregisteredValue"].__init__.func_doc = """__init__()
Wraps an empty VtValue.
----------------------------------------------------------------------
__init__(value)
Wraps a std::string.
Parameters
----------
value : str
----------------------------------------------------------------------
__init__(value)
Wraps a VtDictionary.
Parameters
----------
value : VtDictionary
----------------------------------------------------------------------
__init__(value)
Wraps a SdfUnregisteredValueListOp.
Parameters
----------
value : UnregisteredValueListOp
"""
result["ValueBlock"].__doc__ = """
A special value type that can be used to explicitly author an opinion
for an attribute's default value or time sample value that represents
having no value. Note that this is different from not having a value
authored.
One could author such a value in two ways.
.. code-block:: text
attribute->SetDefaultValue(VtValue(SdfValueBlock()));
\\.\\.\\.
layer->SetTimeSample(attribute->GetPath(), 101, VtValue(SdfValueBlock()));
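An equivalent sketch using the Python API on a hypothetical attribute:
.. code-block:: python
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    prim = Sdf.CreatePrimInLayer(layer, "/World")
    attr = Sdf.AttributeSpec(prim, "size", Sdf.ValueTypeNames.Double)
    attr.default = Sdf.ValueBlock()
    layer.SetTimeSample(attr.path, 101, Sdf.ValueBlock())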
"""
result["ValueTypeName"].__doc__ = """
Represents a value type name, i.e. an attribute's type name. Usually,
a value type name associates a string with a ``TfType`` and an
optional role, along with additional metadata. A schema registers all
known value type names and may register multiple names for the same
TfType and role pair. All name strings for a given pair are
collectively called its aliases.
A value type name may also represent just a name string, without a
``TfType`` , role or other metadata. This is currently used
exclusively to unserialize and re-serialize an attribute's type name
where that name is not known to the schema.
Because value type names can have aliases and those aliases may change
in the future, clients should avoid using the value type name's string
representation except to report human readable messages and when
serializing. Clients can look up a value type name by string using
``SdfSchemaBase::FindType()`` and shouldn't otherwise need the string.
Aliases compare equal, even if registered by different schemas.
"""
result["ValueTypeName"].__init__.func_doc = """__init__()
Constructs an invalid type name.
----------------------------------------------------------------------
__init__(arg1)
Parameters
----------
arg1 : Sdf_ValueTypeImpl
"""
result["VariantSetSpec"].__doc__ = """
Represents a coherent set of alternate representations for part of a
scene.
An SdfPrimSpec object may contain one or more named SdfVariantSetSpec
objects that define variations on the prim.
An SdfVariantSetSpec object contains one or more named SdfVariantSpec
objects. It may also define the name of one of its variants to be used
by default.
When a prim references another prim, the referencing prim may specify
one of the variants from each of the variant sets of the target prim.
The chosen variant from each set (or the default variant from those
sets that the referencing prim does not explicitly specify) is
composited over the target prim, and then the referencing prim is
composited over the result.
"""
result["VariantSetSpec"].RemoveVariant.func_doc = """RemoveVariant(variant) -> None
Removes ``variant`` from the list of variants.
If the variant set does not currently own ``variant`` , no action is
taken.
Parameters
----------
variant : VariantSpec
"""
result["VariantSpec"].__doc__ = """
Represents a single variant in a variant set.
A variant contains a prim. This prim is the root prim of the variant.
SdfVariantSpecs are value objects. This means they are immutable once
created and they are passed by copy-in APIs. To change a variant spec,
you make a new one and replace the existing one.
"""
result["VariantSpec"].GetVariantNames.func_doc = """GetVariantNames(name) -> list[str]
Returns list of variant names for the given variant set.
Parameters
----------
name : str
"""
result["AssetPath"].resolvedPath = property(result["AssetPath"].resolvedPath.fget, result["AssetPath"].resolvedPath.fset, result["AssetPath"].resolvedPath.fdel, """type : str
Return the resolved asset path, if any.
Note that SdfAssetPath carries a resolved path only if its creator
passed one to the constructor. SdfAssetPath never performs resolution
itself.
----------------------------------------------------------------------
type : str
Overload for rvalues, move out the asset path.
""")
result["BatchNamespaceEdit"].edits = property(result["BatchNamespaceEdit"].edits.fget, result["BatchNamespaceEdit"].edits.fset, result["BatchNamespaceEdit"].edits.fdel, """type : list[SdfNamespaceEdit]
Returns the edits.
""")
result["FileFormat"].formatId = property(result["FileFormat"].formatId.fget, result["FileFormat"].formatId.fset, result["FileFormat"].formatId.fdel, """type : str
Returns the format identifier.
""")
result["FileFormat"].target = property(result["FileFormat"].target.fget, result["FileFormat"].target.fset, result["FileFormat"].target.fdel, """type : str
Returns the target for this file format.
""")
result["FileFormat"].fileCookie = property(result["FileFormat"].fileCookie.fget, result["FileFormat"].fileCookie.fset, result["FileFormat"].fileCookie.fdel, """type : str
Returns the cookie to be used when writing files with this format.
""")
result["FileFormat"].primaryFileExtension = property(result["FileFormat"].primaryFileExtension.fget, result["FileFormat"].primaryFileExtension.fset, result["FileFormat"].primaryFileExtension.fdel, """type : str
Returns the primary file extension for this format.
This is the extension that is reported for layers using this file
format.
""")
result["Layer"].empty = property(result["Layer"].empty.fget, result["Layer"].empty.fset, result["Layer"].empty.fdel, """type : bool
Returns whether this layer has no significant data.
""")
result["Layer"].anonymous = property(result["Layer"].anonymous.fget, result["Layer"].anonymous.fset, result["Layer"].anonymous.fdel, """type : bool
Returns true if this layer is an anonymous layer.
""")
result["Layer"].dirty = property(result["Layer"].dirty.fget, result["Layer"].dirty.fset, result["Layer"].dirty.fdel, """type : bool
Returns ``true`` if the layer is dirty, i.e.
has changed from its persistent representation.
""")
result["LayerOffset"].offset = property(result["LayerOffset"].offset.fget, result["LayerOffset"].offset.fset, result["LayerOffset"].offset.fdel, """type : None
Sets the time offset.
----------------------------------------------------------------------
type : float
Returns the time offset.
""")
result["LayerOffset"].scale = property(result["LayerOffset"].scale.fget, result["LayerOffset"].scale.fset, result["LayerOffset"].scale.fdel, """type : None
Sets the time scale factor.
----------------------------------------------------------------------
type : float
Returns the time scale factor.
""")
result["LayerTree"].layer = property(result["LayerTree"].layer.fget, result["LayerTree"].layer.fset, result["LayerTree"].layer.fdel, """type : Layer
Returns the layer handle this tree node represents.
""")
result["LayerTree"].offset = property(result["LayerTree"].offset.fget, result["LayerTree"].offset.fset, result["LayerTree"].offset.fdel, """type : LayerOffset
Returns the cumulative layer offset from the root of the tree.
""")
result["LayerTree"].childTrees = property(result["LayerTree"].childTrees.fget, result["LayerTree"].childTrees.fset, result["LayerTree"].childTrees.fdel, """type : list[SdfLayerTreeHandle]
Returns the children of this tree node.
""")
result["Path"].isEmpty = property(result["Path"].isEmpty.fget, result["Path"].isEmpty.fset, result["Path"].isEmpty.fdel, """type : bool
Returns true if this is the empty path ( SdfPath::EmptyPath() ).
""")
result["Payload"].assetPath = property(result["Payload"].assetPath.fget, result["Payload"].assetPath.fset, result["Payload"].assetPath.fdel, """type : None
Sets a new asset path for the layer the payload uses.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
payload's asset path to the empty asset path.
----------------------------------------------------------------------
type : str
Returns the asset path of the layer that the payload uses.
""")
result["Payload"].primPath = property(result["Payload"].primPath.fget, result["Payload"].primPath.fset, result["Payload"].primPath.fdel, """type : None
Sets a new prim path for the prim that the payload uses.
----------------------------------------------------------------------
type : Path
Returns the scene path of the prim for the payload.
""")
result["Payload"].layerOffset = property(result["Payload"].layerOffset.fget, result["Payload"].layerOffset.fset, result["Payload"].layerOffset.fdel, """type : None
Sets a new layer offset.
----------------------------------------------------------------------
type : LayerOffset
Returns the layer offset associated with the payload.
""")
result["Reference"].assetPath = property(result["Reference"].assetPath.fget, result["Reference"].assetPath.fset, result["Reference"].assetPath.fdel, """type : None
Sets the asset path for the root layer of the referenced layer stack.
This may be set to an empty string to specify an internal reference.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
reference's asset path to the empty asset path.
----------------------------------------------------------------------
type : str
Returns the asset path to the root layer of the referenced layer
stack.
This will be empty in the case of an internal reference.
""")
result["Reference"].primPath = property(result["Reference"].primPath.fget, result["Reference"].primPath.fset, result["Reference"].primPath.fdel, """type : None
Sets the path of the referenced prim.
This may be set to an empty path to specify a reference to the default
prim in the referenced layer stack.
----------------------------------------------------------------------
type : Path
Returns the path of the referenced prim.
This will be empty if the referenced prim is the default prim
specified in the referenced layer stack.
""")
result["Reference"].layerOffset = property(result["Reference"].layerOffset.fget, result["Reference"].layerOffset.fset, result["Reference"].layerOffset.fdel, """type : None
Sets a new layer offset.
----------------------------------------------------------------------
type : LayerOffset
Returns the layer offset associated with the reference.
""")
result["Reference"].customData = property(result["Reference"].customData.fget, result["Reference"].customData.fset, result["Reference"].customData.fdel, """type : None
Sets the custom data associated with the reference.
----------------------------------------------------------------------
type : None
Sets a custom data entry for the reference.
If *value* is empty, then this removes the given custom data entry.
----------------------------------------------------------------------
type : VtDictionary
Returns the custom data associated with the reference.
""")
result["UnregisteredValue"].value = property(result["UnregisteredValue"].value.fget, result["UnregisteredValue"].value.fset, result["UnregisteredValue"].value.fdel, """type : VtValue
Returns the wrapped VtValue specified in the constructor.
""")
result["ValueTypeName"].type = property(result["ValueTypeName"].type.fget, result["ValueTypeName"].type.fset, result["ValueTypeName"].type.fdel, """type : Type
Returns the ``TfType`` of the type.
""")
result["ValueTypeName"].role = property(result["ValueTypeName"].role.fget, result["ValueTypeName"].role.fset, result["ValueTypeName"].role.fdel, """type : str
Returns the type's role.
""")
result["ValueTypeName"].defaultValue = property(result["ValueTypeName"].defaultValue.fget, result["ValueTypeName"].defaultValue.fset, result["ValueTypeName"].defaultValue.fdel, """type : VtValue
Returns the default value for the type.
""")
result["ValueTypeName"].defaultUnit = property(result["ValueTypeName"].defaultUnit.fget, result["ValueTypeName"].defaultUnit.fset, result["ValueTypeName"].defaultUnit.fdel, """type : Enum
Returns the default unit enum for the type.
""")
result["ValueTypeName"].scalarType = property(result["ValueTypeName"].scalarType.fget, result["ValueTypeName"].scalarType.fset, result["ValueTypeName"].scalarType.fdel, """type : ValueTypeName
Returns the scalar version of this type name if it's an array type
name, otherwise returns this type name.
If there is no scalar type name then this returns the invalid type
name.
""")
result["ValueTypeName"].arrayType = property(result["ValueTypeName"].arrayType.fget, result["ValueTypeName"].arrayType.fset, result["ValueTypeName"].arrayType.fdel, """type : ValueTypeName
Returns the array version of this type name if it's a scalar type
name, otherwise returns this type name.
If there is no array type name then this returns the invalid type
name.
""")
result["ValueTypeName"].isScalar = property(result["ValueTypeName"].isScalar.fget, result["ValueTypeName"].isScalar.fset, result["ValueTypeName"].isScalar.fdel, """type : bool
Returns ``true`` iff this type is a scalar.
The invalid type is considered neither scalar nor array.
""")
result["ValueTypeName"].isArray = property(result["ValueTypeName"].isArray.fget, result["ValueTypeName"].isArray.fset, result["ValueTypeName"].isArray.fdel, """type : bool
Returns ``true`` iff this type is an array.
The invalid type is considered neither scalar nor array.
""")
result["VariantSpec"].variantSets = property(result["VariantSpec"].variantSets.fget, result["VariantSpec"].variantSets.fset, result["VariantSpec"].variantSets.fdel, """type : SdfVariantSetsProxy
Returns the nested variant sets.
The result maps variant set names to variant sets. Variant sets may be
removed through the proxy.
""") | 95,823 | Python | 22.654406 | 213 | 0.711291 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Sdf/__init__.pyi | from __future__ import annotations
import pxr.Sdf._sdf
import typing
import Boost.Python
import pxr.Sdf
import pxr.Tf
__all__ = [
"AngularUnit",
"AngularUnitDegrees",
"AngularUnitRadians",
"AssetPath",
"AssetPathArray",
"AttributeSpec",
"AuthoringError",
"AuthoringErrorUnrecognizedFields",
"AuthoringErrorUnrecognizedSpecType",
"BatchNamespaceEdit",
"Cat",
"ChangeBlock",
"ChildrenView_Sdf_AttributeChildPolicy_SdfAttributeViewPredicate",
"ChildrenView_Sdf_AttributeChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfAttributeSpec___",
"ChildrenView_Sdf_PrimChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPrimSpec___",
"ChildrenView_Sdf_PropertyChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPropertySpec___",
"ChildrenView_Sdf_RelationshipChildPolicy_SdfRelationshipViewPredicate",
"ChildrenView_Sdf_VariantChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSpec___",
"ChildrenView_Sdf_VariantSetChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSetSpec___",
"CleanupEnabler",
"ComputeAssetPathRelativeToLayer",
"ConvertToValidMetadataDictionary",
"ConvertUnit",
"CopySpec",
"CreatePrimInLayer",
"CreateVariantInLayer",
"DefaultUnit",
"DimensionlessUnit",
"DimensionlessUnitDefault",
"DimensionlessUnitPercent",
"Equal",
"FastUpdateList",
"FileFormat",
"GetNameForUnit",
"GetTypeForValueTypeName",
"GetUnitFromName",
"GetValueTypeNameForValue",
"Int64ListOp",
"IntListOp",
"JustCreatePrimAttributeInLayer",
"JustCreatePrimInLayer",
"Layer",
"LayerOffset",
"LayerTree",
"LengthUnit",
"LengthUnitCentimeter",
"LengthUnitDecimeter",
"LengthUnitFoot",
"LengthUnitInch",
"LengthUnitKilometer",
"LengthUnitMeter",
"LengthUnitMile",
"LengthUnitMillimeter",
"LengthUnitYard",
"ListEditorProxy_SdfNameKeyPolicy",
"ListEditorProxy_SdfPathKeyPolicy",
"ListEditorProxy_SdfPayloadTypePolicy",
"ListEditorProxy_SdfReferenceTypePolicy",
"ListOpType",
"ListOpTypeAdded",
"ListOpTypeAppended",
"ListOpTypeDeleted",
"ListOpTypeExplicit",
"ListOpTypeOrdered",
"ListOpTypePrepended",
"ListProxy_SdfNameKeyPolicy",
"ListProxy_SdfNameTokenKeyPolicy",
"ListProxy_SdfPathKeyPolicy",
"ListProxy_SdfPayloadTypePolicy",
"ListProxy_SdfReferenceTypePolicy",
"ListProxy_SdfSubLayerTypePolicy",
"MapEditProxy_VtDictionary",
"MapEditProxy_map_SdfPath_SdfPath_less_SdfPath__allocator_pair_SdfPath_const__SdfPath_____",
"MapEditProxy_map_string_string_less_string__allocator_pair_stringconst__string_____",
"NamespaceEdit",
"NamespaceEditDetail",
"NotEqual",
"Notice",
"Path",
"PathArray",
"PathListOp",
"Payload",
"PayloadListOp",
"Permission",
"PermissionPrivate",
"PermissionPublic",
"PrimSpec",
"PropertySpec",
"PseudoRootSpec",
"Reference",
"ReferenceListOp",
"RelationshipSpec",
"Spec",
"SpecType",
"SpecTypeAttribute",
"SpecTypeConnection",
"SpecTypeExpression",
"SpecTypeMapper",
"SpecTypeMapperArg",
"SpecTypePrim",
"SpecTypePseudoRoot",
"SpecTypeRelationship",
"SpecTypeRelationshipTarget",
"SpecTypeUnknown",
"SpecTypeVariant",
"SpecTypeVariantSet",
"Specifier",
"SpecifierClass",
"SpecifierDef",
"SpecifierOver",
"StringListOp",
"TimeCode",
"TimeCodeArray",
"TokenListOp",
"UInt64ListOp",
"UIntListOp",
"UnitCategory",
"UnregisteredValue",
"UnregisteredValueListOp",
"ValueBlock",
"ValueHasValidType",
"ValueRoleNames",
"ValueTypeName",
"ValueTypeNames",
"Variability",
"VariabilityUniform",
"VariabilityVarying",
"VariantSetSpec",
"VariantSpec"
]
class AngularUnit(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.AngularUnitDegrees, Sdf.AngularUnitRadians)
pass
class AssetPath(Boost.Python.instance):
"""
Contains an asset path and an optional resolved path. Asset paths may
contain non-control UTF-8 encoded characters. Specifically,
U+0000\.\.U+001F (C0 controls), U+007F (delete), and
U+0080\.\.U+009F (C1 controls) are disallowed. Attempts to construct
asset paths with such characters will issue a TfError and produce the
default-constructed empty asset path.
"""
@property
def path(self) -> None:
"""
:type: None
"""
@property
def resolvedPath(self) -> None:
"""
type : str
Return the resolved asset path, if any.
Note that SdfAssetPath carries a resolved path only if its creator
passed one to the constructor. SdfAssetPath never performs resolution
itself.
----------------------------------------------------------------------
type : str
Overload for rvalues, move out the asset path.
:type: None
"""
__instance_size__ = 80
pass
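# Illustrative sketch, not part of the generated stub: constructing asset
# paths with and without a caller-supplied resolved path. The helper name
# _example_asset_path is hypothetical.
def _example_asset_path():
    from pxr import Sdf
    # Carries only the authored path; resolvedPath stays empty.
    authored = Sdf.AssetPath("./textures/wood.png")
    # Carries a resolved path too; SdfAssetPath never resolves paths itself.
    resolved = Sdf.AssetPath("./textures/wood.png", "/abs/assets/textures/wood.png")
    return authored.path, resolved.resolvedPath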
class AssetPathArray(Boost.Python.instance):
"""
An array of type SdfAssetPath.
"""
_isVtArray = True
pass
class AttributeSpec(PropertySpec, Spec, Boost.Python.instance):
"""
A subclass of SdfPropertySpec that holds typed data.
Attributes are typed data containers that can optionally hold any and
all of the following:
- A single default value.
- An array of knot values describing how the value varies over
time.
- A dictionary of posed values, indexed by name.
The values contained in an attribute must all be of the same type. In
the Python API the ``typeName`` property holds the attribute type. In
the C++ API, you can get the attribute type using the GetTypeName()
method. In addition, all values, including all knot values, must be
the same shape. For information on shapes, see the VtShape class
reference in the C++ documentation.
"""
@staticmethod
def ClearColorSpace() -> None:
"""
ClearColorSpace() -> None
Clears the colorSpace metadata value set on this attribute.
"""
@staticmethod
def HasColorSpace() -> bool:
"""
HasColorSpace() -> bool
Returns true if this attribute has a colorSpace value authored.
"""
@property
def allowedTokens(self) -> None:
"""
The allowed value tokens for this property
:type: None
"""
@property
def colorSpace(self) -> None:
"""
The color-space in which the attribute value is authored.
:type: None
"""
@property
def connectionPathList(self) -> None:
"""
A PathListEditor for the attribute's connection paths.
The list of the connection paths for this attribute may be modified with this PathListEditor.
A PathListEditor may express a list either as an explicit value or as a set of list editing operations. See GdListEditor for more information.
:type: None
"""
@property
def displayUnit(self) -> None:
"""
The display unit for this attribute.
:type: None
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def roleName(self) -> None:
"""
The roleName for this attribute's typeName.
:type: None
"""
@property
def typeName(self) -> None:
"""
The typename of this attribute.
:type: None
"""
@property
def valueType(self) -> None:
"""
The value type of this attribute.
:type: None
"""
ConnectionPathsKey = 'connectionPaths'
DefaultValueKey = 'default'
DisplayUnitKey = 'displayUnit'
pass
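# Illustrative sketch, not part of the generated stub: building a prim spec
# and a typed attribute spec in an anonymous scratch layer.
def _example_attribute_spec():
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    prim = Sdf.PrimSpec(layer, "Cube", Sdf.SpecifierDef)
    # The attribute's value type is fixed at creation time.
    attr = Sdf.AttributeSpec(prim, "size", Sdf.ValueTypeNames.Double)
    attr.default = 2.0
    return layer.ExportToString()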
class AuthoringError(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.AuthoringErrorUnrecognizedFields, Sdf.AuthoringErrorUnrecognizedSpecType)
pass
class BatchNamespaceEdit(Boost.Python.instance):
"""
A description of an arbitrarily complex namespace edit.
A ``SdfBatchNamespaceEdit`` object describes zero or more namespace
edits. Various types providing a namespace will allow the edits to be
applied in a single operation and also allow testing if this will
work.
Clients are encouraged to group several edits into one object because
that may allow more efficient processing of the edits. If, for
example, you need to reparent several prims it may be faster to add
all of the reparents to a single ``SdfBatchNamespaceEdit`` and apply
them at once than to apply each separately.
Objects that allow applying edits are free to apply the edits in any
way and any order they see fit but they should guarantee that the
resulting namespace will be as if each edit was applied one at a time
in the order they were added.
Note that the above rule permits skipping edits that have no effect or
generate a non-final state. For example, if renaming A to B then to C
we could just rename A to C. This means notices may be elided.
However, implementations must not elide notices that contain
information about any edit that clients must be able to know but
otherwise cannot determine.
"""
@staticmethod
@typing.overload
def Add(edit) -> None:
"""
Add(edit) -> None
Add a namespace edit.
Parameters
----------
edit : NamespaceEdit
----------------------------------------------------------------------
Add a namespace edit.
Parameters
----------
currentPath : NamespaceEdit.Path
newPath : NamespaceEdit.Path
index : NamespaceEdit.Index
"""
@staticmethod
@typing.overload
def Add(currentPath, newPath, index) -> None: ...
@staticmethod
def Process(processedEdits, hasObjectAtPath, canEdit, details, fixBackpointers) -> bool:
"""
Process(processedEdits, hasObjectAtPath, canEdit, details, fixBackpointers) -> bool
Validate the edits and generate a possibly more efficient edit
sequence.
Edits are treated as if they were performed one at time in sequence,
therefore each edit occurs in the namespace resulting from all
previous edits.
Editing the descendants of the object in each edit is implied. If an
object is removed then the new path will be empty. If an object is
removed after being otherwise edited, the other edits will be
processed and included in ``processedEdits`` followed by the removal.
This allows clients to fixup references to point to the object's final
location prior to removal.
This function needs help to determine if edits are allowed. The
callbacks provide that help. ``hasObjectAtPath`` returns ``true`` iff
there's an object at the given path. This path will be in the original
namespace not any intermediate or final namespace. ``canEdit`` returns
``true`` iff the object at the current path can be namespace edited to
the new path, ignoring whether an object already exists at the new
path. Both paths are in the original namespace. If it returns
``false`` it should set the string to the reason why the edit isn't
allowed. It should not write either path to the string.
If ``hasObjectAtPath`` is invalid then this assumes objects exist
where they should and don't exist where they shouldn't. Use this with
        care. If ``canEdit`` is invalid then it's assumed all edits are valid.
If ``fixBackpointers`` is ``true`` then target/connection paths are
expected to be in the intermediate namespace resulting from all
previous edits. If ``false`` and any current or new path contains a
target or connection path that has been edited then this will generate
an error.
This method returns ``true`` if the edits are allowed and sets
``processedEdits`` to a new edit sequence at least as efficient as the
input sequence. If not allowed it returns ``false`` and appends
reasons why not to ``details`` .
Parameters
----------
processedEdits : list[SdfNamespaceEdit]
hasObjectAtPath : HasObjectAtPath
canEdit : CanEdit
details : list[SdfNamespaceEditDetail]
fixBackpointers : bool
"""
@property
def edits(self) -> None:
"""
type : list[SdfNamespaceEdit]
Returns the edits.
:type: None
"""
pass
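# Illustrative sketch, not part of the generated stub: batching a rename so
# the layer can validate and apply it as a unit. The use of
# Sdf.NamespaceEdit.Rename as the edit constructor is an assumption.
def _example_batch_namespace_edit():
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    Sdf.CreatePrimInLayer(layer, "/OldName")
    batch = Sdf.BatchNamespaceEdit()
    batch.Add(Sdf.NamespaceEdit.Rename(Sdf.Path("/OldName"), "NewName"))
    # Apply() performs no edits and returns False if the batch cannot succeed.
    return layer.Apply(batch)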
class ChangeBlock(Boost.Python.instance):
"""
**DANGER DANGER DANGER**
Please make sure you have read and fully understand the issues below
before using a changeblock! They are very easy to use in an unsafe way
that could make the system crash or corrupt data. If you have any
questions, please contact the USD team, who would be happy to help!
SdfChangeBlock provides a way to group a round of related changes to
scene description in order to process them more efficiently.
Normally, Sdf sends notification immediately as changes are made so
that downstream representations like UsdStage can update accordingly.
However, sometimes it can be advantageous to group a series of Sdf
changes into a batch so that they can be processed more efficiently,
with a single round of change processing. An example might be when
setting many avar values on a model at the same time.
Opening a changeblock tells Sdf to delay sending notification about
changes until the outermost changeblock is exited. Until then, Sdf
internally queues up the notification it needs to send.
It is *not* safe to use Usd or other downstream API while a
changeblock is open!! This is because those derived representations
will not have had a chance to update while the changeblock is open.
Not only will their view of the world be stale, it could be unsafe to
even make queries from, since they may be holding onto expired handles
to Sdf objects that no longer exist. If you need to make a bunch of
changes to scene description, the best approach is to build a list of
necessary changes that can be performed directly via the Sdf API, then
submit those all inside a changeblock without talking to any
downstream modules. For example, this is how many mutators in Usd
that operate on more than one field or Spec work.
"""
__instance_size__ = 32
pass
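# Illustrative sketch, not part of the generated stub: a change block defers
# change notification until the outermost block closes. Downstream objects
# (e.g. a UsdStage) must not be queried while it is open.
def _example_change_block():
    from pxr import Sdf
    layer = Sdf.Layer.CreateAnonymous()
    with Sdf.ChangeBlock():
        for i in range(8):
            Sdf.CreatePrimInLayer(layer, f"/Prim_{i}")
    return layer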
class ChildrenView_Sdf_AttributeChildPolicy_SdfAttributeViewPredicate(Boost.Python.instance):
class ChildrenView_Sdf_AttributeChildPolicy_SdfAttributeViewPredicate_Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_AttributeChildPolicy_SdfAttributeViewPredicate_KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_AttributeChildPolicy_SdfAttributeViewPredicate_ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_AttributeChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfAttributeSpec___(Boost.Python.instance):
class ChildrenView_Sdf_AttributeChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfAttributeSpec____Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_AttributeChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfAttributeSpec____KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_AttributeChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfAttributeSpec____ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_PrimChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPrimSpec___(Boost.Python.instance):
class ChildrenView_Sdf_PrimChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPrimSpec____Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_PrimChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPrimSpec____KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_PrimChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPrimSpec____ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_PropertyChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPropertySpec___(Boost.Python.instance):
class ChildrenView_Sdf_PropertyChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPropertySpec____Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_PropertyChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPropertySpec____KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_PropertyChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfPropertySpec____ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_RelationshipChildPolicy_SdfRelationshipViewPredicate(Boost.Python.instance):
class ChildrenView_Sdf_RelationshipChildPolicy_SdfRelationshipViewPredicate_Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_RelationshipChildPolicy_SdfRelationshipViewPredicate_KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_RelationshipChildPolicy_SdfRelationshipViewPredicate_ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_VariantChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSpec___(Boost.Python.instance):
class ChildrenView_Sdf_VariantChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSpec____Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_VariantChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSpec____KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_VariantChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSpec____ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class ChildrenView_Sdf_VariantSetChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSetSpec___(Boost.Python.instance):
class ChildrenView_Sdf_VariantSetChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSetSpec____Iterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_VariantSetChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSetSpec____KeyIterator(Boost.Python.instance):
pass
class ChildrenView_Sdf_VariantSetChildPolicy_SdfChildrenViewTrivialPredicate_SdfHandle_SdfVariantSetSpec____ValueIterator(Boost.Python.instance):
pass
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
pass
class CleanupEnabler(Boost.Python.instance):
"""
An RAII class which, when an instance is alive, enables scheduling of
automatic cleanup of SdfLayers.
Any affected specs which no longer contribute to the scene will be
removed when the last SdfCleanupEnabler instance goes out of scope.
Note that for this purpose, SdfPropertySpecs are removed if they have
only required fields (see SdfPropertySpecs::HasOnlyRequiredFields),
but only if the property spec itself was affected by an edit that left
it with only required fields. This will have the effect of
uninstantiating on-demand attributes. For example, if its parent prim
was affected by an edit that left it otherwise inert, it will not be
removed if it contains an SdfPropertySpec with only required fields,
but if the property spec itself is edited leaving it with only
required fields, it will be removed, potentially uninstantiating it if
it's an on-demand property.
SdfCleanupEnablers are accessible in both C++ and Python.
    SdfCleanupEnabler can be used in the following manner:
.. code-block:: text
{
SdfCleanupEnabler enabler;
// Perform any action that might otherwise leave inert specs around,
// such as removing info from properties or prims, or removing name
// children. i.e:
primSpec->ClearInfo(SdfFieldKeys->Default);
// When enabler goes out of scope on the next line, primSpec will
// be removed if it has been left as an empty over.
}
"""
__instance_size__ = 24
pass
class DimensionlessUnit(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.DimensionlessUnitPercent, Sdf.DimensionlessUnitDefault)
pass
class FastUpdateList(Boost.Python.instance):
class FastUpdate(Boost.Python.instance):
@property
def path(self) -> None:
"""
:type: None
"""
@property
def value(self) -> None:
"""
:type: None
"""
pass
@property
def fastUpdates(self) -> None:
"""
:type: None
"""
@property
def hasCompositionDependents(self) -> None:
"""
:type: None
"""
pass
class FileFormat(Boost.Python.instance):
"""
Base class for file format implementations.
"""
class Tokens(Boost.Python.instance):
TargetArg = 'target'
pass
@staticmethod
def CanRead(file) -> bool:
"""
CanRead(file) -> bool
Returns true if ``file`` can be read by this format.
Parameters
----------
file : str
"""
@staticmethod
def FindAllFileFormatExtensions(*args, **kwargs) -> None:
"""
**classmethod** FindAllFileFormatExtensions() -> set[str]
Returns a set containing the extension(s) corresponding to all
registered file formats.
"""
@staticmethod
def FindByExtension(path, args) -> FileFormat:
"""
**classmethod** FindByExtension(path, target) -> FileFormat
Returns the file format instance that supports the extension for
``path`` .
If a format with a matching extension is not found, this returns a
null file format pointer.
An extension may be handled by multiple file formats, but each with a
different target. In such cases, if no ``target`` is specified, the
file format that is registered as the primary plugin will be returned.
Otherwise, the file format whose target matches ``target`` will be
returned.
Parameters
----------
path : str
target : str
----------------------------------------------------------------------
Returns a file format instance that supports the extension for
``path`` and whose target matches one of those specified by the given
``args`` .
If the ``args`` specify no target, then the file format that is
registered as the primary plugin will be returned. If a format with a
matching extension is not found, this returns a null file format
pointer.
Parameters
----------
path : str
args : FileFormatArguments
"""
@staticmethod
def FindById(*args, **kwargs) -> None:
"""
**classmethod** FindById(formatId) -> FileFormat
Returns the file format instance with the specified ``formatId``
identifier.
If a format with a matching identifier is not found, this returns a
null file format pointer.
Parameters
----------
formatId : str
"""
@staticmethod
def GetFileExtension(*args, **kwargs) -> None:
"""
**classmethod** GetFileExtension(s) -> str
Returns the file extension for path or file name ``s`` , without the
leading dot character.
Parameters
----------
s : str
"""
@staticmethod
def GetFileExtensions() -> list[str]:
"""
GetFileExtensions() -> list[str]
Returns a list of extensions that this format supports.
"""
@staticmethod
def IsPackage() -> bool:
"""
IsPackage() -> bool
Returns true if this file format is a package containing other assets.
"""
@staticmethod
def IsSupportedExtension(extension) -> bool:
"""
IsSupportedExtension(extension) -> bool
Returns true if ``extension`` matches one of the extensions returned
by GetFileExtensions.
Parameters
----------
extension : str
"""
@property
def expired(self) -> None:
"""
True if this object has expired, False otherwise.
:type: None
"""
@property
def fileCookie(self) -> None:
"""
type : str
Returns the cookie to be used when writing files with this format.
:type: None
"""
@property
def formatId(self) -> None:
"""
type : str
Returns the format identifier.
:type: None
"""
@property
def primaryFileExtension(self) -> None:
"""
type : str
Returns the primary file extension for this format.
This is the extension that is reported for layers using this file
format.
:type: None
"""
@property
def target(self) -> None:
"""
type : str
Returns the target for this file format.
:type: None
"""
pass
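# Illustrative sketch, not part of the generated stub: looking up the file
# format plugin registered for an extension.
def _example_file_format():
    from pxr import Sdf
    fmt = Sdf.FileFormat.FindByExtension("usda")
    if fmt is not None:
        return fmt.formatId, fmt.GetFileExtensions()
    return None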
class Int64ListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class IntListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
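# Illustrative sketch, not part of the generated stub: list ops hold list
# edits (prepend/append/delete) rather than a final list; ApplyOperations
# applies those edits to an existing list.
def _example_int_list_op():
    from pxr import Sdf
    op = Sdf.IntListOp()
    op.prependedItems = [1]
    op.appendedItems = [4]
    op.deletedItems = [2]
    return op.ApplyOperations([2, 3])  # expected: [1, 3, 4]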
class Layer(Boost.Python.instance):
"""
A scene description container that can combine with other such
containers to form simple component assets, and successively larger
aggregates. The contents of an SdfLayer adhere to the SdfData data
model. A layer can be ephemeral, or be an asset accessed and
serialized through the ArAsset and ArResolver interfaces.
    The SdfLayer class provides a consistent API for accessing and
serializing scene description, using any data store provided by Ar
plugins. Sdf itself provides a UTF-8 text format for layers identified
    by the ".sdf" identifier extension, but via the SdfFileFormat
abstraction, allows downstream modules and plugins to adapt arbitrary
data formats to the SdfData/SdfLayer model.
The FindOrOpen() method returns a new SdfLayer object with scene
description from any supported asset format. Once read, a layer
remembers which asset it was read from. The Save() method saves the
layer back out to the original asset. You can use the Export() method
to write the layer to a different location. You can use the
GetIdentifier() method to get the layer's Id or GetRealPath() to get
the resolved, full URI.
Layers can have a timeCode range (startTimeCode and endTimeCode). This
range represents the suggested playback range, but has no impact on
the extent of the animation data that may be stored in the layer. The
    metadatum "timeCodesPerSecond" is used to annotate how the time ordinate
for samples contained in the file scales to seconds. For example, if
timeCodesPerSecond is 24, then a sample at time ordinate 24 should be
viewed exactly one second after the sample at time ordinate 0.
"""
class DetachedLayerRules(Boost.Python.instance):
@staticmethod
def Exclude(*args, **kwargs) -> None: ...
@staticmethod
def GetExcluded(*args, **kwargs) -> None: ...
@staticmethod
def GetIncluded(*args, **kwargs) -> None: ...
@staticmethod
def Include(*args, **kwargs) -> None: ...
@staticmethod
def IncludeAll(*args, **kwargs) -> None: ...
@staticmethod
def IncludedAll(*args, **kwargs) -> None: ...
@staticmethod
def IsIncluded(*args, **kwargs) -> None: ...
__instance_size__ = 72
pass
@staticmethod
def AddToMutedLayers(*args, **kwargs) -> None:
"""
**classmethod** AddToMutedLayers(mutedPath) -> None
Add the specified path to the muted layers set.
Parameters
----------
mutedPath : str
"""
@staticmethod
def Apply(arg1) -> bool:
"""
Apply(arg1) -> bool
Performs a batch of namespace edits.
Returns ``true`` on success and ``false`` on failure. On failure, no
namespace edits will have occurred.
Parameters
----------
arg1 : BatchNamespaceEdit
"""
@staticmethod
def ApplyRootPrimOrder(vec) -> None:
"""
ApplyRootPrimOrder(vec) -> None
Reorders the given list of prim names according to the reorder
rootPrims statement for this layer.
This routine employs the standard list editing operations for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
@staticmethod
def CanApply(arg1, details) -> NamespaceEditDetail.Result:
"""
CanApply(arg1, details) -> NamespaceEditDetail.Result
Check if a batch of namespace edits will succeed.
This returns ``SdfNamespaceEditDetail::Okay`` if they will succeed as
a batch, ``SdfNamespaceEditDetail::Unbatched`` if the edits will
succeed but will be applied unbatched, and
``SdfNamespaceEditDetail::Error`` if they will not succeed. No edits
will be performed in any case.
If ``details`` is not ``None`` and the method does not return ``Okay``
then details about the problems will be appended to ``details`` . A
problem may cause the method to return early, so ``details`` may not
list every problem.
Note that Sdf does not track backpointers so it's unable to fix up
targets/connections to namespace edited objects. Clients must fix
those to prevent them from falling off. In addition, this method will
report failure if any relational attribute with a target to a
namespace edited object is subsequently edited (in the same batch).
Clients should perform edits on relational attributes first.
Clients may wish to report unbatch details to the user to confirm that
the edits should be applied unbatched. This will give the user a
chance to correct any problems that cause batching to fail and try
again.
Parameters
----------
arg1 : BatchNamespaceEdit
details : list[SdfNamespaceEditDetail]
"""
@staticmethod
def Clear() -> None:
"""
Clear() -> None
Clears the layer of all content.
This restores the layer to a state as if it had just been created with
CreateNew() . This operation is Undo-able.
The fileName and whether journaling is enabled are not affected by
this method.
"""
@staticmethod
def ClearColorConfiguration() -> None:
"""
ClearColorConfiguration() -> None
Clears the color configuration metadata authored in this layer.
"""
@staticmethod
def ClearColorManagementSystem() -> None:
"""
ClearColorManagementSystem() -> None
Clears the'colorManagementSystem'metadata authored in this layer.
"""
@staticmethod
def ClearCustomLayerData() -> None:
"""
ClearCustomLayerData() -> None
Clears out the CustomLayerData dictionary associated with this layer.
"""
@staticmethod
def ClearDefaultPrim() -> None:
"""
ClearDefaultPrim() -> None
Clear the default prim metadata for this layer.
See GetDefaultPrim() and SetDefaultPrim() .
"""
@staticmethod
def ClearEndTimeCode() -> None:
"""
ClearEndTimeCode() -> None
Clear the endTimeCode opinion.
"""
@staticmethod
def ClearFramePrecision() -> None:
"""
ClearFramePrecision() -> None
Clear the framePrecision opinion.
"""
@staticmethod
def ClearFramesPerSecond() -> None:
"""
ClearFramesPerSecond() -> None
Clear the framesPerSecond opinion.
"""
@staticmethod
def ClearOwner() -> None:
"""
ClearOwner() -> None
Clear the owner opinion.
"""
@staticmethod
def ClearSessionOwner() -> None:
"""
ClearSessionOwner() -> None
"""
@staticmethod
def ClearStartTimeCode() -> None:
"""
ClearStartTimeCode() -> None
Clear the startTimeCode opinion.
"""
@staticmethod
def ClearTimeCodesPerSecond() -> None:
"""
ClearTimeCodesPerSecond() -> None
Clear the timeCodesPerSecond opinion.
"""
@staticmethod
def ComputeAbsolutePath(assetPath) -> str:
"""
ComputeAbsolutePath(assetPath) -> str
Returns the path to the asset specified by ``assetPath`` using this
layer to anchor the path if necessary.
Returns ``assetPath`` if it's empty or an anonymous layer identifier.
This method can be used on asset paths that are authored in this layer
to create new asset paths that can be copied to other layers. These
new asset paths should refer to the same assets as the original asset
paths. For example, if the underlying ArResolver is filesystem-based
and ``assetPath`` is a relative filesystem path, this method might
return the absolute filesystem path using this layer's location as the
anchor.
The returned path should in general not be assumed to be an absolute
        filesystem path or any other specific form. It is "absolute" in that it
should resolve to the same asset regardless of what layer it's
authored in.
Parameters
----------
assetPath : str
"""
@staticmethod
def CreateAnonymous(tag, format, args) -> Layer:
"""
**classmethod** CreateAnonymous(tag, args) -> Layer
Creates a new *anonymous* layer with an optional ``tag`` .
An anonymous layer is a layer with a system assigned identifier, that
cannot be saved to disk via Save() . Anonymous layers have an
identifier, but no real path or other asset information fields.
Anonymous layers may be tagged, which can be done to aid debugging
subsystems that make use of anonymous layers. The tag becomes the
display name of an anonymous layer, and is also included in the
generated identifier. Untagged anonymous layers have an empty display
name.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
tag : str
args : FileFormatArguments
----------------------------------------------------------------------
Create an anonymous layer with a specific ``format`` .
Parameters
----------
tag : str
format : FileFormat
args : FileFormatArguments
"""
@staticmethod
def CreateIdentifier(*args, **kwargs) -> None:
"""
**classmethod** CreateIdentifier(layerPath, arguments) -> str
Joins the given layer path and arguments into an identifier.
Parameters
----------
layerPath : str
arguments : FileFormatArguments
"""
@staticmethod
def CreateNew(fileFormat, identifier, args) -> Layer:
"""
**classmethod** CreateNew(identifier, args) -> Layer
Creates a new empty layer with the given identifier.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
identifier : str
args : FileFormatArguments
----------------------------------------------------------------------
Creates a new empty layer with the given identifier for a given file
format class.
This function has the same behavior as the other CreateNew function,
but uses the explicitly-specified ``fileFormat`` instead of attempting
to discern the format from ``identifier`` .
Parameters
----------
fileFormat : FileFormat
identifier : str
args : FileFormatArguments
"""
@staticmethod
def DumpLayerInfo(*args, **kwargs) -> None:
"""
Debug helper to examine content of the current layer registry and
the asset/real path of all layers in the registry.
"""
@staticmethod
def EraseTimeSample(path, time) -> None:
"""
EraseTimeSample(path, time) -> None
Parameters
----------
path : Path
time : float
"""
@staticmethod
def Export(filename, comment, args) -> bool:
"""
Export(filename, comment, args) -> bool
Exports this layer to a file.
Returns ``true`` if successful, ``false`` if an error occurred.
If ``comment`` is not empty, the layer gets exported with the given
comment. Additional arguments may be supplied via the ``args``
parameter. These arguments may control behavior specific to the
exported layer's file format.
Note that the file name or comment of the original layer is not
updated. This only saves a copy of the layer to the given filename.
        Subsequent calls to Save() will still save the layer to its
previously remembered file name.
Parameters
----------
filename : str
comment : str
args : FileFormatArguments
"""
@staticmethod
def ExportToString(*args, **kwargs) -> None:
"""
Returns the string representation of the layer.
"""
@staticmethod
def Find(filename) -> LayerPtr:
"""
filename : string
Returns the open layer with the given filename, or None. Note that this is a static class method.
"""
@staticmethod
def FindOrOpen(*args, **kwargs) -> None:
"""
**classmethod** FindOrOpen(identifier, args) -> Layer
Return an existing layer with the given ``identifier`` and ``args`` ,
or else load it.
If the layer can't be found or loaded, an error is posted and a null
layer is returned.
Arguments in ``args`` will override any arguments specified in
``identifier`` .
Parameters
----------
identifier : str
args : FileFormatArguments
"""
@staticmethod
def FindOrOpenRelativeToLayer(*args, **kwargs) -> None:
"""
**classmethod** FindOrOpenRelativeToLayer(anchor, identifier, args) -> Layer
Return an existing layer with the given ``identifier`` and ``args`` ,
or else load it.
The given ``identifier`` will be resolved relative to the ``anchor``
layer. If the layer can't be found or loaded, an error is posted and a
null layer is returned.
If the ``anchor`` layer is invalid, issues a coding error and returns
a null handle.
Arguments in ``args`` will override any arguments specified in
``identifier`` .
Parameters
----------
anchor : Layer
identifier : str
args : FileFormatArguments
"""
@staticmethod
def FindRelativeToLayer(*args, **kwargs) -> None:
"""
Returns the open layer with the given filename, or None. If the filename is a relative path then it's found relative to the given layer. Note that this is a static class method.
"""
@staticmethod
def GetAssetInfo() -> VtValue:
"""
GetAssetInfo() -> VtValue
Returns resolve information from the last time the layer identifier
was resolved.
"""
@staticmethod
def GetAssetName() -> str:
"""
GetAssetName() -> str
Returns the asset name associated with this layer.
"""
@staticmethod
def GetAttributeAtPath(path) -> AttributeSpec:
"""
GetAttributeAtPath(path) -> AttributeSpec
Returns an attribute at the given ``path`` .
Returns ``None`` if there is no attribute at ``path`` . This is simply
a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
@staticmethod
def GetBracketingTimeSamples(time, tLower, tUpper) -> bool:
"""
GetBracketingTimeSamples(time, tLower, tUpper) -> bool
Parameters
----------
time : float
tLower : float
tUpper : float
"""
@staticmethod
def GetBracketingTimeSamplesForPath(path, time, tLower, tUpper) -> bool:
"""
GetBracketingTimeSamplesForPath(path, time, tLower, tUpper) -> bool
Parameters
----------
path : Path
time : float
tLower : float
tUpper : float
"""
@staticmethod
def GetCompositionAssetDependencies() -> set[str]:
"""
GetCompositionAssetDependencies() -> set[str]
Return paths of all assets this layer depends on due to composition
fields.
This includes the paths of all layers referred to by reference,
payload, and sublayer fields in this layer. This function only returns
direct composition dependencies of this layer, i.e. it does not
recurse to find composition dependencies from its dependent layer
assets.
"""
@staticmethod
def GetDetachedLayerRules(*args, **kwargs) -> None:
"""
**classmethod** GetDetachedLayerRules() -> DetachedLayerRules
Returns the current rules for the detached layer set.
"""
@staticmethod
def GetDisplayName() -> str:
"""
GetDisplayName() -> str
Returns the layer's display name.
The display name is the base filename of the identifier.
"""
@staticmethod
def GetDisplayNameFromIdentifier(*args, **kwargs) -> None:
"""
**classmethod** GetDisplayNameFromIdentifier(identifier) -> str
Returns the display name for the given ``identifier`` , using the same
rules as GetDisplayName.
Parameters
----------
identifier : str
"""
@staticmethod
def GetExternalAssetDependencies() -> set[str]:
"""
GetExternalAssetDependencies() -> set[str]
Returns a set of resolved paths to all external asset dependencies the
layer needs to generate its contents.
These are additional asset dependencies that are determined by the
layer's file format and will be consulted during Reload() when
determining if the layer needs to be reloaded. This specifically does
not include dependencies related to composition, i.e. this will not
include assets from references, payloads, and sublayers.
"""
@staticmethod
def GetExternalReferences(*args, **kwargs) -> None:
"""
Return a list of asset paths for
this layer.
"""
@staticmethod
def GetFileFormat() -> FileFormat:
"""
GetFileFormat() -> FileFormat
Returns the file format used by this layer.
"""
@staticmethod
def GetFileFormatArguments() -> FileFormatArguments:
"""
GetFileFormatArguments() -> FileFormatArguments
Returns the file format-specific arguments used during the
construction of this layer.
"""
@staticmethod
def GetLoadedLayers(*args, **kwargs) -> None:
"""
Return list of loaded layers.
"""
@staticmethod
def GetMutedLayers(*args, **kwargs) -> None:
"""
Return list of muted layers.
"""
@staticmethod
def GetNumTimeSamplesForPath(path) -> int:
"""
GetNumTimeSamplesForPath(path) -> int
Parameters
----------
path : Path
"""
@staticmethod
def GetObjectAtPath(path) -> Spec:
"""
GetObjectAtPath(path) -> Spec
Returns the object at the given ``path`` .
There is no distinction between an absolute and relative path at the
        SdfLayer level.
Returns ``None`` if there is no object at ``path`` .
Parameters
----------
path : Path
"""
@staticmethod
def GetPrimAtPath(path) -> PrimSpec:
"""
GetPrimAtPath(path) -> PrimSpec
Returns the prim at the given ``path`` .
Returns ``None`` if there is no prim at ``path`` . This is simply a
more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
@staticmethod
def GetPropertyAtPath(path) -> PropertySpec:
"""
GetPropertyAtPath(path) -> PropertySpec
Returns a property at the given ``path`` .
Returns ``None`` if there is no property at ``path`` . This is simply
a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
@staticmethod
def GetRelationshipAtPath(path) -> RelationshipSpec:
"""
GetRelationshipAtPath(path) -> RelationshipSpec
Returns a relationship at the given ``path`` .
Returns ``None`` if there is no relationship at ``path`` . This is
simply a more specifically typed version of ``GetObjectAtPath()`` .
Parameters
----------
path : Path
"""
@staticmethod
def HasColorConfiguration() -> bool:
"""
HasColorConfiguration() -> bool
Returns true if color configuration metadata is set in this layer.
"""
@staticmethod
def HasColorManagementSystem() -> bool:
"""
HasColorManagementSystem() -> bool
Returns true if colorManagementSystem metadata is set in this layer.
"""
@staticmethod
def HasCustomLayerData() -> bool:
"""
HasCustomLayerData() -> bool
Returns true if CustomLayerData is authored on the layer.
"""
@staticmethod
def HasDefaultPrim() -> bool:
"""
HasDefaultPrim() -> bool
Return true if the default prim metadata is set in this layer.
See GetDefaultPrim() and SetDefaultPrim() .
"""
@staticmethod
def HasEndTimeCode() -> bool:
"""
HasEndTimeCode() -> bool
Returns true if the layer has an endTimeCode opinion.
"""
@staticmethod
def HasFramePrecision() -> bool:
"""
HasFramePrecision() -> bool
Returns true if the layer has a frames precision opinion.
"""
@staticmethod
def HasFramesPerSecond() -> bool:
"""
HasFramesPerSecond() -> bool
Returns true if the layer has a frames per second opinion.
"""
@staticmethod
def HasOwner() -> bool:
"""
HasOwner() -> bool
Returns true if the layer has an owner opinion.
"""
@staticmethod
def HasSessionOwner() -> bool:
"""
HasSessionOwner() -> bool
Returns true if the layer has a session owner opinion.
"""
@staticmethod
def HasStartTimeCode() -> bool:
"""
HasStartTimeCode() -> bool
Returns true if the layer has a startTimeCode opinion.
"""
@staticmethod
def HasTimeCodesPerSecond() -> bool:
"""
HasTimeCodesPerSecond() -> bool
Returns true if the layer has a timeCodesPerSecond opinion.
"""
@staticmethod
def Import(layerPath) -> bool:
"""
Import(layerPath) -> bool
Imports the content of the given layer path, replacing the content of
the current layer.
Note: If the layer path is the same as the current layer's real path,
no action is taken (and a warning occurs). For this case use Reload()
.
Parameters
----------
layerPath : str
"""
@staticmethod
def ImportFromString(string) -> bool:
"""
ImportFromString(string) -> bool
Reads this layer from the given string.
Returns ``true`` if successful, otherwise returns ``false`` .
Parameters
----------
string : str
"""
@staticmethod
def IsAnonymousLayerIdentifier(*args, **kwargs) -> None:
"""
**classmethod** IsAnonymousLayerIdentifier(identifier) -> bool
Returns true if the ``identifier`` is an anonymous layer unique
identifier.
Parameters
----------
identifier : str
"""
@staticmethod
def IsDetached() -> bool:
"""
IsDetached() -> bool
Returns true if this layer is detached from its serialized data store,
false otherwise.
Detached layers are isolated from external changes to their serialized
data.
"""
@staticmethod
def IsIncludedByDetachedLayerRules(*args, **kwargs) -> None:
"""
**classmethod** IsIncludedByDetachedLayerRules(identifier) -> bool
Returns whether the given layer identifier is included in the current
rules for the detached layer set.
        This is equivalent to GetDetachedLayerRules().IsIncluded(identifier).
Parameters
----------
identifier : str
"""
@staticmethod
def IsMuted(path) -> bool:
"""
**classmethod** IsMuted() -> bool
Returns ``true`` if the current layer is muted.
----------------------------------------------------------------------
Returns ``true`` if the specified layer path is muted.
Parameters
----------
path : str
"""
@staticmethod
def ListAllTimeSamples() -> set[float]:
"""
ListAllTimeSamples() -> set[float]
"""
@staticmethod
def ListTimeSamplesForPath(path) -> set[float]:
"""
ListTimeSamplesForPath(path) -> set[float]
Parameters
----------
path : Path
"""
@staticmethod
def New(*args, **kwargs) -> None:
"""
**classmethod** New(fileFormat, identifier, args) -> Layer
Creates a new empty layer with the given identifier for a given file
format class.
The new layer will not be dirty and will not be saved.
Additional arguments may be supplied via the ``args`` parameter. These
arguments may control behavior specific to the layer's file format.
Parameters
----------
fileFormat : FileFormat
identifier : str
args : FileFormatArguments
"""
@staticmethod
def OpenAsAnonymous(*args, **kwargs) -> None:
"""
**classmethod** OpenAsAnonymous(layerPath, metadataOnly, tag) -> Layer
Load the given layer from disk as a new anonymous layer.
If the layer can't be found or loaded, an error is posted and a null
layer is returned.
The anonymous layer does not retain any knowledge of the backing file
on the filesystem.
``metadataOnly`` is a flag that asks for only the layer metadata to be
read in, which can be much faster if that is all that is required.
Note that this is just a hint: some FileFormat readers may disregard
this flag and still fully populate the layer contents.
An optional ``tag`` may be specified. See CreateAnonymous for details.
Parameters
----------
layerPath : str
metadataOnly : bool
tag : str
"""
@staticmethod
@typing.overload
def QueryTimeSample(path, time, value) -> bool:
"""
QueryTimeSample(path, time, value) -> bool
Parameters
----------
path : Path
time : float
value : VtValue
----------------------------------------------------------------------
Parameters
----------
path : Path
time : float
value : SdfAbstractDataValue
----------------------------------------------------------------------
Parameters
----------
path : Path
time : float
data : T
"""
@staticmethod
@typing.overload
def QueryTimeSample(path, time, data) -> bool: ...
@staticmethod
def Reload(force) -> bool:
"""
Reload(force) -> bool
Reloads the layer from its persistent representation.
This restores the layer to a state as if it had just been created with
FindOrOpen() . This operation is Undo-able.
The fileName and whether journaling is enabled are not affected by
this method.
When called with force = false (the default), Reload attempts to avoid
reloading layers that have not changed on disk. It does so by
comparing the file's modification time (mtime) to when the file was
loaded. If the layer has unsaved modifications, this mechanism is not
used, and the layer is reloaded from disk. If the layer has any
external asset dependencies their modification state will also be
consulted when determining if the layer needs to be reloaded.
Passing true to the ``force`` parameter overrides this behavior,
forcing the layer to be reloaded from disk regardless of whether it
has changed.
Parameters
----------
force : bool
"""
@staticmethod
def ReloadLayers(*args, **kwargs) -> None:
"""
**classmethod** ReloadLayers(layers, force) -> bool
Reloads the specified layers.
Returns ``false`` if one or more layers failed to reload.
See ``Reload()`` for a description of the ``force`` flag.
Parameters
----------
layers : set[Layer]
force : bool
"""
@staticmethod
def RemoveFromMutedLayers(*args, **kwargs) -> None:
"""
**classmethod** RemoveFromMutedLayers(mutedPath) -> None
Remove the specified path from the muted layers set.
Parameters
----------
mutedPath : str
"""
@staticmethod
def RemoveInertSceneDescription() -> None:
"""
RemoveInertSceneDescription() -> None
Removes all scene description in this layer that does not affect the
scene.
        This method walks the layer namespace hierarchy and removes any prims
        that are not contributing any opinions.
"""
@staticmethod
def Save(force) -> bool:
"""
Save(force) -> bool
Returns ``true`` if successful, ``false`` if an error occurred.
Returns ``false`` if the layer has no remembered file name or the
layer type cannot be saved. The layer will not be overwritten if the
file exists and the layer is not dirty unless ``force`` is true.
Parameters
----------
force : bool
"""
@staticmethod
def ScheduleRemoveIfInert(spec) -> None:
"""
ScheduleRemoveIfInert(spec) -> None
Cause ``spec`` to be removed if it no longer affects the scene when
the last change block is closed, or now if there are no change blocks.
Parameters
----------
spec : Spec
"""
@staticmethod
def SetDetachedLayerRules(*args, **kwargs) -> None:
"""
**classmethod** SetDetachedLayerRules(mask) -> None
Sets the rules specifying detached layers.
Newly-created or opened layers whose identifiers are included in
``rules`` will be opened as detached layers. Existing layers that are
now included or no longer included will be reloaded. Any unsaved
modifications to those layers will be lost.
This function is not thread-safe. It may not be run concurrently with
any other functions that open, close, or read from any layers.
The detached layer rules are initially set to exclude all layers. This
may be overridden by setting the environment variables
SDF_LAYER_INCLUDE_DETACHED and SDF_LAYER_EXCLUDE_DETACHED to specify
the initial set of include and exclude patterns in the rules. These
variables can be set to a comma-delimited list of patterns.
        SDF_LAYER_INCLUDE_DETACHED may also be set to "*" to include all
layers. Note that these environment variables only set the initial
state of the detached layer rules; these values may be overwritten by
subsequent calls to this function.
See SdfLayer::DetachedLayerRules::IsIncluded for details on how the
rules are applied to layer identifiers.
Parameters
----------
mask : DetachedLayerRules
"""
@staticmethod
def SetMuted(muted) -> None:
"""
SetMuted(muted) -> None
Mutes the current layer if ``muted`` is ``true`` , and unmutes it
otherwise.
Parameters
----------
muted : bool
"""
@staticmethod
def SetPermissionToEdit(allow) -> None:
"""
SetPermissionToEdit(allow) -> None
Sets permission to edit.
Parameters
----------
allow : bool
"""
@staticmethod
def SetPermissionToSave(allow) -> None:
"""
SetPermissionToSave(allow) -> None
Sets permission to save.
Parameters
----------
allow : bool
"""
@staticmethod
def SetTimeSample(path, time, value) -> None:
"""
SetTimeSample(path, time, value) -> None
Parameters
----------
path : Path
time : float
value : VtValue
----------------------------------------------------------------------
Parameters
----------
path : Path
time : float
value : SdfAbstractDataConstValue
----------------------------------------------------------------------
Parameters
----------
path : Path
time : float
value : T
"""
@staticmethod
def SplitIdentifier(*args, **kwargs) -> None:
"""
**classmethod** SplitIdentifier(identifier, layerPath, arguments) -> bool
Splits the given layer identifier into its constituent layer path and
arguments.
Parameters
----------
identifier : str
layerPath : str
arguments : FileFormatArguments
"""
@staticmethod
def StreamsData() -> bool:
"""
StreamsData() -> bool
Returns true if this layer streams data from its serialized data store
on demand, false otherwise.
Layers with streaming data are treated differently to avoid pulling in
data unnecessarily. For example, reloading a streaming layer will not
perform fine-grained change notification, since doing so would require
the full contents of the layer to be loaded.
"""
@staticmethod
def TransferContent(layer) -> None:
"""
TransferContent(layer) -> None
Copies the content of the given layer into this layer.
Source layer is unmodified.
Parameters
----------
layer : Layer
"""
@staticmethod
def Traverse(path, func) -> None:
"""
Traverse(path, func) -> None
Parameters
----------
path : Path
func : TraversalFunction
"""
@staticmethod
def UpdateAssetInfo() -> None:
"""
UpdateAssetInfo() -> None
Update layer asset information.
Calling this method re-resolves the layer identifier, which updates
asset information such as the layer's resolved path and other asset
info. This may be used to update the layer after external changes to
the underlying asset system.
"""
@staticmethod
def UpdateCompositionAssetDependency(oldAssetPath, newAssetPath) -> bool:
"""
UpdateCompositionAssetDependency(oldAssetPath, newAssetPath) -> bool
        Updates the asset path of a composition dependency in this layer.
        If ``newAssetPath`` is supplied, the update works as a "rename", updating
any occurrence of ``oldAssetPath`` to ``newAssetPath`` in all
reference, payload, and sublayer fields.
        If ``newAssetPath`` is not given, this update behaves as a "delete",
removing all occurrences of ``oldAssetPath`` from all reference,
payload, and sublayer fields.
Parameters
----------
oldAssetPath : str
newAssetPath : str
"""
@staticmethod
def UpdateExternalReference(oldAssetPath, newAssetPath) -> bool:
"""
UpdateExternalReference(oldAssetPath, newAssetPath) -> bool
Deprecated
Use UpdateCompositionAssetDependency instead.
Parameters
----------
oldAssetPath : str
newAssetPath : str
"""
@staticmethod
def _WriteDataFile(*args, **kwargs) -> None: ...
@property
def anonymous(self) -> None:
"""
type : bool
Returns true if this layer is an anonymous layer.
:type: None
"""
@property
def colorConfiguration(self) -> None:
"""
The color configuration asset-path of this layer.
:type: None
"""
@property
def colorManagementSystem(self) -> None:
"""
The name of the color management system used to interpret the colorConfiguration asset.
:type: None
"""
@property
def comment(self) -> None:
"""
The layer's comment string.
:type: None
"""
@property
def customLayerData(self) -> None:
"""
The customLayerData dictionary associated with this layer.
:type: None
"""
@property
def defaultPrim(self) -> None:
"""
The layer's default reference target token.
:type: None
"""
@property
def dirty(self) -> None:
"""
type : bool
Returns ``true`` if the layer is dirty, i.e.
has changed from its persistent representation.
:type: None
"""
@property
def documentation(self) -> None:
"""
The layer's documentation string.
:type: None
"""
@property
def empty(self) -> None:
"""
type : bool
Returns whether this layer has no significant data.
:type: None
"""
@property
def endTimeCode(self) -> None:
"""
The end timeCode of this layer.
The end timeCode of a layer is not a hard limit, but is
more of a hint. A layer's time-varying content is not limited to
the timeCode range of the layer.
:type: None
"""
@property
def expired(self) -> None:
"""
True if this object has expired, False otherwise.
:type: None
"""
@property
def externalReferences(self) -> None:
"""
Return the unique list of asset paths of external references for
the given layer.
:type: None
"""
@property
def fileExtension(self) -> None:
"""
The layer's file extension.
:type: None
"""
@property
def framePrecision(self) -> None:
"""
The number of digits of precision used in times in this layer.
:type: None
"""
@property
def framesPerSecond(self) -> None:
"""
The frames per second used in this layer.
:type: None
"""
@property
def hasOwnedSubLayers(self) -> None:
"""
Whether this layer's sub layers are expected to have owners.
:type: None
"""
@property
def identifier(self) -> None:
"""
The layer's identifier.
:type: None
"""
@property
def owner(self) -> None:
"""
The owner of this layer.
:type: None
"""
@property
def permissionToEdit(self) -> None:
"""
Return true if permitted to be edited (modified), false otherwise.
:type: None
"""
@property
def permissionToSave(self) -> None:
"""
Return true if permitted to be saved, false otherwise.
:type: None
"""
@property
def pseudoRoot(self) -> None:
"""
The pseudo-root of the layer.
:type: None
"""
@property
def realPath(self) -> None:
"""
The layer's resolved path.
:type: None
"""
@property
def repositoryPath(self) -> None:
"""
The layer's associated repository path
:type: None
"""
@property
def resolvedPath(self) -> None:
"""
The layer's resolved path.
:type: None
"""
@property
def rootPrimOrder(self) -> None:
"""
Get/set the list of root prim names for this layer's 'reorder rootPrims' statement.
:type: None
"""
@property
def rootPrims(self) -> None:
"""
The root prims of this layer, as an ordered dictionary.
The prims may be accessed by index or by name.
Although this property claims it is read only, you can modify the contents of this dictionary to add, remove, or reorder the contents.
:type: None
"""
@property
def sessionOwner(self) -> None:
"""
The session owner of this layer. Only intended for use with session layers.
:type: None
"""
@property
def startTimeCode(self) -> None:
"""
The start timeCode of this layer.
The start timeCode of a layer is not a hard limit, but is
more of a hint. A layer's time-varying content is not limited to
the timeCode range of the layer.
:type: None
"""
@property
def subLayerOffsets(self) -> None:
"""
The sublayer offsets of this layer, as a list. Although this property is claimed to be read only, you can modify the contents of this list by assigning new layer offsets to specific indices.
:type: None
"""
@property
def subLayerPaths(self) -> None:
"""
The sublayer paths of this layer, as a list. Although this property is claimed to be read only, you can modify the contents of this list.
:type: None
"""
@property
def timeCodesPerSecond(self) -> None:
"""
The timeCodes per second used in this layer.
:type: None
"""
@property
def version(self) -> None:
"""
The layer's version.
:type: None
"""
ColorConfigurationKey = 'colorConfiguration'
ColorManagementSystemKey = 'colorManagementSystem'
CommentKey = 'comment'
DocumentationKey = 'documentation'
EndFrameKey = 'endFrame'
EndTimeCodeKey = 'endTimeCode'
FramePrecisionKey = 'framePrecision'
FramesPerSecondKey = 'framesPerSecond'
HasOwnedSubLayers = 'hasOwnedSubLayers'
OwnerKey = 'owner'
SessionOwnerKey = 'sessionOwner'
StartFrameKey = 'startFrame'
StartTimeCodeKey = 'startTimeCode'
TimeCodesPerSecondKey = 'timeCodesPerSecond'
pass
class LayerOffset(Boost.Python.instance):
"""
Represents a time offset and scale between layers.
The SdfLayerOffset class is an affine transform, providing both a
scale and a translate. It supports vector algebra semantics for
composing SdfLayerOffsets together via multiplication. The
SdfLayerOffset class is unitless: it does not refer to seconds or
frames.
For example, suppose layer A uses layer B, with an offset of X: when
bringing animation from B into A, you first apply the scale of X, and
then the offset. Suppose you have a scale of 2 and an offset of 24:
first multiply B's frame numbers by 2, and then add 24. The animation
from B as seen in A will take twice as long and start 24 frames later.
Offsets are typically used in either sublayers or prim references. For
more information, see the SetSubLayerOffset() method of the SdfLayer
class (the subLayerOffsets property in Python), as well as the
SetReference() and GetReferenceLayerOffset() methods (the latter is
the referenceLayerOffset property in Python) of the SdfPrimSpec class.
"""
@staticmethod
def GetInverse() -> LayerOffset:
"""
GetInverse() -> LayerOffset
Gets the inverse offset, which performs the opposite transformation.
"""
@staticmethod
def IsIdentity() -> bool:
"""
IsIdentity() -> bool
Returns ``true`` if this is an identity transformation, with an offset
of 0.0 and a scale of 1.0.
"""
@property
def offset(self) -> None:
"""
type : None
Sets the time offset.
----------------------------------------------------------------------
type : float
Returns the time offset.
:type: None
"""
@property
def scale(self) -> None:
"""
type : None
Sets the time scale factor.
----------------------------------------------------------------------
type : float
Returns the time scale factor.
:type: None
"""
__instance_size__ = 32
pass
class LayerTree(Boost.Python.instance):
"""
A SdfLayerTree is an immutable tree structure representing a sublayer
stack and its recursive structure.
Layers can have sublayers, which can in turn have sublayers of their
own. Clients that want to represent that hierarchical structure in
memory can build a SdfLayerTree for that purpose.
We use TfRefPtr<SdfLayerTree> as handles to LayerTrees, as a simple
way to pass them around as immutable trees without worrying about
lifetime.
"""
@property
def childTrees(self) -> None:
"""
type : list[SdfLayerTreeHandle]
Returns the children of this tree node.
:type: None
"""
@property
def expired(self) -> None:
"""
True if this object has expired, False otherwise.
:type: None
"""
@property
def layer(self) -> None:
"""
type : Layer
Returns the layer handle this tree node represents.
:type: None
"""
@property
def offset(self) -> None:
"""
type : LayerOffset
Returns the cumulative layer offset from the root of the tree.
:type: None
"""
pass
class LengthUnit(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.LengthUnitMillimeter, Sdf.LengthUnitCentimeter, Sdf.LengthUnitDecimeter, Sdf.LengthUnitMeter, Sdf.LengthUnitKilometer, Sdf.LengthUnitInch, Sdf.LengthUnitFoot, Sdf.LengthUnitYard, Sdf.LengthUnitMile)
pass
class ListEditorProxy_SdfNameKeyPolicy(Boost.Python.instance):
@staticmethod
def Add(*args, **kwargs) -> None: ...
@staticmethod
def Append(*args, **kwargs) -> None: ...
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ClearEdits(*args, **kwargs) -> None: ...
@staticmethod
def ClearEditsAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def ContainsItemEdit(*args, **kwargs) -> None: ...
@staticmethod
def CopyItems(*args, **kwargs) -> None: ...
@staticmethod
def Erase(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def ModifyItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def Prepend(*args, **kwargs) -> None: ...
@staticmethod
def Remove(*args, **kwargs) -> None: ...
@staticmethod
def RemoveItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def ReplaceItemEdits(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExpired(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def isOrderedOnly(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
pass
class ListEditorProxy_SdfPathKeyPolicy(Boost.Python.instance):
@staticmethod
def Add(*args, **kwargs) -> None: ...
@staticmethod
def Append(*args, **kwargs) -> None: ...
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ClearEdits(*args, **kwargs) -> None: ...
@staticmethod
def ClearEditsAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def ContainsItemEdit(*args, **kwargs) -> None: ...
@staticmethod
def CopyItems(*args, **kwargs) -> None: ...
@staticmethod
def Erase(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def ModifyItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def Prepend(*args, **kwargs) -> None: ...
@staticmethod
def Remove(*args, **kwargs) -> None: ...
@staticmethod
def RemoveItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def ReplaceItemEdits(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExpired(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def isOrderedOnly(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
pass
class ListEditorProxy_SdfPayloadTypePolicy(Boost.Python.instance):
@staticmethod
def Add(*args, **kwargs) -> None: ...
@staticmethod
def Append(*args, **kwargs) -> None: ...
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ClearEdits(*args, **kwargs) -> None: ...
@staticmethod
def ClearEditsAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def ContainsItemEdit(*args, **kwargs) -> None: ...
@staticmethod
def CopyItems(*args, **kwargs) -> None: ...
@staticmethod
def Erase(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def ModifyItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def Prepend(*args, **kwargs) -> None: ...
@staticmethod
def Remove(*args, **kwargs) -> None: ...
@staticmethod
def RemoveItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def ReplaceItemEdits(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExpired(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def isOrderedOnly(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
pass
class ListEditorProxy_SdfReferenceTypePolicy(Boost.Python.instance):
@staticmethod
def Add(*args, **kwargs) -> None: ...
@staticmethod
def Append(*args, **kwargs) -> None: ...
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ClearEdits(*args, **kwargs) -> None: ...
@staticmethod
def ClearEditsAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def ContainsItemEdit(*args, **kwargs) -> None: ...
@staticmethod
def CopyItems(*args, **kwargs) -> None: ...
@staticmethod
def Erase(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def ModifyItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def Prepend(*args, **kwargs) -> None: ...
@staticmethod
def Remove(*args, **kwargs) -> None: ...
@staticmethod
def RemoveItemEdits(*args, **kwargs) -> None: ...
@staticmethod
def ReplaceItemEdits(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExpired(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def isOrderedOnly(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
pass
class ListOpType(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.ListOpTypeExplicit, Sdf.ListOpTypeAdded, Sdf.ListOpTypePrepended, Sdf.ListOpTypeAppended, Sdf.ListOpTypeDeleted, Sdf.ListOpTypeOrdered)
pass
class ListProxy_SdfNameKeyPolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class ListProxy_SdfNameTokenKeyPolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class ListProxy_SdfPathKeyPolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class ListProxy_SdfPayloadTypePolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class ListProxy_SdfReferenceTypePolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class ListProxy_SdfSubLayerTypePolicy(Boost.Python.instance):
@staticmethod
def ApplyEditsToList(*args, **kwargs) -> None: ...
@staticmethod
def ApplyList(*args, **kwargs) -> None: ...
@staticmethod
def append(*args, **kwargs) -> None: ...
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def count(*args, **kwargs) -> None: ...
@staticmethod
def index(*args, **kwargs) -> None: ...
@staticmethod
def insert(*args, **kwargs) -> None: ...
@staticmethod
def remove(*args, **kwargs) -> None: ...
@staticmethod
def replace(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class MapEditProxy_VtDictionary(Boost.Python.instance):
class MapEditProxy_VtDictionary_Iterator(Boost.Python.instance):
pass
class MapEditProxy_VtDictionary_KeyIterator(Boost.Python.instance):
pass
class MapEditProxy_VtDictionary_ValueIterator(Boost.Python.instance):
pass
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def pop(*args, **kwargs) -> None: ...
@staticmethod
def popitem(*args, **kwargs) -> None: ...
@staticmethod
def setdefault(*args, **kwargs) -> None: ...
@staticmethod
def update(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
__instance_size__ = 32
pass
class MapEditProxy_map_SdfPath_SdfPath_less_SdfPath__allocator_pair_SdfPath_const__SdfPath_____(Boost.Python.instance):
class MapEditProxy_map_SdfPath_SdfPath_less_SdfPath__allocator_pair_SdfPath_const__SdfPath______Iterator(Boost.Python.instance):
pass
class MapEditProxy_map_SdfPath_SdfPath_less_SdfPath__allocator_pair_SdfPath_const__SdfPath______KeyIterator(Boost.Python.instance):
pass
class MapEditProxy_map_SdfPath_SdfPath_less_SdfPath__allocator_pair_SdfPath_const__SdfPath______ValueIterator(Boost.Python.instance):
pass
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def pop(*args, **kwargs) -> None: ...
@staticmethod
def popitem(*args, **kwargs) -> None: ...
@staticmethod
def setdefault(*args, **kwargs) -> None: ...
@staticmethod
def update(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
__instance_size__ = 32
pass
class MapEditProxy_map_string_string_less_string__allocator_pair_stringconst__string_____(Boost.Python.instance):
class MapEditProxy_map_string_string_less_string__allocator_pair_stringconst__string______Iterator(Boost.Python.instance):
pass
class MapEditProxy_map_string_string_less_string__allocator_pair_stringconst__string______KeyIterator(Boost.Python.instance):
pass
class MapEditProxy_map_string_string_less_string__allocator_pair_stringconst__string______ValueIterator(Boost.Python.instance):
pass
@staticmethod
def clear(*args, **kwargs) -> None: ...
@staticmethod
def copy(*args, **kwargs) -> None: ...
@staticmethod
def get(*args, **kwargs) -> None: ...
@staticmethod
def items(*args, **kwargs) -> None: ...
@staticmethod
def keys(*args, **kwargs) -> None: ...
@staticmethod
def pop(*args, **kwargs) -> None: ...
@staticmethod
def popitem(*args, **kwargs) -> None: ...
@staticmethod
def setdefault(*args, **kwargs) -> None: ...
@staticmethod
def update(*args, **kwargs) -> None: ...
@staticmethod
def values(*args, **kwargs) -> None: ...
@property
def expired(self) -> None:
"""
:type: None
"""
__instance_size__ = 32
pass
class NamespaceEdit(Boost.Python.instance):
"""
A single namespace edit. It supports renaming, reparenting,
reparenting with a rename, reordering, and removal.
"""
@staticmethod
def Remove(*args, **kwargs) -> None:
"""
**classmethod** Remove(currentPath) -> This
Returns a namespace edit that removes the object at ``currentPath`` .
Parameters
----------
currentPath : Path
"""
@staticmethod
def Rename(*args, **kwargs) -> None:
"""
**classmethod** Rename(currentPath, name) -> This
Returns a namespace edit that renames the prim or property at
``currentPath`` to ``name`` .
Parameters
----------
currentPath : Path
name : str
"""
@staticmethod
def Reorder(*args, **kwargs) -> None:
"""
**classmethod** Reorder(currentPath, index) -> This
Returns a namespace edit to reorder the prim or property at
``currentPath`` to index ``index`` .
Parameters
----------
currentPath : Path
index : Index
"""
@staticmethod
def Reparent(*args, **kwargs) -> None:
"""
**classmethod** Reparent(currentPath, newParentPath, index) -> This
Returns a namespace edit to reparent the prim or property at
``currentPath`` to be under ``newParentPath`` at index ``index`` .
Parameters
----------
currentPath : Path
newParentPath : Path
index : Index
"""
@staticmethod
def ReparentAndRename(*args, **kwargs) -> None:
"""
**classmethod** ReparentAndRename(currentPath, newParentPath, name, index) -> This
Returns a namespace edit to reparent the prim or property at
``currentPath`` to be under ``newParentPath`` at index ``index`` with
the name ``name`` .
Parameters
----------
currentPath : Path
newParentPath : Path
name : str
index : Index
"""
@property
def currentPath(self) -> None:
"""
:type: None
"""
@property
def index(self) -> None:
"""
:type: None
"""
@property
def newPath(self) -> None:
"""
:type: None
"""
atEnd = -1
same = -2
pass
class NamespaceEditDetail(Boost.Python.instance):
"""
Detailed information about a namespace edit.
"""
class Result(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
"""
Validity of an edit.
"""
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = 'NamespaceEditDetail'
allValues: tuple # value = (Sdf.NamespaceEditDetail.Error, Sdf.NamespaceEditDetail.Unbatched, Sdf.NamespaceEditDetail.Okay)
pass
@property
def edit(self) -> None:
"""
:type: None
"""
@property
def reason(self) -> None:
"""
:type: None
"""
@property
def result(self) -> None:
"""
:type: None
"""
Error: pxr.Sdf.Result # value = Sdf.NamespaceEditDetail.Error
Okay: pxr.Sdf.Result # value = Sdf.NamespaceEditDetail.Okay
Unbatched: pxr.Sdf.Result # value = Sdf.NamespaceEditDetail.Unbatched
pass
class Notice(Boost.Python.instance):
"""
Wrapper class for Sdf notices.
"""
class Base(pxr.Tf.Notice, Boost.Python.instance):
pass
class LayerDidReloadContent(LayerDidReplaceContent, Base, pxr.Tf.Notice, Boost.Python.instance):
pass
class LayerDidReplaceContent(Base, pxr.Tf.Notice, Boost.Python.instance):
pass
class LayerDirtinessChanged(Base, pxr.Tf.Notice, Boost.Python.instance):
pass
class LayerIdentifierDidChange(Base, pxr.Tf.Notice, Boost.Python.instance):
@property
def newIdentifier(self) -> None:
"""
:type: None
"""
@property
def oldIdentifier(self) -> None:
"""
:type: None
"""
pass
class LayerInfoDidChange(Base, pxr.Tf.Notice, Boost.Python.instance):
@staticmethod
def key(*args, **kwargs) -> None: ...
pass
class LayerMutenessChanged(Base, pxr.Tf.Notice, Boost.Python.instance):
@property
def layerPath(self) -> None:
"""
:type: None
"""
@property
def wasMuted(self) -> None:
"""
:type: None
"""
pass
class LayersDidChange(Base, pxr.Tf.Notice, Boost.Python.instance):
@staticmethod
def GetLayers(*args, **kwargs) -> None: ...
@staticmethod
def GetSerialNumber(*args, **kwargs) -> None: ...
pass
class LayersDidChangeSentPerLayer(Base, pxr.Tf.Notice, Boost.Python.instance):
@staticmethod
def GetLayers(*args, **kwargs) -> None: ...
@staticmethod
def GetSerialNumber(*args, **kwargs) -> None: ...
pass
pass
class Path(Boost.Python.instance):
"""
A path value used to locate objects in layers or scenegraphs.
Overview
========
SdfPath is used in several ways:
- As a storage key for addressing and accessing values held in a
SdfLayer
- As a namespace identity for scenegraph objects
- As a way to refer to other scenegraph objects through relative
paths
The paths represented by an SdfPath class may be either relative or
absolute. Relative paths are relative to the prim object that contains
them (that is, if an SdfRelationshipSpec target is relative, it is
relative to the SdfPrimSpec object that owns the SdfRelationshipSpec
object).
SdfPath objects can be readily created from and converted back to
strings, but as SdfPath objects, they have behaviors that make it easy
and efficient to work with them. The SdfPath class provides a full
range of methods for manipulating scene paths by appending a namespace
child, appending a relationship target, getting the parent path, and
so on. Since the SdfPath class uses a node-based representation
internally, you should use the editing functions rather than
converting to and from strings if possible.
Path Syntax
===========
Like a filesystem path, an SdfPath is conceptually just a sequence of
path components. Unlike a filesystem path, each component has a type,
and the type is indicated by the syntax.
Two separators are used between parts of a path. A slash ("/")
following an identifier is used to introduce a namespace child. A
period (".") following an identifier is used to introduce a property.
A property may also have several non-sequential colons (':') in its
name to provide a rudimentary namespace within properties but may not
end or begin with a colon.
A leading slash in the string representation of an SdfPath object
indicates an absolute path. Two adjacent periods indicate the parent
namespace.
Brackets ("[" and "]") are used to indicate relationship target paths
for relational attributes.
The first part in a path is assumed to be a namespace child unless it
is preceded by a period. That means:
- ``/Foo`` is an absolute path specifying the root prim Foo.
- ``/Foo/Bar`` is an absolute path specifying namespace child Bar
of root prim Foo.
- ``/Foo/Bar.baz`` is an absolute path specifying property ``baz``
of namespace child Bar of root prim Foo.
- ``Foo`` is a relative path specifying namespace child Foo of the
current prim.
- ``Foo/Bar`` is a relative path specifying namespace child Bar of
namespace child Foo of the current prim.
- ``Foo/Bar.baz`` is a relative path specifying property ``baz`` of
namespace child Bar of namespace child Foo of the current prim.
- ``.foo`` is a relative path specifying the property ``foo`` of
the current prim.
- ``/Foo.bar[/Foo.baz].attrib`` is a relational attribute path. The
relationship ``/Foo.bar`` has a target ``/Foo.baz`` . There is a
relational attribute ``attrib`` on that relationship->target pair.
A Note on Thread-Safety
=======================
SdfPath is strongly thread-safe, in the sense that zero additional
synchronization is required between threads creating or using SdfPath
values. Just like TfToken, SdfPath values are immutable. Internally,
SdfPath uses a global prefix tree to efficiently share representations
of paths, and provide fast equality/hashing operations, but
modifications to this table are internally synchronized. Consequently,
as with TfToken, for best performance it is important to minimize the
number of values created (since it requires synchronized access to
this table) or copied (since it requires atomic ref-counting
operations).
"""
class AncestorsRange(Boost.Python.instance):
class _iterator(Boost.Python.instance):
pass
@staticmethod
def GetPath(*args, **kwargs) -> None: ...
__instance_size__ = 24
pass
class _IsValidPathStringResult(Boost.Python.instance):
@property
def errorMessage(self) -> None:
"""
:type: None
"""
__instance_size__ = 56
pass
@staticmethod
def AppendChild(childName) -> Path:
"""
AppendChild(childName) -> Path
Creates a path by appending an element for ``childName`` to this path.
This path must be a prim path, the AbsoluteRootPath or the
ReflexiveRelativePath.
Parameters
----------
childName : str
"""
@staticmethod
def AppendElementString(element) -> Path:
"""
AppendElementString(element) -> Path
Creates a path by extracting and appending an element from the given
ascii element encoding.
Attempting to append a root or empty path (or malformed path) or
attempting to append *to* the EmptyPath will raise an error and return
the EmptyPath.
May also fail and return EmptyPath if this path's type cannot possess
a child of the type encoded in ``element`` .
Parameters
----------
element : str
"""
@staticmethod
def AppendExpression() -> Path:
"""
AppendExpression() -> Path
Creates a path by appending an expression element.
This path must be a prim property or relational attribute path.
"""
@staticmethod
def AppendMapper(targetPath) -> Path:
"""
AppendMapper(targetPath) -> Path
Creates a path by appending a mapper element for ``targetPath`` .
This path must be a prim property or relational attribute path.
Parameters
----------
targetPath : Path
"""
@staticmethod
def AppendMapperArg(argName) -> Path:
"""
AppendMapperArg(argName) -> Path
Creates a path by appending an element for ``argName`` .
This path must be a mapper path.
Parameters
----------
argName : str
"""
@staticmethod
def AppendPath(newSuffix) -> Path:
"""
AppendPath(newSuffix) -> Path
Creates a path by appending a given relative path to this path.
If the newSuffix is a prim path, then this path must be a prim path or
a root path.
If the newSuffix is a prim property path, then this path must be a
prim path or the ReflexiveRelativePath.
Parameters
----------
newSuffix : Path
"""
@staticmethod
def AppendProperty(propName) -> Path:
"""
AppendProperty(propName) -> Path
Creates a path by appending an element for ``propName`` to this path.
This path must be a prim path or the ReflexiveRelativePath.
Parameters
----------
propName : str
"""
@staticmethod
def AppendRelationalAttribute(attrName) -> Path:
"""
AppendRelationalAttribute(attrName) -> Path
Creates a path by appending an element for ``attrName`` to this path.
This path must be a target path.
Parameters
----------
attrName : str
"""
@staticmethod
def AppendTarget(targetPath) -> Path:
"""
AppendTarget(targetPath) -> Path
Creates a path by appending an element for ``targetPath`` .
This path must be a prim property or relational attribute path.
Parameters
----------
targetPath : Path
"""
@staticmethod
def AppendVariantSelection(variantSet, variant) -> Path:
"""
AppendVariantSelection(variantSet, variant) -> Path
Creates a path by appending an element for ``variantSet`` and
``variant`` to this path.
This path must be a prim path.
Parameters
----------
variantSet : str
variant : str
"""
@staticmethod
def ContainsPrimVariantSelection() -> bool:
"""
ContainsPrimVariantSelection() -> bool
Returns whether the path or any of its parent paths identifies a
variant selection for a prim.
"""
@staticmethod
def ContainsPropertyElements() -> bool:
"""
ContainsPropertyElements() -> bool
Return true if this path contains any property elements, false
otherwise.
A false return indicates a prim-like path, specifically a root path, a
prim path, or a prim variant selection path. A true return indicates a
property-like path: a prim property path, a target path, a relational
attribute path, etc.
"""
@staticmethod
def ContainsTargetPath() -> bool:
"""
ContainsTargetPath() -> bool
Return true if this path is or has a prefix that's a target path or a
mapper path.
"""
@staticmethod
def FindLongestPrefix(*args, **kwargs) -> None: ...
@staticmethod
def FindLongestStrictPrefix(*args, **kwargs) -> None: ...
@staticmethod
def FindPrefixedRange(*args, **kwargs) -> None: ...
@staticmethod
def GetAbsoluteRootOrPrimPath() -> Path:
"""
GetAbsoluteRootOrPrimPath() -> Path
Creates a path by stripping all properties and relational attributes
from this path, leaving the path to the containing prim.
If the path is already a prim or absolute root path, the same path is
returned.
"""
@staticmethod
def GetAllTargetPathsRecursively(result) -> None:
"""
GetAllTargetPathsRecursively(result) -> None
Returns all the relationship target or connection target paths
contained in this path, and recursively all the target paths contained
in those target paths in reverse depth-first order.
For example, given the path:
'/A/B.a[/C/D.a[/E/F.a]].a[/A/B.a[/C/D.a]]' this method produces:
'/A/B.a[/C/D.a]', '/C/D.a', '/C/D.a[/E/F.a]', '/E/F.a'
Parameters
----------
result : list[SdfPath]
"""
@staticmethod
def GetAncestorsRange() -> SdfPathAncestorsRange:
"""
GetAncestorsRange() -> SdfPathAncestorsRange
Return a range for iterating over the ancestors of this path.
The range provides iteration over the prefixes of a path, ordered from
longest to shortest (the opposite of the order of the prefixes
returned by GetPrefixes).
"""
@staticmethod
def GetCommonPrefix(path) -> Path:
"""
GetCommonPrefix(path) -> Path
Returns a path with maximal length that is a prefix path of both this
path and ``path`` .
Parameters
----------
path : Path
"""
@staticmethod
def GetConciseRelativePaths(*args, **kwargs) -> None:
"""
**classmethod** GetConciseRelativePaths(paths) -> list[SdfPath]
Given some vector of paths, get a vector of concise unambiguous
relative paths.
GetConciseRelativePaths requires a vector of absolute paths. It finds
a set of relative paths such that each relative path is unique.
Parameters
----------
paths : list[SdfPath]
"""
@staticmethod
def GetParentPath() -> Path:
"""
GetParentPath() -> Path
Return the path that identifies this path's namespace parent.
For a prim path (like '/foo/bar'), return the prim's parent's path
('/foo'). For a prim property path (like '/foo/bar.property'), return
the prim's path ('/foo/bar'). For a target path
(like '/foo/bar.property[/target]') return the property path
('/foo/bar.property'). For a mapper path
(like '/foo/bar.property.mapper[/target]') return the property path
('/foo/bar.property'). For a relational attribute path
(like '/foo/bar.property[/target].relAttr') return the relationship
target's path ('/foo/bar.property[/target]'). For a prim variant
selection path (like '/foo/bar{var=sel}') return the prim path
('/foo/bar'). For a root prim path (like '/rootPrim'), return
AbsoluteRootPath() ('/'). For a single element relative prim path
(like 'relativePrim'), return ReflexiveRelativePath() ('.'). For
ReflexiveRelativePath(), return the relative parent path ('..').
Note that the parent path of a relative parent path ('..') is a
relative grandparent path ('../..'). Use caution writing loops
that walk to parent paths since relative paths have infinitely many
ancestors. To more safely traverse ancestor paths, consider iterating
over an SdfPathAncestorsRange instead, as returned by
GetAncestorsRange().
"""
@staticmethod
def GetPrefixes(*args, **kwargs) -> None:
"""
Returns the prefix paths of this path.
"""
@staticmethod
def GetPrimOrPrimVariantSelectionPath() -> Path:
"""
GetPrimOrPrimVariantSelectionPath() -> Path
Creates a path by stripping all relational attributes, targets, and
properties, leaving the nearest path for which
*IsPrimOrPrimVariantSelectionPath()* returns true.
See *GetPrimPath* also.
If the path is already a prim or a prim variant selection path, the
same path is returned.
"""
@staticmethod
def GetPrimPath() -> Path:
"""
GetPrimPath() -> Path
Creates a path by stripping all relational attributes, targets,
properties, and variant selections from the leafmost prim path,
leaving the nearest path for which *IsPrimPath()* returns true.
See *GetPrimOrPrimVariantSelectionPath* also.
If the path is already a prim path, the same path is returned.
"""
@staticmethod
def GetVariantSelection() -> tuple[str, str]:
"""
GetVariantSelection() -> tuple[str, str]
Returns the variant selection for this path, if this is a variant
selection path.
Returns a pair of empty strings if this path is not a variant
selection path.
"""
@staticmethod
def HasPrefix(prefix) -> bool:
"""
HasPrefix(prefix) -> bool
Return true if both this path and *prefix* are not the empty path and
this path has *prefix* as a prefix.
Return false otherwise.
Parameters
----------
prefix : Path
"""
@staticmethod
def IsAbsolutePath() -> bool:
"""
IsAbsolutePath() -> bool
Returns whether the path is absolute.
"""
@staticmethod
def IsAbsoluteRootOrPrimPath() -> bool:
"""
IsAbsoluteRootOrPrimPath() -> bool
Returns whether the path identifies a prim or the absolute root.
"""
@staticmethod
def IsAbsoluteRootPath() -> bool:
"""
IsAbsoluteRootPath() -> bool
Return true if this path is the AbsoluteRootPath() .
"""
@staticmethod
def IsExpressionPath() -> bool:
"""
IsExpressionPath() -> bool
Returns whether the path identifies a connection expression.
"""
@staticmethod
def IsMapperArgPath() -> bool:
"""
IsMapperArgPath() -> bool
Returns whether the path identifies a connection mapper arg.
"""
@staticmethod
def IsMapperPath() -> bool:
"""
IsMapperPath() -> bool
Returns whether the path identifies a connection mapper.
"""
@staticmethod
def IsNamespacedPropertyPath() -> bool:
"""
IsNamespacedPropertyPath() -> bool
Returns whether the path identifies a namespaced property.
A namespaced property has colon embedded in its name.
"""
@staticmethod
def IsPrimPath() -> bool:
"""
IsPrimPath() -> bool
Returns whether the path identifies a prim.
"""
@staticmethod
def IsPrimPropertyPath() -> bool:
"""
IsPrimPropertyPath() -> bool
Returns whether the path identifies a prim's property.
A relational attribute is not a prim property.
"""
@staticmethod
def IsPrimVariantSelectionPath() -> bool:
"""
IsPrimVariantSelectionPath() -> bool
Returns whether the path identifies a variant selection for a prim.
"""
@staticmethod
def IsPropertyPath() -> bool:
"""
IsPropertyPath() -> bool
Returns whether the path identifies a property.
A relational attribute is considered to be a property, so this method
will return true for relational attributes as well as properties of
prims.
"""
@staticmethod
def IsRelationalAttributePath() -> bool:
"""
IsRelationalAttributePath() -> bool
Returns whether the path identifies a relational attribute.
If this is true, IsPropertyPath() will also be true.
"""
@staticmethod
def IsRootPrimPath() -> bool:
"""
IsRootPrimPath() -> bool
Returns whether the path identifies a root prim.
The path must be absolute and have a single element (for example
``/foo`` ).
"""
@staticmethod
def IsTargetPath() -> bool:
"""
IsTargetPath() -> bool
Returns whether the path identifies a relationship or connection
target.
"""
@staticmethod
def IsValidIdentifier(*args, **kwargs) -> None:
"""
**classmethod** IsValidIdentifier(name) -> bool
Returns whether ``name`` is a legal identifier for any path component.
Parameters
----------
name : str
"""
@staticmethod
def IsValidNamespacedIdentifier(*args, **kwargs) -> None:
"""
**classmethod** IsValidNamespacedIdentifier(name) -> bool
Returns whether ``name`` is a legal namespaced identifier.
This returns ``true`` if IsValidIdentifier() does.
Parameters
----------
name : str
"""
@staticmethod
def IsValidPathString(*args, **kwargs) -> None:
"""
**classmethod** IsValidPathString(pathString, errMsg) -> bool
Return true if ``pathString`` is a valid path string, meaning that
passing the string to the *SdfPath* constructor will result in a
valid, non-empty SdfPath.
Otherwise, return false and if ``errMsg`` is not None, set the
pointed-to string to the parse error.
Parameters
----------
pathString : str
errMsg : str
"""
@staticmethod
@typing.overload
def JoinIdentifier(names) -> str:
"""
**classmethod** JoinIdentifier(names) -> str
Join ``names`` into a single identifier using the namespace delimiter.
Any empty strings present in ``names`` are ignored when joining.
Parameters
----------
names : list[str]
----------------------------------------------------------------------
Join ``names`` into a single identifier using the namespace delimiter.
Any empty strings present in ``names`` are ignored when joining.
Parameters
----------
names : list[TfToken]
----------------------------------------------------------------------
Join ``lhs`` and ``rhs`` into a single identifier using the namespace
delimiter.
Returns ``lhs`` if ``rhs`` is empty and vice versa. Returns an empty
string if both ``lhs`` and ``rhs`` are empty.
Parameters
----------
lhs : str
rhs : str
----------------------------------------------------------------------
Join ``lhs`` and ``rhs`` into a single identifier using the namespace
delimiter.
Returns ``lhs`` if ``rhs`` is empty and vice versa. Returns an empty
string if both ``lhs`` and ``rhs`` are empty.
Parameters
----------
lhs : str
rhs : str
"""
@staticmethod
@typing.overload
def JoinIdentifier(lhs, rhs) -> str: ...
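# Illustrative sketch (not part of the generated stub): joining, tokenizing, and
# stripping namespaced identifiers with the ':' delimiter.
#   Sdf.Path.JoinIdentifier(["primvars", "displayColor"])   # 'primvars:displayColor'
#   Sdf.Path.TokenizeIdentifier("primvars:displayColor")    # ['primvars', 'displayColor']
#   Sdf.Path.StripNamespace("primvars:displayColor")        # 'displayColor'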
@staticmethod
def MakeAbsolutePath(anchor) -> Path:
"""
MakeAbsolutePath(anchor) -> Path
Returns the absolute form of this path using ``anchor`` as the
relative basis.
``anchor`` must be an absolute prim path.
If this path is a relative path, resolve it using ``anchor`` as the
relative basis.
If this path is already an absolute path, just return a copy.
Parameters
----------
anchor : Path
"""
@staticmethod
def MakeRelativePath(anchor) -> Path:
"""
MakeRelativePath(anchor) -> Path
Returns the relative form of this path using ``anchor`` as the
relative basis.
``anchor`` must be an absolute prim path.
If this path is an absolute path, return the corresponding relative
path that is relative to the absolute path given by ``anchor`` .
If this path is a relative path, return the optimal relative path to
the absolute path given by ``anchor`` . (The optimal relative path
from a given prim path is the relative path with the least leading
dot-dots.)
Parameters
----------
anchor : Path
"""
@staticmethod
def RemoveAncestorPaths(*args, **kwargs) -> None:
"""
**classmethod** RemoveAncestorPaths(paths) -> None
Remove all elements of *paths* that prefix other elements in *paths*.
As a side-effect, the result is left in sorted order.
Parameters
----------
paths : list[SdfPath]
"""
@staticmethod
def RemoveCommonSuffix(otherPath, stopAtRootPrim) -> tuple[Path, Path]:
"""
RemoveCommonSuffix(otherPath, stopAtRootPrim) -> tuple[Path, Path]
Find and remove the longest common suffix from two paths.
Returns this path and ``otherPath`` with the longest common suffix
removed (first and second, respectively). If the two paths have no
common suffix then the paths are returned as-is. If the paths are
equal then this returns empty paths for relative paths and absolute
roots for absolute paths. The paths need not be the same length.
If ``stopAtRootPrim`` is ``true`` then neither returned path will be
the root path. That, in turn, means that some common suffixes will not
be removed. For example, if ``stopAtRootPrim`` is ``true`` then the
paths /A/B and /B will be returned as is. Were it ``false`` then the
result would be /A and /. Similarly paths /A/B/C and /B/C would return
/A/B and /B if ``stopAtRootPrim`` is ``true`` but /A and / if it's
``false`` .
Parameters
----------
otherPath : Path
stopAtRootPrim : bool
"""
@staticmethod
def RemoveDescendentPaths(*args, **kwargs) -> None:
"""
**classmethod** RemoveDescendentPaths(paths) -> None
Remove all elements of *paths* that are prefixed by other elements in
*paths*.
As a side-effect, the result is left in sorted order.
Parameters
----------
paths : list[SdfPath]
"""
@staticmethod
def ReplaceName(newName) -> Path:
"""
ReplaceName(newName) -> Path
Return a copy of this path with its final component changed to
*newName*.
This path must be a prim or property path.
This method is shorthand for path.GetParentPath().AppendChild(newName)
for prim paths, path.GetParentPath().AppendProperty(newName) for prim
property paths, and
path.GetParentPath().AppendRelationalAttribute(newName) for relational
attribute paths.
Note that only the final path component is ever changed. If the name
of the final path component appears elsewhere in the path, it will not
be modified.
Some examples:
Parameters
----------
newName : str
"""
@staticmethod
def ReplacePrefix(oldPrefix, newPrefix, fixTargetPaths) -> Path:
"""
ReplacePrefix(oldPrefix, newPrefix, fixTargetPaths) -> Path
Returns a path with all occurrences of the prefix path ``oldPrefix``
replaced with the prefix path ``newPrefix`` .
If fixTargetPaths is true, any embedded target paths will also have
their paths replaced. This is the default.
If this is not a target, relational attribute, or mapper path, this
will do zero or one path prefix replacements; otherwise the number of
replacements can be greater than one.
Parameters
----------
oldPrefix : Path
newPrefix : Path
fixTargetPaths : bool
"""
@staticmethod
def ReplaceTargetPath(newTargetPath) -> Path:
"""
ReplaceTargetPath(newTargetPath) -> Path
Replaces the relational attribute's target path.
The path must be a relational attribute path.
Parameters
----------
newTargetPath : Path
"""
@staticmethod
def StripAllVariantSelections() -> Path:
"""
StripAllVariantSelections() -> Path
Create a path by stripping all variant selections from all components
of this path, leaving a path with no embedded variant selections.
"""
@staticmethod
def StripNamespace(name) -> str:
"""
**classmethod** StripNamespace(name) -> str
Returns ``name`` stripped of any namespaces.
This does not check the validity of the name; it just attempts to
remove anything that looks like a namespace.
Parameters
----------
name : str
----------------------------------------------------------------------
Returns ``name`` stripped of any namespaces.
This does not check the validity of the name; it just attempts to
remove anything that looks like a namespace.
Parameters
----------
name : str
"""
@staticmethod
def StripPrefixNamespace(*args, **kwargs) -> None:
"""
**classmethod** StripPrefixNamespace(name, matchNamespace) -> tuple[str, bool]
Returns ( ``name`` , ``true`` ) where ``name`` is stripped of the
prefix specified by ``matchNamespace`` if ``name`` indeed starts with
``matchNamespace`` .
Returns ( ``name`` , ``false`` ) otherwise, with ``name`` unmodified.
This function handles both the case where ``matchNamespace`` contains
the trailing namespace delimiter ':' and the case where it does not.
Parameters
----------
name : str
matchNamespace : str
"""
@staticmethod
def TokenizeIdentifier(*args, **kwargs) -> None:
"""
**classmethod** TokenizeIdentifier(name) -> list[str]
Tokenizes ``name`` by the namespace delimiter.
Returns the empty vector if ``name`` is not a valid namespaced
identifier.
Parameters
----------
name : str
"""
@property
def elementString(self) -> None:
"""
The string representation of the terminal component of this path.
This path can be reconstructed via
thisPath.GetParentPath().AppendElementString(thisPath.element).
None of absoluteRootPath, reflexiveRelativePath, nor emptyPath
possess the above quality; their .elementString is the empty string.
:type: None
"""
@property
def isEmpty(self) -> None:
"""
type : bool
Returns true if this is the empty path ( SdfPath::EmptyPath() ).
:type: None
"""
@property
def name(self) -> None:
"""
The name of the prim, property or relational
attribute identified by the path.
'' for EmptyPath. '.' for ReflexiveRelativePath.
'..' for a path ending in ParentPathElement.
:type: None
"""
@property
def pathElementCount(self) -> None:
"""
The number of path elements in this path.
:type: None
"""
@property
def pathString(self) -> None:
"""
The string representation of this path.
:type: None
"""
@property
def targetPath(self) -> None:
"""
The relational attribute target path for this path.
EmptyPath if this is not a relational attribute path.
:type: None
"""
__instance_size__ = 24
absoluteIndicator = '/'
absoluteRootPath: pxr.Sdf.Path # value = Sdf.Path('/')
childDelimiter = '/'
emptyPath: pxr.Sdf.Path # value = Sdf.Path.emptyPath
expressionIndicator = 'expression'
mapperArgDelimiter = '.'
mapperIndicator = 'mapper'
namespaceDelimiter = ':'
parentPathElement = '..'
propertyDelimiter = '.'
reflexiveRelativePath: pxr.Sdf.Path # value = Sdf.Path('.')
relationshipTargetEnd = ']'
relationshipTargetStart = '['
pass
class PathArray(Boost.Python.instance):
"""
An array of type SdfPath.
"""
_isVtArray = True
pass
class PathListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class Payload(Boost.Python.instance):
"""
Represents a payload and all its meta data.
A payload represents a prim reference to an external layer. A payload
is similar to a prim reference (see SdfReference) with the major
difference that payloads are explicitly loaded by the user.
Unloaded payloads represent a boundary that lazy composition and
system behaviors will not traverse across, providing a user-visible
way to manage the working set of the scene.
"""
@property
def assetPath(self) -> None:
"""
type : None
Sets a new asset path for the layer the payload uses.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
payload's asset path to the empty asset path.
----------------------------------------------------------------------
type : str
Returns the asset path of the layer that the payload uses.
:type: None
"""
@property
def layerOffset(self) -> None:
"""
type : None
Sets a new layer offset.
----------------------------------------------------------------------
type : LayerOffset
Returns the layer offset associated with the payload.
:type: None
"""
@property
def primPath(self) -> None:
"""
type : None
Sets a new prim path for the prim that the payload uses.
----------------------------------------------------------------------
type : Path
Returns the scene path of the prim for the payload.
:type: None
"""
__instance_size__ = 72
pass
class PayloadListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class Permission(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.PermissionPublic, Sdf.PermissionPrivate)
pass
class PrimSpec(Spec, Boost.Python.instance):
"""
Represents a prim description in an SdfLayer object.
Every SdfPrimSpec object is defined in a layer. It is identified by
its path (SdfPath class) in the namespace hierarchy of its layer.
SdfPrimSpecs can be created using the New() method as children of
either the containing SdfLayer itself (for"root level"prims), or as
children of other SdfPrimSpec objects to extend a hierarchy. The
helper function SdfCreatePrimInLayer() can be used to quickly create a
hierarchy of primSpecs.
SdfPrimSpec objects have properties of two general types: attributes
(containing values) and relationships (different types of connections
to other prims and attributes). Attributes are represented by the
SdfAttributeSpec class and relationships by the SdfRelationshipSpec
class. Each prim has its own namespace of properties. Properties are
stored and accessed by their name.
SdfPrimSpec objects have a typeName, permission restriction, and they
reference and inherit prim paths. Permission restrictions control
which other layers may refer to, or express opinions about a prim. See
the SdfPermission class for more information.
- Insert doc about references and inherits here.
- Should have validate\.\.\. methods for name, children,
properties
"""
@staticmethod
def ApplyNameChildrenOrder(vec) -> None:
"""
ApplyNameChildrenOrder(vec) -> None
Reorders the given list of child names according to the reorder
nameChildren statement for this prim.
This routine employs the standard list editing operation for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
@staticmethod
def ApplyPropertyOrder(vec) -> None:
"""
ApplyPropertyOrder(vec) -> None
Reorders the given list of property names according to the reorder
properties statement for this prim.
This routine employs the standard list editing operation for ordered
items in a ListEditor.
Parameters
----------
vec : list[str]
"""
@staticmethod
def BlockVariantSelection(variantSetName) -> None:
"""
BlockVariantSelection(variantSetName) -> None
Blocks the variant selected for the given variant set by setting the
variant selection to empty.
Parameters
----------
variantSetName : str
"""
@staticmethod
def CanSetName(newName, whyNot) -> bool:
"""
CanSetName(newName, whyNot) -> bool
Returns true if setting the prim spec's name to ``newName`` will
succeed.
Returns false if it won't, and sets ``whyNot`` with a string
describing why not.
Parameters
----------
newName : str
whyNot : str
"""
@staticmethod
def ClearActive() -> None:
"""
ClearActive() -> None
Removes the active opinion in this prim spec if there is one.
"""
@staticmethod
def ClearInstanceable() -> None:
"""
ClearInstanceable() -> None
Clears the value for the prim's instanceable flag.
"""
@staticmethod
def ClearKind() -> None:
"""
ClearKind() -> None
Remove the kind opinion from this prim spec if there is one.
"""
@staticmethod
def ClearPayloadList(*args, **kwargs) -> None:
"""
Clears the payloads for this prim.
"""
@staticmethod
def ClearReferenceList(*args, **kwargs) -> None:
"""
Clears the references for this prim.
"""
@staticmethod
def GetAttributeAtPath(path) -> AttributeSpec:
"""
GetAttributeAtPath(path) -> AttributeSpec
Returns an attribute given its ``path`` .
Returns invalid handle if there is no attribute at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
@staticmethod
def GetObjectAtPath(path) -> object:
"""
path: Path
Returns a prim or property given its namespace path.
If path is relative then it will be interpreted as relative to this prim. If it is absolute then it will be interpreted as absolute in this prim's layer. The return type can be either PrimSpecPtr or PropertySpecPtr.
"""
@staticmethod
def GetPrimAtPath(path) -> PrimSpec:
"""
GetPrimAtPath(path) -> PrimSpec
Returns a prim given its ``path`` .
Returns invalid handle if there is no prim at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
@staticmethod
def GetPropertyAtPath(path) -> PropertySpec:
"""
GetPropertyAtPath(path) -> PropertySpec
Returns a property given its ``path`` .
Returns invalid handle if there is no property at ``path`` . This is
simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
@staticmethod
def GetRelationshipAtPath(path) -> RelationshipSpec:
"""
GetRelationshipAtPath(path) -> RelationshipSpec
Returns a relationship given its ``path`` .
Returns invalid handle if there is no relationship at ``path`` . This
is simply a more specifically typed version of GetObjectAtPath.
Parameters
----------
path : Path
"""
@staticmethod
def GetVariantNames(name) -> list[str]:
"""
GetVariantNames(name) -> list[str]
Returns list of variant names for the given variant set.
Parameters
----------
name : str
"""
@staticmethod
def HasActive() -> bool:
"""
HasActive() -> bool
Returns true if this prim spec has an opinion about active.
"""
@staticmethod
def HasInstanceable() -> bool:
"""
HasInstanceable() -> bool
Returns true if this prim spec has a value authored for its
instanceable flag, false otherwise.
"""
@staticmethod
def HasKind() -> bool:
"""
HasKind() -> bool
Returns true if this prim spec has an opinion about kind.
"""
@staticmethod
def RemoveProperty(property) -> None:
"""
RemoveProperty(property) -> None
Removes the property.
Parameters
----------
property : PropertySpec
"""
@property
def active(self) -> None:
"""
Whether this prim spec is active.
The default value is true.
:type: None
"""
@property
def assetInfo(self) -> None:
"""
Returns the asset info dictionary for this prim.
The default value is an empty dictionary.
The asset info dictionary is used to annotate prims representing the root-prims of assets (generally organized as models) with various data related to asset management. For example, asset name, root layer identifier, asset version etc.
:type: None
"""
@property
def attributes(self) -> None:
"""
The attributes of this prim, as an ordered dictionary.
:type: None
"""
@property
def comment(self) -> None:
"""
The prim's comment string.
:type: None
"""
@property
def customData(self) -> None:
"""
The custom data for this prim.
The default value for custom data is an empty dictionary.
Custom data is for use by plugins or other non-tools supplied
extensions that need to be able to store data attached to arbitrary
scene objects. Note that if the only objects you want to store data
on are prims, using custom attributes is probably a better choice.
But if you need to possibly store this data on attributes or
relationships or as annotations on reference arcs, then custom data
is an appropriate choice.
:type: None
"""
@property
def documentation(self) -> None:
"""
The prim's documentation string.
:type: None
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def hasPayloads(self) -> None:
"""
Returns true if this prim has payloads set.
:type: None
"""
@property
def hasReferences(self) -> None:
"""
Returns true if this prim has references set.
:type: None
"""
@property
def hidden(self) -> None:
"""
Whether this prim spec will be hidden in browsers.
The default value is false.
:type: None
"""
@property
def inheritPathList(self) -> None:
"""
A PathListEditor for the prim's inherit paths.
The list of the inherit paths for this prim may be modified with this PathListEditor.
A PathListEditor may express a list either as an explicit value or as a set of list editing operations. See PathListEditor for more information.
:type: None
"""
@property
def instanceable(self) -> None:
"""
Whether this prim spec is flagged as instanceable.
The default value is false.
:type: None
"""
@property
def kind(self) -> None:
"""
What kind of model this prim spec represents, if any.
The default is an empty string.
:type: None
"""
@property
def name(self) -> None:
"""
The prim's name.
:type: None
"""
@property
def nameChildren(self) -> None:
"""
The prim name children of this prim, as an ordered dictionary.
Note that although this property is described as being read-only, you can modify the contents to add, remove, or reorder children.
:type: None
"""
@property
def nameChildrenOrder(self) -> None:
"""
Get/set the list of child names for this prim's 'reorder nameChildren' statement.
:type: None
"""
@property
def nameParent(self) -> None:
"""
The name parent of this prim.
:type: None
"""
@property
def nameRoot(self) -> None:
"""
The name pseudo-root of this prim.
:type: None
"""
@property
def payloadList(self) -> None:
"""
A PayloadListEditor for the prim's payloads.
The list of the payloads for this prim may be modified with this PayloadListEditor.
A PayloadListEditor may express a list either as an explicit value or as a set of list editing operations. See PayloadListEditor for more information.
:type: None
"""
@property
def permission(self) -> None:
"""
The prim's permission restriction.
The default value is SdfPermissionPublic.
:type: None
"""
@property
def prefix(self) -> None:
"""
The prim's prefix.
:type: None
"""
@property
def prefixSubstitutions(self) -> None:
"""
Dictionary of prefix substitutions.
:type: None
"""
@property
def properties(self) -> None:
"""
The properties of this prim, as an ordered dictionary.
Note that although this property is described as being read-only, you can modify the contents to add, remove, or reorder properties.
:type: None
"""
@property
def propertyOrder(self) -> None:
"""
Get/set the list of property names for this prim's 'reorder properties' statement.
:type: None
"""
@property
def realNameParent(self) -> None:
"""
The name parent of this prim.
:type: None
"""
@property
def referenceList(self) -> None:
"""
A ReferenceListEditor for the prim's references.
The list of the references for this prim may be modified with this ReferenceListEditor.
A ReferenceListEditor may express a list either as an explicit value or as a set of list editing operations. See ReferenceListEditor for more information.
:type: None
"""
@property
def relationships(self) -> None:
"""
The relationships of this prim, as an ordered dictionary.
:type: None
"""
@property
def relocates(self) -> None:
"""
An editing proxy for the prim's map of relocation paths.
The map of source-to-target paths specifying namespace relocation may be set or cleared whole, or individual map entries may be added, removed, or edited.
:type: None
"""
@property
def specializesList(self) -> None:
"""
A PathListEditor for the prim's specializes.
The list of the specializes for this prim may be modified with this PathListEditor.
A PathListEditor may express a list either as an explicit value or as a set of list editing operations. See PathListEditor for more information.
:type: None
"""
@property
def specifier(self) -> None:
"""
The prim's specifier (SpecifierDef or SpecifierOver).
The default value is SpecifierOver.
:type: None
"""
@property
def suffix(self) -> None:
"""
The prim's suffix.
:type: None
"""
@property
def suffixSubstitutions(self) -> None:
"""
Dictionary of suffix substitutions.
:type: None
"""
@property
def symmetricPeer(self) -> None:
"""
The prim's symmetric peer.
:type: None
"""
@property
def symmetryArguments(self) -> None:
"""
Dictionary with prim symmetry arguments.
Although this property is marked read-only, you can modify the contents to add, change, and clear symmetry arguments.
:type: None
"""
@property
def symmetryFunction(self) -> None:
"""
The prim's symmetry function.
:type: None
"""
@property
def typeName(self) -> None:
"""
The type of this prim.
:type: None
"""
@property
def variantSelections(self) -> None:
"""
Dictionary whose keys are variant set names and whose values are the variants chosen for each set.
Although this property is marked read-only, you can modify the contents to add, change, and clear variants.
:type: None
"""
@property
def variantSetNameList(self) -> None:
"""
A StringListEditor for the names of the variant
sets for this prim.
The list of the names of the variants sets of this prim may be
modified with this StringListEditor.
A StringListEditor may express a list either as an explicit value or as a set of list editing operations. See StringListEditor for more information.
Although this property is marked as read-only, the returned object is modifiable.
:type: None
"""
@property
def variantSets(self) -> None:
"""
The VariantSetSpecs for this prim indexed by name.
Although this property is marked as read-only, you can
modify the contents to remove variant sets. New variant sets
are created by creating them with the prim as the owner.
Although this property is marked as read-only, the returned object
is modifiable.
:type: None
"""
ActiveKey = 'active'
AnyTypeToken = '__AnyType__'
CommentKey = 'comment'
CustomDataKey = 'customData'
DocumentationKey = 'documentation'
HiddenKey = 'hidden'
InheritPathsKey = 'inheritPaths'
KindKey = 'kind'
PayloadKey = 'payload'
PermissionKey = 'permission'
PrefixKey = 'prefix'
PrefixSubstitutionsKey = 'prefixSubstitutions'
PrimOrderKey = 'primOrder'
PropertyOrderKey = 'propertyOrder'
ReferencesKey = 'references'
RelocatesKey = 'relocates'
SpecializesKey = 'specializes'
SpecifierKey = 'specifier'
SymmetricPeerKey = 'symmetricPeer'
SymmetryArgumentsKey = 'symmetryArguments'
SymmetryFunctionKey = 'symmetryFunction'
TypeNameKey = 'typeName'
VariantSelectionKey = 'variantSelection'
VariantSetNamesKey = 'variantSetNames'
pass
class PropertySpec(Spec, Boost.Python.instance):
"""
Base class for SdfAttributeSpec and SdfRelationshipSpec.
Scene Spec Attributes (SdfAttributeSpec) and Relationships
(SdfRelationshipSpec) are the basic properties that make up Scene Spec
Prims (SdfPrimSpec). They share many qualities and can sometimes be
treated uniformly. The common qualities are provided by this base
class.
NOTE: Do not use Python reserved words and keywords as attribute
names. This will cause attribute resolution to fail.
"""
@staticmethod
def ClearDefaultValue() -> None:
"""
ClearDefaultValue() -> None
Clear the attribute's default value.
"""
@staticmethod
def HasDefaultValue() -> bool:
"""
HasDefaultValue() -> bool
Returns true if a default value is set for this attribute.
"""
@property
def assetInfo(self) -> None:
"""
Returns the asset info dictionary for this property.
The default value is an empty dictionary.
The asset info dictionary is used to annotate SdfAssetPath-valued attributes pointing to the root-prims of assets (generally organized as models) with various data related to asset management. For example, asset name, root layer identifier, asset version etc.
Note: It is only valid to author assetInfo on attributes that are of type SdfAssetPath.
:type: None
"""
@property
def comment(self) -> None:
"""
A comment describing the property.
:type: None
"""
@property
def custom(self) -> None:
"""
Whether this property spec declares a custom attribute.
:type: None
"""
@property
def customData(self) -> None:
"""
The property's custom data.
The default value for custom data is an empty dictionary.
Custom data is for use by plugins or other non-tools supplied
extensions that need to be able to store data attached to arbitrary
scene objects. Note that if the only objects you want to store data
on are prims, using custom attributes is probably a better choice.
But if you need to possibly store this data on attributes or
relationships or as annotations on reference arcs, then custom data
is an appropriate choice.
:type: None
"""
@property
def default(self) -> None:
"""
The default value of this property.
:type: None
"""
@property
def displayGroup(self) -> None:
"""
DisplayGroup for the property.
:type: None
"""
@property
def displayName(self) -> None:
"""
DisplayName for the property.
:type: None
"""
@property
def documentation(self) -> None:
"""
Documentation for the property.
:type: None
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def hasOnlyRequiredFields(self) -> None:
"""
Indicates whether this spec has any significant data other
than just what is necessary for instantiation.
This is a less strict version of isInert, returning True if
the spec contains as much as the type and name.
:type: None
"""
@property
def hidden(self) -> None:
"""
Whether this property will be hidden in browsers.
:type: None
"""
@property
def name(self) -> None:
"""
The name of the property.
:type: None
"""
@property
def owner(self) -> None:
"""
The owner of this property. Either a relationship or a prim.
:type: None
"""
@property
def permission(self) -> None:
"""
The property's permission restriction.
:type: None
"""
@property
def prefix(self) -> None:
"""
Prefix for the property.
:type: None
"""
@property
def symmetricPeer(self) -> None:
"""
The property's symmetric peer.
:type: None
"""
@property
def symmetryArguments(self) -> None:
"""
Dictionary with property symmetry arguments.
Although this property is marked read-only, you can modify the contents to add, change, and clear symmetry arguments.
:type: None
"""
@property
def symmetryFunction(self) -> None:
"""
The property's symmetry function.
:type: None
"""
@property
def variability(self) -> None:
"""
Returns the variability of the property.
An attribute's variability may be Varying
Uniform, Config or Computed.
For an attribute, the default is Varying, for a relationship the default is Uniform.
Varying relationships may be directly authored, 'animating' target paths over time.
Varying attributes may be directly authored, animated and
affected on by Actions. They are the most flexible.
Uniform attributes may be authored only with non-animated values
(default values). They cannot be affected by Actions, but they
can be connected to other Uniform attributes.
Config attributes are the same as Uniform except that a Prim
can choose to alter its collection of built-in properties based
on the values of its Config attributes.
Computed attributes may not be authored in scene description.
Prims determine the values of their Computed attributes through
Prim-specific computation. They may not be connected.
:type: None
"""
AssetInfoKey = 'assetInfo'
CommentKey = 'comment'
CustomDataKey = 'customData'
CustomKey = 'custom'
DisplayGroupKey = 'displayGroup'
DisplayNameKey = 'displayName'
DocumentationKey = 'documentation'
HiddenKey = 'hidden'
PermissionKey = 'permission'
PrefixKey = 'prefix'
SymmetricPeerKey = 'symmetricPeer'
SymmetryArgumentsKey = 'symmetryArguments'
SymmetryFunctionKey = 'symmetryFunction'
pass
class PseudoRootSpec(PrimSpec, Spec, Boost.Python.instance):
@property
def expired(self) -> None:
"""
:type: None
"""
pass
class Reference(Boost.Python.instance):
"""
Represents a reference and all its meta data.
A reference is expressed on a prim in a given layer and it identifies
a prim in a layer stack. All opinions in the namespace hierarchy under
the referenced prim will be composed with the opinions in the
namespace hierarchy under the referencing prim.
The asset path specifies the layer stack being referenced. If this
asset path is non-empty, this reference is considered
an "external" reference to the layer stack rooted at the specified
layer. If this is empty, this reference is considered
an "internal" reference to the layer stack containing (but not
necessarily rooted at) the layer where the reference is authored.
The prim path specifies the prim in the referenced layer stack from
which opinions will be composed. If this prim path is empty, it will
be considered a reference to the default prim specified in the root
layer of the referenced layer stack; see SdfLayer::GetDefaultPrim.
The meta data for a reference is its layer offset and custom data. The
layer offset is an affine transformation applied to all anim splines
in the referenced prim's namespace hierarchy, see SdfLayerOffset for
details. Custom data is for use by plugins or other non-tools supplied
extensions that need to be able to store data associated with
references.
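An illustrative sketch (added here for clarity; the asset path and prim
paths are hypothetical, and the list-proxy usage is one common pattern):
.. code-block:: python
from pxr import Sdf
layer = Sdf.Layer.CreateAnonymous()
prim = Sdf.CreatePrimInLayer(layer, "/World/Asset")
# External reference: non-empty asset path plus an optional prim path.
ref = Sdf.Reference("shot_assets.usda", "/Model")
prim.referenceList.prependedItems.append(ref)
# Internal reference: empty asset path, prim path within this layer stack.
prim.referenceList.prependedItems.append(Sdf.Reference("", "/World/Proto"))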
"""
@staticmethod
def IsInternal() -> bool:
"""
IsInternal() -> bool
Returns ``true`` in the case of an internal reference.
An internal reference is a reference with an empty asset path.
"""
@property
def assetPath(self) -> None:
"""
type : None
Sets the asset path for the root layer of the referenced layer stack.
This may be set to an empty string to specify an internal reference.
See SdfAssetPath for what characters are valid in ``assetPath`` . If
``assetPath`` contains invalid characters, issue an error and set this
reference's asset path to the empty asset path.
----------------------------------------------------------------------
type : str
Returns the asset path to the root layer of the referenced layer
stack.
This will be empty in the case of an internal reference.
:type: None
"""
@property
def customData(self) -> None:
"""
type : None
Sets the custom data associated with the reference.
----------------------------------------------------------------------
type : None
Sets a custom data entry for the reference.
If *value* is empty, then this removes the given custom data entry.
----------------------------------------------------------------------
type : VtDictionary
Returns the custom data associated with the reference.
:type: None
"""
@property
def layerOffset(self) -> None:
"""
type : None
Sets a new layer offset.
----------------------------------------------------------------------
type : LayerOffset
Returns the layer offset associated with the reference.
:type: None
"""
@property
def primPath(self) -> None:
"""
type : None
Sets the path of the referenced prim.
This may be set to an empty path to specify a reference to the default
prim in the referenced layer stack.
----------------------------------------------------------------------
type : Path
Returns the path of the referenced prim.
This will be empty if the referenced prim is the default prim
specified in the referenced layer stack.
:type: None
"""
__instance_size__ = 80
pass
class ReferenceListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class RelationshipSpec(PropertySpec, Spec, Boost.Python.instance):
"""
A property that contains a reference to one or more SdfPrimSpec
instances.
A relationship may refer to one or more target prims or attributes.
All targets of a single relationship are considered to be playing the
same role. Note that ``role`` does not imply that the target prims or
attributes are of the same ``type`` .
Relationships may be annotated with relational attributes. Relational
attributes are named SdfAttributeSpec objects containing values that
describe the relationship. For example, point weights are commonly
expressed as relational attributes.
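An illustrative sketch (added here for clarity; the relationship name and
target path are hypothetical) of authoring a relationship and editing its
target paths through targetPathList:
.. code-block:: python
from pxr import Sdf
layer = Sdf.Layer.CreateAnonymous()
prim = Sdf.CreatePrimInLayer(layer, "/World/Geom")
# Create a non-custom relationship on the prim.
rel = Sdf.RelationshipSpec(prim, "material:binding", False)
rel.targetPathList.explicitItems.append(Sdf.Path("/World/Looks/Red"))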
"""
@staticmethod
def RemoveTargetPath(path, preserveTargetOrder) -> None:
"""
RemoveTargetPath(path, preserveTargetOrder) -> None
Removes the specified target path.
Removes the given target path and any relational attributes for the
given target path. If ``preserveTargetOrder`` is ``true`` , Erase() is
called on the list editor instead of RemoveItemEdits(). This preserves
the ordered items list.
Parameters
----------
path : Path
preserveTargetOrder : bool
"""
@staticmethod
def ReplaceTargetPath(oldPath, newPath) -> None:
"""
ReplaceTargetPath(oldPath, newPath) -> None
Updates the specified target path.
Replaces the path given by ``oldPath`` with the one specified by
``newPath`` . Relational attributes are updated if necessary.
Parameters
----------
oldPath : Path
newPath : Path
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def noLoadHint(self) -> None:
"""
whether the target must be loaded to load the prim this
relationship is attached to.
:type: None
"""
@property
def targetPathList(self) -> None:
"""
A PathListEditor for the relationship's target paths.
The list of the target paths for this relationship may be
modified with this PathListEditor.
A PathListEditor may express a list either as an explicit
value or as a set of list editing operations. See PathListEditor
for more information.
:type: None
"""
TargetsKey = 'targetPaths'
pass
class Spec(Boost.Python.instance):
"""
Base class for all Sdf spec classes.
"""
@staticmethod
def ClearInfo(*args, **kwargs) -> None:
"""
key : string
Clears the value for the scene spec info with the given key. After calling this, HasInfo() will return false. To make HasInfo() return true, set a value for that scene spec info.
"""
@staticmethod
def GetAsText(*args, **kwargs) -> None: ...
@staticmethod
def GetFallbackForInfo(*args, **kwargs) -> None:
"""
key : string
Returns the fallback value for the given key.
"""
@staticmethod
def GetInfo(key) -> VtValue:
"""
GetInfo(key) -> VtValue
Gets the value for the given metadata key.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
Parameters
----------
key : str
"""
@staticmethod
def GetMetaDataDisplayGroup(key) -> str:
"""
GetMetaDataDisplayGroup(key) -> str
Returns this metadata key's displayGroup.
Parameters
----------
key : str
"""
@staticmethod
def GetMetaDataInfoKeys() -> list[str]:
"""
GetMetaDataInfoKeys() -> list[str]
Returns the list of metadata info keys for this object.
This is not the complete list of keys, it is only those that should be
considered to be metadata by inspectors or other presentation UI.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
"""
@staticmethod
def GetTypeForInfo(*args, **kwargs) -> None:
"""
key : string
Returns the type of value for the given key.
"""
@staticmethod
def HasInfo(key) -> bool:
"""
key : string
Returns whether there is a setting for the scene spec info with the given key.
When asked for a value for one of its scene spec info, a valid value will always be returned. But if this API returns false for a scene spec info, the value of that info will be the defined default value.
(XXX: This may change such that it is an error to ask for a value when there is none).
When dealing with a composedLayer, it is not necessary to worry about whether a scene spec info 'has a value' because the composed layer will always have a valid value, even if it is the default.
A spec may or may not have an expressed value for some of its scene spec info.
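A small usage sketch (added here for clarity; "kind" is just an example of
a registered info key, and value conversion from Python is assumed):
.. code-block:: python
from pxr import Sdf
layer = Sdf.Layer.CreateAnonymous()
spec = Sdf.CreatePrimInLayer(layer, "/World")
if not spec.HasInfo("kind"):
    spec.SetInfo("kind", "component")
print(spec.GetInfo("kind"))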
"""
@staticmethod
def IsInert(*args, **kwargs) -> None:
"""
Indicates whether this spec has any significant data. If ignoreChildren is true, child scenegraph objects will be ignored.
"""
@staticmethod
def ListInfoKeys() -> list[str]:
"""
ListInfoKeys() -> list[str]
Returns the full list of info keys currently set on this object.
This does not include fields that represent names of children.
"""
@staticmethod
def SetInfo(key, value) -> None:
"""
SetInfo(key, value) -> None
Sets the value for the given metadata key.
It is an error to pass a value that is not the correct type for that
given key.
This is interim API which is likely to change. Only editors with an
immediate specific need (like the Inspector) should use this API.
Parameters
----------
key : str
value : VtValue
"""
@staticmethod
def SetInfoDictionaryValue(dictionaryKey, entryKey, value) -> None:
"""
SetInfoDictionaryValue(dictionaryKey, entryKey, value) -> None
Sets the value for ``entryKey`` to ``value`` within the dictionary
with the given metadata key ``dictionaryKey`` .
Parameters
----------
dictionaryKey : str
entryKey : str
value : VtValue
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def isInert(self) -> None:
"""
Indicates whether this spec has any significant data. This is for backwards compatibility, use IsInert instead.
Compatibility note: prior to presto 1.9, isInert (then isEmpty) was true for otherwise inert PrimSpecs with inert inherits, references, or variant sets. isInert is now false in such conditions.
:type: None
"""
@property
def layer(self) -> None:
"""
The owning layer.
:type: None
"""
@property
def path(self) -> None:
"""
The absolute scene path.
:type: None
"""
pass
class SpecType(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.SpecTypeUnknown, Sdf.SpecTypeAttribute, Sdf.SpecTypeConnection, Sdf.SpecTypeExpression, Sdf.SpecTypeMapper, Sdf.SpecTypeMapperArg, Sdf.SpecTypePrim, Sdf.SpecTypePseudoRoot, Sdf.SpecTypeRelationship, Sdf.SpecTypeRelationshipTarget, Sdf.SpecTypeVariant, Sdf.SpecTypeVariantSet)
pass
class Specifier(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.SpecifierDef, Sdf.SpecifierOver, Sdf.SpecifierClass)
pass
class StringListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class TimeCode(Boost.Python.instance):
"""
Value type that represents a time code. It's equivalent to a double
type value but is used to indicate that this value should be resolved
by any time based value resolution.
"""
@staticmethod
def GetValue() -> float:
"""
GetValue() -> float
Return the time value.
"""
__instance_size__ = 24
pass
class TimeCodeArray(Boost.Python.instance):
"""
An array of type SdfTimeCode.
"""
_isVtArray = True
pass
class TokenListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class UInt64ListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class UIntListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class UnregisteredValue(Boost.Python.instance):
"""
Stores a representation of the value for an unregistered metadata
field encountered during text layer parsing.
This provides the ability to serialize this data to a layer, as well
as limited inspection and editing capabilities (e.g., moving this data
to a different spec or field) even when the data type of the value
isn't known.
"""
@property
def value(self) -> None:
"""
type : VtValue
Returns the wrapped VtValue specified in the constructor.
:type: None
"""
__instance_size__ = 32
pass
class UnregisteredValueListOp(Boost.Python.instance):
@staticmethod
def ApplyOperations(*args, **kwargs) -> None: ...
@staticmethod
def Clear(*args, **kwargs) -> None: ...
@staticmethod
def ClearAndMakeExplicit(*args, **kwargs) -> None: ...
@staticmethod
def Create(*args, **kwargs) -> None: ...
@staticmethod
def CreateExplicit(*args, **kwargs) -> None: ...
@staticmethod
def GetAddedOrExplicitItems(*args, **kwargs) -> None: ...
@staticmethod
def HasItem(*args, **kwargs) -> None: ...
@property
def addedItems(self) -> None:
"""
:type: None
"""
@property
def appendedItems(self) -> None:
"""
:type: None
"""
@property
def deletedItems(self) -> None:
"""
:type: None
"""
@property
def explicitItems(self) -> None:
"""
:type: None
"""
@property
def isExplicit(self) -> None:
"""
:type: None
"""
@property
def orderedItems(self) -> None:
"""
:type: None
"""
@property
def prependedItems(self) -> None:
"""
:type: None
"""
__instance_size__ = 168
pass
class ValueBlock(Boost.Python.instance):
"""
A special value type that can be used to explicitly author an opinion
for an attribute's default value or time sample value that represents
having no value. Note that this is different from not having a value
authored.
One could author such a value in two ways.
.. code-block:: text
attribute->SetDefaultValue(VtValue(SdfValueBlock()));
...
layer->SetTimeSample(attribute->GetPath(), 101, VtValue(SdfValueBlock()));
"""
__instance_size__ = 24
pass
class ValueRoleNames(Boost.Python.instance):
Color = 'Color'
EdgeIndex = 'EdgeIndex'
FaceIndex = 'FaceIndex'
Frame = 'Frame'
Normal = 'Normal'
Point = 'Point'
PointIndex = 'PointIndex'
TextureCoordinate = 'TextureCoordinate'
Transform = 'Transform'
Vector = 'Vector'
pass
class ValueTypeName(Boost.Python.instance):
"""
Represents a value type name, i.e. an attribute's type name. Usually,
a value type name associates a string with a ``TfType`` and an
optional role, along with additional metadata. A schema registers all
known value type names and may register multiple names for the same
TfType and role pair. All name strings for a given pair are
collectively called its aliases.
A value type name may also represent just a name string, without a
``TfType`` , role or other metadata. This is currently used
exclusively to unserialize and re-serialize an attribute's type name
where that name is not known to the schema.
Because value type names can have aliases and those aliases may change
in the future, clients should avoid using the value type name's string
representation except to report human readable messages and when
serializing. Clients can look up a value type name by string using
``SdfSchemaBase::FindType()`` and shouldn't otherwise need the string.
Aliases compare equal, even if registered by different schemas.
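An illustrative sketch (added here for clarity) of looking a type name up
by its alias string and moving between its scalar and array forms:
.. code-block:: python
from pxr import Sdf
vt = Sdf.ValueTypeNames.Find("float3")   # look up by alias string
assert vt == Sdf.ValueTypeNames.Float3
print(vt.isScalar)      # True
print(vt.arrayType)     # the float3[] value type name
print(vt.defaultValue)  # fallback value for the type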
"""
@property
def aliasesAsStrings(self) -> None:
"""
:type: None
"""
@property
def arrayType(self) -> None:
"""
type : ValueTypeName
Returns the array version of this type name if it's a scalar type
name, otherwise returns this type name.
If there is no array type name then this returns the invalid type
name.
:type: None
"""
@property
def cppTypeName(self) -> None:
"""
:type: None
"""
@property
def defaultUnit(self) -> None:
"""
type : Enum
Returns the default unit enum for the type.
:type: None
"""
@property
def defaultValue(self) -> None:
"""
type : VtValue
Returns the default value for the type.
:type: None
"""
@property
def isArray(self) -> None:
"""
type : bool
Returns ``true`` iff this type is an array.
The invalid type is considered neither scalar nor array.
:type: None
"""
@property
def isScalar(self) -> None:
"""
type : bool
Returns ``true`` iff this type is a scalar.
The invalid type is considered neither scalar nor array.
:type: None
"""
@property
def role(self) -> None:
"""
type : str
Returns the type's role.
:type: None
"""
@property
def scalarType(self) -> None:
"""
type : ValueTypeName
Returns the scalar version of this type name if it's an array type
name, otherwise returns this type name.
If there is no scalar type name then this returns the invalid type
name.
:type: None
"""
@property
def type(self) -> None:
"""
type : Type
Returns the ``TfType`` of the type.
:type: None
"""
pass
class ValueTypeNames(Boost.Python.instance):
@staticmethod
def Find(*args, **kwargs) -> None: ...
Asset: pxr.Sdf.ValueTypeName
AssetArray: pxr.Sdf.ValueTypeName
Bool: pxr.Sdf.ValueTypeName
BoolArray: pxr.Sdf.ValueTypeName
Color3d: pxr.Sdf.ValueTypeName
Color3dArray: pxr.Sdf.ValueTypeName
Color3f: pxr.Sdf.ValueTypeName
Color3fArray: pxr.Sdf.ValueTypeName
Color3h: pxr.Sdf.ValueTypeName
Color3hArray: pxr.Sdf.ValueTypeName
Color4d: pxr.Sdf.ValueTypeName
Color4dArray: pxr.Sdf.ValueTypeName
Color4f: pxr.Sdf.ValueTypeName
Color4fArray: pxr.Sdf.ValueTypeName
Color4h: pxr.Sdf.ValueTypeName
Color4hArray: pxr.Sdf.ValueTypeName
Double: pxr.Sdf.ValueTypeName
Double2: pxr.Sdf.ValueTypeName
Double2Array: pxr.Sdf.ValueTypeName
Double3: pxr.Sdf.ValueTypeName
Double3Array: pxr.Sdf.ValueTypeName
Double4: pxr.Sdf.ValueTypeName
Double4Array: pxr.Sdf.ValueTypeName
DoubleArray: pxr.Sdf.ValueTypeName
Float: pxr.Sdf.ValueTypeName
Float2: pxr.Sdf.ValueTypeName
Float2Array: pxr.Sdf.ValueTypeName
Float3: pxr.Sdf.ValueTypeName
Float3Array: pxr.Sdf.ValueTypeName
Float4: pxr.Sdf.ValueTypeName
Float4Array: pxr.Sdf.ValueTypeName
FloatArray: pxr.Sdf.ValueTypeName
Frame4d: pxr.Sdf.ValueTypeName
Frame4dArray: pxr.Sdf.ValueTypeName
Half: pxr.Sdf.ValueTypeName
Half2: pxr.Sdf.ValueTypeName
Half2Array: pxr.Sdf.ValueTypeName
Half3: pxr.Sdf.ValueTypeName
Half3Array: pxr.Sdf.ValueTypeName
Half4: pxr.Sdf.ValueTypeName
Half4Array: pxr.Sdf.ValueTypeName
HalfArray: pxr.Sdf.ValueTypeName
Int: pxr.Sdf.ValueTypeName
Int2: pxr.Sdf.ValueTypeName
Int2Array: pxr.Sdf.ValueTypeName
Int3: pxr.Sdf.ValueTypeName
Int3Array: pxr.Sdf.ValueTypeName
Int4: pxr.Sdf.ValueTypeName
Int4Array: pxr.Sdf.ValueTypeName
Int64: pxr.Sdf.ValueTypeName
Int64Array: pxr.Sdf.ValueTypeName
IntArray: pxr.Sdf.ValueTypeName
Matrix2d: pxr.Sdf.ValueTypeName
Matrix2dArray: pxr.Sdf.ValueTypeName
Matrix3d: pxr.Sdf.ValueTypeName
Matrix3dArray: pxr.Sdf.ValueTypeName
Matrix4d: pxr.Sdf.ValueTypeName
Matrix4dArray: pxr.Sdf.ValueTypeName
Normal3d: pxr.Sdf.ValueTypeName
Normal3dArray: pxr.Sdf.ValueTypeName
Normal3f: pxr.Sdf.ValueTypeName
Normal3fArray: pxr.Sdf.ValueTypeName
Normal3h: pxr.Sdf.ValueTypeName
Normal3hArray: pxr.Sdf.ValueTypeName
Point3d: pxr.Sdf.ValueTypeName
Point3dArray: pxr.Sdf.ValueTypeName
Point3f: pxr.Sdf.ValueTypeName
Point3fArray: pxr.Sdf.ValueTypeName
Point3h: pxr.Sdf.ValueTypeName
Point3hArray: pxr.Sdf.ValueTypeName
Quatd: pxr.Sdf.ValueTypeName
QuatdArray: pxr.Sdf.ValueTypeName
Quatf: pxr.Sdf.ValueTypeName
QuatfArray: pxr.Sdf.ValueTypeName
Quath: pxr.Sdf.ValueTypeName
QuathArray: pxr.Sdf.ValueTypeName
String: pxr.Sdf.ValueTypeName
StringArray: pxr.Sdf.ValueTypeName
TexCoord2d: pxr.Sdf.ValueTypeName
TexCoord2dArray: pxr.Sdf.ValueTypeName
TexCoord2f: pxr.Sdf.ValueTypeName
TexCoord2fArray: pxr.Sdf.ValueTypeName
TexCoord2h: pxr.Sdf.ValueTypeName
TexCoord2hArray: pxr.Sdf.ValueTypeName
TexCoord3d: pxr.Sdf.ValueTypeName
TexCoord3dArray: pxr.Sdf.ValueTypeName
TexCoord3f: pxr.Sdf.ValueTypeName
TexCoord3fArray: pxr.Sdf.ValueTypeName
TexCoord3h: pxr.Sdf.ValueTypeName
TexCoord3hArray: pxr.Sdf.ValueTypeName
TimeCode: pxr.Sdf.ValueTypeName
TimeCodeArray: pxr.Sdf.ValueTypeName
Token: pxr.Sdf.ValueTypeName
TokenArray: pxr.Sdf.ValueTypeName
UChar: pxr.Sdf.ValueTypeName
UCharArray: pxr.Sdf.ValueTypeName
UInt: pxr.Sdf.ValueTypeName
UInt64: pxr.Sdf.ValueTypeName
UInt64Array: pxr.Sdf.ValueTypeName
UIntArray: pxr.Sdf.ValueTypeName
Vector3d: pxr.Sdf.ValueTypeName
Vector3dArray: pxr.Sdf.ValueTypeName
Vector3f: pxr.Sdf.ValueTypeName
Vector3fArray: pxr.Sdf.ValueTypeName
Vector3h: pxr.Sdf.ValueTypeName
Vector3hArray: pxr.Sdf.ValueTypeName
pass
class Variability(pxr.Tf.Tf_PyEnumWrapper, pxr.Tf.Enum, Boost.Python.instance):
@staticmethod
def GetValueFromName(*args, **kwargs) -> None: ...
_baseName = ''
allValues: tuple # value = (Sdf.VariabilityVarying, Sdf.VariabilityUniform)
pass
class VariantSetSpec(Spec, Boost.Python.instance):
"""
Represents a coherent set of alternate representations for part of a
scene.
An SdfPrimSpec object may contain one or more named SdfVariantSetSpec
objects that define variations on the prim.
An SdfVariantSetSpec object contains one or more named SdfVariantSpec
objects. It may also define the name of one of its variants to be used
by default.
When a prim references another prim, the referencing prim may specify
one of the variants from each of the variant sets of the target prim.
The chosen variant from each set (or the default variant from those
sets that the referencing prim does not explicitly specify) is
composited over the target prim, and then the referencing prim is
composited over the result.
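An illustrative sketch (added here for clarity; the variant set and
variant names are hypothetical) of authoring a variant set at the Sdf
level:
.. code-block:: python
from pxr import Sdf
layer = Sdf.Layer.CreateAnonymous()
prim = Sdf.CreatePrimInLayer(layer, "/Model")
vset = Sdf.VariantSetSpec(prim, "shadingVariant")
red = Sdf.VariantSpec(vset, "red")
# Advertise the set on the prim and pick a selection.
prim.variantSetNameList.prependedItems.append("shadingVariant")
prim.variantSelections["shadingVariant"] = "red"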
"""
@staticmethod
def RemoveVariant(variant) -> None:
"""
RemoveVariant(variant) -> None
Removes ``variant`` from the list of variants.
If the variant set does not currently own ``variant`` , no action is
taken.
Parameters
----------
variant : VariantSpec
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def name(self) -> None:
"""
The variant set's name.
:type: None
"""
@property
def owner(self) -> None:
"""
The prim that this variant set belongs to.
:type: None
"""
@property
def variantList(self) -> None:
"""
The variants in this variant set as a list.
:type: None
"""
@property
def variants(self) -> None:
"""
The variants in this variant set as a dict.
:type: None
"""
pass
class VariantSpec(Spec, Boost.Python.instance):
"""
Represents a single variant in a variant set.
A variant contains a prim. This prim is the root prim of the variant.
SdfVariantSpecs are value objects. This means they are immutable once
created and they are passed by copy-in APIs. To change a variant spec,
you make a new one and replace the existing one.
"""
@staticmethod
def GetVariantNames(name) -> list[str]:
"""
GetVariantNames(name) -> list[str]
Returns list of variant names for the given variant set.
Parameters
----------
name : str
"""
@property
def expired(self) -> None:
"""
:type: None
"""
@property
def name(self) -> None:
"""
The variant's name.
:type: None
"""
@property
def owner(self) -> None:
"""
The variant set that this variant belongs to.
:type: None
"""
@property
def primSpec(self) -> None:
"""
The root prim of this variant.
:type: None
"""
@property
def variantSets(self) -> None:
"""
type : SdfVariantSetsProxy
Returns the nested variant sets.
The result maps variant set names to variant sets. Variant sets may be
removed through the proxy.
:type: None
"""
pass
def Cat(*args, **kwargs) -> None:
pass
def ComputeAssetPathRelativeToLayer(*args, **kwargs) -> None:
pass
def ConvertToValidMetadataDictionary(*args, **kwargs) -> None:
pass
def ConvertUnit(*args, **kwargs) -> None:
"""
Convert a unit of measurement to a compatible unit.
"""
def CopySpec(*args, **kwargs) -> None:
pass
def CreatePrimInLayer(*args, **kwargs) -> None:
pass
def CreateVariantInLayer(*args, **kwargs) -> None:
pass
def DefaultUnit(*args, **kwargs) -> None:
"""
For a given unit of measurement get the default compatible unit.
For a given typeName ('Vector', 'Point' etc.) get the default unit of measurement.
"""
def Equal(*args, **kwargs) -> None:
pass
def GetNameForUnit(*args, **kwargs) -> None:
pass
def GetTypeForValueTypeName(*args, **kwargs) -> None:
pass
def GetUnitFromName(*args, **kwargs) -> None:
pass
def GetValueTypeNameForValue(*args, **kwargs) -> None:
pass
def JustCreatePrimAttributeInLayer(*args, **kwargs) -> None:
pass
def JustCreatePrimInLayer(*args, **kwargs) -> None:
pass
def NotEqual(*args, **kwargs) -> None:
pass
def UnitCategory(*args, **kwargs) -> None:
"""
For a given unit of measurement get the unit category.
"""
def ValueHasValidType(*args, **kwargs) -> None:
pass
def _DumpPathStats(*args, **kwargs) -> None:
pass
def _PathGetDebuggerPathText(*args, **kwargs) -> None:
pass
def _PathStress(*args, **kwargs) -> None:
pass
def _TestTakeOwnership(*args, **kwargs) -> None:
pass
AngularUnitDegrees: pxr.Sdf.AngularUnit # value = Sdf.AngularUnitDegrees
AngularUnitRadians: pxr.Sdf.AngularUnit # value = Sdf.AngularUnitRadians
AuthoringErrorUnrecognizedFields: pxr.Sdf.AuthoringError # value = Sdf.AuthoringErrorUnrecognizedFields
AuthoringErrorUnrecognizedSpecType: pxr.Sdf.AuthoringError # value = Sdf.AuthoringErrorUnrecognizedSpecType
DimensionlessUnitDefault: pxr.Sdf.DimensionlessUnit # value = Sdf.DimensionlessUnitDefault
DimensionlessUnitPercent: pxr.Sdf.DimensionlessUnit # value = Sdf.DimensionlessUnitPercent
LengthUnitCentimeter: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitCentimeter
LengthUnitDecimeter: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitDecimeter
LengthUnitFoot: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitFoot
LengthUnitInch: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitInch
LengthUnitKilometer: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitKilometer
LengthUnitMeter: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitMeter
LengthUnitMile: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitMile
LengthUnitMillimeter: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitMillimeter
LengthUnitYard: pxr.Sdf.LengthUnit # value = Sdf.LengthUnitYard
ListOpTypeAdded: pxr.Sdf.ListOpType # value = Sdf.ListOpTypeAdded
ListOpTypeAppended: pxr.Sdf.ListOpType # value = Sdf.ListOpTypeAppended
ListOpTypeDeleted: pxr.Sdf.ListOpType # value = Sdf.ListOpTypeDeleted
ListOpTypeExplicit: pxr.Sdf.ListOpType # value = Sdf.ListOpTypeExplicit
ListOpTypeOrdered: pxr.Sdf.ListOpType # value = Sdf.ListOpTypeOrdered
ListOpTypePrepended: pxr.Sdf.ListOpType # value = Sdf.ListOpTypePrepended
PermissionPrivate: pxr.Sdf.Permission # value = Sdf.PermissionPrivate
PermissionPublic: pxr.Sdf.Permission # value = Sdf.PermissionPublic
SpecTypeAttribute: pxr.Sdf.SpecType # value = Sdf.SpecTypeAttribute
SpecTypeConnection: pxr.Sdf.SpecType # value = Sdf.SpecTypeConnection
SpecTypeExpression: pxr.Sdf.SpecType # value = Sdf.SpecTypeExpression
SpecTypeMapper: pxr.Sdf.SpecType # value = Sdf.SpecTypeMapper
SpecTypeMapperArg: pxr.Sdf.SpecType # value = Sdf.SpecTypeMapperArg
SpecTypePrim: pxr.Sdf.SpecType # value = Sdf.SpecTypePrim
SpecTypePseudoRoot: pxr.Sdf.SpecType # value = Sdf.SpecTypePseudoRoot
SpecTypeRelationship: pxr.Sdf.SpecType # value = Sdf.SpecTypeRelationship
SpecTypeRelationshipTarget: pxr.Sdf.SpecType # value = Sdf.SpecTypeRelationshipTarget
SpecTypeUnknown: pxr.Sdf.SpecType # value = Sdf.SpecTypeUnknown
SpecTypeVariant: pxr.Sdf.SpecType # value = Sdf.SpecTypeVariant
SpecTypeVariantSet: pxr.Sdf.SpecType # value = Sdf.SpecTypeVariantSet
SpecifierClass: pxr.Sdf.Specifier # value = Sdf.SpecifierClass
SpecifierDef: pxr.Sdf.Specifier # value = Sdf.SpecifierDef
SpecifierOver: pxr.Sdf.Specifier # value = Sdf.SpecifierOver
VariabilityUniform: pxr.Sdf.Variability # value = Sdf.VariabilityUniform
VariabilityVarying: pxr.Sdf.Variability # value = Sdf.VariabilityVarying
__MFB_FULL_PACKAGE_NAME = 'sdf'
| 192,206 | unknown | 27.047133 | 311 | 0.603483 |
omniverse-code/kit/exts/omni.usd.libs/pxr/UsdUI/__DOC.py | def Execute(result):
result["Backdrop"].__doc__ = """
Provides a 'group-box' for the purpose of node graph organization.
Unlike containers, backdrops do not store the Shader nodes inside of
them. Backdrops are an organizational tool that allows Shader nodes to
be visually grouped together in a node-graph UI, but there is no
direct relationship between a Shader node and a Backdrop.
The guideline for a node-graph UI is that a Shader node is considered
part of a Backdrop when the Backdrop is the smallest Backdrop a Shader
node's bounding-box fits inside.
Backdrop objects are contained inside a NodeGraph, similar to how
Shader objects are contained inside a NodeGraph.
Backdrops have no shading inputs or outputs that influence the
rendered results of a NodeGraph. Therefore they can be safely ignored
during import.
Like Shaders and NodeGraphs, Backdrops subscribe to the
NodeGraphNodeAPI to specify position and size.
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdUITokens. So to set an attribute to the value "rightHanded", use
UsdUITokens->rightHanded as the value.
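An illustrative sketch (added here for clarity; the stage and prim path
are hypothetical) of defining a backdrop and authoring its description:
.. code-block:: python
from pxr import Usd, UsdUI
stage = Usd.Stage.CreateInMemory()
backdrop = UsdUI.Backdrop.Define(stage, "/NodeGraph/Notes")
backdrop.CreateDescriptionAttr("Lighting nodes live here")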
"""
result["Backdrop"].__init__.func_doc = """__init__(prim)
Construct a UsdUIBackdrop on UsdPrim ``prim`` .
Equivalent to UsdUIBackdrop::Get (prim.GetStage(), prim.GetPath()) for
a *valid* ``prim`` , but will not immediately throw an error for an
invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdUIBackdrop on the prim held by ``schemaObj`` .
Should be preferred over UsdUIBackdrop (schemaObj.GetPrim()), as it
preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["Backdrop"].GetDescriptionAttr.func_doc = """GetDescriptionAttr() -> Attribute
The text label that is displayed on the backdrop in the node graph.
This help-description explains what the nodes in a backdrop do.
Declaration
``uniform token ui:description``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
"""
result["Backdrop"].CreateDescriptionAttr.func_doc = """CreateDescriptionAttr(defaultValue, writeSparsely) -> Attribute
See GetDescriptionAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Backdrop"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["Backdrop"].Get.func_doc = """**classmethod** Get(stage, path) -> Backdrop
Return a UsdUIBackdrop holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdUIBackdrop(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["Backdrop"].Define.func_doc = """**classmethod** Define(stage, path) -> Backdrop
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["Backdrop"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["NodeGraphNodeAPI"].__doc__ = """
This api helps storing information about nodes in node graphs.
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdUITokens. So to set an attribute to the value "rightHanded", use
UsdUITokens->rightHanded as the value.
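An illustrative sketch (added here for clarity; the prim path and prim
type are hypothetical) of applying the API and authoring a node position
and display color:
.. code-block:: python
from pxr import Gf, Usd, UsdUI
stage = Usd.Stage.CreateInMemory()
prim = stage.DefinePrim("/NodeGraph/Shader", "Shader")
api = UsdUI.NodeGraphNodeAPI.Apply(prim)
api.CreatePosAttr(Gf.Vec2f(3.0, 1.5))
api.CreateDisplayColorAttr(Gf.Vec3f(0.2, 0.4, 0.8))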
"""
result["NodeGraphNodeAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdUINodeGraphNodeAPI on UsdPrim ``prim`` .
Equivalent to UsdUINodeGraphNodeAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdUINodeGraphNodeAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdUINodeGraphNodeAPI (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["NodeGraphNodeAPI"].GetPosAttr.func_doc = """GetPosAttr() -> Attribute
Declared relative position to the parent in a node graph.
X is the horizontal position. Y is the vertical position. Higher
numbers correspond to lower positions (coordinates are Qt style, not
cartesian).
These positions are not explicitly meant in pixel space, but rather
assume that the size of a node is approximately 1.0x1.0. Where size-x
is the node width and size-y height of the node. Depending on graph UI
implementation, the size of a node may vary in each direction.
Example: If a node's width is 300 and its position is at 1000, we
store for x-position: 1000 * (1.0/300)
Declaration
``uniform float2 ui:nodegraph:node:pos``
C++ Type
GfVec2f
Usd Type
SdfValueTypeNames->Float2
Variability
SdfVariabilityUniform
"""
result["NodeGraphNodeAPI"].CreatePosAttr.func_doc = """CreatePosAttr(defaultValue, writeSparsely) -> Attribute
See GetPosAttr() , and also Create vs Get Property Methods for when to
use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetStackingOrderAttr.func_doc = """GetStackingOrderAttr() -> Attribute
This optional value is a useful hint when an application cares about
the visibility of a node and whether each node overlaps another.
Nodes with lower stacking order values are meant to be drawn below
higher ones. Negative values are meant as background. Positive values
are meant as foreground. Undefined values should be treated as 0.
There are no set limits in these values.
Declaration
``uniform int ui:nodegraph:node:stackingOrder``
C++ Type
int
Usd Type
SdfValueTypeNames->Int
Variability
SdfVariabilityUniform
"""
result["NodeGraphNodeAPI"].CreateStackingOrderAttr.func_doc = """CreateStackingOrderAttr(defaultValue, writeSparsely) -> Attribute
See GetStackingOrderAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetDisplayColorAttr.func_doc = """GetDisplayColorAttr() -> Attribute
This hint defines what tint the node should have in the node graph.
Declaration
``uniform color3f ui:nodegraph:node:displayColor``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Color3f
Variability
SdfVariabilityUniform
"""
result["NodeGraphNodeAPI"].CreateDisplayColorAttr.func_doc = """CreateDisplayColorAttr(defaultValue, writeSparsely) -> Attribute
See GetDisplayColorAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetIconAttr.func_doc = """GetIconAttr() -> Attribute
This points to an image that should be displayed on the node.
It is intended to be useful for summary visual classification of
nodes, rather than a thumbnail preview of the computed result of the
node in some computational system.
Declaration
``uniform asset ui:nodegraph:node:icon``
C++ Type
SdfAssetPath
Usd Type
SdfValueTypeNames->Asset
Variability
SdfVariabilityUniform
"""
result["NodeGraphNodeAPI"].CreateIconAttr.func_doc = """CreateIconAttr(defaultValue, writeSparsely) -> Attribute
See GetIconAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetExpansionStateAttr.func_doc = """GetExpansionStateAttr() -> Attribute
The current expansionState of the node in the ui.
'open' = fully expanded, 'closed' = fully collapsed, 'minimized' = should
take the least space possible
Declaration
``uniform token ui:nodegraph:node:expansionState``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
open, closed, minimized
"""
result["NodeGraphNodeAPI"].CreateExpansionStateAttr.func_doc = """CreateExpansionStateAttr(defaultValue, writeSparsely) -> Attribute
See GetExpansionStateAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetSizeAttr.func_doc = """GetSizeAttr() -> Attribute
Optional size hint for a node in a node graph.
X is the width. Y is the height.
This value is optional, because node size is often determined based on
the number of in- and outputs of a node.
Declaration
``uniform float2 ui:nodegraph:node:size``
C++ Type
GfVec2f
Usd Type
SdfValueTypeNames->Float2
Variability
SdfVariabilityUniform
"""
result["NodeGraphNodeAPI"].CreateSizeAttr.func_doc = """CreateSizeAttr(defaultValue, writeSparsely) -> Attribute
See GetSizeAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["NodeGraphNodeAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["NodeGraphNodeAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> NodeGraphNodeAPI
Return a UsdUINodeGraphNodeAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdUINodeGraphNodeAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["NodeGraphNodeAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["NodeGraphNodeAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> NodeGraphNodeAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"NodeGraphNodeAPI"to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdUINodeGraphNodeAPI object is returned upon success. An
invalid (or empty) UsdUINodeGraphNodeAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["NodeGraphNodeAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["SceneGraphPrimAPI"].__doc__ = """
Utility schema for display properties of a prim
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdUITokens. So to set an attribute to the value "rightHanded", use
UsdUITokens->rightHanded as the value.
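A minimal Python usage sketch (illustrative; the stage and prim path
below are assumptions for the example, not part of the schema):
.. code-block:: text
  from pxr import Usd, UsdUI
  stage = Usd.Stage.CreateInMemory()
  prim = stage.DefinePrim("/World/Looks/Brass", "Material")
  api = UsdUI.SceneGraphPrimAPI.Apply(prim)
  api.CreateDisplayNameAttr("Brass (worn)")
  api.CreateDisplayGroupAttr("Metals")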
"""
result["SceneGraphPrimAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdUISceneGraphPrimAPI on UsdPrim ``prim`` .
Equivalent to UsdUISceneGraphPrimAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdUISceneGraphPrimAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdUISceneGraphPrimAPI (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["SceneGraphPrimAPI"].GetDisplayNameAttr.func_doc = """GetDisplayNameAttr() -> Attribute
When publishing a nodegraph or a material, it can be useful to provide
an optional display name, for readability.
Declaration
``uniform token ui:displayName``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
"""
result["SceneGraphPrimAPI"].CreateDisplayNameAttr.func_doc = """CreateDisplayNameAttr(defaultValue, writeSparsely) -> Attribute
See GetDisplayNameAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["SceneGraphPrimAPI"].GetDisplayGroupAttr.func_doc = """GetDisplayGroupAttr() -> Attribute
When publishing a nodegraph or a material, it can be useful to provide
an optional display group, for organizational purposes and
readability.
This is because often the usd shading hierarchy is rather flat while
we want to display it in organized groups.
Declaration
``uniform token ui:displayGroup``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
"""
result["SceneGraphPrimAPI"].CreateDisplayGroupAttr.func_doc = """CreateDisplayGroupAttr(defaultValue, writeSparsely) -> Attribute
See GetDisplayGroupAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["SceneGraphPrimAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["SceneGraphPrimAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> SceneGraphPrimAPI
Return a UsdUISceneGraphPrimAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdUISceneGraphPrimAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["SceneGraphPrimAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["SceneGraphPrimAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> SceneGraphPrimAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"SceneGraphPrimAPI"to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdUISceneGraphPrimAPI object is returned upon success. An
invalid (or empty) UsdUISceneGraphPrimAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["SceneGraphPrimAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
""" | 19,636 | Python | 20.626652 | 143 | 0.733245 |
omniverse-code/kit/exts/omni.usd.libs/pxr/UsdPhysics/__DOC.py | def Execute(result):
result["ArticulationRootAPI"].__doc__ = """
PhysicsArticulationRootAPI can be applied to a scene graph node, and
marks the subtree rooted here for inclusion in one or more reduced
coordinate articulations. For floating articulations, this should be
on the root body. For fixed articulations (robotics jargon for e.g. a
robot arm for welding that is bolted to the floor), this API can be on
a direct or indirect parent of the root joint which is connected to
the world, or on the joint itself.
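A minimal Python sketch of applying the API (illustrative; the stage
and prim path are assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  root = stage.DefinePrim("/Robot", "Xform")
  UsdPhysics.ArticulationRootAPI.Apply(root)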
"""
result["ArticulationRootAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsArticulationRootAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsArticulationRootAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsArticulationRootAPI on the prim held by
``schemaObj`` .
Should be preferred over UsdPhysicsArticulationRootAPI
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["ArticulationRootAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["ArticulationRootAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> ArticulationRootAPI
Return a UsdPhysicsArticulationRootAPI holding the prim adhering to
this schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsArticulationRootAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["ArticulationRootAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["ArticulationRootAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> ArticulationRootAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"PhysicsArticulationRootAPI"to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsArticulationRootAPI object is returned upon success.
An invalid (or empty) UsdPhysicsArticulationRootAPI object is returned
upon failure. See UsdPrim::ApplyAPI() for conditions resulting in
failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["ArticulationRootAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["CollisionAPI"].__doc__ = """
Applies collision attributes to a UsdGeomXformable prim. If a
simulation is running, this geometry will collide with other
geometries that have PhysicsCollisionAPI applied. If a prim in the
parent hierarchy has the RigidBodyAPI applied, this collider is a part
of that body. If there is no body in the parent hierarchy, this
collider is considered to be static.
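A minimal Python usage sketch (illustrative; the stage and prim path
are assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdGeom, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  cube = UsdGeom.Cube.Define(stage, "/World/Box")
  collision = UsdPhysics.CollisionAPI.Apply(cube.GetPrim())
  collision.CreateCollisionEnabledAttr(True)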
"""
result["CollisionAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsCollisionAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsCollisionAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsCollisionAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsCollisionAPI (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["CollisionAPI"].GetCollisionEnabledAttr.func_doc = """GetCollisionEnabledAttr() -> Attribute
Determines if the PhysicsCollisionAPI is enabled.
Declaration
``bool physics:collisionEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["CollisionAPI"].CreateCollisionEnabledAttr.func_doc = """CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetCollisionEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["CollisionAPI"].GetSimulationOwnerRel.func_doc = """GetSimulationOwnerRel() -> Relationship
Single PhysicsScene that will simulate this collider.
By default this object belongs to the first PhysicsScene. Note that if
a RigidBodyAPI in the hierarchy above has a different simulationOwner
then it takes precedence over this relationship.
"""
result["CollisionAPI"].CreateSimulationOwnerRel.func_doc = """CreateSimulationOwnerRel() -> Relationship
See GetSimulationOwnerRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
result["CollisionAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["CollisionAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> CollisionAPI
Return a UsdPhysicsCollisionAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsCollisionAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["CollisionAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["CollisionAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> CollisionAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"PhysicsCollisionAPI"to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsCollisionAPI object is returned upon success. An
invalid (or empty) UsdPhysicsCollisionAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["CollisionAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["CollisionGroup"].__doc__ = """
Defines a collision group for coarse filtering. When a collision
occurs between two objects that have a PhysicsCollisionGroup assigned,
they will collide with each other unless this PhysicsCollisionGroup
pair is filtered. See filteredGroups attribute.
A CollectionAPI:colliders maintains a list of PhysicsCollisionAPI
rels that defines the members of this CollisionGroup.
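A minimal Python sketch of two mutually filtered groups (illustrative;
the prim paths are assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  groupA = UsdPhysics.CollisionGroup.Define(stage, "/World/GroupA")
  groupB = UsdPhysics.CollisionGroup.Define(stage, "/World/GroupB")
  groupA.GetCollidersCollectionAPI().CreateIncludesRel().AddTarget("/World/BoxA")
  groupA.CreateFilteredGroupsRel().AddTarget(groupB.GetPath())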
"""
result["CollisionGroup"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsCollisionGroup on UsdPrim ``prim`` .
Equivalent to UsdPhysicsCollisionGroup::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsCollisionGroup on the prim held by ``schemaObj``
.
Should be preferred over UsdPhysicsCollisionGroup
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["CollisionGroup"].GetMergeGroupNameAttr.func_doc = """GetMergeGroupNameAttr() -> Attribute
If non-empty, any collision groups in a stage with a matching
mergeGroup should be considered to refer to the same collection.
Matching collision groups should behave as if there were a single
group containing referenced colliders and filter groups from both
collections.
Declaration
``string physics:mergeGroup``
C++ Type
std::string
Usd Type
SdfValueTypeNames->String
"""
result["CollisionGroup"].CreateMergeGroupNameAttr.func_doc = """CreateMergeGroupNameAttr(defaultValue, writeSparsely) -> Attribute
See GetMergeGroupNameAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["CollisionGroup"].GetInvertFilteredGroupsAttr.func_doc = """GetInvertFilteredGroupsAttr() -> Attribute
Normally, the filter will disable collisions against the selected
filter groups.
However, if this option is set, the filter will disable collisions
against all colliders except for those in the selected filter groups.
Declaration
``bool physics:invertFilteredGroups``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["CollisionGroup"].CreateInvertFilteredGroupsAttr.func_doc = """CreateInvertFilteredGroupsAttr(defaultValue, writeSparsely) -> Attribute
See GetInvertFilteredGroupsAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["CollisionGroup"].GetFilteredGroupsRel.func_doc = """GetFilteredGroupsRel() -> Relationship
References a list of PhysicsCollisionGroups with which collisions
should be ignored.
"""
result["CollisionGroup"].CreateFilteredGroupsRel.func_doc = """CreateFilteredGroupsRel() -> Relationship
See GetFilteredGroupsRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
result["CollisionGroup"].GetCollidersCollectionAPI.func_doc = """GetCollidersCollectionAPI() -> CollectionAPI
Return the UsdCollectionAPI interface used for defining what colliders
belong to the CollisionGroup.
"""
result["CollisionGroup"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["CollisionGroup"].Get.func_doc = """**classmethod** Get(stage, path) -> CollisionGroup
Return a UsdPhysicsCollisionGroup holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsCollisionGroup(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["CollisionGroup"].Define.func_doc = """**classmethod** Define(stage, path) -> CollisionGroup
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["CollisionGroup"].ComputeCollisionGroupTable.func_doc = """**classmethod** ComputeCollisionGroupTable(stage) -> CollisionGroupTable
Compute a table encoding all the collision groups filter rules for a
stage.
This can be used as a reference to validate an implementation of the
collision groups filters. The returned table is diagonally symmetric.
Parameters
----------
stage : Stage
"""
result["CollisionGroup"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["DistanceJoint"].__doc__ = """
Predefined distance joint type (Distance between rigid bodies may be
limited to given minimum or maximum distance.)
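A minimal Python sketch (illustrative; the body prim paths and distance
values are assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  joint = UsdPhysics.DistanceJoint.Define(stage, "/World/Rope")
  joint.CreateBody0Rel().AddTarget("/World/BoxA")
  joint.CreateBody1Rel().AddTarget("/World/BoxB")
  joint.CreateMinDistanceAttr(0.0)
  joint.CreateMaxDistanceAttr(5.0)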
"""
result["DistanceJoint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsDistanceJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsDistanceJoint::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsDistanceJoint on the prim held by ``schemaObj``
.
Should be preferred over UsdPhysicsDistanceJoint
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["DistanceJoint"].GetMinDistanceAttr.func_doc = """GetMinDistanceAttr() -> Attribute
Minimum distance.
If the attribute is negative, the joint is not limited. Units: distance.
Declaration
``float physics:minDistance = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DistanceJoint"].CreateMinDistanceAttr.func_doc = """CreateMinDistanceAttr(defaultValue, writeSparsely) -> Attribute
See GetMinDistanceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DistanceJoint"].GetMaxDistanceAttr.func_doc = """GetMaxDistanceAttr() -> Attribute
Maximum distance.
If the attribute is negative, the joint is not limited. Units: distance.
Declaration
``float physics:maxDistance = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DistanceJoint"].CreateMaxDistanceAttr.func_doc = """CreateMaxDistanceAttr(defaultValue, writeSparsely) -> Attribute
See GetMaxDistanceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DistanceJoint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["DistanceJoint"].Get.func_doc = """**classmethod** Get(stage, path) -> DistanceJoint
Return a UsdPhysicsDistanceJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsDistanceJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["DistanceJoint"].Define.func_doc = """**classmethod** Define(stage, path) -> DistanceJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["DistanceJoint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["DriveAPI"].__doc__ = """
The PhysicsDriveAPI when applied to any joint primitive will drive the
joint towards a given target. The PhysicsDriveAPI is a multipleApply
schema: drive can be set per
axis"transX","transY","transZ","rotX","rotY","rotZ"or its"linear"for
prismatic joint or"angular"for revolute joints. Setting these as a
multipleApply schema TfToken name will define the degree of freedom
the DriveAPI is applied to. Each drive is an implicit force-limited
damped spring: Force or acceleration = stiffness \\* (targetPosition -
position) + damping \\* (targetVelocity - velocity)
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
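A minimal Python sketch of an angular drive on a revolute joint
(illustrative; the joint path and gain values are assumptions for the
example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  hinge = UsdPhysics.RevoluteJoint.Define(stage, "/World/Hinge")
  drive = UsdPhysics.DriveAPI.Apply(hinge.GetPrim(), "angular")
  drive.CreateTargetPositionAttr(45.0)
  drive.CreateStiffnessAttr(1000.0)
  drive.CreateDampingAttr(10.0)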
"""
result["DriveAPI"].__init__.func_doc = """__init__(prim, name)
Construct a UsdPhysicsDriveAPI on UsdPrim ``prim`` with name ``name``
.
Equivalent to UsdPhysicsDriveAPI::Get ( prim.GetStage(),
prim.GetPath().AppendProperty("drive:name"));
for a *valid* ``prim`` , but will not immediately throw an error for
an invalid ``prim``
Parameters
----------
prim : Prim
name : str
----------------------------------------------------------------------
__init__(schemaObj, name)
Construct a UsdPhysicsDriveAPI on the prim held by ``schemaObj`` with
name ``name`` .
Should be preferred over UsdPhysicsDriveAPI (schemaObj.GetPrim(),
name), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
name : str
"""
result["DriveAPI"].GetTypeAttr.func_doc = """GetTypeAttr() -> Attribute
Drive spring is for the acceleration at the joint (rather than the
force).
Declaration
``uniform token physics:type ="force"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
force, acceleration
"""
result["DriveAPI"].CreateTypeAttr.func_doc = """CreateTypeAttr(defaultValue, writeSparsely) -> Attribute
See GetTypeAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetMaxForceAttr.func_doc = """GetMaxForceAttr() -> Attribute
Maximum force that can be applied to drive.
Units: if linear drive: mass\\*DIST_UNITS/second/second; if angular
drive: mass\\*DIST_UNITS\\*DIST_UNITS/second/second. inf means not
limited. Must be non-negative.
Declaration
``float physics:maxForce = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DriveAPI"].CreateMaxForceAttr.func_doc = """CreateMaxForceAttr(defaultValue, writeSparsely) -> Attribute
See GetMaxForceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetTargetPositionAttr.func_doc = """GetTargetPositionAttr() -> Attribute
Target value for position.
Units: if linear drive: distance if angular drive: degrees.
Declaration
``float physics:targetPosition = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DriveAPI"].CreateTargetPositionAttr.func_doc = """CreateTargetPositionAttr(defaultValue, writeSparsely) -> Attribute
See GetTargetPositionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetTargetVelocityAttr.func_doc = """GetTargetVelocityAttr() -> Attribute
Target value for velocity.
Units: if linear drive: distance/second if angular drive:
degrees/second.
Declaration
``float physics:targetVelocity = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DriveAPI"].CreateTargetVelocityAttr.func_doc = """CreateTargetVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetTargetVelocityAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetDampingAttr.func_doc = """GetDampingAttr() -> Attribute
Damping of the drive.
Units: if linear drive: mass/second If angular drive:
mass\\*DIST_UNITS\\*DIST_UNITS/second/second/degrees.
Declaration
``float physics:damping = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DriveAPI"].CreateDampingAttr.func_doc = """CreateDampingAttr(defaultValue, writeSparsely) -> Attribute
See GetDampingAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetStiffnessAttr.func_doc = """GetStiffnessAttr() -> Attribute
Stiffness of the drive.
Units: if linear drive: mass/second/second if angular drive:
mass\\*DIST_UNITS\\*DIST_UNITS/degree/second/second.
Declaration
``float physics:stiffness = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["DriveAPI"].CreateStiffnessAttr.func_doc = """CreateStiffnessAttr(defaultValue, writeSparsely) -> Attribute
See GetStiffnessAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["DriveAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
----------------------------------------------------------------------
GetSchemaAttributeNames(includeInherited, instanceName) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes for a given instance name.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved. The names returned will have the
proper namespace prefix.
Parameters
----------
includeInherited : bool
instanceName : str
"""
result["DriveAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> DriveAPI
Return a UsdPhysicsDriveAPI holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
``path`` must be of the format <path>.drive:name.
This is shorthand for the following:
.. code-block:: text
TfToken name = SdfPath::StripNamespace(path.GetToken());
UsdPhysicsDriveAPI(
stage->GetPrimAtPath(path.GetPrimPath()), name);
Parameters
----------
stage : Stage
path : Path
----------------------------------------------------------------------
Get(prim, name) -> DriveAPI
Return a UsdPhysicsDriveAPI with name ``name`` holding the prim
``prim`` .
Shorthand for UsdPhysicsDriveAPI(prim, name);
Parameters
----------
prim : Prim
name : str
"""
result["DriveAPI"].GetAll.func_doc = """**classmethod** GetAll(prim) -> list[DriveAPI]
Return a vector of all named instances of UsdPhysicsDriveAPI on the
given ``prim`` .
Parameters
----------
prim : Prim
"""
result["DriveAPI"].IsPhysicsDriveAPIPath.func_doc = """**classmethod** IsPhysicsDriveAPIPath(path, name) -> bool
Checks if the given path ``path`` is of an API schema of type
PhysicsDriveAPI.
If so, it stores the instance name of the schema in ``name`` and
returns true. Otherwise, it returns false.
Parameters
----------
path : Path
name : str
"""
result["DriveAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, name, whyNot) -> bool
Returns true if this **multiple-apply** API schema can be applied,
with the given instance name, ``name`` , to the given ``prim`` .
If this schema cannot be applied to the prim, this returns false and,
if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
whyNot : str
"""
result["DriveAPI"].Apply.func_doc = """**classmethod** Apply(prim, name) -> DriveAPI
Applies this **multiple-apply** API schema to the given ``prim`` along
with the given instance name, ``name`` .
This information is stored by adding"PhysicsDriveAPI:<i>name</i>"to
the token-valued, listOp metadata *apiSchemas* on the prim. For
example, if ``name`` is'instance1', the
token'PhysicsDriveAPI:instance1'is added to'apiSchemas'.
A valid UsdPhysicsDriveAPI object is returned upon success. An invalid
(or empty) UsdPhysicsDriveAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
"""
result["DriveAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["FilteredPairsAPI"].__doc__ = """
API to describe fine-grained filtering. If a collision between two
objects occurs, this pair might be filtered if the pair is defined
through this API. This API can be applied either to a body or
collision or even articulation. The"filteredPairs"defines what objects
it should not collide against. Note that FilteredPairsAPI filtering
has precedence over CollisionGroup filtering.
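A minimal Python sketch (illustrative; the prim paths are assumptions
for the example):
.. code-block:: text
  from pxr import Usd, UsdGeom, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  box = UsdGeom.Cube.Define(stage, "/World/Box").GetPrim()
  floor = UsdGeom.Cube.Define(stage, "/World/Floor").GetPrim()
  filtered = UsdPhysics.FilteredPairsAPI.Apply(box)
  filtered.CreateFilteredPairsRel().AddTarget(floor.GetPath())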
"""
result["FilteredPairsAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsFilteredPairsAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsFilteredPairsAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsFilteredPairsAPI on the prim held by
``schemaObj`` .
Should be preferred over UsdPhysicsFilteredPairsAPI
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["FilteredPairsAPI"].GetFilteredPairsRel.func_doc = """GetFilteredPairsRel() -> Relationship
Relationship to objects that should be filtered.
"""
result["FilteredPairsAPI"].CreateFilteredPairsRel.func_doc = """CreateFilteredPairsRel() -> Relationship
See GetFilteredPairsRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
result["FilteredPairsAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["FilteredPairsAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> FilteredPairsAPI
Return a UsdPhysicsFilteredPairsAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsFilteredPairsAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["FilteredPairsAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["FilteredPairsAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> FilteredPairsAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"PhysicsFilteredPairsAPI"to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsFilteredPairsAPI object is returned upon success. An
invalid (or empty) UsdPhysicsFilteredPairsAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["FilteredPairsAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["FixedJoint"].__doc__ = """
Predefined fixed joint type (All degrees of freedom are removed.)
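A minimal Python sketch (illustrative; the body prim paths are
assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  weld = UsdPhysics.FixedJoint.Define(stage, "/World/Weld")
  weld.CreateBody0Rel().AddTarget("/World/Base")
  weld.CreateBody1Rel().AddTarget("/World/Bracket")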
"""
result["FixedJoint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsFixedJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsFixedJoint::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsFixedJoint on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsFixedJoint (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["FixedJoint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["FixedJoint"].Get.func_doc = """**classmethod** Get(stage, path) -> FixedJoint
Return a UsdPhysicsFixedJoint holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsFixedJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["FixedJoint"].Define.func_doc = """**classmethod** Define(stage, path) -> FixedJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["FixedJoint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["Joint"].__doc__ = """
A joint constrains the movement of rigid bodies. A joint can be created
between two rigid bodies or between one rigid body and the world. By
default the joint primitive defines a D6 joint where all degrees of
freedom are free: three linear and three angular degrees of freedom.
Note that the default behavior is to disable collision between jointed
bodies.
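A minimal Python sketch of authoring a generic D6 joint (illustrative;
the prim paths and frame offset are assumptions for the example):
.. code-block:: text
  from pxr import Gf, Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  joint = UsdPhysics.Joint.Define(stage, "/World/D6Joint")
  joint.CreateBody0Rel().AddTarget("/World/BoxA")
  joint.CreateBody1Rel().AddTarget("/World/BoxB")
  joint.CreateLocalPos0Attr(Gf.Vec3f(0.0, 0.5, 0.0))
  joint.CreateCollisionEnabledAttr(False)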
"""
result["Joint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsJoint::Get (prim.GetStage(), prim.GetPath())
for a *valid* ``prim`` , but will not immediately throw an error for
an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsJoint on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsJoint (schemaObj.GetPrim()), as it
preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["Joint"].GetLocalPos0Attr.func_doc = """GetLocalPos0Attr() -> Attribute
Relative position of the joint frame to body0's frame.
Declaration
``point3f physics:localPos0 = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
result["Joint"].CreateLocalPos0Attr.func_doc = """CreateLocalPos0Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalPos0Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetLocalRot0Attr.func_doc = """GetLocalRot0Attr() -> Attribute
Relative orientation of the joint frame to body0's frame.
Declaration
``quatf physics:localRot0 = (1, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
result["Joint"].CreateLocalRot0Attr.func_doc = """CreateLocalRot0Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalRot0Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetLocalPos1Attr.func_doc = """GetLocalPos1Attr() -> Attribute
Relative position of the joint frame to body1's frame.
Declaration
``point3f physics:localPos1 = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
result["Joint"].CreateLocalPos1Attr.func_doc = """CreateLocalPos1Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalPos1Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetLocalRot1Attr.func_doc = """GetLocalRot1Attr() -> Attribute
Relative orientation of the joint frame to body1's frame.
Declaration
``quatf physics:localRot1 = (1, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
result["Joint"].CreateLocalRot1Attr.func_doc = """CreateLocalRot1Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalRot1Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetJointEnabledAttr.func_doc = """GetJointEnabledAttr() -> Attribute
Determines if the joint is enabled.
Declaration
``bool physics:jointEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["Joint"].CreateJointEnabledAttr.func_doc = """CreateJointEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetJointEnabledAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetCollisionEnabledAttr.func_doc = """GetCollisionEnabledAttr() -> Attribute
Determines if the jointed subtrees should collide or not.
Declaration
``bool physics:collisionEnabled = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["Joint"].CreateCollisionEnabledAttr.func_doc = """CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetCollisionEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetExcludeFromArticulationAttr.func_doc = """GetExcludeFromArticulationAttr() -> Attribute
Determines if the joint can be included in an Articulation.
Declaration
``uniform bool physics:excludeFromArticulation = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
Variability
SdfVariabilityUniform
"""
result["Joint"].CreateExcludeFromArticulationAttr.func_doc = """CreateExcludeFromArticulationAttr(defaultValue, writeSparsely) -> Attribute
See GetExcludeFromArticulationAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetBreakForceAttr.func_doc = """GetBreakForceAttr() -> Attribute
Joint break force.
If set, joint is to break when this force limit is reached. (Used for
linear DOFs.) Units: mass \\* distance / second / second
Declaration
``float physics:breakForce = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["Joint"].CreateBreakForceAttr.func_doc = """CreateBreakForceAttr(defaultValue, writeSparsely) -> Attribute
See GetBreakForceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetBreakTorqueAttr.func_doc = """GetBreakTorqueAttr() -> Attribute
Joint break torque.
If set, joint is to break when this torque limit is reached. (Used for
angular DOFs.) Units: mass \\* distance \\* distance / second / second
Declaration
``float physics:breakTorque = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["Joint"].CreateBreakTorqueAttr.func_doc = """CreateBreakTorqueAttr(defaultValue, writeSparsely) -> Attribute
See GetBreakTorqueAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Joint"].GetBody0Rel.func_doc = """GetBody0Rel() -> Relationship
Relationship to any UsdGeomXformable.
"""
result["Joint"].CreateBody0Rel.func_doc = """CreateBody0Rel() -> Relationship
See GetBody0Rel() , and also Create vs Get Property Methods for when
to use Get vs Create.
"""
result["Joint"].GetBody1Rel.func_doc = """GetBody1Rel() -> Relationship
Relationship to any UsdGeomXformable.
"""
result["Joint"].CreateBody1Rel.func_doc = """CreateBody1Rel() -> Relationship
See GetBody1Rel() , and also Create vs Get Property Methods for when
to use Get vs Create.
"""
result["Joint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["Joint"].Get.func_doc = """**classmethod** Get(stage, path) -> Joint
Return a UsdPhysicsJoint holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["Joint"].Define.func_doc = """**classmethod** Define(stage, path) -> Joint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["Joint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["LimitAPI"].__doc__ = """
The PhysicsLimitAPI can be applied to a PhysicsJoint and will restrict
the movement along an axis. PhysicsLimitAPI is a multipleApply schema:
The PhysicsJoint can be restricted
along"transX","transY","transZ","rotX","rotY","rotZ","distance".
Setting these as a multipleApply schema TfToken name will define the
degree of freedom the PhysicsLimitAPI is applied to. Note that if the
low limit is higher than the high limit, motion along this axis is
considered locked.
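A minimal Python sketch restricting rotation about X (illustrative; the
joint path and limit values are assumptions for the example):
.. code-block:: text
  from pxr import Usd, UsdPhysics
  stage = Usd.Stage.CreateInMemory()
  joint = UsdPhysics.Joint.Define(stage, "/World/D6Joint")
  limit = UsdPhysics.LimitAPI.Apply(joint.GetPrim(), "rotX")
  limit.CreateLowAttr(-45.0)
  limit.CreateHighAttr(45.0)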
"""
result["LimitAPI"].__init__.func_doc = """__init__(prim, name)
Construct a UsdPhysicsLimitAPI on UsdPrim ``prim`` with name ``name``
.
Equivalent to UsdPhysicsLimitAPI::Get ( prim.GetStage(),
prim.GetPath().AppendProperty("limit:name"));
for a *valid* ``prim`` , but will not immediately throw an error for
an invalid ``prim``
Parameters
----------
prim : Prim
name : str
----------------------------------------------------------------------
__init__(schemaObj, name)
Construct a UsdPhysicsLimitAPI on the prim held by ``schemaObj`` with
name ``name`` .
Should be preferred over UsdPhysicsLimitAPI (schemaObj.GetPrim(),
name), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
name : str
"""
result["LimitAPI"].GetLowAttr.func_doc = """GetLowAttr() -> Attribute
Lower limit.
Units: degrees or distance depending on trans or rot axis applied to.
\\-inf means not limited in negative direction.
Declaration
``float physics:low = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["LimitAPI"].CreateLowAttr.func_doc = """CreateLowAttr(defaultValue, writeSparsely) -> Attribute
See GetLowAttr() , and also Create vs Get Property Methods for when to
use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["LimitAPI"].GetHighAttr.func_doc = """GetHighAttr() -> Attribute
Upper limit.
Units: degrees or distance depending on trans or rot axis applied to.
inf means not limited in positive direction.
Declaration
``float physics:high = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["LimitAPI"].CreateHighAttr.func_doc = """CreateHighAttr(defaultValue, writeSparsely) -> Attribute
See GetHighAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["LimitAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
----------------------------------------------------------------------
GetSchemaAttributeNames(includeInherited, instanceName) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes for a given instance name.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved. The names returned will have the
proper namespace prefix.
Parameters
----------
includeInherited : bool
instanceName : str
"""
result["LimitAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> LimitAPI
Return a UsdPhysicsLimitAPI holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
``path`` must be of the format <path>.limit:name.
This is shorthand for the following:
.. code-block:: text
TfToken name = SdfPath::StripNamespace(path.GetToken());
UsdPhysicsLimitAPI(
stage->GetPrimAtPath(path.GetPrimPath()), name);
Parameters
----------
stage : Stage
path : Path
----------------------------------------------------------------------
Get(prim, name) -> LimitAPI
Return a UsdPhysicsLimitAPI with name ``name`` holding the prim
``prim`` .
Shorthand for UsdPhysicsLimitAPI(prim, name);
Parameters
----------
prim : Prim
name : str
"""
result["LimitAPI"].GetAll.func_doc = """**classmethod** GetAll(prim) -> list[LimitAPI]
Return a vector of all named instances of UsdPhysicsLimitAPI on the
given ``prim`` .
Parameters
----------
prim : Prim
"""
result["LimitAPI"].IsPhysicsLimitAPIPath.func_doc = """**classmethod** IsPhysicsLimitAPIPath(path, name) -> bool
Checks if the given path ``path`` is of an API schema of type
PhysicsLimitAPI.
If so, it stores the instance name of the schema in ``name`` and
returns true. Otherwise, it returns false.
Parameters
----------
path : Path
name : str
"""
result["LimitAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, name, whyNot) -> bool
Returns true if this **multiple-apply** API schema can be applied,
with the given instance name, ``name`` , to the given ``prim`` .
If this schema cannot be applied to the prim, this returns false and,
if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
whyNot : str
"""
result["LimitAPI"].Apply.func_doc = """**classmethod** Apply(prim, name) -> LimitAPI
Applies this **multiple-apply** API schema to the given ``prim`` along
with the given instance name, ``name`` .
This information is stored by adding "PhysicsLimitAPI:<name>" to
the token-valued, listOp metadata *apiSchemas* on the prim. For
example, if ``name`` is 'instance1', the
token 'PhysicsLimitAPI:instance1' is added to 'apiSchemas'.
A valid UsdPhysicsLimitAPI object is returned upon success. An invalid
(or empty) UsdPhysicsLimitAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
"""
result["LimitAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["MassAPI"].__doc__ = """
Defines explicit mass properties (mass, density, inertia etc.).
MassAPI can be applied to any object that has a PhysicsCollisionAPI or
a PhysicsRigidBodyAPI.
"""
result["MassAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsMassAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsMassAPI::Get (prim.GetStage(), prim.GetPath())
for a *valid* ``prim`` , but will not immediately throw an error for
an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsMassAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsMassAPI (schemaObj.GetPrim()), as
it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["MassAPI"].GetMassAttr.func_doc = """GetMassAttr() -> Attribute
If non-zero, directly specifies the mass of the object.
Note that any child prim can also have a mass when it applies MassAPI.
In this case, the precedence rule is 'parent mass overrides the
child's'. This may seem counter-intuitive, but mass is a computed
quantity and in general not accumulative. For example, if a parent has
a mass of 10, and one of two children has a mass of 20, allowing the
child's mass to override its parent's would result in a mass of -10 for
the other child. Note that if mass is 0.0 it is ignored. Units: mass.
Declaration
``float physics:mass = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MassAPI"].CreateMassAttr.func_doc = """CreateMassAttr(defaultValue, writeSparsely) -> Attribute
See GetMassAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MassAPI"].GetDensityAttr.func_doc = """GetDensityAttr() -> Attribute
If non-zero, specifies the density of the object.
In the context of rigid body physics, density indirectly results in
setting mass via (mass = density x volume of the object). How the
volume is computed is up to implementation of the physics system. It
is generally computed from the collision approximation rather than the
graphical mesh. In the case where both density and mass are specified
for the same object, mass has precedence over density. Unlike mass, a
child prim's density overrides its parent prim's density, as density is
accumulative. Note that the density of a collisionAPI can alternatively
be set through a PhysicsMaterialAPI; the material density has the
weakest precedence in the density definition. Note that if density is
0.0 it is ignored. Units: mass/distance/distance/distance.
Declaration
``float physics:density = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MassAPI"].CreateDensityAttr.func_doc = """CreateDensityAttr(defaultValue, writeSparsely) -> Attribute
See GetDensityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MassAPI"].GetCenterOfMassAttr.func_doc = """GetCenterOfMassAttr() -> Attribute
Center of mass in the prim's local space.
Units: distance.
Declaration
``point3f physics:centerOfMass = (-inf, -inf, -inf)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
result["MassAPI"].CreateCenterOfMassAttr.func_doc = """CreateCenterOfMassAttr(defaultValue, writeSparsely) -> Attribute
See GetCenterOfMassAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MassAPI"].GetDiagonalInertiaAttr.func_doc = """GetDiagonalInertiaAttr() -> Attribute
If non-zero, specifies diagonalized inertia tensor along the principal
axes.
Note if diagonalInertial is (0.0, 0.0, 0.0) it is ignored. Units:
mass\\*distance\\*distance.
Declaration
``float3 physics:diagonalInertia = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Float3
"""
result["MassAPI"].CreateDiagonalInertiaAttr.func_doc = """CreateDiagonalInertiaAttr(defaultValue, writeSparsely) -> Attribute
See GetDiagonalInertiaAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MassAPI"].GetPrincipalAxesAttr.func_doc = """GetPrincipalAxesAttr() -> Attribute
Orientation of the inertia tensor's principal axes in the prim's local
space.
Declaration
``quatf physics:principalAxes = (0, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
result["MassAPI"].CreatePrincipalAxesAttr.func_doc = """CreatePrincipalAxesAttr(defaultValue, writeSparsely) -> Attribute
See GetPrincipalAxesAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MassAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["MassAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> MassAPI
Return a UsdPhysicsMassAPI holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMassAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["MassAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["MassAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> MassAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsMassAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMassAPI object is returned upon success. An invalid
(or empty) UsdPhysicsMassAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["MassAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["MassUnits"].__doc__ = """
Container class for static double-precision symbols representing
common mass units of measure expressed in kilograms.
"""
result["MaterialAPI"].__doc__ = """
Adds simulation material properties to a Material. All collisions that
have a relationship to this material will have their collision
response defined through this material.
"""
result["MaterialAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsMaterialAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsMaterialAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsMaterialAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsMaterialAPI (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["MaterialAPI"].GetDynamicFrictionAttr.func_doc = """GetDynamicFrictionAttr() -> Attribute
Dynamic friction coefficient.
Unitless.
Declaration
``float physics:dynamicFriction = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MaterialAPI"].CreateDynamicFrictionAttr.func_doc = """CreateDynamicFrictionAttr(defaultValue, writeSparsely) -> Attribute
See GetDynamicFrictionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MaterialAPI"].GetStaticFrictionAttr.func_doc = """GetStaticFrictionAttr() -> Attribute
Static friction coefficient.
Unitless.
Declaration
``float physics:staticFriction = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MaterialAPI"].CreateStaticFrictionAttr.func_doc = """CreateStaticFrictionAttr(defaultValue, writeSparsely) -> Attribute
See GetStaticFrictionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MaterialAPI"].GetRestitutionAttr.func_doc = """GetRestitutionAttr() -> Attribute
Restitution coefficient.
Unitless.
Declaration
``float physics:restitution = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MaterialAPI"].CreateRestitutionAttr.func_doc = """CreateRestitutionAttr(defaultValue, writeSparsely) -> Attribute
See GetRestitutionAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MaterialAPI"].GetDensityAttr.func_doc = """GetDensityAttr() -> Attribute
If non-zero, defines the density of the material.
This can be used for body mass computation, see PhysicsMassAPI. Note
that if the density is 0.0 it is ignored. Units:
mass/distance/distance/distance.
Declaration
``float physics:density = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["MaterialAPI"].CreateDensityAttr.func_doc = """CreateDensityAttr(defaultValue, writeSparsely) -> Attribute
See GetDensityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MaterialAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["MaterialAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> MaterialAPI
Return a UsdPhysicsMaterialAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMaterialAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["MaterialAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["MaterialAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> MaterialAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsMaterialAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMaterialAPI object is returned upon success. An
invalid (or empty) UsdPhysicsMaterialAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["MaterialAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["MeshCollisionAPI"].__doc__ = """
Attributes to control how a Mesh is made into a collider. Can be
applied only to a UsdGeomMesh, in addition to its PhysicsCollisionAPI.
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
result["MeshCollisionAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsMeshCollisionAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsMeshCollisionAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsMeshCollisionAPI on the prim held by
``schemaObj`` .
Should be preferred over UsdPhysicsMeshCollisionAPI
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["MeshCollisionAPI"].GetApproximationAttr.func_doc = """GetApproximationAttr() -> Attribute
Determines the mesh's collision approximation:
"none" - The mesh geometry is used directly as a collider without any
approximation.
"convexDecomposition" - A convex mesh decomposition is performed,
resulting in a set of convex mesh colliders.
"convexHull" - A convex hull of the mesh is generated and used as the
collider.
"boundingSphere" - A bounding sphere is computed around the mesh and
used as a collider.
"boundingCube" - An optimally fitting box collider is computed around
the mesh.
"meshSimplification" - A mesh simplification step is performed,
resulting in a simplified triangle mesh collider.
Declaration
``uniform token physics:approximation ="none"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
none, convexDecomposition, convexHull, boundingSphere, boundingCube,
meshSimplification
"""
result["MeshCollisionAPI"].CreateApproximationAttr.func_doc = """CreateApproximationAttr(defaultValue, writeSparsely) -> Attribute
See GetApproximationAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["MeshCollisionAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["MeshCollisionAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> MeshCollisionAPI
Return a UsdPhysicsMeshCollisionAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMeshCollisionAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["MeshCollisionAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["MeshCollisionAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> MeshCollisionAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsMeshCollisionAPI" to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMeshCollisionAPI object is returned upon success. An
invalid (or empty) UsdPhysicsMeshCollisionAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["MeshCollisionAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["PrismaticJoint"].__doc__ = """
Predefined prismatic joint type (translation along prismatic joint
axis is permitted.)
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
result["PrismaticJoint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsPrismaticJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsPrismaticJoint::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsPrismaticJoint on the prim held by ``schemaObj``
.
Should be preferred over UsdPhysicsPrismaticJoint
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["PrismaticJoint"].GetAxisAttr.func_doc = """GetAxisAttr() -> Attribute
Joint axis.
Declaration
``uniform token physics:axis ="X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
result["PrismaticJoint"].CreateAxisAttr.func_doc = """CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["PrismaticJoint"].GetLowerLimitAttr.func_doc = """GetLowerLimitAttr() -> Attribute
Lower limit.
Units: distance. -inf means not limited in negative direction.
Declaration
``float physics:lowerLimit = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["PrismaticJoint"].CreateLowerLimitAttr.func_doc = """CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetLowerLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["PrismaticJoint"].GetUpperLimitAttr.func_doc = """GetUpperLimitAttr() -> Attribute
Upper limit.
Units: distance. inf means not limited in positive direction.
Declaration
``float physics:upperLimit = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["PrismaticJoint"].CreateUpperLimitAttr.func_doc = """CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetUpperLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["PrismaticJoint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["PrismaticJoint"].Get.func_doc = """**classmethod** Get(stage, path) -> PrismaticJoint
Return a UsdPhysicsPrismaticJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsPrismaticJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["PrismaticJoint"].Define.func_doc = """**classmethod** Define(stage, path) -> PrismaticJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["PrismaticJoint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["RevoluteJoint"].__doc__ = """
Predefined revolute joint type (rotation along revolute joint axis is
permitted.)
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
result["RevoluteJoint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsRevoluteJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsRevoluteJoint::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsRevoluteJoint on the prim held by ``schemaObj``
.
Should be preferred over UsdPhysicsRevoluteJoint
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["RevoluteJoint"].GetAxisAttr.func_doc = """GetAxisAttr() -> Attribute
Joint axis.
Declaration
``uniform token physics:axis ="X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
result["RevoluteJoint"].CreateAxisAttr.func_doc = """CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RevoluteJoint"].GetLowerLimitAttr.func_doc = """GetLowerLimitAttr() -> Attribute
Lower limit.
Units: degrees. -inf means not limited in negative direction.
Declaration
``float physics:lowerLimit = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["RevoluteJoint"].CreateLowerLimitAttr.func_doc = """CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetLowerLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RevoluteJoint"].GetUpperLimitAttr.func_doc = """GetUpperLimitAttr() -> Attribute
Upper limit.
Units: degrees. inf means not limited in positive direction.
Declaration
``float physics:upperLimit = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["RevoluteJoint"].CreateUpperLimitAttr.func_doc = """CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetUpperLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RevoluteJoint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["RevoluteJoint"].Get.func_doc = """**classmethod** Get(stage, path) -> RevoluteJoint
Return a UsdPhysicsRevoluteJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsRevoluteJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["RevoluteJoint"].Define.func_doc = """**classmethod** Define(stage, path) -> RevoluteJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["RevoluteJoint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["RigidBodyAPI"].__doc__ = """
Applies physics body attributes to any UsdGeomXformable prim and marks
that prim to be driven by a simulation. If a simulation is running it
will update this prim's pose. All prims in the hierarchy below this
prim should move accordingly.
"""
result["RigidBodyAPI"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsRigidBodyAPI on UsdPrim ``prim`` .
Equivalent to UsdPhysicsRigidBodyAPI::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsRigidBodyAPI on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsRigidBodyAPI (schemaObj.GetPrim()),
as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["RigidBodyAPI"].GetRigidBodyEnabledAttr.func_doc = """GetRigidBodyEnabledAttr() -> Attribute
Determines if this PhysicsRigidBodyAPI is enabled.
Declaration
``bool physics:rigidBodyEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["RigidBodyAPI"].CreateRigidBodyEnabledAttr.func_doc = """CreateRigidBodyEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetRigidBodyEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RigidBodyAPI"].GetKinematicEnabledAttr.func_doc = """GetKinematicEnabledAttr() -> Attribute
Determines whether the body is kinematic or not.
A kinematic body is a body that is moved through animated poses or
through user defined poses. The simulation derives velocities for the
kinematic body based on the external motion. When a continuous motion
is not desired, this kinematic flag should be set to false.
Declaration
``bool physics:kinematicEnabled = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
result["RigidBodyAPI"].CreateKinematicEnabledAttr.func_doc = """CreateKinematicEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetKinematicEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RigidBodyAPI"].GetStartsAsleepAttr.func_doc = """GetStartsAsleepAttr() -> Attribute
Determines if the body is asleep when the simulation starts.
Declaration
``uniform bool physics:startsAsleep = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
Variability
SdfVariabilityUniform
"""
result["RigidBodyAPI"].CreateStartsAsleepAttr.func_doc = """CreateStartsAsleepAttr(defaultValue, writeSparsely) -> Attribute
See GetStartsAsleepAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RigidBodyAPI"].GetVelocityAttr.func_doc = """GetVelocityAttr() -> Attribute
Linear velocity in the same space as the node's xform.
Units: distance/second.
Declaration
``vector3f physics:velocity = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
result["RigidBodyAPI"].CreateVelocityAttr.func_doc = """CreateVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetVelocityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RigidBodyAPI"].GetAngularVelocityAttr.func_doc = """GetAngularVelocityAttr() -> Attribute
Angular velocity in the same space as the node's xform.
Units: degrees/second.
Declaration
``vector3f physics:angularVelocity = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
result["RigidBodyAPI"].CreateAngularVelocityAttr.func_doc = """CreateAngularVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetAngularVelocityAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["RigidBodyAPI"].GetSimulationOwnerRel.func_doc = """GetSimulationOwnerRel() -> Relationship
Single PhysicsScene that will simulate this body.
By default this is the first PhysicsScene found in the stage using
UsdStage::Traverse() .
"""
result["RigidBodyAPI"].CreateSimulationOwnerRel.func_doc = """CreateSimulationOwnerRel() -> Relationship
See GetSimulationOwnerRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
result["RigidBodyAPI"].ComputeMassProperties.func_doc = """ComputeMassProperties(diagonalInertia, com, principalAxes, massInfoFn) -> float
Compute mass properties of the rigid body.
``diagonalInertia`` Computed diagonal of the inertia tensor for the
rigid body.
``com`` Computed center of mass for the rigid body.
``principalAxes`` Orientation of the inertia tensor's principal axes
for the rigid body.
``massInfoFn`` Callback function to get collision mass information.
Returns the computed mass of the rigid body.
Parameters
----------
diagonalInertia : Vec3f
com : Vec3f
principalAxes : Quatf
massInfoFn : MassInformationFn
"""
result["RigidBodyAPI"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["RigidBodyAPI"].Get.func_doc = """**classmethod** Get(stage, path) -> RigidBodyAPI
Return a UsdPhysicsRigidBodyAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsRigidBodyAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["RigidBodyAPI"].CanApply.func_doc = """**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
result["RigidBodyAPI"].Apply.func_doc = """**classmethod** Apply(prim) -> RigidBodyAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsRigidBodyAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsRigidBodyAPI object is returned upon success. An
invalid (or empty) UsdPhysicsRigidBodyAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
result["RigidBodyAPI"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["Scene"].__doc__ = """
General physics simulation properties, required for simulation.
"""
result["Scene"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsScene on UsdPrim ``prim`` .
Equivalent to UsdPhysicsScene::Get (prim.GetStage(), prim.GetPath())
for a *valid* ``prim`` , but will not immediately throw an error for
an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsScene on the prim held by ``schemaObj`` .
Should be preferred over UsdPhysicsScene (schemaObj.GetPrim()), as it
preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["Scene"].GetGravityDirectionAttr.func_doc = """GetGravityDirectionAttr() -> Attribute
Gravity direction vector in simulation world space.
Will be normalized before use. A zero vector is a request to use the
negative upAxis. Unitless.
Declaration
``vector3f physics:gravityDirection = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
result["Scene"].CreateGravityDirectionAttr.func_doc = """CreateGravityDirectionAttr(defaultValue, writeSparsely) -> Attribute
See GetGravityDirectionAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Scene"].GetGravityMagnitudeAttr.func_doc = """GetGravityMagnitudeAttr() -> Attribute
Gravity acceleration magnitude in simulation world space.
A negative value is a request to use a value equivalent to earth
gravity regardless of the metersPerUnit scaling used by this scene.
Units: distance/second/second.
Declaration
``float physics:gravityMagnitude = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["Scene"].CreateGravityMagnitudeAttr.func_doc = """CreateGravityMagnitudeAttr(defaultValue, writeSparsely) -> Attribute
See GetGravityMagnitudeAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["Scene"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["Scene"].Get.func_doc = """**classmethod** Get(stage, path) -> Scene
Return a UsdPhysicsScene holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsScene(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["Scene"].Define.func_doc = """**classmethod** Define(stage, path) -> Scene
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["Scene"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
"""
result["SphericalJoint"].__doc__ = """
Predefined spherical joint type (Removes linear degrees of freedom,
cone limit may restrict the motion in a given range.) It allows two
limit values which, when equal, create a circular cone limit around the
limit axis; when unequal, the cone limit is elliptic.
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
result["SphericalJoint"].__init__.func_doc = """__init__(prim)
Construct a UsdPhysicsSphericalJoint on UsdPrim ``prim`` .
Equivalent to UsdPhysicsSphericalJoint::Get (prim.GetStage(),
prim.GetPath()) for a *valid* ``prim`` , but will not immediately
throw an error for an invalid ``prim``
Parameters
----------
prim : Prim
----------------------------------------------------------------------
__init__(schemaObj)
Construct a UsdPhysicsSphericalJoint on the prim held by ``schemaObj``
.
Should be preferred over UsdPhysicsSphericalJoint
(schemaObj.GetPrim()), as it preserves SchemaBase state.
Parameters
----------
schemaObj : SchemaBase
"""
result["SphericalJoint"].GetAxisAttr.func_doc = """GetAxisAttr() -> Attribute
Cone limit axis.
Declaration
``uniform token physics:axis ="X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
result["SphericalJoint"].CreateAxisAttr.func_doc = """CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["SphericalJoint"].GetConeAngle0LimitAttr.func_doc = """GetConeAngle0LimitAttr() -> Attribute
Cone limit from the primary joint axis in the local0 frame toward the
next axis.
(Next axis of X is Y, and of Z is X.) A negative value means not
limited. Units: degrees.
Declaration
``float physics:coneAngle0Limit = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["SphericalJoint"].CreateConeAngle0LimitAttr.func_doc = """CreateConeAngle0LimitAttr(defaultValue, writeSparsely) -> Attribute
See GetConeAngle0LimitAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["SphericalJoint"].GetConeAngle1LimitAttr.func_doc = """GetConeAngle1LimitAttr() -> Attribute
Cone limit from the primary joint axis in the local0 frame toward the
second to next axis.
A negative value means not limited. Units: degrees.
Declaration
``float physics:coneAngle1Limit = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
result["SphericalJoint"].CreateConeAngle1LimitAttr.func_doc = """CreateConeAngle1LimitAttr(defaultValue, writeSparsely) -> Attribute
See GetConeAngle1LimitAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
result["SphericalJoint"].GetSchemaAttributeNames.func_doc = """**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
result["SphericalJoint"].Get.func_doc = """**classmethod** Get(stage, path) -> SphericalJoint
Return a UsdPhysicsSphericalJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsSphericalJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
result["SphericalJoint"].Define.func_doc = """**classmethod** Define(stage, path) -> SphericalJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
result["SphericalJoint"]._GetStaticTfType.func_doc = """**classmethod** _GetStaticTfType() -> Type
""" | 104,653 | Python | 20.249543 | 145 | 0.721862 |
omniverse-code/kit/exts/omni.usd.libs/pxr/UsdPhysics/__init__.pyi | from __future__ import annotations
import pxr.UsdPhysics._usdPhysics
import typing
import Boost.Python
import pxr.Usd
import pxr.UsdGeom
import pxr.UsdPhysics
__all__ = [
"ArticulationRootAPI",
"CollisionAPI",
"CollisionGroup",
"CollisionGroupTable",
"DistanceJoint",
"DriveAPI",
"FilteredPairsAPI",
"FixedJoint",
"GetStageKilogramsPerUnit",
"Joint",
"LimitAPI",
"MassAPI",
"MassUnits",
"MassUnitsAre",
"MaterialAPI",
"MeshCollisionAPI",
"PrismaticJoint",
"RevoluteJoint",
"RigidBodyAPI",
"Scene",
"SetStageKilogramsPerUnit",
"SphericalJoint",
"StageHasAuthoredKilogramsPerUnit",
"Tokens"
]
class ArticulationRootAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
PhysicsArticulationRootAPI can be applied to a scene graph node, and
marks the subtree rooted here for inclusion in one or more reduced
coordinate articulations. For floating articulations, this should be
on the root body. For fixed articulations (robotics jargon for e.g. a
robot arm for welding that is bolted to the floor), this API can be on
a direct or indirect parent of the root joint which is connected to
    the world, or on the joint itself.
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> ArticulationRootAPI
Applies this **single-apply** API schema to the given ``prim`` .
        This information is stored by adding "PhysicsArticulationRootAPI" to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsArticulationRootAPI object is returned upon success.
An invalid (or empty) UsdPhysicsArticulationRootAPI object is returned
upon failure. See UsdPrim::ApplyAPI() for conditions resulting in
failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
        If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> ArticulationRootAPI
Return a UsdPhysicsArticulationRootAPI holding the prim adhering to
this schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsArticulationRootAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class CollisionAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Applies collision attributes to a UsdGeomXformable prim. If a
simulation is running, this geometry will collide with other
geometries that have PhysicsCollisionAPI applied. If a prim in the
parent hierarchy has the RigidBodyAPI applied, this collider is a part
of that body. If there is no body in the parent hierarchy, this
collider is considered to be static.
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> CollisionAPI
Applies this **single-apply** API schema to the given ``prim`` .
        This information is stored by adding "PhysicsCollisionAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsCollisionAPI object is returned upon success. An
invalid (or empty) UsdPhysicsCollisionAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetCollisionEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateSimulationOwnerRel() -> Relationship:
"""
CreateSimulationOwnerRel() -> Relationship
See GetSimulationOwnerRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> CollisionAPI
Return a UsdPhysicsCollisionAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsCollisionAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetCollisionEnabledAttr() -> Attribute:
"""
GetCollisionEnabledAttr() -> Attribute
Determines if the PhysicsCollisionAPI is enabled.
Declaration
``bool physics:collisionEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def GetSimulationOwnerRel() -> Relationship:
"""
GetSimulationOwnerRel() -> Relationship
Single PhysicsScene that will simulate this collider.
By default this object belongs to the first PhysicsScene. Note that if
a RigidBodyAPI in the hierarchy above has a different simulationOwner
then it has a precedence over this relationship.
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class CollisionGroup(pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Defines a collision group for coarse filtering. When a collision
occurs between two objects that have a PhysicsCollisionGroup assigned,
they will collide with each other unless this PhysicsCollisionGroup
pair is filtered. See filteredGroups attribute.
A CollectionAPI:colliders collection maintains a list of PhysicsCollisionAPI
relationships that defines the members of this CollisionGroup.
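Example (illustrative sketch; prim paths are hypothetical): defining two
groups, adding a collider to one, and filtering the pair so the two groups
do not collide with each other.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
group_a = UsdPhysics.CollisionGroup.Define(stage, "/World/GroupA")
group_b = UsdPhysics.CollisionGroup.Define(stage, "/World/GroupB")
group_a.GetCollidersCollectionAPI().CreateIncludesRel().AddTarget("/World/Ground")
group_a.CreateFilteredGroupsRel().AddTarget(group_b.GetPath())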
"""
@staticmethod
def ComputeCollisionGroupTable(*args, **kwargs) -> None:
"""
**classmethod** ComputeCollisionGroupTable(stage) -> CollisionGroupTable
Compute a table encoding all the collision groups filter rules for a
stage.
This can be used as a reference to validate an implementation of the
collision groups filters. The returned table is diagonally symmetric.
Parameters
----------
stage : Stage
"""
@staticmethod
def CreateFilteredGroupsRel() -> Relationship:
"""
CreateFilteredGroupsRel() -> Relationship
See GetFilteredGroupsRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
@staticmethod
def CreateInvertFilteredGroupsAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateInvertFilteredGroupsAttr(defaultValue, writeSparsely) -> Attribute
See GetInvertFilteredGroupsAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateMergeGroupNameAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateMergeGroupNameAttr(defaultValue, writeSparsely) -> Attribute
See GetMergeGroupNameAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> CollisionGroup
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> CollisionGroup
Return a UsdPhysicsCollisionGroup holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsCollisionGroup(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetCollidersCollectionAPI() -> CollectionAPI:
"""
GetCollidersCollectionAPI() -> CollectionAPI
Return the UsdCollectionAPI interface used for defining what colliders
belong to the CollisionGroup.
"""
@staticmethod
def GetFilteredGroupsRel() -> Relationship:
"""
GetFilteredGroupsRel() -> Relationship
References a list of PhysicsCollisionGroups with which collisions
should be ignored.
"""
@staticmethod
def GetInvertFilteredGroupsAttr() -> Attribute:
"""
GetInvertFilteredGroupsAttr() -> Attribute
Normally, the filter will disable collisions against the selected
filter groups.
However, if this option is set, the filter will disable collisions
against all colliders except for those in the selected filter groups.
Declaration
``bool physics:invertFilteredGroups``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetMergeGroupNameAttr() -> Attribute:
"""
GetMergeGroupNameAttr() -> Attribute
If non-empty, any collision groups in a stage with a matching
mergeGroup should be considered to refer to the same collection.
Matching collision groups should behave as if there were a single
group containing referenced colliders and filter groups from both
collections.
Declaration
``string physics:mergeGroup``
C++ Type
std::string
Usd Type
SdfValueTypeNames->String
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
class CollisionGroupTable(Boost.Python.instance):
@staticmethod
def GetGroups(*args, **kwargs) -> None: ...
@staticmethod
def IsCollisionEnabled(*args, **kwargs) -> None: ...
__instance_size__ = 72
pass
class DistanceJoint(Joint, pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Predefined distance joint type (Distance between rigid bodies may be
limited to a given minimum or maximum distance.)
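Example (illustrative sketch; the body prims are hypothetical and assumed to
be rigid bodies elsewhere on the stage): keeping two bodies between 0.1 and
1.0 distance units apart.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
tether = UsdPhysics.DistanceJoint.Define(stage, "/World/Tether")
tether.CreateBody0Rel().AddTarget("/World/BoxA")
tether.CreateBody1Rel().AddTarget("/World/BoxB")
tether.CreateMinDistanceAttr(0.1)
tether.CreateMaxDistanceAttr(1.0)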
"""
@staticmethod
def CreateMaxDistanceAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateMaxDistanceAttr(defaultValue, writeSparsely) -> Attribute
See GetMaxDistanceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateMinDistanceAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateMinDistanceAttr(defaultValue, writeSparsely) -> Attribute
See GetMinDistanceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> DistanceJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> DistanceJoint
Return a UsdPhysicsDistanceJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsDistanceJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetMaxDistanceAttr() -> Attribute:
"""
GetMaxDistanceAttr() -> Attribute
Maximum distance.
If attribute is negative, the joint is not limited. Units: distance.
Declaration
``float physics:maxDistance = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetMinDistanceAttr() -> Attribute:
"""
GetMinDistanceAttr() -> Attribute
Minimum distance.
If attribute is negative, the joint is not limited. Units: distance.
Declaration
``float physics:minDistance = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
class DriveAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
The PhysicsDriveAPI when applied to any joint primitive will drive the
joint towards a given target. The PhysicsDriveAPI is a multipleApply
schema: the drive can be set per axis ("transX", "transY", "transZ",
"rotX", "rotY", "rotZ"), or as "linear" for a prismatic joint or "angular"
for a revolute joint. Setting these as a multipleApply schema TfToken name
will define the degree of freedom the DriveAPI is applied to. Each drive is
an implicit force-limited damped spring: force or acceleration = stiffness
\* (targetPosition - position) - damping \* (targetVelocity - velocity).
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
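Example (illustrative sketch; the joint prim path is hypothetical): driving
the "rotX" degree of freedom of a D6 joint towards a 45 degree target with a
force-limited spring.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
joint = UsdPhysics.Joint.Define(stage, "/World/Hinge")
drive = UsdPhysics.DriveAPI.Apply(joint.GetPrim(), "rotX")
drive.CreateTargetPositionAttr(45.0)
drive.CreateStiffnessAttr(1000.0)
drive.CreateDampingAttr(100.0)
drive.CreateMaxForceAttr(500.0)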
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim, name) -> DriveAPI
Applies this **multiple-apply** API schema to the given ``prim`` along
with the given instance name, ``name`` .
This information is stored by adding "PhysicsDriveAPI:<name>" to
the token-valued, listOp metadata *apiSchemas* on the prim. For
example, if ``name`` is "instance1", the token
"PhysicsDriveAPI:instance1" is added to *apiSchemas*.
A valid UsdPhysicsDriveAPI object is returned upon success. An invalid
(or empty) UsdPhysicsDriveAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, name, whyNot) -> bool
Returns true if this **multiple-apply** API schema can be applied,
with the given instance name, ``name`` , to the given ``prim`` .
If this schema cannot be applied to the prim, this returns false and,
if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
whyNot : str
"""
@staticmethod
def CreateDampingAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateDampingAttr(defaultValue, writeSparsely) -> Attribute
See GetDampingAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateMaxForceAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateMaxForceAttr(defaultValue, writeSparsely) -> Attribute
See GetMaxForceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateStiffnessAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateStiffnessAttr(defaultValue, writeSparsely) -> Attribute
See GetStiffnessAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateTargetPositionAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateTargetPositionAttr(defaultValue, writeSparsely) -> Attribute
See GetTargetPositionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateTargetVelocityAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateTargetVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetTargetVelocityAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateTypeAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateTypeAttr(defaultValue, writeSparsely) -> Attribute
See GetTypeAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(prim, name) -> DriveAPI:
"""
**classmethod** Get(stage, path) -> DriveAPI
Return a UsdPhysicsDriveAPI holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
``path`` must be of the format<path>.drive:name.
This is shorthand for the following:
.. code-block:: text
TfToken name = SdfPath::StripNamespace(path.GetToken());
UsdPhysicsDriveAPI(
stage->GetPrimAtPath(path.GetPrimPath()), name);
Parameters
----------
stage : Stage
path : Path
----------------------------------------------------------------------
Return a UsdPhysicsDriveAPI with name ``name`` holding the prim
``prim`` .
Shorthand for UsdPhysicsDriveAPI(prim, name);
Parameters
----------
prim : Prim
name : str
"""
@staticmethod
def GetAll(*args, **kwargs) -> None:
"""
**classmethod** GetAll(prim) -> list[DriveAPI]
Return a vector of all named instances of UsdPhysicsDriveAPI on the
given ``prim`` .
Parameters
----------
prim : Prim
"""
@staticmethod
def GetDampingAttr() -> Attribute:
"""
GetDampingAttr() -> Attribute
Damping of the drive.
Units: if linear drive: mass/second If angular drive:
mass\*DIST_UNITS\*DIST_UNITS/second/second/degrees.
Declaration
``float physics:damping = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetMaxForceAttr() -> Attribute:
"""
GetMaxForceAttr() -> Attribute
Maximum force that can be applied to drive.
Units: if linear drive: mass\*DIST_UNITS/second/second if angular
drive: mass\*DIST_UNITS\*DIST_UNITS/second/second inf means not
limited. Must be non-negative.
Declaration
``float physics:maxForce = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(includeInherited, instanceName) -> list[TfToken]:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
----------------------------------------------------------------------
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes for a given instance name.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved. The names returned will have the
proper namespace prefix.
Parameters
----------
includeInherited : bool
instanceName : str
"""
@staticmethod
def GetStiffnessAttr() -> Attribute:
"""
GetStiffnessAttr() -> Attribute
Stiffness of the drive.
Units: if linear drive: mass/second/second if angular drive:
mass\*DIST_UNITS\*DIST_UNITS/degree/second/second.
Declaration
``float physics:stiffness = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetTargetPositionAttr() -> Attribute:
"""
GetTargetPositionAttr() -> Attribute
Target value for position.
Units: if linear drive: distance if angular drive: degrees.
Declaration
``float physics:targetPosition = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetTargetVelocityAttr() -> Attribute:
"""
GetTargetVelocityAttr() -> Attribute
Target value for velocity.
Units: if linear drive: distance/second if angular drive:
degrees/second.
Declaration
``float physics:targetVelocity = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetTypeAttr() -> Attribute:
"""
GetTypeAttr() -> Attribute
Drive spring is for the acceleration at the joint (rather than the
force).
Declaration
``uniform token physics:type ="force"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
force, acceleration
"""
@staticmethod
def IsPhysicsDriveAPIPath(*args, **kwargs) -> None:
"""
**classmethod** IsPhysicsDriveAPIPath(path, name) -> bool
Checks if the given path ``path`` is of an API schema of type
PhysicsDriveAPI.
If so, it stores the instance name of the schema in ``name`` and
returns true. Otherwise, it returns false.
Parameters
----------
path : Path
name : str
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class FilteredPairsAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
API to describe fine-grained filtering. If a collision between two
objects occurs, this pair might be filtered if the pair is defined
through this API. This API can be applied either to a body, a
collision, or even an articulation. The "filteredPairs" relationship defines
what objects it should not collide against. Note that FilteredPairsAPI filtering
has precedence over CollisionGroup filtering.
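Example (illustrative sketch; both prim paths are hypothetical): suppressing
collisions between one body and another specific prim.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
body = stage.DefinePrim("/World/BoxA", "Xform")
pairs = UsdPhysics.FilteredPairsAPI.Apply(body)
pairs.CreateFilteredPairsRel().AddTarget("/World/BoxB")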
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> FilteredPairsAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsFilteredPairsAPI" to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsFilteredPairsAPI object is returned upon success. An
invalid (or empty) UsdPhysicsFilteredPairsAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def CreateFilteredPairsRel() -> Relationship:
"""
CreateFilteredPairsRel() -> Relationship
See GetFilteredPairsRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> FilteredPairsAPI
Return a UsdPhysicsFilteredPairsAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsFilteredPairsAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetFilteredPairsRel() -> Relationship:
"""
GetFilteredPairsRel() -> Relationship
Relationship to objects that should be filtered.
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class FixedJoint(Joint, pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Predefined fixed joint type (All degrees of freedom are removed.)
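Example (illustrative sketch; the body path is hypothetical): fixing a body
in place by authoring a fixed joint with only one body relationship set, so
the other side of the joint is the world.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
anchor = UsdPhysics.FixedJoint.Define(stage, "/World/Anchor")
anchor.CreateBody1Rel().AddTarget("/World/RobotBase")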
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> FixedJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> FixedJoint
Return a UsdPhysicsFixedJoint holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsFixedJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
class Joint(pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
A joint constrains the movement of rigid bodies. A joint can be created
between two rigid bodies, or between one rigid body and the world. By
default the joint primitive defines a D6 joint where all degrees of
freedom are free: three linear and three angular degrees of freedom.
Note that default behavior is to disable collision between jointed
bodies.
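Example (illustrative sketch; the body prims are hypothetical): authoring a
D6 joint between two bodies with joint frames offset from each body's
origin.
.. code-block:: python
from pxr import Usd, UsdPhysics, Gf
stage = Usd.Stage.CreateInMemory()
joint = UsdPhysics.Joint.Define(stage, "/World/D6Joint")
joint.CreateBody0Rel().AddTarget("/World/BoxA")
joint.CreateBody1Rel().AddTarget("/World/BoxB")
joint.CreateLocalPos0Attr(Gf.Vec3f(0.0, 0.5, 0.0))
joint.CreateLocalPos1Attr(Gf.Vec3f(0.0, -0.5, 0.0))
joint.CreateLocalRot0Attr(Gf.Quatf(1.0, 0.0, 0.0, 0.0))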
"""
@staticmethod
def CreateBody0Rel() -> Relationship:
"""
CreateBody0Rel() -> Relationship
See GetBody0Rel() , and also Create vs Get Property Methods for when
to use Get vs Create.
"""
@staticmethod
def CreateBody1Rel() -> Relationship:
"""
CreateBody1Rel() -> Relationship
See GetBody1Rel() , and also Create vs Get Property Methods for when
to use Get vs Create.
"""
@staticmethod
def CreateBreakForceAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateBreakForceAttr(defaultValue, writeSparsely) -> Attribute
See GetBreakForceAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateBreakTorqueAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateBreakTorqueAttr(defaultValue, writeSparsely) -> Attribute
See GetBreakTorqueAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateCollisionEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetCollisionEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateExcludeFromArticulationAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateExcludeFromArticulationAttr(defaultValue, writeSparsely) -> Attribute
See GetExcludeFromArticulationAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateJointEnabledAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateJointEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetJointEnabledAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLocalPos0Attr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLocalPos0Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalPos0Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLocalPos1Attr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLocalPos1Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalPos1Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLocalRot0Attr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLocalRot0Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalRot0Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLocalRot1Attr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLocalRot1Attr(defaultValue, writeSparsely) -> Attribute
See GetLocalRot1Attr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> Joint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> Joint
Return a UsdPhysicsJoint holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetBody0Rel() -> Relationship:
"""
GetBody0Rel() -> Relationship
Relationship to any UsdGeomXformable.
"""
@staticmethod
def GetBody1Rel() -> Relationship:
"""
GetBody1Rel() -> Relationship
Relationship to any UsdGeomXformable.
"""
@staticmethod
def GetBreakForceAttr() -> Attribute:
"""
GetBreakForceAttr() -> Attribute
Joint break force.
If set, joint is to break when this force limit is reached. (Used for
linear DOFs.) Units: mass \* distance / second / second
Declaration
``float physics:breakForce = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetBreakTorqueAttr() -> Attribute:
"""
GetBreakTorqueAttr() -> Attribute
Joint break torque.
If set, joint is to break when this torque limit is reached. (Used for
angular DOFs.) Units: mass \* distance \* distance / second / second
Declaration
``float physics:breakTorque = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetCollisionEnabledAttr() -> Attribute:
"""
GetCollisionEnabledAttr() -> Attribute
Determines if the jointed subtrees should collide or not.
Declaration
``bool physics:collisionEnabled = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetExcludeFromArticulationAttr() -> Attribute:
"""
GetExcludeFromArticulationAttr() -> Attribute
Determines if the joint can be included in an Articulation.
Declaration
``uniform bool physics:excludeFromArticulation = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
Variability
SdfVariabilityUniform
"""
@staticmethod
def GetJointEnabledAttr() -> Attribute:
"""
GetJointEnabledAttr() -> Attribute
Determines if the joint is enabled.
Declaration
``bool physics:jointEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetLocalPos0Attr() -> Attribute:
"""
GetLocalPos0Attr() -> Attribute
Relative position of the joint frame to body0's frame.
Declaration
``point3f physics:localPos0 = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
@staticmethod
def GetLocalPos1Attr() -> Attribute:
"""
GetLocalPos1Attr() -> Attribute
Relative position of the joint frame to body1's frame.
Declaration
``point3f physics:localPos1 = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
@staticmethod
def GetLocalRot0Attr() -> Attribute:
"""
GetLocalRot0Attr() -> Attribute
Relative orientation of the joint frame to body0's frame.
Declaration
``quatf physics:localRot0 = (1, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
@staticmethod
def GetLocalRot1Attr() -> Attribute:
"""
GetLocalRot1Attr() -> Attribute
Relative orientation of the joint frame to body1's frame.
Declaration
``quatf physics:localRot1 = (1, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
class LimitAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
The PhysicsLimitAPI can be applied to a PhysicsJoint and will restrict
the movement along an axis. PhysicsLimitAPI is a multipleApply schema:
the PhysicsJoint can be restricted along "transX", "transY", "transZ",
"rotX", "rotY", "rotZ", or "distance".
Setting these as a multipleApply schema TfToken name will define the
degree of freedom the PhysicsLimitAPI is applied to. Note that if the
low limit is higher than the high limit, motion along this axis is
considered locked.
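Example (illustrative sketch; the joint prim is hypothetical): limiting the
"rotX" degree of freedom of a D6 joint to plus/minus 45 degrees.
.. code-block:: python
from pxr import Usd, UsdPhysics
stage = Usd.Stage.CreateInMemory()
joint = UsdPhysics.Joint.Define(stage, "/World/D6Joint")
limit = UsdPhysics.LimitAPI.Apply(joint.GetPrim(), "rotX")
limit.CreateLowAttr(-45.0)
limit.CreateHighAttr(45.0)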
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim, name) -> LimitAPI
Applies this **multiple-apply** API schema to the given ``prim`` along
with the given instance name, ``name`` .
This information is stored by adding "PhysicsLimitAPI:<name>" to
the token-valued, listOp metadata *apiSchemas* on the prim. For
example, if ``name`` is "instance1", the token
"PhysicsLimitAPI:instance1" is added to *apiSchemas*.
A valid UsdPhysicsLimitAPI object is returned upon success. An invalid
(or empty) UsdPhysicsLimitAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, name, whyNot) -> bool
Returns true if this **multiple-apply** API schema can be applied,
with the given instance name, ``name`` , to the given ``prim`` .
If this schema cannot be applied to the prim, this returns false and,
if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
name : str
whyNot : str
"""
@staticmethod
def CreateHighAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateHighAttr(defaultValue, writeSparsely) -> Attribute
See GetHighAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLowAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLowAttr(defaultValue, writeSparsely) -> Attribute
See GetLowAttr() , and also Create vs Get Property Methods for when to
use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(prim, name) -> LimitAPI:
"""
**classmethod** Get(stage, path) -> LimitAPI
Return a UsdPhysicsLimitAPI holding the prim adhering to this schema
at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
``path`` must be of the format<path>.limit:name.
This is shorthand for the following:
.. code-block:: text
TfToken name = SdfPath::StripNamespace(path.GetToken());
UsdPhysicsLimitAPI(
stage->GetPrimAtPath(path.GetPrimPath()), name);
Parameters
----------
stage : Stage
path : Path
----------------------------------------------------------------------
Return a UsdPhysicsLimitAPI with name ``name`` holding the prim
``prim`` .
Shorthand for UsdPhysicsLimitAPI(prim, name);
Parameters
----------
prim : Prim
name : str
"""
@staticmethod
def GetAll(*args, **kwargs) -> None:
"""
**classmethod** GetAll(prim) -> list[LimitAPI]
Return a vector of all named instances of UsdPhysicsLimitAPI on the
given ``prim`` .
Parameters
----------
prim : Prim
"""
@staticmethod
def GetHighAttr() -> Attribute:
"""
GetHighAttr() -> Attribute
Upper limit.
Units: degrees or distance depending on trans or rot axis applied to.
inf means not limited in positive direction.
Declaration
``float physics:high = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetLowAttr() -> Attribute:
"""
GetLowAttr() -> Attribute
Lower limit.
Units: degrees or distance depending on trans or rot axis applied to.
\-inf means not limited in negative direction.
Declaration
``float physics:low = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(includeInherited, instanceName) -> list[TfToken]:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
----------------------------------------------------------------------
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes for a given instance name.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved. The names returned will have the
proper namespace prefix.
Parameters
----------
includeInherited : bool
instanceName : str
"""
@staticmethod
def IsPhysicsLimitAPIPath(*args, **kwargs) -> None:
"""
**classmethod** IsPhysicsLimitAPIPath(path, name) -> bool
Checks if the given path ``path`` is of an API schema of type
PhysicsLimitAPI.
If so, it stores the instance name of the schema in ``name`` and
returns true. Otherwise, it returns false.
Parameters
----------
path : Path
name : str
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class MassAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Defines explicit mass properties (mass, density, inertia etc.).
MassAPI can be applied to any object that has a PhysicsCollisionAPI or
a PhysicsRigidBodyAPI.
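Example (illustrative sketch; the body prim is hypothetical and assumed to
carry PhysicsRigidBodyAPI elsewhere): overriding the computed mass and
center of mass of a body.
.. code-block:: python
from pxr import Usd, UsdPhysics, Gf
stage = Usd.Stage.CreateInMemory()
crate = stage.DefinePrim("/World/Crate", "Xform")
mass_api = UsdPhysics.MassAPI.Apply(crate)
mass_api.CreateMassAttr(10.0)
mass_api.CreateCenterOfMassAttr(Gf.Vec3f(0.0, -0.2, 0.0))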
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> MassAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsMassAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMassAPI object is returned upon success. An invalid
(or empty) UsdPhysicsMassAPI object is returned upon failure. See
UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def CreateCenterOfMassAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateCenterOfMassAttr(defaultValue, writeSparsely) -> Attribute
See GetCenterOfMassAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateDensityAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateDensityAttr(defaultValue, writeSparsely) -> Attribute
See GetDensityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateDiagonalInertiaAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateDiagonalInertiaAttr(defaultValue, writeSparsely) -> Attribute
See GetDiagonalInertiaAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateMassAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateMassAttr(defaultValue, writeSparsely) -> Attribute
See GetMassAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreatePrincipalAxesAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreatePrincipalAxesAttr(defaultValue, writeSparsely) -> Attribute
See GetPrincipalAxesAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> MassAPI
Return a UsdPhysicsMassAPI holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMassAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetCenterOfMassAttr() -> Attribute:
"""
GetCenterOfMassAttr() -> Attribute
Center of mass in the prim's local space.
Units: distance.
Declaration
``point3f physics:centerOfMass = (-inf, -inf, -inf)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Point3f
"""
@staticmethod
def GetDensityAttr() -> Attribute:
"""
GetDensityAttr() -> Attribute
If non-zero, specifies the density of the object.
In the context of rigid body physics, density indirectly results in
setting mass via (mass = density x volume of the object). How the
volume is computed is up to implementation of the physics system. It
is generally computed from the collision approximation rather than the
graphical mesh. In the case where both density and mass are specified
for the same object, mass has precedence over density. Unlike mass,
child's prim's density overrides parent prim's density as it is
accumulative. Note that density of a collisionAPI can be also
alternatively set through a PhysicsMaterialAPI. The material density
has the weakest precedence in density definition. Note if density is
0.0 it is ignored. Units: mass/distance/distance/distance.
Declaration
``float physics:density = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetDiagonalInertiaAttr() -> Attribute:
"""
GetDiagonalInertiaAttr() -> Attribute
If non-zero, specifies diagonalized inertia tensor along the principal
axes.
Note if diagonalInertial is (0.0, 0.0, 0.0) it is ignored. Units:
mass\*distance\*distance.
Declaration
``float3 physics:diagonalInertia = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Float3
"""
@staticmethod
def GetMassAttr() -> Attribute:
"""
GetMassAttr() -> Attribute
If non-zero, directly specifies the mass of the object.
Note that any child prim can also have a mass when they apply massAPI.
In this case, the precedence rule is'parent mass overrides the
child's'. This may come as counter-intuitive, but mass is a computed
quantity and in general not accumulative. For example, if a parent has
mass of 10, and one of two children has mass of 20, allowing child's
mass to override its parent results in a mass of -10 for the other
child. Note if mass is 0.0 it is ignored. Units: mass.
Declaration
``float physics:mass = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetPrincipalAxesAttr() -> Attribute:
"""
GetPrincipalAxesAttr() -> Attribute
Orientation of the inertia tensor's principal axes in the prim's local
space.
Declaration
``quatf physics:principalAxes = (0, 0, 0, 0)``
C++ Type
GfQuatf
Usd Type
SdfValueTypeNames->Quatf
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
class MassUnits(Boost.Python.instance):
"""
Container class for static double-precision symbols representing
common mass units of measure expressed in kilograms.
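Example (illustrative sketch): converting an authored value to kilograms
with these symbols.
.. code-block:: python
from pxr import UsdPhysics
mass_in_kg = 500.0 * UsdPhysics.MassUnits.grams  # 0.5 kilograms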
"""
grams = 0.001
kilograms = 1.0
slugs = 14.5939
pass
class MaterialAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Adds simulation material properties to a Material. All collisions that
have a relationship to this material will have their collision
response defined through this material.
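Example (illustrative sketch; the material path is hypothetical): authoring
friction and restitution on a UsdShade Material prim.
.. code-block:: python
from pxr import Usd, UsdShade, UsdPhysics
stage = Usd.Stage.CreateInMemory()
mat = UsdShade.Material.Define(stage, "/World/Looks/Rubber")
phys_mat = UsdPhysics.MaterialAPI.Apply(mat.GetPrim())
phys_mat.CreateStaticFrictionAttr(0.9)
phys_mat.CreateDynamicFrictionAttr(0.8)
phys_mat.CreateRestitutionAttr(0.6)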
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> MaterialAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding "PhysicsMaterialAPI" to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMaterialAPI object is returned upon success. An
invalid (or empty) UsdPhysicsMaterialAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
If this schema cannot be applied to the prim, this returns false
and, if provided, populates ``whyNot`` with the reason it can not be
applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def CreateDensityAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateDensityAttr(defaultValue, writeSparsely) -> Attribute
See GetDensityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateDynamicFrictionAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateDynamicFrictionAttr(defaultValue, writeSparsely) -> Attribute
See GetDynamicFrictionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateRestitutionAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateRestitutionAttr(defaultValue, writeSparsely) -> Attribute
See GetRestitutionAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateStaticFrictionAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateStaticFrictionAttr(defaultValue, writeSparsely) -> Attribute
See GetStaticFrictionAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> MaterialAPI
Return a UsdPhysicsMaterialAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMaterialAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetDensityAttr() -> Attribute:
"""
GetDensityAttr() -> Attribute
If non-zero, defines the density of the material.
This can be used for body mass computation, see PhysicsMassAPI. Note
that if the density is 0.0 it is ignored. Units:
mass/distance/distance/distance.
Declaration
``float physics:density = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetDynamicFrictionAttr() -> Attribute:
"""
GetDynamicFrictionAttr() -> Attribute
Dynamic friction coefficient.
Unitless.
Declaration
``float physics:dynamicFriction = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetRestitutionAttr() -> Attribute:
"""
GetRestitutionAttr() -> Attribute
Restitution coefficient.
Unitless.
Declaration
``float physics:restitution = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def GetStaticFrictionAttr() -> Attribute:
"""
GetStaticFrictionAttr() -> Attribute
Static friction coefficient.
Unitless.
Declaration
``float physics:staticFriction = 0``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
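# --- Editor's illustrative sketch (not part of the generated stub) -----------
# A minimal example of how the MaterialAPI documented above is typically used:
# apply the schema to a material prim and author its physics attributes. The
# stage, prim path, and helper name are hypothetical.
def _example_physics_material(stage):  # hypothetical helper, illustration only
    from pxr import UsdPhysics, UsdShade
    material = UsdShade.Material.Define(stage, "/World/PhysicsMaterial")
    mat_api = UsdPhysics.MaterialAPI.Apply(material.GetPrim())
    mat_api.CreateStaticFrictionAttr(0.6)    # unitless coefficient
    mat_api.CreateDynamicFrictionAttr(0.4)   # unitless coefficient
    mat_api.CreateRestitutionAttr(0.2)       # bounciness
    mat_api.CreateDensityAttr(1000.0)        # mass/distance^3; 0.0 means ignored
    return mat_api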
class MeshCollisionAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
    Attributes to control how a Mesh is made into a collider. Can be
    applied only to a UsdGeomMesh in addition to its PhysicsCollisionAPI.
    For any described attribute *Fallback* *Value* or *Allowed* *Values*
    below that are text/tokens, the actual token is published and defined
    in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
    use UsdPhysicsTokens->rightHanded as the value.
"""
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> MeshCollisionAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"PhysicsMeshCollisionAPI"to the
token-valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsMeshCollisionAPI object is returned upon success. An
invalid (or empty) UsdPhysicsMeshCollisionAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
    If this schema cannot be applied to the prim, this returns false
    and, if provided, populates ``whyNot`` with the reason it cannot be
    applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def CreateApproximationAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateApproximationAttr(defaultValue, writeSparsely) -> Attribute
See GetApproximationAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> MeshCollisionAPI
Return a UsdPhysicsMeshCollisionAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsMeshCollisionAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetApproximationAttr() -> Attribute:
"""
GetApproximationAttr() -> Attribute
    Determines the mesh's collision approximation:
    "none" - The mesh geometry is used directly as a collider without any
    approximation.
    "convexDecomposition" - A convex mesh decomposition is performed. This
    results in a set of convex mesh colliders.
    "convexHull" - A convex hull of the mesh is generated and used as the
    collider.
    "boundingSphere" - A bounding sphere is computed around the mesh and
    used as a collider.
    "boundingCube" - An optimally fitting box collider is computed around
    the mesh.
    "meshSimplification" - A mesh simplification step is performed,
    resulting in a simplified triangle mesh collider.
Declaration
    ``uniform token physics:approximation = "none"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
none, convexDecomposition, convexHull, boundingSphere, boundingCube,
meshSimplification
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
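# --- Editor's illustrative sketch (not part of the generated stub) -----------
# Turning a mesh into a collider usually pairs CollisionAPI with the
# MeshCollisionAPI documented above to select an approximation. The prim path
# and helper name are hypothetical; the token comes from UsdPhysics.Tokens.
def _example_mesh_collider(stage):  # hypothetical helper, illustration only
    from pxr import UsdGeom, UsdPhysics
    mesh = UsdGeom.Mesh.Define(stage, "/World/CollisionMesh")
    UsdPhysics.CollisionAPI.Apply(mesh.GetPrim())
    mesh_col = UsdPhysics.MeshCollisionAPI.Apply(mesh.GetPrim())
    mesh_col.CreateApproximationAttr(UsdPhysics.Tokens.convexHull)
    return mesh_col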
class PrismaticJoint(Joint, pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Predefined prismatic joint type (translation along prismatic joint
axis is permitted.)
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
    in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
@staticmethod
def CreateAxisAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetLowerLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetUpperLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> PrismaticJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> PrismaticJoint
Return a UsdPhysicsPrismaticJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsPrismaticJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetAxisAttr() -> Attribute:
"""
GetAxisAttr() -> Attribute
Joint axis.
Declaration
    ``uniform token physics:axis = "X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
@staticmethod
def GetLowerLimitAttr() -> Attribute:
"""
GetLowerLimitAttr() -> Attribute
Lower limit.
Units: distance. -inf means not limited in negative direction.
Declaration
``float physics:lowerLimit = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def GetUpperLimitAttr() -> Attribute:
"""
GetUpperLimitAttr() -> Attribute
Upper limit.
Units: distance. inf means not limited in positive direction.
Declaration
``float physics:upperLimit = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
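# --- Editor's illustrative sketch (not part of the generated stub) -----------
# Defining a prismatic (sliding) joint between two bodies along X with distance
# limits, using the methods documented above. The prim paths and helper name
# are hypothetical.
def _example_prismatic_joint(stage):  # hypothetical helper, illustration only
    from pxr import Sdf, UsdPhysics
    joint = UsdPhysics.PrismaticJoint.Define(stage, "/World/Slider")
    joint.CreateAxisAttr(UsdPhysics.Tokens.x)
    joint.CreateLowerLimitAttr(-10.0)   # units: distance
    joint.CreateUpperLimitAttr(10.0)
    joint.CreateBody0Rel().SetTargets([Sdf.Path("/World/Base")])
    joint.CreateBody1Rel().SetTargets([Sdf.Path("/World/Carriage")])
    return joint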
class RevoluteJoint(Joint, pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Predefined revolute joint type (rotation along revolute joint axis is
permitted.)
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
    in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
@staticmethod
def CreateAxisAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateLowerLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetLowerLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateUpperLimitAttr(defaultValue, writeSparsely) -> Attribute
See GetUpperLimitAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> RevoluteJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> RevoluteJoint
Return a UsdPhysicsRevoluteJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsRevoluteJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetAxisAttr() -> Attribute:
"""
GetAxisAttr() -> Attribute
Joint axis.
Declaration
    ``uniform token physics:axis = "X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
@staticmethod
def GetLowerLimitAttr() -> Attribute:
"""
GetLowerLimitAttr() -> Attribute
Lower limit.
Units: degrees. -inf means not limited in negative direction.
Declaration
``float physics:lowerLimit = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def GetUpperLimitAttr() -> Attribute:
"""
GetUpperLimitAttr() -> Attribute
Upper limit.
Units: degrees. inf means not limited in positive direction.
Declaration
``float physics:upperLimit = inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
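# --- Editor's illustrative sketch (not part of the generated stub) -----------
# A revolute (hinge) joint rotating about Z with angular limits in degrees.
# The prim path and helper name are hypothetical.
def _example_revolute_joint(stage):  # hypothetical helper, illustration only
    from pxr import UsdPhysics
    joint = UsdPhysics.RevoluteJoint.Define(stage, "/World/Hinge")
    joint.CreateAxisAttr(UsdPhysics.Tokens.z)
    joint.CreateLowerLimitAttr(-45.0)   # units: degrees
    joint.CreateUpperLimitAttr(45.0)
    return joint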
class RigidBodyAPI(pxr.Usd.APISchemaBase, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Applies physics body attributes to any UsdGeomXformable prim and marks
that prim to be driven by a simulation. If a simulation is running it
will update this prim's pose. All prims in the hierarchy below this
prim should move accordingly.
"""
class MassInformation(Boost.Python.instance):
@property
def centerOfMass(self) -> None:
"""
:type: None
"""
@property
def inertia(self) -> None:
"""
:type: None
"""
@property
def localPos(self) -> None:
"""
:type: None
"""
@property
def localRot(self) -> None:
"""
:type: None
"""
@property
def volume(self) -> None:
"""
:type: None
"""
__instance_size__ = 96
pass
@staticmethod
def Apply(*args, **kwargs) -> None:
"""
**classmethod** Apply(prim) -> RigidBodyAPI
Applies this **single-apply** API schema to the given ``prim`` .
This information is stored by adding"PhysicsRigidBodyAPI"to the token-
valued, listOp metadata *apiSchemas* on the prim.
A valid UsdPhysicsRigidBodyAPI object is returned upon success. An
invalid (or empty) UsdPhysicsRigidBodyAPI object is returned upon
failure. See UsdPrim::ApplyAPI() for conditions resulting in failure.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
"""
@staticmethod
def CanApply(*args, **kwargs) -> None:
"""
**classmethod** CanApply(prim, whyNot) -> bool
Returns true if this **single-apply** API schema can be applied to the
given ``prim`` .
    If this schema cannot be applied to the prim, this returns false
    and, if provided, populates ``whyNot`` with the reason it cannot be
    applied.
Note that if CanApply returns false, that does not necessarily imply
that calling Apply will fail. Callers are expected to call CanApply
before calling Apply if they want to ensure that it is valid to apply
a schema.
UsdPrim::GetAppliedSchemas()
UsdPrim::HasAPI()
UsdPrim::CanApplyAPI()
UsdPrim::ApplyAPI()
UsdPrim::RemoveAPI()
Parameters
----------
prim : Prim
whyNot : str
"""
@staticmethod
def ComputeMassProperties(diagonalInertia, com, principalAxes, massInfoFn) -> float:
"""
ComputeMassProperties(diagonalInertia, com, principalAxes, massInfoFn) -> float
    Compute mass properties of the rigid body.
    ``diagonalInertia`` Computed diagonal of the inertial tensor for the
    rigid body.
    ``com`` Computed center of mass for the rigid body.
    ``principalAxes`` Inertia tensor's principal axes orientation for the
    rigid body.
    ``massInfoFn`` Callback function to get collision mass information.
    Returns the computed mass of the rigid body.
Parameters
----------
diagonalInertia : Vec3f
com : Vec3f
principalAxes : Quatf
massInfoFn : MassInformationFn
"""
@staticmethod
def CreateAngularVelocityAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateAngularVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetAngularVelocityAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateKinematicEnabledAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateKinematicEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetKinematicEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateRigidBodyEnabledAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateRigidBodyEnabledAttr(defaultValue, writeSparsely) -> Attribute
See GetRigidBodyEnabledAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateSimulationOwnerRel() -> Relationship:
"""
CreateSimulationOwnerRel() -> Relationship
See GetSimulationOwnerRel() , and also Create vs Get Property Methods
for when to use Get vs Create.
"""
@staticmethod
def CreateStartsAsleepAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateStartsAsleepAttr(defaultValue, writeSparsely) -> Attribute
See GetStartsAsleepAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateVelocityAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateVelocityAttr(defaultValue, writeSparsely) -> Attribute
See GetVelocityAttr() , and also Create vs Get Property Methods for
when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> RigidBodyAPI
Return a UsdPhysicsRigidBodyAPI holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsRigidBodyAPI(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetAngularVelocityAttr() -> Attribute:
"""
GetAngularVelocityAttr() -> Attribute
Angular velocity in the same space as the node's xform.
Units: degrees/second.
Declaration
``vector3f physics:angularVelocity = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
@staticmethod
def GetKinematicEnabledAttr() -> Attribute:
"""
GetKinematicEnabledAttr() -> Attribute
Determines whether the body is kinematic or not.
A kinematic body is a body that is moved through animated poses or
through user defined poses. The simulation derives velocities for the
kinematic body based on the external motion. When a continuous motion
is not desired, this kinematic flag should be set to false.
Declaration
``bool physics:kinematicEnabled = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetRigidBodyEnabledAttr() -> Attribute:
"""
GetRigidBodyEnabledAttr() -> Attribute
Determines if this PhysicsRigidBodyAPI is enabled.
Declaration
``bool physics:rigidBodyEnabled = 1``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def GetSimulationOwnerRel() -> Relationship:
"""
GetSimulationOwnerRel() -> Relationship
Single PhysicsScene that will simulate this body.
By default this is the first PhysicsScene found in the stage using
UsdStage::Traverse() .
"""
@staticmethod
def GetStartsAsleepAttr() -> Attribute:
"""
GetStartsAsleepAttr() -> Attribute
Determines if the body is asleep when the simulation starts.
Declaration
``uniform bool physics:startsAsleep = 0``
C++ Type
bool
Usd Type
SdfValueTypeNames->Bool
Variability
SdfVariabilityUniform
"""
@staticmethod
def GetVelocityAttr() -> Attribute:
"""
GetVelocityAttr() -> Attribute
Linear velocity in the same space as the node's xform.
Units: distance/second.
Declaration
``vector3f physics:velocity = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 48
pass
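# --- Editor's illustrative sketch (not part of the generated stub) -----------
# Marking a prim as a simulated rigid body with an initial velocity. Pairing it
# with CollisionAPI is typical so the body also collides. The prim path and
# helper name are hypothetical.
def _example_rigid_body(stage):  # hypothetical helper, illustration only
    from pxr import Gf, UsdGeom, UsdPhysics
    cube = UsdGeom.Cube.Define(stage, "/World/Box")
    UsdPhysics.CollisionAPI.Apply(cube.GetPrim())
    body = UsdPhysics.RigidBodyAPI.Apply(cube.GetPrim())
    body.CreateVelocityAttr(Gf.Vec3f(0.0, 0.0, 5.0))           # distance/second
    body.CreateAngularVelocityAttr(Gf.Vec3f(0.0, 90.0, 0.0))   # degrees/second
    return body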
class Scene(pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
General physics simulation properties, required for simulation.
"""
@staticmethod
def CreateGravityDirectionAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateGravityDirectionAttr(defaultValue, writeSparsely) -> Attribute
See GetGravityDirectionAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateGravityMagnitudeAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateGravityMagnitudeAttr(defaultValue, writeSparsely) -> Attribute
See GetGravityMagnitudeAttr() , and also Create vs Get Property
Methods for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> Scene
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> Scene
Return a UsdPhysicsScene holding the prim adhering to this schema at
``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsScene(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetGravityDirectionAttr() -> Attribute:
"""
GetGravityDirectionAttr() -> Attribute
Gravity direction vector in simulation world space.
Will be normalized before use. A zero vector is a request to use the
negative upAxis. Unitless.
Declaration
``vector3f physics:gravityDirection = (0, 0, 0)``
C++ Type
GfVec3f
Usd Type
SdfValueTypeNames->Vector3f
"""
@staticmethod
def GetGravityMagnitudeAttr() -> Attribute:
"""
GetGravityMagnitudeAttr() -> Attribute
Gravity acceleration magnitude in simulation world space.
A negative value is a request to use a value equivalent to earth
gravity regardless of the metersPerUnit scaling used by this scene.
Units: distance/second/second.
Declaration
``float physics:gravityMagnitude = -inf``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
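# --- Editor's illustrative sketch (not part of the generated stub) -----------
# A physics scene with explicit gravity. The magnitude shown assumes the stage
# is authored in centimeters; the prim path and helper name are hypothetical.
def _example_physics_scene(stage):  # hypothetical helper, illustration only
    from pxr import Gf, UsdPhysics
    scene = UsdPhysics.Scene.Define(stage, "/World/PhysicsScene")
    scene.CreateGravityDirectionAttr(Gf.Vec3f(0.0, 0.0, -1.0))
    scene.CreateGravityMagnitudeAttr(981.0)   # distance/second^2
    return scene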
class SphericalJoint(Joint, pxr.UsdGeom.Imageable, pxr.Usd.Typed, pxr.Usd.SchemaBase, Boost.Python.instance):
"""
Predefined spherical joint type (Removes linear degrees of freedom,
cone limit may restrict the motion in a given range.) It allows two
limit values, which when equal create a circular, else an elliptic
cone limit around the limit axis.
For any described attribute *Fallback* *Value* or *Allowed* *Values*
below that are text/tokens, the actual token is published and defined
    in UsdPhysicsTokens. So to set an attribute to the value "rightHanded",
use UsdPhysicsTokens->rightHanded as the value.
"""
@staticmethod
def CreateAxisAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateAxisAttr(defaultValue, writeSparsely) -> Attribute
See GetAxisAttr() , and also Create vs Get Property Methods for when
to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateConeAngle0LimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateConeAngle0LimitAttr(defaultValue, writeSparsely) -> Attribute
See GetConeAngle0LimitAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def CreateConeAngle1LimitAttr(defaultValue, writeSparsely) -> Attribute:
"""
CreateConeAngle1LimitAttr(defaultValue, writeSparsely) -> Attribute
See GetConeAngle1LimitAttr() , and also Create vs Get Property Methods
for when to use Get vs Create.
If specified, author ``defaultValue`` as the attribute's default,
sparsely (when it makes sense to do so) if ``writeSparsely`` is
``true`` - the default for ``writeSparsely`` is ``false`` .
Parameters
----------
defaultValue : VtValue
writeSparsely : bool
"""
@staticmethod
def Define(*args, **kwargs) -> None:
"""
**classmethod** Define(stage, path) -> SphericalJoint
Attempt to ensure a *UsdPrim* adhering to this schema at ``path`` is
defined (according to UsdPrim::IsDefined() ) on this stage.
If a prim adhering to this schema at ``path`` is already defined on
this stage, return that prim. Otherwise author an *SdfPrimSpec* with
*specifier* == *SdfSpecifierDef* and this schema's prim type name for
the prim at ``path`` at the current EditTarget. Author *SdfPrimSpec* s
with ``specifier`` == *SdfSpecifierDef* and empty typeName at the
current EditTarget for any nonexistent, or existing but not *Defined*
ancestors.
The given *path* must be an absolute prim path that does not contain
any variant selections.
If it is impossible to author any of the necessary PrimSpecs, (for
example, in case *path* cannot map to the current UsdEditTarget 's
namespace) issue an error and return an invalid *UsdPrim*.
Note that this method may return a defined prim whose typeName does
not specify this schema class, in case a stronger typeName opinion
overrides the opinion at the current EditTarget.
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def Get(*args, **kwargs) -> None:
"""
**classmethod** Get(stage, path) -> SphericalJoint
Return a UsdPhysicsSphericalJoint holding the prim adhering to this
schema at ``path`` on ``stage`` .
If no prim exists at ``path`` on ``stage`` , or if the prim at that
path does not adhere to this schema, return an invalid schema object.
This is shorthand for the following:
.. code-block:: text
UsdPhysicsSphericalJoint(stage->GetPrimAtPath(path));
Parameters
----------
stage : Stage
path : Path
"""
@staticmethod
def GetAxisAttr() -> Attribute:
"""
GetAxisAttr() -> Attribute
Cone limit axis.
Declaration
    ``uniform token physics:axis = "X"``
C++ Type
TfToken
Usd Type
SdfValueTypeNames->Token
Variability
SdfVariabilityUniform
Allowed Values
X, Y, Z
"""
@staticmethod
def GetConeAngle0LimitAttr() -> Attribute:
"""
GetConeAngle0LimitAttr() -> Attribute
Cone limit from the primary joint axis in the local0 frame toward the
next axis.
(Next axis of X is Y, and of Z is X.) A negative value means not
limited. Units: degrees.
Declaration
``float physics:coneAngle0Limit = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetConeAngle1LimitAttr() -> Attribute:
"""
GetConeAngle1LimitAttr() -> Attribute
Cone limit from the primary joint axis in the local0 frame toward the
second to next axis.
A negative value means not limited. Units: degrees.
Declaration
``float physics:coneAngle1Limit = -1``
C++ Type
float
Usd Type
SdfValueTypeNames->Float
"""
@staticmethod
def GetSchemaAttributeNames(*args, **kwargs) -> None:
"""
**classmethod** GetSchemaAttributeNames(includeInherited) -> list[TfToken]
Return a vector of names of all pre-declared attributes for this
schema class and all its ancestor classes.
Does not include attributes that may be authored by custom/extended
methods of the schemas involved.
Parameters
----------
includeInherited : bool
"""
@staticmethod
def _GetStaticTfType(*args, **kwargs) -> None:
"""
**classmethod** _GetStaticTfType() -> Type
"""
__instance_size__ = 40
pass
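# --- Editor's illustrative sketch (not part of the generated stub) -----------
# A spherical (ball-and-socket) joint with a circular cone limit: equal cone
# angles produce a circular cone, unequal angles an elliptic one. The prim path
# and helper name are hypothetical.
def _example_spherical_joint(stage):  # hypothetical helper, illustration only
    from pxr import UsdPhysics
    joint = UsdPhysics.SphericalJoint.Define(stage, "/World/BallSocket")
    joint.CreateAxisAttr(UsdPhysics.Tokens.y)
    joint.CreateConeAngle0LimitAttr(30.0)   # degrees
    joint.CreateConeAngle1LimitAttr(30.0)   # degrees
    return joint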
class Tokens(Boost.Python.instance):
acceleration = 'acceleration'
angular = 'angular'
boundingCube = 'boundingCube'
boundingSphere = 'boundingSphere'
colliders = 'colliders'
convexDecomposition = 'convexDecomposition'
convexHull = 'convexHull'
distance = 'distance'
drive = 'drive'
drive_MultipleApplyTemplate_PhysicsDamping = 'drive:__INSTANCE_NAME__:physics:damping'
drive_MultipleApplyTemplate_PhysicsMaxForce = 'drive:__INSTANCE_NAME__:physics:maxForce'
drive_MultipleApplyTemplate_PhysicsStiffness = 'drive:__INSTANCE_NAME__:physics:stiffness'
drive_MultipleApplyTemplate_PhysicsTargetPosition = 'drive:__INSTANCE_NAME__:physics:targetPosition'
drive_MultipleApplyTemplate_PhysicsTargetVelocity = 'drive:__INSTANCE_NAME__:physics:targetVelocity'
drive_MultipleApplyTemplate_PhysicsType = 'drive:__INSTANCE_NAME__:physics:type'
force = 'force'
kilogramsPerUnit = 'kilogramsPerUnit'
limit = 'limit'
limit_MultipleApplyTemplate_PhysicsHigh = 'limit:__INSTANCE_NAME__:physics:high'
limit_MultipleApplyTemplate_PhysicsLow = 'limit:__INSTANCE_NAME__:physics:low'
linear = 'linear'
meshSimplification = 'meshSimplification'
none = 'none'
physicsAngularVelocity = 'physics:angularVelocity'
physicsApproximation = 'physics:approximation'
physicsAxis = 'physics:axis'
physicsBody0 = 'physics:body0'
physicsBody1 = 'physics:body1'
physicsBreakForce = 'physics:breakForce'
physicsBreakTorque = 'physics:breakTorque'
physicsCenterOfMass = 'physics:centerOfMass'
physicsCollisionEnabled = 'physics:collisionEnabled'
physicsConeAngle0Limit = 'physics:coneAngle0Limit'
physicsConeAngle1Limit = 'physics:coneAngle1Limit'
physicsDensity = 'physics:density'
physicsDiagonalInertia = 'physics:diagonalInertia'
physicsDynamicFriction = 'physics:dynamicFriction'
physicsExcludeFromArticulation = 'physics:excludeFromArticulation'
physicsFilteredGroups = 'physics:filteredGroups'
physicsFilteredPairs = 'physics:filteredPairs'
physicsGravityDirection = 'physics:gravityDirection'
physicsGravityMagnitude = 'physics:gravityMagnitude'
physicsInvertFilteredGroups = 'physics:invertFilteredGroups'
physicsJointEnabled = 'physics:jointEnabled'
physicsKinematicEnabled = 'physics:kinematicEnabled'
physicsLocalPos0 = 'physics:localPos0'
physicsLocalPos1 = 'physics:localPos1'
physicsLocalRot0 = 'physics:localRot0'
physicsLocalRot1 = 'physics:localRot1'
physicsLowerLimit = 'physics:lowerLimit'
physicsMass = 'physics:mass'
physicsMaxDistance = 'physics:maxDistance'
physicsMergeGroup = 'physics:mergeGroup'
physicsMinDistance = 'physics:minDistance'
physicsPrincipalAxes = 'physics:principalAxes'
physicsRestitution = 'physics:restitution'
physicsRigidBodyEnabled = 'physics:rigidBodyEnabled'
physicsSimulationOwner = 'physics:simulationOwner'
physicsStartsAsleep = 'physics:startsAsleep'
physicsStaticFriction = 'physics:staticFriction'
physicsUpperLimit = 'physics:upperLimit'
physicsVelocity = 'physics:velocity'
rotX = 'rotX'
rotY = 'rotY'
rotZ = 'rotZ'
transX = 'transX'
transY = 'transY'
transZ = 'transZ'
x = 'X'
y = 'Y'
z = 'Z'
pass
class _CanApplyResult(Boost.Python.instance):
@property
def whyNot(self) -> None:
"""
:type: None
"""
__instance_size__ = 56
pass
def GetStageKilogramsPerUnit(*args, **kwargs) -> None:
pass
def MassUnitsAre(*args, **kwargs) -> None:
pass
def SetStageKilogramsPerUnit(*args, **kwargs) -> None:
pass
def StageHasAuthoredKilogramsPerUnit(*args, **kwargs) -> None:
pass
__MFB_FULL_PACKAGE_NAME = 'usdPhysics'
| 122,712 | unknown | 25.613099 | 109 | 0.611546 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Work/__DOC.py | def Execute(result):
pass | 29 | Python | 13.999993 | 20 | 0.689655 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Work/__init__.pyi | from __future__ import annotations
import pxr.Work._work
import typing
__all__ = [
"GetConcurrencyLimit",
"GetPhysicalConcurrencyLimit",
"HasConcurrency",
"SetConcurrencyLimit",
"SetConcurrencyLimitArgument",
"SetMaximumConcurrencyLimit"
]
def GetConcurrencyLimit(*args, **kwargs) -> None:
pass
def GetPhysicalConcurrencyLimit(*args, **kwargs) -> None:
pass
def HasConcurrency(*args, **kwargs) -> None:
pass
def SetConcurrencyLimit(*args, **kwargs) -> None:
pass
def SetConcurrencyLimitArgument(*args, **kwargs) -> None:
pass
def SetMaximumConcurrencyLimit(*args, **kwargs) -> None:
pass
__MFB_FULL_PACKAGE_NAME = 'work'
| 672 | unknown | 23.035713 | 57 | 0.700893 |
omniverse-code/kit/exts/omni.usd.libs/pxr/Ar/__DOC.py | def Execute(result):
result["DefaultResolver"].__doc__ = """
Default asset resolution implementation used when no plugin
implementation is provided.
In order to resolve assets specified by relative paths, this resolver
implements a simple"search path"scheme. The resolver will anchor the
relative path to a series of directories and return the first absolute
path where the asset exists.
The first directory will always be the current working directory. The
resolver will then examine the directories specified via the following
mechanisms (in order):
- The currently-bound ArDefaultResolverContext for the calling
thread
- ArDefaultResolver::SetDefaultSearchPath
- The environment variable PXR_AR_DEFAULT_SEARCH_PATH. This is
expected to be a list of directories delimited by the platform's
standard path separator.
ArDefaultResolver supports creating an ArDefaultResolverContext via
ArResolver::CreateContextFromString by passing a list of directories
delimited by the platform's standard path separator.
"""
result["DefaultResolver"].SetDefaultSearchPath.func_doc = """**classmethod** SetDefaultSearchPath(searchPath) -> None
Set the default search path that will be used during asset resolution.
This must be called before the first call to ArGetResolver. The
specified paths will be searched *in addition to, and before* paths
specified via the environment variable PXR_AR_DEFAULT_SEARCH_PATH
Parameters
----------
searchPath : list[str]
"""
result["DefaultResolverContext"].__doc__ = """
Resolver context object that specifies a search path to use during
asset resolution. This object is intended for use with the default
ArDefaultResolver asset resolution implementation; see documentation
for that class for more details on the search path resolution
algorithm.
Example usage:
.. code-block:: text
ArDefaultResolverContext ctx({"/Local/Models", "/Installed/Models"});
{
// Bind the context object:
ArResolverContextBinder binder(ctx);
// While the context is bound, all calls to ArResolver::Resolve
// (assuming ArDefaultResolver is the underlying implementation being
// used) will include the specified paths during resolution.
std::string resolvedPath = resolver.Resolve("ModelName/File.txt")
}
// Once the context is no longer bound (due to the ArResolverContextBinder
// going out of scope), its search path no longer factors into asset
// resolution.
"""
result["DefaultResolverContext"].__init__.func_doc = """__init__()
Default construct a context with no search path.
----------------------------------------------------------------------
__init__(searchPath)
Construct a context with the given ``searchPath`` .
Elements in ``searchPath`` should be absolute paths. If they are not,
they will be anchored to the current working directory.
Parameters
----------
searchPath : list[str]
"""
result["DefaultResolverContext"].GetSearchPath.func_doc = """GetSearchPath() -> list[str]
Return this context's search path.
"""
result["Notice"].__doc__ = """"""
result["ResolvedPath"].__doc__ = """
Represents a resolved asset path.
"""
result["ResolvedPath"].__init__.func_doc = """__init__(resolvedPath)
Construct an ArResolvedPath holding the given ``resolvedPath`` .
Parameters
----------
resolvedPath : str
----------------------------------------------------------------------
__init__(resolvedPath)
This is an overloaded member function, provided for convenience. It
differs from the above function only in what argument(s) it accepts.
Parameters
----------
resolvedPath : str
----------------------------------------------------------------------
__init__()
----------------------------------------------------------------------
__init__(rhs)
Parameters
----------
rhs : ResolvedPath
----------------------------------------------------------------------
__init__(rhs)
Parameters
----------
rhs : ResolvedPath
"""
result["ResolvedPath"].GetPathString.func_doc = """GetPathString() -> str
Return the resolved path held by this object as a string.
"""
result["Resolver"].__doc__ = """
Interface for the asset resolution system. An asset resolver is
responsible for resolving asset information (including the asset's
physical path) from a logical path.
See ar_implementing_resolver for information on how to customize asset
resolution behavior by implementing a subclass of ArResolver. Clients
may use ArGetResolver to access the configured asset resolver.
"""
result["Resolver"].CreateIdentifier.func_doc = """CreateIdentifier(assetPath, anchorAssetPath) -> str
Returns an identifier for the asset specified by ``assetPath`` .
If ``anchorAssetPath`` is not empty, it is the resolved asset path
that ``assetPath`` should be anchored to if it is a relative path.
Parameters
----------
assetPath : str
anchorAssetPath : ResolvedPath
"""
result["Resolver"].CreateIdentifierForNewAsset.func_doc = """CreateIdentifierForNewAsset(assetPath, anchorAssetPath) -> str
Returns an identifier for a new asset specified by ``assetPath`` .
If ``anchorAssetPath`` is not empty, it is the resolved asset path
that ``assetPath`` should be anchored to if it is a relative path.
Parameters
----------
assetPath : str
anchorAssetPath : ResolvedPath
"""
result["Resolver"].Resolve.func_doc = """Resolve(assetPath) -> ResolvedPath
Returns the resolved path for the asset identified by the given
``assetPath`` if it exists.
If the asset does not exist, returns an empty ArResolvedPath.
Parameters
----------
assetPath : str
"""
result["Resolver"].ResolveForNewAsset.func_doc = """ResolveForNewAsset(assetPath) -> ResolvedPath
Returns the resolved path for the given ``assetPath`` that may be used
to create a new asset.
If such a path cannot be computed for ``assetPath`` , returns an empty
ArResolvedPath.
Note that an asset might or might not already exist at the returned
resolved path.
Parameters
----------
assetPath : str
"""
result["Resolver"].CreateDefaultContext.func_doc = """CreateDefaultContext() -> ResolverContext
Return an ArResolverContext that may be bound to this resolver to
resolve assets when no other context is explicitly specified.
The returned ArResolverContext will contain the default context
returned by the primary resolver and all URI resolvers.
"""
result["Resolver"].CreateDefaultContextForAsset.func_doc = """CreateDefaultContextForAsset(assetPath) -> ResolverContext
Return an ArResolverContext that may be bound to this resolver to
resolve the asset located at ``assetPath`` or referenced by that asset
when no other context is explicitly specified.
The returned ArResolverContext will contain the default context for
``assetPath`` returned by the primary resolver and all URI resolvers.
Parameters
----------
assetPath : str
"""
result["Resolver"].CreateContextFromString.func_doc = """CreateContextFromString(contextStr) -> ResolverContext
Return an ArResolverContext created from the primary ArResolver
implementation using the given ``contextStr`` .
Parameters
----------
contextStr : str
----------------------------------------------------------------------
CreateContextFromString(uriScheme, contextStr) -> ResolverContext
Return an ArResolverContext created from the ArResolver registered for
the given ``uriScheme`` using the given ``contextStr`` .
An empty ``uriScheme`` indicates the primary resolver and is
equivalent to CreateContextFromString(string).
If no resolver is registered for ``uriScheme`` , returns an empty
ArResolverContext.
Parameters
----------
uriScheme : str
contextStr : str
"""
result["Resolver"].CreateContextFromStrings.func_doc = """CreateContextFromStrings(contextStrs) -> ResolverContext
Return an ArResolverContext created by combining the ArResolverContext
objects created from the given ``contextStrs`` .
``contextStrs`` is a list of pairs of strings. The first element in
the pair is the URI scheme for the ArResolver that will be used to
create the ArResolverContext from the second element in the pair. An
empty URI scheme indicates the primary resolver.
For example:
.. code-block:: text
ArResolverContext ctx = ArGetResolver().CreateContextFromStrings(
{ {"", "context str 1"},
{"my_scheme", "context str 2"} });
This will use the primary resolver to create an ArResolverContext
using the string"context str 1"and use the resolver registered for
the"my_scheme"URI scheme to create an ArResolverContext using"context
str 2". These contexts will be combined into a single
ArResolverContext and returned.
If no resolver is registered for a URI scheme in an entry in
``contextStrs`` , that entry will be ignored.
Parameters
----------
contextStrs : list[tuple[str, str]]
"""
result["Resolver"].RefreshContext.func_doc = """RefreshContext(context) -> None
Refresh any caches associated with the given context.
If doing so would invalidate asset paths that had previously been
resolved, an ArNotice::ResolverChanged notice will be sent to inform
clients of this.
Parameters
----------
context : ResolverContext
"""
result["Resolver"].GetCurrentContext.func_doc = """GetCurrentContext() -> ResolverContext
Returns the asset resolver context currently bound in this thread.
ArResolver::BindContext, ArResolver::UnbindContext
"""
result["Resolver"].IsContextDependentPath.func_doc = """IsContextDependentPath(assetPath) -> bool
Returns true if ``assetPath`` is a context-dependent path, false
otherwise.
A context-dependent path may result in different resolved paths
depending on what asset resolver context is bound when Resolve is
called. Assets located at the same context-dependent path may not be
the same since those assets may have been loaded from different
resolved paths. In this case, the assets'resolved paths must be
consulted to determine if they are the same.
Parameters
----------
assetPath : str
"""
result["Resolver"].GetExtension.func_doc = """GetExtension(assetPath) -> str
Returns the file extension for the given ``assetPath`` .
The returned extension does not include a "." at the beginning.
Parameters
----------
assetPath : str
"""
result["Resolver"].GetAssetInfo.func_doc = """GetAssetInfo(assetPath, resolvedPath) -> ArAssetInfo
Returns an ArAssetInfo populated with additional metadata (if any)
about the asset at the given ``assetPath`` .
``resolvedPath`` is the resolved path computed for the given
``assetPath`` .
Parameters
----------
assetPath : str
resolvedPath : ResolvedPath
"""
result["Resolver"].GetModificationTimestamp.func_doc = """GetModificationTimestamp(assetPath, resolvedPath) -> Timestamp
Returns an ArTimestamp representing the last time the asset at
``assetPath`` was modified.
``resolvedPath`` is the resolved path computed for the given
``assetPath`` . If a timestamp cannot be retrieved, return an invalid
ArTimestamp.
Parameters
----------
assetPath : str
resolvedPath : ResolvedPath
"""
result["Resolver"].CanWriteAssetToPath.func_doc = """CanWriteAssetToPath(resolvedPath, whyNot) -> bool
Returns true if an asset may be written to the given ``resolvedPath``
, false otherwise.
If this function returns false and ``whyNot`` is not ``nullptr`` , it
may be filled with an explanation.
Parameters
----------
resolvedPath : ResolvedPath
whyNot : str
"""
result["ResolverContext"].__doc__ = """
An asset resolver context allows clients to provide additional data to
the resolver for use during resolution. Clients may provide this data
via context objects of their own (subject to restrictions below). An
ArResolverContext is simply a wrapper around these objects that allows
it to be treated as a single type. Note that an ArResolverContext may
not hold multiple context objects with the same type.
A client-defined context object must provide the following:
- Default and copy constructors
- operator<
- operator==
- An overload for size_t hash_value(const T&)
Note that the user may define a free function:
std::string ArGetDebugString(const Context & ctx); (Where Context is
the type of the user's path resolver context.)
This is optional; a default generic implementation has been
predefined. This function should return a string representation of the
context to be utilized for debugging purposes(such as in TF_DEBUG
statements).
The ArIsContextObject template must also be specialized for this
object to declare that it can be used as a context object. This is to
avoid accidental use of an unexpected object as a context object. The
AR_DECLARE_RESOLVER_CONTEXT macro can be used to do this as a
convenience.
AR_DECLARE_RESOLVER_CONTEXT
ArResolver::BindContext
ArResolver::UnbindContext
ArResolverContextBinder
"""
result["ResolverContext"].__init__.func_doc = """__init__()
Construct an empty asset resolver context.
----------------------------------------------------------------------
__init__(objs)
Construct a resolver context using the given objects ``objs`` .
Each argument must either be an ArResolverContext or a registered
context object. See class documentation for requirements on context
objects.
If an argument is a context object, it will be added to the
constructed ArResolverContext. If an argument is an ArResolverContext,
all of the context objects it holds will be added to the constructed
ArResolverContext.
Arguments are ordered from strong-to-weak. If a context object is
encountered with the same type as a previously-added object, the
previously-added object will remain and the other context object will
be ignored.
Parameters
----------
objs : Objects...
----------------------------------------------------------------------
__init__(ctxs)
Construct a resolver context using the ArResolverContexts in ``ctxs``
.
All of the context objects held by each ArResolverContext in ``ctxs``
will be added to the constructed ArResolverContext.
Arguments are ordered from strong-to-weak. If a context object is
encountered with the same type as a previously-added object, the
previously-added object will remain and the other context object will
be ignored.
Parameters
----------
ctxs : list[ResolverContext]
"""
result["ResolverContext"].IsEmpty.func_doc = """IsEmpty() -> bool
Returns whether this resolver context is empty.
"""
result["ResolverContext"].Get.func_doc = """Get() -> ContextObj
Returns pointer to the context object of the given type held in this
resolver context.
Returns None if this resolver context is not holding an object of the
requested type.
"""
result["ResolverContext"].GetDebugString.func_doc = """GetDebugString() -> str
Returns a debug string representing the contained context objects.
"""
result["ResolverContextBinder"].__doc__ = """
Helper object for managing the binding and unbinding of
ArResolverContext objects with the asset resolver.
Asset Resolver Context Operations
"""
result["ResolverContextBinder"].__init__.func_doc = """__init__(context)
Bind the given ``context`` with the asset resolver.
Calls ArResolver::BindContext on the configured asset resolver and
saves the bindingData populated by that function.
Parameters
----------
context : ResolverContext
----------------------------------------------------------------------
__init__(assetResolver, context)
Bind the given ``context`` to the given ``assetResolver`` .
Calls ArResolver::BindContext on the given ``assetResolver`` and saves
the bindingData populated by that function.
Parameters
----------
assetResolver : Resolver
context : ResolverContext
"""
result["ResolverScopedCache"].__doc__ = """
Helper object for managing asset resolver cache scopes.
A scoped resolution cache indicates to the resolver that results of
calls to Resolve should be cached for a certain scope. This is
important for performance and also for consistency: it ensures that
repeated calls to Resolve with the same parameters will return the
same result.
Scoped Resolution Cache
"""
result["ResolverScopedCache"].__init__.func_doc = """__init__(arg1)
Parameters
----------
arg1 : ResolverScopedCache
----------------------------------------------------------------------
__init__()
Begin an asset resolver cache scope.
Calls ArResolver::BeginCacheScope on the configured asset resolver and
saves the cacheScopeData populated by that function.
----------------------------------------------------------------------
__init__(parent)
Begin an asset resolver cache scope that shares data with the given
``parent`` scope.
Calls ArResolver::BeginCacheScope on the configured asset resolver,
saves the cacheScopeData stored in ``parent`` and passes that to that
function.
Parameters
----------
parent : ResolverScopedCache
"""
result["Timestamp"].__doc__ = """
Represents a timestamp for an asset. Timestamps are represented by
Unix time, the number of seconds elapsed since 00:00:00 UTC 1/1/1970.
"""
result["Timestamp"].__init__.func_doc = """__init__()
Create an invalid timestamp.
----------------------------------------------------------------------
__init__(time)
Create a timestamp at ``time`` , which must be a Unix time value.
Parameters
----------
time : float
"""
result["Timestamp"].IsValid.func_doc = """IsValid() -> bool
Return true if this timestamp is valid, false otherwise.
"""
result["Timestamp"].GetTime.func_doc = """GetTime() -> float
Return the time represented by this timestamp as a double.
If this timestamp is invalid, issue a coding error and return a quiet
NaN value.
""" | 17,675 | Python | 22.166448 | 126 | 0.709873 |