file_path
stringlengths
21
207
content
stringlengths
5
1.02M
size
int64
5
1.02M
lang
stringclasses
9 values
avg_line_length
float64
1.33
100
max_line_length
int64
4
993
alphanum_fraction
float64
0.27
0.93
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdInstanceMappingDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdInstanceMapping Synthetic Data node to expose the scene instances semantic hierarchy information """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdInstanceMappingDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdInstanceMapping Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.lazy inputs.renderResults Outputs: outputs.exec outputs.sdIMInstanceSemanticMap outputs.sdIMInstanceTokens outputs.sdIMLastUpdateTimeDenominator outputs.sdIMLastUpdateTimeNumerator outputs.sdIMMaxSemanticHierarchyDepth outputs.sdIMMinInstanceIndex outputs.sdIMMinSemanticIndex outputs.sdIMNumInstances outputs.sdIMNumSemanticTokens outputs.sdIMNumSemantics outputs.sdIMSemanticLocalTransform outputs.sdIMSemanticTokenMap outputs.sdIMSemanticWorldTransform Predefined Tokens: tokens.InstanceMappingInfoSDhost tokens.InstanceMapSDhost tokens.SemanticLabelTokenSDhost tokens.InstancePrimTokenSDhost tokens.SemanticLocalTransformSDhost tokens.SemanticWorldTransformSDhost """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:lazy', 'bool', 
0, None, 'Compute outputs only when connected to a downstream node', {ogn.MetadataKeys.DEFAULT: 'true'}, True, True, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results pointer', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:sdIMInstanceSemanticMap', 'uchar[]', 0, None, 'Raw array of uint16_t of size sdIMNumInstances*sdIMMaxSemanticHierarchyDepth containing the mapping from the instances index to their inherited semantic entities', {}, True, None, False, ''), ('outputs:sdIMInstanceTokens', 'token[]', 0, None, 'Instance array containing the token for every instances', {}, True, None, False, ''), ('outputs:sdIMLastUpdateTimeDenominator', 'uint64', 0, None, 'Time denominator of the last time the data has changed', {}, True, None, False, ''), ('outputs:sdIMLastUpdateTimeNumerator', 'int64', 0, None, 'Time numerator of the last time the data has changed', {}, True, None, False, ''), ('outputs:sdIMMaxSemanticHierarchyDepth', 'uint', 0, None, 'Maximal number of semantic entities inherited by an instance', {}, True, None, False, ''), ('outputs:sdIMMinInstanceIndex', 'uint', 0, None, 'Instance id of the first instance in the instance arrays', {}, True, None, False, ''), ('outputs:sdIMMinSemanticIndex', 'uint', 0, None, 'Semantic id of the first semantic entity in the semantic arrays', {}, True, None, False, ''), ('outputs:sdIMNumInstances', 'uint', 0, None, 'Number of instances in the instance arrays', {}, True, None, False, ''), ('outputs:sdIMNumSemanticTokens', 'uint', 0, None, 'Number of semantics token including the semantic entity path, the semantic entity types and if the number of semantic types is greater than one a ', {}, True, None, False, ''), ('outputs:sdIMNumSemantics', 'uint', 0, None, 'Number of semantic entities in the semantic arrays', {}, True, None, False, ''), ('outputs:sdIMSemanticLocalTransform', 'float[]', 0, None, 'Semantic array 
of 4x4 float matrices containing the transform from world to local space for every semantic entity', {}, True, None, False, ''), ('outputs:sdIMSemanticTokenMap', 'token[]', 0, None, 'Semantic array of token of size numSemantics * numSemanticTypes containing the mapping from the semantic entities to the semantic entity path and semantic types', {}, True, None, False, ''), ('outputs:sdIMSemanticWorldTransform', 'float[]', 0, None, 'Semantic array of 4x4 float matrices containing the transform from local to world space for every semantic entity', {}, True, None, False, ''), ]) class tokens: InstanceMappingInfoSDhost = "InstanceMappingInfoSDhost" InstanceMapSDhost = "InstanceMapSDhost" SemanticLabelTokenSDhost = "SemanticLabelTokenSDhost" InstancePrimTokenSDhost = "InstancePrimTokenSDhost" SemanticLocalTransformSDhost = "SemanticLocalTransformSDhost" SemanticWorldTransformSDhost = "SemanticWorldTransformSDhost" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) 
@property def lazy(self): data_view = og.AttributeValueHelper(self._attributes.lazy) return data_view.get() @lazy.setter def lazy(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.lazy) data_view = og.AttributeValueHelper(self._attributes.lazy) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.sdIMInstanceSemanticMap_size = None self.sdIMInstanceTokens_size = None self.sdIMSemanticLocalTransform_size = None self.sdIMSemanticTokenMap_size = None self.sdIMSemanticWorldTransform_size = None self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def sdIMInstanceSemanticMap(self): data_view = og.AttributeValueHelper(self._attributes.sdIMInstanceSemanticMap) return data_view.get(reserved_element_count=self.sdIMInstanceSemanticMap_size) @sdIMInstanceSemanticMap.setter def 
sdIMInstanceSemanticMap(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMInstanceSemanticMap) data_view.set(value) self.sdIMInstanceSemanticMap_size = data_view.get_array_size() @property def sdIMInstanceTokens(self): data_view = og.AttributeValueHelper(self._attributes.sdIMInstanceTokens) return data_view.get(reserved_element_count=self.sdIMInstanceTokens_size) @sdIMInstanceTokens.setter def sdIMInstanceTokens(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMInstanceTokens) data_view.set(value) self.sdIMInstanceTokens_size = data_view.get_array_size() @property def sdIMLastUpdateTimeDenominator(self): data_view = og.AttributeValueHelper(self._attributes.sdIMLastUpdateTimeDenominator) return data_view.get() @sdIMLastUpdateTimeDenominator.setter def sdIMLastUpdateTimeDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMLastUpdateTimeDenominator) data_view.set(value) @property def sdIMLastUpdateTimeNumerator(self): data_view = og.AttributeValueHelper(self._attributes.sdIMLastUpdateTimeNumerator) return data_view.get() @sdIMLastUpdateTimeNumerator.setter def sdIMLastUpdateTimeNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMLastUpdateTimeNumerator) data_view.set(value) @property def sdIMMaxSemanticHierarchyDepth(self): data_view = og.AttributeValueHelper(self._attributes.sdIMMaxSemanticHierarchyDepth) return data_view.get() @sdIMMaxSemanticHierarchyDepth.setter def sdIMMaxSemanticHierarchyDepth(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMMaxSemanticHierarchyDepth) data_view.set(value) @property def sdIMMinInstanceIndex(self): data_view = og.AttributeValueHelper(self._attributes.sdIMMinInstanceIndex) return data_view.get() @sdIMMinInstanceIndex.setter def sdIMMinInstanceIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMMinInstanceIndex) data_view.set(value) @property def sdIMMinSemanticIndex(self): data_view = 
og.AttributeValueHelper(self._attributes.sdIMMinSemanticIndex) return data_view.get() @sdIMMinSemanticIndex.setter def sdIMMinSemanticIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMMinSemanticIndex) data_view.set(value) @property def sdIMNumInstances(self): data_view = og.AttributeValueHelper(self._attributes.sdIMNumInstances) return data_view.get() @sdIMNumInstances.setter def sdIMNumInstances(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMNumInstances) data_view.set(value) @property def sdIMNumSemanticTokens(self): data_view = og.AttributeValueHelper(self._attributes.sdIMNumSemanticTokens) return data_view.get() @sdIMNumSemanticTokens.setter def sdIMNumSemanticTokens(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMNumSemanticTokens) data_view.set(value) @property def sdIMNumSemantics(self): data_view = og.AttributeValueHelper(self._attributes.sdIMNumSemantics) return data_view.get() @sdIMNumSemantics.setter def sdIMNumSemantics(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMNumSemantics) data_view.set(value) @property def sdIMSemanticLocalTransform(self): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticLocalTransform) return data_view.get(reserved_element_count=self.sdIMSemanticLocalTransform_size) @sdIMSemanticLocalTransform.setter def sdIMSemanticLocalTransform(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticLocalTransform) data_view.set(value) self.sdIMSemanticLocalTransform_size = data_view.get_array_size() @property def sdIMSemanticTokenMap(self): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticTokenMap) return data_view.get(reserved_element_count=self.sdIMSemanticTokenMap_size) @sdIMSemanticTokenMap.setter def sdIMSemanticTokenMap(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticTokenMap) data_view.set(value) self.sdIMSemanticTokenMap_size = 
data_view.get_array_size() @property def sdIMSemanticWorldTransform(self): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticWorldTransform) return data_view.get(reserved_element_count=self.sdIMSemanticWorldTransform_size) @sdIMSemanticWorldTransform.setter def sdIMSemanticWorldTransform(self, value): data_view = og.AttributeValueHelper(self._attributes.sdIMSemanticWorldTransform) data_view.set(value) self.sdIMSemanticWorldTransform_size = data_view.get_array_size() def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdInstanceMappingDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdInstanceMappingDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdInstanceMappingDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
16,208
Python
48.417683
256
0.676518
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostRenderVarDisplayTextureDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarDisplayTexture Synthetic Data node to copy the input aov texture into the corresponding visualization texture """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostRenderVarDisplayTextureDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarDisplayTexture Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.cameraFisheyeParams inputs.cameraModel inputs.cameraNearFar inputs.exec inputs.gpu inputs.instanceMapSDCudaPtr inputs.instanceMappingInfoSDPtr inputs.metersPerSceneUnit inputs.mode inputs.parameters inputs.renderVar inputs.renderVarDisplay inputs.rp inputs.sdDisplayHeight inputs.sdDisplayWidth inputs.sdSemBBox3dCamCornersCudaPtr inputs.sdSemBBox3dCamExtentCudaPtr inputs.sdSemBBoxExtentCudaPtr inputs.sdSemBBoxInfosCudaPtr inputs.semanticLabelTokenSDCudaPtr inputs.semanticMapSDCudaPtr inputs.semanticPrimTokenSDCudaPtr inputs.semanticWorldTransformSDCudaPtr Outputs: outputs.cudaPtr outputs.exec outputs.format outputs.height outputs.renderVarDisplay outputs.width Predefined Tokens: tokens.LdrColorSD tokens.Camera3dPositionSD tokens.DistanceToImagePlaneSD tokens.DistanceToCameraSD tokens.InstanceSegmentationSD tokens.SemanticSegmentationSD tokens.NormalSD tokens.TargetMotionSD tokens.BoundingBox2DTightSD tokens.BoundingBox2DLooseSD tokens.BoundingBox3DSD tokens.OcclusionSD tokens.TruncationSD tokens.CrossCorrespondenceSD tokens.SemanticBoundingBox2DExtentTightSD tokens.SemanticBoundingBox2DInfosTightSD tokens.SemanticBoundingBox2DExtentLooseSD tokens.SemanticBoundingBox2DInfosLooseSD tokens.SemanticBoundingBox3DExtentSD tokens.SemanticBoundingBox3DInfosSD tokens.SemanticBoundingBox3DCamCornersSD tokens.SemanticBoundingBox3DDisplayAxesSD tokens.autoMode tokens.colorMode 
tokens.scaled3dVectorMode tokens.clippedValueMode tokens.normalized3dVectorMode tokens.segmentationMapMode tokens.instanceMapMode tokens.semanticPathMode tokens.semanticLabelMode tokens.semanticBoundingBox2dMode tokens.rawBoundingBox2dMode tokens.semanticProjBoundingBox3dMode tokens.semanticBoundingBox3dMode tokens.rawBoundingBox3dMode tokens.pinhole tokens.perspective tokens.orthographic tokens.fisheyePolynomial """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:cameraFisheyeParams', 'float[]', 0, None, 'Camera fisheye projection parameters', {}, True, [], False, ''), ('inputs:cameraModel', 'int', 0, None, 'Camera model (pinhole or fisheye models)', {}, True, 0, False, ''), ('inputs:cameraNearFar', 'float2', 0, None, 'Camera near/far clipping range', {}, True, [0.0, 0.0], False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:instanceMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numInstances containing the instance parent semantic index', {}, True, 0, False, ''), ('inputs:instanceMappingInfoSDPtr', 'uint64', 0, None, 'uint buffer pointer containing the following information : [numInstances, minInstanceId, numSemantics, minSemanticId, 
numProtoSemantic]', {}, True, 0, False, ''), ('inputs:metersPerSceneUnit', 'float', 0, None, 'Scene units to meters scale', {}, True, 0.0, False, ''), ('inputs:mode', 'token', 0, None, 'Display mode', {ogn.MetadataKeys.DEFAULT: '"autoMode"'}, True, "autoMode", False, ''), ('inputs:parameters', 'float4', 0, None, 'Display parameters', {ogn.MetadataKeys.DEFAULT: '[0.0, 5.0, 0.33, 0.27]'}, True, [0.0, 5.0, 0.33, 0.27], False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the input RenderVar to display', {}, True, "", False, ''), ('inputs:renderVarDisplay', 'token', 0, None, 'Name of the output display RenderVar', {}, True, "", False, ''), ('inputs:rp', 'uint64', 0, 'renderProduct', 'Pointer to render product for this view', {}, True, 0, False, ''), ('inputs:sdDisplayHeight', 'uint', 0, None, 'Visualization texture Height', {}, True, 0, False, ''), ('inputs:sdDisplayWidth', 'uint', 0, None, 'Visualization texture width', {}, True, 0, False, ''), ('inputs:sdSemBBox3dCamCornersCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the projection of the 3d bounding boxes on the camera plane represented as a float3=(u,v,z,a) for each bounding box corners', {}, True, 0, False, ''), ('inputs:sdSemBBox3dCamExtentCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the 2d extent of the 3d bounding boxes on the camera plane represented as a float6=(u_min,u_max,v_min,v_max,z_min,z_max)', {}, True, 0, False, ''), ('inputs:sdSemBBoxExtentCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the extent of the bounding boxes as a float4=(u_min,v_min,u_max,v_max) for 2D or a float6=(xmin,ymin,zmin,xmax,ymax,zmax) in object space for 3D', {}, True, 0, False, ''), ('inputs:sdSemBBoxInfosCudaPtr', 'uint64', 0, None, 'Cuda buffer containing valid bounding boxes infos', {}, True, 0, False, ''), ('inputs:semanticLabelTokenSDCudaPtr', 'uint64', 0, None, 'cuda uint64_t buffer pointer of size numSemantics containing the semantic label token', {}, True, 0, False, ''), 
('inputs:semanticMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numSemantics containing the semantic parent semantic index', {}, True, 0, False, ''), ('inputs:semanticPrimTokenSDCudaPtr', 'uint64', 0, None, 'cuda uint64_t buffer pointer of size numSemantics containing the semantic path token', {}, True, 0, False, ''), ('inputs:semanticWorldTransformSDCudaPtr', 'uint64', 0, None, 'cuda float44 buffer pointer of size numSemantics containing the world semantic transform', {}, True, 0, False, ''), ('outputs:cudaPtr', 'uint64', 0, None, 'Display texture CUDA pointer', {}, True, None, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:format', 'uint64', 0, None, 'Display texture format', {}, True, None, False, ''), ('outputs:height', 'uint', 0, None, 'Display texture height', {}, True, None, False, ''), ('outputs:renderVarDisplay', 'token', 0, None, 'Name of the output display RenderVar', {}, True, None, False, ''), ('outputs:width', 'uint', 0, None, 'Display texture width', {}, True, None, False, ''), ]) class tokens: LdrColorSD = "LdrColorSD" Camera3dPositionSD = "Camera3dPositionSD" DistanceToImagePlaneSD = "DistanceToImagePlaneSD" DistanceToCameraSD = "DistanceToCameraSD" InstanceSegmentationSD = "InstanceSegmentationSD" SemanticSegmentationSD = "SemanticSegmentationSD" NormalSD = "NormalSD" TargetMotionSD = "TargetMotionSD" BoundingBox2DTightSD = "BoundingBox2DTightSD" BoundingBox2DLooseSD = "BoundingBox2DLooseSD" BoundingBox3DSD = "BoundingBox3DSD" OcclusionSD = "OcclusionSD" TruncationSD = "TruncationSD" CrossCorrespondenceSD = "CrossCorrespondenceSD" SemanticBoundingBox2DExtentTightSD = "SemanticBoundingBox2DExtentTightSD" SemanticBoundingBox2DInfosTightSD = "SemanticBoundingBox2DInfosTightSD" SemanticBoundingBox2DExtentLooseSD = "SemanticBoundingBox2DExtentLooseSD" SemanticBoundingBox2DInfosLooseSD = "SemanticBoundingBox2DInfosLooseSD" SemanticBoundingBox3DExtentSD = 
"SemanticBoundingBox3DExtentSD" SemanticBoundingBox3DInfosSD = "SemanticBoundingBox3DInfosSD" SemanticBoundingBox3DCamCornersSD = "SemanticBoundingBox3DCamCornersSD" SemanticBoundingBox3DDisplayAxesSD = "SemanticBoundingBox3DDisplayAxesSD" autoMode = "autoMode" colorMode = "colorMode" scaled3dVectorMode = "scaled3dVectorMode" clippedValueMode = "clippedValueMode" normalized3dVectorMode = "normalized3dVectorMode" segmentationMapMode = "segmentationMapMode" instanceMapMode = "instanceMapMode" semanticPathMode = "semanticPathMode" semanticLabelMode = "semanticLabelMode" semanticBoundingBox2dMode = "semanticBoundingBox2dMode" rawBoundingBox2dMode = "rawBoundingBox2dMode" semanticProjBoundingBox3dMode = "semanticProjBoundingBox3dMode" semanticBoundingBox3dMode = "semanticBoundingBox3dMode" rawBoundingBox3dMode = "rawBoundingBox3dMode" pinhole = "pinhole" perspective = "perspective" orthographic = "orthographic" fisheyePolynomial = "fisheyePolynomial" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def cameraFisheyeParams(self): data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) return data_view.get() @cameraFisheyeParams.setter def cameraFisheyeParams(self, value): if self._setting_locked: raise 
og.ReadOnlyError(self._attributes.cameraFisheyeParams) data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) data_view.set(value) self.cameraFisheyeParams_size = data_view.get_array_size() @property def cameraModel(self): data_view = og.AttributeValueHelper(self._attributes.cameraModel) return data_view.get() @cameraModel.setter def cameraModel(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cameraModel) data_view = og.AttributeValueHelper(self._attributes.cameraModel) data_view.set(value) @property def cameraNearFar(self): data_view = og.AttributeValueHelper(self._attributes.cameraNearFar) return data_view.get() @cameraNearFar.setter def cameraNearFar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cameraNearFar) data_view = og.AttributeValueHelper(self._attributes.cameraNearFar) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def instanceMapSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr) return data_view.get() @instanceMapSDCudaPtr.setter def instanceMapSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMapSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr) data_view.set(value) @property def instanceMappingInfoSDPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) 
return data_view.get() @instanceMappingInfoSDPtr.setter def instanceMappingInfoSDPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMappingInfoSDPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) data_view.set(value) @property def metersPerSceneUnit(self): data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) return data_view.get() @metersPerSceneUnit.setter def metersPerSceneUnit(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.metersPerSceneUnit) data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) data_view.set(value) @property def mode(self): data_view = og.AttributeValueHelper(self._attributes.mode) return data_view.get() @mode.setter def mode(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.mode) data_view = og.AttributeValueHelper(self._attributes.mode) data_view.set(value) @property def parameters(self): data_view = og.AttributeValueHelper(self._attributes.parameters) return data_view.get() @parameters.setter def parameters(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.parameters) data_view = og.AttributeValueHelper(self._attributes.parameters) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) @property def renderVarDisplay(self): data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) return data_view.get() @renderVarDisplay.setter def renderVarDisplay(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVarDisplay) data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) data_view.set(value) @property 
def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) @property def sdDisplayHeight(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayHeight) return data_view.get() @sdDisplayHeight.setter def sdDisplayHeight(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayHeight) data_view = og.AttributeValueHelper(self._attributes.sdDisplayHeight) data_view.set(value) @property def sdDisplayWidth(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayWidth) return data_view.get() @sdDisplayWidth.setter def sdDisplayWidth(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayWidth) data_view = og.AttributeValueHelper(self._attributes.sdDisplayWidth) data_view.set(value) @property def sdSemBBox3dCamCornersCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamCornersCudaPtr) return data_view.get() @sdSemBBox3dCamCornersCudaPtr.setter def sdSemBBox3dCamCornersCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBox3dCamCornersCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamCornersCudaPtr) data_view.set(value) @property def sdSemBBox3dCamExtentCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamExtentCudaPtr) return data_view.get() @sdSemBBox3dCamExtentCudaPtr.setter def sdSemBBox3dCamExtentCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBox3dCamExtentCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamExtentCudaPtr) data_view.set(value) @property def sdSemBBoxExtentCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) return 
data_view.get() @sdSemBBoxExtentCudaPtr.setter def sdSemBBoxExtentCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBoxExtentCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) data_view.set(value) @property def sdSemBBoxInfosCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) return data_view.get() @sdSemBBoxInfosCudaPtr.setter def sdSemBBoxInfosCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBoxInfosCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) data_view.set(value) @property def semanticLabelTokenSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenSDCudaPtr) return data_view.get() @semanticLabelTokenSDCudaPtr.setter def semanticLabelTokenSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticLabelTokenSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenSDCudaPtr) data_view.set(value) @property def semanticMapSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr) return data_view.get() @semanticMapSDCudaPtr.setter def semanticMapSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticMapSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr) data_view.set(value) @property def semanticPrimTokenSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticPrimTokenSDCudaPtr) return data_view.get() @semanticPrimTokenSDCudaPtr.setter def semanticPrimTokenSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticPrimTokenSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticPrimTokenSDCudaPtr) data_view.set(value) @property def semanticWorldTransformSDCudaPtr(self): data_view = 
og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr) return data_view.get() @semanticWorldTransformSDCudaPtr.setter def semanticWorldTransformSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticWorldTransformSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def cudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) return data_view.get() @cudaPtr.setter def cudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return data_view.get() @format.setter def format(self, value): data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) 
@property def renderVarDisplay(self): data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) return data_view.get() @renderVarDisplay.setter def renderVarDisplay(self, value): data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostRenderVarDisplayTextureDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostRenderVarDisplayTextureDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostRenderVarDisplayTextureDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
26,399
Python
45.234676
256
0.657639
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTestStageSynchronizationDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTestStageSynchronization Synthetic Data node to test the pipeline stage synchronization """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTestStageSynchronizationDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTestStageSynchronization Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.randomMaxProcessingTimeUs inputs.randomSeed inputs.renderResults inputs.rp inputs.swhFrameNumber inputs.tag inputs.traceError Outputs: outputs.exec outputs.fabricSWHFrameNumber outputs.swhFrameNumber """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'OnDemand connection : trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'PostRender connection : pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:randomMaxProcessingTimeUs', 'uint', 0, None, 'Maximum number of micro-seconds to randomly (uniformely) wait for in order to simulate varying workload', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('inputs:randomSeed', 'uint', 0, None, 'Random seed for the randomization', 
{ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'OnDemand connection : pointer to render product results', {}, True, 0, False, ''), ('inputs:rp', 'uint64', 0, 'renderProduct', 'PostRender connection : pointer to render product for this view', {}, True, 0, False, ''), ('inputs:swhFrameNumber', 'uint64', 0, None, 'Fabric frame number', {}, True, 0, False, ''), ('inputs:tag', 'token', 0, None, 'A tag to identify the node', {}, True, "", False, ''), ('inputs:traceError', 'bool', 0, None, 'If true print an error message when the frame numbers are out-of-sync', {ogn.MetadataKeys.DEFAULT: 'false'}, True, False, False, ''), ('outputs:exec', 'execution', 0, None, 'OnDemand connection : trigger', {}, True, None, False, ''), ('outputs:fabricSWHFrameNumber', 'uint64', 0, None, 'Fabric frame number from the fabric', {}, True, None, False, ''), ('outputs:swhFrameNumber', 'uint64', 0, None, 'Fabric frame number', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = 
og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def randomMaxProcessingTimeUs(self): data_view = og.AttributeValueHelper(self._attributes.randomMaxProcessingTimeUs) return data_view.get() @randomMaxProcessingTimeUs.setter def randomMaxProcessingTimeUs(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.randomMaxProcessingTimeUs) data_view = og.AttributeValueHelper(self._attributes.randomMaxProcessingTimeUs) data_view.set(value) @property def randomSeed(self): data_view = og.AttributeValueHelper(self._attributes.randomSeed) return data_view.get() @randomSeed.setter def randomSeed(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.randomSeed) data_view = og.AttributeValueHelper(self._attributes.randomSeed) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) @property def swhFrameNumber(self): data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) return data_view.get() @swhFrameNumber.setter def swhFrameNumber(self, value): if self._setting_locked: raise 
og.ReadOnlyError(self._attributes.swhFrameNumber) data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) data_view.set(value) @property def tag(self): data_view = og.AttributeValueHelper(self._attributes.tag) return data_view.get() @tag.setter def tag(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.tag) data_view = og.AttributeValueHelper(self._attributes.tag) data_view.set(value) @property def traceError(self): data_view = og.AttributeValueHelper(self._attributes.traceError) return data_view.get() @traceError.setter def traceError(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.traceError) data_view = og.AttributeValueHelper(self._attributes.traceError) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def fabricSWHFrameNumber(self): data_view = og.AttributeValueHelper(self._attributes.fabricSWHFrameNumber) return data_view.get() @fabricSWHFrameNumber.setter def fabricSWHFrameNumber(self, value): data_view = og.AttributeValueHelper(self._attributes.fabricSWHFrameNumber) data_view.set(value) @property def swhFrameNumber(self): 
data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) return data_view.get() @swhFrameNumber.setter def swhFrameNumber(self, value): data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTestStageSynchronizationDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTestStageSynchronizationDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTestStageSynchronizationDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
11,401
Python
44.067194
222
0.643277
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdFabricTimeRangeExecutionDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdFabricTimeRangeExecution Read a rational time range from Fabric or RenderVars and signal its execution if the current time fall within this range. The range is [begin,end[, that is the end time does not belong to the range. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdFabricTimeRangeExecutionDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdFabricTimeRangeExecution Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.renderResults inputs.timeRangeBeginDenominatorToken inputs.timeRangeBeginNumeratorToken inputs.timeRangeEndDenominatorToken inputs.timeRangeEndNumeratorToken inputs.timeRangeName Outputs: outputs.exec outputs.timeRangeBeginDenominator outputs.timeRangeBeginNumerator outputs.timeRangeEndDenominator outputs.timeRangeEndNumerator """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, None, 'Pointer to shared context containing gpu foundations.', {}, True, 0, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results', {}, True, 0, False, ''), 
('inputs:timeRangeBeginDenominatorToken', 'token', 0, None, 'Attribute name of the range begin time denominator', {ogn.MetadataKeys.DEFAULT: '"timeRangeStartDenominator"'}, True, "timeRangeStartDenominator", False, ''), ('inputs:timeRangeBeginNumeratorToken', 'token', 0, None, 'Attribute name of the range begin time numerator', {ogn.MetadataKeys.DEFAULT: '"timeRangeStartNumerator"'}, True, "timeRangeStartNumerator", False, ''), ('inputs:timeRangeEndDenominatorToken', 'token', 0, None, 'Attribute name of the range end time denominator', {ogn.MetadataKeys.DEFAULT: '"timeRangeEndDenominator"'}, True, "timeRangeEndDenominator", False, ''), ('inputs:timeRangeEndNumeratorToken', 'token', 0, None, 'Attribute name of the range end time numerator', {ogn.MetadataKeys.DEFAULT: '"timeRangeEndNumerator"'}, True, "timeRangeEndNumerator", False, ''), ('inputs:timeRangeName', 'token', 0, None, 'Time range name used to read from the Fabric or RenderVars.', {}, True, "", False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:timeRangeBeginDenominator', 'uint64', 0, None, 'Time denominator of the last time range change (begin)', {}, True, None, False, ''), ('outputs:timeRangeBeginNumerator', 'int64', 0, None, 'Time numerator of the last time range change (begin)', {}, True, None, False, ''), ('outputs:timeRangeEndDenominator', 'uint64', 0, None, 'Time denominator of the last time range change (end)', {}, True, None, False, ''), ('outputs:timeRangeEndNumerator', 'int64', 0, None, 'Time numerator of the last time range change (end)', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that 
creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) @property def timeRangeBeginDenominatorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginDenominatorToken) return data_view.get() @timeRangeBeginDenominatorToken.setter def timeRangeBeginDenominatorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeBeginDenominatorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginDenominatorToken) data_view.set(value) @property def timeRangeBeginNumeratorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumeratorToken) return data_view.get() @timeRangeBeginNumeratorToken.setter def 
timeRangeBeginNumeratorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeBeginNumeratorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumeratorToken) data_view.set(value) @property def timeRangeEndDenominatorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominatorToken) return data_view.get() @timeRangeEndDenominatorToken.setter def timeRangeEndDenominatorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeEndDenominatorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominatorToken) data_view.set(value) @property def timeRangeEndNumeratorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumeratorToken) return data_view.get() @timeRangeEndNumeratorToken.setter def timeRangeEndNumeratorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeEndNumeratorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumeratorToken) data_view.set(value) @property def timeRangeName(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeName) return data_view.get() @timeRangeName.setter def timeRangeName(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeName) data_view = og.AttributeValueHelper(self._attributes.timeRangeName) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = 
node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def timeRangeBeginDenominator(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginDenominator) return data_view.get() @timeRangeBeginDenominator.setter def timeRangeBeginDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginDenominator) data_view.set(value) @property def timeRangeBeginNumerator(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumerator) return data_view.get() @timeRangeBeginNumerator.setter def timeRangeBeginNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumerator) data_view.set(value) @property def timeRangeEndDenominator(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominator) return data_view.get() @timeRangeEndDenominator.setter def timeRangeEndDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominator) data_view.set(value) @property def timeRangeEndNumerator(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumerator) return data_view.get() @timeRangeEndNumerator.setter def timeRangeEndNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumerator) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize 
simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdFabricTimeRangeExecutionDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdFabricTimeRangeExecutionDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdFabricTimeRangeExecutionDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
12,894
Python
47.844697
227
0.669148
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdRenderVarDisplayTextureDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdRenderVarDisplayTexture Synthetic Data node to expose texture resource of a visualization render variable """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdRenderVarDisplayTextureDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdRenderVarDisplayTexture Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.renderResults inputs.renderVarDisplay Outputs: outputs.cudaPtr outputs.exec outputs.format outputs.height outputs.referenceTimeDenominator outputs.referenceTimeNumerator outputs.rpResourcePtr outputs.width """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results pointer', {}, True, 0, False, ''), ('inputs:renderVarDisplay', 'token', 0, None, 'Name of the renderVar', {}, True, "", False, ''), ('outputs:cudaPtr', 'uint64', 0, None, 'Display texture CUDA pointer', {}, True, None, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:format', 'uint64', 0, None, 'Display texture format', {}, True, None, False, ''), 
('outputs:height', 'uint', 0, None, 'Display texture height', {}, True, None, False, ''), ('outputs:referenceTimeDenominator', 'uint64', 0, None, 'Reference time represented as a rational number : denominator', {}, True, None, False, ''), ('outputs:referenceTimeNumerator', 'int64', 0, None, 'Reference time represented as a rational number : numerator', {}, True, None, False, ''), ('outputs:rpResourcePtr', 'uint64', 0, None, 'Display texture RpResource pointer', {}, True, None, False, ''), ('outputs:width', 'uint', 0, None, 'Display texture width', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) 
data_view.set(value) @property def renderVarDisplay(self): data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) return data_view.get() @renderVarDisplay.setter def renderVarDisplay(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVarDisplay) data_view = og.AttributeValueHelper(self._attributes.renderVarDisplay) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def cudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) return data_view.get() @cudaPtr.setter def cudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return data_view.get() @format.setter def format(self, value): data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) 
@property def referenceTimeDenominator(self): data_view = og.AttributeValueHelper(self._attributes.referenceTimeDenominator) return data_view.get() @referenceTimeDenominator.setter def referenceTimeDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.referenceTimeDenominator) data_view.set(value) @property def referenceTimeNumerator(self): data_view = og.AttributeValueHelper(self._attributes.referenceTimeNumerator) return data_view.get() @referenceTimeNumerator.setter def referenceTimeNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.referenceTimeNumerator) data_view.set(value) @property def rpResourcePtr(self): data_view = og.AttributeValueHelper(self._attributes.rpResourcePtr) return data_view.get() @rpResourcePtr.setter def rpResourcePtr(self, value): data_view = og.AttributeValueHelper(self._attributes.rpResourcePtr) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdRenderVarDisplayTextureDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, 
og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdRenderVarDisplayTextureDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdRenderVarDisplayTextureDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
10,061
Python
42.938864
156
0.646158
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostRenderVarTextureToBufferDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarTextureToBuffer Expose a device renderVar buffer a texture one. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostRenderVarTextureToBufferDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarTextureToBuffer Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.renderVar inputs.renderVarBufferSuffix inputs.rp Outputs: outputs.exec outputs.renderVar """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, None, 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the device renderVar to expose on the host', {}, True, "", False, ''), ('inputs:renderVarBufferSuffix', 'string', 0, None, 'Suffix appended to the renderVar name', {ogn.MetadataKeys.DEFAULT: '"buffer"'}, True, "buffer", False, ''), ('inputs:rp', 'uint64', 0, None, 'Pointer to render product for this view', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:renderVar', 
'token', 0, None, 'Name of the resulting renderVar on the host', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.inputs.renderVarBufferSuffix = og.AttributeRole.TEXT role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) @property def renderVarBufferSuffix(self): data_view = og.AttributeValueHelper(self._attributes.renderVarBufferSuffix) return data_view.get() 
@renderVarBufferSuffix.setter def renderVarBufferSuffix(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVarBufferSuffix) data_view = og.AttributeValueHelper(self._attributes.renderVarBufferSuffix) data_view.set(value) self.renderVarBufferSuffix_size = data_view.get_array_size() @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def 
__init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostRenderVarTextureToBufferDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostRenderVarTextureToBufferDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostRenderVarTextureToBufferDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
8,373
Python
43.780748
168
0.648633
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdRenderProductCameraDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdRenderProductCamera Synthetic Data node to expose the camera data """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdRenderProductCameraDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdRenderProductCamera Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.renderProductPath inputs.renderResults Outputs: outputs.cameraApertureOffset outputs.cameraApertureSize outputs.cameraFStop outputs.cameraFisheyeParams outputs.cameraFocalLength outputs.cameraFocusDistance outputs.cameraModel outputs.cameraNearFar outputs.cameraProjection outputs.cameraViewTransform outputs.exec outputs.metersPerSceneUnit outputs.renderProductResolution Predefined Tokens: tokens.RenderProductCameraSD """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, None, 'Pointer to shared context containing gpu foundations.', {}, True, 0, False, ''), ('inputs:renderProductPath', 'token', 0, None, 'RenderProduct prim path', {}, True, "", False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results', {}, 
True, 0, False, ''), ('outputs:cameraApertureOffset', 'float2', 0, None, 'Camera horizontal and vertical aperture offset', {}, True, None, False, ''), ('outputs:cameraApertureSize', 'float2', 0, None, 'Camera horizontal and vertical aperture', {}, True, None, False, ''), ('outputs:cameraFStop', 'float', 0, None, 'Camera fStop', {}, True, None, False, ''), ('outputs:cameraFisheyeParams', 'float[]', 0, None, 'Camera fisheye projection parameters', {}, True, None, False, ''), ('outputs:cameraFocalLength', 'float', 0, None, 'Camera focal length', {}, True, None, False, ''), ('outputs:cameraFocusDistance', 'float', 0, None, 'Camera focus distance', {}, True, None, False, ''), ('outputs:cameraModel', 'int', 0, None, 'Camera model (pinhole or fisheye models)', {}, True, None, False, ''), ('outputs:cameraNearFar', 'float2', 0, None, 'Camera near/far clipping range', {}, True, None, False, ''), ('outputs:cameraProjection', 'matrix4d', 0, None, 'Camera projection matrix', {}, True, None, False, ''), ('outputs:cameraViewTransform', 'matrix4d', 0, None, 'Camera view matrix', {}, True, None, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes for each newFrame event received', {}, True, None, False, ''), ('outputs:metersPerSceneUnit', 'float', 0, None, 'Scene units to meters scale', {}, True, None, False, ''), ('outputs:renderProductResolution', 'int2', 0, None, 'RenderProduct resolution', {}, True, None, False, ''), ]) class tokens: RenderProductCameraSD = "RenderProductCameraSD" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.cameraProjection = og.AttributeRole.MATRIX role_data.outputs.cameraViewTransform = og.AttributeRole.MATRIX role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } 
"""Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def renderProductPath(self): data_view = og.AttributeValueHelper(self._attributes.renderProductPath) return data_view.get() @renderProductPath.setter def renderProductPath(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductPath) data_view = og.AttributeValueHelper(self._attributes.renderProductPath) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): 
LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.cameraFisheyeParams_size = None self._batchedWriteValues = { } @property def cameraApertureOffset(self): data_view = og.AttributeValueHelper(self._attributes.cameraApertureOffset) return data_view.get() @cameraApertureOffset.setter def cameraApertureOffset(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraApertureOffset) data_view.set(value) @property def cameraApertureSize(self): data_view = og.AttributeValueHelper(self._attributes.cameraApertureSize) return data_view.get() @cameraApertureSize.setter def cameraApertureSize(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraApertureSize) data_view.set(value) @property def cameraFStop(self): data_view = og.AttributeValueHelper(self._attributes.cameraFStop) return data_view.get() @cameraFStop.setter def cameraFStop(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraFStop) data_view.set(value) @property def cameraFisheyeParams(self): data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) return data_view.get(reserved_element_count=self.cameraFisheyeParams_size) @cameraFisheyeParams.setter def cameraFisheyeParams(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) data_view.set(value) self.cameraFisheyeParams_size = data_view.get_array_size() @property def cameraFocalLength(self): data_view = og.AttributeValueHelper(self._attributes.cameraFocalLength) return data_view.get() @cameraFocalLength.setter def cameraFocalLength(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraFocalLength) 
data_view.set(value) @property def cameraFocusDistance(self): data_view = og.AttributeValueHelper(self._attributes.cameraFocusDistance) return data_view.get() @cameraFocusDistance.setter def cameraFocusDistance(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraFocusDistance) data_view.set(value) @property def cameraModel(self): data_view = og.AttributeValueHelper(self._attributes.cameraModel) return data_view.get() @cameraModel.setter def cameraModel(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraModel) data_view.set(value) @property def cameraNearFar(self): data_view = og.AttributeValueHelper(self._attributes.cameraNearFar) return data_view.get() @cameraNearFar.setter def cameraNearFar(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraNearFar) data_view.set(value) @property def cameraProjection(self): data_view = og.AttributeValueHelper(self._attributes.cameraProjection) return data_view.get() @cameraProjection.setter def cameraProjection(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraProjection) data_view.set(value) @property def cameraViewTransform(self): data_view = og.AttributeValueHelper(self._attributes.cameraViewTransform) return data_view.get() @cameraViewTransform.setter def cameraViewTransform(self, value): data_view = og.AttributeValueHelper(self._attributes.cameraViewTransform) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def metersPerSceneUnit(self): data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) return data_view.get() @metersPerSceneUnit.setter def metersPerSceneUnit(self, value): data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) data_view.set(value) @property def renderProductResolution(self): 
data_view = og.AttributeValueHelper(self._attributes.renderProductResolution) return data_view.get() @renderProductResolution.setter def renderProductResolution(self, value): data_view = og.AttributeValueHelper(self._attributes.renderProductResolution) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdRenderProductCameraDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdRenderProductCameraDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdRenderProductCameraDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
13,883
Python
43.21656
137
0.649643
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostCompRenderVarTexturesDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostCompRenderVarTextures Synthetic Data node to compose a front renderVar texture into a back renderVar texture """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostCompRenderVarTexturesDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostCompRenderVarTextures Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.cudaPtr inputs.format inputs.gpu inputs.height inputs.mode inputs.parameters inputs.renderVar inputs.rp inputs.width Predefined Tokens: tokens.line tokens.grid """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:cudaPtr', 'uint64', 0, None, 'Front texture CUDA pointer', {}, True, 0, False, ''), ('inputs:format', 'uint64', 0, None, 'Front texture format', {}, True, 0, False, ''), ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:height', 'uint', 0, None, 'Front texture height', {}, True, 0, False, ''), ('inputs:mode', 'token', 0, None, 'Mode : grid, line', {ogn.MetadataKeys.DEFAULT: '"line"'}, True, "line", False, ''), ('inputs:parameters', 'float3', 0, None, 'Parameters', 
{ogn.MetadataKeys.DEFAULT: '[0, 0, 0]'}, True, [0, 0, 0], False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the back RenderVar', {ogn.MetadataKeys.DEFAULT: '"LdrColor"'}, True, "LdrColor", False, ''), ('inputs:rp', 'uint64', 0, 'renderProduct', 'Pointer to render product for this view', {}, True, 0, False, ''), ('inputs:width', 'uint', 0, None, 'Front texture width', {}, True, 0, False, ''), ]) class tokens: line = "line" grid = "grid" class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def cudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) return data_view.get() @cudaPtr.setter def cudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cudaPtr) data_view = og.AttributeValueHelper(self._attributes.cudaPtr) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return data_view.get() @format.setter def format(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.format) data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, 
value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.height) data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) @property def mode(self): data_view = og.AttributeValueHelper(self._attributes.mode) return data_view.get() @mode.setter def mode(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.mode) data_view = og.AttributeValueHelper(self._attributes.mode) data_view.set(value) @property def parameters(self): data_view = og.AttributeValueHelper(self._attributes.parameters) return data_view.get() @parameters.setter def parameters(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.parameters) data_view = og.AttributeValueHelper(self._attributes.parameters) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.width) data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class 
that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostCompRenderVarTexturesDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostCompRenderVarTexturesDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostCompRenderVarTexturesDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
9,264
Python
41.695852
148
0.630937
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdOnNewRenderProductFrameDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdOnNewRenderProductFrame Synthetic Data postprocess node to execute pipeline after the NewFrame event has been received on the given renderProduct """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdOnNewRenderProductFrameDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdOnNewRenderProductFrame Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.renderProductDataPtrs inputs.renderProductPath inputs.renderProductPaths Outputs: outputs.cudaStream outputs.exec outputs.renderProductPath outputs.renderResults """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, 'Received', 'Executes for each newFrame event received', {}, True, None, False, ''), ('inputs:renderProductDataPtrs', 'uint64[]', 0, None, 'HydraRenderProduct data pointers.', {}, True, [], False, ''), ('inputs:renderProductPath', 'token', 0, None, 'Path of the renderProduct to wait for being rendered', {}, True, "", False, ''), ('inputs:renderProductPaths', 'token[]', 0, None, 'Render product path tokens.', {}, True, [], False, ''), ('outputs:cudaStream', 'uint64', 0, None, 'Cuda stream', {}, True, 
None, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes for each newFrame event received', {}, True, None, False, ''), ('outputs:renderProductPath', 'token', 0, None, 'Path of the renderProduct to wait for being rendered', {}, True, None, False, ''), ('outputs:renderResults', 'uint64', 0, None, 'Render results', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderProductDataPtrs(self): data_view = og.AttributeValueHelper(self._attributes.renderProductDataPtrs) return data_view.get() @renderProductDataPtrs.setter def renderProductDataPtrs(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductDataPtrs) data_view = og.AttributeValueHelper(self._attributes.renderProductDataPtrs) data_view.set(value) self.renderProductDataPtrs_size = data_view.get_array_size() @property def renderProductPath(self): data_view = 
og.AttributeValueHelper(self._attributes.renderProductPath) return data_view.get() @renderProductPath.setter def renderProductPath(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductPath) data_view = og.AttributeValueHelper(self._attributes.renderProductPath) data_view.set(value) @property def renderProductPaths(self): data_view = og.AttributeValueHelper(self._attributes.renderProductPaths) return data_view.get() @renderProductPaths.setter def renderProductPaths(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductPaths) data_view = og.AttributeValueHelper(self._attributes.renderProductPaths) data_view.set(value) self.renderProductPaths_size = data_view.get_array_size() def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def cudaStream(self): data_view = og.AttributeValueHelper(self._attributes.cudaStream) return data_view.get() @cudaStream.setter def cudaStream(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaStream) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderProductPath(self): data_view = 
og.AttributeValueHelper(self._attributes.renderProductPath) return data_view.get() @renderProductPath.setter def renderProductPath(self, value): data_view = og.AttributeValueHelper(self._attributes.renderProductPath) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdOnNewRenderProductFrameDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdOnNewRenderProductFrameDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdOnNewRenderProductFrameDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
9,113
Python
45.030303
139
0.658839
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTestPrintRawArrayDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTestPrintRawArray Synthetic Data test node printing the input linear array """ import numpy import sys import traceback import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTestPrintRawArrayDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTestPrintRawArray Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.bufferSize inputs.data inputs.dataFileBaseName inputs.elementCount inputs.elementType inputs.exec inputs.height inputs.mode inputs.randomSeed inputs.referenceNumUniqueRandomValues inputs.referenceSWHFrameNumbers inputs.referenceTolerance inputs.referenceValues inputs.swhFrameNumber inputs.width Outputs: outputs.exec outputs.swhFrameNumber State: state.initialSWHFrameNumber Predefined Tokens: tokens.uint16 tokens.int16 tokens.uint32 tokens.int32 tokens.float32 tokens.token tokens.printFormatted tokens.printReferences tokens.writeToDisk """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:bufferSize', 'uint', 0, None, 'Size (in bytes) of the buffer (0 if the input is a texture)', {}, True, 0, False, ''), ('inputs:data', 'uchar[]', 0, None, 'Buffer array data', {ogn.MetadataKeys.DEFAULT: '[]'}, True, [], 
False, ''), ('inputs:dataFileBaseName', 'token', 0, None, 'Basename of the output npy file', {ogn.MetadataKeys.DEFAULT: '"/tmp/sdTestRawArray"'}, True, "/tmp/sdTestRawArray", False, ''), ('inputs:elementCount', 'int', 0, None, 'Number of array element', {ogn.MetadataKeys.DEFAULT: '1'}, True, 1, False, ''), ('inputs:elementType', 'token', 0, None, 'Type of the array element', {ogn.MetadataKeys.DEFAULT: '"uint8"'}, True, "uint8", False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:height', 'uint', 0, None, 'Height (0 if the input is a buffer)', {}, True, 0, False, ''), ('inputs:mode', 'token', 0, None, 'Mode in [printFormatted, printReferences, testReferences]', {ogn.MetadataKeys.DEFAULT: '"printFormatted"'}, True, "printFormatted", False, ''), ('inputs:randomSeed', 'int', 0, None, 'Random seed', {}, True, 0, False, ''), ('inputs:referenceNumUniqueRandomValues', 'int', 0, None, 'Number of reference unique random values to compare', {ogn.MetadataKeys.DEFAULT: '7'}, True, 7, False, ''), ('inputs:referenceSWHFrameNumbers', 'uint[]', 0, None, 'Reference swhFrameNumbers relative to the first one', {ogn.MetadataKeys.DEFAULT: '[11, 17, 29]'}, True, [11, 17, 29], False, ''), ('inputs:referenceTolerance', 'float', 0, None, 'Reference tolerance', {ogn.MetadataKeys.DEFAULT: '0.1'}, True, 0.1, False, ''), ('inputs:referenceValues', 'float[]', 0, None, 'Reference data point values', {}, True, [], False, ''), ('inputs:swhFrameNumber', 'uint64', 0, None, 'Frame number', {}, True, 0, False, ''), ('inputs:width', 'uint', 0, None, 'Width (0 if the input is a buffer)', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:swhFrameNumber', 'uint64', 0, None, 'FrameNumber just rendered', {}, True, None, False, ''), ('state:initialSWHFrameNumber', 'int64', 0, None, 'Initial swhFrameNumber', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, ''), ]) 
class tokens: uint16 = "uint16" int16 = "int16" uint32 = "uint32" int32 = "int32" float32 = "float32" token = "token" printFormatted = "printFormatted" printReferences = "printReferences" writeToDisk = "writeToDisk" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = {"bufferSize", "dataFileBaseName", "elementCount", "elementType", "exec", "height", "mode", "randomSeed", "referenceNumUniqueRandomValues", "referenceTolerance", "swhFrameNumber", "width", "_setting_locked", "_batchedReadAttributes", "_batchedReadValues"} """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [self._attributes.bufferSize, self._attributes.dataFileBaseName, self._attributes.elementCount, self._attributes.elementType, self._attributes.exec, self._attributes.height, self._attributes.mode, self._attributes.randomSeed, self._attributes.referenceNumUniqueRandomValues, self._attributes.referenceTolerance, self._attributes.swhFrameNumber, self._attributes.width] self._batchedReadValues = [0, "/tmp/sdTestRawArray", 1, "uint8", None, 0, "printFormatted", 0, 7, 0.1, 0, 0] @property def data(self): data_view = og.AttributeValueHelper(self._attributes.data) return data_view.get() @data.setter def data(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.data) data_view = og.AttributeValueHelper(self._attributes.data) data_view.set(value) self.data_size 
= data_view.get_array_size() @property def referenceSWHFrameNumbers(self): data_view = og.AttributeValueHelper(self._attributes.referenceSWHFrameNumbers) return data_view.get() @referenceSWHFrameNumbers.setter def referenceSWHFrameNumbers(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.referenceSWHFrameNumbers) data_view = og.AttributeValueHelper(self._attributes.referenceSWHFrameNumbers) data_view.set(value) self.referenceSWHFrameNumbers_size = data_view.get_array_size() @property def referenceValues(self): data_view = og.AttributeValueHelper(self._attributes.referenceValues) return data_view.get() @referenceValues.setter def referenceValues(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.referenceValues) data_view = og.AttributeValueHelper(self._attributes.referenceValues) data_view.set(value) self.referenceValues_size = data_view.get_array_size() @property def bufferSize(self): return self._batchedReadValues[0] @bufferSize.setter def bufferSize(self, value): self._batchedReadValues[0] = value @property def dataFileBaseName(self): return self._batchedReadValues[1] @dataFileBaseName.setter def dataFileBaseName(self, value): self._batchedReadValues[1] = value @property def elementCount(self): return self._batchedReadValues[2] @elementCount.setter def elementCount(self, value): self._batchedReadValues[2] = value @property def elementType(self): return self._batchedReadValues[3] @elementType.setter def elementType(self, value): self._batchedReadValues[3] = value @property def exec(self): return self._batchedReadValues[4] @exec.setter def exec(self, value): self._batchedReadValues[4] = value @property def height(self): return self._batchedReadValues[5] @height.setter def height(self, value): self._batchedReadValues[5] = value @property def mode(self): return self._batchedReadValues[6] @mode.setter def mode(self, value): self._batchedReadValues[6] = value @property def randomSeed(self): return 
self._batchedReadValues[7] @randomSeed.setter def randomSeed(self, value): self._batchedReadValues[7] = value @property def referenceNumUniqueRandomValues(self): return self._batchedReadValues[8] @referenceNumUniqueRandomValues.setter def referenceNumUniqueRandomValues(self, value): self._batchedReadValues[8] = value @property def referenceTolerance(self): return self._batchedReadValues[9] @referenceTolerance.setter def referenceTolerance(self, value): self._batchedReadValues[9] = value @property def swhFrameNumber(self): return self._batchedReadValues[10] @swhFrameNumber.setter def swhFrameNumber(self, value): self._batchedReadValues[10] = value @property def width(self): return self._batchedReadValues[11] @width.setter def width(self, value): self._batchedReadValues[11] = value def __getattr__(self, item: str): if item in self.LOCAL_PROPERTY_NAMES: return object.__getattribute__(self, item) else: return super().__getattr__(item) def __setattr__(self, item: str, new_value): if item in self.LOCAL_PROPERTY_NAMES: object.__setattr__(self, item, new_value) else: super().__setattr__(item, new_value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = {"exec", "swhFrameNumber", "_batchedWriteValues"} """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): value = self._batchedWriteValues.get(self._attributes.exec) if value: return value else: data_view = 
og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): self._batchedWriteValues[self._attributes.exec] = value @property def swhFrameNumber(self): value = self._batchedWriteValues.get(self._attributes.swhFrameNumber) if value: return value else: data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) return data_view.get() @swhFrameNumber.setter def swhFrameNumber(self, value): self._batchedWriteValues[self._attributes.swhFrameNumber] = value def __getattr__(self, item: str): if item in self.LOCAL_PROPERTY_NAMES: return object.__getattribute__(self, item) else: return super().__getattr__(item) def __setattr__(self, item: str, new_value): if item in self.LOCAL_PROPERTY_NAMES: object.__setattr__(self, item, new_value) else: super().__setattr__(item, new_value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) @property def initialSWHFrameNumber(self): data_view = og.AttributeValueHelper(self._attributes.initialSWHFrameNumber) return data_view.get() @initialSWHFrameNumber.setter def initialSWHFrameNumber(self, value): data_view = og.AttributeValueHelper(self._attributes.initialSWHFrameNumber) data_view.set(value) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTestPrintRawArrayDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, 
og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTestPrintRawArrayDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTestPrintRawArrayDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes) class abi: """Class defining the ABI interface for the node type""" @staticmethod def get_node_type(): get_node_type_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'get_node_type', None) if callable(get_node_type_function): return get_node_type_function() return 'omni.syntheticdata.SdTestPrintRawArray' @staticmethod def compute(context, node): def database_valid(): return True try: per_node_data = OgnSdTestPrintRawArrayDatabase.PER_NODE_DATA[node.node_id()] db = per_node_data.get('_db') if db is None: db = OgnSdTestPrintRawArrayDatabase(node) per_node_data['_db'] = db if not database_valid(): per_node_data['_db'] = None return False except: db = OgnSdTestPrintRawArrayDatabase(node) try: compute_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'compute', None) if callable(compute_function) and compute_function.__code__.co_argcount > 1: return compute_function(context, node) db.inputs._prefetch() db.inputs._setting_locked = True with og.in_compute(): return OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS.compute(db) except Exception as error: stack_trace = "".join(traceback.format_tb(sys.exc_info()[2].tb_next)) db.log_error(f'Assertion raised in compute - {error}\n{stack_trace}', add_context=False) finally: db.inputs._setting_locked = False db.outputs._commit() return False @staticmethod def initialize(context, node): OgnSdTestPrintRawArrayDatabase._initialize_per_node_data(node) initialize_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'initialize', None) if callable(initialize_function): initialize_function(context, node) per_node_data = 
OgnSdTestPrintRawArrayDatabase.PER_NODE_DATA[node.node_id()] def on_connection_or_disconnection(*args): per_node_data['_db'] = None node.register_on_connected_callback(on_connection_or_disconnection) node.register_on_disconnected_callback(on_connection_or_disconnection) @staticmethod def release(node): release_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'release', None) if callable(release_function): release_function(node) OgnSdTestPrintRawArrayDatabase._release_per_node_data(node) @staticmethod def release_instance(node, target): OgnSdTestPrintRawArrayDatabase._release_per_node_instance_data(node, target) @staticmethod def update_node_version(context, node, old_version, new_version): update_node_version_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'update_node_version', None) if callable(update_node_version_function): return update_node_version_function(context, node, old_version, new_version) return False @staticmethod def initialize_type(node_type): initialize_type_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'initialize_type', None) needs_initializing = True if callable(initialize_type_function): needs_initializing = initialize_type_function(node_type) if needs_initializing: node_type.set_metadata(ogn.MetadataKeys.EXTENSION, "omni.syntheticdata") node_type.set_metadata(ogn.MetadataKeys.TOKENS, "[\"uint16\", \"int16\", \"uint32\", \"int32\", \"float32\", \"token\", \"printFormatted\", \"printReferences\", \"writeToDisk\"]") node_type.set_metadata(ogn.MetadataKeys.CATEGORIES, "graph:action,internal:test") node_type.set_metadata(ogn.MetadataKeys.DESCRIPTION, "Synthetic Data test node printing the input linear array") node_type.set_metadata(ogn.MetadataKeys.EXCLUSIONS, "tests") node_type.set_metadata(ogn.MetadataKeys.LANGUAGE, "Python") OgnSdTestPrintRawArrayDatabase.INTERFACE.add_to_node_type(node_type) node_type.set_has_state(True) @staticmethod def on_connection_type_resolve(node): 
on_connection_type_resolve_function = getattr(OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS, 'on_connection_type_resolve', None) if callable(on_connection_type_resolve_function): on_connection_type_resolve_function(node) NODE_TYPE_CLASS = None @staticmethod def register(node_type_class): OgnSdTestPrintRawArrayDatabase.NODE_TYPE_CLASS = node_type_class og.register_node_type(OgnSdTestPrintRawArrayDatabase.abi, 1) @staticmethod def deregister(): og.deregister_node_type("omni.syntheticdata.SdTestPrintRawArray")
20,286
Python
43.489035
410
0.627822
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdSimRenderProductCameraDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdSimRenderProductCamera Synthetic Data node to expose the renderProduct camera in the fabric """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdSimRenderProductCameraDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdSimRenderProductCamera Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.renderProductPath Outputs: outputs.exec """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:renderProductPath', 'token', 0, None, 'renderProduct prim path', {}, True, "", False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = 
node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def renderProductPath(self): data_view = og.AttributeValueHelper(self._attributes.renderProductPath) return data_view.get() @renderProductPath.setter def renderProductPath(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductPath) data_view = og.AttributeValueHelper(self._attributes.renderProductPath) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): 
super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdSimRenderProductCameraDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdSimRenderProductCameraDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdSimRenderProductCameraDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
5,408
Python
45.62931
126
0.677885
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdUpdateSwhFrameNumberDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdUpdateSwFrameNumber Synthetic Data node to return the current update swhFrameNumber """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdUpdateSwhFrameNumberDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdUpdateSwFrameNumber Class Members: node: Node being evaluated Attribute Value Properties: Outputs: outputs.exec outputs.swhFrameNumber """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:swhFrameNumber', 'uint64', 0, None, 'Fabric frame number', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = 
node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def swhFrameNumber(self): data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) return data_view.get() @swhFrameNumber.setter def swhFrameNumber(self, value): data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, 
og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdUpdateSwhFrameNumberDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdUpdateSwhFrameNumberDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdUpdateSwhFrameNumberDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
5,275
Python
45.690265
124
0.679052
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTextureToLinearArrayDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTextureToLinearArray SyntheticData node to copy the input texture into a linear array buffer """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTextureToLinearArrayDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTextureToLinearArray Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.cudaMipmappedArray inputs.format inputs.height inputs.hydraTime inputs.mipCount inputs.outputHeight inputs.outputWidth inputs.simTime inputs.stream inputs.width Outputs: outputs.data outputs.height outputs.hydraTime outputs.simTime outputs.stream outputs.width """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:cudaMipmappedArray', 'uint64', 0, None, 'Pointer to the CUDA Mipmapped Array', {}, True, 0, False, ''), ('inputs:format', 'uint64', 0, None, 'Format', {}, True, 0, False, ''), ('inputs:height', 'uint', 0, None, 'Height', {}, True, 0, False, ''), ('inputs:hydraTime', 'double', 0, None, 'Hydra time in stage', {}, True, 0.0, False, ''), ('inputs:mipCount', 'uint', 0, None, 'Mip Count', {}, True, 0, False, ''), ('inputs:outputHeight', 'uint', 0, None, 'Requested output height', 
{ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('inputs:outputWidth', 'uint', 0, None, 'Requested output width', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('inputs:simTime', 'double', 0, None, 'Simulation time', {}, True, 0.0, False, ''), ('inputs:stream', 'uint64', 0, None, 'Pointer to the CUDA Stream', {}, True, 0, False, ''), ('inputs:width', 'uint', 0, None, 'Width', {}, True, 0, False, ''), ('outputs:data', 'float4[]', 0, None, 'Buffer array data', {ogn.MetadataKeys.MEMORY_TYPE: 'cuda', ogn.MetadataKeys.DEFAULT: '[]'}, True, [], False, ''), ('outputs:height', 'uint', 0, None, 'Buffer array height', {}, True, None, False, ''), ('outputs:hydraTime', 'double', 0, None, 'Hydra time in stage', {}, True, None, False, ''), ('outputs:simTime', 'double', 0, None, 'Simulation time', {}, True, None, False, ''), ('outputs:stream', 'uint64', 0, None, 'Pointer to the CUDA Stream', {}, True, None, False, ''), ('outputs:width', 'uint', 0, None, 'Buffer array width', {}, True, None, False, ''), ]) class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def cudaMipmappedArray(self): data_view = og.AttributeValueHelper(self._attributes.cudaMipmappedArray) return data_view.get() @cudaMipmappedArray.setter def cudaMipmappedArray(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cudaMipmappedArray) data_view = og.AttributeValueHelper(self._attributes.cudaMipmappedArray) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return 
data_view.get() @format.setter def format(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.format) data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.height) data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) @property def hydraTime(self): data_view = og.AttributeValueHelper(self._attributes.hydraTime) return data_view.get() @hydraTime.setter def hydraTime(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.hydraTime) data_view = og.AttributeValueHelper(self._attributes.hydraTime) data_view.set(value) @property def mipCount(self): data_view = og.AttributeValueHelper(self._attributes.mipCount) return data_view.get() @mipCount.setter def mipCount(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.mipCount) data_view = og.AttributeValueHelper(self._attributes.mipCount) data_view.set(value) @property def outputHeight(self): data_view = og.AttributeValueHelper(self._attributes.outputHeight) return data_view.get() @outputHeight.setter def outputHeight(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.outputHeight) data_view = og.AttributeValueHelper(self._attributes.outputHeight) data_view.set(value) @property def outputWidth(self): data_view = og.AttributeValueHelper(self._attributes.outputWidth) return data_view.get() @outputWidth.setter def outputWidth(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.outputWidth) data_view = og.AttributeValueHelper(self._attributes.outputWidth) data_view.set(value) @property def simTime(self): data_view = og.AttributeValueHelper(self._attributes.simTime) return data_view.get() @simTime.setter def simTime(self, value): 
if self._setting_locked: raise og.ReadOnlyError(self._attributes.simTime) data_view = og.AttributeValueHelper(self._attributes.simTime) data_view.set(value) @property def stream(self): data_view = og.AttributeValueHelper(self._attributes.stream) return data_view.get() @stream.setter def stream(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.stream) data_view = og.AttributeValueHelper(self._attributes.stream) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.width) data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.data_size = 0 self._batchedWriteValues = { } @property def data(self): data_view = og.AttributeValueHelper(self._attributes.data) return data_view.get(reserved_element_count=self.data_size, on_gpu=True) @data.setter def data(self, value): data_view = og.AttributeValueHelper(self._attributes.data) data_view.set(value, on_gpu=True) self.data_size = data_view.get_array_size() @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): data_view = og.AttributeValueHelper(self._attributes.height) 
data_view.set(value) @property def hydraTime(self): data_view = og.AttributeValueHelper(self._attributes.hydraTime) return data_view.get() @hydraTime.setter def hydraTime(self, value): data_view = og.AttributeValueHelper(self._attributes.hydraTime) data_view.set(value) @property def simTime(self): data_view = og.AttributeValueHelper(self._attributes.simTime) return data_view.get() @simTime.setter def simTime(self, value): data_view = og.AttributeValueHelper(self._attributes.simTime) data_view.set(value) @property def stream(self): data_view = og.AttributeValueHelper(self._attributes.stream) return data_view.get() @stream.setter def stream(self, value): data_view = og.AttributeValueHelper(self._attributes.stream) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTextureToLinearArrayDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTextureToLinearArrayDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = 
self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTextureToLinearArrayDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
12,568
Python
41.177852
160
0.620942
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdNoOpDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdNoOp Synthetic Data pass through node """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdNoOpDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdNoOp Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec Outputs: outputs.exec """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, 
attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = 
OgnSdNoOpDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdNoOpDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdNoOpDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
5,209
Python
43.529914
111
0.664235
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTestInstanceMappingDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTestInstanceMapping Synthetic Data node to test the instance mapping pipeline """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTestInstanceMappingDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTestInstanceMapping Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.instanceMapPtr inputs.instancePrimPathPtr inputs.minInstanceIndex inputs.minSemanticIndex inputs.numInstances inputs.numSemantics inputs.semanticLabelTokenPtrs inputs.semanticLocalTransformPtr inputs.semanticMapPtr inputs.semanticPrimPathPtr inputs.semanticWorldTransformPtr inputs.stage inputs.swhFrameNumber inputs.testCaseIndex Outputs: outputs.exec outputs.semanticFilterPredicate outputs.success Predefined Tokens: tokens.simulation tokens.postRender tokens.onDemand """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:instanceMapPtr', 'uint64', 0, None, 'Array pointer of numInstances uint16_t containing the semantic index of the instance prim first semantic prim parent', {}, True, 0, False, ''), ('inputs:instancePrimPathPtr', 'uint64', 
0, None, 'Array pointer of numInstances uint64_t containing the prim path tokens for every instance prims', {}, True, 0, False, ''), ('inputs:minInstanceIndex', 'uint', 0, None, 'Instance index of the first instance prim in the instance arrays', {}, True, 0, False, ''), ('inputs:minSemanticIndex', 'uint', 0, None, 'Semantic index of the first semantic prim in the semantic arrays', {}, True, 0, False, ''), ('inputs:numInstances', 'uint', 0, None, 'Number of instances prim in the instance arrays', {}, True, 0, False, ''), ('inputs:numSemantics', 'uint', 0, None, 'Number of semantic prim in the semantic arrays', {}, True, 0, False, ''), ('inputs:semanticLabelTokenPtrs', 'uint64[]', 0, None, 'Array containing for every input semantic filters the corresponding array pointer of numSemantics uint64_t representing the semantic label of the semantic prim', {}, True, [], False, ''), ('inputs:semanticLocalTransformPtr', 'uint64', 0, None, 'Array pointer of numSemantics 4x4 float matrices containing the transform from world to object space for every semantic prims', {}, True, 0, False, ''), ('inputs:semanticMapPtr', 'uint64', 0, None, 'Array pointer of numSemantics uint16_t containing the semantic index of the semantic prim first semantic prim parent', {}, True, 0, False, ''), ('inputs:semanticPrimPathPtr', 'uint64', 0, None, 'Array pointer of numSemantics uint32_t containing the prim part of the prim path tokens for every semantic prims', {}, True, 0, False, ''), ('inputs:semanticWorldTransformPtr', 'uint64', 0, None, 'Array pointer of numSemantics 4x4 float matrices containing the transform from local to world space for every semantic entity', {}, True, 0, False, ''), ('inputs:stage', 'token', 0, None, 'Stage in {simulation, postrender, ondemand}', {}, True, "", False, ''), ('inputs:swhFrameNumber', 'uint64', 0, None, 'Fabric frame number', {}, True, 0, False, ''), ('inputs:testCaseIndex', 'int', 0, None, 'Test case index', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, 
''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:semanticFilterPredicate', 'token', 0, None, 'The semantic filter predicate : a disjunctive normal form of semantic type and label', {}, True, None, False, ''), ('outputs:success', 'bool', 0, None, 'Test value : false if failed', {}, True, None, False, ''), ]) class tokens: simulation = "simulation" postRender = "postRender" onDemand = "onDemand" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def instanceMapPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMapPtr) return data_view.get() @instanceMapPtr.setter def instanceMapPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMapPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMapPtr) data_view.set(value) @property def instancePrimPathPtr(self): data_view = 
og.AttributeValueHelper(self._attributes.instancePrimPathPtr) return data_view.get() @instancePrimPathPtr.setter def instancePrimPathPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instancePrimPathPtr) data_view = og.AttributeValueHelper(self._attributes.instancePrimPathPtr) data_view.set(value) @property def minInstanceIndex(self): data_view = og.AttributeValueHelper(self._attributes.minInstanceIndex) return data_view.get() @minInstanceIndex.setter def minInstanceIndex(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.minInstanceIndex) data_view = og.AttributeValueHelper(self._attributes.minInstanceIndex) data_view.set(value) @property def minSemanticIndex(self): data_view = og.AttributeValueHelper(self._attributes.minSemanticIndex) return data_view.get() @minSemanticIndex.setter def minSemanticIndex(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.minSemanticIndex) data_view = og.AttributeValueHelper(self._attributes.minSemanticIndex) data_view.set(value) @property def numInstances(self): data_view = og.AttributeValueHelper(self._attributes.numInstances) return data_view.get() @numInstances.setter def numInstances(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.numInstances) data_view = og.AttributeValueHelper(self._attributes.numInstances) data_view.set(value) @property def numSemantics(self): data_view = og.AttributeValueHelper(self._attributes.numSemantics) return data_view.get() @numSemantics.setter def numSemantics(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.numSemantics) data_view = og.AttributeValueHelper(self._attributes.numSemantics) data_view.set(value) @property def semanticLabelTokenPtrs(self): data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenPtrs) return data_view.get() @semanticLabelTokenPtrs.setter def semanticLabelTokenPtrs(self, value): if self._setting_locked: raise 
og.ReadOnlyError(self._attributes.semanticLabelTokenPtrs) data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenPtrs) data_view.set(value) self.semanticLabelTokenPtrs_size = data_view.get_array_size() @property def semanticLocalTransformPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformPtr) return data_view.get() @semanticLocalTransformPtr.setter def semanticLocalTransformPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticLocalTransformPtr) data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformPtr) data_view.set(value) @property def semanticMapPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticMapPtr) return data_view.get() @semanticMapPtr.setter def semanticMapPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticMapPtr) data_view = og.AttributeValueHelper(self._attributes.semanticMapPtr) data_view.set(value) @property def semanticPrimPathPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticPrimPathPtr) return data_view.get() @semanticPrimPathPtr.setter def semanticPrimPathPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticPrimPathPtr) data_view = og.AttributeValueHelper(self._attributes.semanticPrimPathPtr) data_view.set(value) @property def semanticWorldTransformPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformPtr) return data_view.get() @semanticWorldTransformPtr.setter def semanticWorldTransformPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticWorldTransformPtr) data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformPtr) data_view.set(value) @property def stage(self): data_view = og.AttributeValueHelper(self._attributes.stage) return data_view.get() @stage.setter def stage(self, value): if self._setting_locked: raise 
og.ReadOnlyError(self._attributes.stage) data_view = og.AttributeValueHelper(self._attributes.stage) data_view.set(value) @property def swhFrameNumber(self): data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) return data_view.get() @swhFrameNumber.setter def swhFrameNumber(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.swhFrameNumber) data_view = og.AttributeValueHelper(self._attributes.swhFrameNumber) data_view.set(value) @property def testCaseIndex(self): data_view = og.AttributeValueHelper(self._attributes.testCaseIndex) return data_view.get() @testCaseIndex.setter def testCaseIndex(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.testCaseIndex) data_view = og.AttributeValueHelper(self._attributes.testCaseIndex) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def semanticFilterPredicate(self): data_view = og.AttributeValueHelper(self._attributes.semanticFilterPredicate) return data_view.get() @semanticFilterPredicate.setter def semanticFilterPredicate(self, value): data_view = 
og.AttributeValueHelper(self._attributes.semanticFilterPredicate) data_view.set(value) @property def success(self): data_view = og.AttributeValueHelper(self._attributes.success) return data_view.get() @success.setter def success(self, value): data_view = og.AttributeValueHelper(self._attributes.success) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTestInstanceMappingDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTestInstanceMappingDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTestInstanceMappingDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
16,282
Python
45.65616
251
0.6525
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostSemantic3dBoundingBoxCameraProjection Synthetic Data node to project 3d bounding boxes data in camera space. """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostSemantic3dBoundingBoxCameraProjection Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.cameraFisheyeParams inputs.cameraModel inputs.cameraNearFar inputs.exec inputs.gpu inputs.instanceMappingInfoSDPtr inputs.metersPerSceneUnit inputs.renderProductResolution inputs.rp inputs.sdSemBBoxExtentCudaPtr inputs.sdSemBBoxInfosCudaPtr inputs.semanticWorldTransformSDCudaPtr Outputs: outputs.exec outputs.sdSemBBox3dCamCornersCudaPtr outputs.sdSemBBox3dCamExtentCudaPtr Predefined Tokens: tokens.SemanticBoundingBox3DInfosSD tokens.SemanticBoundingBox3DCamCornersSD tokens.SemanticBoundingBox3DCamExtentSD """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:cameraFisheyeParams', 'float[]', 0, None, 'Camera fisheye projection parameters', {}, True, [], False, ''), ('inputs:cameraModel', 'int', 0, None, 'Camera model (pinhole or fisheye models)', 
{}, True, 0, False, ''), ('inputs:cameraNearFar', 'float2', 0, None, 'Camera near/far clipping range', {ogn.MetadataKeys.DEFAULT: '[1.0, 10000000.0]'}, True, [1.0, 10000000.0], False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:instanceMappingInfoSDPtr', 'uint64', 0, None, 'uint buffer pointer containing the following information : [numInstances, minInstanceId, numSemantics, minSemanticId, numProtoSemantic]', {}, True, 0, False, ''), ('inputs:metersPerSceneUnit', 'float', 0, None, 'Scene units to meters scale', {ogn.MetadataKeys.DEFAULT: '0.01'}, True, 0.01, False, ''), ('inputs:renderProductResolution', 'int2', 0, None, 'RenderProduct resolution', {ogn.MetadataKeys.DEFAULT: '[65536, 65536]'}, True, [65536, 65536], False, ''), ('inputs:rp', 'uint64', 0, 'renderProduct', 'Pointer to render product for this view', {}, True, 0, False, ''), ('inputs:sdSemBBoxExtentCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the extent of the bounding boxes as a float4=(u_min,v_min,u_max,v_max) for 2D or a float6=(xmin,ymin,zmin,xmax,ymax,zmax) in object space for 3D', {}, True, 0, False, ''), ('inputs:sdSemBBoxInfosCudaPtr', 'uint64', 0, None, 'Cuda buffer containing valid bounding boxes infos', {}, True, 0, False, ''), ('inputs:semanticWorldTransformSDCudaPtr', 'uint64', 0, None, 'cuda float44 buffer pointer of size numSemantics containing the world semantic transform', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:sdSemBBox3dCamCornersCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the projection of the 3d bounding boxes on the camera plane represented as a float4=(u,v,z,a) for each bounding box corners', {}, True, None, False, ''), ('outputs:sdSemBBox3dCamExtentCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the 2d extent of 
the 3d bounding boxes on the camera plane represented as a float6=(u_min,u_max,v_min,v_max,z_min,z_max)', {}, True, None, False, ''), ]) class tokens: SemanticBoundingBox3DInfosSD = "SemanticBoundingBox3DInfosSD" SemanticBoundingBox3DCamCornersSD = "SemanticBoundingBox3DCamCornersSD" SemanticBoundingBox3DCamExtentSD = "SemanticBoundingBox3DCamExtentSD" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def cameraFisheyeParams(self): data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) return data_view.get() @cameraFisheyeParams.setter def cameraFisheyeParams(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cameraFisheyeParams) data_view = og.AttributeValueHelper(self._attributes.cameraFisheyeParams) data_view.set(value) self.cameraFisheyeParams_size = data_view.get_array_size() @property def cameraModel(self): data_view = og.AttributeValueHelper(self._attributes.cameraModel) return data_view.get() @cameraModel.setter def cameraModel(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cameraModel) data_view = og.AttributeValueHelper(self._attributes.cameraModel) data_view.set(value) @property def cameraNearFar(self): data_view = 
og.AttributeValueHelper(self._attributes.cameraNearFar) return data_view.get() @cameraNearFar.setter def cameraNearFar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cameraNearFar) data_view = og.AttributeValueHelper(self._attributes.cameraNearFar) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def instanceMappingInfoSDPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) return data_view.get() @instanceMappingInfoSDPtr.setter def instanceMappingInfoSDPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMappingInfoSDPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) data_view.set(value) @property def metersPerSceneUnit(self): data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) return data_view.get() @metersPerSceneUnit.setter def metersPerSceneUnit(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.metersPerSceneUnit) data_view = og.AttributeValueHelper(self._attributes.metersPerSceneUnit) data_view.set(value) @property def renderProductResolution(self): data_view = og.AttributeValueHelper(self._attributes.renderProductResolution) return data_view.get() @renderProductResolution.setter def renderProductResolution(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductResolution) data_view 
= og.AttributeValueHelper(self._attributes.renderProductResolution) data_view.set(value) @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) @property def sdSemBBoxExtentCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) return data_view.get() @sdSemBBoxExtentCudaPtr.setter def sdSemBBoxExtentCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBoxExtentCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) data_view.set(value) @property def sdSemBBoxInfosCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) return data_view.get() @sdSemBBoxInfosCudaPtr.setter def sdSemBBoxInfosCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdSemBBoxInfosCudaPtr) data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) data_view.set(value) @property def semanticWorldTransformSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr) return data_view.get() @semanticWorldTransformSDCudaPtr.setter def semanticWorldTransformSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticWorldTransformSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def 
__init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def sdSemBBox3dCamCornersCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamCornersCudaPtr) return data_view.get() @sdSemBBox3dCamCornersCudaPtr.setter def sdSemBBox3dCamCornersCudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamCornersCudaPtr) data_view.set(value) @property def sdSemBBox3dCamExtentCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamExtentCudaPtr) return data_view.get() @sdSemBBox3dCamExtentCudaPtr.setter def sdSemBBox3dCamExtentCudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.sdSemBBox3dCamExtentCudaPtr) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase.ValuesForInputs(node, self.attributes.inputs, 
dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
14,909
Python
47.566775
256
0.666108
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTestSimFabricTimeRangeDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTestSimFabricTimeRange Testing node : on request write/update a Fabric time range of a given number of frames starting at the current simulation time. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTestSimFabricTimeRangeDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTestSimFabricTimeRange Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.numberOfFrames inputs.timeRangeBeginDenominatorToken inputs.timeRangeBeginNumeratorToken inputs.timeRangeEndDenominatorToken inputs.timeRangeEndNumeratorToken inputs.timeRangeName Outputs: outputs.exec Predefined Tokens: tokens.fc_exportToRingbuffer """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:numberOfFrames', 'uint64', 0, None, 'Number of frames to writes.', {}, True, 0, False, ''), ('inputs:timeRangeBeginDenominatorToken', 'token', 0, None, 'Attribute name of the range begin time denominator', {ogn.MetadataKeys.DEFAULT: '"timeRangeStartDenominator"'}, True, "timeRangeStartDenominator", False, ''), ('inputs:timeRangeBeginNumeratorToken', 'token', 0, None, 'Attribute name of the range begin time numerator', {ogn.MetadataKeys.DEFAULT: 
'"timeRangeStartNumerator"'}, True, "timeRangeStartNumerator", False, ''), ('inputs:timeRangeEndDenominatorToken', 'token', 0, None, 'Attribute name of the range end time denominator', {ogn.MetadataKeys.DEFAULT: '"timeRangeEndDenominator"'}, True, "timeRangeEndDenominator", False, ''), ('inputs:timeRangeEndNumeratorToken', 'token', 0, None, 'Attribute name of the range end time numerator', {ogn.MetadataKeys.DEFAULT: '"timeRangeEndNumerator"'}, True, "timeRangeEndNumerator", False, ''), ('inputs:timeRangeName', 'token', 0, None, 'Time range name used to write to the Fabric.', {ogn.MetadataKeys.DEFAULT: '"TestSimFabricTimeRangeSD"'}, True, "TestSimFabricTimeRangeSD", False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ]) class tokens: fc_exportToRingbuffer = "fc_exportToRingbuffer" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def numberOfFrames(self): data_view = og.AttributeValueHelper(self._attributes.numberOfFrames) return data_view.get() @numberOfFrames.setter def numberOfFrames(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.numberOfFrames) data_view = og.AttributeValueHelper(self._attributes.numberOfFrames) data_view.set(value) @property def timeRangeBeginDenominatorToken(self): data_view = 
og.AttributeValueHelper(self._attributes.timeRangeBeginDenominatorToken) return data_view.get() @timeRangeBeginDenominatorToken.setter def timeRangeBeginDenominatorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeBeginDenominatorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginDenominatorToken) data_view.set(value) @property def timeRangeBeginNumeratorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumeratorToken) return data_view.get() @timeRangeBeginNumeratorToken.setter def timeRangeBeginNumeratorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeBeginNumeratorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeBeginNumeratorToken) data_view.set(value) @property def timeRangeEndDenominatorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominatorToken) return data_view.get() @timeRangeEndDenominatorToken.setter def timeRangeEndDenominatorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeEndDenominatorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeEndDenominatorToken) data_view.set(value) @property def timeRangeEndNumeratorToken(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumeratorToken) return data_view.get() @timeRangeEndNumeratorToken.setter def timeRangeEndNumeratorToken(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeEndNumeratorToken) data_view = og.AttributeValueHelper(self._attributes.timeRangeEndNumeratorToken) data_view.set(value) @property def timeRangeName(self): data_view = og.AttributeValueHelper(self._attributes.timeRangeName) return data_view.get() @timeRangeName.setter def timeRangeName(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.timeRangeName) data_view = 
og.AttributeValueHelper(self._attributes.timeRangeName) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTestSimFabricTimeRangeDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTestSimFabricTimeRangeDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = 
self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTestSimFabricTimeRangeDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
9,546
Python
48.466321
227
0.68238
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdTimeChangeExecutionDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdTimeChangeExecution Set its execution output if the input rational time is more recent that the last registered time. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdTimeChangeExecutionDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdTimeChangeExecution Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.errorOnFutureChange inputs.exec inputs.lastUpdateTimeDenominator inputs.lastUpdateTimeNumerator inputs.renderResults Outputs: outputs.exec """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:errorOnFutureChange', 'bool', 0, None, 'Print error if the last update is in the future.', {ogn.MetadataKeys.DEFAULT: 'false'}, True, False, False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:lastUpdateTimeDenominator', 'uint64', 0, None, 'Time denominator of the last time change', {}, True, 0, False, ''), ('inputs:lastUpdateTimeNumerator', 'int64', 0, None, 'Time numerator of the last time change', {}, True, 0, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, 
True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def errorOnFutureChange(self): data_view = og.AttributeValueHelper(self._attributes.errorOnFutureChange) return data_view.get() @errorOnFutureChange.setter def errorOnFutureChange(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.errorOnFutureChange) data_view = og.AttributeValueHelper(self._attributes.errorOnFutureChange) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def lastUpdateTimeDenominator(self): data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator) return data_view.get() @lastUpdateTimeDenominator.setter def lastUpdateTimeDenominator(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.lastUpdateTimeDenominator) data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator) data_view.set(value) @property def lastUpdateTimeNumerator(self): data_view = 
og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator) return data_view.get() @lastUpdateTimeNumerator.setter def lastUpdateTimeNumerator(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.lastUpdateTimeNumerator) data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context 
= node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdTimeChangeExecutionDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdTimeChangeExecutionDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdTimeChangeExecutionDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
8,054
Python
45.560693
169
0.663894
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdFrameIdentifierDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdFrameIdentifier Synthetic Data node to expose pipeline frame identifier. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdFrameIdentifierDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdFrameIdentifier Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.renderResults Outputs: outputs.durationDenominator outputs.durationNumerator outputs.exec outputs.externalTimeOfSimNs outputs.frameNumber outputs.rationalTimeOfSimDenominator outputs.rationalTimeOfSimNumerator outputs.sampleTimeOffsetInSimFrames outputs.type Predefined Tokens: tokens.NoFrameNumber tokens.FrameNumber tokens.ConstantFramerateFrameNumber """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results', {}, True, 0, False, ''), ('outputs:durationDenominator', 'uint64', 0, None, 'Duration denominator.\nOnly valid if eConstantFramerateFrameNumber', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:durationNumerator', 'int64', 0, None, 'Duration numerator.\nOnly valid if 
eConstantFramerateFrameNumber.', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes for each newFrame event received', {}, True, None, False, ''), ('outputs:externalTimeOfSimNs', 'int64', 0, None, 'External time in Ns.\nOnly valid if eConstantFramerateFrameNumber.', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, ''), ('outputs:frameNumber', 'int64', 0, None, 'Frame number.\nValid if eFrameNumber or eConstantFramerateFrameNumber.', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, ''), ('outputs:rationalTimeOfSimDenominator', 'uint64', 0, None, 'rational time of simulation denominator.', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:rationalTimeOfSimNumerator', 'int64', 0, None, 'rational time of simulation numerator.', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:sampleTimeOffsetInSimFrames', 'uint64', 0, None, 'Sample time offset.\nOnly valid if eConstantFramerateFrameNumber.', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:type', 'token', 0, None, 'Type of the frame identifier.', {ogn.MetadataKeys.ALLOWED_TOKENS: 'NoFrameNumber,FrameNumber,ConstantFramerateFrameNumber', ogn.MetadataKeys.ALLOWED_TOKENS_RAW: '["NoFrameNumber", "FrameNumber", "ConstantFramerateFrameNumber"]', ogn.MetadataKeys.DEFAULT: '"NoFrameNumber"'}, True, "NoFrameNumber", False, ''), ]) class tokens: NoFrameNumber = "NoFrameNumber" FrameNumber = "FrameNumber" ConstantFramerateFrameNumber = "ConstantFramerateFrameNumber" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: 
og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def durationDenominator(self): data_view = og.AttributeValueHelper(self._attributes.durationDenominator) return data_view.get() @durationDenominator.setter def durationDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.durationDenominator) data_view.set(value) @property def durationNumerator(self): data_view = 
og.AttributeValueHelper(self._attributes.durationNumerator) return data_view.get() @durationNumerator.setter def durationNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.durationNumerator) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def externalTimeOfSimNs(self): data_view = og.AttributeValueHelper(self._attributes.externalTimeOfSimNs) return data_view.get() @externalTimeOfSimNs.setter def externalTimeOfSimNs(self, value): data_view = og.AttributeValueHelper(self._attributes.externalTimeOfSimNs) data_view.set(value) @property def frameNumber(self): data_view = og.AttributeValueHelper(self._attributes.frameNumber) return data_view.get() @frameNumber.setter def frameNumber(self, value): data_view = og.AttributeValueHelper(self._attributes.frameNumber) data_view.set(value) @property def rationalTimeOfSimDenominator(self): data_view = og.AttributeValueHelper(self._attributes.rationalTimeOfSimDenominator) return data_view.get() @rationalTimeOfSimDenominator.setter def rationalTimeOfSimDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.rationalTimeOfSimDenominator) data_view.set(value) @property def rationalTimeOfSimNumerator(self): data_view = og.AttributeValueHelper(self._attributes.rationalTimeOfSimNumerator) return data_view.get() @rationalTimeOfSimNumerator.setter def rationalTimeOfSimNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.rationalTimeOfSimNumerator) data_view.set(value) @property def sampleTimeOffsetInSimFrames(self): data_view = og.AttributeValueHelper(self._attributes.sampleTimeOffsetInSimFrames) return data_view.get() @sampleTimeOffsetInSimFrames.setter def sampleTimeOffsetInSimFrames(self, value): data_view = 
og.AttributeValueHelper(self._attributes.sampleTimeOffsetInSimFrames) data_view.set(value) @property def type(self): data_view = og.AttributeValueHelper(self._attributes.type) return data_view.get() @type.setter def type(self, value): data_view = og.AttributeValueHelper(self._attributes.type) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdFrameIdentifierDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdFrameIdentifierDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdFrameIdentifierDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
11,174
Python
46.151899
353
0.667442
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdSimInstanceMappingDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdSimInstanceMapping Synthetic Data node to update and cache the instance mapping data """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdSimInstanceMappingDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdSimInstanceMapping Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.needTransform inputs.semanticFilterPredicate Outputs: outputs.exec outputs.semanticFilterPredicate """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:needTransform', 'bool', 0, None, 'If true compute the semantic entities world and object transforms', {ogn.MetadataKeys.DEFAULT: 'true'}, True, True, False, ''), ('inputs:semanticFilterPredicate', 'token', 0, None, 'The semantic filter predicate : a disjunctive normal form of semantic type and label', {ogn.MetadataKeys.DEFAULT: '"*:*"'}, True, "*:*", False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:semanticFilterPredicate', 'token', 0, None, 'The semantic filter predicate in normalized form', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node 
type""" role_data = super()._populate_role_data() role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def needTransform(self): data_view = og.AttributeValueHelper(self._attributes.needTransform) return data_view.get() @needTransform.setter def needTransform(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.needTransform) data_view = og.AttributeValueHelper(self._attributes.needTransform) data_view.set(value) @property def semanticFilterPredicate(self): data_view = og.AttributeValueHelper(self._attributes.semanticFilterPredicate) return data_view.get() @semanticFilterPredicate.setter def semanticFilterPredicate(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticFilterPredicate) data_view = og.AttributeValueHelper(self._attributes.semanticFilterPredicate) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, 
dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def semanticFilterPredicate(self): data_view = og.AttributeValueHelper(self._attributes.semanticFilterPredicate) return data_view.get() @semanticFilterPredicate.setter def semanticFilterPredicate(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticFilterPredicate) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdSimInstanceMappingDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdSimInstanceMappingDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdSimInstanceMappingDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
6,784
Python
46.78169
210
0.674971
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdRenderVarToRawArrayDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdRenderVarToRawArray

Synthetic Data action node to copy the input rendervar into an output raw array
"""

import numpy
import omni.graph.core as og
import omni.graph.core._omni_graph_core as _og
import omni.graph.tools.ogn as ogn


class OgnSdRenderVarToRawArrayDatabase(og.Database):
    """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdRenderVarToRawArray

    Class Members:
        node: Node being evaluated

    Attribute Value Properties:
        Inputs:
            inputs.cudaStream
            inputs.exec
            inputs.renderResults
            inputs.renderVar
        Outputs:
            outputs.bufferSize
            outputs.cudaStream
            outputs.data
            outputs.exec
            outputs.format
            outputs.height
            outputs.strides
            outputs.width
    """

    # Imprint the generator and target ABI versions in the file for JIT generation
    GENERATOR_VERSION = (1, 41, 3)
    TARGET_VERSION = (2, 139, 12)

    # This is an internal object that provides per-class storage of a per-node data dictionary
    PER_NODE_DATA = {}

    # This is an internal object that describes unchanging attributes in a generic way
    # The values in this list are in no particular order, as a per-attribute tuple
    #     Name, Type, ExtendedTypeIndex, UiName, Description, Metadata,
    #     Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg
    # You should not need to access any of this data directly, use the defined database interfaces
    INTERFACE = og.Database._get_interface([
        ('inputs:cudaStream', 'uint64', 0, None, 'Pointer to the CUDA stream', {}, True, 0, False, ''),
        ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''),
        ('inputs:renderResults', 'uint64', 0, None, 'Render results pointer', {}, True, 0, False, ''),
        ('inputs:renderVar', 'token', 0, None, 'Name of the renderVar', {}, True, "", False, ''),
        ('outputs:bufferSize', 'uint64', 0, None, 'Size (in bytes) of the buffer (0 if the input is a texture)', {}, True, None, False, ''),
        ('outputs:cudaStream', 'uint64', 0, None, 'Pointer to the CUDA stream', {}, True, None, False, ''),
        ('outputs:data', 'uchar[]', 0, None, 'Buffer array data', {ogn.MetadataKeys.MEMORY_TYPE: 'any', ogn.MetadataKeys.DEFAULT: '[]'}, True, [], False, ''),
        ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''),
        ('outputs:format', 'uint64', 0, None, 'Format', {}, True, None, False, ''),
        ('outputs:height', 'uint', 0, None, 'Height (0 if the input is a buffer)', {}, True, None, False, ''),
        ('outputs:strides', 'int2', 0, None, 'Strides (in bytes) ([0,0] if the input is a buffer)', {}, True, None, False, ''),
        ('outputs:width', 'uint', 0, None, 'Width (0 if the input is a buffer)', {}, True, None, False, ''),
    ])

    @classmethod
    def _populate_role_data(cls):
        """Populate a role structure with the non-default roles on this node type"""
        role_data = super()._populate_role_data()
        role_data.inputs.exec = og.AttributeRole.EXECUTION
        role_data.outputs.exec = og.AttributeRole.EXECUTION
        return role_data

    class ValuesForInputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to input attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            self._batchedReadAttributes = []
            self._batchedReadValues = []

        @property
        def cudaStream(self):
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            return data_view.get()

        @cudaStream.setter
        def cudaStream(self, value):
            # Inputs may not be written while the node is being evaluated
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.cudaStream)
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            data_view.set(value)

        @property
        def exec(self):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            return data_view.get()

        @exec.setter
        def exec(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.exec)
            data_view = og.AttributeValueHelper(self._attributes.exec)
            data_view.set(value)

        @property
        def renderResults(self):
            data_view = og.AttributeValueHelper(self._attributes.renderResults)
            return data_view.get()

        @renderResults.setter
        def renderResults(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.renderResults)
            data_view = og.AttributeValueHelper(self._attributes.renderResults)
            data_view.set(value)

        @property
        def renderVar(self):
            data_view = og.AttributeValueHelper(self._attributes.renderVar)
            return data_view.get()

        @renderVar.setter
        def renderVar(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.renderVar)
            data_view = og.AttributeValueHelper(self._attributes.renderVar)
            data_view.set(value)

        def _prefetch(self):
            """Batch-fetch all queued input attribute values in a single ABI call."""
            readAttributes = self._batchedReadAttributes
            newValues = _og._prefetch_input_attributes_data(readAttributes)
            if len(readAttributes) == len(newValues):
                self._batchedReadValues = newValues

    class ValuesForOutputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to output attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            # Element count reserved for the outputs:data array when getting it for write
            self.data_size = 0
            self._batchedWriteValues = { }

        @property
        def bufferSize(self):
            data_view = og.AttributeValueHelper(self._attributes.bufferSize)
            return data_view.get()

        @bufferSize.setter
        def bufferSize(self, value):
            data_view = og.AttributeValueHelper(self._attributes.bufferSize)
            data_view.set(value)

        @property
        def cudaStream(self):
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            return data_view.get()

        @cudaStream.setter
        def cudaStream(self, value):
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            data_view.set(value)

        class __data:
            """Accessor for the 'any'-memory outputs:data attribute, exposing it on CPU or GPU."""

            def __init__(self, parent):
                self._parent = parent

            @property
            def cpu(self):
                data_view = og.AttributeValueHelper(self._parent._attributes.data)
                return data_view.get(reserved_element_count=self._parent.data_size)

            @cpu.setter
            def cpu(self, value):
                # BUGFIX: the node has no 'cpu' attribute and the parent only tracks
                # data_size — write through the real 'data' attribute and keep its
                # reserved size in sync, mirroring the cpu getter above.
                data_view = og.AttributeValueHelper(self._parent._attributes.data)
                data_view.set(value)
                self._parent.data_size = data_view.get_array_size()

            @property
            def gpu(self):
                data_view = og.AttributeValueHelper(self._parent._attributes.data)
                data_view.gpu_ptr_kind = og.PtrToPtrKind.CPU
                return data_view.get(reserved_element_count=self._parent.data_size, on_gpu=True)

            @gpu.setter
            def gpu(self, value):
                # BUGFIX: same as the cpu setter, and the write must target GPU
                # memory (on_gpu=True) with the same pointer kind as the gpu getter.
                data_view = og.AttributeValueHelper(self._parent._attributes.data)
                data_view.gpu_ptr_kind = og.PtrToPtrKind.CPU
                data_view.set(value, on_gpu=True)
                self._parent.data_size = data_view.get_array_size()

        @property
        def data(self):
            """Return a cpu/gpu accessor pair for outputs:data (memory type 'any')."""
            return self.__class__.__data(self)

        @property
        def exec(self):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            return data_view.get()

        @exec.setter
        def exec(self, value):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            data_view.set(value)

        @property
        def format(self):
            data_view = og.AttributeValueHelper(self._attributes.format)
            return data_view.get()

        @format.setter
        def format(self, value):
            data_view = og.AttributeValueHelper(self._attributes.format)
            data_view.set(value)

        @property
        def height(self):
            data_view = og.AttributeValueHelper(self._attributes.height)
            return data_view.get()

        @height.setter
        def height(self, value):
            data_view = og.AttributeValueHelper(self._attributes.height)
            data_view.set(value)

        @property
        def strides(self):
            data_view = og.AttributeValueHelper(self._attributes.strides)
            return data_view.get()

        @strides.setter
        def strides(self, value):
            data_view = og.AttributeValueHelper(self._attributes.strides)
            data_view.set(value)

        @property
        def width(self):
            data_view = og.AttributeValueHelper(self._attributes.width)
            return data_view.get()

        @width.setter
        def width(self, value):
            data_view = og.AttributeValueHelper(self._attributes.width)
            data_view.set(value)

        def _commit(self):
            """Flush all batched output attribute writes in a single ABI call."""
            _og._commit_output_attributes_data(self._batchedWriteValues)
            self._batchedWriteValues = { }

    class ValuesForState(og.DynamicAttributeAccess):
        """Helper class that creates natural hierarchical access to state attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)

    def __init__(self, node):
        """Build the per-port accessor objects for the given node."""
        super().__init__(node)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT)
        self.inputs = OgnSdRenderVarToRawArrayDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT)
        self.outputs = OgnSdRenderVarToRawArrayDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE)
        self.state = OgnSdRenderVarToRawArrayDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
11,318
Python
41.552631
158
0.621311
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdOnNewFrameDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdOnNewFrame

Synthetic Data postprocess node to execute pipeline after the NewFrame event has been received
"""

import numpy
import omni.graph.core as og
import omni.graph.core._omni_graph_core as _og
import omni.graph.tools.ogn as ogn


class OgnSdOnNewFrameDatabase(og.Database):
    """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdOnNewFrame

    Class Members:
        node: Node being evaluated

    Attribute Value Properties:
        Outputs:
            outputs.cudaStream
            outputs.exec
            outputs.referenceTimeDenominator
            outputs.referenceTimeNumerator
            outputs.renderProductDataPtrs
            outputs.renderProductPaths
    """

    # Imprint the generator and target ABI versions in the file for JIT generation
    GENERATOR_VERSION = (1, 41, 3)
    TARGET_VERSION = (2, 139, 12)

    # This is an internal object that provides per-class storage of a per-node data dictionary
    PER_NODE_DATA = {}

    # This is an internal object that describes unchanging attributes in a generic way
    # The values in this list are in no particular order, as a per-attribute tuple
    #     Name, Type, ExtendedTypeIndex, UiName, Description, Metadata,
    #     Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg
    # You should not need to access any of this data directly, use the defined database interfaces
    INTERFACE = og.Database._get_interface([
        ('outputs:cudaStream', 'uint64', 0, None, 'Cuda stream', {}, True, None, False, ''),
        ('outputs:exec', 'execution', 0, None, 'Executes for each newFrame event received', {}, True, None, False, ''),
        ('outputs:referenceTimeDenominator', 'uint64', 0, None, 'Reference time represented as a rational number : denominator', {}, True, None, False, ''),
        ('outputs:referenceTimeNumerator', 'int64', 0, None, 'Reference time represented as a rational number : numerator', {}, True, None, False, ''),
        ('outputs:renderProductDataPtrs', 'uint64[]', 0, None, 'HydraRenderProduct data pointer.', {}, True, None, False, ''),
        ('outputs:renderProductPaths', 'token[]', 0, None, 'Render product path tokens.', {}, True, None, False, ''),
    ])

    @classmethod
    def _populate_role_data(cls):
        """Populate a role structure with the non-default roles on this node type"""
        role_data = super()._populate_role_data()
        # Only the exec output carries a non-default (EXECUTION) role
        role_data.outputs.exec = og.AttributeRole.EXECUTION
        return role_data

    class ValuesForInputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to input attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            # This node declares no static inputs; these lists exist only for
            # batched reads of dynamic attributes
            self._batchedReadAttributes = []
            self._batchedReadValues = []

        def _prefetch(self):
            # Batch-fetch all queued input attribute values in a single ABI call
            readAttributes = self._batchedReadAttributes
            newValues = _og._prefetch_input_attributes_data(readAttributes)
            if len(readAttributes) == len(newValues):
                self._batchedReadValues = newValues

    class ValuesForOutputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to output attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            # Reserved element counts for the two array outputs, set on write and
            # used to size the buffer when the arrays are fetched for writing
            self.renderProductDataPtrs_size = None
            self.renderProductPaths_size = None
            self._batchedWriteValues = { }

        @property
        def cudaStream(self):
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            return data_view.get()

        @cudaStream.setter
        def cudaStream(self, value):
            data_view = og.AttributeValueHelper(self._attributes.cudaStream)
            data_view.set(value)

        @property
        def exec(self):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            return data_view.get()

        @exec.setter
        def exec(self, value):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            data_view.set(value)

        @property
        def referenceTimeDenominator(self):
            data_view = og.AttributeValueHelper(self._attributes.referenceTimeDenominator)
            return data_view.get()

        @referenceTimeDenominator.setter
        def referenceTimeDenominator(self, value):
            data_view = og.AttributeValueHelper(self._attributes.referenceTimeDenominator)
            data_view.set(value)

        @property
        def referenceTimeNumerator(self):
            data_view = og.AttributeValueHelper(self._attributes.referenceTimeNumerator)
            return data_view.get()

        @referenceTimeNumerator.setter
        def referenceTimeNumerator(self, value):
            data_view = og.AttributeValueHelper(self._attributes.referenceTimeNumerator)
            data_view.set(value)

        @property
        def renderProductDataPtrs(self):
            data_view = og.AttributeValueHelper(self._attributes.renderProductDataPtrs)
            return data_view.get(reserved_element_count=self.renderProductDataPtrs_size)

        @renderProductDataPtrs.setter
        def renderProductDataPtrs(self, value):
            data_view = og.AttributeValueHelper(self._attributes.renderProductDataPtrs)
            data_view.set(value)
            # Remember the array size so later get() calls reserve enough elements
            self.renderProductDataPtrs_size = data_view.get_array_size()

        @property
        def renderProductPaths(self):
            data_view = og.AttributeValueHelper(self._attributes.renderProductPaths)
            return data_view.get(reserved_element_count=self.renderProductPaths_size)

        @renderProductPaths.setter
        def renderProductPaths(self, value):
            data_view = og.AttributeValueHelper(self._attributes.renderProductPaths)
            data_view.set(value)
            self.renderProductPaths_size = data_view.get_array_size()

        def _commit(self):
            # Flush all batched output attribute writes in a single ABI call
            _og._commit_output_attributes_data(self._batchedWriteValues)
            self._batchedWriteValues = { }

    class ValuesForState(og.DynamicAttributeAccess):
        """Helper class that creates natural hierarchical access to state attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)

    def __init__(self, node):
        # Build the per-port accessor objects for the given node
        super().__init__(node)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT)
        self.inputs = OgnSdOnNewFrameDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT)
        self.outputs = OgnSdOnNewFrameDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE)
        self.state = OgnSdOnNewFrameDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
7,864
Python
46.379518
156
0.673449
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostInstanceMappingDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostInstanceMapping

Synthetic Data node to compute and store scene instances semantic hierarchy information
"""

import omni.graph.core as og
import omni.graph.core._omni_graph_core as _og
import omni.graph.tools.ogn as ogn


class OgnSdPostInstanceMappingDatabase(og.Database):
    """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostInstanceMapping

    Class Members:
        node: Node being evaluated

    Attribute Value Properties:
        Inputs:
            inputs.exec
            inputs.gpu
            inputs.rp
            inputs.semanticFilterName
        Outputs:
            outputs.exec
            outputs.instanceMapSDCudaPtr
            outputs.instanceMappingInfoSDPtr
            outputs.instancePrimTokenSDCudaPtr
            outputs.lastUpdateTimeDenominator
            outputs.lastUpdateTimeNumerator
            outputs.semanticLabelTokenSDCudaPtr
            outputs.semanticLocalTransformSDCudaPtr
            outputs.semanticMapSDCudaPtr
            outputs.semanticPrimTokenSDCudaPtr
            outputs.semanticWorldTransformSDCudaPtr

    Predefined Tokens:
        tokens.InstanceMappingInfoSDhost
        tokens.SemanticMapSD
        tokens.SemanticMapSDhost
        tokens.SemanticPrimTokenSD
        tokens.SemanticPrimTokenSDhost
        tokens.InstanceMapSD
        tokens.InstanceMapSDhost
        tokens.InstancePrimTokenSD
        tokens.InstancePrimTokenSDhost
        tokens.SemanticLabelTokenSD
        tokens.SemanticLabelTokenSDhost
        tokens.SemanticLocalTransformSD
        tokens.SemanticLocalTransformSDhost
        tokens.SemanticWorldTransformSD
        tokens.SemanticWorldTransformSDhost
    """

    # Imprint the generator and target ABI versions in the file for JIT generation
    GENERATOR_VERSION = (1, 41, 3)
    TARGET_VERSION = (2, 139, 12)

    # This is an internal object that provides per-class storage of a per-node data dictionary
    PER_NODE_DATA = {}

    # This is an internal object that describes unchanging attributes in a generic way
    # The values in this list are in no particular order, as a per-attribute tuple
    #     Name, Type, ExtendedTypeIndex, UiName, Description, Metadata,
    #     Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg
    # You should not need to access any of this data directly, use the defined database interfaces
    INTERFACE = og.Database._get_interface([
        ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''),
        ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''),
        ('inputs:rp', 'uint64', 0, 'renderProduct', 'Pointer to render product for this view', {}, True, 0, False, ''),
        ('inputs:semanticFilterName', 'token', 0, None, 'Name of the semantic filter to apply to the semanticLabelToken', {ogn.MetadataKeys.DEFAULT: '"default"'}, True, "default", False, ''),
        ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''),
        ('outputs:instanceMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numInstances containing the instance parent semantic index', {}, True, None, False, ''),
        ('outputs:instanceMappingInfoSDPtr', 'uint64', 0, None, 'uint buffer pointer containing the following information :\n[ numInstances, minInstanceId, numSemantics, minSemanticId, numProtoSemantic,\n lastUpdateTimeNumeratorHigh, lastUpdateTimeNumeratorLow, , lastUpdateTimeDenominatorHigh, lastUpdateTimeDenominatorLow ]', {}, True, None, False, ''),
        ('outputs:instancePrimTokenSDCudaPtr', 'uint64', 0, None, 'cuda uint64_t buffer pointer of size numInstances containing the instance path token', {}, True, None, False, ''),
        ('outputs:lastUpdateTimeDenominator', 'uint64', 0, None, 'Time denominator of the last time the data has changed', {}, True, None, False, ''),
        ('outputs:lastUpdateTimeNumerator', 'int64', 0, None, 'Time numerator of the last time the data has changed', {}, True, None, False, ''),
        ('outputs:semanticLabelTokenSDCudaPtr', 'uint64', 0, None, 'cuda uint64_t buffer pointer of size numSemantics containing the semantic label token', {}, True, None, False, ''),
        ('outputs:semanticLocalTransformSDCudaPtr', 'uint64', 0, None, 'cuda float44 buffer pointer of size numSemantics containing the local semantic transform', {}, True, None, False, ''),
        ('outputs:semanticMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numSemantics containing the semantic parent semantic index', {}, True, None, False, ''),
        ('outputs:semanticPrimTokenSDCudaPtr', 'uint64', 0, None, 'cuda uint32_t buffer pointer of size numSemantics containing the prim part of the semantic path token', {}, True, None, False, ''),
        ('outputs:semanticWorldTransformSDCudaPtr', 'uint64', 0, None, 'cuda float44 buffer pointer of size numSemantics containing the world semantic transform', {}, True, None, False, ''),
    ])

    class tokens:
        # Token constants naming the render variables produced/consumed by this node
        InstanceMappingInfoSDhost = "InstanceMappingInfoSDhost"
        SemanticMapSD = "SemanticMapSD"
        SemanticMapSDhost = "SemanticMapSDhost"
        SemanticPrimTokenSD = "SemanticPrimTokenSD"
        SemanticPrimTokenSDhost = "SemanticPrimTokenSDhost"
        InstanceMapSD = "InstanceMapSD"
        InstanceMapSDhost = "InstanceMapSDhost"
        InstancePrimTokenSD = "InstancePrimTokenSD"
        InstancePrimTokenSDhost = "InstancePrimTokenSDhost"
        SemanticLabelTokenSD = "SemanticLabelTokenSD"
        SemanticLabelTokenSDhost = "SemanticLabelTokenSDhost"
        SemanticLocalTransformSD = "SemanticLocalTransformSD"
        SemanticLocalTransformSDhost = "SemanticLocalTransformSDhost"
        SemanticWorldTransformSD = "SemanticWorldTransformSD"
        SemanticWorldTransformSDhost = "SemanticWorldTransformSDhost"

    @classmethod
    def _populate_role_data(cls):
        """Populate a role structure with the non-default roles on this node type"""
        role_data = super()._populate_role_data()
        role_data.inputs.exec = og.AttributeRole.EXECUTION
        role_data.outputs.exec = og.AttributeRole.EXECUTION
        return role_data

    class ValuesForInputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to input attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            self._batchedReadAttributes = []
            self._batchedReadValues = []

        @property
        def exec(self):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            return data_view.get()

        @exec.setter
        def exec(self, value):
            # Inputs may not be written while the node is being evaluated
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.exec)
            data_view = og.AttributeValueHelper(self._attributes.exec)
            data_view.set(value)

        @property
        def gpu(self):
            data_view = og.AttributeValueHelper(self._attributes.gpu)
            return data_view.get()

        @gpu.setter
        def gpu(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.gpu)
            data_view = og.AttributeValueHelper(self._attributes.gpu)
            data_view.set(value)

        @property
        def rp(self):
            data_view = og.AttributeValueHelper(self._attributes.rp)
            return data_view.get()

        @rp.setter
        def rp(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.rp)
            data_view = og.AttributeValueHelper(self._attributes.rp)
            data_view.set(value)

        @property
        def semanticFilterName(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticFilterName)
            return data_view.get()

        @semanticFilterName.setter
        def semanticFilterName(self, value):
            if self._setting_locked:
                raise og.ReadOnlyError(self._attributes.semanticFilterName)
            data_view = og.AttributeValueHelper(self._attributes.semanticFilterName)
            data_view.set(value)

        def _prefetch(self):
            # Batch-fetch all queued input attribute values in a single ABI call
            readAttributes = self._batchedReadAttributes
            newValues = _og._prefetch_input_attributes_data(readAttributes)
            if len(readAttributes) == len(newValues):
                self._batchedReadValues = newValues

    class ValuesForOutputs(og.DynamicAttributeAccess):
        LOCAL_PROPERTY_NAMES = { }
        """Helper class that creates natural hierarchical access to output attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)
            self._batchedWriteValues = { }

        @property
        def exec(self):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            return data_view.get()

        @exec.setter
        def exec(self, value):
            data_view = og.AttributeValueHelper(self._attributes.exec)
            data_view.set(value)

        @property
        def instanceMapSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr)
            return data_view.get()

        @instanceMapSDCudaPtr.setter
        def instanceMapSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr)
            data_view.set(value)

        @property
        def instanceMappingInfoSDPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr)
            return data_view.get()

        @instanceMappingInfoSDPtr.setter
        def instanceMappingInfoSDPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr)
            data_view.set(value)

        @property
        def instancePrimTokenSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.instancePrimTokenSDCudaPtr)
            return data_view.get()

        @instancePrimTokenSDCudaPtr.setter
        def instancePrimTokenSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.instancePrimTokenSDCudaPtr)
            data_view.set(value)

        @property
        def lastUpdateTimeDenominator(self):
            data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator)
            return data_view.get()

        @lastUpdateTimeDenominator.setter
        def lastUpdateTimeDenominator(self, value):
            data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator)
            data_view.set(value)

        @property
        def lastUpdateTimeNumerator(self):
            data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator)
            return data_view.get()

        @lastUpdateTimeNumerator.setter
        def lastUpdateTimeNumerator(self, value):
            data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator)
            data_view.set(value)

        @property
        def semanticLabelTokenSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenSDCudaPtr)
            return data_view.get()

        @semanticLabelTokenSDCudaPtr.setter
        def semanticLabelTokenSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenSDCudaPtr)
            data_view.set(value)

        @property
        def semanticLocalTransformSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformSDCudaPtr)
            return data_view.get()

        @semanticLocalTransformSDCudaPtr.setter
        def semanticLocalTransformSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformSDCudaPtr)
            data_view.set(value)

        @property
        def semanticMapSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr)
            return data_view.get()

        @semanticMapSDCudaPtr.setter
        def semanticMapSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr)
            data_view.set(value)

        @property
        def semanticPrimTokenSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticPrimTokenSDCudaPtr)
            return data_view.get()

        @semanticPrimTokenSDCudaPtr.setter
        def semanticPrimTokenSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.semanticPrimTokenSDCudaPtr)
            data_view.set(value)

        @property
        def semanticWorldTransformSDCudaPtr(self):
            data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr)
            return data_view.get()

        @semanticWorldTransformSDCudaPtr.setter
        def semanticWorldTransformSDCudaPtr(self, value):
            data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformSDCudaPtr)
            data_view.set(value)

        def _commit(self):
            # Flush all batched output attribute writes in a single ABI call
            _og._commit_output_attributes_data(self._batchedWriteValues)
            self._batchedWriteValues = { }

    class ValuesForState(og.DynamicAttributeAccess):
        """Helper class that creates natural hierarchical access to state attributes"""

        def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface):
            """Initialize simplified access for the attribute data"""
            context = node.get_graph().get_default_graph_context()
            super().__init__(context, node, attributes, dynamic_attributes)

    def __init__(self, node):
        # Build the per-port accessor objects for the given node
        super().__init__(node)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT)
        self.inputs = OgnSdPostInstanceMappingDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT)
        self.outputs = OgnSdPostInstanceMappingDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes)
        dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE)
        self.state = OgnSdPostInstanceMappingDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
15,172
Python
47.476038
356
0.679475
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdLinearArrayToTextureDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdLinearArrayToTexture Synthetic Data node to copy the input buffer array into a texture for visualization """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdLinearArrayToTextureDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdLinearArrayToTexture Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.data inputs.exec inputs.height inputs.sdDisplayCudaMipmappedArray inputs.sdDisplayFormat inputs.sdDisplayHeight inputs.sdDisplayStream inputs.sdDisplayWidth inputs.stream inputs.width Outputs: outputs.cudaPtr outputs.exec outputs.format outputs.handlePtr outputs.height outputs.stream outputs.width """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:data', 'float4[]', 0, None, 'Buffer array data', {ogn.MetadataKeys.MEMORY_TYPE: 'cuda'}, True, [], False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:height', 'uint', 0, None, 'Buffer array height', {}, True, 0, False, ''), ('inputs:sdDisplayCudaMipmappedArray', 'uint64', 0, None, 'Visualization texture CUDA mipmapped array pointer', {}, True, 0, False, ''), ('inputs:sdDisplayFormat', 'uint64', 0, None, 
'Visualization texture format', {}, True, 0, False, ''), ('inputs:sdDisplayHeight', 'uint', 0, None, 'Visualization texture Height', {}, True, 0, False, ''), ('inputs:sdDisplayStream', 'uint64', 0, None, 'Visualization texture CUDA stream pointer', {}, True, 0, False, ''), ('inputs:sdDisplayWidth', 'uint', 0, None, 'Visualization texture width', {}, True, 0, False, ''), ('inputs:stream', 'uint64', 0, None, 'Pointer to the CUDA Stream', {}, True, 0, False, ''), ('inputs:width', 'uint', 0, None, 'Buffer array width', {}, True, 0, False, ''), ('outputs:cudaPtr', 'uint64', 0, None, 'Display texture CUDA pointer', {}, True, None, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:format', 'uint64', 0, None, 'Display texture format', {}, True, None, False, ''), ('outputs:handlePtr', 'uint64', 0, None, 'Display texture handle reference', {}, True, None, False, ''), ('outputs:height', 'uint', 0, None, 'Display texture height', {}, True, None, False, ''), ('outputs:stream', 'uint64', 0, None, 'Output texture CUDA stream pointer', {}, True, None, False, ''), ('outputs:width', 'uint', 0, None, 'Display texture width', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] 
self._batchedReadValues = [] @property def data(self): data_view = og.AttributeValueHelper(self._attributes.data) return data_view.get(on_gpu=True) @data.setter def data(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.data) data_view = og.AttributeValueHelper(self._attributes.data) data_view.set(value, on_gpu=True) self.data_size = data_view.get_array_size() @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.height) data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) @property def sdDisplayCudaMipmappedArray(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayCudaMipmappedArray) return data_view.get() @sdDisplayCudaMipmappedArray.setter def sdDisplayCudaMipmappedArray(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayCudaMipmappedArray) data_view = og.AttributeValueHelper(self._attributes.sdDisplayCudaMipmappedArray) data_view.set(value) @property def sdDisplayFormat(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayFormat) return data_view.get() @sdDisplayFormat.setter def sdDisplayFormat(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayFormat) data_view = og.AttributeValueHelper(self._attributes.sdDisplayFormat) data_view.set(value) @property def sdDisplayHeight(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayHeight) return data_view.get() @sdDisplayHeight.setter def sdDisplayHeight(self, value): if 
self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayHeight) data_view = og.AttributeValueHelper(self._attributes.sdDisplayHeight) data_view.set(value) @property def sdDisplayStream(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayStream) return data_view.get() @sdDisplayStream.setter def sdDisplayStream(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayStream) data_view = og.AttributeValueHelper(self._attributes.sdDisplayStream) data_view.set(value) @property def sdDisplayWidth(self): data_view = og.AttributeValueHelper(self._attributes.sdDisplayWidth) return data_view.get() @sdDisplayWidth.setter def sdDisplayWidth(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sdDisplayWidth) data_view = og.AttributeValueHelper(self._attributes.sdDisplayWidth) data_view.set(value) @property def stream(self): data_view = og.AttributeValueHelper(self._attributes.stream) return data_view.get() @stream.setter def stream(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.stream) data_view = og.AttributeValueHelper(self._attributes.stream) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.width) data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the 
attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def cudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) return data_view.get() @cudaPtr.setter def cudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return data_view.get() @format.setter def format(self, value): data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def handlePtr(self): data_view = og.AttributeValueHelper(self._attributes.handlePtr) return data_view.get() @handlePtr.setter def handlePtr(self, value): data_view = og.AttributeValueHelper(self._attributes.handlePtr) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) @property def stream(self): data_view = og.AttributeValueHelper(self._attributes.stream) return data_view.get() @stream.setter def stream(self, value): data_view = og.AttributeValueHelper(self._attributes.stream) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates 
natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdLinearArrayToTextureDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdLinearArrayToTextureDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdLinearArrayToTextureDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
13,547
Python
41.73817
144
0.628552
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostSemanticBoundingBoxDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostSemanticBoundingBox Synthetic Data node to compute the bounding boxes of the scene semantic entities. """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostSemanticBoundingBoxDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostSemanticBoundingBox Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.instanceMapSDCudaPtr inputs.instanceMappingInfoSDPtr inputs.renderProductResolution inputs.renderVar inputs.rp inputs.semanticLocalTransformSDCudaPtr inputs.semanticMapSDCudaPtr Outputs: outputs.exec outputs.sdSemBBoxExtentCudaPtr outputs.sdSemBBoxInfosCudaPtr Predefined Tokens: tokens.BoundingBox2DLooseSD tokens.BoundingBox2DTightSD tokens.SemanticBoundingBox2DExtentLooseSD tokens.SemanticBoundingBox2DInfosLooseSD tokens.SemanticBoundingBox2DExtentTightSD tokens.SemanticBoundingBox2DInfosTightSD tokens.BoundingBox3DSD tokens.SemanticBoundingBox3DExtentSD tokens.SemanticBoundingBox3DInfosSD """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, 'gpuFoundations', 'Pointer to 
shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:instanceMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numInstances containing the instance parent semantic index', {}, True, 0, False, ''), ('inputs:instanceMappingInfoSDPtr', 'uint64', 0, None, 'uint buffer pointer containing the following information : [numInstances, minInstanceId, numSemantics, minSemanticId, numProtoSemantic]', {}, True, 0, False, ''), ('inputs:renderProductResolution', 'int2', 0, None, 'RenderProduct resolution', {}, True, [0, 0], False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the BoundingBox RenderVar to process', {}, True, "", False, ''), ('inputs:rp', 'uint64', 0, 'renderProduct', 'Pointer to render product for this view', {}, True, 0, False, ''), ('inputs:semanticLocalTransformSDCudaPtr', 'uint64', 0, None, 'cuda float44 buffer pointer of size numSemantics containing the local semantic transform', {}, True, 0, False, ''), ('inputs:semanticMapSDCudaPtr', 'uint64', 0, None, 'cuda uint16_t buffer pointer of size numSemantics containing the semantic parent semantic index', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:sdSemBBoxExtentCudaPtr', 'uint64', 0, None, 'Cuda buffer containing the extent of the bounding boxes as a float4=(u_min,v_min,u_max,v_max) for 2D or a float6=(xmin,ymin,zmin,xmax,ymax,zmax) in object space for 3D', {}, True, None, False, ''), ('outputs:sdSemBBoxInfosCudaPtr', 'uint64', 0, None, 'Cuda buffer containing valid bounding boxes infos', {}, True, None, False, ''), ]) class tokens: BoundingBox2DLooseSD = "BoundingBox2DLooseSD" BoundingBox2DTightSD = "BoundingBox2DTightSD" SemanticBoundingBox2DExtentLooseSD = "SemanticBoundingBox2DExtentLooseSD" SemanticBoundingBox2DInfosLooseSD = "SemanticBoundingBox2DInfosLooseSD" SemanticBoundingBox2DExtentTightSD = "SemanticBoundingBox2DExtentTightSD" SemanticBoundingBox2DInfosTightSD = 
"SemanticBoundingBox2DInfosTightSD" BoundingBox3DSD = "BoundingBox3DSD" SemanticBoundingBox3DExtentSD = "SemanticBoundingBox3DExtentSD" SemanticBoundingBox3DInfosSD = "SemanticBoundingBox3DInfosSD" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def instanceMapSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr) return data_view.get() @instanceMapSDCudaPtr.setter def instanceMapSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMapSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMapSDCudaPtr) data_view.set(value) @property def instanceMappingInfoSDPtr(self): 
data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) return data_view.get() @instanceMappingInfoSDPtr.setter def instanceMappingInfoSDPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.instanceMappingInfoSDPtr) data_view = og.AttributeValueHelper(self._attributes.instanceMappingInfoSDPtr) data_view.set(value) @property def renderProductResolution(self): data_view = og.AttributeValueHelper(self._attributes.renderProductResolution) return data_view.get() @renderProductResolution.setter def renderProductResolution(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderProductResolution) data_view = og.AttributeValueHelper(self._attributes.renderProductResolution) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) @property def semanticLocalTransformSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformSDCudaPtr) return data_view.get() @semanticLocalTransformSDCudaPtr.setter def semanticLocalTransformSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticLocalTransformSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformSDCudaPtr) data_view.set(value) @property def semanticMapSDCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr) return data_view.get() 
@semanticMapSDCudaPtr.setter def semanticMapSDCudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticMapSDCudaPtr) data_view = og.AttributeValueHelper(self._attributes.semanticMapSDCudaPtr) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def sdSemBBoxExtentCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) return data_view.get() @sdSemBBoxExtentCudaPtr.setter def sdSemBBoxExtentCudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxExtentCudaPtr) data_view.set(value) @property def sdSemBBoxInfosCudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) return data_view.get() @sdSemBBoxInfosCudaPtr.setter def sdSemBBoxInfosCudaPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.sdSemBBoxInfosCudaPtr) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state 
attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostSemanticBoundingBoxDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostSemanticBoundingBoxDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostSemanticBoundingBoxDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
13,117
Python
46.528985
260
0.670047
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdInstanceMappingPtrDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdInstanceMappingPtr Synthetic Data node to expose the scene instances semantic hierarchy information """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdInstanceMappingPtrDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdInstanceMappingPtr Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.cudaPtr inputs.exec inputs.renderResults inputs.semanticFilerTokens Outputs: outputs.cudaDeviceIndex outputs.exec outputs.instanceMapPtr outputs.instancePrimPathPtr outputs.lastUpdateTimeDenominator outputs.lastUpdateTimeNumerator outputs.minInstanceIndex outputs.minSemanticIndex outputs.numInstances outputs.numSemantics outputs.semanticLabelTokenPtrs outputs.semanticLocalTransformPtr outputs.semanticMapPtr outputs.semanticPrimPathPtr outputs.semanticWorldTransformPtr Predefined Tokens: tokens.InstanceMappingInfoSDhost tokens.InstancePrimTokenSDhost tokens.InstancePrimTokenSD tokens.SemanticPrimTokenSDhost tokens.SemanticPrimTokenSD tokens.InstanceMapSDhost tokens.InstanceMapSD tokens.SemanticMapSDhost tokens.SemanticMapSD tokens.SemanticWorldTransformSDhost tokens.SemanticWorldTransformSD tokens.SemanticLocalTransformSDhost tokens.SemanticLocalTransformSD tokens.SemanticLabelTokenSDhost tokens.SemanticLabelTokenSD """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, 
Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:cudaPtr', 'bool', 0, None, 'If true, return cuda device pointer instead of host pointer', {ogn.MetadataKeys.DEFAULT: 'false'}, True, False, False, ''), ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results pointer', {}, True, 0, False, ''), ('inputs:semanticFilerTokens', 'token[]', 0, None, 'Tokens identifying the semantic filters applied to the output semantic labels. Each token should correspond to an activated SdSemanticFilter node', {ogn.MetadataKeys.DEFAULT: '[]'}, True, [], False, ''), ('outputs:cudaDeviceIndex', 'int', 0, None, 'If the data is on the device it is the cuda index of this device otherwise it is set to -1', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:instanceMapPtr', 'uint64', 0, None, 'Array pointer of numInstances uint16_t containing the semantic index of the instance prim first semantic prim parent', {}, True, None, False, ''), ('outputs:instancePrimPathPtr', 'uint64', 0, None, 'Array pointer of numInstances uint64_t containing the prim path tokens for every instance prims', {}, True, None, False, ''), ('outputs:lastUpdateTimeDenominator', 'uint64', 0, None, 'Time denominator of the last time the data has changed', {}, True, None, False, ''), ('outputs:lastUpdateTimeNumerator', 'int64', 0, None, 'Time numerator of the last time the data has changed', {}, True, None, False, ''), ('outputs:minInstanceIndex', 'uint', 0, None, 'Instance index of the first instance prim in the instance arrays', {}, True, None, False, ''), ('outputs:minSemanticIndex', 'uint', 0, None, 'Semantic index of the first semantic prim in the semantic arrays', {}, True, None, False, ''), 
('outputs:numInstances', 'uint', 0, None, 'Number of instances prim in the instance arrays', {}, True, None, False, ''), ('outputs:numSemantics', 'uint', 0, None, 'Number of semantic prim in the semantic arrays', {}, True, None, False, ''), ('outputs:semanticLabelTokenPtrs', 'uint64[]', 0, None, 'Array containing for every input semantic filters the corresponding array pointer of numSemantics uint64_t representing the semantic label of the semantic prim', {}, True, None, False, ''), ('outputs:semanticLocalTransformPtr', 'uint64', 0, None, 'Array pointer of numSemantics 4x4 float matrices containing the transform from world to object space for every semantic prims', {}, True, None, False, ''), ('outputs:semanticMapPtr', 'uint64', 0, None, 'Array pointer of numSemantics uint16_t containing the semantic index of the semantic prim first semantic prim parent', {}, True, None, False, ''), ('outputs:semanticPrimPathPtr', 'uint64', 0, None, 'Array pointer of numSemantics uint32_t containing the prim part of the prim path tokens for every semantic prims', {}, True, None, False, ''), ('outputs:semanticWorldTransformPtr', 'uint64', 0, None, 'Array pointer of numSemantics 4x4 float matrices containing the transform from local to world space for every semantic entity', {}, True, None, False, ''), ]) class tokens: InstanceMappingInfoSDhost = "InstanceMappingInfoSDhost" InstancePrimTokenSDhost = "InstancePrimTokenSDhost" InstancePrimTokenSD = "InstancePrimTokenSD" SemanticPrimTokenSDhost = "SemanticPrimTokenSDhost" SemanticPrimTokenSD = "SemanticPrimTokenSD" InstanceMapSDhost = "InstanceMapSDhost" InstanceMapSD = "InstanceMapSD" SemanticMapSDhost = "SemanticMapSDhost" SemanticMapSD = "SemanticMapSD" SemanticWorldTransformSDhost = "SemanticWorldTransformSDhost" SemanticWorldTransformSD = "SemanticWorldTransformSD" SemanticLocalTransformSDhost = "SemanticLocalTransformSDhost" SemanticLocalTransformSD = "SemanticLocalTransformSD" SemanticLabelTokenSDhost = "SemanticLabelTokenSDhost" 
SemanticLabelTokenSD = "SemanticLabelTokenSD" @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def cudaPtr(self): data_view = og.AttributeValueHelper(self._attributes.cudaPtr) return data_view.get() @cudaPtr.setter def cudaPtr(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.cudaPtr) data_view = og.AttributeValueHelper(self._attributes.cudaPtr) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) @property def semanticFilerTokens(self): data_view = og.AttributeValueHelper(self._attributes.semanticFilerTokens) return data_view.get() @semanticFilerTokens.setter def semanticFilerTokens(self, value): if 
self._setting_locked: raise og.ReadOnlyError(self._attributes.semanticFilerTokens) data_view = og.AttributeValueHelper(self._attributes.semanticFilerTokens) data_view.set(value) self.semanticFilerTokens_size = data_view.get_array_size() def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.semanticLabelTokenPtrs_size = None self._batchedWriteValues = { } @property def cudaDeviceIndex(self): data_view = og.AttributeValueHelper(self._attributes.cudaDeviceIndex) return data_view.get() @cudaDeviceIndex.setter def cudaDeviceIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaDeviceIndex) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def instanceMapPtr(self): data_view = og.AttributeValueHelper(self._attributes.instanceMapPtr) return data_view.get() @instanceMapPtr.setter def instanceMapPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.instanceMapPtr) data_view.set(value) @property def instancePrimPathPtr(self): data_view = og.AttributeValueHelper(self._attributes.instancePrimPathPtr) return data_view.get() @instancePrimPathPtr.setter def instancePrimPathPtr(self, value): data_view = 
og.AttributeValueHelper(self._attributes.instancePrimPathPtr) data_view.set(value) @property def lastUpdateTimeDenominator(self): data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator) return data_view.get() @lastUpdateTimeDenominator.setter def lastUpdateTimeDenominator(self, value): data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeDenominator) data_view.set(value) @property def lastUpdateTimeNumerator(self): data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator) return data_view.get() @lastUpdateTimeNumerator.setter def lastUpdateTimeNumerator(self, value): data_view = og.AttributeValueHelper(self._attributes.lastUpdateTimeNumerator) data_view.set(value) @property def minInstanceIndex(self): data_view = og.AttributeValueHelper(self._attributes.minInstanceIndex) return data_view.get() @minInstanceIndex.setter def minInstanceIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.minInstanceIndex) data_view.set(value) @property def minSemanticIndex(self): data_view = og.AttributeValueHelper(self._attributes.minSemanticIndex) return data_view.get() @minSemanticIndex.setter def minSemanticIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.minSemanticIndex) data_view.set(value) @property def numInstances(self): data_view = og.AttributeValueHelper(self._attributes.numInstances) return data_view.get() @numInstances.setter def numInstances(self, value): data_view = og.AttributeValueHelper(self._attributes.numInstances) data_view.set(value) @property def numSemantics(self): data_view = og.AttributeValueHelper(self._attributes.numSemantics) return data_view.get() @numSemantics.setter def numSemantics(self, value): data_view = og.AttributeValueHelper(self._attributes.numSemantics) data_view.set(value) @property def semanticLabelTokenPtrs(self): data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenPtrs) return 
data_view.get(reserved_element_count=self.semanticLabelTokenPtrs_size) @semanticLabelTokenPtrs.setter def semanticLabelTokenPtrs(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticLabelTokenPtrs) data_view.set(value) self.semanticLabelTokenPtrs_size = data_view.get_array_size() @property def semanticLocalTransformPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformPtr) return data_view.get() @semanticLocalTransformPtr.setter def semanticLocalTransformPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticLocalTransformPtr) data_view.set(value) @property def semanticMapPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticMapPtr) return data_view.get() @semanticMapPtr.setter def semanticMapPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticMapPtr) data_view.set(value) @property def semanticPrimPathPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticPrimPathPtr) return data_view.get() @semanticPrimPathPtr.setter def semanticPrimPathPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticPrimPathPtr) data_view.set(value) @property def semanticWorldTransformPtr(self): data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformPtr) return data_view.get() @semanticWorldTransformPtr.setter def semanticWorldTransformPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.semanticWorldTransformPtr) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() 
super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdInstanceMappingPtrDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdInstanceMappingPtrDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdInstanceMappingPtrDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
17,484
Python
46.904109
263
0.669812
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdSemanticFilterDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdSemanticFilter Synthetic Data node to declare a semantic filter. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdSemanticFilterDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdSemanticFilter Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.hierarchicalLabels inputs.matchingLabels inputs.name inputs.predicate Outputs: outputs.exec outputs.name outputs.predicate """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Dependency', {}, True, None, False, ''), ('inputs:hierarchicalLabels', 'bool', 0, None, 'If true the filter consider all labels in the semantic hierarchy above the prims', {ogn.MetadataKeys.DEFAULT: 'false'}, True, False, False, ''), ('inputs:matchingLabels', 'bool', 0, None, 'If true output only the labels matching the filter (if false keep all labels of the matching prims)', {ogn.MetadataKeys.DEFAULT: 'true'}, True, True, False, ''), ('inputs:name', 'token', 0, None, 'Filter unique identifier [if empty, use the normalized predicate as an identifier]', {ogn.MetadataKeys.DEFAULT: '""'}, True, "", False, ''), ('inputs:predicate', 'token', 0, 
None, 'The semantic filter specification : a disjunctive normal form of semantic type and label', {ogn.MetadataKeys.DEFAULT: '""'}, True, "", False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:name', 'token', 0, None, 'The semantic filter name identifier', {ogn.MetadataKeys.DEFAULT: '""'}, True, "", False, ''), ('outputs:predicate', 'token', 0, None, 'The semantic filter predicate in normalized form', {ogn.MetadataKeys.DEFAULT: '""'}, True, "", False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def hierarchicalLabels(self): data_view = og.AttributeValueHelper(self._attributes.hierarchicalLabels) return data_view.get() @hierarchicalLabels.setter def hierarchicalLabels(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.hierarchicalLabels) data_view = og.AttributeValueHelper(self._attributes.hierarchicalLabels) data_view.set(value) @property def matchingLabels(self): 
data_view = og.AttributeValueHelper(self._attributes.matchingLabels) return data_view.get() @matchingLabels.setter def matchingLabels(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.matchingLabels) data_view = og.AttributeValueHelper(self._attributes.matchingLabels) data_view.set(value) @property def name(self): data_view = og.AttributeValueHelper(self._attributes.name) return data_view.get() @name.setter def name(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.name) data_view = og.AttributeValueHelper(self._attributes.name) data_view.set(value) @property def predicate(self): data_view = og.AttributeValueHelper(self._attributes.predicate) return data_view.get() @predicate.setter def predicate(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.predicate) data_view = og.AttributeValueHelper(self._attributes.predicate) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def name(self): data_view = og.AttributeValueHelper(self._attributes.name) return data_view.get() @name.setter def name(self, value): data_view = 
og.AttributeValueHelper(self._attributes.name) data_view.set(value) @property def predicate(self): data_view = og.AttributeValueHelper(self._attributes.predicate) return data_view.get() @predicate.setter def predicate(self, value): data_view = og.AttributeValueHelper(self._attributes.predicate) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdSemanticFilterDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdSemanticFilterDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdSemanticFilterDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
8,988
Python
44.629441
213
0.647864
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdPostRenderVarToHostDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarToHost Expose a host renderVar from the input device renderVar. """ import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdPostRenderVarToHostDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdPostRenderVarToHost Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.gpu inputs.renderVar inputs.renderVarHostSuffix inputs.rp Outputs: outputs.exec outputs.renderVar """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:gpu', 'uint64', 0, None, 'Pointer to shared context containing gpu foundations', {}, True, 0, False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the device renderVar to expose on the host', {}, True, "", False, ''), ('inputs:renderVarHostSuffix', 'string', 0, None, 'Suffix appended to the renderVar name', {ogn.MetadataKeys.DEFAULT: '"host"'}, True, "host", False, ''), ('inputs:rp', 'uint64', 0, None, 'Pointer to render product for this view', {}, True, 0, False, ''), ('outputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('outputs:renderVar', 'token', 0, None, 'Name of 
the resulting renderVar on the host', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.inputs.renderVarHostSuffix = og.AttributeRole.TEXT role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def gpu(self): data_view = og.AttributeValueHelper(self._attributes.gpu) return data_view.get() @gpu.setter def gpu(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gpu) data_view = og.AttributeValueHelper(self._attributes.gpu) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) @property def renderVarHostSuffix(self): data_view = og.AttributeValueHelper(self._attributes.renderVarHostSuffix) return data_view.get() @renderVarHostSuffix.setter def 
renderVarHostSuffix(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVarHostSuffix) data_view = og.AttributeValueHelper(self._attributes.renderVarHostSuffix) data_view.set(value) self.renderVarHostSuffix_size = data_view.get_array_size() @property def rp(self): data_view = og.AttributeValueHelper(self._attributes.rp) return data_view.get() @rp.setter def rp(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.rp) data_view = og.AttributeValueHelper(self._attributes.rp) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, 
dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdPostRenderVarToHostDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdPostRenderVarToHostDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdPostRenderVarToHostDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
8,304
Python
43.411764
162
0.645592
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/OgnSdRenderVarPtrDatabase.py
"""Support for simplified access to data on nodes of type omni.syntheticdata.SdRenderVarPtr Synthetic Data node exposing the raw pointer data of a rendervar. """ import numpy import omni.graph.core as og import omni.graph.core._omni_graph_core as _og import omni.graph.tools.ogn as ogn class OgnSdRenderVarPtrDatabase(og.Database): """Helper class providing simplified access to data on nodes of type omni.syntheticdata.SdRenderVarPtr Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.exec inputs.renderResults inputs.renderVar Outputs: outputs.bufferSize outputs.cudaDeviceIndex outputs.dataPtr outputs.exec outputs.format outputs.height outputs.strides outputs.width """ # Imprint the generator and target ABI versions in the file for JIT generation GENERATOR_VERSION = (1, 41, 3) TARGET_VERSION = (2, 139, 12) # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, # Is_Required, DefaultValue, Is_Deprecated, DeprecationMsg # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:exec', 'execution', 0, None, 'Trigger', {}, True, None, False, ''), ('inputs:renderResults', 'uint64', 0, None, 'Render results pointer', {}, True, 0, False, ''), ('inputs:renderVar', 'token', 0, None, 'Name of the renderVar', {}, True, "", False, ''), ('outputs:bufferSize', 'uint64', 0, None, 'Size (in bytes) of the buffer (0 if the input is a texture)', {}, True, None, False, ''), ('outputs:cudaDeviceIndex', 'int', 0, None, 'Index of the device where the data lives (-1 for host data)', {ogn.MetadataKeys.DEFAULT: '-1'}, True, -1, False, ''), ('outputs:dataPtr', 'uint64', 0, None, 'Pointer to the raw 
data (cuda device pointer or host pointer)', {ogn.MetadataKeys.DEFAULT: '0'}, True, 0, False, ''), ('outputs:exec', 'execution', 0, 'Received', 'Executes when the event is received', {}, True, None, False, ''), ('outputs:format', 'uint64', 0, None, 'Format', {}, True, None, False, ''), ('outputs:height', 'uint', 0, None, 'Height (0 if the input is a buffer)', {}, True, None, False, ''), ('outputs:strides', 'int2', 0, None, 'Strides (in bytes) ([0,0] if the input is a buffer)', {}, True, None, False, ''), ('outputs:width', 'uint', 0, None, 'Width (0 if the input is a buffer)', {}, True, None, False, ''), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.exec = og.AttributeRole.EXECUTION role_data.outputs.exec = og.AttributeRole.EXECUTION return role_data class ValuesForInputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedReadAttributes = [] self._batchedReadValues = [] @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.exec) data_view = og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def renderResults(self): data_view = og.AttributeValueHelper(self._attributes.renderResults) return data_view.get() @renderResults.setter def renderResults(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderResults) data_view = 
og.AttributeValueHelper(self._attributes.renderResults) data_view.set(value) @property def renderVar(self): data_view = og.AttributeValueHelper(self._attributes.renderVar) return data_view.get() @renderVar.setter def renderVar(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.renderVar) data_view = og.AttributeValueHelper(self._attributes.renderVar) data_view.set(value) def _prefetch(self): readAttributes = self._batchedReadAttributes newValues = _og._prefetch_input_attributes_data(readAttributes) if len(readAttributes) == len(newValues): self._batchedReadValues = newValues class ValuesForOutputs(og.DynamicAttributeAccess): LOCAL_PROPERTY_NAMES = { } """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self._batchedWriteValues = { } @property def bufferSize(self): data_view = og.AttributeValueHelper(self._attributes.bufferSize) return data_view.get() @bufferSize.setter def bufferSize(self, value): data_view = og.AttributeValueHelper(self._attributes.bufferSize) data_view.set(value) @property def cudaDeviceIndex(self): data_view = og.AttributeValueHelper(self._attributes.cudaDeviceIndex) return data_view.get() @cudaDeviceIndex.setter def cudaDeviceIndex(self, value): data_view = og.AttributeValueHelper(self._attributes.cudaDeviceIndex) data_view.set(value) @property def dataPtr(self): data_view = og.AttributeValueHelper(self._attributes.dataPtr) return data_view.get() @dataPtr.setter def dataPtr(self, value): data_view = og.AttributeValueHelper(self._attributes.dataPtr) data_view.set(value) @property def exec(self): data_view = og.AttributeValueHelper(self._attributes.exec) return data_view.get() @exec.setter def exec(self, value): data_view 
= og.AttributeValueHelper(self._attributes.exec) data_view.set(value) @property def format(self): data_view = og.AttributeValueHelper(self._attributes.format) return data_view.get() @format.setter def format(self, value): data_view = og.AttributeValueHelper(self._attributes.format) data_view.set(value) @property def height(self): data_view = og.AttributeValueHelper(self._attributes.height) return data_view.get() @height.setter def height(self, value): data_view = og.AttributeValueHelper(self._attributes.height) data_view.set(value) @property def strides(self): data_view = og.AttributeValueHelper(self._attributes.strides) return data_view.get() @strides.setter def strides(self, value): data_view = og.AttributeValueHelper(self._attributes.strides) data_view.set(value) @property def width(self): data_view = og.AttributeValueHelper(self._attributes.width) return data_view.get() @width.setter def width(self, value): data_view = og.AttributeValueHelper(self._attributes.width) data_view.set(value) def _commit(self): _og._commit_output_attributes_data(self._batchedWriteValues) self._batchedWriteValues = { } class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnSdRenderVarPtrDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = OgnSdRenderVarPtrDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) 
dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnSdRenderVarPtrDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes)
9,891
Python
42.008695
170
0.632899
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/python/nodes/OgnSdTestPrintRawArray.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. # import omni.graph.core as og import numpy as np import random class OgnSdTestPrintRawArray: @staticmethod def get_formatted_data(db, data, element_count): formatted_data = data.copy() is2DArray = db.inputs.bufferSize == 0 if not is2DArray: data = data.reshape(data.shape[0] // element_count, element_count) if element_count > 1 else data else: data = ( data.reshape(db.inputs.height, db.inputs.width, element_count) if element_count > 1 else data.reshape(db.inputs.height, db.inputs.width) ) return formatted_data @staticmethod def compute(db) -> bool: if db.state.initialSWHFrameNumber < 0: db.state.initialSWHFrameNumber = db.inputs.swhFrameNumber frameNumber = db.inputs.swhFrameNumber - db.state.initialSWHFrameNumber rd_seed = db.inputs.randomSeed + ((frameNumber * 17) % 491) random.seed(rd_seed) db.outputs.swhFrameNumber = db.inputs.swhFrameNumber db.outputs.exec = og.ExecutionAttributeState.ENABLED elemenType = np.uint8 if db.inputs.elementType == db.tokens.uint16: elemenType = np.uint16 elif db.inputs.elementType == db.tokens.int16: elemenType = np.int16 elif db.inputs.elementType == db.tokens.uint32: elemenType = np.uint32 elif db.inputs.elementType == db.tokens.int32: elemenType = np.int32 elif db.inputs.elementType == db.tokens.float32: elemenType = np.float32 elif db.inputs.elementType == db.tokens.token: elemenType = np.uint64 elementCount = db.inputs.elementCount data = db.inputs.data data = data.view(elemenType) if db.inputs.mode == db.tokens.printFormatted: data = OgnSdTestPrintRawArray.get_formatted_data(db,data, elementCount) 
print("OgnSdPrintRawArray : ", db.inputs.swhFrameNumber) print(data) elif db.inputs.mode == db.tokens.writeToDisk: data = OgnSdTestPrintRawArray.get_formatted_data(db,data, elementCount) np.save(f"{db.inputs.dataFileBaseName}_{db.inputs.swhFrameNumber}", data) elif (frameNumber in db.inputs.referenceSWHFrameNumbers) and (data.shape[0]>=db.inputs.referenceNumUniqueRandomValues): if (db.inputs.mode == db.tokens.printReferences): ref_values = data.astype(np.float32) random.shuffle(ref_values) ref_values = ref_values[:db.inputs.referenceNumUniqueRandomValues] print(ref_values) else: ref_values = data.astype(np.float32) random.shuffle(ref_values) ref_values = ref_values[:db.inputs.referenceNumUniqueRandomValues] frame_offset = np.where(db.inputs.referenceSWHFrameNumbers == frameNumber)[0][0] reference_offset = frame_offset * db.inputs.referenceNumUniqueRandomValues err = np.square(ref_values - db.inputs.referenceValues[reference_offset:reference_offset+db.inputs.referenceNumUniqueRandomValues]).max() if err >= db.inputs.referenceTolerance: print(f"OgnSdTestPrintRawArray [Error]") return True
3,821
Python
42.431818
153
0.643025
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/python/nodes/OgnSdTestStageManipulationScenarii.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. # import omni.usd import omni.graph.core as og from pxr import Gf, Semantics, UsdGeom import numpy as np class OgnSdTestStageManipulationScenarii: _prim_names = ["Sphere", "Capsule", "Plane", "Torus", "Cube", "Cone"] _sem_types = ["type", "class", "genre"] _sem_labels = ["sphere", "capsule", "plane", "torus", "cube", "ball", "cone"] @staticmethod def add_semantics(prim, semantic_label, semantic_type="class"): sem = Semantics.SemanticsAPI.Apply(prim, "Semantics") sem.CreateSemanticTypeAttr() sem.CreateSemanticDataAttr() sem.GetSemanticTypeAttr().Set(semantic_type) sem.GetSemanticDataAttr().Set(semantic_label) @staticmethod def get_random_transform(rng): tf = np.eye(4) tf[:3, :3] = Gf.Matrix3d(Gf.Rotation(rng.rand(3).tolist(), rng.rand(3).tolist())) tf[3, :3] = rng.rand(3).tolist() return Gf.Matrix4d(tf) @staticmethod def compute(db) -> bool: usd_context = omni.usd.get_context() stage = usd_context.get_stage() if not stage: return False rng = np.random.default_rng(db.inputs.randomSeed + ((db.state.frameNumber * 23) % 1579)) world_prim = stage.GetPrimAtPath(db.inputs.worldPrimPath) if not world_prim: world_prim = stage.DefinePrim(db.inputs.worldPrimPath) if world_prim: world_xform_prim = UsdGeom.Xformable(world_prim) if world_prim else None if world_xform_prim: world_xform_prim.AddTransformOp().Set(OgnSdTestStageManipulationScenarii.get_random_transform(rng)) if not world_prim: return False db.state.frameNumber += 1 num_manipulations = rng.randint(0, 3) for manip_index in range(num_manipulations): prims = world_prim.GetChildren() 
prims.append(world_prim) prim = rng.choice(prims) if not prim : continue manipulation = rng.randint(0, 38) if (manipulation < 11): """create a new children prim""" prim_name = rng.choice(OgnSdTestStageManipulationScenarii._prim_names) prim_path = prim.GetPath().pathString + "/" + prim_name + "_" + str(db.state.frameNumber) + "_" + str(manip_index) new_prim = stage.DefinePrim(prim_path, prim_name) new_prim_color_attr = new_prim.GetAttribute("primvars:displayColor") if new_prim else None if new_prim_color_attr: new_prim_color_attr.Set([rng.rand(3).tolist()]) xform_prim = UsdGeom.Xformable(new_prim) if new_prim else None if xform_prim: xform_prim.AddScaleOp().Set((175.0*rng.random(), 175.0*rng.random(), 175.0*rng.random())) xform_prim.AddTransformOp().Set(OgnSdTestStageManipulationScenarii.get_random_transform(rng)) elif (manipulation >= 11) and (manipulation <12): """remove the prim""" stage.RemovePrim(prim.GetPath()) elif (manipulation >=12) and (manipulation <23): """move the prim""" xform_prim = UsdGeom.Xformable(prim) if xform_prim: xform_prim.ClearXformOpOrder() xform_prim.AddTransformOp().Set(OgnSdTestStageManipulationScenarii.get_random_transform(rng)) elif (manipulation >=23) and (manipulation < 31): """add semantic to the prim""" OgnSdTestStageManipulationScenarii.add_semantics(prim, rng.choice(OgnSdTestStageManipulationScenarii._sem_labels), rng.choice(OgnSdTestStageManipulationScenarii._sem_types)) elif (manipulation >=31) and (manipulation < 39): """change color of the prim""" prim_color_attr = prim.GetAttribute("primvars:displayColor") if prim_color_attr: prim_color_attr.Set([rng.rand(3).tolist()]) return True
4,542
Python
42.266666
189
0.606781
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdRenderProductCamera.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdRenderProductCameraDatabase import OgnSdRenderProductCameraDatabase test_file_name = "OgnSdRenderProductCameraTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdRenderProductCamera") database = OgnSdRenderProductCameraDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 2) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderProductPath")) 
attribute = test_node.get_attribute("inputs:renderProductPath") db_value = database.inputs.renderProductPath expected_value = "" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderResults")) attribute = test_node.get_attribute("inputs:renderResults") db_value = database.inputs.renderResults expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:cameraApertureOffset")) attribute = test_node.get_attribute("outputs:cameraApertureOffset") db_value = database.outputs.cameraApertureOffset self.assertTrue(test_node.get_attribute_exists("outputs:cameraApertureSize")) attribute = test_node.get_attribute("outputs:cameraApertureSize") db_value = database.outputs.cameraApertureSize self.assertTrue(test_node.get_attribute_exists("outputs:cameraFStop")) attribute = test_node.get_attribute("outputs:cameraFStop") db_value = database.outputs.cameraFStop self.assertTrue(test_node.get_attribute_exists("outputs:cameraFisheyeParams")) attribute = test_node.get_attribute("outputs:cameraFisheyeParams") db_value = database.outputs.cameraFisheyeParams self.assertTrue(test_node.get_attribute_exists("outputs:cameraFocalLength")) attribute = test_node.get_attribute("outputs:cameraFocalLength") db_value = database.outputs.cameraFocalLength self.assertTrue(test_node.get_attribute_exists("outputs:cameraFocusDistance")) attribute = test_node.get_attribute("outputs:cameraFocusDistance") db_value = database.outputs.cameraFocusDistance self.assertTrue(test_node.get_attribute_exists("outputs:cameraModel")) attribute = test_node.get_attribute("outputs:cameraModel") db_value = 
database.outputs.cameraModel self.assertTrue(test_node.get_attribute_exists("outputs:cameraNearFar")) attribute = test_node.get_attribute("outputs:cameraNearFar") db_value = database.outputs.cameraNearFar self.assertTrue(test_node.get_attribute_exists("outputs:cameraProjection")) attribute = test_node.get_attribute("outputs:cameraProjection") db_value = database.outputs.cameraProjection self.assertTrue(test_node.get_attribute_exists("outputs:cameraViewTransform")) attribute = test_node.get_attribute("outputs:cameraViewTransform") db_value = database.outputs.cameraViewTransform self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:metersPerSceneUnit")) attribute = test_node.get_attribute("outputs:metersPerSceneUnit") db_value = database.outputs.metersPerSceneUnit self.assertTrue(test_node.get_attribute_exists("outputs:renderProductResolution")) attribute = test_node.get_attribute("outputs:renderProductResolution") db_value = database.outputs.renderProductResolution
5,779
Python
51.072072
108
0.71431
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostRenderVarTextureToBuffer.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdPostRenderVarTextureToBufferDatabase import OgnSdPostRenderVarTextureToBufferDatabase test_file_name = "OgnSdPostRenderVarTextureToBufferTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostRenderVarTextureToBuffer") database = OgnSdPostRenderVarTextureToBufferDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("inputs:renderVar")) attribute = test_node.get_attribute("inputs:renderVar") db_value = database.inputs.renderVar expected_value = "" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderVarBufferSuffix")) attribute = test_node.get_attribute("inputs:renderVarBufferSuffix") db_value = database.inputs.renderVarBufferSuffix expected_value = "buffer" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:rp")) attribute = test_node.get_attribute("inputs:rp") db_value = database.inputs.rp expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:renderVar")) attribute = test_node.get_attribute("outputs:renderVar") db_value = database.outputs.renderVar
3,879
Python
50.733333
126
0.70018
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostSemantic3dBoundingBoxCameraProjection.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase import OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase test_file_name = "OgnSdPostSemantic3dBoundingBoxCameraProjectionTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostSemantic3dBoundingBoxCameraProjection") database = OgnSdPostSemantic3dBoundingBoxCameraProjectionDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:cameraFisheyeParams")) attribute = test_node.get_attribute("inputs:cameraFisheyeParams") db_value = database.inputs.cameraFisheyeParams expected_value = [] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:cameraModel")) 
attribute = test_node.get_attribute("inputs:cameraModel") db_value = database.inputs.cameraModel expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:cameraNearFar")) attribute = test_node.get_attribute("inputs:cameraNearFar") db_value = database.inputs.cameraNearFar expected_value = [1.0, 10000000.0] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:instanceMappingInfoSDPtr")) attribute = test_node.get_attribute("inputs:instanceMappingInfoSDPtr") db_value = database.inputs.instanceMappingInfoSDPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:metersPerSceneUnit")) attribute = test_node.get_attribute("inputs:metersPerSceneUnit") db_value = database.inputs.metersPerSceneUnit expected_value = 0.01 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) 
ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderProductResolution")) attribute = test_node.get_attribute("inputs:renderProductResolution") db_value = database.inputs.renderProductResolution expected_value = [65536, 65536] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:rp")) attribute = test_node.get_attribute("inputs:rp") db_value = database.inputs.rp expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:sdSemBBoxExtentCudaPtr")) attribute = test_node.get_attribute("inputs:sdSemBBoxExtentCudaPtr") db_value = database.inputs.sdSemBBoxExtentCudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:sdSemBBoxInfosCudaPtr")) attribute = test_node.get_attribute("inputs:sdSemBBoxInfosCudaPtr") db_value = database.inputs.sdSemBBoxInfosCudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticWorldTransformSDCudaPtr")) attribute = test_node.get_attribute("inputs:semanticWorldTransformSDCudaPtr") db_value = database.inputs.semanticWorldTransformSDCudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) 
ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:sdSemBBox3dCamCornersCudaPtr")) attribute = test_node.get_attribute("outputs:sdSemBBox3dCamCornersCudaPtr") db_value = database.outputs.sdSemBBox3dCamCornersCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:sdSemBBox3dCamExtentCudaPtr")) attribute = test_node.get_attribute("outputs:sdSemBBox3dCamExtentCudaPtr") db_value = database.outputs.sdSemBBox3dCamExtentCudaPtr
7,588
Python
55.214814
152
0.71165
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdSimRenderProductCamera.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdSimRenderProductCameraDatabase import OgnSdSimRenderProductCameraDatabase test_file_name = "OgnSdSimRenderProductCameraTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdSimRenderProductCamera") database = OgnSdSimRenderProductCameraDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:renderProductPath")) attribute = test_node.get_attribute("inputs:renderProductPath") db_value = database.inputs.renderProductPath expected_value = "" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec
2,201
Python
50.209301
114
0.71104
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdOnNewFrame.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdOnNewFrameDatabase import OgnSdOnNewFrameDatabase test_file_name = "OgnSdOnNewFrameTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdOnNewFrame") database = OgnSdOnNewFrameDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("outputs:cudaStream")) attribute = test_node.get_attribute("outputs:cudaStream") db_value = database.outputs.cudaStream self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:referenceTimeDenominator")) attribute = test_node.get_attribute("outputs:referenceTimeDenominator") db_value = database.outputs.referenceTimeDenominator self.assertTrue(test_node.get_attribute_exists("outputs:referenceTimeNumerator")) attribute = 
test_node.get_attribute("outputs:referenceTimeNumerator") db_value = database.outputs.referenceTimeNumerator self.assertTrue(test_node.get_attribute_exists("outputs:renderProductDataPtrs")) attribute = test_node.get_attribute("outputs:renderProductDataPtrs") db_value = database.outputs.renderProductDataPtrs self.assertTrue(test_node.get_attribute_exists("outputs:renderProductPaths")) attribute = test_node.get_attribute("outputs:renderProductPaths") db_value = database.outputs.renderProductPaths
2,775
Python
49.472726
93
0.718198
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostInstanceMapping.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdPostInstanceMappingDatabase import OgnSdPostInstanceMappingDatabase test_file_name = "OgnSdPostInstanceMappingTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostInstanceMapping") database = OgnSdPostInstanceMappingDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 2) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:rp")) attribute = 
test_node.get_attribute("inputs:rp") db_value = database.inputs.rp expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticFilterName")) attribute = test_node.get_attribute("inputs:semanticFilterName") db_value = database.inputs.semanticFilterName expected_value = "default" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:instanceMapSDCudaPtr")) attribute = test_node.get_attribute("outputs:instanceMapSDCudaPtr") db_value = database.outputs.instanceMapSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:instanceMappingInfoSDPtr")) attribute = test_node.get_attribute("outputs:instanceMappingInfoSDPtr") db_value = database.outputs.instanceMappingInfoSDPtr self.assertTrue(test_node.get_attribute_exists("outputs:instancePrimTokenSDCudaPtr")) attribute = test_node.get_attribute("outputs:instancePrimTokenSDCudaPtr") db_value = database.outputs.instancePrimTokenSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:lastUpdateTimeDenominator")) attribute = test_node.get_attribute("outputs:lastUpdateTimeDenominator") db_value = database.outputs.lastUpdateTimeDenominator self.assertTrue(test_node.get_attribute_exists("outputs:lastUpdateTimeNumerator")) attribute = test_node.get_attribute("outputs:lastUpdateTimeNumerator") db_value = database.outputs.lastUpdateTimeNumerator self.assertTrue(test_node.get_attribute_exists("outputs:semanticLabelTokenSDCudaPtr")) attribute = 
test_node.get_attribute("outputs:semanticLabelTokenSDCudaPtr") db_value = database.outputs.semanticLabelTokenSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticLocalTransformSDCudaPtr")) attribute = test_node.get_attribute("outputs:semanticLocalTransformSDCudaPtr") db_value = database.outputs.semanticLocalTransformSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticMapSDCudaPtr")) attribute = test_node.get_attribute("outputs:semanticMapSDCudaPtr") db_value = database.outputs.semanticMapSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticPrimTokenSDCudaPtr")) attribute = test_node.get_attribute("outputs:semanticPrimTokenSDCudaPtr") db_value = database.outputs.semanticPrimTokenSDCudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticWorldTransformSDCudaPtr")) attribute = test_node.get_attribute("outputs:semanticWorldTransformSDCudaPtr") db_value = database.outputs.semanticWorldTransformSDCudaPtr
5,579
Python
53.174757
108
0.724503
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdSimInstanceMapping.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdSimInstanceMappingDatabase import OgnSdSimInstanceMappingDatabase test_file_name = "OgnSdSimInstanceMappingTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdSimInstanceMapping") database = OgnSdSimInstanceMappingDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:needTransform")) attribute = test_node.get_attribute("inputs:needTransform") db_value = database.inputs.needTransform expected_value = True actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticFilterPredicate")) attribute = test_node.get_attribute("inputs:semanticFilterPredicate") db_value = 
database.inputs.semanticFilterPredicate expected_value = "*:*" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:semanticFilterPredicate")) attribute = test_node.get_attribute("outputs:semanticFilterPredicate") db_value = database.outputs.semanticFilterPredicate
2,884
Python
51.454545
106
0.711165
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdNoOp.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdNoOpDatabase import OgnSdNoOpDatabase test_file_name = "OgnSdNoOpTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdNoOp") database = OgnSdNoOpDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec
1,821
Python
45.717948
92
0.695772
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdInstanceMapping.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdInstanceMappingDatabase import OgnSdInstanceMappingDatabase test_file_name = "OgnSdInstanceMappingTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdInstanceMapping") database = OgnSdInstanceMappingDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 2) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:lazy")) attribute = test_node.get_attribute("inputs:lazy") db_value = database.inputs.lazy expected_value = True actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderResults")) attribute = 
test_node.get_attribute("inputs:renderResults") db_value = database.inputs.renderResults expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:sdIMInstanceSemanticMap")) attribute = test_node.get_attribute("outputs:sdIMInstanceSemanticMap") db_value = database.outputs.sdIMInstanceSemanticMap self.assertTrue(test_node.get_attribute_exists("outputs:sdIMInstanceTokens")) attribute = test_node.get_attribute("outputs:sdIMInstanceTokens") db_value = database.outputs.sdIMInstanceTokens self.assertTrue(test_node.get_attribute_exists("outputs:sdIMLastUpdateTimeDenominator")) attribute = test_node.get_attribute("outputs:sdIMLastUpdateTimeDenominator") db_value = database.outputs.sdIMLastUpdateTimeDenominator self.assertTrue(test_node.get_attribute_exists("outputs:sdIMLastUpdateTimeNumerator")) attribute = test_node.get_attribute("outputs:sdIMLastUpdateTimeNumerator") db_value = database.outputs.sdIMLastUpdateTimeNumerator self.assertTrue(test_node.get_attribute_exists("outputs:sdIMMaxSemanticHierarchyDepth")) attribute = test_node.get_attribute("outputs:sdIMMaxSemanticHierarchyDepth") db_value = database.outputs.sdIMMaxSemanticHierarchyDepth self.assertTrue(test_node.get_attribute_exists("outputs:sdIMMinInstanceIndex")) attribute = test_node.get_attribute("outputs:sdIMMinInstanceIndex") db_value = database.outputs.sdIMMinInstanceIndex self.assertTrue(test_node.get_attribute_exists("outputs:sdIMMinSemanticIndex")) attribute = test_node.get_attribute("outputs:sdIMMinSemanticIndex") db_value = database.outputs.sdIMMinSemanticIndex self.assertTrue(test_node.get_attribute_exists("outputs:sdIMNumInstances")) attribute 
= test_node.get_attribute("outputs:sdIMNumInstances") db_value = database.outputs.sdIMNumInstances self.assertTrue(test_node.get_attribute_exists("outputs:sdIMNumSemanticTokens")) attribute = test_node.get_attribute("outputs:sdIMNumSemanticTokens") db_value = database.outputs.sdIMNumSemanticTokens self.assertTrue(test_node.get_attribute_exists("outputs:sdIMNumSemantics")) attribute = test_node.get_attribute("outputs:sdIMNumSemantics") db_value = database.outputs.sdIMNumSemantics self.assertTrue(test_node.get_attribute_exists("outputs:sdIMSemanticLocalTransform")) attribute = test_node.get_attribute("outputs:sdIMSemanticLocalTransform") db_value = database.outputs.sdIMSemanticLocalTransform self.assertTrue(test_node.get_attribute_exists("outputs:sdIMSemanticTokenMap")) attribute = test_node.get_attribute("outputs:sdIMSemanticTokenMap") db_value = database.outputs.sdIMSemanticTokenMap self.assertTrue(test_node.get_attribute_exists("outputs:sdIMSemanticWorldTransform")) attribute = test_node.get_attribute("outputs:sdIMSemanticWorldTransform") db_value = database.outputs.sdIMSemanticWorldTransform
5,727
Python
52.53271
100
0.726035
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdUpdateSwhFrameNumber.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdUpdateSwhFrameNumberDatabase import OgnSdUpdateSwhFrameNumberDatabase test_file_name = "OgnSdUpdateSwhFrameNumberTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdUpdateSwFrameNumber") database = OgnSdUpdateSwhFrameNumberDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:swhFrameNumber")) attribute = test_node.get_attribute("outputs:swhFrameNumber") db_value = database.outputs.swhFrameNumber
1,933
Python
48.589742
110
0.713399
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/__init__.py
"""====== GENERATED BY omni.graph.tools - DO NOT EDIT ======""" import omni.graph.tools._internal as ogi ogi.import_tests_in_directory(__file__, __name__)
155
Python
37.999991
63
0.645161
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdFrameIdentifier.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdFrameIdentifierDatabase import OgnSdFrameIdentifierDatabase test_file_name = "OgnSdFrameIdentifierTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdFrameIdentifier") database = OgnSdFrameIdentifierDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:renderResults")) attribute = test_node.get_attribute("inputs:renderResults") db_value = database.inputs.renderResults expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("outputs:durationDenominator")) attribute = test_node.get_attribute("outputs:durationDenominator") db_value = database.outputs.durationDenominator self.assertTrue(test_node.get_attribute_exists("outputs:durationNumerator")) attribute = test_node.get_attribute("outputs:durationNumerator") db_value = database.outputs.durationNumerator self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:externalTimeOfSimNs")) attribute = test_node.get_attribute("outputs:externalTimeOfSimNs") db_value = database.outputs.externalTimeOfSimNs self.assertTrue(test_node.get_attribute_exists("outputs:frameNumber")) attribute = test_node.get_attribute("outputs:frameNumber") db_value = database.outputs.frameNumber self.assertTrue(test_node.get_attribute_exists("outputs:rationalTimeOfSimDenominator")) attribute = test_node.get_attribute("outputs:rationalTimeOfSimDenominator") db_value = database.outputs.rationalTimeOfSimDenominator self.assertTrue(test_node.get_attribute_exists("outputs:rationalTimeOfSimNumerator")) attribute = test_node.get_attribute("outputs:rationalTimeOfSimNumerator") db_value = database.outputs.rationalTimeOfSimNumerator self.assertTrue(test_node.get_attribute_exists("outputs:sampleTimeOffsetInSimFrames")) attribute = test_node.get_attribute("outputs:sampleTimeOffsetInSimFrames") db_value = database.outputs.sampleTimeOffsetInSimFrames self.assertTrue(test_node.get_attribute_exists("outputs:type")) attribute = test_node.get_attribute("outputs:type") db_value = database.outputs.type
4,073
Python
50.56962
100
0.718389
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostCompRenderVarTextures.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdPostCompRenderVarTexturesDatabase import OgnSdPostCompRenderVarTexturesDatabase test_file_name = "OgnSdPostCompRenderVarTexturesTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostCompRenderVarTextures") database = OgnSdPostCompRenderVarTexturesDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:cudaPtr")) attribute = test_node.get_attribute("inputs:cudaPtr") db_value = database.inputs.cudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:format")) attribute = test_node.get_attribute("inputs:format") db_value = database.inputs.format expected_value = 0 actual_value = 
og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:height")) attribute = test_node.get_attribute("inputs:height") db_value = database.inputs.height expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:mode")) attribute = test_node.get_attribute("inputs:mode") db_value = database.inputs.mode expected_value = "line" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:parameters")) attribute = test_node.get_attribute("inputs:parameters") db_value = database.inputs.parameters expected_value = [0, 0, 0] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:renderVar")) attribute = test_node.get_attribute("inputs:renderVar") db_value = database.inputs.renderVar expected_value = "LdrColor" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) 
ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:rp")) attribute = test_node.get_attribute("inputs:rp") db_value = database.inputs.rp expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:width")) attribute = test_node.get_attribute("inputs:width") db_value = database.inputs.width expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
5,440
Python
51.825242
120
0.690625
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdRenderVarDisplayTexture.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdRenderVarDisplayTextureDatabase import OgnSdRenderVarDisplayTextureDatabase test_file_name = "OgnSdRenderVarDisplayTextureTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdRenderVarDisplayTexture") database = OgnSdRenderVarDisplayTextureDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 2) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:renderResults")) attribute = test_node.get_attribute("inputs:renderResults") db_value = database.inputs.renderResults expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("inputs:renderVarDisplay")) attribute = test_node.get_attribute("inputs:renderVarDisplay") db_value = database.inputs.renderVarDisplay expected_value = "" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:cudaPtr")) attribute = test_node.get_attribute("outputs:cudaPtr") db_value = database.outputs.cudaPtr self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:format")) attribute = test_node.get_attribute("outputs:format") db_value = database.outputs.format self.assertTrue(test_node.get_attribute_exists("outputs:height")) attribute = test_node.get_attribute("outputs:height") db_value = database.outputs.height self.assertTrue(test_node.get_attribute_exists("outputs:referenceTimeDenominator")) attribute = test_node.get_attribute("outputs:referenceTimeDenominator") db_value = database.outputs.referenceTimeDenominator self.assertTrue(test_node.get_attribute_exists("outputs:referenceTimeNumerator")) attribute = test_node.get_attribute("outputs:referenceTimeNumerator") db_value = database.outputs.referenceTimeNumerator self.assertTrue(test_node.get_attribute_exists("outputs:rpResourcePtr")) attribute = test_node.get_attribute("outputs:rpResourcePtr") db_value = database.outputs.rpResourcePtr self.assertTrue(test_node.get_attribute_exists("outputs:width")) attribute = test_node.get_attribute("outputs:width") db_value = database.outputs.width
4,205
Python
49.674698
116
0.707729
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdTestInstanceMapping.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdTestInstanceMappingDatabase import OgnSdTestInstanceMappingDatabase test_file_name = "OgnSdTestInstanceMappingTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdTestInstanceMapping") database = OgnSdTestInstanceMappingDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:instanceMapPtr")) attribute = test_node.get_attribute("inputs:instanceMapPtr") db_value = database.inputs.instanceMapPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("inputs:instancePrimPathPtr")) attribute = test_node.get_attribute("inputs:instancePrimPathPtr") db_value = database.inputs.instancePrimPathPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:minInstanceIndex")) attribute = test_node.get_attribute("inputs:minInstanceIndex") db_value = database.inputs.minInstanceIndex expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:minSemanticIndex")) attribute = test_node.get_attribute("inputs:minSemanticIndex") db_value = database.inputs.minSemanticIndex expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:numInstances")) attribute = test_node.get_attribute("inputs:numInstances") db_value = database.inputs.numInstances expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:numSemantics")) attribute = test_node.get_attribute("inputs:numSemantics") db_value = database.inputs.numSemantics expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("inputs:semanticLabelTokenPtrs")) attribute = test_node.get_attribute("inputs:semanticLabelTokenPtrs") db_value = database.inputs.semanticLabelTokenPtrs expected_value = [] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticLocalTransformPtr")) attribute = test_node.get_attribute("inputs:semanticLocalTransformPtr") db_value = database.inputs.semanticLocalTransformPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticMapPtr")) attribute = test_node.get_attribute("inputs:semanticMapPtr") db_value = database.inputs.semanticMapPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticPrimPathPtr")) attribute = test_node.get_attribute("inputs:semanticPrimPathPtr") db_value = database.inputs.semanticPrimPathPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticWorldTransformPtr")) attribute = test_node.get_attribute("inputs:semanticWorldTransformPtr") db_value = database.inputs.semanticWorldTransformPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) 
ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:stage")) attribute = test_node.get_attribute("inputs:stage") db_value = database.inputs.stage expected_value = "" actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:swhFrameNumber")) attribute = test_node.get_attribute("inputs:swhFrameNumber") db_value = database.inputs.swhFrameNumber expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:testCaseIndex")) attribute = test_node.get_attribute("inputs:testCaseIndex") db_value = database.inputs.testCaseIndex expected_value = -1 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:semanticFilterPredicate")) attribute = test_node.get_attribute("outputs:semanticFilterPredicate") db_value = database.outputs.semanticFilterPredicate self.assertTrue(test_node.get_attribute_exists("outputs:success")) attribute = test_node.get_attribute("outputs:success") db_value = database.outputs.success
8,717
Python
53.830188
108
0.702765
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostSemantic3dBoundingBoxFilter.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdPostSemantic3dBoundingBoxFilterDatabase import OgnSdPostSemantic3dBoundingBoxFilterDatabase test_file_name = "OgnSdPostSemantic3dBoundingBoxFilterTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostSemantic3dBoundingBoxFilter") database = OgnSdPostSemantic3dBoundingBoxFilterDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:gpu")) attribute = test_node.get_attribute("inputs:gpu") db_value = database.inputs.gpu expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) 
self.assertTrue(test_node.get_attribute_exists("inputs:instanceMappingInfoSDPtr")) attribute = test_node.get_attribute("inputs:instanceMappingInfoSDPtr") db_value = database.inputs.instanceMappingInfoSDPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:metersPerSceneUnit")) attribute = test_node.get_attribute("inputs:metersPerSceneUnit") db_value = database.inputs.metersPerSceneUnit expected_value = 0.01 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:rp")) attribute = test_node.get_attribute("inputs:rp") db_value = database.inputs.rp expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:sdSemBBox3dCamCornersCudaPtr")) attribute = test_node.get_attribute("inputs:sdSemBBox3dCamCornersCudaPtr") db_value = database.inputs.sdSemBBox3dCamCornersCudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:sdSemBBoxInfosCudaPtr")) attribute = test_node.get_attribute("inputs:sdSemBBoxInfosCudaPtr") db_value = database.inputs.sdSemBBoxInfosCudaPtr expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) 
ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:viewportNearFar")) attribute = test_node.get_attribute("inputs:viewportNearFar") db_value = database.inputs.viewportNearFar expected_value = [0.0, -1.0] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:sdSemBBoxInfosCudaPtr")) attribute = test_node.get_attribute("outputs:sdSemBBoxInfosCudaPtr") db_value = database.outputs.sdSemBBoxInfosCudaPtr
5,390
Python
53.454545
132
0.707236
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdInstanceMappingPtr.py
import os import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene class TestOgn(ogts.OmniGraphTestCase): async def test_data_access(self): from omni.syntheticdata.ogn.OgnSdInstanceMappingPtrDatabase import OgnSdInstanceMappingPtrDatabase test_file_name = "OgnSdInstanceMappingPtrTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdInstanceMappingPtr") database = OgnSdInstanceMappingPtrDatabase(test_node) self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 2) def _attr_error(attribute: og.Attribute, usd_test: bool) -> str: test_type = "USD Load" if usd_test else "Database Access" return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error" self.assertTrue(test_node.get_attribute_exists("inputs:cudaPtr")) attribute = test_node.get_attribute("inputs:cudaPtr") db_value = database.inputs.cudaPtr expected_value = False actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:exec")) attribute = test_node.get_attribute("inputs:exec") db_value = database.inputs.exec self.assertTrue(test_node.get_attribute_exists("inputs:renderResults")) 
attribute = test_node.get_attribute("inputs:renderResults") db_value = database.inputs.renderResults expected_value = 0 actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("inputs:semanticFilerTokens")) attribute = test_node.get_attribute("inputs:semanticFilerTokens") db_value = database.inputs.semanticFilerTokens expected_value = [] actual_value = og.Controller.get(attribute) ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True)) ogts.verify_values(expected_value, db_value, _attr_error(attribute, False)) self.assertTrue(test_node.get_attribute_exists("outputs:cudaDeviceIndex")) attribute = test_node.get_attribute("outputs:cudaDeviceIndex") db_value = database.outputs.cudaDeviceIndex self.assertTrue(test_node.get_attribute_exists("outputs:exec")) attribute = test_node.get_attribute("outputs:exec") db_value = database.outputs.exec self.assertTrue(test_node.get_attribute_exists("outputs:instanceMapPtr")) attribute = test_node.get_attribute("outputs:instanceMapPtr") db_value = database.outputs.instanceMapPtr self.assertTrue(test_node.get_attribute_exists("outputs:instancePrimPathPtr")) attribute = test_node.get_attribute("outputs:instancePrimPathPtr") db_value = database.outputs.instancePrimPathPtr self.assertTrue(test_node.get_attribute_exists("outputs:lastUpdateTimeDenominator")) attribute = test_node.get_attribute("outputs:lastUpdateTimeDenominator") db_value = database.outputs.lastUpdateTimeDenominator self.assertTrue(test_node.get_attribute_exists("outputs:lastUpdateTimeNumerator")) attribute = test_node.get_attribute("outputs:lastUpdateTimeNumerator") db_value = database.outputs.lastUpdateTimeNumerator self.assertTrue(test_node.get_attribute_exists("outputs:minInstanceIndex")) attribute = test_node.get_attribute("outputs:minInstanceIndex") db_value 
= database.outputs.minInstanceIndex self.assertTrue(test_node.get_attribute_exists("outputs:minSemanticIndex")) attribute = test_node.get_attribute("outputs:minSemanticIndex") db_value = database.outputs.minSemanticIndex self.assertTrue(test_node.get_attribute_exists("outputs:numInstances")) attribute = test_node.get_attribute("outputs:numInstances") db_value = database.outputs.numInstances self.assertTrue(test_node.get_attribute_exists("outputs:numSemantics")) attribute = test_node.get_attribute("outputs:numSemantics") db_value = database.outputs.numSemantics self.assertTrue(test_node.get_attribute_exists("outputs:semanticLabelTokenPtrs")) attribute = test_node.get_attribute("outputs:semanticLabelTokenPtrs") db_value = database.outputs.semanticLabelTokenPtrs self.assertTrue(test_node.get_attribute_exists("outputs:semanticLocalTransformPtr")) attribute = test_node.get_attribute("outputs:semanticLocalTransformPtr") db_value = database.outputs.semanticLocalTransformPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticMapPtr")) attribute = test_node.get_attribute("outputs:semanticMapPtr") db_value = database.outputs.semanticMapPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticPrimPathPtr")) attribute = test_node.get_attribute("outputs:semanticPrimPathPtr") db_value = database.outputs.semanticPrimPathPtr self.assertTrue(test_node.get_attribute_exists("outputs:semanticWorldTransformPtr")) attribute = test_node.get_attribute("outputs:semanticWorldTransformPtr") db_value = database.outputs.semanticWorldTransformPtr
6,279
Python
51.773109
106
0.718904
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdTestSimFabricTimeRange.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the SdTestSimFabricTimeRange template scene and check every
        attribute: it exists, its USD-loaded value matches the declared
        default, and the database accessor returns the same value.
        """
        from omni.syntheticdata.ogn.OgnSdTestSimFabricTimeRangeDatabase import OgnSdTestSimFabricTimeRangeDatabase
        test_file_name = "OgnSdTestSimFabricTimeRangeTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdTestSimFabricTimeRange")
        database = OgnSdTestSimFabricTimeRangeDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that only get an existence/access check.
        no_default = object()
        # (attribute path, expected default) in the original emission order.
        attr_expectations = [
            ("inputs:numberOfFrames", 0),
            ("inputs:timeRangeBeginDenominatorToken", "timeRangeStartDenominator"),
            ("inputs:timeRangeBeginNumeratorToken", "timeRangeStartNumerator"),
            ("inputs:timeRangeEndDenominatorToken", "timeRangeEndDenominator"),
            ("inputs:timeRangeEndNumeratorToken", "timeRangeEndNumerator"),
            ("inputs:timeRangeName", "TestSimFabricTimeRangeSD"),
            ("outputs:exec", no_default),
        ]
        for attr_path, expected_value in attr_expectations:
            self.assertTrue(test_node.get_attribute_exists(attr_path))
            attribute = test_node.get_attribute(attr_path)
            namespace, _, member = attr_path.partition(":")
            # Touch the database accessor exactly as the generated test does.
            db_value = getattr(getattr(database, namespace), member)
            if expected_value is not no_default:
                actual_value = og.Controller.get(attribute)
                ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
                ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
4,732
Python
56.024096
114
0.719146
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdSemanticFilter.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the SdSemanticFilter template scene and check every attribute:
        it exists, its USD-loaded value matches the declared default, and the
        database accessor returns the same value.
        """
        from omni.syntheticdata.ogn.OgnSdSemanticFilterDatabase import OgnSdSemanticFilterDatabase
        test_file_name = "OgnSdSemanticFilterTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdSemanticFilter")
        database = OgnSdSemanticFilterDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that only get an existence/access check.
        no_default = object()
        # (attribute path, expected default) in the original emission order.
        attr_expectations = [
            ("inputs:exec", no_default),
            ("inputs:hierarchicalLabels", False),
            ("inputs:matchingLabels", True),
            ("inputs:name", ""),
            ("inputs:predicate", ""),
            ("outputs:exec", no_default),
            ("outputs:name", no_default),
            ("outputs:predicate", no_default),
        ]
        for attr_path, expected_value in attr_expectations:
            self.assertTrue(test_node.get_attribute_exists(attr_path))
            attribute = test_node.get_attribute(attr_path)
            namespace, _, member = attr_path.partition(":")
            # Touch the database accessor exactly as the generated test does.
            db_value = getattr(getattr(database, namespace), member)
            if expected_value is not no_default:
                actual_value = og.Controller.get(attribute)
                ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
                ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
4,014
Python
49.822784
98
0.696064
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostRenderVarDisplayTexture.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the SdPostRenderVarDisplayTexture template scene and check
        every attribute: it exists, its USD-loaded value matches the declared
        default, and the database accessor returns the same value.
        """
        from omni.syntheticdata.ogn.OgnSdPostRenderVarDisplayTextureDatabase import OgnSdPostRenderVarDisplayTextureDatabase
        test_file_name = "OgnSdPostRenderVarDisplayTextureTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostRenderVarDisplayTexture")
        database = OgnSdPostRenderVarDisplayTextureDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that only get an existence/access check.
        no_default = object()
        # (attribute path, expected default) in the original emission order.
        attr_expectations = [
            ("inputs:cameraFisheyeParams", []),
            ("inputs:cameraModel", 0),
            ("inputs:cameraNearFar", [0.0, 0.0]),
            ("inputs:exec", no_default),
            ("inputs:gpu", 0),
            ("inputs:instanceMapSDCudaPtr", 0),
            ("inputs:instanceMappingInfoSDPtr", 0),
            ("inputs:metersPerSceneUnit", 0.0),
            ("inputs:mode", "autoMode"),
            ("inputs:parameters", [0.0, 5.0, 0.33, 0.27]),
            ("inputs:renderVar", ""),
            ("inputs:renderVarDisplay", ""),
            ("inputs:rp", 0),
            ("inputs:sdDisplayHeight", 0),
            ("inputs:sdDisplayWidth", 0),
            ("inputs:sdSemBBox3dCamCornersCudaPtr", 0),
            ("inputs:sdSemBBox3dCamExtentCudaPtr", 0),
            ("inputs:sdSemBBoxExtentCudaPtr", 0),
            ("inputs:sdSemBBoxInfosCudaPtr", 0),
            ("inputs:semanticLabelTokenSDCudaPtr", 0),
            ("inputs:semanticMapSDCudaPtr", 0),
            ("inputs:semanticPrimTokenSDCudaPtr", 0),
            ("inputs:semanticWorldTransformSDCudaPtr", 0),
            ("outputs:cudaPtr", no_default),
            ("outputs:exec", no_default),
            ("outputs:format", no_default),
            ("outputs:height", no_default),
            ("outputs:renderVarDisplay", no_default),
            ("outputs:width", no_default),
        ]
        for attr_path, expected_value in attr_expectations:
            self.assertTrue(test_node.get_attribute_exists(attr_path))
            attribute = test_node.get_attribute(attr_path)
            namespace, _, member = attr_path.partition(":")
            # Touch the database accessor exactly as the generated test does.
            db_value = getattr(getattr(database, namespace), member)
            if expected_value is not no_default:
                actual_value = og.Controller.get(attribute)
                ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
                ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
13,048
Python
54.527659
124
0.702636
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostRenderVarToHost.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the SdPostRenderVarToHost template scene and check every
        attribute: it exists, its USD-loaded value matches the declared
        default, and the database accessor returns the same value.
        """
        from omni.syntheticdata.ogn.OgnSdPostRenderVarToHostDatabase import OgnSdPostRenderVarToHostDatabase
        test_file_name = "OgnSdPostRenderVarToHostTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostRenderVarToHost")
        database = OgnSdPostRenderVarToHostDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that only get an existence/access check.
        no_default = object()
        # (attribute path, expected default) in the original emission order.
        attr_expectations = [
            ("inputs:exec", no_default),
            ("inputs:gpu", 0),
            ("inputs:renderVar", ""),
            ("inputs:renderVarHostSuffix", "host"),
            ("inputs:rp", 0),
            ("outputs:exec", no_default),
            ("outputs:renderVar", no_default),
        ]
        for attr_path, expected_value in attr_expectations:
            self.assertTrue(test_node.get_attribute_exists(attr_path))
            attribute = test_node.get_attribute(attr_path)
            namespace, _, member = attr_path.partition(":")
            # Touch the database accessor exactly as the generated test does.
            db_value = getattr(getattr(database, namespace), member)
            if expected_value is not no_default:
                actual_value = og.Controller.get(attribute)
                ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
                ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
3,826
Python
50.026666
108
0.696027
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdFabricTimeRangeExecution.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the SdFabricTimeRangeExecution template scene and check every
        attribute: it exists, its USD-loaded value matches the declared
        default, and the database accessor returns the same value.
        """
        from omni.syntheticdata.ogn.OgnSdFabricTimeRangeExecutionDatabase import OgnSdFabricTimeRangeExecutionDatabase
        test_file_name = "OgnSdFabricTimeRangeExecutionTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdFabricTimeRangeExecution")
        database = OgnSdFabricTimeRangeExecutionDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that only get an existence/access check.
        no_default = object()
        # (attribute path, expected default) in the original emission order.
        attr_expectations = [
            ("inputs:exec", no_default),
            ("inputs:gpu", 0),
            ("inputs:renderResults", 0),
            ("inputs:timeRangeBeginDenominatorToken", "timeRangeStartDenominator"),
            ("inputs:timeRangeBeginNumeratorToken", "timeRangeStartNumerator"),
            ("inputs:timeRangeEndDenominatorToken", "timeRangeEndDenominator"),
            ("inputs:timeRangeEndNumeratorToken", "timeRangeEndNumerator"),
            ("inputs:timeRangeName", ""),
            ("outputs:exec", no_default),
            ("outputs:timeRangeBeginDenominator", no_default),
            ("outputs:timeRangeBeginNumerator", no_default),
            ("outputs:timeRangeEndDenominator", no_default),
            ("outputs:timeRangeEndNumerator", no_default),
        ]
        for attr_path, expected_value in attr_expectations:
            self.assertTrue(test_node.get_attribute_exists(attr_path))
            attribute = test_node.get_attribute(attr_path)
            namespace, _, member = attr_path.partition(":")
            # Touch the database accessor exactly as the generated test does.
            db_value = getattr(getattr(database, namespace), member)
            if expected_value is not no_default:
                actual_value = og.Controller.get(attribute)
                ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
                ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
6,228
Python
55.117117
118
0.71885
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdOnNewRenderProductFrame.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the node's template scene and verify each attribute exists, with its default where one is defined."""
        from omni.syntheticdata.ogn.OgnSdOnNewRenderProductFrameDatabase import OgnSdOnNewRenderProductFrameDatabase
        test_file_name = "OgnSdOnNewRenderProductFrameTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdOnNewRenderProductFrame")
        database = OgnSdOnNewRenderProductFrameDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            # Compose a readable failure message for either the USD-load or the database-access comparison.
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that are only checked for existence/accessibility (no default to compare).
        no_default = object()
        # (attribute name, expected default value or sentinel)
        attribute_checks = [
            ("inputs:exec", no_default),
            ("inputs:renderProductDataPtrs", []),
            ("inputs:renderProductPath", ""),
            ("inputs:renderProductPaths", []),
            ("outputs:cudaStream", no_default),
            ("outputs:exec", no_default),
            ("outputs:renderProductPath", no_default),
            ("outputs:renderResults", no_default),
        ]
        for attr_name, expected_value in attribute_checks:
            self.assertTrue(test_node.get_attribute_exists(attr_name))
            attribute = test_node.get_attribute(attr_name)
            port_name, _, member_name = attr_name.partition(":")
            # Database access mirrors the generated accessors, e.g. database.inputs.renderProductPath.
            db_value = getattr(getattr(database, port_name), member_name)
            if expected_value is no_default:
                continue
            actual_value = og.Controller.get(attribute)
            ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
            ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
3,920
Python
51.279999
116
0.708673
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdTimeChangeExecution.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the node's template scene and verify each attribute exists, with its default where one is defined."""
        from omni.syntheticdata.ogn.OgnSdTimeChangeExecutionDatabase import OgnSdTimeChangeExecutionDatabase
        test_file_name = "OgnSdTimeChangeExecutionTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdTimeChangeExecution")
        database = OgnSdTimeChangeExecutionDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            # Compose a readable failure message for either the USD-load or the database-access comparison.
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that are only checked for existence/accessibility (no default to compare).
        no_default = object()
        # (attribute name, expected default value or sentinel)
        attribute_checks = [
            ("inputs:errorOnFutureChange", False),
            ("inputs:exec", no_default),
            ("inputs:lastUpdateTimeDenominator", 0),
            ("inputs:lastUpdateTimeNumerator", 0),
            ("inputs:renderResults", 0),
            ("outputs:exec", no_default),
        ]
        for attr_name, expected_value in attribute_checks:
            self.assertTrue(test_node.get_attribute_exists(attr_name))
            attribute = test_node.get_attribute(attr_name)
            port_name, _, member_name = attr_name.partition(":")
            # Database access mirrors the generated accessors, e.g. database.inputs.renderResults.
            db_value = getattr(getattr(database, port_name), member_name)
            if expected_value is no_default:
                continue
            actual_value = og.Controller.get(attribute)
            ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
            ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
3,776
Python
52.197182
108
0.708157
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/ogn/tests/TestOgnSdPostSemanticBoundingBox.py
import os
import omni.kit.test
import omni.graph.core as og
import omni.graph.core.tests as ogts
from omni.graph.core.tests.omnigraph_test_utils import _TestGraphAndNode
from omni.graph.core.tests.omnigraph_test_utils import _test_clear_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_setup_scene
from omni.graph.core.tests.omnigraph_test_utils import _test_verify_scene


class TestOgn(ogts.OmniGraphTestCase):

    async def test_data_access(self):
        """Load the node's template scene and verify each attribute exists, with its default where one is defined."""
        from omni.syntheticdata.ogn.OgnSdPostSemanticBoundingBoxDatabase import OgnSdPostSemanticBoundingBoxDatabase
        test_file_name = "OgnSdPostSemanticBoundingBoxTemplate.usda"
        usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name)
        if not os.path.exists(usd_path):
            self.assertTrue(False, f"{usd_path} not found for loading test")
        (result, error) = await ogts.load_test_file(usd_path)
        self.assertTrue(result, f'{error} on {usd_path}')
        test_node = og.Controller.node("/TestGraph/Template_omni_syntheticdata_SdPostSemanticBoundingBox")
        database = OgnSdPostSemanticBoundingBoxDatabase(test_node)
        self.assertTrue(test_node.is_valid())
        node_type_name = test_node.get_type_name()
        self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1)

        def _attr_error(attribute: og.Attribute, usd_test: bool) -> str:
            # Compose a readable failure message for either the USD-load or the database-access comparison.
            test_type = "USD Load" if usd_test else "Database Access"
            return f"{node_type_name} {test_type} Test - {attribute.get_name()} value error"

        # Sentinel for attributes that are only checked for existence/accessibility (no default to compare).
        no_default = object()
        # (attribute name, expected default value or sentinel)
        attribute_checks = [
            ("inputs:exec", no_default),
            ("inputs:gpu", 0),
            ("inputs:instanceMapSDCudaPtr", 0),
            ("inputs:instanceMappingInfoSDPtr", 0),
            ("inputs:renderProductResolution", [0, 0]),
            ("inputs:renderVar", ""),
            ("inputs:rp", 0),
            ("inputs:semanticLocalTransformSDCudaPtr", 0),
            ("inputs:semanticMapSDCudaPtr", 0),
            ("outputs:exec", no_default),
            ("outputs:sdSemBBoxExtentCudaPtr", no_default),
            ("outputs:sdSemBBoxInfosCudaPtr", no_default),
        ]
        for attr_name, expected_value in attribute_checks:
            self.assertTrue(test_node.get_attribute_exists(attr_name))
            attribute = test_node.get_attribute(attr_name)
            port_name, _, member_name = attr_name.partition(":")
            # Database access mirrors the generated accessors, e.g. database.inputs.instanceMapSDCudaPtr.
            db_value = getattr(getattr(database, port_name), member_name)
            if expected_value is no_default:
                continue
            actual_value = og.Controller.get(attribute)
            ogts.verify_values(expected_value, actual_value, _attr_error(attribute, True))
            ogts.verify_values(expected_value, db_value, _attr_error(attribute, False))
6,043
Python
53.45045
116
0.707099
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/viewport_legacy.py
from pathlib import Path
from pxr import Sdf
import carb.settings
import omni.ui as ui
import omni.usd
from .SyntheticData import SyntheticData
from .visualizer_window import VisualizerWindow
import weakref

# Extension data directory (three levels up from this scripts/ folder) holding the sensor icon.
CURRENT_PATH = Path(__file__).parent.absolute()
ICON_PATH = CURRENT_PATH.parent.parent.parent.joinpath("data")

# Style applied to the sensor toolbar button embedded in each legacy viewport.
BUTTON_STYLE = {
    "height": 22,
    "width": 26,
    "style": {"Button": {"padding": 4, "background_color": 0x80303030}},
    "image_height": 14,
    "image_width": 26,
}

# Window flags for the popup sensor-selection menu (borderless popup sized to content).
MENU_FLAGS = {"flags": ui.WINDOW_FLAGS_POPUP | ui.WINDOW_FLAGS_NO_TITLE_BAR, "auto_resize": True}


class ViewportLegacy:
    """Per-viewport UI delegate bridging the legacy viewport API and the synthetic-data visualizer window.

    Class-level state:
        _g_visualizers: maps viewport window name -> (VisualizerWindow, ViewportLegacy delegate).
        _g_iface: cached omni.kit.viewport_legacy interface, set by create_update_subscription().
    """
    _g_visualizers = {}
    _g_iface = None

    @staticmethod
    def create_update_subscription():
        """Acquire the legacy viewport interface and subscribe _on_update to the app update stream.

        Returns the subscription object (caller must keep it alive), or None when the
        legacy viewport interface is unavailable.
        """
        import omni.kit.viewport_legacy
        ViewportLegacy._g_iface = omni.kit.viewport_legacy.get_viewport_interface()
        if ViewportLegacy._g_iface is None:
            return
        import omni.kit.app
        event_stream = omni.kit.app.get_app().get_update_event_stream()
        return event_stream.create_subscription_to_pop(ViewportLegacy._on_update, name="omni.syntheticdata update")

    @staticmethod
    def close_viewports():
        """Close every tracked visualizer and destroy its delegate, clearing the registry atomically first."""
        # Swap-then-iterate so re-entrant calls see an already-empty registry.
        visualizers, ViewportLegacy._g_visualizers = ViewportLegacy._g_visualizers, {}
        if visualizers:
            for visualizer, vp_delegate in visualizers.values():
                visualizer.close()
                vp_delegate.destroy()

    @staticmethod
    def _on_update(dt):
        """Per-frame sync: reconcile tracked visualizers with the live viewport list, then refresh each one.

        Args:
            dt: frame delta time from the app update event (unused).
        """
        stage = omni.usd.get_context().get_stage()
        if stage is None:
            return
        # retrieve the list of active viewports
        viewport_names = set([ViewportLegacy._g_iface.get_viewport_window_name(vp) for vp in ViewportLegacy._g_iface.get_instance_list()])
        visualizers = ViewportLegacy._g_visualizers
        # remove obsolete extension viewports data
        for vp_name in set(visualizers.keys()).difference(viewport_names):
            visualizer, vp_delegate = visualizers[vp_name]
            visualizer.close()
            vp_delegate.destroy()
            del visualizers[vp_name]
        # create missing extension viewports data
        for vp_name in viewport_names.difference(set(visualizers.keys())):
            vp_delegate = ViewportLegacy(vp_name)
            visualizer_window = VisualizerWindow(vp_name, vp_delegate)
            # weakref.proxy avoids a reference cycle between the delegate and its visualizer window
            vp_delegate.set_visualizer_window(weakref.proxy(visualizer_window))
            visualizers[vp_name] = visualizer_window, vp_delegate
        # update all valid viewport
        for vp_name, vis_and_delegate in visualizers.items():
            legacy_vp = ViewportLegacy._g_iface.get_viewport_window(ViewportLegacy._g_iface.get_instance(vp_name))
            if legacy_vp:
                visualizer, vp_delegate = vis_and_delegate
                camera_path = legacy_vp.get_active_camera()
                vp_delegate._update_legacy_buttons(Sdf.Path(camera_path).name, legacy_vp.is_visible())
                visualizer.update(legacy_vp.get_render_product_path(), stage)

    def __init__(self, name: str):
        """Build the sensor toolbar button overlay for the legacy viewport window named `name`."""
        self.__window_name = name
        self.__visualizer_window = None
        # initialize ui
        self.__menus = None
        self.__btns = {"window": ui.Window(name, detachable=False)}
        with self.__btns["window"].frame:
            with ui.VStack():
                ui.Spacer(height=4)
                with ui.HStack(height=0, width=0):
                    # Spacer pushes the icon right of the viewport's built-in menus; resized in _update_legacy_buttons.
                    self.__btns["spacer"] = ui.Spacer(width=300)
                    self.__btns["icon"] = ui.Button(
                        tooltip="Synthetic Data Sensors", image_url=f"{ICON_PATH}/sensor_icon.svg", **BUTTON_STYLE
                    )
                    self.__btns["icon"].set_mouse_pressed_fn(lambda x, y, *_: self._show_legacy_ui_menu(x, y))

    def __del__(self):
        self.destroy()

    def destroy(self):
        """Release all UI objects and references held by this delegate (idempotent)."""
        self.__btns = None
        self.__menus = None
        self.__window_name = None
        self.__visualizer_window = None

    def set_visualizer_window(self, visualizer_window):
        """Attach the visualizer window (typically a weakref proxy) and apply the default sensor activation."""
        self.__visualizer_window = visualizer_window
        self._reset_to_default(None)

    def _update_legacy_buttons(self, cam_name: str, is_visible: bool):
        # update the buttons in a legacy viewport (dependent on camera name length)
        # The spacer width is estimated from the camera-name and render-mode label lengths so the
        # sensor icon lands just after the viewport's built-in menu bar. NOTE(review): the 12/15/31
        # character counts and 15px-per-character factor look empirically tuned — confirm before changing.
        render_mode = carb.settings.get_settings().get("/rtx/rendermode")
        render_spacing = 15
        if render_mode == "RaytracedLighting":
            render_spacing = 12
        elif render_mode == "PathTracing":
            render_spacing = 31
        spacing = 5 + (len(cam_name) + render_spacing) * 15
        self.__btns["spacer"].width = ui.Length(max(300, spacing))
        self.__btns["window"].visible = is_visible

    def _build_legacy_ui_menu(self):
        """Construct the popup sensor menu (render product/var selectors, sensor checkboxes, action buttons)."""
        self.__menus = ui.Window(f"{self.__window_name}-sensor-menu", **MENU_FLAGS)
        with self.__menus.frame:
            with ui.VStack(width=200, spacing=5):
                render_product_combo_model = self.__visualizer_window.render_product_combo_model
                if render_product_combo_model:
                    with ui.HStack(height=40):
                        ui.Label("RenderProduct", width=150)
                        ui.ComboBox(render_product_combo_model)
                render_var_combo_model = self.__visualizer_window.render_var_combo_model
                if render_var_combo_model:
                    with ui.HStack(height=40):
                        ui.Label("RenderVar", width=150)
                        ui.ComboBox(render_var_combo_model)
                    # Sliders tuning how the selected render var is combined into the display.
                    with ui.HStack(height=20):
                        model = ui.FloatSlider(name="angle", min=-100.0, max=100.0).model
                        model.add_value_changed_fn(
                            lambda m: render_var_combo_model.set_combine_angle(m.get_value_as_float())
                        )
                        model = ui.FloatSlider(name="x", min=-100.0, max=100.0).model
                        model.add_value_changed_fn(
                            lambda m: render_var_combo_model.set_combine_divide_x(m.get_value_as_float())
                        )
                        model = ui.FloatSlider(name="y", min=-100.0, max=100.0).model
                        model.add_value_changed_fn(
                            lambda m: render_var_combo_model.set_combine_divide_y(m.get_value_as_float())
                        )
                with ui.HStack(height=40):
                    ui.Label("Synthetic Data Sensors", width=150)
                    clear_btn = ui.Button("Clear All")
                show_default_btns = carb.settings.get_settings().get_as_bool("/exts/omni.syntheticdata/menubar/showSensorDefaultButton")
                if show_default_btns:
                    with ui.HStack(height=40):
                        set_as_default_btn = ui.Button("Set as default")
                        reset_to_default_btn = ui.Button("Reset to default")
                selection_stack = ui.VStack(spacing=5)
                clear_btn.set_clicked_fn(lambda ss=selection_stack: self._clear_all(ss))
                if show_default_btns:
                    set_as_default_btn.set_clicked_fn(lambda ss=selection_stack: self._set_as_default())
                    reset_to_default_btn.set_clicked_fn(lambda ss=selection_stack: self._reset_to_default(ss))
                selection_stack.clear()
                with selection_stack:
                    self._build_ui_sensor_selection()
        # Built hidden; _show_legacy_ui_menu positions it under the cursor and makes it visible.
        self.__menus.visible = False

    # callback to reset the sensor selection
    def _clear_all(self, selection_stack):
        """Deactivate every sensor and rebuild the checkbox list in `selection_stack`."""
        if self.__visualizer_window:
            self.__visualizer_window.visualization_activation.clear()
        selection_stack.clear()
        with selection_stack:
            self._build_ui_sensor_selection()

    def _set_as_default(self):
        """Persist the currently activated sensors as the default activation set."""
        if self.__visualizer_window:
            for sensor in self.__visualizer_window.visualization_activation:
                SyntheticData.set_visualization_template_name_default_activation(sensor, True)

    def _reset_to_default(self, selection_stack):
        # reset the selection
        """Replace the current activation with the registered defaults; rebuild UI if a stack is given.

        Args:
            selection_stack: the ui.VStack holding the sensor checkboxes, or None to skip the UI rebuild.
        """
        if self.__visualizer_window:
            self.__visualizer_window.visualization_activation.clear()
            for _, sensor in SyntheticData.get_registered_visualization_template_names_for_display():
                if SyntheticData.get_visualization_template_name_default_activation(sensor):
                    self.__visualizer_window.visualization_activation.add(sensor)
        if not selection_stack is None:
            selection_stack.clear()
            with selection_stack:
                self._build_ui_sensor_selection()

    def _show_window(self):
        """Toggle the visualization output window for this viewport."""
        self.__visualizer_window.toggle_enable_visualization()

    def _build_ui_sensor_selection(self):
        """Emit one labeled checkbox per registered visualization sensor, plus the Show button."""
        for sensor_label, sensor in SyntheticData.get_registered_visualization_template_names_for_display():
            with ui.HStack():
                ui.Label(sensor_label, width=300)
                cb = ui.CheckBox(
                    width=0, style={"font_size": 24, "margin": 3}, style_type_name_override="Options.CheckBox"
                )
                cb.model.set_value(sensor in self.__visualizer_window.visualization_activation)
                # s=sensor binds the loop variable at definition time (avoids late-binding closure bug).
                cb.model.add_value_changed_fn(lambda c, s=sensor: self.__visualizer_window.on_sensor_item_clicked(c.as_bool, s))
        ui.Button("Show", height=40, clicked_fn=lambda: self._show_window())

    def _show_legacy_ui_menu(self, x, y):
        """Rebuild the sensor popup menu and show it at screen position (x, y)."""
        # Rebuild from scratch each time so the menu reflects the current sensor registry.
        self.__menus = None
        self._build_legacy_ui_menu()
        self.__menus.position_x = x
        self.__menus.position_y = y
        self.__menus.visible = True

    @property
    def render_product_path(self):
        """Render product prim path of the underlying legacy viewport, or None if the viewport is gone."""
        legacy_vp = ViewportLegacy._g_iface.get_viewport_window(ViewportLegacy._g_iface.get_instance(self.__window_name))
        return legacy_vp.get_render_product_path() if legacy_vp else None

    @render_product_path.setter
    def render_product_path(self, prim_path: str):
        legacy_vp = ViewportLegacy._g_iface.get_viewport_window(ViewportLegacy._g_iface.get_instance(self.__window_name))
        if legacy_vp:
            legacy_vp.set_render_product_path(prim_path)

    @property
    def usd_context(self):
        """USD context of the underlying viewport; falls back to the default context when the API lacks the getter."""
        legacy_vp = ViewportLegacy._g_iface.get_viewport_window(ViewportLegacy._g_iface.get_instance(self.__window_name))
        usd_context_name = legacy_vp.get_usd_context_name() if hasattr(legacy_vp, 'get_usd_context_name') else ''
        return omni.usd.get_context(usd_context_name)
10,552
Python
43.154812
138
0.599602
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/helpers.py
import math
from functools import lru_cache

import numpy.lib.recfunctions as rfn
import carb
import numpy as np
import omni.usd
from pxr import UsdGeom, UsdShade, Semantics

from .. import _syntheticdata

# Small epsilon used to avoid division by zero in projection math.
EPS = 1e-8


@lru_cache()
def _get_syntheticdata_iface():
    """Acquire and cache the native SyntheticData interface."""
    return _syntheticdata.acquire_syntheticdata_interface()


def _interpolate(p, a, b):
    """Linearly interpolate between RGB colors `a` and `b` at parameter `p` in [0, 1].

    Returns an RGBA list of ints with alpha fixed at 255.
    """
    p0 = 1.0 - p
    return [int(p0 * a[0] + p * b[0]), int(p0 * a[1] + p * b[1]), int(p0 * a[2] + p * b[2]), 255]


def get_bbox_3d_corners(extents):
    """Return transformed points in the following order: [LDB, RDB, LUB, RUB, LDF, RDF, LUF, RUF]
    where R=Right, L=Left, D=Down, U=Up, B=Back, F=Front and LR: x-axis, UD: y-axis, FB: z-axis.

    Args:
        extents (numpy.ndarray): A structured numpy array containing the fields:
            [`x_min`, `y_min`, `z_min`, `x_max`, `y_max`, `z_max`, `transform`].

    Returns:
        (numpy.ndarray): Transformed corner coordinates with shape `(N, 8, 3)`.
    """
    rdb = [extents["x_max"], extents["y_min"], extents["z_min"]]
    ldb = [extents["x_min"], extents["y_min"], extents["z_min"]]
    lub = [extents["x_min"], extents["y_max"], extents["z_min"]]
    rub = [extents["x_max"], extents["y_max"], extents["z_min"]]
    ldf = [extents["x_min"], extents["y_min"], extents["z_max"]]
    rdf = [extents["x_max"], extents["y_min"], extents["z_max"]]
    luf = [extents["x_min"], extents["y_max"], extents["z_max"]]
    ruf = [extents["x_max"], extents["y_max"], extents["z_max"]]
    tfs = extents["transform"]

    corners = np.stack((ldb, rdb, lub, rub, ldf, rdf, luf, ruf), 0)
    # Homogeneous coordinates so each corner can be transformed by its 4x4 matrix.
    corners_homo = np.pad(corners, ((0, 0), (0, 1), (0, 0)), constant_values=1.0)
    return np.einsum("jki,ikl->ijl", corners_homo, tfs)[..., :3]


def reduce_bboxes_2d(bboxes, instance_mappings):
    """
    Reduce 2D bounding boxes of leaf nodes to prims with a semantic label.

    Args:
        bboxes (numpy.ndarray): A structured numpy array containing the fields: `[("instanceId", "<u4"),
            ("semanticId", "<u4"), ("x_min", "<i4"), ("y_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4")]`
        instance_mappings (numpy.ndarray): A structured numpy array containing the fields:
            `[("uniqueId", np.int32), ("name", "O"), ("semanticId", "<u4"), ("semanticLabel", "O"),
            ("instanceIds", "O"), ("metadata", "O")]`

    Returns:
        (numpy.ndarray): A structured numpy array containing the fields: `[("uniqueId", np.int32),
            ("name", "O"), ("semanticLabel", "O"), ("instanceIds", "O"), ("semanticId", "<u4"),
            ("metadata", "O"), ("x_min", "<i4"), ("y_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4")]`
    """
    # Filter out invalid boxes (x_min == INT32_MAX marks "no box rendered").
    bboxes = bboxes[bboxes["x_min"] < 2147483647]
    reduced_bboxes = []
    for im in instance_mappings:
        if im["instanceIds"]:  # if mapping has descendant instance ids
            mask = np.isin(bboxes["instanceId"], im["instanceIds"])
            bbox_masked = bboxes[mask]
            if len(bbox_masked) > 0:
                # Union of all descendant boxes: component-wise min of mins, max of maxes.
                reduced_bboxes.append(
                    (
                        im["uniqueId"],
                        im["name"],
                        im["semanticLabel"],
                        im["metadata"],
                        im["instanceIds"],
                        im["semanticId"],
                        np.min(bbox_masked["x_min"]),
                        np.min(bbox_masked["y_min"]),
                        np.max(bbox_masked["x_max"]),
                        np.max(bbox_masked["y_max"]),
                    )
                )
    return np.array(
        reduced_bboxes,
        dtype=[("uniqueId", np.int32), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O")]
        + bboxes.dtype.descr[1:],
    )


def reduce_bboxes_3d(bboxes, instance_mappings):
    """
    Reduce 3D bounding boxes of leaf nodes to prims with a semantic label.

    Args:
        bboxes (numpy.ndarray): A structured numpy array containing the fields: `[("instanceId", "<u4"),
            ("semanticId", "<u4"), ("x_min", "<i4"), ("y_min", "<i4"), ("z_min", "<i4"),
            ("x_max", "<i4"), ("y_max", "<i4"), ("z_max", "<i4"), ("transform", "<f4", (4, 4))]`
        instance_mappings (numpy.ndarray): A structured numpy array containing the fields:
            `[("uniqueId", np.int32), ("name", "O"), ("semanticId", "<u4"), ("semanticLabel", "O"),
            ("instanceIds", "<u4"), ("metadata", "O")]`

    Returns:
        (numpy.ndarray): A structured numpy array containing the fields: `[("uniqueId", np.int32),
            ("name", "O"), ("semanticLabel", "O"), ("instanceIds", "O"), ("metadata", "O"),
            ("semanticId", "<u4"), ("x_min", "<i4"), ("y_min", "<i4"), ("z_min", "<i4"),
            ("x_max", "<i4"), ("y_max", "<i4"), ("z_max", "<i4"), ("transform", "<f4", (4, 4))]`
            If `corners` field is supplied in `bboxes` argument, the field will be updated accordingly.
    """
    current_time = omni.timeline.get_timeline_interface().get_current_time()
    reduced_bboxes = []
    stage = omni.usd.get_context().get_stage()
    if "corners" in bboxes.dtype.names:
        corners = bboxes["corners"]
    else:
        # TODO if not corners, use extents
        corners = get_bbox_3d_corners(bboxes)

    # Build an instanceId -> row-index lookup table.
    # NOTE(review): np.empty leaves slots for ids absent from `bboxes` uninitialized; callers must
    # only look up ids present in `bboxes` (the `instId < len(idx_lut)` filter below is a partial guard).
    max_instance_id = bboxes["instanceId"].max()
    idx_lut = np.empty(max_instance_id + 1, dtype=int)
    for i, bb_id in enumerate(bboxes["instanceId"]):
        idx_lut[bb_id] = i

    for i, im in enumerate(instance_mappings):
        prim = stage.GetPrimAtPath(im["name"])
        tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(current_time))
        tf_inv = np.linalg.inv(tf)
        # filter instance ids that corresponding to invisible bounding boxes (not filtered in the instance mapping)
        instIds = [instId for instId in im["instanceIds"] if instId < len(idx_lut)]
        idxs = idx_lut[instIds]
        children_corners = corners[idxs]
        # Express all descendant corners in the semantic prim's local frame, then take the AABB there.
        children_corners_homo = np.pad(children_corners.reshape(-1, 3), ((0, 0), (0, 1)), constant_values=1.0)
        corners_local = np.einsum("bj,jk->bk", children_corners_homo, tf_inv)[:, :3]
        corners_local_min = corners_local[..., :3].reshape(-1, 3).min(0)
        corners_local_max = corners_local[..., :3].reshape(-1, 3).max(0)
        extents_local = np.stack([corners_local_min, corners_local_max])
        row = [
            im["uniqueId"],
            im["name"],
            im["semanticLabel"],
            im["metadata"],
            im["instanceIds"],
            im["semanticId"],
            *extents_local.reshape(-1),
            tf,
        ]
        if "corners" in bboxes.dtype.names:
            # Recompute world-space corners from the reduced local extents.
            world_corners = get_bbox_3d_corners(
                {
                    "x_min": [extents_local[0, 0]],
                    "x_max": [extents_local[1, 0]],
                    "y_min": [extents_local[0, 1]],
                    "y_max": [extents_local[1, 1]],
                    "z_min": [extents_local[0, 2]],
                    "z_max": [extents_local[1, 2]],
                    "transform": [tf],
                }
            )
            row.append(world_corners)
        reduced_bboxes.append(tuple(row))
    return np.array(
        reduced_bboxes,
        dtype=[("uniqueId", np.int32), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O")]
        + bboxes.dtype.descr[1:],
    )


def merge_sensors(
    bounding_box_2d_tight=None, bounding_box_2d_loose=None, bounding_box_3d=None, occlusion_quadrants=None
):
    """
    Merge sensor structured array outputs.

    Args:
        bounding_box_2d_tight (numpy.ndarray, optional): A structured numpy array containing the fields:
            `[("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("semanticId", "<u4"),
            ("metadata", "O"), ("instanceIds", "O"), ("x_min", "<i4"), ("y_min", "<i4"),
            ("x_max", "<i4"), ("y_max", "<i4")]`
        bounding_box_2d_loose (numpy.ndarray, optional): A structured numpy array containing the fields:
            `[("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("instanceId", "<u4"),
            ("semanticId", "<u4"), ("metadata", "O"), ("instanceIds", "O"), ("x_min", "<i4"),
            ("y_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4")]`
        bounding_box_3d (numpy.ndarray, optional): A structured numpy array containing the fields:
            `[("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("semanticId", "<u4"),
            ("metadata", "O"), ("instanceIds", "O"), ("x_min", "<i4"), ("y_min", "<i4"),
            ("z_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4"), ("z_max", "<i4"),
            ("transform", "<f4", (4, 4))]`
        occlusion_quadrants (numpy.ndarray, optional): A structured numpy array containing the fields:
            [("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("semanticId", "<u4"),
            ("metadata", "O"), ("instanceIds", "O"), ("occlusion_quadrant", "O")]

    Returns:
        (numpy.ndarray): A structured array containing merged data from the arguments supplied,
        or None if no arrays were provided.
    """
    arrays = []
    array_suffixes = []
    # Fill value for extent fields missing from one side of an outer join.
    defaults = {"x_min": -1, "x_max": -1, "y_min": -1, "y_max": -1, "z_min": -1, "z_max": -1}

    # Add valid arrays to merge list and set suffixes
    if bounding_box_2d_tight is not None:
        arrays.append(bounding_box_2d_tight)
        array_suffixes.append("_bbox2d_tight")
    if bounding_box_2d_loose is not None:
        arrays.append(bounding_box_2d_loose)
        array_suffixes.append("_bbox2d_loose")
    if bounding_box_3d is not None:
        arrays.append(bounding_box_3d)
        array_suffixes.append("_bbox3d")
    if occlusion_quadrants is not None:
        arrays.append(occlusion_quadrants)
        array_suffixes.append("_occ")

    if not arrays:
        return None

    # Fold the arrays pairwise into a single outer join keyed on the identity columns.
    r0 = arrays.pop()
    r0_suf = array_suffixes.pop()
    while arrays:
        r1 = arrays.pop()
        r1_suf = array_suffixes.pop()
        # Add suffixes so extent columns from the two sources stay distinguishable.
        r0.dtype.names = [f"{n}{r0_suf}" if n in defaults.keys() else n for n in r0.dtype.names]
        r1.dtype.names = [f"{n}{r1_suf}" if n in defaults.keys() else n for n in r1.dtype.names]
        defaults_suf = {}
        defaults_suf.update({f"{k}{r0_suf}": v for k, v in defaults.items()})
        defaults_suf.update({f"{k}{r1_suf}": v for k, v in defaults.items()})
        r0 = rfn.join_by(
            ["uniqueId", "name", "semanticId", "semanticLabel", "metadata", "instanceIds"],
            r0,
            r1,
            defaults=defaults_suf,
            r1postfix=r0_suf,
            r2postfix=r1_suf,
            jointype="outer",
            usemask=False,
        )
        r0_suf = ""  # suffixes already baked into the joined result
    return r0


def get_projection_matrix(fov, aspect_ratio, z_near, z_far):
    """
    Calculate the camera projection matrix.

    Args:
        fov (float): Field of View (in radians)
        aspect_ratio (float): Image aspect ratio (Width / Height)
        z_near (float): distance to near clipping plane
        z_far (float): distance to far clipping plane

    Returns:
        (numpy.ndarray): View projection matrix with shape `(4, 4)`
    """
    a = -1.0 / math.tan(fov / 2)
    b = -a * aspect_ratio
    c = z_far / (z_far - z_near)
    d = z_near * z_far / (z_far - z_near)
    return np.array([[a, 0.0, 0.0, 0.0], [0.0, b, 0.0, 0.0], [0.0, 0.0, c, 1.0], [0.0, 0.0, d, 0.0]])


def get_view_proj_mat(view_params):
    """
    Get View Projection Matrix.

    Args:
        view_params (dict): dictionary containing view parameters
            (requires keys: clipping_range, view_to_world, horizontal_aperture,
            focal_length, aspect_ratio).

    Returns:
        (numpy.ndarray): combined world-to-clip matrix with shape `(4, 4)`.
    """
    z_near, z_far = view_params["clipping_range"]
    view_matrix = np.linalg.inv(view_params["view_to_world"])
    # Horizontal FOV from the pinhole aperture/focal-length relation.
    fov = 2 * math.atan(view_params["horizontal_aperture"] / (2 * view_params["focal_length"]))
    projection_mat = get_projection_matrix(fov, view_params["aspect_ratio"], z_near, z_far)
    return np.dot(view_matrix, projection_mat)


def project_pinhole(points, view_params):
    """
    Project 3D points to 2D camera view using a pinhole camera model.

    Args:
        points (numpy.ndarray): Array of points in world frame of shape (num_points, 3).
        view_params (dict): dictionary of view parameters as produced by `get_view_params`.

    Returns:
        (numpy.ndarray): Image-space points of shape (num_points, 3)
    """
    view_proj_matrix = get_view_proj_mat(view_params)
    homo = np.pad(points, ((0, 0), (0, 1)), constant_values=1.0)
    tf_points = np.dot(homo, view_proj_matrix)
    # Perspective divide, then remap x/y from NDC [-1, 1] to [0, 1].
    tf_points = tf_points / (tf_points[..., -1:])
    tf_points[..., :2] = 0.5 * (tf_points[..., :2] + 1)
    return tf_points[..., :3]


def get_instance_mappings():
    """
    Get instance mappings.
    Uses update number as frame ID for caching.
    """
    app = omni.kit.app.get_app_interface()
    frame_id = app.get_update_number()
    mappings = _get_instance_mappings(frame_id)
    return mappings


@lru_cache(maxsize=1)
def _get_instance_mappings(frame_id=None):
    """
    Get instance mappings.
    Uses `frame_id` for caching (the value itself is only a cache key).
    """
    stage = omni.usd.get_context().get_stage()
    # Use the C++ API to retrieve the instance mapping.
    mappings_raw = _get_syntheticdata_iface().get_instance_mapping_list()
    mappings = np.array(
        mappings_raw,
        dtype=[
            ("uniqueId", np.int32),
            ("name", "O"),
            ("semanticId", np.int32),
            ("semanticLabel", "O"),
            ("instanceIds", "O"),
            ("metadata", "O"),
        ],
    )
    return mappings


def reduce_occlusion(occlusion_data, instance_mappings):
    """
    Reduce occlusion value of leaf nodes to prims with a semantic label.

    Args:
        occlusion_data (numpy.ndarray): A structured numpy array with the fields: [("instanceId", "<u4"),
            ("semanticId", "<u4"), ("occlusionRatio", "<f4")], where occlusion ranges from 0
            (not occluded) to 1 (fully occluded).
        instance_mappings (numpy.ndarray): instance mapping array as returned by `get_instance_mappings`.

    Returns:
        (numpy.ndarray): A structured numpy array with the fields: [("uniqueId", np.int32),
            ("name", "O"), ("semanticLabel", "O"), ("instanceIds", "O"), ("semanticId", "<u4"),
            ("metadata", "O"), ("occlusionRatio", "<f4")]
    """
    mapped_data = []
    # Drop NaN occlusion entries before aggregating.
    occlusion_data = occlusion_data[~np.isnan(occlusion_data["occlusionRatio"])]
    for im in instance_mappings:
        if im["instanceIds"]:  # if mapping has descendant instance ids
            mask = np.isin(occlusion_data["instanceId"], im["instanceIds"])
            if mask.sum() > 1:
                # Averaging per-mesh occlusion is only an approximation for multi-mesh prims.
                carb.log_warn(
                    f"[syntheticdata.viz] Mapping on {im['name']} contains multiple child meshes, occlusion value may be incorrect."
                )
            occ = occlusion_data[mask]
            if len(occ) > 0:
                mapped_data.append(
                    (
                        im["uniqueId"],
                        im["name"],
                        im["semanticLabel"],
                        im["metadata"],
                        im["instanceIds"],
                        im["semanticId"],
                        np.mean(occ["occlusionRatio"]),
                    )
                )
    return np.array(
        mapped_data,
        dtype=[("uniqueId", np.int32), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O")]
        + occlusion_data.dtype.descr[1:],
    )


def _join_struct_arrays(arrays):
    """
    Join N numpy structured arrays of equal length into one array with the union of their fields.
    """
    n = len(arrays[0])
    assert all([len(a) == n for a in arrays])
    dtypes = sum(([d for d in a.dtype.descr if d[0]] for a in arrays), [])
    joined = np.empty(n, dtype=dtypes)
    for a in arrays:
        joined[list(a.dtype.names)] = a
    return joined


def _fish_eye_map_to_sphere(screen, screen_norm, theta, max_fov):
    """
    Utility function to map a sample from a disk on the image plane to a sphere.

    Args:
        screen (numpy.ndarray): screen-space sample positions, shape (N, 2).
        screen_norm (numpy.ndarray): radial distance of each sample from the screen center, shape (N,).
        theta (numpy.ndarray): polar angle per sample, shape (N,).
        max_fov (float): maximum half-FOV in radians; samples beyond it keep the default direction.

    Returns:
        (tuple): (direction, extent) arrays of shapes (N, 3) and (N,).
    """
    # FIX: np.float was removed in NumPy 1.24 — use the builtin float.
    direction = np.array([[0, 0, -1]] * screen.shape[0], dtype=float)
    extent = np.zeros(screen.shape[0], dtype=float)
    # A real fisheye have some maximum FOV after which the lens clips.
    valid_mask = theta <= max_fov
    # Map to a disk: screen / R normalizes the polar direction in screen space.
    # FIX: `xy` is already restricted to valid samples — index with masks over the
    # valid subset only (the original double-applied `valid_mask`, which raised a
    # shape error whenever any sample fell outside the FOV).
    xy = screen[valid_mask]
    norms = screen_norm[valid_mask]
    norm_mask = norms > 1e-5
    xy[norm_mask] = xy[norm_mask] / norms[norm_mask, None]
    # Map disk to a sphere
    cos_theta = np.cos(theta[valid_mask])
    sin_theta = np.sqrt(1.0 - cos_theta ** 2)
    # Todo: is this right? Do we assume z is negative (RH coordinate system)?
    z = -cos_theta
    xy = xy * sin_theta[:, None]
    direction[valid_mask] = np.stack([xy[:, 0], xy[:, 1], z], axis=1)
    extent[valid_mask] = 1.0  # < far clip is not a plane, it's a sphere!
    return direction, extent


def project_fish_eye_map_to_sphere(direction):
    """Inverse of the disk-to-sphere mapping: recover disk coordinates and polar angle."""
    z = direction[:, 2:]
    cos_theta = -z
    theta = np.arccos(cos_theta)  # TODO currently projecting outside of max FOV
    sin_theta = np.sqrt(1.0 - cos_theta * cos_theta + EPS)
    xy = direction[:, :2] / (sin_theta + EPS)
    return xy, theta


def fish_eye_polynomial(ndc, view_params):
    """
    FTheta camera model based on DW src/dw/calibration/cameramodel/CameraModelsNoEigen.hpp

    Args:
        ndc (numpy.ndarray): normalized device coordinates, shape (N, 2).
        view_params (dict): view parameters dictionary containing an "ftheta" sub-dict.

    Returns:
        (tuple): (direction, extent) as returned by `_fish_eye_map_to_sphere`.
    """
    # Convert NDC pixel position to screen space... well almost. It is screen space but the extent
    # of x is [-0.5, 0.5] and the extent of y is [-0.5/aspectRatio, 0.5/aspectRatio].
    screen = ndc - 0.5
    aspect_ratio = view_params["aspect_ratio"]
    screen[:, 1] /= -aspect_ratio
    # The FTheta polynomial works at a nominal resolution. So far we have done calculations in NDC
    # to be resolution independent. Here we scale by the nominal resolution in X.
    screen = (screen - view_params["ftheta"]["c_ndc"]) * view_params["ftheta"]["width"]
    # Compute the radial distance on the screen from its center point
    r = np.sqrt(screen[:, 0] ** 2 + screen[:, 1] ** 2)
    theta = ftheta_distortion(view_params["ftheta"], r)
    max_fov = math.radians(view_params["ftheta"]["max_fov"] / 2)
    return _fish_eye_map_to_sphere(screen, r, theta, max_fov)


def project_fish_eye_polynomial(points, view_params):
    """
    Project F-Theta camera model.

    Args:
        points (numpy.ndarray): Array of points in world frame of shape (num_points, 3).
        view_params (dict): dictionary containing view parameters

    Returns:
        (numpy.ndarray): Image-space points of shape (num_points, 3)
    """
    points_h = np.pad(points, ((0, 0), (0, 1)), constant_values=1)
    points_cam_frame = np.einsum("jk,kl->jl", points_h, view_params["world_to_view"])[..., :3]
    directions = points_cam_frame / np.linalg.norm(points_cam_frame + EPS, axis=1)[:, None]
    xy, theta = project_fish_eye_map_to_sphere(directions)
    # Invert the distortion polynomial to recover the radial screen distance.
    r = _ftheta_distortion_solver(view_params["ftheta"], theta)
    screen = xy * r
    screen = screen / view_params["ftheta"]["width"] + view_params["ftheta"]["c_ndc"]
    screen[:, 1] *= -view_params["aspect_ratio"]
    ndc = screen + 0.5
    ndc = np.pad(ndc, ((0, 0), (0, 1)), constant_values=0)
    return ndc


def get_view_params(viewport):
    """
    Get view parameters.

    Args:
        viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor.

    Returns:
        (dict): Dictionary containing view parameters.
    """
    stage = omni.usd.get_context().get_stage()
    camera = stage.GetPrimAtPath(viewport.camera_path)
    current_time = omni.timeline.get_timeline_interface().get_current_time()
    view_to_world = UsdGeom.Imageable(camera).ComputeLocalToWorldTransform(current_time)
    world_to_view = view_to_world.GetInverse()
    width, height = viewport.resolution

    projection_type = camera.GetAttribute("cameraProjectionType").Get(current_time)
    if projection_type == "fisheyePolynomial":
        # Collect the FTheta calibration attributes and precompute derived constants.
        ftheta = {
            "width": camera.GetAttribute("fthetaWidth").Get(),
            "height": camera.GetAttribute("fthetaHeight").Get(),
            "cx": camera.GetAttribute("fthetaCx").Get(),
            "cy": camera.GetAttribute("fthetaCy").Get(),
            "poly_a": camera.GetAttribute("fthetaPolyA").Get(),
            "poly_b": camera.GetAttribute("fthetaPolyB").Get(),
            "poly_c": camera.GetAttribute("fthetaPolyC").Get(),
            "poly_d": camera.GetAttribute("fthetaPolyD").Get(),
            "poly_e": camera.GetAttribute("fthetaPolyE").Get(),
            "max_fov": camera.GetAttribute("fthetaMaxFov").Get(),
        }
        # Polar angle reached at the edge of the nominal image width.
        ftheta["edge_fov"] = ftheta_distortion(ftheta, ftheta["width"] / 2)
        # Principal-point offset expressed in the nominal-width NDC units.
        ftheta["c_ndc"] = np.array(
            [
                (ftheta["cx"] - ftheta["width"] / 2) / ftheta["width"],
                (ftheta["height"] / 2 - ftheta["cy"]) / ftheta["width"],
            ]
        )
    else:
        ftheta = None

    view_params = {
        "view_to_world": np.array(view_to_world),
        "world_to_view": np.array(world_to_view),
        "projection_type": projection_type,
        "ftheta": ftheta,
        "width": width,
        "height": height,
        "aspect_ratio": width / height,
        "clipping_range": camera.GetAttribute("clippingRange").Get(current_time),
        "horizontal_aperture": camera.GetAttribute("horizontalAperture").Get(current_time),
        "focal_length": camera.GetAttribute("focalLength").Get(current_time),
    }
    return view_params


def image_to_world(image_coordinates, view_params):
    """
    Map each image coordinate to a corresponding direction vector.

    Args:
        image_coordinates (numpy.ndarray): Pixel coordinates of shape (num_pixels, 2)
        view_params (dict): dictionary containing view parameters

    Returns:
        (tuple): (origin, direction) — the camera world position of shape (3,) and
        unit direction vectors of shape (num_pixels, 3).
    """
    ndc = image_coordinates / np.array([view_params["width"], view_params["height"]])
    direction, extent = fish_eye_polynomial(ndc, view_params)
    view_to_world = view_params["view_to_world"]
    origin = np.matmul(np.array([0, 0, 0, 1]), view_to_world)[:3]
    # w = 0 so only the rotational part of the transform applies to directions.
    direction = np.matmul(np.pad(direction, ((0, 0), (0, 1)), constant_values=0), view_to_world)[:, :3]
    direction /= np.linalg.norm(direction, axis=1, keepdims=True)
    return origin, direction


def world_to_image(points, viewport, view_params=None):
    """
    Project world coordinates to image-space.

    Args:
        points (numpy.ndarray): Array of points in world frame of shape (num_points, 3).
        viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor.
        view_params (dict, Optional): View parameters dictionary obtained from
            omni.syntheticdata.helpers.get_view_params. Use current viewport state if not provided.

    Returns:
        (numpy.ndarray): Image-space points of shape (num_points, 3)

    Raises:
        ValueError: if the viewport's projection type is unsupported.
    """
    if view_params is None:
        view_params = get_view_params(viewport)
    if view_params["projection_type"] == "pinhole" or view_params["projection_type"] is None:
        points_image_space = project_pinhole(points, view_params)
    elif view_params["projection_type"] == "fisheyePolynomial":
        points_image_space = project_fish_eye_polynomial(points, view_params)
    else:
        raise ValueError(f"Projection type {view_params['projection_type']} is not currently supported.")
    return points_image_space


def ftheta_distortion(ftheta, x):
    """F-Theta distortion polynomial: theta = a + b*x + c*x^2 + d*x^3 + e*x^4 (Horner form)."""
    return ftheta["poly_a"] + x * (
        ftheta["poly_b"] + x * (ftheta["poly_c"] + x * (ftheta["poly_d"] + x * ftheta["poly_e"]))
    )


def ftheta_distortion_prime(ftheta, x):
    """Derivative of `ftheta_distortion` with respect to x."""
    return ftheta["poly_b"] + x * (2 * ftheta["poly_c"] + x * (3 * ftheta["poly_d"] + x * 4 * ftheta["poly_e"]))


def _ftheta_distortion_solver(ftheta, theta):
    """Solve theta = f(r) for r by Newton iteration inside the FOV, linear extrapolation outside.

    f(r) is a polynomial guaranteed to be monotonically increasing up to some maximum r/theta;
    beyond the maximum theta the solution switches to a stable linear extrapolation.
    """

    def solver(ftheta, theta):
        # Initial guess from the linear relation at the image edge, refined by Newton steps.
        ratio = ftheta["width"] / 2 / ftheta["edge_fov"]
        guess = theta * ratio
        # 2 loops provides sufficient precision in working range.
        for i in range(2):
            guessed_theta = ftheta_distortion(ftheta, guess)
            dy = theta - guessed_theta
            dx = ftheta_distortion_prime(ftheta, guess)
            mask = dx != 0
            guess[mask] += dy[mask] / dx[mask]
            # Where the derivative vanishes, fall back to the linear ratio step.
            guess[~mask] += dy[~mask] * ratio
        return guess

    # For all points guess r using a linear approximation.
    guess = solver(ftheta, theta)

    # Determine which points were actually inside the FOV
    max_theta = math.radians((ftheta["max_fov"] / 2.0))
    inside_fov = theta < max_theta

    # For all points that were outside the FOV replace their solution with a more stable linear
    # extrapolation. These outside of FOV points map beyond the maximum r possible for inside FOV
    # points. These points shouldn't be seen by the camera, but a valid projection is still required.
    max_r = solver(ftheta, np.array([max_theta]))
    min_theta = ftheta["poly_a"]  # this should always be zero in theory, but the user could define poly_a != 0.
    extrapolation_slope = max_r / (max_theta - min_theta)
    guess[~inside_fov] = max_r + extrapolation_slope * (theta[~inside_fov] - max_theta)
    return guess
25,400
Python
40.369707
132
0.579016
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/sensors.py
import carb import omni.usd import omni.kit from pxr import UsdGeom import numpy as np import asyncio from .. import _syntheticdata from . import helpers from .SyntheticData import * def get_synthetic_data(): sdg = SyntheticData.Get() if not sdg: SyntheticData.Initialize() sdg = SyntheticData.Get() assert sdg return sdg async def next_render_simulation_async(render_product_path, num_simulation_frames_offset=0): """Fetch the current simulation time and wait for a frame to be rendered at or after this time.""" _sdg_iface = helpers._get_syntheticdata_iface() stage_id = omni.usd.get_context().get_stage_id() simulation_rationnal_time = _sdg_iface.get_rational_time_of_simulation(stage_id, num_simulation_frames_offset) simulation_time = simulation_rationnal_time[0]/simulation_rationnal_time[1] if simulation_rationnal_time[1] > 0 else 0 # wait the frame next to this time to be rendered render_f = asyncio.Future() def on_render_event(e: carb.events.IEvent): parsed_payload = _sdg_iface.parse_rendered_simulation_event(e.payload["product_path_handle"], e.payload["results"]) if parsed_payload[0] == render_product_path: render_time = parsed_payload[1]/parsed_payload[2] if parsed_payload[2] > 0 else 0 if (render_time >= simulation_time) and not render_f.done(): render_f.set_result(render_time) sub_render = ( omni.usd.get_context() .get_rendering_event_stream() .create_subscription_to_pop_by_type( int(omni.usd.StageRenderingEventType.NEW_FRAME), on_render_event, name="omni.syntheticdata.sensors.next_render_simulation_async", order=0, ) ) max_num_skipped_update = max(0, num_simulation_frames_offset) + 150 num_skipped_update = 0 app = omni.kit.app.get_app() while (num_skipped_update<max_num_skipped_update) and (not render_f.done()): await app.next_update_async() num_skipped_update+=1 if num_skipped_update >= max_num_skipped_update: raise SyntheticDataException(f"waiting for simulation to be rendered failed.") async def next_sensor_data_async(viewport = None, waitSimFrame: bool = 
False, inViewportId: int = None): """Wait for frame complete event from Kit for specific viewport. """ # next_sensor_data_async API previously passed inViewportId as ViewportHandle # This is actually incorrect and bad due to waiting on that handle, which can # change for a variety of reasons between the retrieval of the handle and # the wait on it below. if hasattr(viewport, "frame_info"): inViewportId = viewport.frame_info.get("viewport_handle") else: if inViewportId is None: if isinstance(viewport, int): inViewportId = viewport else: inViewportId = 0 viewport = None carb.log_warn(f"Depreacted usage of next_sensor_data_async with inViewportId={inViewportId}, pass the Viewport instead") app = omni.kit.app.get_app() # wait for the next pre_update call pre_f = asyncio.Future() def on_pre_event(e: carb.events.IEvent): if not pre_f.done(): swhFrameNumber = e.payload["SWHFrameNumber"] # drivesim legacy name if not swhFrameNumber: swhFrameNumber = e.payload["frameNumber"] pre_f.set_result(swhFrameNumber) sub_pre = app.get_pre_update_event_stream().create_subscription_to_pop(on_pre_event, name="omni.kit.app._pre_update_async") # wait the next frame to be rendered render_f = asyncio.Future() def on_render_event(e: carb.events.IEvent): # Grab the ViewportHandle to match from the Viewport if we have it or the legacy inViewportId cur_viewport_handle = viewport.frame_info.get("viewport_handle") if viewport else inViewportId viewId = e.payload["viewport_handle"] frameNumber = e.payload["swh_frame_number"] if ((viewId == cur_viewport_handle) and (not waitSimFrame or (pre_f.done() and (frameNumber >= pre_f.result())))) : if not render_f.done(): render_f.set_result(frameNumber) sub_render = ( omni.usd.get_context() .get_rendering_event_stream() .create_subscription_to_pop_by_type( int(omni.usd.StageRenderingEventType.NEW_FRAME), on_render_event, name="omni.syntheticdata.sensors._next_sensor_data_async", order=0, ) ) MAX_NUM_SKIPPED_UPDATE = 150 num_skipped_update = 0 while 
(num_skipped_update<MAX_NUM_SKIPPED_UPDATE) and (not render_f.done()): await app.next_update_async() num_skipped_update+=1 if num_skipped_update >= MAX_NUM_SKIPPED_UPDATE: raise SyntheticDataException(f"waiting for next frame failed.") def enable_sensors(viewport, sensor_types): """ activate the host buffer copy nodes for given sensor NB: This function is deprecated """ for sensor_type in sensor_types: rendervar_name = SyntheticData.convert_sensor_type_to_rendervar(sensor_type.name) get_synthetic_data().activate_node_template(rendervar_name + "ExportRawArray", 0, [viewport.render_product_path]) def disable_sensors(viewport, sensor_types): """ deactivate the host buffer copy nodes for given sensor NB: This function is deprecated """ for sensor_type in sensor_types: rendervar_name = SyntheticData.convert_sensor_type_to_rendervar(sensor_type.name) get_synthetic_data().deactivate_node_template(rendervar_name + "ExportRawArray", 0, [viewport.render_product_path]) def create_or_retrieve_sensor(viewport, sensor_type): """ Retrieve a sensor for the specified viewport and sensor type. If the sensor does not exist, it is created. Note that the sensor will be uninitialized until a frame is rendered after the sensor is created. NB: This function is deprecated and the asynchronous version below (create_or_retrieve_sensor_async) should be used instead to ensure sensors are properly initialized by the renderer after creation Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. sensor_type (omni.syntheticdata._syntheticdata.SensorType): Type of sensor to retrieve/create. """ enable_sensors(viewport,[sensor_type]) return sensor_type async def create_or_retrieve_sensor_async(viewport, sensor_type): """ Retrieve a sensor for the specified viewport and sensor type. If the sensor does not exist, it is created. Note that the sensor will be uninitialized until a frame is rendered after the sensor is created. 
Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. sensor_type (omni.syntheticdata._syntheticdata.SensorType): Type of sensor to retrieve/create. """ enable_sensors(viewport,[sensor_type]) await next_sensor_data_async(viewport,True) return sensor_type async def initialize_async(viewport, sensor_types): """ Initialize sensors in the list provided. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. sensor_types (list of omni.syntheticdata._syntheticdata.SensorType): List of sensor types to initialize. """ await omni.kit.app.get_app_interface().next_update_async() enable_sensors(viewport, sensor_types) await next_sensor_data_async(viewport,True) def get_sensor_array(viewport, sensor_type, elemType, elemCount, is2DArray): """ Retrieve the sensor array data from the last sensor node evaluation. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. sensor_type : Sensor type to retrieve the data from. 
is2DArray : True if the array to be retrieved is a 2d array """ output_names = ["outputs:data"] if is2DArray: output_names.append("outputs:width") output_names.append("outputs:height") else: output_names.append("outputs:bufferSize") rendervar_name = SyntheticData.convert_sensor_type_to_rendervar(sensor_type.name) outputs = get_synthetic_data().get_node_attributes(rendervar_name + "ExportRawArray", output_names, viewport.render_product_path) data = outputs["outputs:data"] if outputs and ("outputs:data" in outputs) else None if is2DArray: height = outputs["outputs:height"] if outputs and ("outputs:height" in outputs) else 0 width = outputs["outputs:width"] if outputs and ("outputs:width" in outputs) else 0 bufferSize = height*width*elemCount*np.dtype(elemType).itemsize else: bufferSize = outputs["outputs:bufferSize"] if outputs and ("outputs:bufferSize" in outputs) else 0 if (data is None) or (len(data) < np.dtype(elemType).itemsize): if is2DArray: shape = (0, 0, elemCount) if elemCount > 1 else (0, 0) else: shape = (0, elemCount) if elemCount > 1 else (0) return np.empty(shape, elemType) assert bufferSize == len(data) data = data.view(elemType) assert len(data) > 0 if not is2DArray: return data.reshape(data.shape[0] // elemCount, elemCount) if elemCount > 1 else data return data.reshape(height, width, elemCount) if elemCount > 1 else data.reshape(height, width) def get_rgb(viewport): """ Get RGB sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A uint8 array of shape (height, width, 4) """ return get_sensor_array(viewport, _syntheticdata.SensorType.Rgb, np.uint8, 4, True) def get_depth(viewport): """ Get Inverse Depth sensor output. *** DEPRECATED *** Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape (height, width, 1). 
""" return get_sensor_array(viewport, _syntheticdata.SensorType.Depth, np.float32, 1, True) def get_depth_linear(viewport): """ Get Linear Depth sensor output. *** DEPRECATED *** Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape (height, width, 1). """ return get_sensor_array(viewport, _syntheticdata.SensorType.DepthLinear, np.float32, 1, True) def get_distance_to_image_plane(viewport): """ Get distance to image plane sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape (height, width, 1). """ return get_sensor_array(viewport, _syntheticdata.SensorType.DistanceToImagePlane, np.float32, 1, True) def get_distance_to_camera(viewport): """ Get distance to camera sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape (height, width, 1). """ return get_sensor_array(viewport, _syntheticdata.SensorType.DistanceToCamera, np.float32, 1, True) def get_camera_3d_position(viewport): """ Get camera space 3d position sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape (height, width, 4). """ return get_sensor_array(viewport, _syntheticdata.SensorType.Camera3dPosition, np.float32, 4, True) def get_bounding_box_3d(viewport, parsed=False, return_corners=False, camera_frame=False, instance_mappings=None): """ Get bounding box 3D sensor output. NB: The semanticId field in the return value is deprecated and contains undefined data Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. parsed (bool): If True, return a single bounding box for each prim with a semantic schema. Otherwise, a bounding box will be provided for each leaf prim. 
include_corners (bool): if True, calculate and return the 8 corners of each 3D bounding box. Corners are returned in the order: [LDB, RDB, LUB, RUB, LDF, RDF, LUF, RUF] where L=Left, R=Right, D=Down, U=Up, B=Back, F=Front and LR: x-axis, UD: y-axis, FB: z-axis. camera_frame (bool): If True, the transforms and corners will be returned in the camera's reference frame. Otherwise, coordinates are returned with respect to the world frame. Note: The coordinate system is right-handed. instance_mappings (numpy.ndarray, optional): A structured array returned by `helpers.get_instance_mappings`. If not provided (default), a new instance mappings will be computed. Return: (numpy.ndarray): A structured array with the fields: `[('instanceId', '<u4'), ('semanticId', '<u4'), ("metadata", "O"), ('x_min', '<f4'), ('y_min', '<f4'), ('z_min', '<f4'), ('x_max', '<f4'), ('y_max', '<f4'), ('z_max', '<f4'), ('transform', '<f4', (4, 4))]`. If `return_corners` is `True`, an additional field `('corners', '<f4', (8, 3)` is returned. 
""" BoundingBox3DValuesType = np.dtype( [ ("instanceId", "<u4"), ("semanticId", "<u4"), ("x_min", "<f4"), ("y_min", "<f4"), ("z_min", "<f4"), ("x_max", "<f4"), ("y_max", "<f4"), ("z_max", "<f4"), ("transform", "<f4", (4, 4)), ] ) bboxes_3d_data = get_sensor_array( viewport, _syntheticdata.SensorType.BoundingBox3D, BoundingBox3DValuesType, 1, False ) # Return immediately if empty if bboxes_3d_data.size == 0: return bboxes_3d_data if return_corners: corners = helpers.get_bbox_3d_corners(bboxes_3d_data) corners_struc = np.zeros(len(corners), dtype=[("corners", np.float32, (8, 3))]) corners_struc["corners"] = corners bboxes_3d_data = helpers._join_struct_arrays([bboxes_3d_data, corners_struc]) if parsed: if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() bboxes_3d_data = helpers.reduce_bboxes_3d(bboxes_3d_data, instance_mappings) if camera_frame: stage = omni.usd.get_context().get_stage() camera = stage.GetPrimAtPath(viewport.camera_path) current_time = omni.timeline.get_timeline_interface().get_current_time() tf_mat = np.array(UsdGeom.Camera(camera).ComputeLocalToWorldTransform(current_time)) view_matrix = np.linalg.inv(tf_mat) bboxes_3d_data["transform"] = np.einsum("ijk,kl->ijl", bboxes_3d_data["transform"], view_matrix) if return_corners: corners_homo = np.pad(bboxes_3d_data["corners"], ((0, 0), (0, 0), (0, 1)), constant_values=1.0) bboxes_3d_data["corners"] = np.einsum("ijk,kl->ijl", corners_homo, view_matrix)[..., :3] return bboxes_3d_data def get_bounding_box_2d_tight(viewport, instance_mappings=None): """ Get Bounding Box 2D Tight sensor output. Tight bounding boxes only bound the visible or unoccluded portions of an object. If an object is completely occluded, it is omitted from the returned array. Bounds units are in pixels. NB: The semanticId field in the return value is deprecated and contains undefined data Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. 
        instance_mappings (numpy.ndarray, optional): A structured array returned by
            `helpers.get_instance_mappings`. If not provided (default), a new instance mappings
            will be computed.

    Return:
        (np.ndarray): A structured numpy array with the fields: [('name', 'O'),
            ('semanticLabel', 'O'), ('instanceId', '<u4'), ('semanticId', '<u4'),
            ("metadata", "O"), ('x_min', '<i4'), ('y_min', '<i4'), ('x_max', '<i4'),
            ('y_max', '<i4')]
    """
    BoundingBox2DValuesType = np.dtype(
        [
            ("instanceId", "<u4"),
            ("semanticId", "<u4"),
            ("x_min", "<i4"),
            ("y_min", "<i4"),
            ("x_max", "<i4"),
            ("y_max", "<i4"),
        ]
    )
    bboxes_2d_data = get_sensor_array(
        viewport, _syntheticdata.SensorType.BoundingBox2DTight, BoundingBox2DValuesType, 1, is2DArray=False
    )
    if instance_mappings is None:
        instance_mappings = helpers.get_instance_mappings()
    # Helper merges per-leaf boxes per semantic prim and attaches name/label/metadata columns.
    bboxes_2d_data = helpers.reduce_bboxes_2d(bboxes_2d_data, instance_mappings)
    return bboxes_2d_data


def get_bounding_box_2d_loose(viewport, instance_mappings=None):
    """ Get Bounding Box 2D Loose sensor output.
    Loose bounding boxes bound the full extents of an object, even if totally occluded. Bounds
    units are in pixels.
    NB: The semanticId field in the return value is deprecated and contains undefined data

    Args:
        viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor.
        instance_mappings (numpy.ndarray, optional): A structured array returned by
            `helpers.get_instance_mappings`. If not provided (default), a new instance mappings
            will be computed.

    Return:
        (np.ndarray): A structured numpy array with the fields: [('name', 'O'),
            ('semanticLabel', 'O'), ('instanceId', '<u4'), ('semanticId', '<u4'),
            ("metadata", "O"), ('x_min', '<i4'), ('y_min', '<i4'), ('x_max', '<i4'),
            ('y_max', '<i4')]
    """
    BoundingBox2DValuesType = np.dtype(
        [
            ("instanceId", "<u4"),
            ("semanticId", "<u4"),
            ("x_min", "<i4"),
            ("y_min", "<i4"),
            ("x_max", "<i4"),
            ("y_max", "<i4"),
        ]
    )
    bboxes_2d_data = get_sensor_array(
        viewport, _syntheticdata.SensorType.BoundingBox2DLoose, BoundingBox2DValuesType, 1, is2DArray=False
    )
    if instance_mappings is None:
        instance_mappings = helpers.get_instance_mappings()
    bboxes_2d_data = helpers.reduce_bboxes_2d(bboxes_2d_data, instance_mappings)
    return bboxes_2d_data


def get_semantic_segmentation(viewport, parsed=False, return_mapping=False, instance_mappings=None):
    """Get semantic segmentation sensor output.
    NB: The non-parsed return value (parsed=False) is deprecated and contains undefined data

    Args:
        viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor.
        parsed (bool): If True, map each leaf prim to a parent with a semantic schema applied.
            Otherwise, each leaf prim is returned as a unique semantic ID.
        return_mapping (bool): Whether to also return an array mapping instance IDs to their
            corresponding prims.
        instance_mappings (numpy.ndarray, optional): A structured array returned by
            `helpers.get_instance_mappings`. If not provided (default), a new instance mappings
            will be computed.

    Return:
        output (np.ndarray): A uint32 array of shape `(height, width)`.
        mapping (list): (optional) If `return_mapping` is True, there will be an additional array
            containing the mapping of instance IDs to their corresponding prims. Each row
            corresponds to a prim with a SemanticSchema of Type="class".
The mapping is provided in the following format: `(ID (int), path (str), semanticID (int), semanticLabel (str), descendentIDs (list of int))` """ if parsed: instance_data = get_sensor_array(viewport, _syntheticdata.SensorType.InstanceSegmentation, np.uint32, 1, True) if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() if len(instance_mappings) == 0: return get_sensor_array(viewport, _syntheticdata.SensorType.SemanticSegmentation, np.uint32, 1, True) semantic_instances = {} for im in instance_mappings[::-1]: semantic_instances.setdefault(im["semanticId"], []).extend(im["instanceIds"]) max_semantic_instance_id = np.max([max(il) for _, il in semantic_instances.items()]) max_instance_id = instance_data.max() lut = np.zeros(max(max_semantic_instance_id, max_instance_id) + 1, dtype=np.uint32) for i, (_, il) in enumerate(semantic_instances.items()): lut[np.array(il)] = i + 1 # +1 to differentiate from ray misses semantic_data = np.take(lut, instance_data) else: semantic_data = get_sensor_array(viewport, _syntheticdata.SensorType.SemanticSegmentation, np.uint32, 1, True) if return_mapping: if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() return semantic_data, instance_mappings else: return semantic_data def get_instance_segmentation(viewport, parsed=False, return_mapping=False, instance_mappings=None): """ Get instance segmentation sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. parsed (bool): If True, map each leaf prim to a parent with a semantic schema applied. Otherwise, each leaf prim is returned as a unique instance. return_mapping (bool): Whether to also return an array mapping instance IDs to their corresponding prims. instance_mappings (numpy.ndarray, optional): A structured array returned by `helpers.get_instance_mappings` If not provided (default), a new instance mappings will be computed. 
Return: output (np.ndarray): A uint32 array of shape `(height, width)` mapping (list): (optional) If `return_mapping` is True, there will be an additional array containing the mapping of instance IDs to their corresponding prims. Each row corresponds to a prim with a SemanticSchema of Type="class". The mapping is provided in the following format: `(ID (int), path (str), semanticID (int), semanticLabel (str), descendentIDs (list of int))` """ instance_data = get_sensor_array(viewport, _syntheticdata.SensorType.InstanceSegmentation, np.uint32, 1, True) if parsed: if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() if len(instance_mappings) == 0: return instance_data instances_list = [(im[0], im[4]) for im in instance_mappings][::-1] if len(instances_list) == 0: carb.log_warn("[omni.syntheticdata.visualize] No instances found.") return instance_data max_instance_id_list = max([max(il[1]) for il in instances_list]) max_instance_id = instance_data.max() lut = np.zeros(max(max_instance_id, max_instance_id_list) + 1, dtype=np.uint32) for uid, il in instances_list: lut[np.array(il)] = uid instance_data = np.take(lut, instance_data) if return_mapping: if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() return instance_data, instance_mappings else: return instance_data def get_normals(viewport): """ Get Normals sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape `(height, width, 3)` with values in the range of `(-1., 1.)`. """ return get_sensor_array(viewport, _syntheticdata.SensorType.Normal, np.float32, 4, True)[..., :3] def get_motion_vector(viewport): """ Get Motion Vector sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. 
Return: TOCHECK : this does not describe what the legacy interface was returning (numpy.ndarray): A float32 array of shape `(height, width, 3)` with values in the range of `(-1., 1.)`. """ return get_sensor_array(viewport, _syntheticdata.SensorType.MotionVector, np.float32, 4, True) def get_cross_correspondence(viewport): """ Get Cross Correspondence sensor output. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A float32 array of shape `(height, width, 4)` with values in the range of `(-1., 1.)`. """ return get_sensor_array(viewport, _syntheticdata.SensorType.CrossCorrespondence, np.float32, 4, True) def get_occlusion(viewport, parsed=False, instance_mappings=None): """Get Occlusion values. Returns occlusion of instances as a ratio from 0. to 1. Note that this sensor is only applied to leaf prims. For example, if an asset is composed of multiple sub-meshes, an occlusion value will be calculated for each sub-mesh. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. parsed (bool): If True, map occlusion values to prims with a semantic class. If the mapped prim has more than one child with an occlusion value, a naive average will be performed. Note that this value will likely not be accurate. instance_mappings (numpy.ndarray, optional): A structured array returned by `helpers.get_instance_mappings`. If not provided (default), a new instance mappings will be computed. Return: (numpy.ndarray): A structured numpy array with the fields: [('instanceId', '<u4'), ('semanticId', '<u4'), ('occlusionRatio', '<f4')], where occlusion ranges from 0 (not occluded) to 1 (fully occluded). If `parsed` is True, the additional fields [('name', 'O'), ('semanticLabel', 'O'), ("metadata", "O")] are returned. 
""" OcclusionType = np.dtype([("instanceId", "<u4"), ("semanticId", "<u4"), ("occlusionRatio", "<f4")]) data = get_sensor_array(viewport, _syntheticdata.SensorType.Occlusion, OcclusionType, 1, is2DArray=False) if parsed: if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() return helpers.reduce_occlusion(data, instance_mappings) return data def get_semantic_data(instance_mappings=None): """ Get Semantic Data. Args: instance_mappings (numpy.ndarray, optional): A structured array returned by `helpers.get_instance_mappings`. If not provided (default), a new instance mappings will be computed. Return: (numpy.ndarray): A structured numpy array with the fields: [('uniqueId', '<i4'), ('name', 'O'), ('semanticLabel', 'O'), ('metadata', 'O')] """ if instance_mappings is None: instance_mappings = helpers.get_instance_mappings() output = [] for row in instance_mappings: output.append((row[0], row[1], row[3], row[5])) output = np.array(output, dtype=[("uniqueId", np.int32), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O")]) return output def get_occlusion_quadrant(viewport, return_bounding_boxes=False): """ Get Occlusion Quadrant. Uses loose and tight bounding boxes to return the occluded quadrant of all prims with semantic class. Note that the label "fully-visible" specifies that the full height and width of the prim's bounds can be determined, and the prim may still be partially occluded. Args: viewport (opaque Viewport instance): Viewport from which to retrieve/create sensor. Return: (numpy.ndarray): A structured numpy array with the fields: [('name', 'O'), ('semanticLabel', 'O'), ('instanceId', '<u4'), ('semanticId', '<u4'), ('occlusion_quadrant', 'O')], where occlusion_quadrant is a string from ['bottom', 'top', 'right', 'left', 'bottom-right', 'bottom-left', 'top-right', 'top-left', 'fully-visible', 'fully-occluded']. 
If `return_bounding_boxes` is True, the fields `x_min`, `y_min`, `x_max`, `y_max` for with suffixes `_bbox2d_tight` and `_bbox2d_loose` will be returned as well. """ tight_data = get_bounding_box_2d_tight(viewport) loose_data = get_bounding_box_2d_loose(viewport) merged_data = helpers.merge_sensors(bounding_box_2d_tight=tight_data, bounding_box_2d_loose=loose_data) is_fully_occluded = merged_data["x_min_bbox2d_tight"] == -1 is_occluded_left = (merged_data["x_min_bbox2d_tight"] > merged_data["x_min_bbox2d_loose"]) & ~is_fully_occluded is_occluded_right = (merged_data["x_max_bbox2d_tight"] < merged_data["x_max_bbox2d_loose"]) & ~is_fully_occluded is_occluded_top = (merged_data["y_min_bbox2d_tight"] > merged_data["y_min_bbox2d_loose"]) & ~is_fully_occluded is_occluded_bottom = (merged_data["y_max_bbox2d_tight"] < merged_data["y_max_bbox2d_loose"]) & ~is_fully_occluded is_occluded_bottom_left = is_occluded_bottom & is_occluded_left is_occluded_bottom_right = is_occluded_bottom & is_occluded_right is_occluded_top_right = is_occluded_top & is_occluded_right is_occluded_top_left = is_occluded_top & is_occluded_left label = np.array(["fully-visible"] * len(merged_data), dtype=[("occlusion_quadrant", "O")]) label[is_occluded_top] = "top" label[is_occluded_bottom] = "bottom" label[is_occluded_right] = "right" label[is_occluded_left] = "left" label[is_occluded_bottom_left] = "bottom-left" label[is_occluded_bottom_right] = "bottom-right" label[is_occluded_top_left] = "top-left" label[is_occluded_top_right] = "top-right" label[is_fully_occluded] = "fully-occluded" if return_bounding_boxes: output = helpers._join_struct_arrays([merged_data, label]) else: output = helpers._join_struct_arrays( [merged_data[["uniqueId", "name", "semanticLabel", "metadata", "instanceIds"]], label] ) return output
30,094
Python
43.784226
133
0.658271
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/visualize.py
import random
import colorsys
import numpy as np
import carb
from PIL import Image, ImageDraw
from . import helpers
from . import sensors


# Colorize Helpers
def colorize_distance(image_data):
    """Convert a single-channel distance image to a greyscale RGBA array (ints in [0, 255]).

    NOTE(review): mutates `image_data` in place (zero pixels bumped to 1e-5) —
    callers needing the raw values should pass a copy.
    """
    height, width = image_data.shape[:2]
    colorized_image = np.zeros((height, width, 4))
    image_data[image_data == 0.0] = 1e-5
    image_data = np.clip(image_data, 0, 255)
    image_data -= np.min(image_data)
    # Epsilon guards against division by zero on a constant image.
    image_data /= np.max(image_data) + 1e-8
    colorized_image[:, :, 0] = image_data
    colorized_image[:, :, 1] = image_data
    colorized_image[:, :, 2] = image_data
    colorized_image[:, :, 3] = 1
    colorized_image = (colorized_image * 255).astype(int)
    return colorized_image


def colorize_segmentation(segmentation_image):
    """Map each unique segmentation id to a random colour and return an RGBA int array."""
    segmentation_ids = np.unique(segmentation_image)
    num_colours = len(segmentation_ids)
    # This is to avoid generating lots of colours for semantic classes not in frame
    lut = np.array([segmentation_ids, list(range(num_colours))])
    new_segmentation_image = lut[1, np.searchsorted(lut[0, :], segmentation_image)]
    # Row 0 is a fully transparent colour used for the first (background) id.
    colours = np.array([[0.0] * 4] + random_colours(num_colours))
    segmentation_image_rgba = (colours[new_segmentation_image] * 255).astype(int)
    return segmentation_image_rgba


def colorize_bboxes(bboxes_2d_data, bboxes_2d_rgb):
    """Draw 2D bounding boxes on an RGB image, one colour per semantic id.

    Args:
        bboxes_2d_data: structured array with semanticId / x_min / y_min / x_max / y_max fields.
        bboxes_2d_rgb: RGB(A) image array to draw on.

    Returns:
        numpy array of the image with rectangles drawn.
    """
    semantic_id_list = []
    bbox_2d_list = []
    # semanticId 0 is treated as "no semantic class" and skipped.
    for bbox_2d in bboxes_2d_data:
        if bbox_2d["semanticId"] > 0:
            semantic_id_list.append(bbox_2d["semanticId"])
            bbox_2d_list.append(bbox_2d)
    semantic_id_list_np = np.unique(np.array(semantic_id_list))
    color_list = random_colours(len(semantic_id_list_np.tolist()))
    img = Image.fromarray(bboxes_2d_rgb)
    draw = ImageDraw.Draw(img)
    for bbox_2d in bbox_2d_list:
        index = np.where(semantic_id_list_np == bbox_2d["semanticId"])[0][0]
        bbox_color = color_list[index]
        draw.rectangle(
            xy=[(bbox_2d["x_min"], bbox_2d["y_min"]), (bbox_2d["x_max"], bbox_2d["y_max"])],
            outline=(
                int(255 * bbox_color[0]),
                int(255 * bbox_color[1]),
                int(255 * bbox_color[2]),
                int(255 * bbox_color[3]),
            ),
            width=4,
        )
    return np.asarray(img)


def colorize_bboxes_3d(bboxes_3d_corners, rgb):
    """bboxes_3d_corners: in the local camera frame"""
    height, width = rgb.shape[:2]

    # FILTER BOXES: drop boxes entirely outside the normalized [0, 1] viewport,
    # and boxes whose depth (z) is not fully within [0, 1].
    mask_uv = ~np.any(np.all(bboxes_3d_corners < 0, axis=1), axis=1) & ~np.any(
        np.all(bboxes_3d_corners > 1, axis=1), axis=1
    )
    mask_z = np.all(np.all(bboxes_3d_corners[..., 2:] >= 0, axis=1), axis=1) & np.all(
        np.all(bboxes_3d_corners[..., 2:] <= 1, axis=1), axis=1
    )
    bboxes_3d_corners = bboxes_3d_corners[mask_uv & mask_z]
    # Scale normalized (u, v) corners to pixel coordinates.
    bboxes_3d_corners = bboxes_3d_corners[..., :2].reshape(-1, 8, 2) * np.array([[width, height]])

    # Corner index quads for the 6 faces of each box.
    face_idx_list = [[0, 1, 3, 2], [4, 5, 7, 6], [2, 3, 7, 6], [0, 1, 5, 4], [0, 2, 6, 4], [1, 3, 7, 5]]
    colours = random_colours(len(face_idx_list))

    master_overlay = np.zeros_like(rgb)
    master_overlay_img = Image.fromarray(master_overlay)
    # Composite one semi-transparent overlay per face so overlapping faces blend.
    for face_idxs, colour in zip(face_idx_list, colours):
        overlay = Image.new("RGBA", (width, height))
        draw = ImageDraw.Draw(overlay)
        colour = [int(c * 255) for c in colour]
        for p in bboxes_3d_corners:
            draw.polygon([tuple(xy) for xy in p[face_idxs]], fill=tuple([*colour[:3], 120]))
            draw.line([tuple(xy) for xy in p[face_idxs]], width=3, fill=tuple(colour))
        master_overlay_img = Image.alpha_composite(master_overlay_img, overlay)

    rgb_img = Image.fromarray(rgb)
    rgb_img = Image.alpha_composite(rgb_img, master_overlay_img)
    return np.asarray(rgb_img)


def random_colours(N):
    """
    Generate random colors.
    Generate visually distinct colours by linearly spacing the hue
    channel in HSV space and then convert to RGB space.
    """
    colour_rand = random.Random(2018)  # Produces consistent random colours
    start = colour_rand.random()
    hues = [(start + i / N) % 1.0 for i in range(N)]
    colours = [list(colorsys.hsv_to_rgb(h, 0.9, 1.0)) + [1.0] for h in hues]
    colour_rand.shuffle(colours)
    return colours


def get_bbox2d_tight(viewport):
    """Return the RGB frame with tight 2D bounding boxes drawn on it."""
    rgb_data = sensors.get_rgb(viewport)
    bboxes_2d_data = sensors.get_bounding_box_2d_tight(viewport)
    bboxes_2d_rgb = colorize_bboxes(bboxes_2d_data, rgb_data)
    return bboxes_2d_rgb


def get_bbox2d_loose(viewport):
    """Return the RGB frame with loose 2D bounding boxes drawn on it."""
    rgb_data = sensors.get_rgb(viewport)
    bboxes_2d_data = sensors.get_bounding_box_2d_loose(viewport)
    bboxes_2d_rgb = colorize_bboxes(bboxes_2d_data, rgb_data)
    return bboxes_2d_rgb


def get_normals(viewport):
    """Return the normals buffer visualized as a uint8 RGBA image."""
    normals = sensors.get_normals(viewport)
    background_mask = np.sum(normals, axis=-1) == 0.0
    # normalize from [-1, 1] to [0, 255]
    normals = (normals + 1.0) / 2 * 255
    # Set background alpha to 0.
    normals = np.pad(normals, ((0, 0), (0, 0), (0, 1)), constant_values=255)
    normals[background_mask, 3] = 0.0
    return normals.astype(np.uint8)


def get_motion_vector(viewport):
    """Return the motion-vector buffer min-max normalized to uint8."""
    motion_vector = sensors.get_motion_vector(viewport)
    _min, _max = motion_vector.min(), motion_vector.max()
    motion_vector = (motion_vector - _min) / (_max - _min) * 255.0
    return motion_vector.astype(np.uint8)


def get_cross_correspondence(viewport):
    """Return the cross-correspondence buffer visualized as uint8."""
    cross_correspondence = sensors.get_cross_correspondence(viewport)
    # normalize from [-1, 1] to [0, 255]
    # invalid values of -1 convert to 0
    cross_correspondence = ((cross_correspondence + 1.0) / 2) * 255
    return cross_correspondence.astype(np.uint8)


def get_instance_segmentation(viewport, mode=None):
    """Return a colorized instance segmentation image; mode is "raw" or "parsed"."""
    if not mode:
        carb.log_info('[omni.syntheticdata.visualize] No semantic mode provided, defaulting to "parsed"')
        mode = "parsed"
    if mode == "raw":
        instance_data = sensors.get_instance_segmentation(viewport, parsed=False)
    elif mode == "parsed":
        instance_data = sensors.get_instance_segmentation(viewport, parsed=True)
    else:
        raise NotImplementedError
    instance_image = colorize_segmentation(instance_data)
    return instance_image


def get_semantic_segmentation(viewport, mode=""):
    """Return a colorized semantic segmentation image; mode is "raw" or "parsed"."""
    if not mode:
        carb.log_info('[omni.syntheticdata.visualize] No semantic mode provided, defaulting to "parsed"')
        # BUGFIX: previously defaulted to "instance_map", which is handled by neither
        # branch below, so calling with the default mode raised NotImplementedError.
        # "parsed" matches the log message above and mirrors get_instance_segmentation.
        mode = "parsed"
    if mode == "raw":
        semantic_data = sensors.get_semantic_segmentation(viewport, parsed=False)
    elif mode == "parsed":
        semantic_data = sensors.get_semantic_segmentation(viewport, parsed=True)
    else:
        raise NotImplementedError
    semantic_image = colorize_segmentation(semantic_data)
    return semantic_image


def get_bbox3d(viewport, mode="parsed"):
    """Return the RGB frame with projected 3D bounding boxes composited on top."""
    rgb_data = sensors.get_rgb(viewport)
    bbox_3d_data = sensors.get_bounding_box_3d(viewport, parsed=(mode == "parsed"), return_corners=True)
    bbox_3d_corners = bbox_3d_data["corners"]
    projected_corners = helpers.world_to_image(bbox_3d_corners.reshape(-1, 3), viewport).reshape(-1, 8, 3)
    bboxes_3d_rgb = colorize_bboxes_3d(projected_corners, rgb_data)
    return bboxes_3d_rgb


# *** DEPRECATED ***
def get_depth(viewport, mode="linear"):
    """DEPRECATED: visualize depth; mode is "linear" or "inverse_depth"."""
    if mode == "linear":
        depth_data = sensors.get_depth_linear(viewport)
        # The sensor's max value marks "no hit"; zero it so it maps to background.
        depth_data[depth_data == depth_data.max()] = 0.0
    elif mode == "inverse_depth":
        depth_data = sensors.get_depth(viewport)
    else:
        raise ValueError(f"Mode {mode} is invalid. Choose between " "['linear', 'inverse_depth'].")
    return colorize_distance(depth_data.squeeze())


def get_distance(viewport, mode="image_plane"):
    """Visualize distance; mode is "image_plane" or "camera"."""
    if mode == "image_plane":
        distance_data = sensors.get_distance_to_image_plane(viewport)
        distance_data[distance_data == distance_data.max()] = 0.0
    elif mode == "camera":
        distance_data = sensors.get_distance_to_camera(viewport)
        distance_data[distance_data == distance_data.max()] = 0.0
    else:
        raise ValueError(f"Mode {mode} is invalid. Choose between " "['image_plane', 'camera'].")
    return colorize_distance(distance_data.squeeze())
8,198
Python
36.610092
106
0.643328
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/extension.py
from pxr import Tf, Trace, Usd
import carb.settings
import omni.kit
import omni.ext

# legacy extension export
from . import helpers
from . import visualize
from . import sensors
from .SyntheticData import *

EXTENSION_NAME = "Synthetic Data"
# Module-level singleton set in on_startup and cleared in on_shutdown.
_extension_instance = None


class Extension(omni.ext.IExt):
    """Kit extension entry point for omni.syntheticdata.

    Wires the SyntheticData pipeline into the app: registers default settings,
    listens for the menubar and legacy-viewport extensions being enabled/disabled,
    and resets the SDG graphs on stage open/close.
    """

    def __init__(self):
        super().__init__()
        # Subscriptions / callbacks populated lazily when the corresponding
        # optional extensions are enabled (see on_startup hooks below).
        self.__viewport_legacy_event_sub = None
        self.__viewport_legacy_close = None
        self.__extension_loaded = None
        self.__menu_container = None

    def __menubar_core_loaded(self):
        # Called when omni.kit.viewport.menubar.core is enabled: create the menu.
        # Imported lazily so this module loads even when the menubar ext is absent.
        from .menu import SynthDataMenuContainer

        self.__menu_container = SynthDataMenuContainer()

    def __menubar_core_unloaded(self):
        # Called when omni.kit.viewport.menubar.core is disabled: tear down the menu.
        if self.__menu_container:
            self.__menu_container.destroy()
        self.__menu_container = None

    # NOTE(review): "legcy" is a typo for "legacy"; kept as-is since all call
    # sites in this class use the same spelling.
    def __viewport_legcy_loaded(self):
        # Called when the legacy viewport extension (omni.kit.window.viewport)
        # is enabled: start forwarding its update events.
        from .viewport_legacy import ViewportLegacy

        self.__viewport_legacy_event_sub = ViewportLegacy.create_update_subscription()
        self.__viewport_legacy_close = ViewportLegacy.close_viewports

    def __viewport_legcy_unloaded(self):
        # Called when the legacy viewport extension is disabled: drop the event
        # subscription and close any viewports it created.
        if self.__viewport_legacy_event_sub:
            self.__viewport_legacy_event_sub = None
        if self.__viewport_legacy_close:
            self.__viewport_legacy_close()
        self.__viewport_legacy_close = None

    def on_startup(self, ext_id):
        """Extension startup: settings defaults, hook subscriptions, SDG init.

        Args:
            ext_id: identifier of this extension, used to name the hook subscriptions.
        """
        global _extension_instance
        _extension_instance = self
        carb.log_info("[omni.syntheticdata] SyntheticData startup")

        settings = carb.settings.get_settings()
        settings.set_default("/exts/omni.syntheticdata/menubar/visible", True)
        settings.set_default("/exts/omni.syntheticdata/menubar/order", -1)
        settings.set_default("/exts/omni.syntheticdata/menubar/showSensorDefaultButton", False)

        manager = omni.kit.app.get_app().get_extension_manager()
        # Keep both hook subscriptions alive for the lifetime of the extension;
        # dropping the tuple (on_shutdown) unsubscribes them.
        self.__extension_loaded = (
            manager.subscribe_to_extension_enable(
                lambda _: self.__menubar_core_loaded(),
                lambda _: self.__menubar_core_unloaded(),
                ext_name="omni.kit.viewport.menubar.core",
                hook_name=f"{ext_id} omni.kit.viewport.menubar.core listener",
            ),
            manager.subscribe_to_extension_enable(
                lambda _: self.__viewport_legcy_loaded(),
                lambda _: self.__viewport_legcy_unloaded(),
                ext_name="omni.kit.window.viewport",
                hook_name=f"{ext_id} omni.kit.window.viewport listener",
            )
        )

        self._stage_event_sub = (
            omni.usd.get_context()
            .get_stage_event_stream()
            .create_subscription_to_pop(self._on_stage_event, name="omni.syntheticdata stage update")
        )

        # force settings
        # The SDG pipeline reads up to 3 frames of fabric stage history; warn
        # loudly if the app is configured with less.
        stageHistoryFrameCount = settings.get_as_int("/app/settings/fabricDefaultStageFrameHistoryCount")
        if not stageHistoryFrameCount or (int(stageHistoryFrameCount) < 3):
            carb.log_error(f"SyntheticData extension needs at least a stageFrameHistoryCount of 3")
        if settings.get_as_bool("/rtx/gatherColorToDisplayDevice") and settings.get_as_bool("/renderer/multiGpu/enabled"):
            carb.log_error("SyntheticData extension does not support /rtx/gatherColorToDisplayDevice=true with multiple GPUs.")

        SyntheticData.Initialize()

    def _on_stage_event(self, event):
        # React to stage lifecycle: close legacy viewports before a stage closes,
        # and reset the SDG graphs once a new stage has opened.
        if event.type == int(omni.usd.StageEventType.CLOSING):
            if self.__viewport_legacy_close:
                self.__viewport_legacy_close()
            # FIXME : this cause rendering issues (added for unittests)
            SyntheticData.Get().reset(False)
        # this is fishy but if we reset the graphs in the closing event the rendering is not happy
        elif event.type == int(omni.usd.StageEventType.OPENED):
            SyntheticData.Get().reset(False)
            if self.__menu_container:
                self.__menu_container.clear_all()

    def on_shutdown(self):
        """Extension shutdown: release subscriptions and reset SyntheticData."""
        global _extension_instance
        _extension_instance = None
        self.__extension_loaded = None
        self._stage_event_sub = None
        self.__viewport_legcy_unloaded()
        self.__menubar_core_unloaded()
        SyntheticData.Reset()

    def get_name(self):
        # Human-readable extension name.
        return EXTENSION_NAME

    @staticmethod
    def get_instance():
        # Returns the live Extension singleton, or None outside startup/shutdown.
        return _extension_instance
4,424
Python
36.820513
127
0.630877
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/SyntheticData.py
from sqlite3 import connect from pxr import Sdf, Usd, UsdRender import carb import omni.graph.core as og import omni.usd import omni.kit from dataclasses import dataclass, field """ SyntheticData class is the prototype interface implementation (will be eventually integrated in SynthetiData C++ interface ) - contains the definition of all omnigraphs - expose methods for the user to - add / remove custom nodes to graphs """ _sdg_iface = None class SyntheticDataException(Exception): def __init__(self, message="error"): self.message = message super().__init__(self.message) class SyntheticDataStage: # stage is set automatically from the node connections' stages AUTO = -1 # global simulation : node scheduled in the simulation graph SIMULATION = 0 # prerender : node scheduled in the prerender graph PRE_RENDER = 1 # postrender : node scheduled in the postrender graph for a specific renderproduct POST_RENDER = 2 # on demand : node scheduled in the postprocess graph ON_DEMAND = 3 class SyntheticData: _graphPathRoot = "/Render" _graphName = "SDGPipeline" _simulationGraphPath = "Simulation/" + _graphName _preRenderGraphPath = "PreRender/" + _graphName _postRenderGraphPath = "PostRender/" + _graphName _postProcessGraphPath = "PostProcess/" + _graphName _postProcessGraphTickOrder = -99 # eCheckForHydraRenderComplete + 1 _rendererTemplateName = "GpuInteropEntry" _renderVarBuffSuffix = "buff" _renderVarHostSuffix = "host" _renderVarToHostTemplateName = "PostRenderVarToHost" _renderProductAttributeName = "inputs:renderProductPath" _instanceMappingChangeTriggerTemplateName ="InstanceMappingChangeTrigger" _renderVarHostToDiskTriggerTemplateName ="RenderVarHostToDiskTrigger" _instanceMappingCtrl = "InstanceMappingPre" _defaultSemanticFilterName = "DefaultSemanticFilter" # graph registry : contains node templates used to construct a graph # node template name / id # list containing : # - node type # - list of template dependencies description : # - connection node template name or 
renderVar name # - index of the render product in the list provided during activation # - dictionnary of inputs / outputs mapping # - node attributes name/value dictionnary to be set during the activation # @dataclass class NodeConnectionTemplate: node_template_id: str render_product_idxs: tuple = (0,) attributes_mapping: dict = field(default_factory=dict) @dataclass class NodeTemplate: pipeline_stage: int node_type_id: str connections: list = field(default_factory=list) attributes: dict = field(default_factory=dict) _ogn_templates_registry = { # --- Camera "RenderProductCameraPrimPath": NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdSimRenderProductCamera" ), "PostRenderProductCamera": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdRenderProductCamera", [ NodeConnectionTemplate( _rendererTemplateName, attributes_mapping={ "outputs:rp": "inputs:renderResults", "outputs:exec": "inputs:exec", "outputs:gpu" : "inputs:gpu" }), NodeConnectionTemplate("RenderProductCameraPrimPath", attributes_mapping={ "outputs:exec": "inputs:exec"}) ] ), # --- GPUInterop _rendererTemplateName: NodeTemplate(SyntheticDataStage.POST_RENDER, "omni.graph.nodes.GpuInteropRenderProductEntry"), # --- InstanceMapping _instanceMappingCtrl : NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdSimInstanceMapping", attributes={"inputs:needTransform": False, "inputs:semanticFilterPredicate":"*:*"} ), _defaultSemanticFilterName: NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdSemanticFilter", attributes={"inputs:name": "default", "inputs:predicate": "*:*"} ), "InstanceMappingTransforms": NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdSimInstanceMapping", [ NodeConnectionTemplate(_instanceMappingCtrl, render_product_idxs=()) ], {"inputs:needTransform": True} ), "InstanceMappingPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostInstanceMapping", [ 
NodeConnectionTemplate("InstanceIdTokenMapSD"), NodeConnectionTemplate(_instanceMappingCtrl, attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()) ], {}, ), # --- NoOp node used to expose the semantic transforms renderVars "InstanceMappingPostWithTransforms": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdNoOp", [ NodeConnectionTemplate("InstanceMappingTransforms", attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()), NodeConnectionTemplate("InstanceMappingPost", attributes_mapping={"outputs:exec": "inputs:exec"}) ], {}, ), # --- BoundingBoxes "BoundingBox2DTightReduction": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostSemanticBoundingBox", [ NodeConnectionTemplate("BoundingBox2DTightSD"), NodeConnectionTemplate("InstanceMappingPost") ], {"inputs:renderVar": "BoundingBox2DTightSD"}, ), "BoundingBox2DLooseReduction": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostSemanticBoundingBox", [ NodeConnectionTemplate("BoundingBox2DLooseSD"), NodeConnectionTemplate("InstanceMappingPost") ], {"inputs:renderVar": "BoundingBox2DLooseSD"}, ), "BoundingBox3DReduction": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostSemanticBoundingBox", [ NodeConnectionTemplate("BoundingBox3DSD"), NodeConnectionTemplate("InstanceMappingTransforms", attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()), NodeConnectionTemplate("InstanceMappingPost") ], {"inputs:renderVar": "BoundingBox3DSD"}, ), "BoundingBox3DCameraProjection": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostSemantic3dBoundingBoxCameraProjection", [ NodeConnectionTemplate("BoundingBox3DSD"), NodeConnectionTemplate("BoundingBox3DReduction"), NodeConnectionTemplate("PostRenderProductCamera"), NodeConnectionTemplate("InstanceMappingTransforms", attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()), 
NodeConnectionTemplate("InstanceMappingPost") ] ), "BoundingBox3DFilter": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostSemantic3dBoundingBoxFilter", [ NodeConnectionTemplate("BoundingBox3DSD"), NodeConnectionTemplate("BoundingBox3DCameraProjection"), NodeConnectionTemplate("PostRenderProductCamera"), NodeConnectionTemplate("BoundingBox3DReduction"), NodeConnectionTemplate("InstanceMappingPost") ] ), # --- Post-Render triggerers _renderVarHostToDiskTriggerTemplateName: NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdFabricTimeRangeExecution", [ NodeConnectionTemplate ( _rendererTemplateName, attributes_mapping={ "outputs:rp": "inputs:renderResults", "outputs:gpu": "inputs:gpu" } ) ] ), _instanceMappingChangeTriggerTemplateName: NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdTimeChangeExecution", [ NodeConnectionTemplate("InstanceMappingPost"), NodeConnectionTemplate ( _rendererTemplateName, attributes_mapping={ "outputs:rp": "inputs:renderResults" } ) ] ), # --- PostRenderVarDisplay "LdrColorDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("LdrColorSD")], {"inputs:renderVar": "LdrColorSD"}, ), "DistanceToImagePlaneDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("DistanceToImagePlaneSD")], { "inputs:renderVar": "DistanceToImagePlaneSD", "inputs:parameters": [0.0, 100.0, 0.0, 0.0] }, ), "DistanceToCameraDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("DistanceToCameraSD")], { "inputs:renderVar": "DistanceToCameraSD", "inputs:parameters": [0.0, 100.0, 0.0, 0.0] }, ), "Camera3dPositionDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("Camera3dPositionSD")], 
{"inputs:renderVar": "Camera3dPositionSD"}, ), "NormalDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("NormalSD")], {"inputs:renderVar": "NormalSD"}, ), "CrossCorrespondenceDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("CrossCorrespondenceSD")], {"inputs:renderVar": "CrossCorrespondenceSD"}, ), "TargetMotionDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("TargetMotionSD")], { "inputs:renderVar": "TargetMotionSD", "inputs:parameters": [1.0, 5.0, 0.0, 0.0] }, ), "InstanceIdSegmentationDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [NodeConnectionTemplate("InstanceSegmentationSD")], {"inputs:renderVar": "InstanceSegmentationSD", "inputs:renderVarDisplay": "RawInstanceSegmentationSDDisplay", "inputs:mode": "segmentationMapMode"}, ), "InstanceSegmentationDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [ NodeConnectionTemplate("InstanceSegmentationSD"), NodeConnectionTemplate("InstanceMappingPost") ], {"inputs:renderVar": "InstanceSegmentationSD", "inputs:mode": "semanticPathMode"}, ), "SemanticSegmentationDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [ NodeConnectionTemplate("InstanceSegmentationSD"), NodeConnectionTemplate("InstanceMappingPost"), ], {"inputs:renderVar": "InstanceSegmentationSD", "inputs:renderVarDisplay": "SemanticSegmentationSDDisplay", "inputs:mode": "semanticLabelMode"}, ), "BoundingBox2DTightDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [ NodeConnectionTemplate("LdrColorSD"), NodeConnectionTemplate("InstanceMappingPost"), 
NodeConnectionTemplate("BoundingBox2DTightReduction"), ], {"inputs:renderVar": "LdrColorSD", "inputs:renderVarDisplay": "BoundingBox2DTightSDDisplay", "inputs:mode": "semanticBoundingBox2dMode"}, ), "BoundingBox2DLooseDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [ NodeConnectionTemplate("LdrColorSD"), NodeConnectionTemplate("InstanceMappingPost"), NodeConnectionTemplate("BoundingBox2DLooseReduction"), ], {"inputs:renderVar": "LdrColorSD", "inputs:renderVarDisplay": "BoundingBox2DLooseSDDisplay", "inputs:mode": "semanticBoundingBox2dMode"}, ), "BoundingBox3DDisplayPost": NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdPostRenderVarDisplayTexture", [ NodeConnectionTemplate("LdrColorSD"), NodeConnectionTemplate("Camera3dPositionSD"), NodeConnectionTemplate("PostRenderProductCamera"), NodeConnectionTemplate("InstanceMappingPost"), NodeConnectionTemplate("BoundingBox3DFilter"), NodeConnectionTemplate("BoundingBox3DCameraProjection"), NodeConnectionTemplate("BoundingBox3DReduction"), ], { "inputs:renderVar": "LdrColorSD", "inputs:renderVarDisplay": "BoundingBox3DSDDisplay", "inputs:mode": "semanticBoundingBox3dMode", "inputs:parameters": [0.0, 5.0, 0.027, 0.27] }, ), # --- PostProcess "PostProcessDispatcher": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdOnNewFrame" ), "PostProcessDispatch": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdOnNewRenderProductFrame", [NodeConnectionTemplate("PostProcessDispatcher", render_product_idxs=())] ), "PostProcessRenderProductCamera": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdRenderProductCamera", [ NodeConnectionTemplate("PostProcessDispatch"), NodeConnectionTemplate("PostRenderProductCamera", attributes_mapping={ "outputs:exec": "inputs:exec"}), NodeConnectionTemplate(_rendererTemplateName, attributes_mapping={ "outputs:exec": "inputs:exec"}) # provide the renderResults ] ), 
"InstanceMapping": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdInstanceMapping", [ NodeConnectionTemplate("PostProcessDispatch"), NodeConnectionTemplate("InstanceMappingPost", attributes_mapping={"outputs:exec": "inputs:exec"}) ] ), "InstanceMappingWithTransforms": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdInstanceMapping", [ NodeConnectionTemplate("PostProcessDispatch"), NodeConnectionTemplate("InstanceMappingTransforms", attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()), NodeConnectionTemplate("InstanceMappingPost", attributes_mapping={"outputs:exec": "inputs:exec"}) ] ), "InstanceMappingPtr": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdInstanceMappingPtr", [ NodeConnectionTemplate("PostProcessDispatch"), NodeConnectionTemplate("InstanceMappingPost", attributes_mapping={"outputs:exec": "inputs:exec"}) ] ), "InstanceMappingPtrWithTransforms": NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdInstanceMappingPtr", [ NodeConnectionTemplate("PostProcessDispatch"), NodeConnectionTemplate("InstanceMappingTransforms", attributes_mapping={"outputs:exec": "inputs:exec"}, render_product_idxs=()), NodeConnectionTemplate("InstanceMappingPost", attributes_mapping={"outputs:exec": "inputs:exec"}) ] ) } # set of rendervars associated to the node exposing them : # - renderVar generated by the renderer are exposed by the GpuInteropEntry # - others renderVars are generated by some postRender nodes # FIXME : the list of renderer rendervars should be queried from the renderer _ogn_rendervars = { # default renderer renderVaras "LdrColor": _rendererTemplateName, "HdrColor": _rendererTemplateName, "Depth": _rendererTemplateName, # sd renderer renderVars "LdrColorSD": _rendererTemplateName, "Camera3dPositionSD": _rendererTemplateName, "DistanceToImagePlaneSD": _rendererTemplateName, "DistanceToCameraSD": _rendererTemplateName, "DepthSD": _rendererTemplateName, "DepthLinearSD": 
_rendererTemplateName, "InstanceSegmentationSD": _rendererTemplateName, "SemanticSegmentationSD": _rendererTemplateName, "NormalSD": _rendererTemplateName, "TargetMotionSD": _rendererTemplateName, "BoundingBox2DTightSD": _rendererTemplateName, "BoundingBox2DLooseSD": _rendererTemplateName, "BoundingBox3DSD": _rendererTemplateName, "OcclusionSD": _rendererTemplateName, "TruncationSD": _rendererTemplateName, "CrossCorrespondenceSD": _rendererTemplateName, "InstanceIdTokenMapSD": _rendererTemplateName, "SemanticIdTokenMapSD": _rendererTemplateName, # postRender nodes rendervars "InstanceMappingInfoSDhost": "InstanceMappingPost", "SemanticMapSD": "InstanceMappingPost", "SemanticMapSDhost": "InstanceMappingPost", "SemanticPrimTokenSD": "InstanceMappingPost", "SemanticPrimTokenSDhost": "InstanceMappingPost", "InstanceMapSD": "InstanceMappingPost", "InstanceMapSDhost": "InstanceMappingPost", "InstancePrimTokenSD": "InstanceMappingPost", "InstancePrimTokenSDhost": "InstanceMappingPost", "SemanticLabelTokenSD": "InstanceMappingPost", "SemanticLabelTokenSDhost": "InstanceMappingPost", "SemanticLocalTransformSD": "InstanceMappingPostWithTransforms", "SemanticLocalTransformSDhost": "InstanceMappingPostWithTransforms", "SemanticWorldTransformSD": "InstanceMappingPostWithTransforms", "SemanticWorldTransformSDhost": "InstanceMappingPostWithTransforms", "SemanticBoundingBox2DExtentTightSD": "BoundingBox2DTightReduction", "SemanticBoundingBox2DInfosTightSD": "BoundingBox2DTightReduction", "SemanticBoundingBox2DExtentLooseSD": "BoundingBox2DLooseReduction", "SemanticBoundingBox2DInfosLooseSD": "BoundingBox2DLooseReduction", "SemanticBoundingBox3DExtentSD": "BoundingBox3DReduction", "SemanticBoundingBox3DInfosSD": "BoundingBox3DReduction", "SemanticBoundingBox3DCamCornersSD": "BoundingBox3DCameraProjection", "SemanticBoundingBox3DCamExtentSD": "BoundingBox3DCameraProjection", "SemanticBoundingBox3DFilterInfosSD": "BoundingBox3DFilter", "RenderProductCameraSD": 
"PostRenderProductCamera" } _ogn_post_display_types = [ "omni.syntheticdata.SdPostRenderVarDisplayTexture" ] _ogn_display_types = [ "omni.syntheticdata.SdRenderVarDisplayTexture" ] _ogn_default_activated_display_template = [] """lst: List of omnigraph node types conforming the display api. Todo : use reflexivity on the node outputs.""" @staticmethod def renderer_template_name() -> str: """Return the root renderer template name. To be scheduled a post-render node must be downstream to the root renderer node. """ return SyntheticData._rendererTemplateName @staticmethod def rendervar_host_to_disk_trigger_template_name() -> str: """Return the template name of the node triggering the execution of the post host to disk.""" return SyntheticData._renderVarHostToDiskTriggerTemplateName @staticmethod def register_display_rendervar_templates() -> None: """Automatically register SdRenderVarDisplayTexture node template for all registerd nodes whose type is in the post display type. The function is called for every statically registered nodes during the interface initialization . It may be called after having registered nodes whose type is omni.syntheticdata.SdPostRenderVarDisplayTexture. 
        """
        # snapshot the keys: register_node_template() mutates the registry while we iterate
        ogn_registry_keys = [key for key in SyntheticData._ogn_templates_registry.keys()]
        for tplName in ogn_registry_keys:
            tplParams = SyntheticData._ogn_templates_registry[tplName]
            tplNameDisplay = tplName[:-11] + "Display"
            if (tplParams.node_type_id in SyntheticData._ogn_post_display_types) and (tplNameDisplay not in SyntheticData._ogn_templates_registry):
                # prefer an explicit display renderVar attribute; otherwise derive it from the renderVar name
                if "inputs:renderVarDisplay" in tplParams.attributes:
                    renderVarDisplay = tplParams.attributes["inputs:renderVarDisplay"]
                else:
                    renderVarDisplay = tplParams.attributes["inputs:renderVar"] + "Display"
                SyntheticData.register_node_template(SyntheticData.NodeTemplate(
                    SyntheticDataStage.ON_DEMAND,
                    "omni.syntheticdata.SdRenderVarDisplayTexture",
                    [
                        SyntheticData.NodeConnectionTemplate("PostProcessDispatch"),
                        SyntheticData.NodeConnectionTemplate(tplName, attributes_mapping={"outputs:exec": "inputs:exec"})
                    ],
                    attributes={"inputs:renderVarDisplay": renderVarDisplay}
                ),
                    template_name=tplNameDisplay
                )

    @staticmethod
    def register_combine_rendervar_templates() -> None:
        """Automatically register SdPostCompRenderVarTextures node templates.

        One combine template is registered for every registered node whose type is in the
        post display type list. The function is called for every statically registered node
        during the interface initialization. It may be called after having registered nodes
        whose type is in the post display type list.
        """
        # snapshot the keys: register_combine_rendervar_template() mutates the registry
        ogn_registry_keys = [key for key in SyntheticData._ogn_templates_registry.keys()]
        for tplName in ogn_registry_keys:
            tplParams = SyntheticData._ogn_templates_registry[tplName]
            if (tplParams.node_type_id in SyntheticData._ogn_post_display_types) and (
                tplName + "Combine" not in SyntheticData._ogn_templates_registry
            ):
                SyntheticData.register_combine_rendervar_template(tplName)

    @staticmethod
    def register_combine_rendervar_template(template_name: str) -> None:
        """Register a SdPostCompRenderVarTextures node template for the given template name.

        Args:
            template_name: name of the node template for which registering a
                SdPostCompRenderVarTextures template

        Raises:
            SyntheticDataException: if the template is not registered or is not a display node.
        """
        if not template_name in SyntheticData._ogn_templates_registry:
            raise SyntheticDataException(f'graph node template "{template_name}" not registered')
        # cannot combine node results from the ondemand graph
        if SyntheticData._ogn_templates_registry[template_name].pipeline_stage > SyntheticDataStage.POST_RENDER:
            return
        templateParams = SyntheticData._ogn_templates_registry[template_name]
        if templateParams.node_type_id not in SyntheticData._ogn_post_display_types:
            raise SyntheticDataException(f'graph node template "{template_name}" not registered as a display node')
        templateNameCombine = template_name + "Combine"
        if templateNameCombine not in SyntheticData._ogn_templates_registry:
            SyntheticData.register_node_template(SyntheticData.NodeTemplate(
                SyntheticDataStage.POST_RENDER,
                "omni.syntheticdata.SdPostCompRenderVarTextures",
                [
                    SyntheticData.NodeConnectionTemplate(SyntheticData._rendererTemplateName),
                    SyntheticData.NodeConnectionTemplate(
                        template_name,
                        attributes_mapping={
                            "outputs:cudaPtr": "inputs:cudaPtr",
                            "outputs:width": "inputs:width",
                            "outputs:height": "inputs:height",
                            "outputs:format": "inputs:format"
                        }
                    )
                ]
            ),
                template_name=templateNameCombine,
            )

    @staticmethod
    def register_device_rendervar_to_host_templates(rendervars: list) -> None:
        """Automatically register SdPostRenderVarToHost node templates for the given rendervars.

        Args:
            rendervars: list of renderVar names to register the rendervar device to host copy
                node template
        """
        # copy the rendervars list since the registration may modify the list
        rendervars_copy = rendervars.copy()
        for rv in rendervars_copy:
            rv_host = rv + SyntheticData._renderVarHostSuffix
            # skip renderVars that are already host-side or already have a host copy registered
            if rv.endswith(SyntheticData._renderVarHostSuffix) or (rv_host in SyntheticData._ogn_rendervars):
                continue
            template_name = rv + "PostCopyToHost"
            if template_name not in SyntheticData._ogn_templates_registry:
                SyntheticData.register_node_template(
                    SyntheticData.NodeTemplate(
                        SyntheticDataStage.POST_RENDER,
                        "omni.syntheticdata.SdPostRenderVarToHost",
                        [
                            SyntheticData.NodeConnectionTemplate(rv),
                            SyntheticData.NodeConnectionTemplate(
                                SyntheticData._rendererTemplateName,
                                attributes_mapping={
                                    "outputs:rp": "inputs:rp",
                                    "outputs:gpu": "inputs:gpu"
                                }
                            )
                        ],
                        {
                            "inputs:renderVar": rv,
                            "inputs:renderVarHostSuffix": SyntheticData._renderVarHostSuffix
                        }
                    ),
                    rendervars=[rv_host],
                    template_name=template_name,
                )

    @staticmethod
    def register_device_rendervar_tex_to_buff_templates(rendervars: list) -> None:
        """Automatically register SdPostRenderVarTextureToBuffer node templates for the given rendervars.

        Args:
            rendervars: list of renderVar names to register the rendervar device texture to
                buffer copy node template
        """
        # copy the rendervars list since the registration may modify the list
        rendervars_copy = rendervars.copy()
        for rv in rendervars_copy:
            rv_buff = rv + SyntheticData._renderVarBuffSuffix
            # skip renderVars that are already buffers or already have a buffer copy registered
            if rv.endswith(SyntheticData._renderVarBuffSuffix) or (rv_buff in SyntheticData._ogn_rendervars):
                continue
            template_name = rv + "PostCopyToBuff"
            if template_name not in SyntheticData._ogn_templates_registry:
                SyntheticData.register_node_template(
                    SyntheticData.NodeTemplate(
                        SyntheticDataStage.POST_RENDER,
                        "omni.syntheticdata.SdPostRenderVarTextureToBuffer",
                        [
                            SyntheticData.NodeConnectionTemplate(rv),
                            SyntheticData.NodeConnectionTemplate(
                                SyntheticData._rendererTemplateName,
                                attributes_mapping={
                                    "outputs:rp": "inputs:rp",
                                    "outputs:gpu": "inputs:gpu"
                                }
                            )
                        ],
                        {
                            "inputs:renderVar": rv,
                            "inputs:renderVarBufferSuffix": SyntheticData._renderVarBuffSuffix
                        }
                    ),
                    rendervars=[rv_buff],
                    template_name=template_name,
                )

    @staticmethod
    def register_export_rendervar_ptr_templates(rendervars: list) -> None:
        """Automatically register SdRenderVarPtr node templates for the given rendervars.

        Args:
            rendervars: list of renderVar names to register the ptr node template
        """
        for rv in rendervars:
            template_name = rv + "Ptr"
            if template_name not in SyntheticData._ogn_templates_registry:
                SyntheticData.register_node_template(
                    SyntheticData.NodeTemplate(
                        SyntheticDataStage.ON_DEMAND,
                        "omni.syntheticdata.SdRenderVarPtr",
                        [
                            SyntheticData.NodeConnectionTemplate(rv, (0,), None),
                            SyntheticData.NodeConnectionTemplate("PostProcessDispatch")
                        ],
                        {"inputs:renderVar": rv}
                    ),
                    template_name=template_name,
                )

    @staticmethod
    def register_export_rendervar_array_templates(rendervars: list) -> None:
        """Automatically register SdRenderVarToRawArray node templates for the given rendervars.

        Args:
            rendervars: list of renderVar names to register the export raw array node template
        """
        for rv in rendervars:
            template_name = rv + "ExportRawArray"
            if template_name not in SyntheticData._ogn_templates_registry:
                SyntheticData.register_node_template(
                    SyntheticData.NodeTemplate(
                        SyntheticDataStage.ON_DEMAND,
                        "omni.syntheticdata.SdRenderVarToRawArray",
                        [
                            SyntheticData.NodeConnectionTemplate(rv, (0,), None),
                            SyntheticData.NodeConnectionTemplate("PostProcessDispatch")
                        ],
                        {"inputs:renderVar": rv}
                    ),
                    template_name=template_name,
                )

    @staticmethod
    def convert_sensor_type_to_rendervar(legacy_type_name: str) -> str:
        """Convert a legacy sensor type name to its renderVar name.

        Args:
            legacy_type_name: legacy sensor type name to convert

        Returns:
            the name of the renderVar corresponding to the legacy name
        """
        if legacy_type_name == "Rgb":
            return "LdrColorSD"
        elif legacy_type_name == "MotionVector":
            return "TargetMotionSD"
        else:
            # default mapping: legacy sensors map to "<name>SD" renderVars
            return legacy_type_name + "SD"

    @staticmethod
    def disable_async_rendering():
        """Disable asynchronous rendering.

        Since asyncRendering is not supported by the fabric, graphs are currently not
        compatible with this mode.
        """
        settings = carb.settings.get_settings()
        if settings.get("/app/asyncRendering") or settings.get("/app/asyncRenderingLowLatency"):
            carb.log_warn(f"SyntheticData is not supporting asyncRendering : switching it off.")
            settings.set("/app/asyncRendering", False)
            settings.set("/app/asyncRenderingLowLatency", False)

    @staticmethod
    def _has_rendervar(renderProductPath: str, renderVar: str, usdStage: Usd.Stage = None) -> bool:
        # Return True if the renderVar prim is targeted by the renderProduct's "orderedVars"
        # relationship. Raises if the stage or renderProduct is invalid.
        if not usdStage:
            usdStage = omni.usd.get_context().get_stage()
        if not usdStage:
            raise SyntheticDataException("No stage provided or in use by default UsdContext")
        renderProductPrim = usdStage.GetPrimAtPath(renderProductPath)
        if not renderProductPrim:
            raise SyntheticDataException(f"invalid renderProduct {renderProductPath}")
        renderVarPrimPath = f"/Render/Vars/{renderVar}"
        renderVarPrim = usdStage.GetPrimAtPath(renderVarPrimPath)
        if not renderVarPrim:
            return False
        renderProductRenderVarRel = renderProductPrim.GetRelationship("orderedVars")
        if not renderProductRenderVarRel:
            return False
        return renderVarPrimPath in renderProductRenderVarRel.GetTargets()

    @staticmethod
    def _add_rendervar(renderProductPath: str, renderVar: str, usdStage: Usd.Stage = None) -> None:
        # Add the renderVar to the renderProduct, either through the legacy Viewport (if one
        # owns this renderProduct) or by authoring the USD session layer directly.
        # FIXME : we have to use the legacy Viewport interface to modify the renderproduct, otherwise changes may be overwritten
        vp_1found = False
        try:
            import omni.kit.viewport_legacy
            vp_iface = omni.kit.viewport_legacy.get_viewport_interface()
            viewports = vp_iface.get_instance_list()
            for viewport in viewports:
                vpw = vp_iface.get_viewport_window(viewport)
                if vpw.get_render_product_path() == renderProductPath:
                    vpw.add_aov(renderVar, False)
                    vp_1found = True
        except ImportError:
            # legacy Viewport extension not available: fall through to the USD path
            pass
        # Both Viewport-1 and Viewport-2 won't share a common renderProductPath
        if vp_1found:
            return
        if not usdStage:
            import omni.usd  # this is needed (why ?)
            usdStage = omni.usd.get_context().get_stage()
        if not usdStage:
            raise SyntheticDataException("No stage provided or in use by default UsdContext")
        # author everything in the session layer so the stage's root layer is left untouched
        with Usd.EditContext(usdStage, usdStage.GetSessionLayer()):
            renderProductPrim = usdStage.GetPrimAtPath(renderProductPath)
            if not renderProductPrim:
                raise SyntheticDataException(f"invalid renderProduct {renderProductPath}")
            renderVarPrimPath = f"/Render/Vars/{renderVar}"
            renderVarPrim = usdStage.GetPrimAtPath(renderVarPrimPath)
            if not renderVarPrim:
                renderVarPrim = usdStage.DefinePrim(renderVarPrimPath)
            if not renderVarPrim:
                raise SyntheticDataException(f"cannot create renderVar {renderVarPrimPath}")
            renderVarPrim.CreateAttribute("sourceName", Sdf.ValueTypeNames.String).Set(renderVar)
            # keep the renderVar prim out of the stage window and protect it from deletion
            renderVarPrim.SetMetadata("hide_in_stage_window", True)
            renderVarPrim.SetMetadata("no_delete", True)
            renderProductRenderVarRel = renderProductPrim.GetRelationship("orderedVars")
            if not renderProductRenderVarRel:
                renderProductRenderVarRel = renderProductPrim.CreateRelationship("orderedVars")
            if not renderProductRenderVarRel:
                raise SyntheticDataException(
                    f"cannot set orderedVars relationship for renderProduct {renderProductPath}")
            renderProductRenderVarRel.AddTarget(renderVarPrimPath)

    @staticmethod
    def _remove_rendervar(renderProductPath: str, renderVar: str, usdStage: Usd.Stage = None) -> None:
        # Remove the renderVar target from the renderProduct's "orderedVars" relationship.
        # we should not remove the LdrColor since it is the default renderVar
        if renderVar == "LdrColor":
            return
        # FIXME : we have to use the legacy Viewport interface to modify the renderproduct, otherwise changes may be overwritten
        vp_1found = False
        try:
            import omni.kit.viewport_legacy
            vp_iface = omni.kit.viewport_legacy.get_viewport_interface()
            viewports = vp_iface.get_instance_list()
            for viewport in viewports:
                vpw = vp_iface.get_viewport_window(viewport)
                if vpw.get_render_product_path() == renderProductPath:
                    # NOTE(review): this calls add_aov() in a *remove* path, identical to
                    # _add_rendervar — looks like a copy-paste; confirm the intended legacy
                    # Viewport API for removing an AOV.
                    vpw.add_aov(renderVar, False)
                    vp_1found = True
        except ImportError:
            # legacy Viewport extension not available: fall through to the USD path
            pass
        # Both Viewport-1 and Viewport-2 won't share a common renderProductPath
        if vp_1found:
            return
        if not usdStage:
            import omni.usd  # this is needed (why ?)
            usdStage = omni.usd.get_context().get_stage()
        if not usdStage:
            raise SyntheticDataException("No stage provided or in use by default UsdContext")
        with Usd.EditContext(usdStage, usdStage.GetSessionLayer()):
            renderProductPrim = usdStage.GetPrimAtPath(renderProductPath)
            if not renderProductPrim:
                raise SyntheticDataException(f"invalid renderProduct {renderProductPath}")
            renderVarPrimPath = f"/Render/Vars/{renderVar}"
            renderProductRenderVarRel = renderProductPrim.GetRelationship("orderedVars")
            if not renderProductRenderVarRel:
                return
            renderProductRenderVarRel.RemoveTarget(renderVarPrimPath)

    @staticmethod
    def get_registered_visualization_template_names() -> list:
        """Get the registered node template names whose types are in the display type list.

        Returns:
            list of registered template names whose types are in the display type list
        """
        registeredTemplateName = []
        for name, val in SyntheticData._ogn_templates_registry.items():
            if val.node_type_id in SyntheticData._ogn_display_types:
                registeredTemplateName.append(name)
        return registeredTemplateName

    @staticmethod
    def get_registered_visualization_template_names_for_display() -> list:
        """Yield (display name, template name) pairs for the registered visualization templates.

        NOTE: despite the ``list`` annotation this is a generator.

        Returns:
            tuples of registered template names whose types are in the display type list and
            their display name
        """
        for sensor in SyntheticData.get_registered_visualization_template_names():
            # by convention visualization sensors end with "Display"
            yield (sensor[0:-7] if sensor.endswith("Display") else sensor, sensor)

    @staticmethod
    def get_visualization_template_name_default_activation(template_name: str) -> bool:
        """Get the default activation status of a visualization node template.

        Args:
            template_name: the name of the visualization node template to activate/deactivate
                by default

        Returns:
            True if the visualization node template is activated by default, False otherwise
        """
        return template_name in SyntheticData._ogn_default_activated_display_template

    @staticmethod
    def reset_visualization_template_name_default_activation():
        """Deactivate all visualization node templates by default."""
        SyntheticData._ogn_default_activated_display_template.clear()

    @staticmethod
    def set_visualization_template_name_default_activation(template_name: str, activation: bool) -> bool:
        """Set the default activation status of a visualization node template.

        Args:
            template_name: the name of the visualization node template to activate/deactivate
                by default
            activation: True if the visualization node template should be activated/deactivated
                by default

        Returns:
            True if the template name is an activated visualization template name after the
            call, False otherwise
        """
        if template_name in SyntheticData.get_registered_visualization_template_names():
            is_activated_by_default = template_name in SyntheticData._ogn_default_activated_display_template
            if activation and not is_activated_by_default:
                SyntheticData._ogn_default_activated_display_template.append(template_name)
            elif not activation and is_activated_by_default:
                SyntheticData._ogn_default_activated_display_template.remove(template_name)
        return SyntheticData.get_visualization_template_name_default_activation(template_name)

    @staticmethod
    def _get_graph_path(stage: int, renderProductPath: str = None) -> str:
        # Map a SyntheticDataStage (and optional renderProduct) to its graph prim path.
        # simulation stages live in the same graph
        if stage == SyntheticDataStage.SIMULATION:
            return f"{SyntheticData._graphPathRoot}/{SyntheticData._simulationGraphPath}"
        elif stage == SyntheticDataStage.PRE_RENDER:
            # check if the renderProductPath has already an associated graph
            usdStage = omni.usd.get_context().get_stage()
            prim = usdStage.GetPrimAtPath(renderProductPath)
            ogpreprocesspath_attribute = prim.GetAttribute("ogPreProcessPath")
            if ogpreprocesspath_attribute:
                return f"{ogpreprocesspath_attribute.Get()}/{SyntheticData._graphName}"
            else:
                return f"{renderProductPath}/{SyntheticData._preRenderGraphPath}"
        # postprocess stages live in the same graph
        elif stage == SyntheticDataStage.ON_DEMAND:
            return f"{SyntheticData._graphPathRoot}/{SyntheticData._postProcessGraphPath}"
        elif stage == SyntheticDataStage.POST_RENDER:
            # check if the renderProductPath has already an associated graph
            usdStage = omni.usd.get_context().get_stage()
            prim = usdStage.GetPrimAtPath(renderProductPath)
            ogpostprocesspath_attribute = prim.GetAttribute("ogPostProcessPath")
            if ogpostprocesspath_attribute:
                return f"{ogpostprocesspath_attribute.Get()}/{SyntheticData._graphName}"
            else:
                return f"{renderProductPath}/{SyntheticData._postRenderGraphPath}"

    @staticmethod
    def _get_node_path(templateName: str, renderProductPath: str = None) -> str:
        # Build the node prim path for a template instance, disambiguated per renderProduct.
        if templateName not in SyntheticData._ogn_templates_registry:
            raise SyntheticDataException(f'graph node template "{templateName}" not registered')
        nodeStage = SyntheticData._ogn_templates_registry[templateName].pipeline_stage
        graphPath = SyntheticData._get_graph_path(nodeStage, renderProductPath)
        # prefix the node name by the renderproduct name for nodes living in the same graph
        # (simulation and postprocess graphs)
        nodeName = templateName
        if renderProductPath:
            renderProductName = renderProductPath.split("/")[-1]
            nodeName = f"{renderProductName}_{nodeName}"
        return f"{graphPath}/{nodeName}"

    @staticmethod
    def _unregister_node_template_rec(templateList: list) -> None:
        # Recursively remove the templates, the renderVars they produce, and every template
        # that connects to any of them.
        if not templateList:
            return
        templateDependenciesList = []
        for templateName in templateList:
            if templateName not in SyntheticData._ogn_templates_registry:
                continue
            # collect and drop the renderVars produced by this template
            dependencyNames = []
            for rv, tpl in SyntheticData._ogn_rendervars.items():
                if tpl == templateName:
                    dependencyNames.append(rv)
            for rv in dependencyNames:
                SyntheticData._ogn_rendervars.pop(rv)
            dependencyNames.append(templateName)
            SyntheticData._ogn_templates_registry.pop(templateName)
            # any template connected to this one (or to its renderVars) must go too
            for otherTemplateName, otherTemplateVal in SyntheticData._ogn_templates_registry.items():
                for otherTemplateConnection in otherTemplateVal.connections:
                    if otherTemplateConnection.node_template_id in dependencyNames:
                        templateDependenciesList.append(otherTemplateName)
        SyntheticData._unregister_node_template_rec(templateDependenciesList)

    @staticmethod
    def _connect_nodes(srcNode, dstNode, connectionMap, enable) -> bool:
        # (Dis)connect explicit attribute pairs between two nodes; best effort, returns
        # False if any mapped attribute is missing on either side.
        success = True
        for srcAttrName, dstAttrName in connectionMap.items():
            if (not srcNode.get_attribute_exists(srcAttrName)) or (not dstNode.get_attribute_exists(dstAttrName)):
                carb.log_error(
                    f"SyntheticData failed to (dis)connect node {srcNode.get_prim_path()}:{srcAttrName} to {dstNode.get_prim_path()}:{dstAttrName}"
                )
                success = False
                # best effort
                continue
            dstAttr = dstNode.get_attribute(dstAttrName)
            srcAttr = srcNode.get_attribute(srcAttrName)
            if enable:
                srcAttr.connect(dstAttr, True)
            else:
                srcAttr.disconnect(dstAttr, True)
        return success

    @staticmethod
    def _auto_connect_nodes(srcNode, dstNode, enable, srcIndex=0) -> bool:
        """Connect a source node to a destination node.

        The connections are made by matching outputs / inputs node attribute names.
        In case of output attribute name clashes, the first node in the list is connected.
        Optionally output attribute names may be indexed: terminated by an underscore followed
        by the srcNode list index (no leading zero). Indexed output attribute names take
        precedence.
        """
        success = False
        for attr in srcNode.get_attributes():
            srcAttrName = attr.get_name()
            if not srcAttrName.startswith("outputs:"):
                continue
            # try the indexed form "inputs:<name>_<srcIndex>" first
            dstAttrName = "inputs:%s_%d" % (srcAttrName[8:], srcIndex)
            if (
                not dstNode.get_attribute_exists(dstAttrName)
                or dstNode.get_attribute(dstAttrName).get_upstream_connection_count()
            ):
                dstAttrName = "inputs:%s" % srcAttrName[8:]
            if (
                not dstNode.get_attribute_exists(dstAttrName)
                or dstNode.get_attribute(dstAttrName).get_upstream_connection_count()
            ):
                continue
            dstAttr = dstNode.get_attribute(dstAttrName)
            srcAttr = srcNode.get_attribute(srcAttrName)
            if enable:
                srcAttr.connect(dstAttr, True)
            else:
                srcAttr.disconnect(dstAttr, True)
            success = True
        return success

    @staticmethod
    def Initialize():
        """Initialize interface singleton instance."""
        global _sdg_iface
        if _sdg_iface is None:
            # auto-register the derived templates for every statically registered renderVar
            SyntheticData.register_device_rendervar_tex_to_buff_templates(SyntheticData._ogn_rendervars)
            SyntheticData.register_device_rendervar_to_host_templates(SyntheticData._ogn_rendervars)
            SyntheticData.register_display_rendervar_templates()
            SyntheticData.register_combine_rendervar_templates()
            SyntheticData.register_export_rendervar_ptr_templates(SyntheticData._ogn_rendervars)
            SyntheticData.register_export_rendervar_array_templates(SyntheticData._ogn_rendervars)
            _sdg_iface = SyntheticData()

    @staticmethod
    def Get():
        """Get the interface singleton instance."""
        global _sdg_iface
        return _sdg_iface

    @staticmethod
    def Reset():
        """Reset the interface singleton."""
        global _sdg_iface
        if _sdg_iface:
            _sdg_iface.reset()
        _sdg_iface = None

    @staticmethod
    def register_node_template(node_template: NodeTemplate, rendervars: list = None, template_name: str = None) -> str:
        """Register a node template.

        Add a node template in the node registry. After the template has been added it may be
        activated for being executed in its associated stage.

        Args:
            node_template : template to be added to the registry
            rendervars : list of renderVar the node is producing
            template_name : unique name id of the template

        Returns:
            the unique name id of the registered template
        """
        # check type
        if og.GraphRegistry().get_node_type_version(node_template.node_type_id) is None:
            raise SyntheticDataException(
                f"failed to register node template. Type {node_template.node_type_id} is not in the registry")
        # check template_name
        if template_name is None:
            # generate a unique name from the node type and the number of templates of that type
            numTypeTemplates = 0
            for template in SyntheticData._ogn_templates_registry.values():
                if template.node_type_id == node_template.node_type_id:
                    numTypeTemplates += 1
            template_name = "%s_%04d" % (node_template.node_type_id.split(".")[-1], numTypeTemplates)
        elif template_name in SyntheticData._ogn_templates_registry:
            raise SyntheticDataException(
                f"failed to register node template. Template {template_name} is already in the registry")
        elif template_name in SyntheticData._ogn_rendervars:
            raise SyntheticDataException(
                f"failed to register node template. Template {template_name} is already registered as a renderVar")
        # check connections
        autoStage = SyntheticDataStage.POST_RENDER if rendervars else SyntheticDataStage.SIMULATION
        i_connections = node_template.connections if node_template.connections else []
        for conn in i_connections:
            conn_name = conn.node_template_id
            # a connection may name a renderVar: resolve it to the template producing it
            if conn_name in SyntheticData._ogn_rendervars:
                conn_name = SyntheticData._ogn_rendervars[conn_name]
            if conn_name not in SyntheticData._ogn_templates_registry:
                raise SyntheticDataException(
                    f"failed to register node template. Connection template name {conn_name} is not in the registry")
            conn_stage = SyntheticData._ogn_templates_registry[conn_name].pipeline_stage
            # the node cannot run earlier than any of its inputs
            autoStage = max(autoStage, conn_stage)
            conn_map = conn.attributes_mapping if conn.attributes_mapping else {}
            if not type(conn_map) is dict:
                raise SyntheticDataException(
                    f"failed to register node template. connection attributes map is not a dictionnary")
        # check stage
        if node_template.pipeline_stage == SyntheticDataStage.AUTO:
            node_template.pipeline_stage = autoStage
        if node_template.pipeline_stage < autoStage:
            raise SyntheticDataException(
                f"failed to register node template. Stage {node_template.pipeline_stage} is not compatible with the connections")
        # check and register renderVars
        if rendervars:
            if node_template.pipeline_stage != SyntheticDataStage.POST_RENDER:
                raise SyntheticDataException(
                    f"failed to register node template. Only postRender nodes may produce renderVars")
            for rv in rendervars:
                if (rv in SyntheticData._ogn_templates_registry) or (rv in SyntheticData._ogn_rendervars):
                    raise SyntheticDataException(f"failed to register node template. RenderVar {rv} already registered")
                else:
                    SyntheticData._ogn_rendervars[rv] = template_name
        SyntheticData._ogn_templates_registry[template_name] = node_template
        return template_name

    @staticmethod
    def is_node_template_registered(template_name: str) -> bool:
        """Check if a node template has already been registered.

        Args:
            template_name: name of the node template to check

        Returns:
            True if the template_name specifies a node template within the registry,
            False otherwise
        """
        return template_name in SyntheticData._ogn_templates_registry

    @staticmethod
    def unregister_node_template(template_name: str) -> None:
        """Unregister a node template.

        Remove a node template from the registry and all its dependencies. After removing a
        template, it cannot be activated anymore, nor its dependent templates.
        """
        SyntheticData._unregister_node_template_rec([template_name])

    def _reset_node_graph(self, nodeGraph):
        # Destroy every node of the wrapped graph, then the graph node itself in its
        # orchestration graph.
        graph = nodeGraph.get_wrapped_graph()
        if graph.is_valid():
            for node in graph.get_nodes():
                if node.is_valid():
                    graph.destroy_node(node.get_prim_path(), True)
        orchestration_graph = nodeGraph.get_graph()
        if orchestration_graph.is_valid():
            orchestration_graph.destroy_node(nodeGraph.get_prim_path(), True)

    def _set_process_path(self, renderProductPath, graphPath, processPathAttribueName):
        # Record the graph's parent path on the renderProduct prim (e.g. "ogPostProcessPath")
        # so _get_graph_path can find an already-associated graph.
        if not renderProductPath:
            raise SyntheticDataException("invalid renderProductPath")
        usdStage = omni.usd.get_context().get_stage()
        prim = usdStage.GetPrimAtPath(renderProductPath)
        ogprocesspath_attribute = prim.GetAttribute(processPathAttribueName)
        if not ogprocesspath_attribute:
            assert graphPath.endswith("/" + SyntheticData._graphName)
            # strip the trailing "/<graphName>" to store the process root path
            ogProcessPath = graphPath[: -len("/" + SyntheticData._graphName)]
            prim.CreateAttribute(processPathAttribueName, Sdf.ValueTypeNames.String).Set(ogProcessPath)

    def _get_or_create_graph(self, path: str, stage: int, renderProductPath: object) -> object:
        # Return the cached graph at `path`, creating it in the proper orchestration
        # pipeline stage if needed.
        if path in self._nodeGraphs:
            return self._nodeGraphs[path]
        pipelineStage = og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_SIMULATION
        executionModel = "push"
        backingType = og.GraphBackingType.GRAPH_BACKING_TYPE_FABRIC_SHARED
        if (stage == SyntheticDataStage.PRE_RENDER):
            pipelineStage = og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_PRERENDER
        elif (stage == SyntheticDataStage.POST_RENDER):
            pipelineStage = og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_POSTRENDER
        elif (stage == SyntheticDataStage.ON_DEMAND):
            pipelineStage = og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_ONDEMAND
            # on-demand graphs are evaluated explicitly (see _post_process_graph_tick)
            executionModel = "execution"
        usdStage = omni.usd.get_context().get_stage()
        primExistWorkaround = not usdStage.GetPrimAtPath(path)
        orchestration_graphs = og.get_global_orchestration_graphs_in_pipeline_stage(pipelineStage)
        nodeGraph = orchestration_graphs[0].create_graph_as_node(
            path.replace("/", "_"),
            path,
            executionModel,
            True,
            primExistWorkaround,
            backingType,
            pipelineStage,
        )
        # remember the graph location on the renderProduct prim for later lookups
        if stage == SyntheticDataStage.PRE_RENDER:
            self._set_process_path(renderProductPath, path, "ogPreProcessPath")
        elif stage == SyntheticDataStage.POST_RENDER:
            self._set_process_path(renderProductPath, path, "ogPostProcessPath")
        self._nodeGraphs[path] = nodeGraph
        return nodeGraph

    def _activate_node_rec(self, templateName: str, renderProductIndex: int = -1, renderProductPaths: list = None,
                           render_var_activations: dict = None) -> None:
        # Recursively instantiate the template's node and all its upstream dependencies,
        # collecting per-renderProduct renderVar activation counts into render_var_activations.
        # Returns the resolved template name.
        renderProductPath = renderProductPaths[renderProductIndex] if renderProductIndex > -1 else None
        # renderVar template
        if templateName in SyntheticData._ogn_rendervars:
            renderVarName = templateName
            templateName = SyntheticData._ogn_rendervars[templateName]
            if (not render_var_activations is None) and renderProductPath and (templateName == SyntheticData._rendererTemplateName):
                # count the renderVar activation requests per renderProduct
                if renderProductPath not in render_var_activations:
                    render_var_activations[renderProductPath] = {renderVarName: 0}
                elif renderVarName not in render_var_activations[renderProductPath]:
                    render_var_activations[renderProductPath][renderVarName] = 0
                render_var_activations[renderProductPath][renderVarName] += 1
        if templateName not in SyntheticData._ogn_templates_registry:
            raise SyntheticDataException(f"graph node template depends on unregistered template {templateName}")
        nodePath = SyntheticData._get_node_path(templateName, renderProductPath)
        if nodePath in self._graphNodes:
            # already instantiated: nothing to do
            return templateName
        template = SyntheticData._ogn_templates_registry[templateName]
        nodeStage = template.pipeline_stage
        graphPath = SyntheticData._get_graph_path(nodeStage, renderProductPath)
        nodeGraph = self._get_or_create_graph(graphPath, nodeStage, renderProductPath)
        nodeType = template.node_type_id
        usdStage = omni.usd.get_context().get_stage()
        primExistWorkaround = not usdStage.GetPrimAtPath(nodePath)
        self._graphNodes[nodePath] = nodeGraph.get_wrapped_graph().create_node(nodePath, nodeType, primExistWorkaround)
        node = self._graphNodes[nodePath]
        # setup static attributes
        for attrName, attrVal in template.attributes.items():
            if node.get_attribute_exists(attrName):
                node.get_attribute(attrName).set(attrVal)
            else:
                carb.log_error(f"SyntheticData failed to set node {nodePath} static attribute {attrName}")
                # do not return error : the default value in the ogn spec will be used
        # set inputs:renderProductPathPath
        if renderProductPath and node.get_attribute_exists(SyntheticData._renderProductAttributeName):
            node.get_attribute(SyntheticData._renderProductAttributeName).set(renderProductPath)
        # recursive call for upstream connections
        for connIndex in range(len(template.connections)):
            connection = template.connections[connIndex]
            connTemplateName = connection.node_template_id
            connRenderProductPaths = [renderProductPaths[idx] for idx in connection.render_product_idxs] if (
                renderProductPaths and connection.render_product_idxs) else None
            # activate the template
            connTemplateName = self._activate_node_rec(connTemplateName, 0 if connRenderProductPaths else -1,
                                                       connRenderProductPaths, render_var_activations)
            # setup connection attributes
            connRenderProductPath = connRenderProductPaths[0] if connRenderProductPaths else None
            connNodePath = SyntheticData._get_node_path(connTemplateName, connRenderProductPath)
            connNode = self._graphNodes[connNodePath]
            connMap = connection.attributes_mapping
            if not connMap is None:
                if connMap:
                    # explicit attribute mapping provided
                    SyntheticData._connect_nodes(connNode, node, connMap, True)
                else:
                    # empty mapping: connect by attribute-name matching
                    SyntheticData._auto_connect_nodes(connNode, node, True, connIndex)
        return templateName

    def _deactivate_node_rec(
        self,
        templateName: str,
        renderProductIndex: int = -1,
        renderProductPaths: list = None,
        render_var_deactivations: dict = None,
        only_automatically_activated_nodes: bool = True,
        manual_deactivation: bool = True
    ) -> None:
        # Recursively destroy the template's node and its upstream dependencies that have no
        # remaining downstream connections, collecting per-renderProduct renderVar
        # deactivation counts. Returns the resolved template name.
        renderProductPath = renderProductPaths[renderProductIndex] if renderProductIndex > -1 else None
        if templateName in SyntheticData._ogn_rendervars:
            renderVarName = templateName
            templateName = SyntheticData._ogn_rendervars[templateName]
            if (not render_var_deactivations is None) and renderProductPath and (templateName == SyntheticData._rendererTemplateName):
                # count the renderVar deactivation requests per renderProduct
                if renderProductPath not in render_var_deactivations:
                    render_var_deactivations[renderProductPath] = {renderVarName: 0}
                elif renderVarName not in render_var_deactivations[renderProductPath]:
                    render_var_deactivations[renderProductPath][renderVarName] = 0
                render_var_deactivations[renderProductPath][renderVarName] += 1
        nodePath = SyntheticData._get_node_path(templateName, renderProductPath)
        # prevent automatically deactivating manually activated node
        if (nodePath not in self._graphNodes) or (not manual_deactivation and only_automatically_activated_nodes and (nodePath in self._activatedNodePaths)):
            return templateName
        node = self._graphNodes[nodePath]
        template = SyntheticData._ogn_templates_registry[templateName]
        # abort if the node has a downstream connection
        for attr in node.get_attributes():
            if attr.get_downstream_connection_count():
                return templateName
        node.get_graph().destroy_node(nodePath, True)
        self._graphNodes.pop(nodePath)
        # remove unused connections
        for connection in template.connections:
            connTemplateName = connection.node_template_id
            connRenderProductPaths = [renderProductPaths[idx] for idx in connection.render_product_idxs] if (
                renderProductPaths and connection.render_product_idxs) else None
            # deactivate the template
            self._deactivate_node_rec(connTemplateName, 0 if connRenderProductPaths else -1, connRenderProductPaths,
                                      render_var_deactivations, only_automatically_activated_nodes, False)
        return templateName

    def _set_node_attributes(self, nodePath, attributes) -> None:
        # Set attribute values on an activated node; raises on unknown node or attribute.
        if not attributes:
            return
        if nodePath not in self._graphNodes:
            raise SyntheticDataException(f"invalid node {nodePath}")
        node = self._graphNodes[nodePath]
        for attrName, attrVal in attributes.items():
            if node.get_attribute_exists(attrName):
                og.Controller(attribute=node.get_attribute(attrName)).set(value=attrVal)
            else:
                raise SyntheticDataException(f"invalid node attribute {nodePath}.{attrName}")

    def _get_node_attributes(self, nodePath, attribute_names: list, gpu=False) -> dict:
        # Read the listed attributes of an activated node; missing attributes are silently
        # skipped. Returns None if the node is not activated.
        if not nodePath in self._graphNodes:
            return None
        node = self._graphNodes[nodePath]
        attributes = {}
        for attrName in attribute_names:
            if node.get_attribute_exists(attrName):
                attributes[attrName] = og.Controller(attribute=node.get_attribute(attrName)).get(on_gpu=gpu)
        return attributes

    def _post_process_graph_tick(self, e: carb.events.IEvent):
        # App-update callback: explicitly evaluate every on-demand graph (their execution
        # model is "execution", so they do not run on their own).
        for nodePath, nodeGraph in self._nodeGraphs.items():
            graph = nodeGraph.get_wrapped_graph()
            if graph.get_pipeline_stage() == og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_ONDEMAND:
                graph.evaluate()

    def __init__(self) -> None:
        # graph path -> graph-as-node wrapper
        self._nodeGraphs = {}
        # node path -> node for every activated template instance
        self._graphNodes = {}
        # node paths explicitly activated through activate_node_template
        self._activatedNodePaths = []
        # renderProduct path -> renderVar -> [activation count, activated-by-us flag]
        self._render_product_var_activations = {}
        self._post_process_graph_tick_sub = omni.kit.app.get_app().get_update_event_stream().create_subscription_to_pop(
            self._post_process_graph_tick,
            order=SyntheticData._postProcessGraphTickOrder,
            name="[omni.syntheticdata] PostProcessGraph Tick"
        )

    def reset(self, usd=True, remove_activated_render_vars=False) -> None:
        """Reset the SyntheticData instance.

        Args:
            usd : if true reset the graph in the usd stage session layer
            remove_activated_render_vars : if True and usd is True remove the render vars
                activated by the node activation

        If the stage is valid it will destroy every graph created.
        """
        stage = omni.usd.get_context().get_stage()
        if stage and usd:
            session_layer = stage.GetSessionLayer()
            with Usd.EditContext(stage, session_layer):
                for nodeGraph in self._nodeGraphs.values():
                    self._reset_node_graph(nodeGraph)
                if remove_activated_render_vars:
                    for rp, rvs in self._render_product_var_activations.items():
                        for rv, num_act in rvs.items():
                            # num_act is [activation count, activated-by-us flag]
                            if num_act[1] and (num_act[0] > 0):
                                self._remove_rendervar(rp, rv, stage)
        self._render_product_var_activations = {}
        self._activatedNodePaths = []
        self._graphNodes = {}
        self._nodeGraphs = {}

    def get_graph(self, stage: int = SyntheticDataStage.ON_DEMAND, renderProductPath: str = None) -> object:
        """Return the graph at a given stage, for a given renderProduct.

        Gives access to the SyntheticData graphs.

        Args:
            stage : SyntheticDataStage of the queried graph
            renderProductPath : (for POST_RENDER stage only) the renderProductPath for which
                to get the POST_RENDER graph

        Returns:
            the graph at the given stage for the given renderProductPath.
        """
        if renderProductPath and stage != SyntheticDataStage.POST_RENDER:
            raise SyntheticDataException("invalid graph")
        graphPath = SyntheticData._get_graph_path(stage, renderProductPath)
        return self._get_or_create_graph(graphPath, stage, renderProductPath)

    def activate_node_template(
        self,
        template_name: str,
        render_product_path_index: int = -1,
        render_product_paths: list = None,
        attributes: dict = None,
        stage: Usd.Stage = None,
        activate_render_vars: bool = True
    ) -> None:
        """Activate a registered node.

        Create a node instance for the given node template and all its missing dependencies
        (including nodes and renderVar). The node will be executed during the next stage
        execution.

        Args:
            template_name : name of the node template to be activate
            render_product_path_index : if the node template is associated to a render
                product, index of the associated render product in the render product path list
            render_product_paths : render product path list to be used for specifying the
                render product of the node template and its dependencies to activate
            attributes : dictionary of attributes to set to the activated "template_name" node
            stage : the stage to change, if None use the stage of the current usd context
            activate_render_vars : if True activate the required render_vars, if False it is
                the user responsibility to activate the required render_vars

        Return:
            A dictionary containing for every render products the list of render var
            dependencies of this activation
            NB : if activate_render_vars is True those render vars are added
        """
        if (template_name not in SyntheticData._ogn_templates_registry) and (template_name not in SyntheticData._ogn_rendervars):
            raise SyntheticDataException(f'graph node template "{template_name}" unregistered')
        node_path = SyntheticData._get_node_path(
            template_name,
            render_product_paths[render_product_path_index] if render_product_path_index > -1 else None
        )
        if node_path in self._activatedNodePaths:
            # already explicitly activated: nothing to do (implicitly returns None)
            return
        if not stage:
            stage = omni.usd.get_context().get_stage()
        if not stage:
            raise SyntheticDataException("invalid USD stage")
        session_layer = stage.GetSessionLayer()
        with Usd.EditContext(stage, session_layer):
            render_var_activations = {}
            self._activate_node_rec(template_name, render_product_path_index, render_product_paths, render_var_activations)
            self._set_node_attributes(node_path, attributes)
            self._activatedNodePaths.append(node_path)
            # maintain the render_vars activation number for every render products
            activated_render_vars = {}
            for rp, rvs in render_var_activations.items():
                if rp not in self._render_product_var_activations:
                    self._render_product_var_activations[rp] = {}
                for rv, num in rvs.items():
                    need_activation = not self._has_rendervar(rp, rv, stage)
                    if rv not in self._render_product_var_activations[rp]:
                        self._render_product_var_activations[rp][rv] = [num, need_activation and activate_render_vars]
                    else:
                        self._render_product_var_activations[rp][rv][0] += num
                        self._render_product_var_activations[rp][rv][1] = need_activation and activate_render_vars
                    if need_activation:
                        if rp not in activated_render_vars:
                            activated_render_vars[rp] = []
                        if rv not in activated_render_vars[rp]:
                            activated_render_vars[rp].append(rv)
            if activate_render_vars:
                for rp, rvs in activated_render_vars.items():
                    for rv in rvs:
                        SyntheticData._add_rendervar(rp, rv, stage)
            return activated_render_vars

    def is_node_template_activated(
        self,
        template_name: str,
        render_product_path: str = None,
        only_manually_activated: bool = False
    ) -> bool:
        """Query the activation status of a node template.

        Args:
            template_name : name of the node template to query the activation status
            render_product_path : render product path for which to check the template
                activation status (None if not applicable)
            only_manually_activated: if True check the activation for only the explicitly
                activated templates ( exclude the automatically activated template )

        Return:
            True if the node template is currently activated and, if
            only_explicitely_activated is True, if it has been explicitly activated
        """
        node_path = SyntheticData._get_node_path(template_name, render_product_path)
        return node_path in self._activatedNodePaths if only_manually_activated else node_path in self._graphNodes

    # NOTE(review): render_product_paths uses a mutable default ([]) below — shared across
    # calls if ever mutated; consider None.
    def deactivate_node_template(
        self,
        template_name: str,
        render_product_path_index: int = -1,
        render_product_paths: list = [],
        stage: Usd.Stage = None,
        deactivate_render_vars: bool = False,
        recurse_only_automatically_activated: bool = True
    ) -> None:
        """Deactivate a registered node. Delete a node instance for the given node template and all its automatically activated dependencies with no more downstream connections.
The node won't be executed anymore starting with the next stage execution. Args: template_name : name of the node template to deactivate render_product_path_index : if the node template is associated to a render product, index of the associated render product in the render product path list render_product_paths : render product path list to be used for specifying the render product of the node template and its dependencies to deactivate stage : the stage to change, if None use the stage of the current usd context deactivate_render_vars : if True deactivate the render_vars that have been activated in a call to activate_node_template and which are not used anymore by the managed graphs. Beware that in some cases, some of these render vars maybe actually used by other graphs, hence it is False by default if False it is the user responsability to deactivate the unused render_vars. recurse_only_automatically_activated : if True recursively deactivate only automatically activated upstream nodes without other connections if False recursively deactivate all upstream nodes without other connections Return: A dictionnary containing for every render products path the list of render var dependencies that have been activated by activate_node_template and are not used anymore by the managed graphs. 
NB : if deactivate_render_vars is True those render vars are removed """ if not stage: stage = omni.usd.get_context().get_stage() if not stage: raise SyntheticDataException("invalid USD stage") session_layer = stage.GetSessionLayer() with Usd.EditContext(stage, session_layer): render_var_deactivations = {} self._deactivate_node_rec(template_name, render_product_path_index, render_product_paths, render_var_deactivations, recurse_only_automatically_activated) node_path = SyntheticData._get_node_path( template_name, render_product_paths[render_product_path_index] if render_product_path_index > -1 else None ) if (node_path in self._activatedNodePaths) and (node_path not in self._graphNodes): self._activatedNodePaths.remove(node_path) # maintain the render_vars activation number for every render products deactivated_render_vars = {} for rp, rvs in render_var_deactivations.items(): valid_rp = rp in self._render_product_var_activations for rv, num in rvs.items(): valid_rv = valid_rp and rv in self._render_product_var_activations[rp] if valid_rv and (self._render_product_var_activations[rp][rv][0] <= num): if self._render_product_var_activations[rp][rv][1]: if rp not in deactivated_render_vars: deactivated_render_vars[rp]=[rv] else: deactivated_render_vars[rp].append(rv) self._render_product_var_activations[rp].pop(rv) elif valid_rv: self._render_product_var_activations[rp][rv][0] -= num if deactivate_render_vars: for rp, rvs in deactivated_render_vars.items(): for rv in rvs: SyntheticData._remove_rendervar(rp, rv, stage) return deactivated_render_vars def connect_node_template(self, src_template_name: str, dst_template_name: str, render_product_path: str=None, connection_map: dict=None): """Connect the given source node template to the destination node template Args: src_template_name : name of the source node template dst_template_name : name of the destination node template render_product_path : render product path of the node templates (None if the node are not 
specific to a render product) connection_map : attribute mapping for the source inputs to the destination outputs. (None for an automatic mapping based on names) """ src_node_path = SyntheticData._get_node_path(src_template_name, render_product_path) if src_node_path not in self._graphNodes: raise SyntheticDataException(f'cannot connect node template : "{src_node_path}" not activated') else: src_node = self._graphNodes[src_node_path] dst_node_path = SyntheticData._get_node_path(dst_template_name, render_product_path) if dst_node_path not in self._graphNodes: raise SyntheticDataException(f'cannot connect node template : "{dst_node_path}" not activated') else: dst_node = self._graphNodes[dst_node_path] if connection_map: SyntheticData._connect_nodes(src_node, dst_node, connection_map, True) else: SyntheticData._auto_connect_nodes(src_node, dst_node, True) def disconnect_node_template(self, src_template_name: str, dst_template_name: str, render_product_path: str=None, connection_map: dict=None): """Disconnect the given source node template to the destination node template Args: src_template_name : name of the source node template dst_template_name : name of the destination node template render_product_path : render product path of the node templates (None if the node are not specific to a render product) connection_map : attribute mapping for the source inputs to the destination outputs. 
(None for an automatic mapping based on names) """ src_node_path = SyntheticData._get_node_path(src_template_name, render_product_path) if src_node_path not in self._graphNodes: raise SyntheticDataException(f'cannot disconnect node template : "{src_node_path}" not activated') else: src_node = self._graphNodes[src_node_path] dst_node_path = SyntheticData._get_node_path(dst_template_name, render_product_path) if dst_node_path not in self._graphNodes: raise SyntheticDataException(f'cannot disconnect node template : "{dst_node_path}" not activated') else: dst_node = self._graphNodes[dst_node_path] if connection_map: SyntheticData._connect_nodes(src_node, dst_node, connection_map, False) else: SyntheticData._auto_connect_nodes(src_node, dst_node, False) def request_node_execution(self, template_name: str, render_product_path: str=None): """Request the execution of an activated node. The function will schedule the execution of compute-on-request node Args: template_name : name of the activated node render_product_path : if the activated node is associated to a render product, provide its path Returns: True if the request has been successfull """ node_path = SyntheticData._get_node_path(template_name, render_product_path) if node_path not in self._graphNodes: raise SyntheticDataException(f"invalid node {node_path}") node = self._graphNodes[node_path] if node.is_valid(): node.request_compute() def set_node_attributes(self, template_name: str, attributes: dict, render_product_path: str=None) -> None: """Set the value of an activated node attribute. The function may be used to set the value of multiple activated node input attributes before the execution of its stage. 
Args: template_name : name of the activated node attributes : dictionnary of attribute name/value to set render_product_path : if the activated node is associated to a render product, provide its path """ node_path = SyntheticData._get_node_path(template_name, render_product_path) self._set_node_attributes(node_path, attributes) def get_node_attributes( self, template_name: str, attribute_names: list, render_product_path=None, gpu=False ) -> dict: """Get the value of several activated node's attributes. The function may be used to retrieve the value of multiple activated node output attributes after the execution of its graph. Args: template_name : name of the activated node attribute_names : list of node attribute names to retrieve the value render_product_path : if the activated node is associated to a render product, provide its path gpu : for array data attribute, get a gpu data Returns: A dictionnary of attribute name/value for every successfully retrieved attributes None if the node is not a valid activated node """ node_path = SyntheticData._get_node_path(template_name, render_product_path) return self._get_node_attributes(node_path, attribute_names, gpu) def set_instance_mapping_semantic_filter(self, predicate="*:*"): """Set the semantic filter predicate to be applied to the instance mapping. Contrary to the default semantic filter this filter affect the instance mapping. All semantic data filtered at this level is not available in the instance mapping. Args: predicate : a semantic filter predicate. 
predicate examples : "typeA : labelA & !labelB | labelC , typeB: labelA ; typeC: labelD" "typeA : * ; * : labelA" """ SyntheticData._ogn_templates_registry[SyntheticData._instanceMappingCtrl].attributes["inputs:semanticFilterPredicate"] = predicate node_path = SyntheticData._get_node_path(SyntheticData._instanceMappingCtrl) if node_path in self._graphNodes: self.set_node_attributes(SyntheticData._instanceMappingCtrl, {"inputs:semanticFilterPredicate":predicate}) def get_instance_mapping_semantic_filter(self): """ Returns: The semantic filter predicate currently applied to the instance mapping. """ return SyntheticData._ogn_templates_registry[SyntheticData._instanceMappingCtrl].attributes["inputs:semanticFilterPredicate"] def set_default_semantic_filter(self, predicate="*:*", hierarchical_labels=False, matching_labels=True): """Set the default semantic filter predicate. Args: predicate : a semantic filter predicate. hierarchical_labels : option to propagate semantic labels within the hiearchy, from parent to childrens matching_labels : option to remove from the set of labels the one that do not match the predicate predicate examples : "typeA : labelA & !labelB | labelC , typeB: labelA ; typeC: labelD" "typeA : * ; * : labelA" """ node_path = SyntheticData._get_node_path(SyntheticData._defaultSemanticFilterName) attributes = {"inputs:predicate": predicate, "inputs:hierarchicalLabels": hierarchical_labels, "inputs:matchingLabels": matching_labels} if node_path in self._graphNodes: self.set_node_attributes(SyntheticData._defaultSemanticFilterName, attributes) else: self.activate_node_template(SyntheticData._defaultSemanticFilterName, attributes=attributes) def get_default_semantic_filter(self): """Set the default semantic filter predicate. Args: predicate : a semantic filter predicate. 
hierarchical_labels : option to propagate semantic labels within the hiearchy, from parent to childrens matching_labels : option to remove from the set of labels the one that do not match the predicate predicate examples : "typeA : labelA & !labelB | labelC , typeB: labelA ; typeC: labelD" "typeA : * ; * : labelA" """ attr = self.get_node_attributes(SyntheticData._defaultSemanticFilterName, ["inputs:predicate"]) return attr["inputs:predicate"] if "inputs:predicate" in attr else "*" def enable_rendervar(self, render_product_path:str, render_var:str, usd_stage: Usd.Stage = None) -> None: """Explicitely enable the computation of a render_var for a given render_product. Args: render_product_path : the render_product for which to enable the given render_var computation render_var : the name of the render_var to enable usd_stage : usd stage """ SyntheticData._add_rendervar(render_product_path, render_var, usd_stage) def disable_rendervar(self, render_product_path:str, render_var:str, usd_stage: Usd.Stage = None) -> None: """Explicitely disable the computation of a render_var for a given render_product. 
Args: render_product_path : the render_product for which to disable the given render_var computation render_var : the name of the render_var to disable usd_stage : usd stage """ SyntheticData._remove_rendervar(render_product_path, render_var, usd_stage) def is_rendervar_used(self, render_product_path:str, render_var:str) -> None: """ query the used status of a render var for a render product Args: render_product_path: the path of the render product renver_var: the name of the render_var Returns: True if the given render var is currently in use by the activated syntheticData nodes for the given render product """ if (render_product_path in self._render_product_var_activations) and (render_var in self._render_product_var_activations[render_product_path]): return self._render_product_var_activations[render_product_path][render_var][0] > 0 else: return False def is_rendervar_enabled(self, render_product_path:str, render_var:str, only_sdg_activated: bool = False, usd_stage: Usd.Stage = None) -> None: """ query the enabled status of a render var for a render product Args: render_product_path: the path of the render product renver_var: the name of the render_var only_sdg_activated: consider only the render var automatically enabled by a call to activate_node_template usd_stage: the usd stage (if None use the current usd context stage) Returns: True if the given render var is currently enabled for the given render product and, if only_sdg_activated is True, if it has been enabled by a call to activate_node_template """ if only_sdg_activated: if (render_product_path in self._render_product_var_activations) and (render_var in self._render_product_var_activations[render_product_path]): return self._render_product_var_activations[render_product_path][render_var][1] else: return False else: return SyntheticData._has_rendervar(render_product_path, render_var, usd_stage)
87,058
Python
47.882089
202
0.634462
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/model.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. # __all__ = ['RenderProductModel', 'RenderVarModel'] import omni.usd import omni.ui as ui from .SyntheticData import SyntheticData from pxr import Usd class RenderProductItem(ui.AbstractItem): def __init__(self, model): super().__init__() self.model = model class RenderProductModel(ui.AbstractItemModel): def __init__(self, viewport_name: str, viewport_api): super().__init__() # Omniverse interfaces self._viewport_api = viewport_api self._stage_update = omni.stageupdate.get_stage_update_interface() self._stage_subscription = self._stage_update.create_stage_update_node( "RenderProductModel_" + viewport_name, None, None, None, self._on_prim_created, None, self._on_prim_removed, ) # The list of the cameras is here self._render_products = [] # The current index of the editable_combo box self._current_index = ui.SimpleIntModel() self._current_index.add_value_changed_fn(self._current_index_changed) # Iterate the stage and get all the renderProduct stage = viewport_api.usd_context.get_stage() if stage: for prim in Usd.PrimRange(stage.GetPseudoRoot()): if prim.IsA("UsdRenderProduct"): self._render_products.append( RenderProductItem(ui.SimpleStringModel(prim.GetPath().pathString)) ) def destroy(self): self._viewport_api = None def get_item_children(self, item): return self._render_products def get_item_value_model(self, item, column_id): if item is None: return self._current_index return item.model def _on_prim_created(self, path): self._render_products.append(RenderProductItem(ui.SimpleStringModel(path))) self._item_changed(None) def 
_on_prim_removed(self, path): render_products = [rp.model.as_string for rp in self._render_products] if path in render_products: index = render_products.index(path) del self._render_products[index] self._current_index.as_int = 0 self._item_changed(None) def _current_index_changed(self, model): index = model.as_int render_product_path = self._render_products[index].model.as_string self._viewport_api.render_product_path = render_product_path self._item_changed(None) class RenderVarItem(ui.AbstractItem): def __init__(self, model): super().__init__() self.model = model class RenderVarModel(ui.AbstractItemModel): def _create_item(self, name): return RenderVarItem(ui.SimpleStringModel(name)) def __init__(self, viewport_api): super().__init__() self._viewport_api = viewport_api self._render_vars = [ self._create_item(rv[0:-7]) for rv in SyntheticData.get_registered_visualization_template_names() ] self._default_index_int = 0 self._current_index = ui.SimpleIntModel() self._current_index.add_value_changed_fn(self._current_index_changed) self._previous_index_int = self._current_index.as_int self._combine_params = [0, 0, -100] def destroy(self): self._viewport_api = None def get_item_children(self, item): return self._render_vars def get_item_value_model(self, item, column_id): if item is None: return self._current_index return item.model def _current_index_changed(self, model): index = model.as_int isdg = SyntheticData.Get() if isdg: render_prod_path = self.get_render_product_path() stage = self._viewport_api.usd_context.get_stage() if self._render_vars[self._previous_index_int].model.as_string != "LdrColor": isdg.deactivate_node_template( self._render_vars[self._previous_index_int].model.as_string + "DisplayPostCombine", 0, [render_prod_path], stage ) if self._render_vars[index].model.as_string != "LdrColor": isdg.activate_node_template( self._render_vars[index].model.as_string + "DisplayPostCombine", 0, [render_prod_path], None, stage ) self._previous_index_int = index 
self.update_combine() self._item_changed(None) def set_default_item(self): self._current_index.set_value(self._default_index_int) def get_render_product_path(self): if self._viewport_api is None: return None render_prod_path = self._viewport_api.render_product_path # XXX: Issue with Viewport-2 and omni.kit.hydra_texture # The default product path is returned as a string that isn't the prim-path # We can work around it by noting the path isn't absolute and fixing it u pi that case. if render_prod_path and (not render_prod_path.startswith('/')): render_prod_path = f'/Render/RenderProduct_{render_prod_path}' return render_prod_path def set_combine_angle(self, angle): self._combine_params[0] = angle self.update_combine() def set_combine_divide_x(self, divide): self._combine_params[1] = divide self.update_combine() def set_combine_divide_y(self, divide): self._combine_params[2] = divide self.update_combine() def get_combine_angle(self): return self._combine_params[0] def get_combine_divide_x(self): return self._combine_params[1] def get_combine_divide_y(self): return self._combine_params[2] def update_combine(self): if self._render_vars[self._previous_index_int].model.as_string == "LdrColor": return isdg = SyntheticData.Get() if isdg: isdg.set_node_attributes( self._render_vars[self._previous_index_int].model.as_string + "DisplayPostCombine", {"inputs:parameters": self._combine_params}, self.get_render_product_path() )
6,638
Python
34.886486
132
0.62082
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/menu.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. # __all__ = ["SynthDataMenuContainer"] from omni.kit.viewport.menubar.core import ( ComboBoxModel, ComboBoxItem, ComboBoxMenuDelegate, CheckboxMenuDelegate, IconMenuDelegate, SliderMenuDelegate, ViewportMenuContainer, ViewportMenuItem, ViewportMenuSeparator ) from .SyntheticData import SyntheticData from .visualizer_window import VisualizerWindow import carb import omni.ui as ui from pathlib import Path import weakref ICON_PATH = Path(carb.tokens.get_tokens_interface().resolve("${omni.syntheticdata}")).joinpath("data") UI_STYLE = {"Menu.Item.Icon::SyntheticData": {"image_url": str(ICON_PATH.joinpath("sensor_icon.svg"))}} class SensorAngleModel(ui.AbstractValueModel): def __init__(self, getter, setter, *args, **kwargs): super().__init__(*args, **kwargs) self.__getter = getter self.__setter = setter def destroy(self): self.__getter = None self.__setter = None def get_value_as_float(self) -> float: return self.__getter() def get_value_as_int(self) -> int: return int(self.get_value_as_float()) def set_value(self, value): value = float(value) if self.get_value_as_float() != value: self.__setter(value) self._value_changed() class SensorVisualizationModel(ui.AbstractValueModel): def __init__(self, sensor: str, visualizer_window, *args, **kwargs): super().__init__(*args, **kwargs) self.__sensor = sensor self.__visualizer_window = visualizer_window def get_value_as_bool(self) -> bool: try: return bool(self.__sensor in self.__visualizer_window.visualization_activation) except: return False def get_value_as_int(self) -> int: return 1 if 
self.get_value_as_bool() else 0 def set_value(self, enabled): enabled = bool(enabled) if self.get_value_as_bool() != enabled: self.__visualizer_window.on_sensor_item_clicked(enabled, self.__sensor) self._value_changed() def sensor(self): return self.__sensor class MenuContext: def __init__(self, viewport_api): self.__visualizer_window = VisualizerWindow(f"{viewport_api.id}", viewport_api) self.__hide_on_click = False self.__sensor_models = set() def destroy(self): self.__sensor_models = set() self.__visualizer_window.close() @property def hide_on_click(self) -> bool: return self.__hide_on_click def add_render_settings_items(self): render_product_combo_model = self.__visualizer_window.render_product_combo_model if render_product_combo_model: ViewportMenuItem( "RenderProduct", delegate=ComboBoxMenuDelegate(model=render_product_combo_model), hide_on_click=self.__hide_on_click, ) render_var_combo_model = self.__visualizer_window.render_var_combo_model if render_var_combo_model: ViewportMenuItem( "RenderVar", delegate=ComboBoxMenuDelegate(model=render_var_combo_model), hide_on_click=self.__hide_on_click, ) def add_angles_items(self): render_var_combo_model = self.__visualizer_window.render_var_combo_model if render_var_combo_model: ViewportMenuItem( name="Angle", hide_on_click=self.__hide_on_click, delegate=SliderMenuDelegate( model=SensorAngleModel(render_var_combo_model.get_combine_angle, render_var_combo_model.set_combine_angle), min=-100.0, max=100.0, tooltip="Set Combine Angle", ), ) ViewportMenuItem( name="X", hide_on_click=self.__hide_on_click, delegate=SliderMenuDelegate( model=SensorAngleModel(render_var_combo_model.get_combine_divide_x, render_var_combo_model.set_combine_divide_x), min=-100.0, max=100.0, tooltip="Set Combine Divide X", ), ) ViewportMenuItem( name="Y", hide_on_click=self.__hide_on_click, delegate=SliderMenuDelegate( model=SensorAngleModel(render_var_combo_model.get_combine_divide_y, render_var_combo_model.set_combine_divide_y), min=-100.0, 
max=100.0, tooltip="Set Combine Divide Y", ), ) def add_sensor_selection(self): for sensor_label, sensor in SyntheticData.get_registered_visualization_template_names_for_display(): model = SensorVisualizationModel(sensor, self.__visualizer_window) self.__sensor_models.add(model) ViewportMenuItem( name=sensor_label, hide_on_click=self.__hide_on_click, delegate=CheckboxMenuDelegate(model=model, tooltip=f'Enable "{sensor}" visualization') ) if SyntheticData.get_visualization_template_name_default_activation(sensor): model.set_value(True) def clear_all(self, *args, **kwargs): for smodel in self.__sensor_models: smodel.set_value(False) def set_as_default(self, *args, **kwargs): for smodel in self.__sensor_models: SyntheticData.set_visualization_template_name_default_activation(smodel.sensor(), smodel.get_value_as_bool()) def reset_to_default(self, *args, **kwargs): default_sensors = [] for _, sensor in SyntheticData.get_registered_visualization_template_names_for_display(): if SyntheticData.get_visualization_template_name_default_activation(sensor): default_sensors.append(sensor) for smodel in self.__sensor_models: smodel.set_value(smodel.sensor() in default_sensors) def show_window(self, *args, **kwargs): self.__visualizer_window.toggle_enable_visualization() class SynthDataMenuContainer(ViewportMenuContainer): def __init__(self): super().__init__(name="SyntheticData", visible_setting_path="/exts/omni.syntheticdata/menubar/visible", order_setting_path="/exts/omni.syntheticdata/menubar/order", delegate=IconMenuDelegate("SyntheticData"), # tooltip="Synthetic Data Sensors"), style=UI_STYLE) self.__menu_context: Dict[str, MenuContext] = {} def __del__(self): self.destroy() def destroy(self): for menu_ctx in self.__menu_context.values(): menu_ctx.destroy() self.__menu_context = {} super().destroy() def build_fn(self, desc: dict): viewport_api = desc.get("viewport_api") if not viewport_api: return viewport_api_id = viewport_api.id menu_ctx = 
self.__menu_context.get(viewport_api_id) if menu_ctx: menu_ctx.destroy() menu_ctx = MenuContext(viewport_api) self.__menu_context[viewport_api_id] = menu_ctx with self: menu_ctx.add_render_settings_items() ViewportMenuSeparator() menu_ctx.add_angles_items() ViewportMenuSeparator() menu_ctx.add_sensor_selection() if carb.settings.get_settings().get_as_bool("/exts/omni.syntheticdata/menubar/showSensorDefaultButton"): ViewportMenuSeparator() ViewportMenuItem(name="Set as default", hide_on_click=menu_ctx.hide_on_click, onclick_fn=menu_ctx.set_as_default) ViewportMenuItem(name="Reset to default", hide_on_click=menu_ctx.hide_on_click, onclick_fn=menu_ctx.reset_to_default) ViewportMenuSeparator() ViewportMenuItem(name="Clear All", hide_on_click=menu_ctx.hide_on_click, onclick_fn=menu_ctx.clear_all) ViewportMenuItem(name="Show Window", hide_on_click=menu_ctx.hide_on_click, onclick_fn=menu_ctx.show_window) super().build_fn(desc) def clear_all(self): for menu_ctx in self.__menu_context.values(): menu_ctx.clear_all()
8,981
Python
36.739496
133
0.595813
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/scripts/visualizer_window.py
# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved. # # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. # __all__ = ["VisualizerWindow"] import omni.ui as ui from .. import _syntheticdata from .SyntheticData import SyntheticData from .model import RenderProductModel, RenderVarModel import math from functools import lru_cache import carb.settings DEBUG_VIEW = False @lru_cache() def _get_syntheticdata_iface(): return _syntheticdata.acquire_syntheticdata_interface() class VisualizerWindow: def __init__(self, name, viewport_api): # create the window self._visualize_window = ui.Window(name + " Sensors Output ", width=800, height=600) self._visualize_window.set_width_changed_fn(lambda _: self._update_visualization_ui()) self._visualize_window.set_height_changed_fn(lambda _: self._update_visualization_ui()) self._visualize_window.visible = False self._render_product_combo_model = RenderProductModel(name, viewport_api) if DEBUG_VIEW else None self._render_var_combo_model = RenderVarModel(viewport_api) self._render_product_path = self._render_var_combo_model.get_render_product_path() # activated visualization contains the set of display node that have been activated through the UI self._visualization_activation = set() # visualisation_data contain the image provider for all currently activated display node self._activated_visualization_data = {} if hasattr(viewport_api, 'subscribe_to_frame_change'): self.__frame_changed_sub = viewport_api.subscribe_to_frame_change(self.__frame_changed) def __frame_changed(self, viewport_api): render_product = self._render_var_combo_model.get_render_product_path() self.update(render_product, viewport_api.stage) def 
close(self): self.__frame_changed_sub = None if self._visualize_window: self._visualize_window.visible = False self._visualize_window = None if self._render_product_combo_model: self._render_product_combo_model = None if self._render_var_combo_model: self._render_var_combo_model = None self._visualization_activation = set() self._activated_visualization_data = {} @property def render_product_combo_model(self): return self._render_product_combo_model @property def render_var_combo_model(self): return self._render_var_combo_model @property def visualization_activation(self): return self._visualization_activation # callback function for handling sensor selection def on_sensor_item_clicked(self, checked, sensor): if checked: self._visualization_activation.add(sensor) else: self._visualization_activation.remove(sensor) # visualization callback def toggle_enable_visualization(self): if self._visualize_window: self._visualize_window.visible = not self._visualize_window.visible def update(self, render_product_path: str, stage): sdg_iface = SyntheticData.Get() if render_product_path != self._render_product_path: for sensor in self._activated_visualization_data: sdg_iface.deactivate_node_template(sensor,0,[render_product_path]) self._activated_visualization_data = {} if not carb.settings.get_settings().get_as_bool("/exts/omni.syntheticdata/menubar/showSensorDefaultButton"): self._visualization_activation = set() self._render_product_path = render_product_path self._render_var_combo_model.set_default_item() # update the activated sensors visualization_activation = self._visualization_activation.copy() # NB this is not threadsafe to_activate = visualization_activation.difference(set(self._activated_visualization_data.keys())) to_deactivate = set(self._activated_visualization_data.keys()).difference(visualization_activation) self._activated_visualization_data = {} for sensor in visualization_activation: self._activated_visualization_data[sensor] = None for sensor in 
to_activate: sdg_iface.activate_node_template(sensor, 0, [render_product_path], None, stage) for sensor in to_deactivate: sdg_iface.deactivate_node_template(sensor, 0, [render_product_path], stage) # update the visualization window if self._visualize_window.visible: for sensor in self._activated_visualization_data: # create image provider from the sensor texture data self._activated_visualization_data[sensor] = ui.ImageProvider() display_output_names = ["outputs:rpResourcePtr"] display_outputs = sdg_iface.get_node_attributes(sensor, display_output_names, render_product_path) if display_outputs and all(o in display_outputs for o in display_output_names): resource_opaque_ptr = display_outputs["outputs:rpResourcePtr"] if resource_opaque_ptr: self._activated_visualization_data[sensor].set_image_data( _get_syntheticdata_iface().get_visualizer_resource_from_opaque_pointer(resource_opaque_ptr)) self._update_visualization_ui() def _update_visualization_ui(self): num_sensors = len(self._activated_visualization_data) if num_sensors == 0: rows, columns = 0, 0 else: # Attempt a responsive layout to the number of enabled sensors columns = math.ceil(math.sqrt(num_sensors)) rows = math.ceil(num_sensors / columns) if self._visualize_window.height > self._visualize_window.width: columns, rows = rows, columns enabled_sensors = list(self._activated_visualization_data.keys()) with self._visualize_window.frame: with ui.VStack(): idx = 0 for _ in range(rows): with ui.HStack(): for col in range(columns): sensor = enabled_sensors[idx] with ui.VStack(): ui.Label(sensor, alignment=ui.Alignment.CENTER, height=20) ui.ImageWithProvider( self._activated_visualization_data[sensor], alignment=ui.Alignment.CENTER_TOP ) ui.Spacer(height=20) idx += 1 if col < columns - 1: # Add a spacer if inner grid edge ui.Spacer(width=3) if idx >= len(enabled_sensors): break
7,304
Python
44.372671
121
0.628697
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/__init__.py
""" Presence of this file allows the tests directory to be imported as a module so that all of its contents can be scanned to automatically add tests that are placed into this directory. """ scan_for_test_modules = True from .sensors.test_bbox3d import * from .sensors.test_bbox2d_loose import * from .sensors.test_bbox2d_tight import * from .sensors.test_distance_to_camera import * from .sensors.test_distance_to_image_plane import * from .sensors.test_depth import * # *** DEPRECATED *** from .sensors.test_depth_linear import * # *** DEPRECATED *** from .sensors.test_motion_vector import * from .sensors.test_normals import * from .sensors.test_occlusion import * from .sensors.test_rgb import * from .sensors.test_instance_seg import * from .sensors.test_semantic_seg import * from .sensors.test_cross_correspondence import * from .sensors.test_swh_frame_number import * from .sensors.test_renderproduct_camera import * from .sensors.test_rendervar_buff_host_ptr import * from .sensors.test_semantic_filter import * from .sensors.test_display_rendervar import * from .helpers.test_instance_mapping import * from .helpers.test_projection import * from .helpers.test_bboxes import * from .visualize.test_semantic_seg import * from .pipeline.test_instance_mapping import * from .pipeline.test_swh_frame_number import * from .pipeline.test_renderproduct_camera import * from .pipeline.test_instance_mapping_update import * from .graph.test_graph_manipulation import *
1,476
Python
37.86842
103
0.776423
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/utils.py
import random import numpy as np from pxr import Gf, Semantics def add_semantics(prim, semantic_label, semantic_type="class"): if not prim.HasAPI(Semantics.SemanticsAPI): sem = Semantics.SemanticsAPI.Apply(prim, "Semantics") sem.CreateSemanticTypeAttr() sem.CreateSemanticDataAttr() sem.GetSemanticTypeAttr().Set(semantic_type) sem.GetSemanticDataAttr().Set(semantic_label) def get_random_transform(): camera_tf = np.eye(4) camera_tf[:3, :3] = Gf.Matrix3d(Gf.Rotation(np.random.rand(3).tolist(), np.random.rand(3).tolist())) camera_tf[3, :3] = np.random.rand(3).tolist() return Gf.Matrix4d(camera_tf)
666
Python
30.761903
104
0.689189
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/pipeline/test_renderproduct_camera.py
import carb from pxr import Gf, UsdGeom, UsdLux, Sdf import omni.hydratexture import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage # Test the instance mapping pipeline class TestRenderProductCamera(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) def render_product_path(self, hydra_texture) -> str: '''Return a string to the UsdRender.Product used by the texture''' render_product = hydra_texture.get_render_product_path() if render_product and (not render_product.startswith('/')): render_product = '/Render/RenderProduct_' + render_product return render_product def register_test_rp_cam_pipeline(self): sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestSimRpCam"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdTestRenderProductCamera", attributes={"inputs:stage":"simulation"} ), template_name="TestSimRpCam" ) if not sdg_iface.is_node_template_registered("TestPostRpCam"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdTestRenderProductCamera", [SyntheticData.NodeConnectionTemplate("PostRenderProductCamera")], attributes={"inputs:stage":"postRender"} ), template_name="TestPostRpCam" ) if not sdg_iface.is_node_template_registered("TestOnDemandRpCam"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTestRenderProductCamera", [ SyntheticData.NodeConnectionTemplate("PostProcessRenderProductCamera"), SyntheticData.NodeConnectionTemplate( "PostProcessDispatch", attributes_mapping={"outputs:renderResults": "inputs:renderResults"}) ], attributes={"inputs:stage":"onDemand"} ), template_name="TestOnDemandRpCam" ) def activate_test_rp_cam_pipeline(self, test_case_index): sdg_iface = SyntheticData.Get() attributes = { "inputs:renderProductCameraPath": 
self._camera_path, "inputs:width": self._resolution[0], "inputs:height": self._resolution[1], "inputs:traceError": True } sdg_iface.activate_node_template("TestSimRpCam", 0, [self.render_product_path(self._hydra_texture_0)], attributes) sdg_iface.activate_node_template("TestPostRpCam", 0, [self.render_product_path(self._hydra_texture_0)], attributes) sdg_iface.activate_node_template("TestOnDemandRpCam", 0, [self.render_product_path(self._hydra_texture_0)],attributes) async def wait_for_num_frames(self, num_frames, max_num_frames=5000): self._hydra_texture_rendered_counter = 0 wait_frames_left = max_num_frames while (self._hydra_texture_rendered_counter < num_frames) and (wait_frames_left > 0): await omni.kit.app.get_app().next_update_async() wait_frames_left -= 1 async def setUp(self): self._settings = carb.settings.acquire_settings_interface() self._hydra_texture_factory = omni.hydratexture.acquire_hydra_texture_factory_interface() self._usd_context_name = '' self._usd_context = omni.usd.get_context(self._usd_context_name) await self._usd_context.new_stage_async() # camera self._camera_path = "/TestRPCamera" UsdGeom.Camera.Define(omni.usd.get_context().get_stage(), self._camera_path) self._resolution = [980,540] # renderer renderer = "rtx" if renderer not in self._usd_context.get_attached_hydra_engine_names(): omni.usd.add_hydra_engine(renderer, self._usd_context) # create the hydra textures self._hydra_texture_0 = self._hydra_texture_factory.create_hydra_texture( "TEX0", width=self._resolution[0], height=self._resolution[1], usd_context_name=self._usd_context_name, usd_camera_path=self._camera_path, hydra_engine_name=renderer, is_async=self._settings.get("/app/asyncRendering") ) self._hydra_texture_rendered_counter = 0 def on_hydra_texture_0(event: carb.events.IEvent): self._hydra_texture_rendered_counter += 1 self._hydra_texture_rendered_counter_sub = self._hydra_texture_0.get_event_stream().create_subscription_to_push_by_type( 
omni.hydratexture.EVENT_TYPE_DRAWABLE_CHANGED, on_hydra_texture_0, name='async rendering test drawable update', ) self.register_test_rp_cam_pipeline() async def tearDown(self): self._hydra_texture_rendered_counter_sub = None self._hydra_texture_0 = None self._usd_context.close_stage() omni.usd.release_all_hydra_engines(self._usd_context) self._hydra_texture_factory = None self._settings = None wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def test_case_0(self): self.activate_test_rp_cam_pipeline(0) await self.wait_for_num_frames(33)
5,903
Python
42.09489
128
0.603422
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/pipeline/test_swh_frame_number.py
import carb from pxr import Gf, UsdGeom, UsdLux, Sdf import omni.hydratexture import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage # Test the Fabric frame number synchronization class TestSWHFrameNumber(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) def render_product_path(self, hydra_texture) -> str: '''Return a string to the UsdRender.Product used by the texture''' render_product = hydra_texture.get_render_product_path() if render_product and (not render_product.startswith('/')): render_product = '/Render/RenderProduct_' + render_product return render_product async def wait_for_num_sims(self, num_sims, max_num_sims=5000): self._hydra_texture_rendered_counter = 0 wait_sims_left = max_num_sims while (self._hydra_texture_rendered_counter < num_sims) and (wait_sims_left > 0): await omni.kit.app.get_app().next_update_async() wait_sims_left -= 1 async def setUp(self): self._settings = carb.settings.acquire_settings_interface() self._hydra_texture_factory = omni.hydratexture.acquire_hydra_texture_factory_interface() self._usd_context_name = '' self._usd_context = omni.usd.get_context(self._usd_context_name) await self._usd_context.new_stage_async() # Setup the scene stage = omni.usd.get_context().get_stage() world_prim = UsdGeom.Xform.Define(stage,"/World") UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0)) UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule0_prim = stage.DefinePrim("/World/Capsule0", "Capsule") UsdGeom.Xformable(capsule0_prim).AddTranslateOp().Set((100, 0, 0)) UsdGeom.Xformable(capsule0_prim).AddScaleOp().Set((30, 30, 30)) UsdGeom.Xformable(capsule0_prim).AddRotateXYZOp().Set((-90, 0, 0)) capsule0_prim.GetAttribute("primvars:displayColor").Set([(0.3, 1, 0)]) capsule1_prim = stage.DefinePrim("/World/Capsule1", "Capsule") UsdGeom.Xformable(capsule1_prim).AddTranslateOp().Set((-100, 0, 0)) 
UsdGeom.Xformable(capsule1_prim).AddScaleOp().Set((30, 30, 30)) UsdGeom.Xformable(capsule1_prim).AddRotateXYZOp().Set((-90, 0, 0)) capsule1_prim.GetAttribute("primvars:displayColor").Set([(0, 1, 0.3)]) spherelight = UsdLux.SphereLight.Define(stage, "/SphereLight") spherelight.GetIntensityAttr().Set(30000) spherelight.GetRadiusAttr().Set(30) camera_1 = stage.DefinePrim("/Camera1", "Camera") camera_1.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") UsdGeom.Xformable(camera_1).AddTranslateOp().Set((0, 250, 0)) UsdGeom.Xformable(camera_1).AddRotateXYZOp().Set((-90, 0, 0)) # renderer renderer = "rtx" if renderer not in self._usd_context.get_attached_hydra_engine_names(): omni.usd.add_hydra_engine(renderer, self._usd_context) # create the hydra textures self._hydra_texture_0 = self._hydra_texture_factory.create_hydra_texture( "TEX0", 1920, 1080, self._usd_context_name, hydra_engine_name=renderer, is_async=self._settings.get("/app/asyncRendering") ) render_product_path_0 = self.render_product_path(self._hydra_texture_0) self._hydra_texture_rendered_counter = 0 def on_hydra_texture_0(event: carb.events.IEvent): self._hydra_texture_rendered_counter += 1 self._hydra_texture_rendered_counter_sub = self._hydra_texture_0.get_event_stream().create_subscription_to_push_by_type( omni.hydratexture.EVENT_TYPE_DRAWABLE_CHANGED, on_hydra_texture_0, name='async rendering test drawable update', ) self._hydra_texture_1 = self._hydra_texture_factory.create_hydra_texture( "TEX1", 512, 512, self._usd_context_name, str(camera_1.GetPath()), hydra_engine_name=renderer, is_async=self._settings.get("/app/asyncRendering") ) render_product_path_1 = self.render_product_path(self._hydra_texture_1) # SyntheticData singleton interface sdg_iface = SyntheticData.Get() # Register node templates in the SyntheticData register # (a node template is a template for creating a node specified by its type and its connections) # # to illustrate we are using the generic 
omni.syntheticdata.SdTestStageSynchronization node type which supports every stage of the SyntheticData pipeline. When executed it logs the fabric frame number. # # register a node template in the simulation stage # NB : this node template has no connections if not sdg_iface.is_node_template_registered("TestSyncSim"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, # node tempalte stage "omni.syntheticdata.SdTestStageSynchronization", # node template type attributes={ "inputs:tag":"0", "inputs:randomSeed": 13, "inputs:randomMaxProcessingTimeUs": 33333, "inputs:traceError": True } ), # node template default attribute values (when differs from the default value specified in the .ogn) template_name="TestSyncSim" # node template name ) # register a node template in the postrender stage # NB : this template may be activated for several different renderproducts if not sdg_iface.is_node_template_registered("TestSyncPost"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.POST_RENDER, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncSim node (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate("TestSyncSim", (), None), # connected to a LdrColorSD rendervar (the renderVar will be activated when activating this template) SyntheticData.NodeConnectionTemplate("LdrColorSD"), # connected to a BoundingBox3DSD rendervar (the renderVar will be activated when activating this template) SyntheticData.NodeConnectionTemplate("BoundingBox3DSD") ], attributes={ "inputs:randomSeed": 27, "inputs:randomMaxProcessingTimeUs": 33333, "inputs:traceError": True } ), template_name="TestSyncPost" # node template name ) # register a node template in the postprocess stage # NB : this template may be activated for several different renderproducts if not 
sdg_iface.is_node_template_registered("TestSyncOnDemand"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncSim node (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate("TestSyncSim", (), None), # connected to a PostProcessDispatch node : the PostProcessDispatch node trigger the execution of its downstream connections for every rendered frame # (a PostProcessDispatch node will be activated when activating this template) SyntheticData.NodeConnectionTemplate("PostProcessDispatch") ], attributes={ "inputs:randomSeed": 51, "inputs:randomMaxProcessingTimeUs": 33333, "inputs:traceError": True } # node template default attribute values (when differs from the default value specified in the .ogn) ), template_name="TestSyncOnDemand" # node template name ) # register a node template in the postprocess stage # NB : this template may be activated for any combination of renderproduct pairs if not sdg_iface.is_node_template_registered("TestSyncCross"): # register an accumulator which trigger once when all its upstream connections have triggered sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.graph.action.RationalTimeSyncGate", # node template type # node template connections [ # connected to the PostProcessDispatcher for the synchronization value SyntheticData.NodeConnectionTemplate( "PostProcessDispatcher", (), { "outputs:referenceTimeNumerator":"inputs:rationalTimeNumerator", "outputs:referenceTimeDenominator":"inputs:rationalTimeDenominator" } ), # connected to a TestSyncOnDemand node for the first renderproduct (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncOnDemand", (0,), 
{"outputs:exec":"inputs:execIn"} ), # connected to a TestSyncOnDemand node for the second renderproduct (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncOnDemand", (1,), {"outputs:exec":"inputs:execIn"} ), ] ), template_name="TestSyncAccum" # node template name ) sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncAccum node (a TestSyncAccum node will be activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncAccum", (0,1), { "outputs:execOut":"inputs:exec", "outputs:rationalTimeNumerator":"inputs:swhFrameNumber" } ), SyntheticData.NodeConnectionTemplate( "PostProcessDispatch", (0,), {"outputs:renderResults":"inputs:renderResults"} ) ], attributes={ "inputs:randomSeed": 62, "inputs:randomMaxProcessingTimeUs": 33333, "inputs:traceError": True } ), template_name="TestSyncCross" # node template name ) # Activate the node templates for the renderproducts # this will create the node (and all their missing dependencies) within the associated graphs # # activate the TestSyncSim sdg_iface.activate_node_template("TestSyncSim") # wait for the next update to make sure the simulation node is activated when activating the post-render and post-process nodes # activate the TestSyncPost for the renderpoduct renderpoduct_0 # this will also activate the LdrColorSD and BoundingBox3DSD renderVars for the renderpoduct renderpoduct_0 # this will set the tag node attribute to "1" sdg_iface.activate_node_template("TestSyncPost", 0, [render_product_path_0],{"inputs:tag":"1"}) # activate the TestSyncPost for the renderpoduct renderpoduct_1 # this will also activate the LdrColorSD and BoundingBox3DSD renderVars for the renderpoduct renderpoduct_1 # NB TestSyncSim has already been activated # this will set the 
tag node attribute to "2" sdg_iface.activate_node_template("TestSyncPost", 0, [render_product_path_1],{"inputs:tag":"2"}) # FIXME : wait a couple of simulation updates as a workaround of an issue with the first # syncGate not being activated await self.wait_for_num_sims(3) # activate the TestSyncCross for the renderpoducts [renderproduct_0, renderproduct_1] # this will also activate : # - TestSyncAccum for the renderpoducts [renderproduct_0, renderproduct_1] # - PostProcessDispatch for the renderpoduct renderproduct_0 # - TestSyncOnDemand for the renderproduct renderproduct_0 # - TestSyncOnDemand for the renderproduct renderproduct_1 # - PostProcessDispatch for the renderpoduct renderproduct_1 # this will set the tag node attribute to "5" and processingTime to 30000 sdg_iface.activate_node_template("TestSyncCross", 0, [render_product_path_0,render_product_path_1],{"inputs:tag":"5"}) # Set some specific attributes to nodes that have been automatically activated # set the tag to the TestSyncOnDemand for renderproduct renderproduct_0 sdg_iface.set_node_attributes("TestSyncOnDemand",{"inputs:tag":"3"},render_product_path_0) # set the tag to the TestSyncOnDemand for renderproduct renderproduct_1 sdg_iface.set_node_attributes("TestSyncOnDemand",{"inputs:tag":"4"},render_product_path_1) # setup members self._num_sims = 555 async def tearDown(self): self._hydra_texture_rendered_counter_sub = None self._hydra_texture_0 = None self._hydra_texture_1 = None self._usd_context.close_stage() omni.usd.release_all_hydra_engines(self._usd_context) self._hydra_texture_factory = None self._settings = None wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def test_pipline(self): """ Test swh frame synhronization """ await self.wait_for_num_sims(self._num_sims)
15,517
Python
51.073825
209
0.595476
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/pipeline/test_instance_mapping.py
import carb from pxr import Gf, UsdGeom, UsdLux, Sdf import omni.hydratexture import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage # Test the instance mapping pipeline class TestInstanceMapping(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) def render_product_path(self, hydra_texture) -> str: '''Return a string to the UsdRender.Product used by the texture''' render_product = hydra_texture.get_render_product_path() if render_product and (not render_product.startswith('/')): render_product = '/Render/RenderProduct_' + render_product return render_product def register_test_instance_mapping_pipeline(self): sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestSimSWHFrameNumber"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdUpdateSwFrameNumber" ), template_name="TestSimSWHFrameNumber" ) if not sdg_iface.is_node_template_registered("TestSimInstanceMapping"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdTestInstanceMapping", [ SyntheticData.NodeConnectionTemplate("TestSimSWHFrameNumber", ()) ], {"inputs:stage":"simulation"} ), template_name="TestSimInstanceMapping" ) if not sdg_iface.is_node_template_registered("TestOnDemandInstanceMapping"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTestInstanceMapping", [ SyntheticData.NodeConnectionTemplate("InstanceMappingPtrWithTransforms"), SyntheticData.NodeConnectionTemplate("TestSimInstanceMapping", (), attributes_mapping={"outputs:exec": "inputs:exec"}) ], {"inputs:stage":"ondemand"} ), template_name="TestOnDemandInstanceMapping" ) def activate_test_instance_mapping_pipeline(self, case_index): sdg_iface = SyntheticData.Get() sdg_iface.activate_node_template("TestSimInstanceMapping", 
attributes={"inputs:testCaseIndex":case_index}) sdg_iface.activate_node_template("TestOnDemandInstanceMapping", 0, [self.render_product_path(self._hydra_texture_0)], {"inputs:testCaseIndex":case_index}) sdg_iface.connect_node_template("TestSimInstanceMapping", "InstanceMappingPre", None, {"outputs:semanticFilterPredicate":"inputs:semanticFilterPredicate"}) async def wait_for_num_frames(self, num_frames, max_num_frames=5000): self._hydra_texture_rendered_counter = 0 wait_frames_left = max_num_frames while (self._hydra_texture_rendered_counter < num_frames) and (wait_frames_left > 0): await omni.kit.app.get_app().next_update_async() wait_frames_left -= 1 async def setUp(self): self._settings = carb.settings.acquire_settings_interface() self._hydra_texture_factory = omni.hydratexture.acquire_hydra_texture_factory_interface() self._usd_context_name = '' self._usd_context = omni.usd.get_context(self._usd_context_name) await self._usd_context.new_stage_async() # renderer renderer = "rtx" if renderer not in self._usd_context.get_attached_hydra_engine_names(): omni.usd.add_hydra_engine(renderer, self._usd_context) # create the hydra textures self._hydra_texture_0 = self._hydra_texture_factory.create_hydra_texture( "TEX0", 1920, 1080, self._usd_context_name, hydra_engine_name=renderer, is_async=self._settings.get("/app/asyncRendering") ) self._hydra_texture_rendered_counter = 0 def on_hydra_texture_0(event: carb.events.IEvent): self._hydra_texture_rendered_counter += 1 self._hydra_texture_rendered_counter_sub = self._hydra_texture_0.get_event_stream().create_subscription_to_push_by_type( omni.hydratexture.EVENT_TYPE_DRAWABLE_CHANGED, on_hydra_texture_0, name='async rendering test drawable update', ) self.register_test_instance_mapping_pipeline() async def tearDown(self): self._hydra_texture_rendered_counter_sub = None self._hydra_texture_0 = None self._usd_context.close_stage() omni.usd.release_all_hydra_engines(self._usd_context) self._hydra_texture_factory = None 
self._settings = None wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def test_case_0(self): self.activate_test_instance_mapping_pipeline(0) await self.wait_for_num_frames(11) async def test_case_1(self): self.activate_test_instance_mapping_pipeline(1) await self.wait_for_num_frames(11) async def test_case_2(self): self.activate_test_instance_mapping_pipeline(2) await self.wait_for_num_frames(11) async def test_case_3(self): self.activate_test_instance_mapping_pipeline(3) await self.wait_for_num_frames(11) async def test_case_4(self): self.activate_test_instance_mapping_pipeline(4) await self.wait_for_num_frames(11) async def test_case_5(self): self.activate_test_instance_mapping_pipeline(5) await self.wait_for_num_frames(11) async def test_case_6(self): self.activate_test_instance_mapping_pipeline(6) await self.wait_for_num_frames(11)
6,297
Python
40.43421
142
0.612514
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/pipeline/test_instance_mapping_update.py
import carb import os.path from pxr import Gf, UsdGeom, UsdLux, Sdf import omni.hydratexture import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage from ..utils import add_semantics # Test the instance mapping update Fabric flag class TestInstanceMappingUpdate(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) # Dictionnary containing the pair (file_path , reference_data). If the reference data is None only the existence of the file is validated. self._golden_references = {} def _texture_render_product_path(self, hydra_texture) -> str: '''Return a string to the UsdRender.Product used by the texture''' render_product = hydra_texture.get_render_product_path() if render_product and (not render_product.startswith('/')): render_product = '/Render/RenderProduct_' + render_product return render_product def _assert_count_equal(self, counter_template_name, count): count_output = SyntheticData.Get().get_node_attributes( counter_template_name, ["outputs:count"], self._render_product_path ) assert "outputs:count" in count_output assert count_output["outputs:count"] == count def _activate_fabric_time_range(self) -> None: sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestSimFabricTimeRange"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTestSimFabricTimeRange" ), template_name="TestSimFabricTimeRange" ) sdg_iface.activate_node_template( "TestSimFabricTimeRange", attributes={"inputs:timeRangeName":"testFabricTimeRangeTrigger"} ) if not sdg_iface.is_node_template_registered("TestPostRenderFabricTimeRange"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdFabricTimeRangeExecution", [ SyntheticData.NodeConnectionTemplate( SyntheticData.renderer_template_name(), attributes_mapping= { "outputs:rp": "inputs:renderResults", 
"outputs:gpu": "inputs:gpu" } ) ] ), template_name="TestPostRenderFabricTimeRange" ) sdg_iface.activate_node_template( "TestPostRenderFabricTimeRange", 0, [self._render_product_path], attributes={"inputs:timeRangeName":"testFabricTimeRangeTrigger"} ) if not sdg_iface.is_node_template_registered("TestPostProcessFabricTimeRange"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdFabricTimeRangeExecution", [ SyntheticData.NodeConnectionTemplate("PostProcessDispatch"), SyntheticData.NodeConnectionTemplate("TestPostRenderFabricTimeRange") ] ), template_name="TestPostProcessFabricTimeRange" ) sdg_iface.activate_node_template( "TestPostProcessFabricTimeRange", 0, [self._render_product_path], attributes={"inputs:timeRangeName":"testFabricTimeRangeTrigger"} ) if not sdg_iface.is_node_template_registered("TestPostProcessFabricTimeRangeCounter"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.graph.action.Counter", [ SyntheticData.NodeConnectionTemplate( "TestPostProcessFabricTimeRange", attributes_mapping={"outputs:exec": "inputs:execIn"} ) ] ), template_name="TestPostProcessFabricTimeRangeCounter" ) sdg_iface.activate_node_template( "TestPostProcessFabricTimeRangeCounter", 0, [self._render_product_path] ) def _activate_instance_mapping_update(self) -> None: sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestPostProcessInstanceMappingUpdate"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTimeChangeExecution", [ SyntheticData.NodeConnectionTemplate("InstanceMappingPtr"), SyntheticData.NodeConnectionTemplate("PostProcessDispatch") ] ), template_name="TestPostProcessInstanceMappingUpdate" ) if not sdg_iface.is_node_template_registered("TestPostProcessInstanceMappingUpdateCounter"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( 
SyntheticDataStage.ON_DEMAND, "omni.graph.action.Counter", [ SyntheticData.NodeConnectionTemplate( "TestPostProcessInstanceMappingUpdate", attributes_mapping={"outputs:exec": "inputs:execIn"} ) ] ), template_name="TestPostProcessInstanceMappingUpdateCounter" ) sdg_iface.activate_node_template( "TestPostProcessInstanceMappingUpdateCounter", 0, [self._render_product_path] ) async def _request_fabric_time_range_trigger(self, number_of_frames=1): sdg_iface = SyntheticData.Get() sdg_iface.set_node_attributes("TestSimFabricTimeRange",{"inputs:numberOfFrames":number_of_frames}) sdg_iface.request_node_execution("TestSimFabricTimeRange") await omni.kit.app.get_app().next_update_async() async def setUp(self): """Called at the begining of every tests""" self._settings = carb.settings.acquire_settings_interface() self._hydra_texture_factory = omni.hydratexture.acquire_hydra_texture_factory_interface() self._usd_context_name = '' self._usd_context = omni.usd.get_context(self._usd_context_name) await self._usd_context.new_stage_async() # renderer renderer = "rtx" if renderer not in self._usd_context.get_attached_hydra_engine_names(): omni.usd.add_hydra_engine(renderer, self._usd_context) # create the hydra textures self._hydra_texture_0 = self._hydra_texture_factory.create_hydra_texture( "TEX0", 1920, 1080, self._usd_context_name, hydra_engine_name=renderer, is_async=self._settings.get("/app/asyncRendering") ) self._hydra_texture_rendered_counter = 0 def on_hydra_texture_0(event: carb.events.IEvent): self._hydra_texture_rendered_counter += 1 self._hydra_texture_rendered_counter_sub = self._hydra_texture_0.get_event_stream().create_subscription_to_push_by_type( omni.hydratexture.EVENT_TYPE_DRAWABLE_CHANGED, on_hydra_texture_0, name='async rendering test drawable update', ) stage = omni.usd.get_context().get_stage() world_prim = UsdGeom.Xform.Define(stage,"/World") UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0)) UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 
0, 0)) self._render_product_path = self._texture_render_product_path(self._hydra_texture_0) await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path) await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path) async def tearDown(self): """Called at the end of every tests""" self._hydra_texture_rendered_counter_sub = None self._hydra_texture_0 = None self._usd_context.close_stage() omni.usd.release_all_hydra_engines(self._usd_context) self._hydra_texture_factory = None self._settings = None wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def test_case_0(self): """Test case 0 : no time range""" self._activate_fabric_time_range() await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 11) self._assert_count_equal("TestPostProcessFabricTimeRangeCounter", 0) async def test_case_1(self): """Test case 1 : setup a time range of 5 frames""" self._activate_fabric_time_range() await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path) await self._request_fabric_time_range_trigger(5) await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 11) self._assert_count_equal("TestPostProcessFabricTimeRangeCounter", 5) async def test_case_2(self): """Test case 2 : initial instance mapping setup""" self._activate_instance_mapping_update() await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 11) self._assert_count_equal("TestPostProcessInstanceMappingUpdateCounter", 1) async def test_case_3(self): """Test case 3 : setup an instance mapping with 1, 2, 3, 4 changes""" stage = omni.usd.get_context().get_stage() self._activate_instance_mapping_update() await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 1) self._assert_count_equal("TestPostProcessInstanceMappingUpdateCounter", 1) sphere_prim = 
stage.DefinePrim("/World/Sphere", "Sphere") add_semantics(sphere_prim, "sphere") await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 3) self._assert_count_equal("TestPostProcessInstanceMappingUpdateCounter", 2) sub_sphere_prim = stage.DefinePrim("/World/Sphere/Sphere", "Sphere") await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 5) self._assert_count_equal("TestPostProcessInstanceMappingUpdateCounter", 3) add_semantics(sub_sphere_prim, "sphere") await omni.syntheticdata.sensors.next_render_simulation_async(self._render_product_path, 1) self._assert_count_equal("TestPostProcessInstanceMappingUpdateCounter", 4)
11,297
Python
45.303279
146
0.610605
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_motion_vector.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from PIL import Image from time import time from pathlib import Path import carb import numpy as np from numpy.lib.arraysetops import unique import unittest import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestMotionVector(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" self.output_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." 
/ "data" / "output" def writeDataToImage(self, data, name): if not os.path.isdir(self.output_image_path): os.mkdir(self.output_image_path) data = ((data + 1.0) / 2) * 255 outputPath = str(self.output_image_path) + "/" + name + ".png" print("Writing data to " + outputPath) Image.fromarray(data.astype(np.uint8), "RGBA").save(outputPath) # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.MotionVector) async def test_empty(self): """ Test motion vector sensor on empty stage. """ await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_motion_vector(self.viewport) allChannelsAreZero = np.allclose(data, 0, atol=0.001) if not allChannelsAreZero: self.writeDataToImage(data, "test_empty") assert allChannelsAreZero async def test_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_motion_vector(self.viewport) assert data.dtype == np.float32 async def test_unmoving_cube(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) cube.GetAttribute("primvars:displayColor").Set([(0, 0, 1)]) UsdGeom.Xformable(cube).AddTranslateOp() cube.GetAttribute("xformOp:translate").Set((350, 365, 350), time=0) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_motion_vector(self.viewport) # 4th channel will wary based on geo hit, so we ignore checking it here rgbChannelsAreZero = np.allclose(data[:, [0, 1, 2]], 0, atol=0.001) if not rgbChannelsAreZero: 
self.writeDataToImage(data, "test_unmoving_cube") assert rgbChannelsAreZero @unittest.skip("OM-44310") async def test_partially_disoccluding_cube(self): # disabling temporarly the test for OMNI-GRAPH support : OM-44310 stage = omni.usd.get_context().get_stage() stage.SetStartTimeCode(0) stage.SetEndTimeCode(100) cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(10) cube.GetAttribute("primvars:displayColor").Set([(0, 0, 1)]) # add translation down to create disocclusion due to fetching from out of screen bounds UsdGeom.Xformable(cube).AddTranslateOp() cube.GetAttribute("xformOp:translate").Set((480, 487, 480), time=0) cube.GetAttribute("xformOp:translate").Set((480, 480, 480), time=0.001) # add rotation around up vector to create disocclusion due to fetching from an incompatible surface UsdGeom.Xformable(cube).AddRotateYOp() cube.GetAttribute("xformOp:rotateY").Set(40, time=0) cube.GetAttribute("xformOp:rotateY").Set(70, time=0.001) await omni.kit.app.get_app().next_update_async() # Render one frame itl = omni.timeline.get_timeline_interface() itl.play() await syn.sensors.next_sensor_data_async(self.viewport, True) data = syn.sensors.get_motion_vector(self.viewport) golden_image = np.load(self.golden_image_path / "motion_partially_disoccluding_cube.npz")["array"] # normalize xy (mvec) to zw channels' value range # x100 seems like a good number to bring mvecs to ~1 data[:, [0, 1]] *= 100 golden_image[:, [0, 1]] *= 100 std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) # OM-41605 - using higher std dev here to make linux run succeed std_dev_tolerance = 0.12 print("Calculated std.dev: " + str(std_dev), " Std dev tolerance: " + str(std_dev_tolerance)) if std_dev >= std_dev_tolerance: self.writeDataToImage(golden_image, "test_partially_disoccluding_cube_golden") self.writeDataToImage(data, "test_partially_disoccluding_cube") np.savez_compressed(self.output_image_path / 
"motion_partially_disoccluding_cube.npz", array=data) assert std_dev < std_dev_tolerance # After running each test async def tearDown(self): pass
6,078
Python
41.510489
141
0.662718
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_occlusion.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math from time import time from pathlib import Path import carb import numpy as np import unittest import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestOcclusion(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." 
/ "data" / "golden" # Before running each test async def setUp(self): await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() self.viewport = get_active_viewport() # Initialize Sensors syn.sensors.enable_sensors( self.viewport, [ syn._syntheticdata.SensorType.BoundingBox2DLoose, syn._syntheticdata.SensorType.BoundingBox2DTight, syn._syntheticdata.SensorType.Occlusion, ], ) await syn.sensors.next_sensor_data_async(self.viewport,True) async def test_fields_exist(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_occlusion(self.viewport) valid_dtype = [("instanceId", "<u4"), ("semanticId", "<u4"), ("occlusionRatio", "<f4")] assert data.dtype == np.dtype(valid_dtype) async def test_fields_exist_parsed(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_occlusion(self.viewport, parsed=True) valid_dtype = [ ("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O"), ("semanticId", "<u4"), ("occlusionRatio", "<f4"), ] assert data.dtype == np.dtype(valid_dtype) async def test_occlusion(self): path = os.path.join(FILE_DIR, "../data/scenes/occlusion.usda") await omni.usd.get_context().open_stage_async(path) syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) occlusion_out = syn.sensors.get_occlusion(self.viewport, parsed=True) for row in occlusion_out: gt = float(row["semanticLabel"]) / 100.0 assert math.isclose(gt, row["occlusionRatio"], abs_tol=0.015), f"Expected {gt}, got {row['occlusionRatio']}" async def test_self_occlusion(self): path = os.path.join(FILE_DIR, "../data/scenes/torus_sphere.usda") await 
omni.usd.get_context().open_stage_async(path) syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) occlusion_out = syn.sensors.get_occlusion(self.viewport) occlusion_out_ratios = np.sort(occlusion_out["occlusionRatio"]) assert np.allclose(occlusion_out_ratios, [0.0, 0.6709], atol=0.05) async def test_full_occlusion(self): path = os.path.join(FILE_DIR, "../data/scenes/cube_full_occlusion.usda") await omni.usd.get_context().open_stage_async(path) syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) occlusion_out = syn.sensors.get_occlusion(self.viewport) occlusion_out_ratios = np.sort(occlusion_out["occlusionRatio"]) assert np.allclose(occlusion_out_ratios, [0.0, 1.0], atol=0.05) async def test_occlusion_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) path = os.path.join(FILE_DIR, "../data/scenes/occlusion.usda") await omni.usd.get_context().open_stage_async(path) syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) occlusion_out = syn.sensors.get_occlusion(self.viewport, parsed=True) for row in occlusion_out: gt = float(row["semanticLabel"]) / 100.0 assert math.isclose(gt, row["occlusionRatio"], abs_tol=0.015), f"Expected {gt}, got {row['occlusionRatio']}" async def test_occlusion_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. """ # Set the rendering mode to be ray traced lighting. 
settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") path = os.path.join(FILE_DIR, "../data/scenes/occlusion.usda") await omni.usd.get_context().open_stage_async(path) syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) occlusion_out = syn.sensors.get_occlusion(self.viewport, parsed=True) for row in occlusion_out: gt = float(row["semanticLabel"]) / 100.0 assert math.isclose(gt, row["occlusionRatio"], abs_tol=0.015), f"Expected {gt}, got {row['occlusionRatio']}" async def test_occlusion_ftheta(self): """ Basic funtionality test of the sensor under ftheta camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) path = os.path.join(FILE_DIR, "../data/scenes/occlusion.usda") await omni.usd.get_context().open_stage_async(path) await omni.kit.app.get_app().next_update_async() stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((100, 200, 300)) self.viewport.camera_path = camera.GetPath() syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) # Camera type should not affect occlusion. 
occlusion_out = syn.sensors.get_occlusion(self.viewport, parsed=True) data = np.array([row['occlusionRatio'] for row in occlusion_out]) # np.savez_compressed(self.golden_image_path / 'occlusion_ftheta.npz', array=data) golden = np.load(self.golden_image_path / "occlusion_ftheta.npz")["array"] assert np.isclose(data, golden, atol=1e-3).all() async def test_occlusion_spherical(self): """ Basic funtionality test of the sensor under spherical camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) path = os.path.join(FILE_DIR, "../data/scenes/occlusion.usda") await omni.usd.get_context().open_stage_async(path) await omni.kit.app.get_app().next_update_async() stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((100, 200, 300)) self.viewport.camera_path = camera.GetPath() syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.Occlusion]) await syn.sensors.next_sensor_data_async(self.viewport,True) # Camera type should not affect occlusion. occlusion_out = syn.sensors.get_occlusion(self.viewport, parsed=True) data = np.array([row['occlusionRatio'] for row in occlusion_out]) # np.savez_compressed(self.golden_image_path / 'occlusion_spherical.npz', array=data) golden = np.load(self.golden_image_path / "occlusion_spherical.npz")["array"] assert np.isclose(data, golden, atol=1e-1).all() @unittest.skip("OM-44310") async def test_occlusion_quadrant(self): # disabling temporarly the test for OMNI-GRAPH support : OM-44310 # Test quadrant sensor. 
It takes loose and tight bounding boxes to # return the type of occlusion # Expected occlusion value for time=1, 2, 3... TESTS = [ "fully-occluded", "left", "right", "bottom", "top", "fully-visible", # corner occlusion "fully-visible", # corner occlusion "bottom-right", "bottom-left", "top-right", "top-left", "fully-visible", ] path = os.path.join(FILE_DIR, "../data/scenes/occlusion_quadrant.usda") await omni.usd.get_context().open_stage_async(path) await omni.kit.app.get_app().next_update_async() syn.sensors.enable_sensors( self.viewport, [ syn._syntheticdata.SensorType.BoundingBox2DLoose, syn._syntheticdata.SensorType.BoundingBox2DTight, syn._syntheticdata.SensorType.Occlusion, ], ) await syn.sensors.next_sensor_data_async(self.viewport,True) timeline_iface = omni.timeline.get_timeline_interface() timeline_iface.set_time_codes_per_second(1) for time, gt in enumerate(TESTS): timeline_iface.set_current_time(time) await omni.kit.app.get_app().next_update_async() # Investigate these in OM-31155 sensor_out = syn.sensors.get_occlusion_quadrant(self.viewport) result = sensor_out["occlusion_quadrant"][0] assert result == gt, f"Got {result}, expected {gt}" # After running each test async def tearDown(self): pass
11,399
Python
43.705882
141
0.645495
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_renderproduct_camera.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import carb from pxr import Gf, UsdGeom, Sdf, UsdLux from omni.kit.viewport.utility import get_active_viewport, create_viewport_window import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage # Test the RenderProductCamera nodes class TestRenderProductCamera(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) self.numLoops = 7 self.multiViewport = False # Setup the scene await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() # Setup viewports / renderproduct # first default viewport with the default perspective camera viewport_0 = get_active_viewport() resolution_0 = viewport_0.resolution camera_0 = UsdGeom.Camera.Define(stage, "/Camera0").GetPrim() viewport_0.camera_path = camera_0.GetPath() render_product_path_0 = viewport_0.render_product_path self.render_product_path_0 = render_product_path_0 # second viewport with a ftheta camera if self.multiViewport: resolution_1 = (512, 512) viewport_window = create_viewport_window(width=resolution_1[0], height=resolution_1[1]) viewport_1 = viewport_window.viewport_api viewport_1.resolution = resolution_1 camera_1 = UsdGeom.Camera.Define(stage, "/Camera1").GetPrim() viewport_1.camera_path = camera_1.GetPath() render_product_path_1 = viewport_1.render_product_path self.render_product_path_1 = render_product_path_1 # SyntheticData singleton interface sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestSimRpCam"): 
sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdTestRenderProductCamera", attributes={"inputs:stage":"simulation"} ), template_name="TestSimRpCam" ) if not sdg_iface.is_node_template_registered("TestPostRpCam"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.POST_RENDER, "omni.syntheticdata.SdTestRenderProductCamera", [SyntheticData.NodeConnectionTemplate("PostRenderProductCamera")], attributes={"inputs:stage":"postRender"} ), template_name="TestPostRpCam" ) if not sdg_iface.is_node_template_registered("TestOnDemandRpCam"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTestRenderProductCamera", [ SyntheticData.NodeConnectionTemplate("PostProcessRenderProductCamera"), SyntheticData.NodeConnectionTemplate( "PostProcessDispatch", attributes_mapping={"outputs:renderResults": "inputs:renderResults"}) ], attributes={"inputs:stage":"onDemand"} ), template_name="TestOnDemandRpCam" ) attributes_0 = { "inputs:renderProductCameraPath":camera_0.GetPath().pathString, "inputs:width":resolution_0[0], "inputs:height":resolution_0[1] } sdg_iface.activate_node_template("TestSimRpCam", 0, [render_product_path_0], attributes_0) sdg_iface.activate_node_template("TestPostRpCam", 0, [render_product_path_0], attributes_0) sdg_iface.activate_node_template("TestOnDemandRpCam", 0, [render_product_path_0],attributes_0) if self.multiViewport: attributes_1 = { "inputs:renderProductCameraPath":camera_1.GetPath().pathString, "inputs:width":resolution_1[0], "inputs:height":resolution_1[1] } sdg_iface.activate_node_template("TestSimRpCam", 0, [render_product_path_1], attributes_1) sdg_iface.activate_node_template("TestPostRpCam", 0, [render_product_path_1], attributes_1) sdg_iface.activate_node_template("TestOnDemandRpCam", 0, [render_product_path_1],attributes_1) async def test_renderproduct_camera(self): """ Test render product 
camera pipeline """ sdg_iface = SyntheticData.Get() test_outname = "outputs:test" test_attributes_names = [test_outname] for _ in range(3): await omni.kit.app.get_app().next_update_async() for _ in range(self.numLoops): await omni.kit.app.get_app().next_update_async() assert sdg_iface.get_node_attributes("TestSimRpCam", test_attributes_names, self.render_product_path_0)[test_outname] assert sdg_iface.get_node_attributes("TestPostRpCam", test_attributes_names, self.render_product_path_0)[test_outname] assert sdg_iface.get_node_attributes("TestOnDemandRpCam", test_attributes_names, self.render_product_path_0)[test_outname] if self.multiViewport: assert sdg_iface.get_node_attributes("TestSimRpCam", test_attributes_names, self.render_product_path_1)[test_outname] assert sdg_iface.get_node_attributes("TestPostRpCam", test_attributes_names, self.render_product_path_1)[test_outname] assert sdg_iface.get_node_attributes("TestOnDemandRpCam", test_attributes_names, self.render_product_path_1)[test_outname] async def tearDown(self): pass
6,317
Python
46.149253
138
0.626563
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_swh_frame_number.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import carb from pxr import Gf, UsdGeom, UsdLux, Sdf import unittest import omni.kit.test from omni.kit.viewport.utility import get_active_viewport, create_viewport_window from omni.syntheticdata import SyntheticData, SyntheticDataStage # Test the Fabric frame number synchronization class TestSWHFrameNumber(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) # Setup the scene await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() world_prim = UsdGeom.Xform.Define(stage,"/World") UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0)) UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule0_prim = stage.DefinePrim("/World/Capsule0", "Capsule") UsdGeom.Xformable(capsule0_prim).AddTranslateOp().Set((100, 0, 0)) UsdGeom.Xformable(capsule0_prim).AddScaleOp().Set((30, 30, 30)) UsdGeom.Xformable(capsule0_prim).AddRotateXYZOp().Set((-90, 0, 0)) capsule0_prim.GetAttribute("primvars:displayColor").Set([(0.3, 1, 0)]) capsule1_prim = stage.DefinePrim("/World/Capsule1", "Capsule") UsdGeom.Xformable(capsule1_prim).AddTranslateOp().Set((-100, 0, 0)) UsdGeom.Xformable(capsule1_prim).AddScaleOp().Set((30, 30, 30)) UsdGeom.Xformable(capsule1_prim).AddRotateXYZOp().Set((-90, 0, 0)) capsule1_prim.GetAttribute("primvars:displayColor").Set([(0, 1, 0.3)]) spherelight = UsdLux.SphereLight.Define(stage, "/SphereLight") spherelight.GetIntensityAttr().Set(30000) spherelight.GetRadiusAttr().Set(30) # first default viewport with the default 
perspective camera viewport_0 = get_active_viewport() render_product_path_0 = viewport_0.render_product_path # second viewport with a ftheta camera viewport_1_window = create_viewport_window(width=512, height=512) viewport_1 = viewport_1_window.viewport_api camera_1 = stage.DefinePrim("/Camera1", "Camera") camera_1.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") UsdGeom.Xformable(camera_1).AddTranslateOp().Set((0, 250, 0)) UsdGeom.Xformable(camera_1).AddRotateXYZOp().Set((-90, 0, 0)) viewport_1.camera_path = camera_1.GetPath() render_product_path_1 = viewport_1.render_product_path # SyntheticData singleton interface sdg_iface = SyntheticData.Get() # Register node templates in the SyntheticData register # (a node template is a template for creating a node specified by its type and its connections) # # to illustrate we are using the generic omni.syntheticdata.SdTestStageSynchronization node type which supports every stage of the SyntheticData pipeline. When executed it logs the fabric frame number. 
# # register a node template in the simulation stage # NB : this node template has no connections if not sdg_iface.is_node_template_registered("TestSyncSim"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, # node tempalte stage "omni.syntheticdata.SdTestStageSynchronization", # node template type attributes={"inputs:tag":"0"}), # node template default attribute values (when differs from the default value specified in the .ogn) template_name="TestSyncSim" # node template name ) # register a node template in the postrender stage # NB : this template may be activated for several different renderproducts if not sdg_iface.is_node_template_registered("TestSyncPost"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.POST_RENDER, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncSim node (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate("TestSyncSim", (), None), # connected to a LdrColorSD rendervar (the renderVar will be activated when activating this template) SyntheticData.NodeConnectionTemplate("LdrColorSD"), # connected to a BoundingBox3DSD rendervar (the renderVar will be activated when activating this template) SyntheticData.NodeConnectionTemplate("BoundingBox3DSD") ]), template_name="TestSyncPost" # node template name ) # register a node template in the postprocess stage # NB : this template may be activated for several different renderproducts if not sdg_iface.is_node_template_registered("TestSyncOnDemand"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncSim node (a TestSyncSim node will be activated when activating this template) 
SyntheticData.NodeConnectionTemplate("TestSyncSim", (), None), # connected to a PostProcessDispatch node : the PostProcessDispatch node trigger the execution of its downstream connections for every rendered frame # (a PostProcessDispatch node will be activated when activating this template) SyntheticData.NodeConnectionTemplate("PostProcessDispatch") ] ), template_name="TestSyncOnDemand" # node template name ) # register a node template in the postprocess stage # NB : this template may be activated for any combination of renderproduct pairs if not sdg_iface.is_node_template_registered("TestSyncCross"): # register an accumulator which trigger once when all its upstream connections have triggered sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.graph.action.RationalTimeSyncGate", # node template type # node template connections [ # connected to the PostProcessDispatcher for the synchronization value SyntheticData.NodeConnectionTemplate( "PostProcessDispatcher", (), { "outputs:referenceTimeNumerator":"inputs:rationalTimeNumerator", "outputs:referenceTimeDenominator":"inputs:rationalTimeDenominator" } ), # connected to a TestSyncOnDemand node for the first renderproduct (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncOnDemand", (0,), {"outputs:exec":"inputs:execIn"} ), # connected to a TestSyncOnDemand node for the second renderproduct (a TestSyncSim node will be activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncOnDemand", (1,), {"outputs:exec":"inputs:execIn"} ), ] ), template_name="TestSyncAccum" # node template name ) sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.ON_DEMAND, # node template stage "omni.syntheticdata.SdTestStageSynchronization", # node template type # node template connections [ # connected to a TestSyncAccum node (a TestSyncAccum node will be 
activated when activating this template) SyntheticData.NodeConnectionTemplate( "TestSyncAccum", (0,1), { "outputs:execOut":"inputs:exec", "outputs:rationalTimeNumerator":"inputs:swhFrameNumber" } ), SyntheticData.NodeConnectionTemplate( "PostProcessDispatch", (0,), {"outputs:renderResults":"inputs:renderResults"} ) ] ), template_name="TestSyncCross" # node template name ) # Activate the node templates for the renderproducts # this will create the node (and all their missing dependencies) within the associated graphs # # activate the TestSyncPost for the renderpoduct renderpoduct_0 # this will also activate the TestSyncSim node and the LdrColorSD and BoundingBox3DSD renderVars for the renderpoduct renderpoduct_0 # this will set the tag node attribute to "1" sdg_iface.activate_node_template("TestSyncPost", 0, [render_product_path_0],{"inputs:tag":"1"}) # activate the TestSyncPost for the renderpoduct renderpoduct_1 # this will also activate the LdrColorSD and BoundingBox3DSD renderVars for the renderpoduct renderpoduct_1 # NB TestSyncSim has already been activated # this will set the tag node attribute to "2" sdg_iface.activate_node_template("TestSyncPost", 0, [render_product_path_1],{"inputs:tag":"2"}) # activate the TestSyncCross for the renderpoducts [renderproduct_0, renderproduct_1] # this will also activate : # - TestSyncAccum for the renderpoducts [renderproduct_0, renderproduct_1] # - PostProcessDispatch for the renderpoduct renderproduct_0 # - TestSyncOnDemand for the renderproduct renderproduct_0 # - TestSyncOnDemand for the renderproduct renderproduct_1 # - PostProcessDispatch for the renderpoduct renderproduct_1 # this will set the tag node attribute to "5" sdg_iface.activate_node_template("TestSyncCross", 0, [render_product_path_0,render_product_path_1],{"inputs:tag":"5"}) # Set some specific attributes to nodes that have been automatically activated # set the tag to the TestSyncOnDemand for renderproduct renderproduct_0 
sdg_iface.set_node_attributes("TestSyncOnDemand",{"inputs:tag":"3"},render_product_path_0) # set the tag to the TestSyncOnDemand for renderproduct renderproduct_1 sdg_iface.set_node_attributes("TestSyncOnDemand",{"inputs:tag":"4"},render_product_path_1) # setup members self.render_product_path_0 = render_product_path_0 self.render_product_path_1 = render_product_path_1 self.numLoops = 33 async def run_loop(self): sdg_iface = SyntheticData.Get() render_product_path_0 = self.render_product_path_0 render_product_path_1 = self.render_product_path_1 test_attributes_names = ["outputs:swhFrameNumber","outputs:fabricSWHFrameNumber"] # ensuring that the setup is taken into account for _ in range(5): await omni.kit.app.get_app().next_update_async() for _ in range(self.numLoops): await omni.kit.app.get_app().next_update_async() # test the post-render pipeline synchronization sync_post_attributes = sdg_iface.get_node_attributes( "TestSyncPost",test_attributes_names,render_product_path_0) assert sync_post_attributes and all(attr in sync_post_attributes for attr in test_attributes_names) assert sync_post_attributes["outputs:swhFrameNumber"] == sync_post_attributes["outputs:fabricSWHFrameNumber"] # test the on-demand pipeline synchronization sync_ondemand_attributes = sdg_iface.get_node_attributes( "TestSyncOnDemand",test_attributes_names,render_product_path_1) assert sync_ondemand_attributes and all(attr in sync_ondemand_attributes for attr in test_attributes_names) assert sync_ondemand_attributes["outputs:swhFrameNumber"] == sync_ondemand_attributes["outputs:fabricSWHFrameNumber"] # test the on-demand cross renderproduct synchronization sync_cross_ondemand_attributes = sdg_iface.get_node_attributes( "TestSyncCross",test_attributes_names,render_product_path_0) assert sync_cross_ondemand_attributes and all(attr in sync_cross_ondemand_attributes for attr in test_attributes_names) assert sync_cross_ondemand_attributes["outputs:swhFrameNumber"] == 
sync_cross_ondemand_attributes["outputs:fabricSWHFrameNumber"] async def test_sync_idle(self): """ Test swh frame synhronization with : - asyncRendering Off - waitIdle On """ settings = carb.settings.get_settings() settings.set_bool("/app/asyncRendering",False) settings.set_int("/app/settings/fabricDefaultStageFrameHistoryCount",3) settings.set_bool("/app/hydraEngine/waitIdle",True) await self.run_loop() @unittest.skip("DRIVE-3247 : SyntheticData does not support async rendering.") async def test_sync(self): """ Test swh frame synhronization with : - asyncRendering Off - waitIdle Off """ settings = carb.settings.get_settings() settings.set_bool("/app/asyncRendering",False) settings.set_int("/app/settings/fabricDefaultStageFrameHistoryCount",3) settings.set_bool("/app/hydraEngine/waitIdle",False) await self.run_loop() @unittest.skip("DRIVE-3247 : SyntheticData does not support async rendering.") async def test_async(self): """ Test swh frame synhronization with : - asyncRendering On - waitIdle Off """ settings = carb.settings.get_settings() settings.set_bool("/app/asyncRendering",True) settings.set_int("/app/settings/fabricDefaultStageFrameHistoryCount",3) settings.set_bool("/app/hydraEngine/waitIdle",False) await self.run_loop() async def tearDown(self): # reset to the default params settings = carb.settings.get_settings() settings.set_bool("/app/asyncRendering",False) settings.set_bool("/app/hydraEngine/waitIdle",True)
15,885
Python
53.968858
209
0.620648
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_distance_to_image_plane.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestDistanceToImagePlane(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async( self.viewport, syn._syntheticdata.SensorType.DistanceToImagePlane ) async def test_parsed_empty(self): """ Test distance-to-image-plane sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_image_plane(self.viewport) assert np.all(data > 1000) async def test_parsed_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_image_plane(self.viewport) assert data.dtype == np.float32 async def test_distances(self): stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_image_plane(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) async def test_distances_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" # Set the rendering mode to be pathtracing settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_image_plane(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) async def test_distances_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. """ # Set the rendering mode to be pathtracing settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_image_plane(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) # After running each test async def tearDown(self): pass
5,504
Python
38.321428
141
0.633903
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_semantic_filter.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import unittest import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from omni.syntheticdata import SyntheticData from ..utils import add_semantics import numpy as np # Test the semantic filter class TestSemanticFilter(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() # scene # /World [belong_to:world] # /Cube [class:cube] # /Sphere [class:sphere] # /Sphere [class:sphere] # /Capsule [class:capsule] # /Cube [class:cube] # /Capsule [class:capsule] # /Nothing [belong_to:nothing] world_prim = stage.DefinePrim("/World", "Plane") add_semantics(world_prim, "world", "belong_to") world_cube_prim = stage.DefinePrim("/World/Cube", "Cube") add_semantics(world_cube_prim, "cube", "class") world_cube_sphere_prim = stage.DefinePrim("/World/Cube/Sphere", "Sphere") add_semantics(world_cube_sphere_prim, "sphere", "class") world_sphere_prim = stage.DefinePrim("/World/Sphere", "Sphere") add_semantics(world_sphere_prim, "sphere", "class") world_capsule_prim = stage.DefinePrim("/World/Capsule", "Capsule") add_semantics(world_capsule_prim, "capsule", "class") cube_prim = stage.DefinePrim("/Cube", "Cube") add_semantics(cube_prim, "cube", "class") capsule_prim = stage.DefinePrim("/Capsule", "Capsule") add_semantics(capsule_prim, "capsule", "class") nothing_prim = stage.DefinePrim("/Nothing", "Plane") add_semantics(nothing_prim, "nothing", "belong_to") self.render_product_path = get_active_viewport().render_product_path SyntheticData.Get().activate_node_template("SemanticLabelTokenSDExportRawArray", 0, [self.render_product_path]) await omni.kit.app.get_app().next_update_async() 
def fetch_semantic_label_tokens(self): output_names = ["outputs:data","outputs:bufferSize"] outputs = SyntheticData.Get().get_node_attributes("SemanticLabelTokenSDExportRawArray", output_names, self.render_product_path) assert outputs return outputs["outputs:data"].view(np.uint64) async def wait_for_frames(self): wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def check_num_valid_labels(self, expected_num_valid_labels): await self.wait_for_frames() num_valid_labels = np.count_nonzero(self.fetch_semantic_label_tokens()) assert num_valid_labels == expected_num_valid_labels async def test_semantic_filter_all(self): SyntheticData.Get().set_default_semantic_filter("*:*", True) await self.check_num_valid_labels(8) async def test_semantic_filter_no_world(self): SyntheticData.Get().set_default_semantic_filter("!belong_to:world", True) # /Cube /Capsule /Nothing await self.check_num_valid_labels(3) async def test_semantic_filter_all_class_test(self): SyntheticData.Get().set_default_semantic_filter("class:*", True) await self.check_num_valid_labels(6) async def test_semantic_filter_all_class_no_cube_test(self): SyntheticData.Get().set_default_semantic_filter("class:!cube&*", True) await self.check_num_valid_labels(3) async def test_semantic_filter_only_sphere_or_cube_test(self): SyntheticData.Get().set_default_semantic_filter("class:cube|sphere", True) await self.check_num_valid_labels(4) async def test_semantic_filter_sphere_and_cube_test(self): SyntheticData.Get().set_default_semantic_filter("class:cube&sphere", True) # /World/Cube/Sphere await self.check_num_valid_labels(1) async def test_semantic_filter_world_and_sphere_test(self): SyntheticData.Get().set_default_semantic_filter("class:sphere,belong_to:world", True) await self.check_num_valid_labels(2) async def test_semantic_filter_no_belong_test(self): SyntheticData.Get().set_default_semantic_filter("belong_to:!*", True) # /Cube /Capsule await 
self.check_num_valid_labels(2) async def test_semantic_filter_world_or_capsule_test(self): SyntheticData.Get().set_default_semantic_filter("belong_to:world;class:capsule", True) await self.check_num_valid_labels(6) async def test_semantic_filter_belong_to_nohierarchy(self): SyntheticData.Get().set_default_semantic_filter("belong_to:*", False) await self.check_num_valid_labels(2) async def test_semantic_filter_getter(self): SyntheticData.Get().set_default_semantic_filter("test:getter", False) await self.wait_for_frames() assert(SyntheticData.Get().get_default_semantic_filter()=="test:getter") async def test_instance_mapping_semantic_filter_all_class_no_cube_test(self): SyntheticData.Get().set_instance_mapping_semantic_filter("class:!cube&*") await self.check_num_valid_labels(4) async def test_instance_mapping_semantic_filter_getter(self): SyntheticData.Get().set_instance_mapping_semantic_filter("test:getter") await self.wait_for_frames() assert(SyntheticData.Get().get_instance_mapping_semantic_filter()=="test:getter") async def tearDown(self): SyntheticData.Get().set_instance_mapping_semantic_filter("*:*") SyntheticData.Get().set_default_semantic_filter("*:*")
5,886
Python
40.457746
135
0.670914
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_depth_linear.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestDepthLinear(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.DepthLinear) async def test_parsed_empty(self): """ Test depth sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth_linear(self.viewport) assert np.all(data > 1000) async def test_parsed_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth_linear(self.viewport) assert data.dtype == np.float32 async def test_distances(self): stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth_linear(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) async def test_distances_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" # Set the rendering mode to be pathtracing settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth_linear(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) async def test_distances_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. """ # Set the rendering mode to be pathtracing settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth_linear(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position assert np.isclose(data.min(), (n - 1) / 100, atol=1e-5) # After running each test async def tearDown(self): pass
5,445
Python
38.751825
141
0.632507
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_display_rendervar.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import unittest import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from omni.syntheticdata import SyntheticData # Test the semantic filter class TestDisplayRenderVar(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): await omni.usd.get_context().new_stage_async() self.render_product_path = get_active_viewport().render_product_path await omni.kit.app.get_app().next_update_async() async def wait_for_frames(self): wait_iterations = 6 for _ in range(wait_iterations): await omni.kit.app.get_app().next_update_async() async def test_valid_ldrcolor_texture(self): SyntheticData.Get().activate_node_template("LdrColorDisplay", 0, [self.render_product_path]) await self.wait_for_frames() display_output_names = ["outputs:rpResourcePtr", "outputs:width", "outputs:height", "outputs:format"] display_outputs = SyntheticData.Get().get_node_attributes("LdrColorDisplay", display_output_names, self.render_product_path) assert(display_outputs and all(o in display_outputs for o in display_output_names) and display_outputs["outputs:rpResourcePtr"] != 0 and display_outputs["outputs:format"] == 11) SyntheticData.Get().deactivate_node_template("LdrColorDisplay", 0, [self.render_product_path]) async def test_valid_bbox3d_texture(self): SyntheticData.Get().activate_node_template("BoundingBox3DDisplay", 0, [self.render_product_path]) await self.wait_for_frames() display_output_names = ["outputs:rpResourcePtr", "outputs:width", "outputs:height", "outputs:format"] display_outputs = SyntheticData.Get().get_node_attributes("BoundingBox3DDisplay", display_output_names, self.render_product_path) assert(display_outputs and all(o in display_outputs for o in display_output_names) 
and display_outputs["outputs:rpResourcePtr"] != 0 and display_outputs["outputs:format"] == 11) SyntheticData.Get().deactivate_node_template("BoundingBox3DDisplay", 0, [self.render_product_path]) async def test_valid_cam3dpos_texture(self): SyntheticData.Get().activate_node_template("Camera3dPositionDisplay", 0, [self.render_product_path]) await self.wait_for_frames() display_output_names = ["outputs:rpResourcePtr", "outputs:width", "outputs:height", "outputs:format"] display_outputs = SyntheticData.Get().get_node_attributes("Camera3dPositionDisplay", display_output_names, self.render_product_path) assert(display_outputs and all(o in display_outputs for o in display_output_names) and display_outputs["outputs:rpResourcePtr"] != 0 and display_outputs["outputs:format"] == 11) SyntheticData.Get().deactivate_node_template("Camera3dPositionDisplay", 0, [self.render_product_path])
3,130
Python
61.619999
185
0.720447
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_cross_correspondence.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from PIL import Image from time import time from pathlib import Path import carb import numpy as np from numpy.lib.arraysetops import unique import omni.kit.test from pxr import Gf, UsdGeom from omni.kit.viewport.utility import get_active_viewport, next_viewport_frame_async, create_viewport_window # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 cameras = ["/World/Cameras/CameraFisheyeLeft", "/World/Cameras/CameraPinhole", "/World/Cameras/CameraFisheyeRight"] # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test # This test has to run last and thus it's prefixed as such to force that: # - This is because it has to create additional viewports which makes the test # get stuck if it's not the last one in the OV process session class ZZHasToRunLast_TestCrossCorrespondence(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" self.output_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." 
/ "data" / "output" self.StdDevTolerance = 0.1 self.sensorViewport = None # Before running each test async def setUp(self): global cameras np.random.seed(1234) # Load the scene scenePath = os.path.join(FILE_DIR, "../data/scenes/cross_correspondence.usda") await omni.usd.get_context().open_stage_async(scenePath) await omni.kit.app.get_app().next_update_async() # Get the main-viewport as the sensor-viewport self.sensorViewport = get_active_viewport() await next_viewport_frame_async(self.sensorViewport) # Setup viewports resolution = self.sensorViewport.resolution viewport_windows = [None] * 2 x_pos, y_pos = 12, 75 for i in range(len(viewport_windows)): viewport_windows[i] = create_viewport_window(width=resolution[0], height=resolution[1], position_x=x_pos, position_y=y_pos) viewport_windows[i].width = 500 viewport_windows[i].height = 500 x_pos += 500 # Setup cameras self.sensorViewport.camera_path = cameras[0] for i in range(len(viewport_windows)): viewport_windows[i].viewport_api.camera_path = cameras[i + 1] async def test_golden_image_rt_cubemap(self): settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "RaytracedLighting") settings.set_bool("/rtx/fishEye/useCubemap", True) await omni.kit.app.get_app().next_update_async() # Use default viewport for sensor target as otherwise sensor enablement doesn't work # also the test will get stuck # Initialize Sensor await syn.sensors.create_or_retrieve_sensor_async( self.sensorViewport, syn._syntheticdata.SensorType.CrossCorrespondence ) # Render one frame await syn.sensors.next_sensor_data_async(self.sensorViewport,True) data = syn.sensors.get_cross_correspondence(self.sensorViewport) golden_image = np.load(self.golden_image_path / "cross_correspondence.npz")["array"] # normalize xy (uv offset) to zw channels' value range # x100 seems like a good number to bring uv offset to ~1 data[:, [0, 1]] *= 100 golden_image[:, [0, 1]] *= 100 std_dev = np.sqrt(np.square(data - 
golden_image).astype(float).mean()) if std_dev >= self.StdDevTolerance: if not os.path.isdir(self.output_image_path): os.mkdir(self.output_image_path) np.savez_compressed(self.output_image_path / "cross_correspondence.npz", array=data) golden_image = ((golden_image + 1.0) / 2) * 255 data = ((data + 1.0) / 2) * 255 Image.fromarray(golden_image.astype(np.uint8), "RGBA").save( self.output_image_path / "cross_correspondence_golden.png" ) Image.fromarray(data.astype(np.uint8), "RGBA").save(self.output_image_path / "cross_correspondence.png") self.assertTrue(std_dev < self.StdDevTolerance) async def test_golden_image_rt_non_cubemap(self): settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "RaytracedLighting") settings.set_bool("/rtx/fishEye/useCubemap", False) await omni.kit.app.get_app().next_update_async() # Use default viewport for sensor target as otherwise sensor enablement doesn't work # also the test will get stuck # Initialize Sensor await syn.sensors.create_or_retrieve_sensor_async( self.sensorViewport, syn._syntheticdata.SensorType.CrossCorrespondence ) # Render one frame await syn.sensors.next_sensor_data_async(self.sensorViewport,True) data = syn.sensors.get_cross_correspondence(self.sensorViewport) golden_image = np.load(self.golden_image_path / "cross_correspondence.npz")["array"] # normalize xy (uv offset) to zw channels' value range # x100 seems like a good number to bring uv offset to ~1 data[:, [0, 1]] *= 100 golden_image[:, [0, 1]] *= 100 std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) if std_dev >= self.StdDevTolerance: if not os.path.isdir(self.output_image_path): os.mkdir(self.output_image_path) np.savez_compressed(self.output_image_path / "cross_correspondence.npz", array=data) golden_image = ((golden_image + 1.0) / 2) * 255 data = ((data + 1.0) / 2) * 255 Image.fromarray(golden_image.astype(np.uint8), "RGBA").save( self.output_image_path / "cross_correspondence_golden.png" ) 
Image.fromarray(data.astype(np.uint8), "RGBA").save(self.output_image_path / "cross_correspondence.png") self.assertTrue(std_dev < self.StdDevTolerance) async def test_golden_image_pt(self): settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_bool("/rtx/fishEye/useCubemap", False) await omni.kit.app.get_app().next_update_async() # Use default viewport for sensor target as otherwise sensor enablement doesn't work # also the test will get stuck # Initialize Sensor await syn.sensors.create_or_retrieve_sensor_async( self.sensorViewport, syn._syntheticdata.SensorType.CrossCorrespondence ) # Render one frame await syn.sensors.next_sensor_data_async(self.sensorViewport,True) data = syn.sensors.get_cross_correspondence(self.sensorViewport) golden_image = np.load(self.golden_image_path / "cross_correspondence.npz")["array"] # normalize xy (uv offset) to zw channels' value range # x100 seems like a good number to bring uv offset to ~1 data[:, [0, 1]] *= 100 golden_image[:, [0, 1]] *= 100 std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) if std_dev >= self.StdDevTolerance: if not os.path.isdir(self.output_image_path): os.mkdir(self.output_image_path) np.savez_compressed(self.output_image_path / "cross_correspondence.npz", array=data) golden_image = ((golden_image + 1.0) / 2) * 255 data = ((data + 1.0) / 2) * 255 Image.fromarray(golden_image.astype(np.uint8), "RGBA").save( self.output_image_path / "cross_correspondence_golden.png" ) Image.fromarray(data.astype(np.uint8), "RGBA").save(self.output_image_path / "cross_correspondence.png") self.assertTrue(std_dev < self.StdDevTolerance) async def test_same_position(self): global cameras # Make sure our cross correspondence values converage around 0 when the target and reference cameras are # in the same position settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") 
settings.set_int("/rtx/pathtracing/spp", 32) settings.set_bool("/rtx/fishEye/useCubemap", False) # Use default viewport for sensor target as otherwise sensor enablement doesn't work # also the test will get stuck # Move both cameras to the same position camera_left = omni.usd.get_context().get_stage().GetPrimAtPath(cameras[0]) camera_right = omni.usd.get_context().get_stage().GetPrimAtPath(cameras[2]) UsdGeom.XformCommonAPI(camera_left).SetTranslate(Gf.Vec3d(-10, 4, 0)) UsdGeom.XformCommonAPI(camera_right).SetTranslate(Gf.Vec3d(-10, 4, 0)) await omni.kit.app.get_app().next_update_async() # Initialize Sensor await syn.sensors.create_or_retrieve_sensor_async( self.sensorViewport, syn._syntheticdata.SensorType.CrossCorrespondence ) # Render one frame await syn.sensors.next_sensor_data_async(self.sensorViewport,True) raw_data = syn.sensors.get_cross_correspondence(self.sensorViewport) # Get histogram parameters du_scale = float(raw_data.shape[1] - 1) dv_scale = float(raw_data.shape[0] - 1) du_img = raw_data[:, :, 0] * du_scale dv_img = raw_data[:, :, 1] * dv_scale # Clear all invalid pixels by setting them to 10000.0 invalid_mask = (raw_data[:, :, 2] == -1) du_img[invalid_mask] = 10000.0 dv_img[invalid_mask] = 10000.0 # Selection mask du_selected = (du_img >= -1.0) & (du_img < 1.0) dv_selected = (dv_img >= -1.0) & (dv_img < 1.0) # Calculate bins bins = np.arange(-1.0, 1.0 + 0.1, 0.1) # calculate histograms for cross correspondence values along eacheach axis hist_du, edges_du = np.histogram(du_img[du_selected], bins=bins) hist_dv, edges_dv = np.histogram(dv_img[dv_selected], bins=bins) # ensure the (0.0, 0.0) bins contain the most values self.assertTrue(np.argmax(hist_du) == 10) self.assertTrue(np.argmax(hist_dv) == 10) # After running each test async def tearDown(self): pass
10,904
Python
42.795181
141
0.646735
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_stage_manipulation.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import carb import random from pxr import Gf, UsdGeom, UsdLux, Sdf import unittest import omni.kit.test from omni.syntheticdata import SyntheticData, SyntheticDataStage from omni.kit.viewport.utility import get_active_viewport FILE_DIR = os.path.dirname(os.path.realpath(__file__)) # Test the ogn node repeatability under stage manipulation class TestStageManipulation(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): path = os.path.join(FILE_DIR, "../data/scenes/scene_instance_test.usda") await omni.usd.get_context().open_stage_async(path) #await omni.usd.get_context().new_stage_async() viewport = get_active_viewport() self.render_product_path = viewport.render_product_path # SyntheticData singleton interface sdg_iface = SyntheticData.Get() if not sdg_iface.is_node_template_registered("TestStageManipulationScenarii"): sdg_iface.register_node_template( SyntheticData.NodeTemplate( SyntheticDataStage.SIMULATION, "omni.syntheticdata.SdTestStageManipulationScenarii", attributes={"inputs:worldPrimPath":"/World"} ), template_name="TestStageManipulationScenarii" # node template name ) render_vars = [ #"SemanticMapSD", #"SemanticPrimTokenSD", #"InstanceMapSD", #"InstancePrimTokenSD", #"SemanticLabelTokenSD", #"SemanticLocalTransformSD", #"SemanticWorldTransformSD", "SemanticBoundingBox2DExtentTightSD", #"SemanticBoundingBox2DInfosTightSD", "SemanticBoundingBox2DExtentLooseSD", #"SemanticBoundingBox2DInfosLooseSD", "SemanticBoundingBox3DExtentSD", "SemanticBoundingBox3DInfosSD" ] for rv in render_vars: template_name = "TestRawArray" + rv if not sdg_iface.is_node_template_registered(template_name): sdg_iface.register_node_template( SyntheticData.NodeTemplate( 
SyntheticDataStage.ON_DEMAND, "omni.syntheticdata.SdTestPrintRawArray", [SyntheticData.NodeConnectionTemplate(rv + "ExportRawArray")] ), template_name=template_name ) self.num_loops = 37 async def render_var_test(self, render_var, ref_values, num_references_values, element_type, rand_seed=0, mode="printReferences"): sdg_iface = SyntheticData.Get() sdg_iface.activate_node_template("TestStageManipulationScenarii") sdg_iface.activate_node_template("TestRawArray" + render_var, 0, [self.render_product_path], {"inputs:elementType": element_type, "inputs:referenceValues": ref_values, "inputs:randomSeed": rand_seed, "inputs:mode": mode, "inputs:referenceNumUniqueRandomValues": num_references_values}) for _ in range(self.num_loops): await omni.kit.app.get_app().next_update_async() sdg_iface.deactivate_node_template("TestRawArray" + render_var, 0, [self.render_product_path]) sdg_iface.deactivate_node_template("TestStageManipulationScenarii") @unittest.skip("Unimplemented") async def test_semantic_map(self): await self.render_var_test("SemanticMapSD", [], "uint16", 2) async def test_semantic_bbox3d_extent(self): await self.render_var_test("SemanticBoundingBox3DExtentSD", [ 87.556404, 223.83577, -129.42677, -155.79227, -49.999996, 421.41083, 88.13742, -50.000004, 49.999905, 39.782856, -50.000004, -155.52794, -16.202198, -50.0, 136.29709, -104.94976, -155.52792, 87.556404, -50.000008, 223.83577, 49.99991, -87.8103, -50.0, -50.00001, 276.29846, 50.000004, 421.41083, -50.0, 60.42457, 223.83574, -129.42676, 312.2204, 277.44424, -50.000004, -37.84166, 87.556404, 188.92877, 136.2971, 50.000004 ], 13, "float32", 3, mode="testReferences") # async def test_semantic_bbox3d_infos(self): # await self.render_var_test("SemanticBoundingBox3DInfosSD", # [ # -50.000008, 57.119793, 49.9999, -50.000004, -50.000015, -50.000004, 62.03122, # -50.000008, -50.000004, -50.000004, -50.0, 50.0, -50.0, 57.119793, # 9.5100141e-01, -4.7552836e-01, 6.1506079e+02, 1.0000000e+00, -1.0000000e+00, 
1.3421423e+03, 4.9999901e+01 # ], 11, "int32", 4, mode="printReferences") async def test_semantic_bbox2d_extent_loose(self): await self.render_var_test("SemanticBoundingBox2DExtentLooseSD", [ 733, 479, 532, 507, 460, 611, 763, 309, 17, 827, 789, 698, 554, 947, 789, 581, 534, 156, 582, 323, 825, 298, 562, 959, 595, 299, 117, 445, 572, 31, 622, 609, 228 ], 11, "int32", 5, mode="testReferences") async def test_semantic_bbox2d_extent_tight(self): await self.render_var_test("SemanticBoundingBox2DExtentTightSD", [ 0.0000000e+00, 5.0700000e+02, 1.1600000e+02, 7.4600000e+02, 5.9500000e+02, 2.1474836e+09, 2.1474836e+09, 2.5300000e+02, 3.6100000e+02, 1.7000000e+01, 0.0000000e+00, 2.1474836e+09, 2.1474836e+09, 2.1474836e+09, 2.1474836e+09, 2.1474836e+09, 0.0000000e+00, 0.0000000e+00, 0.0000000e+00, 2.1474836e+09, 0.0000000e+00, 2.1474836e+09, 0.0000000e+00, 3.1000000e+01, 5.3900000e+02, 2.3600000e+02, 2.1474836e+09, 5.7200000e+02, 8.9200000e+02, 9.0500000e+02, 5.6200000e+02, 5.1300000e+02, 0.0000000e+00 ], 11, "int32", 9, mode="testReferences") async def tearDown(self): pass
6,181
Python
48.456
177
0.621582
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_bbox3d.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import unittest import uuid import math import shutil import asyncio from time import time import carb.tokens import carb.settings import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Usd, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from .. import utils FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 TMP = carb.tokens.get_tokens_interface().resolve("${temp}") # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestBBox3D(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.BoundingBox3D) async def test_parsed_empty(self): """ Test 3D bounding box on empty stage. """ bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) assert not bool(bbox3d_data) async def test_fields_exist(self): """ Test the correctness of the output dtype. 
""" stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") utils.add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport, True) bbox3d_data_raw = syn.sensors.get_bounding_box_3d(self.viewport, parsed=False, return_corners=False) bbox3d_data_parsed = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) raw_dtype = np.dtype( [ ("instanceId", "<u4"), ("semanticId", "<u4"), ("x_min", "<f4"), ("y_min", "<f4"), ("z_min", "<f4"), ("x_max", "<f4"), ("y_max", "<f4"), ("z_max", "<f4"), ("transform", "<f4", (4, 4)), ] ) parsed_dtype = np.dtype( [ ("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O"), ("semanticId", "<u4"), ("x_min", "<f4"), ("y_min", "<f4"), ("z_min", "<f4"), ("x_max", "<f4"), ("y_max", "<f4"), ("z_max", "<f4"), ("transform", "<f4", (4, 4)), ("corners", "<f4", (8, 3)), ] ) assert bbox3d_data_raw.dtype == raw_dtype assert bbox3d_data_parsed.dtype == parsed_dtype async def test_parsed_nested_Y_pathtracing(self): """ Test 3D bounding box with nested semantics and transforms, Y-Up, in pathtracing mode. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Y") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) await omni.kit.app.get_app().next_update_async() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) 
self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) async def test_parsed_nested_Y_ray_traced_lighting(self): """ Test 3D bounding box with nested semantics and transforms, Y-Up, in ray traced lighting mode. """ # Set the rendering mode to be ray traced lighting. settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Y") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = 
syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) async def test_parsed_nested_Y(self): """ Test 3D bounding box with nested semantics and transforms, Y-Up. 
""" # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Y") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], 
bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) async def test_parsed_nested_Z(self): """ Test 3D bounding box with nested semantics and transforms, Z-Up. """ # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Z") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], 
[0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) @unittest.skip("OM-45008") async def test_camera_frame_simple_ftheta(self): """ Test 3D bounding box in a simple scene under ftheta camera. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() # TEST SIMPLE SCENE cube = stage.DefinePrim("/Cube", "Cube") cube.GetAttribute("size").Set(2.0) UsdGeom.Xformable(cube).AddTranslateOp().Set((10.0, 1.0, 2)) utils.add_semantics(cube, "cube") camera = stage.DefinePrim("/Camera", "Camera") camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") UsdGeom.Xformable(camera).AddTranslateOp().Set((10.0, 0.0, 0.0)) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) # TODO: find the correct value of distorted result. # The f theta will distort the result. extents = Gf.Range3d([-1.0, 0, 1], [1.0, 2.0, 3]) corners = np.array([[extents.GetCorner(i) for i in range(8)]]) assert not np.allclose(bbox3d_data[0]["corners"], corners) @unittest.skip("OM-45008") async def test_camera_frame_simple_spherical(self): """ Test 3D bounding box in a simple scene under fisheye spherical camera. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() # TEST SIMPLE SCENE cube = stage.DefinePrim("/Cube", "Cube") cube.GetAttribute("size").Set(2.0) UsdGeom.Xformable(cube).AddTranslateOp().Set((10.0, 1.0, 2)) utils.add_semantics(cube, "cube") camera = stage.DefinePrim("/Camera", "Camera") camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") UsdGeom.Xformable(camera).AddTranslateOp().Set((10.0, 0.0, 0.0)) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) # TODO: find the correct value of distorted result. # The spherical camera will distort the result. extents = Gf.Range3d([-1.0, 0, 1], [1.0, 2.0, 3]) corners = np.array([[extents.GetCorner(i) for i in range(8)]]) assert not np.allclose(bbox3d_data[0]["corners"], corners) async def test_camera_frame_simple(self): """ Test 3D bounding box in a simple scene. 
""" stage = omni.usd.get_context().get_stage() # TEST SIMPLE SCENE cube = stage.DefinePrim("/Cube", "Cube") cube.GetAttribute("size").Set(2.0) UsdGeom.Xformable(cube).AddTranslateOp().Set((10.0, 0.0, 10.0)) utils.add_semantics(cube, "cube") camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((10.0, 0.0, 0.0)) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) extents = Gf.Range3d([-1.0, -1.0, 9.0], [1.0, 1.0, 11.0]) corners = np.array([[extents.GetCorner(i) for i in range(8)]]) assert np.allclose(bbox3d_data[0]["corners"], corners) tf = np.eye(4) tf[3, 2] = 10.0 assert np.allclose(bbox3d_data[0]["transform"], tf) async def test_camera_frame_reference(self): """ Test 3D bounding box in a simple scene. """ ref_path = os.path.join(TMP, f"ref_stage{uuid.uuid1()}.usd") ref_stage = Usd.Stage.CreateNew(ref_path) world = ref_stage.DefinePrim("/World", "Xform") world_tf = utils.get_random_transform() UsdGeom.Xformable(world).AddTransformOp().Set(world_tf) cube = ref_stage.DefinePrim("/World/Cube", "Cube") cube.GetAttribute("size").Set(2.0) cube_tf = Gf.Matrix4d().SetTranslateOnly((10.0, 0.0, 10.0)) UsdGeom.Xformable(cube).AddTransformOp().Set(cube_tf) utils.add_semantics(cube, "cube") camera = ref_stage.DefinePrim("/World/Camera", "Camera") camera_tf = cube_tf UsdGeom.Xformable(camera).AddTransformOp().Set(camera_tf) ref_stage.Save() # omni.usd.get_context().new_stage() # await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() rig = stage.DefinePrim("/Rig", "Xform") rig_tf = utils.get_random_transform() UsdGeom.Xformable(rig).AddTransformOp().Set(rig_tf) ref = stage.DefinePrim("/Rig/Ref") ref.GetReferences().AddReference(ref_path, "/World") self.viewport.camera_path = "/Rig/Ref/Camera" # Render one frame await 
syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data_world = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=False ) bbox3d_data_camera = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) extents = Gf.Range3d([-1.0, -1.0, -1.0], [1.0, 1.0, 1.0]) corners = np.array([[extents.GetCorner(i) for i in range(8)]]) assert np.allclose(bbox3d_data_camera[0]["corners"], corners) combined_tf = np.matmul(cube_tf, np.matmul(world_tf, rig_tf)) corners_tf = np.matmul(np.pad(corners.reshape(-1, 3), ((0, 0), (0, 1)), constant_values=1), combined_tf) corners_tf = corners_tf[:, :3].reshape(-1, 8, 3) assert np.allclose(bbox3d_data_world[0]["corners"], corners_tf) # tf = np.eye(4) # tf[3, 2] = 10.0 assert np.allclose(bbox3d_data_world[0]["transform"], combined_tf) pt_camera_min = [bbox3d_data_camera[0][f"{a}_min"] for a in ["x", "y", "z"]] pt_camera_min = np.array([*pt_camera_min, 1.0]) pt_camera_max = [bbox3d_data_camera[0][f"{a}_max"] for a in ["x", "y", "z"]] pt_camera_max = np.array([*pt_camera_max, 1.0]) assert np.allclose(np.matmul(pt_camera_min, bbox3d_data_camera[0]["transform"])[:3], corners[0, 0]) assert np.allclose(np.matmul(pt_camera_max, bbox3d_data_camera[0]["transform"])[:3], corners[0, 7]) async def test_camera_frame_Y(self): # TEST NESTED TRANSFORMS, UP AXIS # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Y") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") camera = stage.DefinePrim("/World/Camera", "Camera") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() 
UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(camera).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) # Move camera with random transform camera_tf = utils.get_random_transform() UsdGeom.Xformable(camera).AddTransformOp().Set(Gf.Matrix4d(camera_tf)) camera_tf_inv = np.linalg.inv(camera_tf) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in 
range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf) gf_corners = np.dot(gf_corners, camera_tf_inv)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) async def test_camera_frame_Z(self): # TEST NESTED TRANSFORMS, UP AXIS # Create 2 cubes (size=1) under a parent prim stage = omni.usd.get_context().get_stage() UsdGeom.SetStageUpAxis(stage, "Z") parent = stage.DefinePrim("/World/Parent", "Xform") child1 = stage.DefinePrim("/World/Parent/Child1", "Cube") child2 = stage.DefinePrim("/World/Parent/Child2", "Cube") camera = stage.DefinePrim("/World/Camera", "Camera") child1.GetAttribute("size").Set(1.0) child2.GetAttribute("size").Set(1.0) utils.add_semantics(parent, "parent") utils.add_semantics(child1, "child1") utils.add_semantics(child2, "child2") UsdGeom.Xformable(parent).ClearXformOpOrder() UsdGeom.Xformable(child1).ClearXformOpOrder() UsdGeom.Xformable(child2).ClearXformOpOrder() UsdGeom.Xformable(camera).ClearXformOpOrder() UsdGeom.Xformable(parent).AddRotateYOp().Set(45) UsdGeom.Xformable(child1).AddTranslateOp().Set((-0.5, 0.5, 0.0)) UsdGeom.Xformable(child1).AddRotateYOp().Set(45) UsdGeom.Xformable(child2).AddTranslateOp().Set((0.5, -0.5, 0.0)) # Move camera with random transform camera_tf = np.eye(4) camera_tf[:3, :3] = Gf.Matrix3d(Gf.Rotation(np.random.rand(3).tolist(), np.random.rand(3).tolist())) camera_tf[3, :3] = np.random.rand(1, 3) UsdGeom.Xformable(camera).AddTransformOp().Set(Gf.Matrix4d(camera_tf)) camera_tf_inv = np.linalg.inv(camera_tf) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d( self.viewport, parsed=True, return_corners=True, camera_frame=True ) parent_bbox = [row for row in bbox3d_data if row["name"] == parent.GetPath()][0] child1_bbox = [row for row in bbox3d_data if row["name"] == child1.GetPath()][0] child2_bbox = [row for row in 
bbox3d_data if row["name"] == child2.GetPath()][0] # Only takes into account child transforms a = math.cos(math.pi / 4) parent_bounds = [[-a - 0.5, -1.0, -a], [1.0, 1.0, a]] child1_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] child2_bounds = [[-0.5, -0.5, -0.5], [0.5, 0.5, 0.5]] # Doesn't take into account transforms for bbox, bounds in zip([parent_bbox, child1_bbox, child2_bbox], [parent_bounds, child1_bounds, child2_bounds]): self.assertAlmostEqual(bbox["x_min"], bounds[0][0], places=5) self.assertAlmostEqual(bbox["y_min"], bounds[0][1], places=5) self.assertAlmostEqual(bbox["z_min"], bounds[0][2], places=5) self.assertAlmostEqual(bbox["x_max"], bounds[1][0], places=5) self.assertAlmostEqual(bbox["y_max"], bounds[1][1], places=5) self.assertAlmostEqual(bbox["z_max"], bounds[1][2], places=5) prim = stage.GetPrimAtPath(bbox["name"]) tf = np.array(UsdGeom.Imageable(prim).ComputeLocalToWorldTransform(0.0)) gf_range = Gf.Range3f(*bounds) gf_corners = np.array([gf_range.GetCorner(i) for i in range(8)]) gf_corners = np.pad(gf_corners, ((0, 0), (0, 1)), constant_values=1.0) gf_corners = np.dot(gf_corners, tf) gf_corners = np.dot(gf_corners, camera_tf_inv)[:, :3] assert np.allclose(bbox["corners"], gf_corners, atol=1e-5) @unittest.skip("OM-46398") async def test_bbox_3d_scene_instance(self): """ Test sensor on scene instance. """ path = os.path.join(FILE_DIR, "../data/scenes/scene_instance_test.usda") await omni.usd.get_context().open_stage_async(path) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_bounding_box_3d_(self.viewport) # should be 3 prims in the scene # TODO: add more complicated test assert len(data) == 3 async def test_bbox_3d_skinned_mesh(self): """ Test sensor on skeletal mesh. 
Also test visibility toggling """ path = os.path.join(FILE_DIR, "../data/scenes/can.usda") await omni.usd.get_context().open_stage_async(path) can_stage = omni.usd.get_context().get_stage() can_prim = can_stage.GetPrimAtPath("/Root/group1/pCylinder1") can_bounds = [[-2.25, -2.00, -0.11], [5.14, 2.00, 9.80]] await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.BoundingBox3D) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True, camera_frame=True) # should be 1 prims in the scene assert len(bbox3d_data) == 1 self.assertAlmostEqual(bbox3d_data[0]["x_min"], can_bounds[0][0], places=2) self.assertAlmostEqual(bbox3d_data[0]["y_min"], can_bounds[0][1], places=2) self.assertAlmostEqual(bbox3d_data[0]["z_min"], can_bounds[0][2], places=2) self.assertAlmostEqual(bbox3d_data[0]["x_max"], can_bounds[1][0], places=2) self.assertAlmostEqual(bbox3d_data[0]["y_max"], can_bounds[1][1], places=2) self.assertAlmostEqual(bbox3d_data[0]["z_max"], can_bounds[1][2], places=2) UsdGeom.Imageable(can_prim).MakeInvisible() # hide the can await omni.kit.app.get_app().next_update_async() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True, camera_frame=True) # should be 0 prims in the scene once the visibility is toggled assert len(bbox3d_data) == 0 UsdGeom.Imageable(can_prim).MakeVisible() # make the can visible again await omni.kit.app.get_app().next_update_async() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox3d_data = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True, return_corners=True, camera_frame=True) # should be 1 prims in the scene now that the can is visible again assert len(bbox3d_data) == 1 # ensure that the 3D bbox extents are the same after visibility 
has been toggled self.assertAlmostEqual(bbox3d_data[0]["x_min"], can_bounds[0][0], places=2) self.assertAlmostEqual(bbox3d_data[0]["y_min"], can_bounds[0][1], places=2) self.assertAlmostEqual(bbox3d_data[0]["z_min"], can_bounds[0][2], places=2) self.assertAlmostEqual(bbox3d_data[0]["x_max"], can_bounds[1][0], places=2) self.assertAlmostEqual(bbox3d_data[0]["y_max"], can_bounds[1][1], places=2) self.assertAlmostEqual(bbox3d_data[0]["z_max"], can_bounds[1][2], places=2) # After running each test async def tearDown(self): pass
33,169
Python
46.116477
141
0.610902
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_depth.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestDepth(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.Depth) async def test_parsed_empty(self): """ Test depth sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) assert data.sum() == 0 async def test_parsed_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) assert data.dtype == np.float32 async def test_distances(self): stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) assert np.isclose(data.min(), 0, atol=1e-5) # The front of the cube is 1 ahead of its center position assert np.isclose(data.max(), 1 / (n - 1), atol=1e-5) async def test_distances_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" # Set the rendering mode to be pathtracing settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) assert np.isclose(data.min(), 0, atol=1e-5) # The front of the cube is 1 ahead of its center position assert np.isclose(data.max(), 1 / (n - 1), atol=1e-5) async def test_distances_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. 
""" # Set the rendering mode to be pathtracing settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) assert np.isclose(data.min(), 0, atol=1e-5) # The front of the cube is 1 ahead of its center position assert np.isclose(data.max(), 1 / (n - 1), atol=1e-5) async def test_ftheta_camera(self): """ Test the functionality of the sensor under f-theta camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() # Add a cube at the centre of the scene cube_prim = stage.DefinePrim("/Cube", "Cube") add_semantics(cube_prim, "cube") cube = UsdGeom.Cube(cube_prim) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_depth(self.viewport) await omni.kit.app.get_app().next_update_async() # Centre of the data should be half of the cube edge's length, adjusted to correct scale. 
edge_length = cube.GetSizeAttr().Get() assert np.isclose(1 / (edge_length - 1), data.max(), atol=1e-3) assert np.isclose(1 / (np.sqrt(((edge_length) ** 2)*2) - 1), data[data > 0].min(), atol=1e-1) # After running each test async def tearDown(self): pass
6,951
Python
38.954023
141
0.630557
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_semantic_seg.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time from pathlib import Path import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics import unittest FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestSemanticSeg(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() await syn.sensors.initialize_async( self.viewport, [ syn._syntheticdata.SensorType.SemanticSegmentation, syn._syntheticdata.SensorType.InstanceSegmentation ] ) async def test_empty(self): """ Test semantic segmentation on empty stage. 
""" data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) assert data.sum() == 0 async def test_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) assert data.dtype == np.uint32 async def test_cube(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) # np.savez_compressed(self.golden_image_path / 'semantic_seg_cube.npz', array=data) golden_image = np.load(self.golden_image_path / "semantic_seg_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 0.1 async def test_cube_sphere(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) sphere_prim = stage.DefinePrim("/Sphere", "Sphere") UsdGeom.XformCommonAPI(sphere_prim).SetTranslate((300, 0, 0)) add_semantics(sphere_prim, "sphere") sphere = UsdGeom.Sphere(sphere_prim) sphere.GetRadiusAttr().Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_instance_segmentation(self.viewport) # np.savez_compressed(self.golden_image_path / 'instance_seg_cube.npz', array=data) assert len(data) != 0 async def test_cube_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) golden_image = np.load(self.golden_image_path / "semantic_seg_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 0.1 async def test_cube_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) golden_image = np.load(self.golden_image_path / "semantic_seg_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 0.1 async def test_cube_ftheta(self): """ Basic funtionality test of the sensor under f theta camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await omni.kit.app.get_app().next_update_async() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. 
camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((100, 100, 100)) self.viewport.camera_path = camera.GetPath() await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) # np.savez_compressed(self.golden_image_path / 'semantic_seg_cube_ftheta.npz', array=data) golden_image = np.load(self.golden_image_path / "semantic_seg_cube_ftheta.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 0.1 async def test_cube_spherical(self): """ Basic funtionality test of the sensor under spherical camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await omni.kit.app.get_app().next_update_async() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be spherical fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((100, 100, 100)) self.viewport.camera_path = camera.GetPath() await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) # np.savez_compressed(self.golden_image_path / 'semantic_seg_cube_spherical.npz', array=data) golden_image = np.load(self.golden_image_path / "semantic_seg_cube_spherical.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 0.1 @unittest.skip("OM-46393") async def test_geom_subset(self): """ Test sensor on GeomSubset. 
""" path = os.path.join(FILE_DIR, "../data/scenes/streetlamp_03_golden.usd") await omni.usd.get_context().open_stage_async(path) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) assert len(data) != 0 @unittest.skip("OM-46394") async def test_sem_seg_scene_instance(self): """ Test sensor on scene instance. """ path = os.path.join(FILE_DIR, "../data/scenes/scene_instance_test.usda") await omni.usd.get_context().open_stage_async(path) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_semantic_segmentation(self.viewport, parsed=True) # TODO add more complicated test assert len(data) != 0 # After running each test async def tearDown(self): pass
9,147
Python
40.022421
141
0.652892
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_bbox2d_tight.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import unittest import carb import numpy as np import omni.kit.test from pxr import Gf, UsdGeom from omni.kit.viewport.utility import get_active_viewport # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestBBox2DTight(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async( self.viewport, syn._syntheticdata.SensorType.BoundingBox2DTight ) async def test_parsed_empty(self): """ Test 2D bounding box on empty stage. 
""" bbox2d_data = syn.sensors.get_bounding_box_2d_tight(self.viewport) assert not bool(bbox2d_data) async def test_fields_exist(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_tight(self.viewport) valid_dtype = [ ("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O"), ("semanticId", "<u4"), ("x_min", "<i4"), ("y_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4"), ] assert bbox2d_data.dtype == np.dtype(valid_dtype) async def test_cube(self): """ Basic test for the sensor. """ stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_tight(self.viewport) assert bbox2d_data[0] x_min, y_min, x_max, y_max = bbox2d_data[0][6], bbox2d_data[0][7], bbox2d_data[0][8], bbox2d_data[0][9] assert x_min == 301 assert y_min == 21 assert x_max == 978 assert y_max == 698 @unittest.skip("OM-46398") async def test_bbox_2d_tight_scene_instance(self): """ Test sensor on scene instance. """ settings = carb.settings.get_settings() if settings.get("/rtx/hydra/enableSemanticSchema"): path = os.path.join(FILE_DIR, "../data/scenes/scene_instance_test.usda") await omni.usd.get_context().open_stage_async(path) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_bounding_box_2d_tight(self.viewport) # should be 3 prims in the scene. 
# TODO: Add more complicated test assert len(data) == 3 async def test_cube_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_tight(self.viewport) x_min, y_min, x_max, y_max = bbox2d_data[0][6], bbox2d_data[0][7], bbox2d_data[0][8], bbox2d_data[0][9] assert x_min == 301 assert y_min == 21 assert x_max == 978 assert y_max == 698 async def test_cube_ray_traced_lighting(self): """ Basic test for the sensor, but in ray traced lighting mode. """ # Set the rendering mode to be ray traced lighting. 
settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_tight(self.viewport) x_min, y_min, x_max, y_max = bbox2d_data[0][6], bbox2d_data[0][7], bbox2d_data[0][8], bbox2d_data[0][9] assert x_min == 301 assert y_min == 21 assert x_max == 978 assert y_max == 698 # After running each test async def tearDown(self): pass
6,427
Python
34.711111
141
0.616617
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_distance_to_camera.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os from time import time from pathlib import Path import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestDistanceToCamera(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.DistanceToCamera) async def test_parsed_empty(self): """ Test distance-to-camera sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) assert np.all(data > 1000) async def test_parsed_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) assert data.dtype == np.float32 async def test_distances(self): stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position # TODO get a more precise calculation of eye distance assert np.isclose(data.min(), (n - 1) / 100, atol=1e-1) async def test_distances_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" # Set the rendering mode to be pathtracing settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position # TODO get a more precise calculation of eye distance assert np.isclose(data.min(), (n - 1) / 100, atol=1e-1) async def test_distances_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. 
""" # Set the rendering mode to be pathtracing settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() for n in range(10, 100, 10): cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # n = 5 UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -n)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) assert data.max() > 1000 # The front of the cube is 1 ahead of its center position # TODO get a more precise calculation of eye distance assert np.isclose(data.min(), (n - 1) / 100, atol=1e-1) async def test_ftheta_camera(self): """ Test the functionality of the sensor under f-theta camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. 
camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() # Add a cube at the centre of the scene cube_prim = stage.DefinePrim("/Cube", "Cube") add_semantics(cube_prim, "cube") cube = UsdGeom.Cube(cube_prim) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) await omni.kit.app.get_app().next_update_async() # Centre of the data should be half of the cube edge's length, adjusted to correct scale. edge_length = (cube.GetSizeAttr().Get() - 1) / 100 # The max should be sqrt(((edge_length / 2) ** 2) * 2), which a pinhole camera won't see. assert np.isclose(np.sqrt(((edge_length / 2) ** 2)*2), data[data != np.inf].max(), atol=1e-3) async def test_spherical_camera(self): """ Test the functionality of the sensor under fisheye spherical camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be spherical camera camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") # Set the Camera at the centre of the stage. 
UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() sphere_prim = stage.DefinePrim("/Sphere", "Sphere") add_semantics(sphere_prim, "sphere") sphere = UsdGeom.Sphere(sphere_prim) sphere.GetRadiusAttr().Set(20) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_distance_to_camera(self.viewport) # np.savez_compressed(self.golden_image_path / 'distance_to_camera_spherical.npz', array=data) golden_image = np.load(self.golden_image_path / "distance_to_camera_spherical.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 # After running each test async def tearDown(self): pass
8,890
Python
40.353488
141
0.637233
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_normals.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time from pathlib import Path import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestNormals(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.Normal) async def test_parsed_empty(self): """ Test normals sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) assert np.allclose(data, 0, 1e-3) async def test_parsed_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) assert data.dtype == np.float32 async def test_neg_z(self): """ Test that negative z faces are distinct from background """ stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddRotateYOp().Set(180) UsdGeom.Xformable(camera).AddTranslateOp().Set((0.0, 0.0, 20.0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) assert len(np.unique(data)) == 2 async def test_rotated_cube(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) # np.savez_compressed(self.golden_image_path / 'normals_cube.npz', array=data) golden_image = np.load(self.golden_image_path / "normals_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 async def test_rotated_cube_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) # np.savez_compressed(self.golden_image_path / 'normals_cube.npz', array=data) golden_image = np.load(self.golden_image_path / "normals_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 async def test_rotated_cube_ray_traced_lighting(self): """ Basic funtionality test of the sensor, but in ray traced lighting. """ # Set the rendering mode to be ray traced lighting. settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) # np.savez_compressed(self.golden_image_path / 'normals_cube.npz', array=data) golden_image = np.load(self.golden_image_path / "normals_cube.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 async def test_rotated_cube_ftheta(self): """ Basic funtionality test of the sensor in f theta camera. """ # Set the mode to path traced for f theta camera. 
settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) await omni.kit.app.get_app().next_update_async() # Setting up camera. camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((200, 200, 200)) self.viewport.camera_path = camera.GetPath() await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) # np.savez_compressed(self.golden_image_path / 'normals_cube_ftheta.npz', array=data) golden_image = np.load(self.golden_image_path / "normals_cube_ftheta.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 async def test_rotated_cube_spherical(self): """ Basic funtionality test of the sensor in fisheye spherical camera. """ # Set the mode to path traced. settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) # Setting up camera. camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. 
camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") # Set the Camera's position UsdGeom.Xformable(camera).AddTranslateOp().Set((200, 200, 200)) self.viewport.camera_path = camera.GetPath() await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_normals(self.viewport) # np.savez_compressed(self.golden_image_path / 'normals_cube_spherical.npz', array=data) golden_image = np.load(self.golden_image_path / "normals_cube_spherical.npz")["array"] std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 # After running each test async def tearDown(self): pass
8,401
Python
40.800995
141
0.653256
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_rgb.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time from pathlib import Path import unittest from PIL import Image import carb import numpy as np from numpy.lib.arraysetops import unique import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf, UsdLux # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestRGB(omni.kit.test.AsyncTestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden" # Before running each test async def setUp(self): np.random.seed(1234) settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async(self.viewport, syn._syntheticdata.SensorType.Rgb) async def test_empty(self): """ Test RGB sensor on empty stage. 
""" # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_rgb(self.viewport) std_dev = np.sqrt(np.square(data - np.zeros_like(data)).astype(float).mean()) assert std_dev < 2 async def test_cube(self): """ Test RGB sensor on stage with cube. """ stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) cube.GetAttribute("primvars:displayColor").Set([(0, 0, 1)]) await omni.kit.app.get_app().next_update_async() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_rgb(self.viewport) golden_image = np.asarray(Image.open(str(self.golden_image_path / "rgb_cube.png"))) std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 async def test_dtype(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_rgb(self.viewport) assert data.dtype == np.uint8 @unittest.skip("OM-44741") async def test_cube_polynomial(self): """ Test RGB sensor on stage with cube. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) cube.GetAttribute("primvars:displayColor").Set([(0, 0, 1)]) await omni.kit.app.get_app().next_update_async() # TODO: Add a light camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be spherical camera camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 200)) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_rgb(self.viewport) # image = Image.fromarray(data) # image.save(str(self.golden_image_path / "rgb_cube_ftheta.png")) golden_image = np.asarray(Image.open(str(self.golden_image_path / "rgb_cube_ftheta.png"))) std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 @unittest.skip("OM-44741") async def test_cube_spherical(self): """ Test RGB sensor on stage with cube. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") cube.GetAttribute("size").Set(100) cube.GetAttribute("primvars:displayColor").Set([(0, 0, 1)]) await omni.kit.app.get_app().next_update_async() # TODO: Add a light camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be spherical camera camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 200)) self.viewport.camera_path = camera.GetPath() # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) data = syn.sensors.get_rgb(self.viewport) # image = Image.fromarray(data) # image.save(str(self.golden_image_path / "rgb_cube_spherical.png")) golden_image = np.asarray(Image.open(str(self.golden_image_path / "rgb_cube_spherical.png"))) std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean()) assert std_dev < 2 # After running each test async def tearDown(self): pass
6,372
Python
38.098159
141
0.650345
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_rendervar_buff_host_ptr.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import unittest import numpy as np import ctypes import omni.kit.test from omni.gpu_foundation_factory import TextureFormat from omni.kit.viewport.utility import get_active_viewport from pxr import UsdGeom, UsdLux # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics # Test the SyntheticData following nodes : # - SdPostRenderVarTextureToBuffer : node to convert a texture device rendervar into a buffer device rendervar # - SdPostRenderVarToHost : node to readback a device rendervar into a host rendervar # - SdRenderVarPtr : node to expose in the action graph, raw device / host pointers on the renderVars # # the tests consists in pulling the ptr data and comparing it with the data ouputed by : # - SdRenderVarToRawArray # class TestRenderVarBuffHostPtr(omni.kit.test.AsyncTestCase): _tolerance = 1.1 _outputs_ptr = ["outputs:dataPtr","outputs:width","outputs:height","outputs:bufferSize","outputs:format", "outputs:strides"] _outputs_arr = ["outputs:data","outputs:width","outputs:height","outputs:bufferSize","outputs:format"] @staticmethod def _texture_element_size(texture_format): if texture_format == int(TextureFormat.RGBA16_SFLOAT): return 8 elif texture_format == int(TextureFormat.RGBA32_SFLOAT): return 16 elif texture_format == int(TextureFormat.R32_SFLOAT): return 4 elif texture_format == int(TextureFormat.RGBA8_UNORM): return 4 elif texture_format == int(TextureFormat.R32_UINT): return 4 else: return 0 @staticmethod def _assert_equal_tex_infos(out_a, out_b): assert((out_a["outputs:width"] == out_b["outputs:width"]) and (out_a["outputs:height"] == out_b["outputs:height"]) and (out_a["outputs:format"] == out_b["outputs:format"])) 
@staticmethod def _assert_equal_buff_infos(out_a, out_b): assert((out_a["outputs:bufferSize"] == out_b["outputs:bufferSize"])) @staticmethod def _assert_equal_data(data_a, data_b): assert(np.amax(np.square(data_a - data_b)) < TestRenderVarBuffHostPtr._tolerance) def _get_raw_array(self, render_var): ptr_outputs = syn.SyntheticData.Get().get_node_attributes(render_var + "ExportRawArray", TestRenderVarBuffHostPtr._outputs_arr, self.render_product) is_texture = ptr_outputs["outputs:width"] > 0 if is_texture: elem_size = TestRenderVarBuffHostPtr._texture_element_size(ptr_outputs["outputs:format"]) arr_shape = (ptr_outputs["outputs:height"], ptr_outputs["outputs:width"], elem_size) ptr_outputs["outputs:data"] = ptr_outputs["outputs:data"].reshape(arr_shape) return ptr_outputs def _get_ptr_array(self, render_var, ptr_suffix): ptr_outputs = syn.SyntheticData.Get().get_node_attributes(render_var + ptr_suffix, TestRenderVarBuffHostPtr._outputs_ptr, self.render_product) c_ptr = ctypes.cast(ptr_outputs["outputs:dataPtr"],ctypes.POINTER(ctypes.c_ubyte)) is_texture = ptr_outputs["outputs:width"] > 0 if is_texture: elem_size = TestRenderVarBuffHostPtr._texture_element_size(ptr_outputs["outputs:format"]) arr_shape = (ptr_outputs["outputs:height"], ptr_outputs["outputs:width"], elem_size) arr_strides = ptr_outputs["outputs:strides"] buffer_size = arr_strides[1] * arr_shape[1] arr_strides = (arr_strides[1], arr_strides[0], 1) data_ptr = np.ctypeslib.as_array(c_ptr,shape=(buffer_size,)) data_ptr = np.lib.stride_tricks.as_strided(data_ptr, shape=arr_shape, strides=arr_strides) else: data_ptr = np.ctypeslib.as_array(c_ptr,shape=(ptr_outputs["outputs:bufferSize"],)) ptr_outputs["outputs:dataPtr"] = data_ptr return ptr_outputs def _assert_equal_rv_ptr(self, render_var:str, ptr_suffix:str, texture=None): arr_out = self._get_raw_array(render_var) ptr_out = self._get_ptr_array(render_var,ptr_suffix) if not texture is None: if texture: 
TestRenderVarBuffHostPtr._assert_equal_tex_infos(arr_out,ptr_out) else: TestRenderVarBuffHostPtr._assert_equal_buff_infos(arr_out,ptr_out) TestRenderVarBuffHostPtr._assert_equal_data(arr_out["outputs:data"],ptr_out["outputs:dataPtr"]) def _assert_equal_rv_ptr_size(self, render_var:str, ptr_suffix:str, arr_size:int): ptr_out = self._get_ptr_array(render_var,ptr_suffix) data_ptr = ptr_out["outputs:dataPtr"] # helper for setting the value : print the size if None if arr_size is None: print(f"EqualRVPtrSize : {render_var} = {data_ptr.size}") else: assert(data_ptr.size==arr_size) def _assert_equal_rv_arr(self, render_var:str, ptr_suffix:str, texture=None): arr_out_a = self._get_raw_array(render_var) arr_out_b = self._get_raw_array(render_var+ptr_suffix) if not texture is None: if texture: TestRenderVarBuffHostPtr._assert_equal_tex_infos(arr_out_a,arr_out_b) else: TestRenderVarBuffHostPtr._assert_equal_buff_infos(arr_out_a,arr_out_b) TestRenderVarBuffHostPtr._assert_equal_data( arr_out_a["outputs:data"].flatten(),arr_out_b["outputs:data"].flatten()) def _assert_executed_rv_ptr(self, render_var:str, ptr_suffix:str): ptr_outputs = syn.SyntheticData.Get().get_node_attributes(render_var + ptr_suffix, ["outputs:exec"], self.render_product) assert(ptr_outputs["outputs:exec"]>0) def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) async def setUp(self): await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() world_prim = UsdGeom.Xform.Define(stage,"/World") UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0)) UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 0, 0)) sphere_prim = stage.DefinePrim("/World/Sphere", "Sphere") add_semantics(sphere_prim, "sphere") UsdGeom.Xformable(sphere_prim).AddTranslateOp().Set((0, 0, 0)) UsdGeom.Xformable(sphere_prim).AddScaleOp().Set((77, 77, 77)) UsdGeom.Xformable(sphere_prim).AddRotateXYZOp().Set((-90, 0, 0)) 
sphere_prim.GetAttribute("primvars:displayColor").Set([(1, 0.3, 1)]) capsule0_prim = stage.DefinePrim("/World/Sphere/Capsule0", "Capsule") add_semantics(capsule0_prim, "capsule0") UsdGeom.Xformable(capsule0_prim).AddTranslateOp().Set((3, 0, 0)) UsdGeom.Xformable(capsule0_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule0_prim.GetAttribute("primvars:displayColor").Set([(0.3, 1, 0)]) capsule1_prim = stage.DefinePrim("/World/Sphere/Capsule1", "Capsule") add_semantics(capsule1_prim, "capsule1") UsdGeom.Xformable(capsule1_prim).AddTranslateOp().Set((-3, 0, 0)) UsdGeom.Xformable(capsule1_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule1_prim.GetAttribute("primvars:displayColor").Set([(0, 1, 0.3)]) capsule2_prim = stage.DefinePrim("/World/Sphere/Capsule2", "Capsule") add_semantics(capsule2_prim, "capsule2") UsdGeom.Xformable(capsule2_prim).AddTranslateOp().Set((0, 3, 0)) UsdGeom.Xformable(capsule2_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule2_prim.GetAttribute("primvars:displayColor").Set([(0.7, 0.1, 0.4)]) capsule3_prim = stage.DefinePrim("/World/Sphere/Capsule3", "Capsule") add_semantics(capsule3_prim, "capsule3") UsdGeom.Xformable(capsule3_prim).AddTranslateOp().Set((0, -3, 0)) UsdGeom.Xformable(capsule3_prim).AddRotateXYZOp().Set((0, 0, 0)) capsule3_prim.GetAttribute("primvars:displayColor").Set([(0.1, 0.7, 0.4)]) spherelight = UsdLux.SphereLight.Define(stage, "/SphereLight") spherelight.GetIntensityAttr().Set(30000) spherelight.GetRadiusAttr().Set(30) self.viewport = get_active_viewport() self.render_product = self.viewport.render_product_path await omni.kit.app.get_app().next_update_async() async def test_host_arr(self): render_vars = [ "BoundingBox2DLooseSD", "SemanticLocalTransformSD" ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "ExportRawArray", 0, [self.render_product]) syn.SyntheticData.Get().activate_node_template(render_var + "hostExportRawArray", 0, [self.render_product]) await 
syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_equal_rv_arr(render_var,"host", False) async def test_host_ptr_size(self): render_vars = { "BoundingBox3DSD" : 576, "BoundingBox2DLooseSD" : 144, "SemanticLocalTransformSD" : 320, "Camera3dPositionSD" : 14745600, "SemanticMapSD" : 10, "InstanceSegmentationSD" : 3686400, "SemanticBoundingBox3DCamExtentSD" : 120, "SemanticBoundingBox3DFilterInfosSD" : 24 } for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "hostPtr", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var, arr_size in render_vars.items(): self._assert_equal_rv_ptr_size(render_var,"hostPtr", arr_size) async def test_buff_arr(self): render_vars = [ "Camera3dPositionSD", "DistanceToImagePlaneSD", ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "ExportRawArray", 0, [self.render_product]) syn.SyntheticData.Get().activate_node_template(render_var + "buffExportRawArray", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_equal_rv_arr(render_var, "buff") async def test_host_ptr(self): render_vars = [ "BoundingBox2DTightSD", "BoundingBox3DSD", "InstanceMapSD" ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "ExportRawArray", 0, [self.render_product]) syn.SyntheticData.Get().activate_node_template(render_var + "hostPtr", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_equal_rv_ptr(render_var,"hostPtr",False) self._assert_executed_rv_ptr(render_var,"hostPtr") async def test_host_ptr_tex(self): render_vars = [ "NormalSD", "DistanceToCameraSD" ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + 
"ExportRawArray", 0, [self.render_product]) syn.SyntheticData.Get().activate_node_template(render_var + "hostPtr", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_equal_rv_ptr(render_var,"hostPtr",True) async def test_buff_host_ptr(self): render_vars = [ "LdrColorSD", "InstanceSegmentationSD", ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "ExportRawArray", 0, [self.render_product]) syn.SyntheticData.Get().activate_node_template(render_var + "buffhostPtr", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_equal_rv_ptr(render_var, "buffhostPtr",True) async def test_empty_semantic_host_ptr(self): await omni.usd.get_context().new_stage_async() self.viewport = get_active_viewport() self.render_product = self.viewport.render_product_path await omni.kit.app.get_app().next_update_async() render_vars = [ "BoundingBox2DTightSD", "BoundingBox3DSD", "InstanceMapSD" ] for render_var in render_vars: syn.SyntheticData.Get().activate_node_template(render_var + "hostPtr", 0, [self.render_product]) await syn.sensors.next_render_simulation_async(self.render_product, 1) for render_var in render_vars: self._assert_executed_rv_ptr(render_var,"hostPtr") # After running each test async def tearDown(self): pass
13,257
Python
48.103704
156
0.646225
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_bbox2d_loose.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import unittest import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import Gf, UsdGeom, Sdf # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) TIMEOUT = 200 # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestBBox2DLoose(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): np.random.seed(1234) # Setup viewport self.viewport = get_active_viewport() # Initialize Sensor await omni.usd.get_context().new_stage_async() stage = omni.usd.get_context().get_stage() await omni.kit.app.get_app().next_update_async() await syn.sensors.create_or_retrieve_sensor_async( self.viewport, syn._syntheticdata.SensorType.BoundingBox2DLoose ) async def test_parsed_empty(self): """ Test 2D bounding box on empty stage. 
""" bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert not bool(bbox2d_data) async def test_bbox_2d_loose_fields_exist(self): stage = omni.usd.get_context().get_stage() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) valid_dtype = [ ("uniqueId", "<i4"), ("name", "O"), ("semanticLabel", "O"), ("metadata", "O"), ("instanceIds", "O"), ("semanticId", "<u4"), ("x_min", "<i4"), ("y_min", "<i4"), ("x_max", "<i4"), ("y_max", "<i4"), ] assert bbox2d_data.dtype == np.dtype(valid_dtype) async def test_bbox_2d_loose_cube(self): """ Basic test for the sensor. """ stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert bbox2d_data['x_min'] == 301 assert bbox2d_data['y_min'] == 21 assert bbox2d_data['x_max'] == 978 assert bbox2d_data['y_max'] == 698 async def test_cube_pathtracing(self): """ Basic funtionality test of the sensor, but in path tracing mode. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert bbox2d_data['x_min'] == 301 assert bbox2d_data['y_min'] == 21 assert bbox2d_data['x_max'] == 978 assert bbox2d_data['y_max'] == 698 async def test_cube_ray_traced_lighting(self): """ Basic test for the sensor, but in ray traced lighting mode. """ # Set the rendering mode to be ray traced lighting. settings_interface = carb.settings.get_settings() settings_interface.set_string("/rtx/rendermode", "RayTracedLighting") stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert bbox2d_data['x_min'] == 301 assert bbox2d_data['y_min'] == 21 assert bbox2d_data['x_max'] == 978 assert bbox2d_data['y_max'] == 698 async def test_cube_ftheta(self): """ Basic funtionality test of the sensor in ftheta camera. 
""" settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyePolynomial") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert bbox2d_data['x_min'] == 612 assert bbox2d_data['y_min'] == 325 assert bbox2d_data['x_max'] == 671 assert bbox2d_data['y_max'] == 384 async def test_cube_spherical(self): """ Basic funtionality test of the sensor in fisheye spherical camera. """ settings = carb.settings.get_settings() settings.set_string("/rtx/rendermode", "PathTracing") settings.set_int("/rtx/pathtracing/spp", 32) settings.set_int("/persistent/app/viewport/displayOptions", 0) stage = omni.usd.get_context().get_stage() camera = stage.DefinePrim("/Camera", "Camera") # Set the camera to be polynomial fish eye camera. 
camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set("fisheyeSpherical") UsdGeom.Xformable(camera).AddTranslateOp().Set((0, 0, 0)) self.viewport.camera_path = camera.GetPath() await omni.kit.app.get_app().next_update_async() cube = stage.DefinePrim("/Cube", "Cube") add_semantics(cube, "cube") UsdGeom.XformCommonAPI(cube).SetTranslate((0, 0, -10)) # Render one frame await syn.sensors.next_sensor_data_async(self.viewport,True) bbox2d_data = syn.sensors.get_bounding_box_2d_loose(self.viewport) assert bbox2d_data['x_min'] == 617 assert bbox2d_data['y_min'] == 335 assert bbox2d_data['x_max'] == 662 assert bbox2d_data['y_max'] == 384 # After running each test async def tearDown(self): pass
8,176
Python
36.337899
141
0.627691
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/sensors/test_instance_seg.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os
import math
import asyncio
from time import time
from pathlib import Path
import unittest

import carb
import numpy as np
import omni.kit.test
from omni.kit.viewport.utility import get_active_viewport
from pxr import Gf, UsdGeom, Sdf

# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
import omni.syntheticdata as syn

from ..utils import add_semantics

FILE_DIR = os.path.dirname(os.path.realpath(__file__))
TIMEOUT = 200


# Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test
class TestInstanceSeg(omni.kit.test.AsyncTestCase):
    """Golden-image and sanity tests for the InstanceSegmentation synthetic-data sensor.

    Each rendering test builds a tiny scene, renders a single frame and compares the
    sensor output against a stored golden array using the RMS pixel error.
    """

    # Maximum RMS deviation tolerated between rendered data and the golden array.
    MAX_STD_DEV = 2

    def __init__(self, methodName: str) -> None:
        super().__init__(methodName=methodName)
        self.golden_image_path = Path(os.path.dirname(os.path.abspath(__file__))) / ".." / "data" / "golden"

    # Before running each test
    async def setUp(self):
        np.random.seed(1234)
        # Setup viewport
        self.viewport = get_active_viewport()
        # Initialize Sensor on a fresh stage
        await omni.usd.get_context().new_stage_async()
        await omni.kit.app.get_app().next_update_async()
        await syn.sensors.create_or_retrieve_sensor_async(
            self.viewport, syn._syntheticdata.SensorType.InstanceSegmentation
        )

    # ---------------------------------------------------------------- helpers

    def _create_semantic_cube(self, size=None):
        """Define /Cube with a 'cube' semantic label; optionally set its size. Returns the prim."""
        stage = omni.usd.get_context().get_stage()
        cube = stage.DefinePrim("/Cube", "Cube")
        add_semantics(cube, "cube")
        if size is not None:
            cube.GetAttribute("size").Set(size)
        return cube

    def _assert_matches_golden(self, data, golden_name):
        """Assert the RMS error between ``data`` and the stored golden array is below tolerance."""
        golden_image = np.load(self.golden_image_path / golden_name)["array"]
        std_dev = np.sqrt(np.square(data - golden_image).astype(float).mean())
        assert std_dev < self.MAX_STD_DEV

    @staticmethod
    def _enable_path_tracing():
        """Switch the renderer to path tracing with a fixed sample count and no display overlays."""
        settings = carb.settings.get_settings()
        settings.set_string("/rtx/rendermode", "PathTracing")
        settings.set_int("/rtx/pathtracing/spp", 32)
        settings.set_int("/persistent/app/viewport/displayOptions", 0)

    def _activate_fisheye_camera(self, projection_type):
        """Create /Camera with the given fisheye projection token and make it the active camera."""
        stage = omni.usd.get_context().get_stage()
        camera = stage.DefinePrim("/Camera", "Camera")
        camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set(projection_type)
        # Set the Camera's position
        UsdGeom.Xformable(camera).AddTranslateOp().Set((100, 100, 100))
        self.viewport.camera_path = camera.GetPath()

    async def _open_scene_and_get_segmentation(self, scene_name, parsed=False):
        """Open a test scene, (re)create the sensor, render one frame and return the segmentation."""
        path = os.path.join(FILE_DIR, "../data/scenes/" + scene_name)
        await omni.usd.get_context().open_stage_async(path)
        await omni.kit.app.get_app().next_update_async()
        await syn.sensors.create_or_retrieve_sensor_async(
            self.viewport, syn._syntheticdata.SensorType.InstanceSegmentation
        )
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        return syn.sensors.get_instance_segmentation(self.viewport, parsed=parsed)

    # ------------------------------------------------------------------ tests

    # TODO
    # async def test_parsed_empty(self):
    #     """ Test instance segmentation on empty stage.
    #     """
    #     data = syn.sensors.get_instance_segmentation(self.viewport, parsed=True)
    #     assert data.sum() == 0

    async def test_parsed_dtype(self):
        """Parsed instance segmentation output must be uint32."""
        self._create_semantic_cube()
        await omni.kit.app.get_app().next_update_async()
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport, parsed=True)
        assert data.dtype == np.uint32

    async def test_cube(self):
        """Golden-image check for a single semantically labeled cube."""
        self._create_semantic_cube(size=100)
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport, return_mapping=False)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube.npz")

    async def test_cube_sphere(self):
        """Golden-image check for a cube plus an offset sphere."""
        self._create_semantic_cube(size=100)
        stage = omni.usd.get_context().get_stage()
        sphere_prim = stage.DefinePrim("/Sphere", "Sphere")
        UsdGeom.XformCommonAPI(sphere_prim).SetTranslate((300, 0, 0))
        add_semantics(sphere_prim, "sphere")
        UsdGeom.Sphere(sphere_prim).GetRadiusAttr().Set(100)
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube_sphere.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube_sphere.npz")

    async def test_cube_pathtracing(self):
        """Basic functionality test of the sensor, but in path tracing mode."""
        self._enable_path_tracing()
        self._create_semantic_cube(size=100)
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube_pathtracing.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube_pathtracing.npz")

    async def test_cube_ray_traced_lighting(self):
        """Basic functionality test of the sensor, but in ray traced lighting mode."""
        # Set the rendering mode to be ray traced lighting.
        carb.settings.get_settings().set_string("/rtx/rendermode", "RayTracedLighting")
        self._create_semantic_cube(size=100)
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube_ray_traced_lighting.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube_ray_traced_lighting.npz")

    async def test_cube_ftheta(self):
        """Basic functionality test of the sensor under a polynomial fisheye (f-theta) camera."""
        self._enable_path_tracing()
        self._create_semantic_cube(size=100)
        await omni.kit.app.get_app().next_update_async()
        self._activate_fisheye_camera("fisheyePolynomial")
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube_ftheta.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube_ftheta.npz")

    async def test_cube_spherical(self):
        """Basic functionality test of the sensor under a spherical fisheye camera."""
        self._enable_path_tracing()
        self._create_semantic_cube(size=100)
        await omni.kit.app.get_app().next_update_async()
        self._activate_fisheye_camera("fisheyeSpherical")
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        # np.savez_compressed(self.golden_image_path / 'instance_seg_cube_spherical.npz', array=data)
        self._assert_matches_golden(data, "instance_seg_cube_spherical.npz")

    @unittest.skip("OM-46393")
    async def test_geom_subset(self):
        """Test sensor on GeomSubset."""
        path = os.path.join(FILE_DIR, "../data/scenes/streetlamp_03_golden.usd")
        await omni.usd.get_context().open_stage_async(path)
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        data = syn.sensors.get_instance_segmentation(self.viewport)
        assert len(data) != 0

    async def test_instance_seg_scene_instance(self):
        """Test sensor on scene instance."""
        data = await self._open_scene_and_get_segmentation("scene_instance_test.usda")
        assert len(data) != 0

    async def test_instance_seg_scene_instance_benchchair(self):
        """Test sensor on scene instanced bench and chair data."""
        data = await self._open_scene_and_get_segmentation("BenchChair_SceneInstance_Mini.usda", parsed=True)
        assert len(data) != 0
        # should be 4 semantic objects in the scene.
        assert data.max() == 4

    async def test_instance_seg_point_instance_benchchair(self):
        """Test sensor on point instanced bench and chair data."""
        data = await self._open_scene_and_get_segmentation("BenchChair_Mini.usda", parsed=True)
        assert len(data) != 0
        assert data.max() == 2

    async def test_instance_seg_point_instance_shapes(self):
        """Test sensor on point instanced shapes that have semantics on the mesh."""
        data = await self._open_scene_and_get_segmentation("point_instancer_semantic_shapes.usda", parsed=True)
        assert len(data) != 0
        assert data.max() == 2

    # After running each test
    async def tearDown(self):
        pass
12,645
Python
40.598684
141
0.652669
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/helpers/test_projection.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os
import math
import asyncio
from time import time

import carb
import numpy as np
import omni.kit.test
from omni.kit.viewport.utility import get_active_viewport
from pxr import Sdf, UsdGeom, Vt

# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
import omni.syntheticdata as syn

from ..utils import add_semantics

FILE_DIR = os.path.dirname(os.path.realpath(__file__))


# Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test
class TestProjection(omni.kit.test.AsyncTestCase):
    """Tests for world<->image projection helpers under pinhole and fisheye camera models."""

    # Before running each test
    async def setUp(self):
        await omni.usd.get_context().new_stage_async()
        # Setup viewport
        self.viewport = get_active_viewport()
        self.stage = omni.usd.get_context().get_stage()
        prim = self.stage.DefinePrim("/World", "Xform")
        self.stage.SetDefaultPrim(prim)
        cube = self.stage.DefinePrim("/World/Cube", "Cube")
        add_semantics(cube, "cube")

        usd_camera = UsdGeom.Camera.Define(self.stage, "/World/Camera")
        usd_camera.AddTranslateOp()
        self.camera = usd_camera.GetPrim()
        self.camera.CreateAttribute("cameraProjectionType", Sdf.ValueTypeNames.Token).Set(Vt.Token("pinhole"))
        # Default f-theta intrinsics used by the fisheye tests below.
        for attr_name, value in [
            ("fthetaWidth", 960),
            ("fthetaHeight", 604),
            ("fthetaCx", 460),
            ("fthetaCy", 340),
            ("fthetaMaxFov", 200.0),
            ("fthetaPolyA", 0.0),
            ("fthetaPolyB", 0.0059),
            ("fthetaPolyC", 0.0),
            ("fthetaPolyD", 0.0),
            ("fthetaPolyE", 0.0),
        ]:
            self.camera.CreateAttribute(attr_name, Sdf.ValueTypeNames.Float).Set(value)
        self.viewport.camera_path = self.camera.GetPath()
        syn.sensors.enable_sensors(self.viewport, [syn._syntheticdata.SensorType.BoundingBox3D])
        await syn.sensors.next_sensor_data_async(self.viewport, True)

    # ---------------------------------------------------------------- helpers

    async def _render_and_project_corners(self):
        """Render a frame, fetch the 3D bbox corners and project them to image space.

        Returns (corners, projected) where projected has shape (-1, 8, 3).
        """
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        bbox3d = syn.sensors.get_bounding_box_3d(self.viewport, return_corners=True, parsed=True)
        corners = bbox3d["corners"]
        projected = syn.helpers.world_to_image(corners.reshape(-1, 3), self.viewport).reshape(-1, 8, 3)
        return corners, projected

    def _check_inverse_projection(self, corners, projected, debug_sphere_radius=None):
        """Run the projection in reverse and verify the rays point at the original corners.

        Optionally places small debug spheres at the reconstructed positions for
        visual inspection.
        """
        view_params = syn.helpers.get_view_params(self.viewport)
        proj_i2w = projected[0, :, :2]
        proj_i2w[..., 0] *= view_params["width"]
        proj_i2w[..., 1] *= view_params["height"]
        origin, directions = syn.helpers.image_to_world(proj_i2w, view_params)
        gt_corner_directions = corners[0] - origin
        gt_corner_directions /= np.linalg.norm(gt_corner_directions, axis=1, keepdims=True)
        assert np.allclose(gt_corner_directions, directions)

        # FOR VISUAL DEBUGGING
        self.camera.GetAttribute("clippingRange").Set((0.1, 1000000))
        for i, d in enumerate(directions):
            s = self.stage.DefinePrim(f"/World/pt{i}", "Sphere")
            UsdGeom.Xformable(s).AddTranslateOp().Set(tuple((d + origin).tolist()))
            if debug_sphere_radius is not None:
                s.GetAttribute("radius").Set(debug_sphere_radius)

    # ------------------------------------------------------------------ tests

    async def test_fisheye_polynomial_max_fov(self):
        """Test that fisheye polynomial projection can safely project world space points outside of max FOV."""
        self.camera.GetAttribute("cameraProjectionType").Set(Vt.Token("fisheyePolynomial"))
        self.camera.GetAttribute("xformOp:translate").Set((0.0, 0.0, 0.0))
        self.camera.GetAttribute("fthetaMaxFov").Set(120)
        self.camera.GetAttribute("fthetaPolyB").Set(0.0005)
        # introduce a max in the polynomial around r = 4082.5 which has theta = 1.360827 rads (~80 deg).
        self.camera.GetAttribute("fthetaPolyD").Set(-1E-11)

        # A correct fish eye camera projection will have monotonically increasing r as theta increases.
        stationary_angle = 1.360827
        theta_spacing = (2.0 * math.pi) / 90.0  # 4 deg spacing
        num_adjacent_points = 8  # test monotonic behaviour for 40 degrees on each side of max point
        start_angle = stationary_angle - num_adjacent_points * theta_spacing
        thetas = start_angle + theta_spacing * np.arange(2 * num_adjacent_points)
        # place points in the x-z plane; camera looks down z-axis in negative direction
        points = np.stack([np.sin(thetas), np.zeros_like(thetas), -np.cos(thetas)], axis=1)

        projected = syn.helpers.world_to_image(points, self.viewport)
        r = np.linalg.norm(projected, axis=1)
        # check each element is greater than the element before it
        assert np.all(r[1:] > r[:-1])

    async def test_pinhole(self):
        """Test pinhole projection."""
        self.camera.GetAttribute("xformOp:translate").Set((0.0, 0.0, 9.0))
        _, projected = await self._render_and_project_corners()
        # GT
        # Confirmed visually to be correct
        GT = [
            [
                [0.26139346, 0.9241894, 0.9000009],
                [0.73860654, 0.9241894, 0.9000009],
                [0.26139346, 0.0758106, 0.9000009],
                [0.73860654, 0.0758106, 0.9000009],
                [0.20174183, 1.03023675, 0.87500088],
                [0.79825817, 1.03023675, 0.87500088],
                [0.20174183, -0.03023675, 0.87500088],
                [0.79825817, -0.03023675, 0.87500088],
            ]
        ]
        # Validate
        assert np.allclose(GT, projected)

    async def test_fisheye_polynomial(self):
        """Test fisheye polynomial projection (F-Theta)."""
        self.camera.GetAttribute("xformOp:translate").Set((0.0, 0.0, 3.0))
        self.camera.GetAttribute("cameraProjectionType").Set(Vt.Token("fisheyePolynomial"))
        corners, projected = await self._render_and_project_corners()
        # GT
        # Confirmed visually to be correct
        GT = [
            [
                [0.43674065, 0.6457944, 0.0],
                [0.52159268, 0.6457944, 0.0],
                [0.43674065, 0.49494634, 0.0],
                [0.52159268, 0.49494634, 0.0],
                [0.40232877, 0.70697108, 0.0],
                [0.55600456, 0.70697108, 0.0],
                [0.40232877, 0.43376967, 0.0],
                [0.55600456, 0.43376967, 0.0],
            ]
        ]
        # Validate
        assert np.allclose(GT, projected)
        # Run the operation in reverse
        self._check_inverse_projection(corners, projected, debug_sphere_radius=0.03)

    async def test_fisheye_polynomial_edge(self):
        """Test fisheye polynomial projection (F-Theta) at edge of FOV."""
        self.camera.GetAttribute("xformOp:translate").Set((4.0, 0.0, 0.5))
        self.camera.GetAttribute("cameraProjectionType").Set(Vt.Token("fisheyePolynomial"))
        corners, projected = await self._render_and_project_corners()
        # GT
        # Confirmed visually to be correct
        GT = [
            [
                [0.25675408, 0.6494504, 0.0],
                [0.2902532, 0.68231909, 0.0],
                [0.25675408, 0.49129034, 0.0],
                [0.2902532, 0.45842165, 0.0],
                [0.19030016, 0.67307846, 0.0],
                [0.18980286, 0.74184522, 0.0],
                [0.19030016, 0.46766228, 0.0],
                [0.18980286, 0.39889552, 0.0],
            ]
        ]
        # Validate
        assert np.allclose(GT, projected)
        # Run the operation in reverse
        self._check_inverse_projection(corners, projected)

    # After running each test
    async def tearDown(self):
        pass
9,584
Python
41.790178
141
0.623748
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/helpers/test_instance_mapping.py
# NOTE: # omni.kit.test - std python's unittest module with additional wrapping to add suport for async/await tests # For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html import os import math import asyncio from time import time import carb import numpy as np import omni.kit.test from omni.kit.viewport.utility import get_active_viewport from pxr import UsdPhysics # Import extension python module we are testing with absolute import path, as if we are external user (other extension) import omni.syntheticdata as syn from ..utils import add_semantics FILE_DIR = os.path.dirname(os.path.realpath(__file__)) # Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test class TestHelpersInstanceMappings(omni.kit.test.AsyncTestCase): # Before running each test async def setUp(self): # Setup viewport self.viewport = get_active_viewport() await omni.usd.get_context().new_stage_async() self.stage = omni.usd.get_context().get_stage() prim = self.stage.DefinePrim("/World", "Xform") self.stage.SetDefaultPrim(prim) async def test_non_semantic_schemas(self): """ Test mixture of applied schemas including non-semantics. """ prim = self.stage.DefinePrim("/World/Cone", "Cone") # Add semantics schema add_semantics(prim, "Je ne suis pas un cone.") # Non-semantics schema UsdPhysics.RigidBodyAPI.Apply(prim) await syn.sensors.next_sensor_data_async(self.viewport,True) # Get instance mappings instance_mappings = syn.helpers.get_instance_mappings() # Validate cone_im = instance_mappings[0] assert cone_im["uniqueId"] == 1 assert cone_im["name"] == "/World/Cone" assert cone_im["semanticId"] == 1 assert cone_im["semanticLabel"] == "Je ne suis pas un cone." # After running each test async def tearDown(self): pass
2,050
Python
31.555555
141
0.687805
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/helpers/test_bboxes.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os
import math
import asyncio
from time import time

import carb
import numpy as np
import omni.kit.test
from omni.kit.viewport.utility import get_active_viewport
from pxr import Sdf, UsdGeom, Vt

# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
import omni.syntheticdata as syn

from ..utils import add_semantics

FILE_DIR = os.path.dirname(os.path.realpath(__file__))


# Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test
class TestBBoxes(omni.kit.test.AsyncTestCase):
    """Tests for bounding-box/occlusion reduction and sensor-merging helpers."""

    # Before running each test
    async def setUp(self):
        await omni.usd.get_context().new_stage_async()
        # Setup viewport
        self.viewport = get_active_viewport()
        await omni.usd.get_context().new_stage_async()
        self.stage = omni.usd.get_context().get_stage()
        world = self.stage.DefinePrim("/World", "Xform")
        self.stage.SetDefaultPrim(world)

        # One cube carries a semantic label, the other does not.
        labeled = self.stage.DefinePrim("/World/MarkedCube0", "Cube")
        add_semantics(labeled, "cube")
        labeled.GetAttribute("size").Set(100)
        UsdGeom.XformCommonAPI(labeled).SetTranslate((3, 3, 0))

        unlabeled = self.stage.DefinePrim("/World/UnmarkedCube", "Cube")
        unlabeled.GetAttribute("size").Set(100)
        UsdGeom.XformCommonAPI(unlabeled).SetTranslate((3, 3, -100))

        await omni.kit.app.get_app().next_update_async()
        for sensor_type in (
            syn._syntheticdata.SensorType.BoundingBox2DLoose,
            syn._syntheticdata.SensorType.BoundingBox2DTight,
            syn._syntheticdata.SensorType.BoundingBox3D,
            syn._syntheticdata.SensorType.Occlusion,
        ):
            syn.sensors.enable_sensors(self.viewport, [sensor_type])

    async def test_reduce_bboxes_3d(self):
        """Verify that reduce_bboxes_3d removes a cube without a semantic label"""
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)

        # Fetch the raw 3D bboxes; both cubes are present at this point.
        raw = syn.sensors.get_bounding_box_3d(self.viewport, return_corners=True)
        assert np.allclose(raw["z_min"], [-50, -50])

        # Transform of unmarked cube should be included in pre-reduced bbox but not included in reduced bbox
        UNMARKED_CUBE_GT = [[[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [3.0, 3.0, -100.0, 1.0]]]
        unmarked_found = np.allclose(raw["transform"][0], UNMARKED_CUBE_GT) or np.allclose(
            raw["transform"][1], UNMARKED_CUBE_GT
        )
        assert unmarked_found

        mappings = syn.helpers.get_instance_mappings()
        reduced = syn.helpers.reduce_bboxes_3d(raw, mappings)
        assert np.allclose(reduced["z_min"], [-50])
        assert np.allclose(
            reduced["transform"],
            [[[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [3.0, 3.0, 0.0, 1.0]]],
        )

    async def test_reduce_occlusion(self):
        """Verify that reduce_occlusion removes a cube without a semantic label"""
        # Add an extra cube
        extra = self.stage.DefinePrim("/World/MarkedCube1", "Cube")
        add_semantics(extra, "cube")
        extra.GetAttribute("size").Set(100)
        UsdGeom.XformCommonAPI(extra).SetTranslate((3, -10, 0))

        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)

        # Get occlusion
        occlusion = syn.sensors.get_occlusion(self.viewport)
        ratios = np.sort(occlusion["occlusionRatio"])
        assert np.allclose(ratios, [0.0327, 0.38059998, 0.8886], atol=0.05)

        mappings = syn.helpers.get_instance_mappings()
        reduced = syn.helpers.reduce_occlusion(occlusion, mappings)
        reduced_ratios = np.sort(reduced["occlusionRatio"])
        assert np.allclose(reduced_ratios, [0.0327, 0.8886], atol=0.05)

    async def test_merge_sensors(self):
        """Verify that merge_sensors merges the data correctly"""
        # Render one frame
        await syn.sensors.next_sensor_data_async(self.viewport, True)

        # Get bounding boxes and merge
        bbox2d_tight = syn.sensors.get_bounding_box_2d_tight(self.viewport)
        bbox2d_loose = syn.sensors.get_bounding_box_2d_loose(self.viewport)
        bbox3d = syn.sensors.get_bounding_box_3d(self.viewport, parsed=True)
        merged_data = syn.helpers.merge_sensors(bbox2d_tight, bbox2d_loose, bbox3d)

        # Each source must contribute at least one suffixed field to the merged record.
        for suffix, data_source in [
            ("_bbox2d_tight", bbox2d_tight),
            ("_bbox2d_loose", bbox2d_loose),
            ("_bbox3d", bbox3d),
        ]:
            suffix_present = False
            for key in merged_data.dtype.fields:
                if key.endswith(suffix):
                    # NOTE(review): sub_key is computed but the comparison below uses the
                    # suffixed key on both sides — looks suspicious; confirm against
                    # merge_sensors' field-naming contract.
                    sub_key = key[: -len(suffix)]
                    assert merged_data[key] == data_source[key]
                    suffix_present = True
            assert suffix_present

    # After running each test
    async def tearDown(self):
        pass
5,483
Python
43.225806
141
0.655845
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/visualize/test_warp_post_vis.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os

import carb
from pxr import Gf, UsdGeom, UsdLux, Sdf
import unittest
import omni.kit.test

from omni.syntheticdata import SyntheticData, SyntheticDataStage

from ..utils import add_semantics


class TestWarpPostVisualization(omni.kit.test.AsyncTestCase):
    """Exercises the warp post-visualization node templates on a simple two-capsule scene."""

    def __init__(self, methodName: str) -> None:
        super().__init__(methodName=methodName)

    @staticmethod
    def _add_capsule(stage, path, label, translate, color):
        """Define a semantically labeled, transformed and colored Capsule prim."""
        prim = stage.DefinePrim(path, "Capsule")
        add_semantics(prim, label)
        UsdGeom.Xformable(prim).AddTranslateOp().Set(translate)
        UsdGeom.Xformable(prim).AddScaleOp().Set((30, 30, 30))
        UsdGeom.Xformable(prim).AddRotateXYZOp().Set((-90, 0, 0))
        prim.GetAttribute("primvars:displayColor").Set([color])
        return prim

    async def setUp(self):
        # Setup the scene
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        world_prim = UsdGeom.Xform.Define(stage, "/World")
        UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0))
        UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 0, 0))

        # BUGFIX: the second capsule previously re-labeled capsule0 ("capsule_1" was
        # applied to capsule0_prim), leaving Capsule1 without semantics.
        self._add_capsule(stage, "/World/Capsule0", "capsule_0", (100, 0, 0), (0.3, 1, 0))
        self._add_capsule(stage, "/World/Capsule1", "capsule_1", (-100, 0, 0), (0, 1, 0.3))

        spherelight = UsdLux.SphereLight.Define(stage, "/SphereLight")
        spherelight.GetIntensityAttr().Set(30000)
        spherelight.GetRadiusAttr().Set(30)

        # Setup viewports / renderproduct
        vp_iface = omni.kit.viewport_legacy.get_viewport_interface()
        viewport = vp_iface.get_viewport_window()
        render_product_path = viewport.get_render_product_path()

        # SyntheticData singleton interface
        sdg_iface = SyntheticData.Get()
        if not sdg_iface.is_node_template_registered("TestWarpPostVisualization"):
            sdg_iface.register_node_template(
                SyntheticData.NodeTemplate(
                    SyntheticDataStage.ON_DEMAND,  # node template stage
                    "omni.syntheticdata.SdTestWarpPostVisulation",  # node template type
                    # node template connections
                    [
                        SyntheticData.NodeConnectionTemplate("LdrColorSDExportRawArray"),
                    ]),
                template_name="TestWarpPostVisualization"  # node template name
            )
        if not sdg_iface.is_node_template_registered("TestWarpPostVisualizationDisplay"):
            sdg_iface.register_node_template(
                SyntheticData.NodeTemplate(
                    SyntheticDataStage.ON_DEMAND,  # node template stage
                    "omni.syntheticdata.SdLinearArrayToTexture",  # node template type
                    # node template connections
                    [
                        SyntheticData.NodeConnectionTemplate("TestWarpPostVisualization"),
                    ]),
                template_name="TestWarpPostVisualizationDisplay"  # node template name
            )
        sdg_iface.activate_node_template("TestWarpPostVisualizationDisplay", 0, [render_product_path])

        self.numLoops = 100

    async def run_loop(self):
        # ensuring that the setup is taken into account
        for _ in range(5):
            await omni.kit.app.get_app().next_update_async()
        for _ in range(self.numLoops):
            await omni.kit.app.get_app().next_update_async()

    async def test_display(self):
        """ Test display """
        await self.run_loop()

    async def tearDown(self):
        pass
4,156
Python
40.989899
109
0.619105
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/visualize/test_post_vis.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os

import carb
from pxr import Gf, UsdGeom, UsdLux, Sdf
import unittest
import omni.kit.test

from omni.syntheticdata import SyntheticData, SyntheticDataStage

from ..utils import add_semantics


class TestPostVisualization(omni.kit.test.AsyncTestCase):
    """Exercises post-visualization display node templates on a simple two-capsule scene."""

    def __init__(self, methodName: str) -> None:
        super().__init__(methodName=methodName)

    def activate_post_vis(self, render_product_path, render_var):
        """Register (once) and activate a display template converting render_var to a texture."""
        sdg_iface = SyntheticData.Get()
        render_var_post_display = "Test" + render_var + "PostDisplay"
        if not sdg_iface.is_node_template_registered(render_var_post_display):
            sdg_iface.register_node_template(
                SyntheticData.NodeTemplate(
                    SyntheticDataStage.ON_DEMAND,  # node template stage
                    "omni.syntheticdata.SdLinearArrayToTexture",  # node template type
                    # node template connections
                    [
                        SyntheticData.NodeConnectionTemplate(render_var),
                    ]),
                template_name=render_var_post_display
            )
        sdg_iface.activate_node_template(render_var_post_display, 0, [render_product_path])

    @staticmethod
    def _add_capsule(stage, path, label, translate, color):
        """Define a semantically labeled, transformed and colored Capsule prim."""
        prim = stage.DefinePrim(path, "Capsule")
        add_semantics(prim, label)
        UsdGeom.Xformable(prim).AddTranslateOp().Set(translate)
        UsdGeom.Xformable(prim).AddScaleOp().Set((30, 30, 30))
        UsdGeom.Xformable(prim).AddRotateXYZOp().Set((-90, 0, 0))
        prim.GetAttribute("primvars:displayColor").Set([color])
        return prim

    async def setUp(self):
        # Setup the scene
        await omni.usd.get_context().new_stage_async()
        stage = omni.usd.get_context().get_stage()
        world_prim = UsdGeom.Xform.Define(stage, "/World")
        UsdGeom.Xformable(world_prim).AddTranslateOp().Set((0, 0, 0))
        UsdGeom.Xformable(world_prim).AddRotateXYZOp().Set((0, 0, 0))

        # BUGFIX: the second capsule previously re-labeled capsule0 ("capsule_1" was
        # applied to capsule0_prim), leaving Capsule1 without semantics.
        self._add_capsule(stage, "/World/Capsule0", "capsule_0", (100, 0, 0), (0.3, 1, 0))
        self._add_capsule(stage, "/World/Capsule1", "capsule_1", (-100, 0, 0), (0, 1, 0.3))

        spherelight = UsdLux.SphereLight.Define(stage, "/SphereLight")
        spherelight.GetIntensityAttr().Set(30000)
        spherelight.GetRadiusAttr().Set(30)

        # Setup viewports / renderproduct
        vp_iface = omni.kit.viewport_legacy.get_viewport_interface()
        viewport = vp_iface.get_viewport_window()
        render_product_path = viewport.get_render_product_path()

        # BUGFIX: activate_post_vis requires the render product path as its first
        # argument; it was previously called with only the render var, which raises
        # a TypeError at runtime.
        self.activate_post_vis(render_product_path, "LdrColorSD")

        self.numLoops = 100

    async def run_loop(self):
        # ensuring that the setup is taken into account
        for _ in range(5):
            await omni.kit.app.get_app().next_update_async()
        for _ in range(self.numLoops):
            await omni.kit.app.get_app().next_update_async()

    async def test_display(self):
        """ Test display """
        await self.run_loop()

    async def tearDown(self):
        pass
3,587
Python
38.866666
109
0.624756
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/visualize/test_semantic_seg.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import os

import numpy as np
import omni.kit.test
from omni.kit.viewport.utility import get_active_viewport
from pxr import UsdGeom

# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
import omni.syntheticdata as syn

from ..utils import add_semantics

FILE_DIR = os.path.dirname(os.path.realpath(__file__))
TIMEOUT = 50


# Having a test class derived from omni.kit.test.AsyncTestCase declared on the root of module will make it auto-discoverable by omni.kit.test
class TestSemanticSegVis(omni.kit.test.AsyncTestCase):
    """Visualization checks for the parsed semantic segmentation output."""

    # Before running each test
    async def setUp(self):
        np.random.seed(1234)
        # Setup viewport
        self.viewport = get_active_viewport()
        # Fresh stage, then enable both segmentation sensors.
        await omni.usd.get_context().new_stage_async()
        await omni.kit.app.get_app().next_update_async()
        syn.sensors.enable_sensors(
            self.viewport,
            [syn._syntheticdata.SensorType.SemanticSegmentation, syn._syntheticdata.SensorType.InstanceSegmentation],
        )

    async def _render_and_get_parsed(self):
        """Render one frame and return the parsed semantic segmentation image."""
        await syn.sensors.next_sensor_data_async(self.viewport, True)
        return syn.visualize.get_semantic_segmentation(self.viewport, mode="parsed")

    async def test_parsed_empty(self):
        """ Test semantic segmentation returns zero array with empty scene """
        parsed = await self._render_and_get_parsed()
        assert np.array_equal(parsed, np.zeros_like(parsed).astype(np.uint8))

    async def test_number_of_classes(self):
        """ Test that number of classes in output matches number of classes in scene """
        stage = omni.usd.get_context().get_stage()
        for prim_path, label, offset in (("/Cube1", "cube1", (0, 10, 0)), ("/Cube2", "cube2", (0, -10, 0))):
            prim = stage.DefinePrim(prim_path, "Cube")
            add_semantics(prim, label)
            UsdGeom.Xformable(prim).AddTranslateOp().Set(offset)
        parsed = await self._render_and_get_parsed()
        # Remove background, encoded as (0, 0, 0, 0)
        foreground = parsed[parsed.sum(axis=-1) != 0]
        assert len(np.unique(foreground, axis=0)) == 2

    # After running each test
    async def tearDown(self):
        pass
2,581
Python
39.343749
141
0.680356
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/graph/test_graph_manipulation.py
import carb
from pxr import Gf, UsdGeom, UsdLux, Sdf

import omni.hydratexture
import omni.kit.test
from omni.syntheticdata import SyntheticData, SyntheticDataStage


# Test the instance mapping pipeline
class TestGraphManipulation(omni.kit.test.AsyncTestCase):
    """Exercise render-var enable/usage bookkeeping while (de)activating node templates."""

    def __init__(self, methodName: str) -> None:
        super().__init__(methodName=methodName)

    def render_product_path(self, hydra_texture) -> str:
        '''Return a string to the UsdRender.Product used by the texture'''
        render_product = hydra_texture.get_render_product_path()
        if render_product and not render_product.startswith("/"):
            # Bare names are relative to the default /Render scope
            render_product = f"/Render/RenderProduct_{render_product}"
        return render_product

    # --- small private helpers to keep the assertions below readable ---

    def _enabled(self, isdg, render_var: str, only_activated: bool) -> bool:
        """True if *render_var* is enabled on the test render product."""
        return isdg.is_rendervar_enabled(self._render_product_path_0, render_var, only_activated, self._stage)

    def _used(self, isdg, render_var: str) -> bool:
        """True if *render_var* is referenced by an active node graph."""
        return isdg.is_rendervar_used(self._render_product_path_0, render_var)

    def _activated(self, isdg, template: str, fully: bool) -> bool:
        """True if *template* is activated for the test render product."""
        return isdg.is_node_template_activated(template, self._render_product_path_0, fully)

    async def setUp(self):
        """Create a stage, attach the rtx hydra engine and one hydra texture."""
        self._settings = carb.settings.acquire_settings_interface()
        self._hydra_texture_factory = omni.hydratexture.acquire_hydra_texture_factory_interface()
        self._usd_context_name = ""
        self._usd_context = omni.usd.get_context(self._usd_context_name)
        await self._usd_context.new_stage_async()
        self._stage = omni.usd.get_context().get_stage()

        # renderer
        renderer = "rtx"
        if renderer not in self._usd_context.get_attached_hydra_engine_names():
            omni.usd.add_hydra_engine(renderer, self._usd_context)

        # create the hydra textures
        self._hydra_texture_0 = self._hydra_texture_factory.create_hydra_texture(
            "TEX0",
            1920,
            1080,
            self._usd_context_name,
            hydra_engine_name=renderer,
            is_async=self._settings.get("/app/asyncRendering"),
        )
        self._render_product_path_0 = self.render_product_path(self._hydra_texture_0)

        self._hydra_texture_rendered_counter = 0

        def on_hydra_texture_0(event: carb.events.IEvent):
            self._hydra_texture_rendered_counter += 1

        self._hydra_texture_rendered_counter_sub = self._hydra_texture_0.get_event_stream().create_subscription_to_push_by_type(
            omni.hydratexture.EVENT_TYPE_DRAWABLE_CHANGED,
            on_hydra_texture_0,
            name="async rendering test drawable update",
        )

    async def tearDown(self):
        """Release the texture/engine and let pending frames drain."""
        self._hydra_texture_rendered_counter_sub = None
        self._hydra_texture_0 = None

        self._usd_context.close_stage()
        omni.usd.release_all_hydra_engines(self._usd_context)

        self._hydra_texture_factory = None
        self._settings = None

        # give in-flight frames a few updates to complete before the next test
        wait_iterations = 6
        for _ in range(wait_iterations):
            await omni.kit.app.get_app().next_update_async()

    async def test_rendervar_enable(self):
        """Manual enable/disable of a render var with no graph attached."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.enable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.disable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)

    async def test_rendervar_auto_activation(self):
        """Activating a template with auto-enable turns the render var on and off."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        assert not self._enabled(isdg, render_var, True)
        assert not self._used(isdg, render_var)
        isdg.activate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], {}, self._stage, True)
        assert self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        assert self._enabled(isdg, render_var, True)
        isdg.deactivate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], self._stage, True)
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)

    async def test_rendervar_manual_activation(self):
        """With auto-enable off, enabling/disabling the render var is the caller's job."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        assert not self._activated(isdg, "BoundingBox3DReduction", False)
        isdg.activate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], {}, self._stage, False)
        assert self._activated(isdg, "BoundingBox3DReduction", False)
        assert self._activated(isdg, "BoundingBox3DReduction", True)
        # template activation marks the var as used, but does not enable it
        assert not self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        isdg.enable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        assert not self._enabled(isdg, render_var, True)
        isdg.deactivate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], self._stage, False)
        assert not self._activated(isdg, "BoundingBox3DReduction", True)
        # manual enable survives deactivation and must be disabled explicitly
        assert self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.disable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)

    async def test_rendervar_hybrid_activation(self):
        """Manual activation followed by auto deactivation leaves the manual enable in place."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.activate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], {}, self._stage, False)
        assert not self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        isdg.enable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        isdg.deactivate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], self._stage, True)
        assert self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        assert not self._enabled(isdg, render_var, True)
        isdg.disable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert not self._enabled(isdg, render_var, False)

    async def test_rendervar_initially_activated(self):
        """Auto deactivation must not disable a render var that was enabled beforehand."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.enable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.activate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], {}, self._stage, True)
        assert self._enabled(isdg, render_var, False)
        assert self._used(isdg, render_var)
        assert not self._enabled(isdg, render_var, True)
        isdg.deactivate_node_template("BoundingBox3DReduction", 0, [self._render_product_path_0], self._stage, True)
        assert self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.disable_rendervar(self._render_product_path_0, render_var, self._stage)
        assert not self._enabled(isdg, render_var, False)

    async def test_rendervar_multiple_activation(self):
        """A render var stays used while at least one dependent template is active."""
        isdg = SyntheticData.Get()
        render_var = "BoundingBox3DSD"
        if not isdg.is_node_template_registered("BoundingBox3DDisplayPostDuplicate"):
            isdg.register_node_template(
                SyntheticData.NodeTemplate(
                    SyntheticDataStage.POST_RENDER,
                    "omni.syntheticdata.SdPostRenderVarDisplayTexture",
                    [
                        SyntheticData.NodeConnectionTemplate("LdrColorSD"),
                        SyntheticData.NodeConnectionTemplate("Camera3dPositionSD"),
                        SyntheticData.NodeConnectionTemplate("PostRenderProductCamera"),
                        SyntheticData.NodeConnectionTemplate("InstanceMappingPost"),
                        SyntheticData.NodeConnectionTemplate("BoundingBox3DReduction"),
                    ],
                    {
                        "inputs:renderVar": "LdrColorSD",
                        "inputs:renderVarDisplay": "BoundingBox3DSDDisplay",
                        "inputs:mode": "semanticBoundingBox3dMode",
                        "inputs:parameters": [0.0, 5.0, 0.027, 0.27],
                    },
                ),  # node template default attribute values (when differs from the default value specified in the .ogn)
                template_name="BoundingBox3DDisplayPostDuplicate",  # node template name
            )

        assert not self._enabled(isdg, render_var, False)
        assert not self._used(isdg, render_var)
        isdg.activate_node_template("BoundingBox3DDisplayPost", 0, [self._render_product_path_0], {}, self._stage, True)
        # the reduction template is activated implicitly (as a dependency), not explicitly
        assert not self._activated(isdg, "BoundingBox3DReduction", True)
        assert self._activated(isdg, "BoundingBox3DReduction", False)
        assert self._enabled(isdg, render_var, True)
        assert self._used(isdg, render_var)
        isdg.activate_node_template(
            "BoundingBox3DDisplayPostDuplicate", 0, [self._render_product_path_0], {}, self._stage, True
        )
        isdg.deactivate_node_template("BoundingBox3DDisplayPost", 0, [self._render_product_path_0], self._stage, True)
        # the duplicate still depends on the reduction template, so the var remains in use
        assert self._enabled(isdg, render_var, True)
        assert self._used(isdg, render_var)
        assert not self._activated(isdg, "BoundingBox3DReduction", True)
        assert self._activated(isdg, "BoundingBox3DReduction", False)
        isdg.deactivate_node_template(
            "BoundingBox3DDisplayPostDuplicate", 0, [self._render_product_path_0], self._stage, True
        )
        assert not self._activated(isdg, "BoundingBox3DReduction", False)
        assert not self._enabled(isdg, render_var, True)
        assert not self._used(isdg, render_var)
12,732
Python
63.30808
128
0.680647
omniverse-code/kit/exts/omni.syntheticdata/omni/syntheticdata/tests/data/golden/view_np_image.py
"""Quick viewer for an ``.npz`` array saved by the synthetic-data golden tests.

Usage: python view_np_image.py <path/to/file.npz>
"""
import os
import sys

import matplotlib.pyplot as plt
import numpy as np


def main() -> None:
    """Load the "array" entry of the given .npz file, print its shape and display it."""
    if len(sys.argv) < 2:
        # fail with a usage message instead of an IndexError
        sys.exit(f"usage: {sys.argv[0]} <file.npz>")
    image = np.load(sys.argv[1])["array"]
    print(image.shape)
    # optional toggles kept from the original script:
    # np.savez_compressed(f"{os.path.splitext(sys.argv[1])[0]}.npz", array=image)
    # image = (image - image.min()) / image.ptp()  # normalize to [0, 1]
    plt.imshow(image)
    plt.show()


if __name__ == "__main__":
    main()
283
Python
22.666665
77
0.70318
omniverse-code/kit/exts/omni.syntheticdata/docs/index.rst
omni.syntheticdata ////////////////////////// Introduction ************ This extension provides both C++ and python bindings that allow users to extract ground truth data from scenes loaded and rendered in Omniverse Kit and use it for DL/RL training purposes. Data can be accessed either in host memory or directly on device memory to provide high performance training. The scene data is provided by generating USD data that can be rendered through the Kit renderer. Core Concepts ************* Sensor ====== Ground truth data is accessed through various sensors that are associated with a view in the renderer. The sensors generally provide access to synthetic data and are either represented as images or buffers of attribute data. Attribute data elements are usually associated with a particular instance in a scene, which is usually represented by a mesh specified in the USD data. Sensors are objects that are managed by the user either through the API or the UI. Synthetic Image Data ==================== Synthetic image data is represented by sensors as a 2D image. Examples of synthetic image data include RGB data, depth data, and segmentation data. The data can be in any valid image format supported by the renderer. Synthetic Attribute Data ======================== Synthetic attribute data is represented by sensors as raw structured data that can be accessed as an array. The data structures used to store array elements depend on the type of sensor. Examples of synthetic attribute data include bounding boxes. See the data structures defined below to see how various attribute data arrays define their data. Instance ======== An instance is a single segmentation unit in a scene that is usually represented as a mesh. An instance is usually represented in sensor data as a unique unsigned integer ID. The renderer currently limits scenes to having 2^24 unique instances. 
Semantic Class ============== A semantic class is a classification given to a scene instance that can be used for training purposes. It is provided as a unique string and is usually represented in sensor data as a unique unsigned integer ID. Semantic class strings can be anything that will be used to identify scene instances, such as "car", "tree", "large", "broken", etc. The renderer currently limits scenes to having 2^16 unique semantic classes. Semantic class data is specified inside the USD scene data through the Semantic API schema. Segmentation ============ Segmentation data is usually represented by sensors as synthetic image data and is used to segment image data within a view. Examples include instance segmentation which will represent each pixel in the image data with an instance ID and semantic segmentation which will represent each pixel in the image data with a semantic ID. Accessing Data on Device Memory =============================== Device Memory is usually GPU memory. Synthetic data can be accessed directly on device memory with python by using PyTorch tensors. Accessing Data on Host Memory ============================= Host memory is usually system memory. Synthetic data can be accessed directly on host memory with python through numpy arrays. Data Structures *************** Below are the various data structures specified by the C++ API and accessed through python using pybind. SensorType ========== .. 
code:: enum class SensorType : uint32_t { // These sensors represent image data eRgb = 0, ///< RGB data eCamera3dPosition, ///< camera space 3d position eDistanceToImagePlane, ///< distance to image plane in meters eDistanceToCamera, ///< distance to camera in meters eDepth, ///< depth data (***DEPRECATED***) eDepthLinear, ///< linear depth data (in meters) (***DEPRECATED***) eInstanceSegmentation, ///< instance segmentation data eSemanticSegmentation, ///< semantic segmentation data (***DEPRECATED***) eNormal, ///< normal vector data eMotionVector, ///< motion vector data eCrossCorrespondence, ///< cross correspondence data // These sensors represent instance attribute data eBoundingBox2DTight, ///< tight 2D bounding box data, only contains non-occluded pixels eBoundingBox2DLoose, ///< loose 2D bounding box data, also contains occluded pixels eBoundingBox3D, ///< 3D view space bounding box data eOcclusion, ///< occlusion data eTruncation, ///< truncation data // These track valid sensor types eSensorTypeCount, ///< the total number of valid sensor outputs eSensorTypeInvalid = 0x7FFFFFFF ///< invalid sensor marker }; SensorResourceType ================== .. code:: enum class SensorResourceType { eTexture, ///< image data sensors eBuffer ///< attribute data sensors }; SensorInfo ========== .. code:: struct SensorInfo { SensorType type; ///< sensor type SensorResourceType resType; ///< sensor resource type union { struct { uint32_t width; ///< sensor width of texture sensors uint32_t height; ///< sensor height of texture sensors uint32_t bpp; ///< bytes per pixel stored for texture sensors uint32_t rowSize; ///< texture row stride in bytes } tex; struct { size_t size; ///< size in bytes of buffer sensors } buff; }; ///< sensor parameters }; BoundingBox2DValues =================== .. 
code:: struct BoundingBox2DValues { uint32_t instanceId; ///< instance ID uint32_t semanticId; ///< semantic ID *** DEPRECATED *** int32_t x_min; ///< left extent int32_t y_min; ///< top extent int32_t x_max; ///< right extent int32_t y_max; ///< bottom extent }; BoundingBox3DValues =================== .. code:: struct BoundingBox3DValues { uint32_t instanceId; ///< instance ID uint32_t semanticId; ///< semantic ID *** DEPRECATED *** float x_min; ///< left extent float y_min; ///< top extent float z_min; ///< front extent float x_max; ///< right extent float y_max; ///< bottom extent float z_max; ///< back extent }; OcclusionValues =============== .. code:: struct OcclusionValues { uint32_t instanceId; ///< instance ID uint32_t semanticId; ///< semantic ID *** DEPRECATED *** float occlusionRatio; ///< ratio of instance that is occluded }; TruncationValues ================ .. code:: struct TruncationValues { uint32_t instanceId; ///< instance ID uint32_t semanticId; ///< semantic ID *** DEPRECATED *** float truncationRatio; ///< ratio of instance that is truncated }; Python API Docs **************** Pybind API ========== .. code:: // Creates a sensor of specified type if none exist otherwise return the existing sensor. // // Args: // // arg0 (type): The sensor type to return create_sensor(sensors::SensorType type) .. code:: // Destroys the specified sensor. // // Args: // // arg0 (type): The sensor type to destroy destroy_sensor(sensors::SensorType type) .. code:: // Returns the width of the specified image sensor. // // Args: // // arg0 (type): The sensor to retrieve the width for get_sensor_width(carb::sensors::SensorType type) .. code:: // Returns the height of the specified image sensor. // // Args: // // arg0 (type): The sensor to retrieve the height for get_sensor_height(carb::sensors::SensorType type) .. code:: // Returns the bytes per pixel of the specified image sensor. 
// // Args: // // arg0 (type): The sensor to retrieve the bytes per pixel for get_sensor_bpp(carb::sensors::SensorType type) .. code:: // Returns the row size in bytes of the specified image sensor. // // Args: // // arg0 (type): The sensor to retrieve the row size for get_sensor_row_size(carb::sensors::SensorType type) .. code:: // Returns the size in bytes of the specified attribute sensor. // // Args: // // arg0 (type): The sensor to retrieve the size for get_sensor_size(carb::sensors::SensorType type) .. code:: // Returns a pointer to the sensor's data on device memory // // Args: // // arg0 (type): The sensor to retrieve the data for get_sensor_device_data(carb::sensors::SensorType type) .. code:: // Returns a pointer to the sensor's data on host memory // // Args: // // arg0 (type): The sensor to retrieve the host data for get_sensor_host_data(carb::sensors::SensorType type) .. code:: // Returns floating point tensor data of the image sensor on device memory // // Args: // // arg0 (type): The image sensor to retrieve the tensor data for // // arg1 (width): The width of the image sensor // // arg2 (height): The height of the image sensor // // arg3 (rowSize): The row size in bytes of the image sensor get_sensor_device_float_2d_tensor(carb::sensors::SensorType type, size_t width, size_t height, size_t rowSize) .. code:: // Returns 32-bit integer tensor data of the image sensor on device memory // // Args: // // arg0 (type): The image sensor to retrieve the tensor data for // // arg1 (width): The width of the image sensor // // arg2 (height): The height of the image sensor // // arg3 (rowSize): The row size in bytes of the image sensor get_sensor_device_int32_2d_tensor(carb::sensors::SensorType type, size_t width, size_t height, size_t rowSize) .. 
code:: // Returns 8-bit integer vector tensor data of the image sensor on device memory // // Args: // // arg0 (type): The image sensor to retrieve the tensor data for // // arg1 (width): The width of the image sensor // // arg2 (height): The height of the image sensor // // arg3 (rowSize): The row size in bytes of the image sensor get_sensor_device_uint8_3d_tensor(carb::sensors::SensorType type, size_t width, size_t height, size_t rowSize) .. code:: // Returns 32-bit integer numpy array data of the image sensor on host memory // // Args: // // arg0 (type): The image sensor to retrieve the numpy data for // // arg1 (width): The width of the image sensor // // arg2 (height): The height of the image sensor // // arg3 (rowSize): The row size in bytes of the image sensor get_sensor_host_uint32_texture_array(carb::sensors::SensorType type, size_t width, size_t height, size_t rowSize) .. code:: // Returns floating point numpy array data of the image sensor on host memory // // Args: // // arg0 (type): The image sensor to retrieve the numpy data for // // arg1 (width): The width of the image sensor // // arg2 (height): The height of the image sensor // // arg3 (rowSize): The row size in bytes of the image sensor get_sensor_host_float_texture_array(carb::sensors::SensorType type, size_t width, size_t height, size_t rowSize) .. code:: // Returns floating point numpy array data of the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_float_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns 32-bit unsigned integer numpy array data of the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_uint32_buffer_array(carb::sensors::SensorType type, size_t size) .. 
code:: // Returns 32-bit signed integer numpy array data of the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_int32_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns a numpy array of BoundingBox2DValues data for the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_bounding_box_2d_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns a numpy array of BoundingBox3DValues data for the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_bounding_box_3d_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns a numpy array of OcclusionValues data for the attribute sensor on host memory // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_occlusion_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns a numpy array of TruncationValues data for the attribute sensor on host memory (TODO) // // Args: // // arg0 (type): The attribute sensor to retrieve the numpy data for // // arg1 (size): The size of the attribute sensor in bytes get_sensor_host_truncation_buffer_array(carb::sensors::SensorType type, size_t size) .. code:: // Returns the instance ID of the specified mesh as represented by sensor data // // Args: // // arg0 (uri): The representation of the mesh in the USD scene get_instance_segmentation_id(const char* uri) .. 
code:: // DEPRECATED (v0.3.0) Returns the semantic ID of the specified name and type as represented by sensor data // // Args: // // arg0 (type): The semantic type name // // arg1 (data): The semantic data name get_semantic_segmentation_id_from_data(const char* type, const char* data) .. code:: // DEPRECATED (v0.3.0) Returns the semantic class name of the semantic ID represented by sensor data // // Args: // // arg0 (semanticId): The semantic ID get_semantic_segmentation_data_from_id(uint16_t semanticId) .. code:: // DEPRECATED (v0.3.0) Specify which semantic classes to retrieve bounding boxes for // // Args: // // arg0 (semanticId): The semantic ID to retrieve bounding boxes for set_bounding_box_semantic_segmentation_id(uint16_t semanticId) .. code:: // DEPRECATED (v0.3.0) Specify which semantic classes to retrieve bounding boxes for // // Args: // // arg0 (data): The semantic data class name to retrieve bounding boxes for set_bounding_box_semantic_segmentation_data(std::string data)
15,223
reStructuredText
30.196721
119
0.650923