file_path (string, 21-207 chars) | content (string, 5-1.02M chars) | size (int64, 5-1.02M) | lang (string, 9 classes) | avg_line_length (float64, 1.33-100) | max_line_length (int64, 4-993) | alphanum_fraction (float64, 0.27-0.93)
---|---|---|---|---|---|---
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/new_faucet.py |
import carb
import math
from pathlib import Path
import pxr
from pxr import Usd, UsdLux, UsdGeom, Sdf, Gf, Vt, UsdPhysics, PhysxSchema
import sys
# put schemaHelpers.py on the import path
from omni.kitchen.asset.layout.fluid.schemaHelpers import PhysxParticleInstancePrototype, \
addPhysxParticleSystem, addPhysxParticlesSimple
import omni.timeline
from typing import List
from omni.kitchen.asset.task_check.newJointCheck import JointCheck
import math
from .utils import generate_cylinder_y, point_sphere
from ...param import IS_IN_ISAAC_SIM
from .constants import PARTICLE_PROPERTY, particel_scale
from omni.physx.scripts import particleUtils
def setGridFilteringPass(gridFilteringFlags: int, passIndex: int, operation: int, numRepetitions: int = 1):
numRepetitions = max(0, numRepetitions - 1)
shift = passIndex * 4
gridFilteringFlags &= ~(3 << shift)
gridFilteringFlags |= (((operation) << 2) | numRepetitions) << shift
return gridFilteringFlags
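# Illustrative usage (not part of the original file): pack two smoothing passes into a
# single isosurface grid-filtering flag, mirroring how "isosurfaceGridFilteringFlags"
# is assembled at the bottom of faucet.py in this folder. The helper name is hypothetical.
def _example_grid_filtering_flags():
    filterSmooth = 1
    filtering = 0
    filtering = setGridFilteringPass(filtering, passIndex=0, operation=filterSmooth)
    filtering = setGridFilteringPass(filtering, passIndex=1, operation=filterSmooth)
    return filtering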
class Faucet():
def __init__(self,
liquid_material_path = "/World/Looks/OmniSurface_ClearWater", inflow_path:str = "/World/faucet/inflow",
link_paths:List[str] = ["/World/faucet/link_0"]
):
"""! Faucet class
@param liquid_material_path: path to the liquid material prim
@param inflow_path: used to compute the location of water drops
@param link_paths: used to compute the rotation of the faucet handle and determine the speed and size of water drops
@return an instance of Faucet class
"""
# particle Instance path
# self.particleInstanceStr_tmp = "/particlesInstance"
# self.particle_params = particle_params
# self.iso_surface_params = iso_surface_params
self.liquid_material_path = liquid_material_path
# inflow position
self.stage = omni.usd.get_context().get_stage()
self.inflow_path = inflow_path
self.inflow_prim = self.stage.GetPrimAtPath(inflow_path)
mat = omni.usd.utils.get_world_transform_matrix(self.inflow_prim)
# if IS_IN_ISAAC_SIM:
# from omni.isaac.core.prims import XFormPrim
# self.inflow_position, _ = XFormPrim(self.inflow_path).get_world_pose()
# self.inflow_position = Gf.Vec3f(*self.inflow_position.tolist())
# else:
self.inflow_position = Gf.Vec3f(*mat.ExtractTranslation())
self.link_paths = link_paths
self.list_of_point_instancers = []
self.active_indexes_for_point_instancers = []
self.rate_checkers = []
for link in link_paths:
path = Path(link)
self.rate_checkers.append(JointCheck( str(path.parent), str(path.name) ))
self.create()
# print("particleSystemPath", self.particleSystemPath)
def is_off(self):
rate = self.rate_checkers[0].compute_distance()/100.0
return rate < 0.1
def point_sphere(self, samples, scale):
"""! create locations for each particles
@param samples: the number of particles per sphere
@param scale: the scale(radius) of the water drop
"""
indices = [x + 0.5 for x in range(0, samples)]
phi = [math.acos(1 - 2 * x / samples) for x in indices]
theta = [math.pi * (1 + 5**0.5) * x for x in indices]
x = [math.cos(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
y = [math.sin(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
z = [math.cos(ph) * scale for ph in phi]
points = [Gf.Vec3f(x, y, z) for (x, y, z) in zip(x, y, z)]
return points
def create_ball(self, rate = 1):
"""! create a water drop
@param rate: controls the number of particles emitted for each water drop
"""
# create sphere on points
self.set_up_particle_system(rate)
def set_up_particle_system(self, rate):
self.particleInstanceStr_tmp = self.particleInstanceStr + "/particlesInstance" + str(self.it)
particleInstancePath = omni.usd.get_stage_next_free_path(self.stage, self.particleInstanceStr_tmp, False)
particleInstancePath = pxr.Sdf.Path(particleInstancePath)
proto = PhysxParticleInstancePrototype()
proto.selfCollision = True
proto.fluid = True
proto.collisionGroup = 0
proto.mass = PARTICLE_PROPERTY._particle_mass
protoArray = [proto]
positions_list = []
velocities_list = []
protoIndices_list = []
cylinder_height = 2
cylinder_radius = 1.5
lowerCenter = Gf.Vec3f(0, -cylinder_height, 0) # self.inflow_position
# lowerCenter = self.inflow_position
particle_rest_offset = self._particleSystemSchemaParameters["fluid_rest_offset"]
positions_list = generate_cylinder_y(lowerCenter, h=cylinder_height, radius=cylinder_radius, sphereDiameter=particle_rest_offset * 4.0)
for _ in range(len(positions_list)):
velocities_list.append(pxr.Gf.Vec3f(0, 0, 0))
protoIndices_list.append(0)
# print("positions_list", len(positions_list))
self.positions_list = positions_list
protoIndices = pxr.Vt.IntArray(protoIndices_list)
positions = pxr.Vt.Vec3fArray(positions_list)
velocities = pxr.Vt.Vec3fArray(velocities_list)
print("particleInstancePath", particleInstancePath.pathString)
particleUtils.add_physx_particleset_pointinstancer(
self.stage,
particleInstancePath,
positions,
velocities,
self.particleSystemPath,
self_collision=True,
fluid=True,
particle_group=0,
particle_mass=PARTICLE_PROPERTY._particle_mass,
density=0.0,
)
prototypePath = particleInstancePath.pathString + "/particlePrototype0"
sphere = UsdGeom.Sphere.Define(self.stage, Sdf.Path(prototypePath))
spherePrim = sphere.GetPrim()
# spherePrim.GetAttribute('visibility').Set('invisible')
color_rgb = [207/255.0, 244/255.0, 254/255.0]
color = pxr.Vt.Vec3fArray([pxr.Gf.Vec3f(color_rgb[0], color_rgb[1], color_rgb[2])])
sphere.CreateDisplayColorAttr(color)
# spherePrim.CreateAttribute("enableAnisotropy", Sdf.ValueTypeNames.Bool, True).Set(True)
def create(self):
"""! initialize the related parameters for faucet
create physics scenes
create particle systems
create isosurface
"""
self._setup_callbacks()
self.it = 0
self.counter = 10
self.set_up_fluid_physical_scene()
def set_up_fluid_physical_scene(self, gravityMagnitude = 100.0):
"""
Fluid / PhysicsScene
"""
default_prim_path = self.stage.GetDefaultPrim().GetPath()
if default_prim_path.pathString == '':
# default_prim_path = pxr.Sdf.Path('/World')
root = UsdGeom.Xform.Define(self.stage, "/World").GetPrim()
self.stage.SetDefaultPrim(root)
default_prim_path = self.stage.GetDefaultPrim().GetPath()
self.stage = omni.usd.get_context().get_stage()
particleSystemStr = default_prim_path.AppendPath("Fluid").pathString
self.physicsScenePath = default_prim_path.AppendChild("physicsScene")
self.particleSystemPath = Sdf.Path(particleSystemStr)
self.particleInstanceStr = "/World/game/inflow"
# print("particleInstanceStr", self.particleInstanceStr)
# Physics scene
self._gravityMagnitude = gravityMagnitude
self._gravityDirection = Gf.Vec3f(0.0, -1.0, 0.0)
physicsScenePath = default_prim_path.AppendChild("physicsScene")
if self.stage.GetPrimAtPath('/World/physicsScene'):
scene = UsdPhysics.Scene.Get(self.stage, physicsScenePath)
else:
scene = UsdPhysics.Scene.Define(self.stage, physicsScenePath)
scene.CreateGravityDirectionAttr().Set(self._gravityDirection)
scene.CreateGravityMagnitudeAttr().Set(self._gravityMagnitude)
physxSceneAPI = PhysxSchema.PhysxSceneAPI.Apply(scene.GetPrim())
physxSceneAPI.CreateEnableCCDAttr().Set(True)
physxSceneAPI.GetTimeStepsPerSecondAttr().Set(120)
self._fluidSphereDiameter = PARTICLE_PROPERTY._fluidSphereDiameter #0.24
# solver parameters:
# self._solverPositionIterations = 10
# self._solverVelocityIterations = 10
# self._particleSystemSchemaParameters = {
# "contact_offset": 0.3,
# "particle_contact_offset": 0.25,
# "rest_offset": 0.25,
# "solid_rest_offset": 0,
# "fluid_rest_offset": 0.5 * self._fluidSphereDiameter + 0.03,
# "solver_position_iterations": self._solverPositionIterations,
# "solver_velocity_iterations": self._solverVelocityIterations,
# "wind": Gf.Vec3f(0, 0, 0),
# }
self._particleSystemSchemaParameters = PARTICLE_PROPERTY._particleSystemSchemaParameters
# self._particleSystemAttributes = {
# "cohesion": 7.4,
# "smoothing": 0.8,
# "anisotropyScale": 1.0,
# "anisotropyMin": 0.2,
# "anisotropyMax": 2.0,
# "surfaceTension": 2.0, #0.74,
# "vorticityConfinement": 0.5,
# "viscosity": 5.0,
# "particleFriction": 0.34,
# "maxParticles": 20000,
# }
self._particleSystemAttributes = PARTICLE_PROPERTY._particleSystemAttributes
self._particleSystemAttributes["maxParticles"] = 2000
self._particleSystemAttributes["viscosity"] = 0.001
self._particleSystem = particleUtils.add_physx_particle_system(
self.stage, self.particleSystemPath, **self._particleSystemSchemaParameters, simulation_owner=Sdf.Path(self.physicsScenePath.pathString)
)
# addPhysxParticleSystem(
# self.stage, self.particleSystemPath, **self._particleSystemSchemaParameters, \
# scenePath=pxr.Sdf.Path(self.physicsScenePath.pathString)
# )
# particleSystem = self.stage.GetPrimAtPath(self.particleSystemPath)
# for key, value in self._particleSystemAttributes.items():
# particleSystem.GetAttribute(key).Set(value)
# filterSmooth = 1
# filtering = 0
# passIndex = 0
# filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
# passIndex = passIndex + 1
# filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
# passIndex = passIndex + 1
# self.iso_surface_params = {
# "maxIsosurfaceVertices": [Sdf.ValueTypeNames.Int, True, 1024 * 1024],
# "maxIsosurfaceTriangles": [Sdf.ValueTypeNames.Int, True, 2 * 1024 * 1024],
# "maxNumIsosurfaceSubgrids": [Sdf.ValueTypeNames.Int, True, 1024 * 4],
# "isosurfaceGridSpacing": [Sdf.ValueTypeNames.Float, True, 0.2],
# "isosurfaceKernelRadius": [Sdf.ValueTypeNames.Float, True, 0.5 ],
# "isosurfaceLevel": [ Sdf.ValueTypeNames.Float, True, -0.3 ],
# "isosurfaceGridFilteringFlags": [Sdf.ValueTypeNames.Int, True, filtering ],
# "isosurfaceGridSmoothingRadiusRelativeToCellSize": [Sdf.ValueTypeNames.Float, True, 0.3 ],
# "isosurfaceEnableAnisotropy": [Sdf.ValueTypeNames.Bool, True, False ],
# "isosurfaceAnisotropyMin": [ Sdf.ValueTypeNames.Float, True, 0.1 ],
# "isosurfaceAnisotropyMax": [ Sdf.ValueTypeNames.Float, True, 2.0 ],
# "isosurfaceAnisotropyRadius": [ Sdf.ValueTypeNames.Float, True, 0.5 ],
# "numIsosurfaceMeshSmoothingPasses": [ Sdf.ValueTypeNames.Int, True, 5 ],
# "numIsosurfaceMeshNormalSmoothingPasses": [ Sdf.ValueTypeNames.Int, True, 5 ],
# "isosurfaceDoNotCastShadows": [Sdf.ValueTypeNames.Bool, True, True ]
# }
# particleSystem.CreateAttribute("enableIsosurface", Sdf.ValueTypeNames.Bool, True).Set(True)
# for key,value in self.iso_surface_params.items():
# if isinstance(value, list):
# particleSystem.CreateAttribute(key, value[0], value[1]).Set(value[2])
# else:
# particleSystem.GetAttribute(key).Set(value)
# self.stage.SetInterpolationType(Usd.InterpolationTypeHeld)
def _setup_callbacks(self):
"""! callbacks registered with timeline and physics steps to drop water
"""
# callbacks
self._timeline = omni.timeline.get_timeline_interface()
stream = self._timeline.get_timeline_event_stream()
self._timeline_subscription = stream.create_subscription_to_pop(self._on_timeline_event)
# subscribe to Physics updates:
self._physics_update_subscription = omni.physx.get_physx_interface().subscribe_physics_step_events(
self.on_physics_step
)
# events = omni.physx.get_physx_interface().get_simulation_event_stream()
# self._simulation_event_sub = events.create_subscription_to_pop(self._on_simulation_event)
def _on_timeline_event(self, e):
if e.type == int(omni.timeline.TimelineEventType.STOP):
self.it = 0
self._physics_update_subscription = None
self._timeline_subscription = None
def on_physics_step(self, dt):
xformCache = UsdGeom.XformCache()
# compute location to dispense water
pose = xformCache.GetLocalToWorldTransform(self.stage.GetPrimAtPath(self.inflow_path))
pos_faucet = Gf.Vec3f(pose.ExtractTranslation())
## TODO: handle multiple faucet handles
rate = self.rate_checkers[0].compute_distance()/100.0
if rate > 1:
rate = 1
# if self.it == 0:
# iso2Prim = self.stage.GetPrimAtPath(self.particleSystemPath.pathString +"/Isosurface")
# rel = iso2Prim.CreateRelationship("material:binding", False)
# # rel.SetTargets([Sdf.Path(self.liquid_material_path)])
# rel.SetTargets([Sdf.Path("/World/game/other_Basin_1/Looks/OmniSurface_ClearWater")])
# TODO: we could keep the water running indefinitely, but then we should delete particles that are too old and not inside a container.
# this implementation stops after 200 water drops
if self.it > 200:
return
if rate < 0.1:
return
# emit a ball based on rate
rate = min(0.35, rate)
if (self.counter < 100 - rate*200 ):
self.counter = self.counter + 1
return
self.counter = 0
self.it = self.it + 1
self.create_ball(rate)
def __del__(self):
self._physics_update_subscription = None
self._timeline_subscription = None
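# Hypothetical usage (not part of the original file), mirroring the commented-out examples
# at the bottom of faucet.py in this folder: the prim paths below are assumptions and must
# already exist in the opened stage, and the constructor must run inside Omniverse Kit.
# faucet = Faucet(liquid_material_path="/World/Looks/OmniSurface_ClearWater",
#                 inflow_path="/World/faucet/inflow",
#                 link_paths=["/World/faucet/link_1/joint_0"])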
| 15,294 | Python | 40.675749 | 152 | 0.628874 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/constants.py |
from ...param import APP_VERION
from pxr import Gf
particel_scale = 2.5
if APP_VERION.startswith("2022"):
class PARTICLE_PROPERTY:
_fluidSphereDiameter = 0.24 * particel_scale
_particleSystemSchemaParameters = {
"contact_offset": 0.3 * particel_scale,
"particle_contact_offset": 0.25 * particel_scale,
"rest_offset": 0.25 * particel_scale,
"solid_rest_offset": 0,
"fluid_rest_offset": 0.5 * _fluidSphereDiameter + 0.03 * particel_scale,
"solver_position_iterations": 10,
"wind": Gf.Vec3f(0, 0, 0),
"max_velocity": 40 ,
}
_particleMaterialAttributes = {
"friction": 0.34,
"viscosity": 0.0,
"vorticity_confinement": 0.5,
"surface_tension": 0.74,
"cohesion": 0.1,
# "cfl_coefficient": 1.0,
}
_particleSystemAttributes = {
"cohesion": 0.0,
"smoothing": 0.8,
"anisotropyScale": 1.0,
"anisotropyMin": 0.2,
"anisotropyMax": 2.0,
"surfaceTension": 0.74,
"vorticityConfinement": 0.5,
"viscosity": 0.0,
"particleFriction": 0.34,
"maxVelocity": 40,
}
_particle_mass = 1e-6 * particel_scale*particel_scale
_particle_scale = (0.5, 0.5, 0.5)
_cup_rest_offset = 0.0
_cup_contact_offset = 1.0
_cup_mass = 1
_gravityMagnitude = 100
else:
class PARTICLE_PROPERTY:
_fluidSphereDiameter = 0.24 * particel_scale
_particleSystemSchemaParameters = {
"contact_offset": 0.3 * particel_scale,
"particle_contact_offset": 0.25 * particel_scale,
"rest_offset": 0.25 * particel_scale,
"solid_rest_offset": 0,
"fluid_rest_offset": 0.5 * _fluidSphereDiameter + 0.03 * particel_scale,
"solver_position_iterations": 10,
"solver_velocity_iterations": 10,
"wind": Gf.Vec3f(0, 0, 0),
}
_particleSystemAttributes = {
"cohesion": 7.4,
"smoothing": 0.8,
"anisotropyScale": 1.0,
"anisotropyMin": 0.2,
"anisotropyMax": 2.0,
"surfaceTension": 0.74,
"vorticityConfinement": 0.5,
"viscosity": 5.0,
"particleFriction": 0.34,
"maxVelocity": 40,
}
_particle_mass = 1e-6 * particel_scale
_particle_scale = (0.5, 0.5, 0.5)
_cup_rest_offset = 0.0
_cup_contact_offset = 1.0
_cup_mass = 1
_gravityMagnitude = 100
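# Illustrative lookup (not part of the original file): this is how other modules in this
# extension read these constants, e.g. new_faucet.py pulls the fluid rest offset and the
# per-particle mass from PARTICLE_PROPERTY. The helper name is hypothetical.
def _example_read_particle_constants():
    fluid_rest_offset = PARTICLE_PROPERTY._particleSystemSchemaParameters["fluid_rest_offset"]
    particle_mass = PARTICLE_PROPERTY._particle_mass
    return fluid_rest_offset, particle_mass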
| 2,780 | Python | 32.506024 | 84 | 0.496403 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/cup_data.py |
from ..param import ROOT
CUP_ROOT = ROOT + "/3dmodels/cup/"
NEW_CUP_ROOT = ROOT + "/sample/custom/Cup/"
FAUCET_INFO = {
"1028": {
"inflow_pos": [-17.4121, 4.63152, 0],
"joints":[
"link_2/joint_0",
"link_2/joint_1",
]
},
"148": {
"inflow_pos": [-17.30, 4.10 , 0],
"joints":[
"link_1/joint_0",
]
},
"149": {
"inflow_pos": [-10.80, 7.0 , 0],
"joints":[
"link_3/joint_0",
"link_3/joint_1",
]
},
"153": {
"inflow_pos": [-13.4587, 7.00 , -2.5],
"joints":[
"link_1/joint_0",
]
},
"154": {
"inflow_pos": [-7.0, 19.00 , 0.0],
"joints":[
"link_2/joint_0",
"link_2/joint_1",
]
},
"156": {
"inflow_pos": [-17.00, 6.00 , 0.0],
"joints":[
"link_1/joint_0",
]
},
"693": {
"inflow_pos": [-14.3453, -6.21179, -0.20894],
"joints":[
"link_2/joint_1",
]
},
"1034": {
"inflow_pos": [-17.967, 4.04622, 4.11386],
"joints":[
"link_1/joint_0",
]
},
"1052": {
"inflow_pos": [-14.8737, 4.21977, 1.06383],
"joints":[
"link_2/joint_0",
]
},
"1053": {
"inflow_pos": [-9.99254, 1.0, 0],
"joints":[
"link_1/joint_0",
]
}
}
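# Hypothetical lookup (not part of the original file): fetch the inflow position and joint
# list for faucet asset "148". How the rest of the extension consumes this table is not
# shown in this file, so this only illustrates the data layout.
def _example_faucet_info_lookup(faucet_id="148"):
    info = FAUCET_INFO[faucet_id]
    return info["inflow_pos"], info["joints"]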
CUP_PARTICLE_INFO = [
{
"usd_path": NEW_CUP_ROOT + "0/cup.usd",
"mesh_name": "cupShape",
#"volume_container": "cup_volume",
"cylinder_height": 15.0,
"cylinder_radius": 4.5,
"particle_offset": [0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 1.0
},
{
"usd_path": NEW_CUP_ROOT + "1/cup.usd",
"mesh_name": "cupShape",
"volume_container": "cup_volume",
"cylinder_height": 15.0,
"cylinder_radius": 4.5,
"particle_offset": [0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 1.0
},
{
"usd_path": CUP_ROOT + "bottle0.usd",
"mesh_name": "D_printable_bottle",
"cylinder_height": 15.0,
"cylinder_radius": 4.5,
"particle_offset": [2.0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 0.25
},
{
"usd_path": CUP_ROOT + "bottle1.usd",
"mesh_name": "bioshock_salts_bottle_final",
"cylinder_height": 14.0,
"cylinder_radius": 3.0,
"particle_offset": [0.0, -10, -2.7],
# "particle_offset": [0.0, 0, -5],
"cup_offset": [0, 2.1, 0],
# "cup_offset": [0, 0, 0],
"scale": 5.0
},
{
"usd_path": CUP_ROOT + "mug0.usd",
"mesh_name": "geom",
"cylinder_height": 15.0,
"cylinder_radius": 3.0,
"particle_offset": [0.0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 1.2
},
{
"usd_path": CUP_ROOT + "mug1.usd",
"mesh_name": "SM_mug_2_mesh",
"cylinder_height": 15.0,
"cylinder_radius": 3.0,
"particle_offset": [0.0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 1.2
},
{
"usd_path": CUP_ROOT + "jar0.usd",
"mesh_name": "mesh",
"cylinder_height": 18.0,
"cylinder_radius": 5.0,
"particle_offset": [0.0, 1.05, 0],
"cup_offset": [0, 0, 0],
"scale": 1.2
},
]
| 3,753 | Python | 24.026667 | 55 | 0.375966 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/__init__.py |
# from .faucet import Faucet, particle_params, iso_surface_params
| 65 | Python | 64.999935 | 65 | 0.8 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/utils.py |
import math
from pxr import Gf
import numpy as np
import copy
def point_sphere(samples, scale):
indices = [x + 0.5 for x in range(0, samples)]
phi = [math.acos(1 - 2 * x / samples) for x in indices]
theta = [math.pi * (1 + 5**0.5) * x for x in indices]
x = [math.cos(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
y = [math.sin(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
z = [math.cos(ph) * scale for ph in phi]
points = [Gf.Vec3f(x, y, z) for (x, y, z) in zip(x, y, z)]
return points
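# Minimal check (not part of the original file): sample 100 points on a sphere of radius 0.5
# with the Fibonacci-spiral layout above and verify they lie on the sphere surface.
# The sample count and tolerance are assumptions made for illustration.
def _example_point_sphere():
    pts = point_sphere(samples=100, scale=0.5)
    assert all(abs(p.GetLength() - 0.5) < 1e-4 for p in pts)
    return pts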
# generate sphere samples inside a mesh
def swapPositions(list, pos1, pos2):
list[pos1], list[pos2] = list[pos2], list[pos1]
return list
def generate_inside_mesh(lowerCenter: Gf.Vec3f, h: float, radius: float, sphereDiameter: float, mesh, scale):
# print("bounds: ", mesh.bounds)
# samples = generate_hcp_samples(Gf.Vec3f(-radius, 0, -radius), Gf.Vec3f(radius, h, radius), sphereDiameter)
min_bound = list(mesh.bounds[0])
max_bound = list(mesh.bounds[1])
min_bound = [min_bound[0], min_bound[2], min_bound[1]]
max_bound = [max_bound[0], max_bound[2], max_bound[1]]
min_bound = (item * scale for item in min_bound)
max_bound = (item * scale for item in max_bound)
samples = generate_hcp_samples(Gf.Vec3f(*min_bound), Gf.Vec3f(*max_bound), sphereDiameter*2)
finalSamples = []
import copy
import trimesh
samples_copy = copy.deepcopy(samples)
samples_copy = [ [ sample_copy[0]/scale, sample_copy[1]/scale, sample_copy[2]/scale ] for sample_copy in samples_copy ]
samples_copy = [ [ sample_copy[0], sample_copy[2], sample_copy[1] ] for sample_copy in samples_copy ]
# print("num particles: ", len(samples_copy))
print("eva contains:")
contains = mesh.contains(samples_copy)
# signed_distance = trimesh.proximity.ProximityQuery(mesh).signed_distance(samples_copy)
# contains = signed_distance >= 0
print("eva done:")
for contain, sample in zip(contains, samples):
if contain:
finalSamples.append(sample)
print("length: ", len(finalSamples) )
return finalSamples
def in_hull(p, hull):
"""
Test if points in `p` are in `hull`
`p` should be a `NxK` coordinates of `N` points in `K` dimensions
`hull` is either a scipy.spatial.Delaunay object or the `MxK` array of the
coordinates of `M` points in `K` dimensions for which the Delaunay triangulation
will be computed
"""
try:
from scipy.spatial import Delaunay
except:
import omni
omni.kit.pipapi.install("scipy")
from scipy.spatial import Delaunay
if not isinstance(hull,Delaunay):
hull = Delaunay(hull)
return hull.find_simplex(p)>=0
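# Minimal sketch (not part of the original file): query in_hull against the convex hull of a
# unit cube. The corner points and query points are assumptions made for illustration.
def _example_in_hull():
    corners = np.array([[x, y, z] for x in (0.0, 1.0) for y in (0.0, 1.0) for z in (0.0, 1.0)])
    queries = np.array([[0.5, 0.5, 0.5], [2.0, 2.0, 2.0]])
    return in_hull(queries, corners)  # expected: array([ True, False])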
def generate_inside_point_cloud(sphereDiameter, cloud_points, scale = 1):
"""
Generate sphere packs inside a point cloud
"""
offset = 2
min_x = np.min(cloud_points[:, 0]) + offset
min_y = np.min(cloud_points[:, 1]) + offset
min_z = np.min(cloud_points[:, 2]) + offset
max_x = np.max(cloud_points[:, 0])
max_y = np.max(cloud_points[:, 1])
max_z = np.max(cloud_points[:, 2])
min_bound = [min_x, min_y, min_z]
max_bound = [max_x, max_y, max_z]
min_bound = [item * scale for item in min_bound]
max_bound = [item * scale for item in max_bound]
samples = generate_hcp_samples(Gf.Vec3f(*min_bound), Gf.Vec3f(*max_bound), sphereDiameter)
samples_copy = np.array(copy.deepcopy(samples))
print("samples_copy", samples_copy.shape)
finalSamples = []
contains = in_hull(samples, cloud_points)
max_particles = 2000
for contain, sample in zip(contains, samples):
if contain and len(finalSamples) < max_particles:
finalSamples.append(sample)
print("length: ", len(finalSamples) )
return finalSamples
# generate cylinder points
def generate_cylinder_y(lowerCenter: Gf.Vec3f, h: float, radius: float, sphereDiameter: float):
samples = generate_hcp_samples(Gf.Vec3f(-radius, 0, -radius), Gf.Vec3f(radius, h, radius), sphereDiameter)
finalSamples = []
for p in samples:
r2 = p[0] * p[0] + p[2] * p[2]
if r2 <= radius * radius:
finalSamples.append(p + lowerCenter)
return finalSamples
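# Illustrative usage (not part of the original file): pack particle seed positions into a
# cylinder of height 2 and radius 1.5 below the origin, roughly matching how new_faucet.py
# calls this helper. The rest-offset value is an assumed stand-in; see constants.PARTICLE_PROPERTY.
def _example_cylinder_particles():
    fluid_rest_offset = 0.27  # assumption for illustration only
    return generate_cylinder_y(Gf.Vec3f(0, -2, 0), h=2.0, radius=1.5,
                               sphereDiameter=fluid_rest_offset * 4.0)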
# Generates hexagonal close packed samples inside an axis aligned bounding box
def generate_hcp_samples(boxMin: Gf.Vec3f, boxMax: Gf.Vec3f, sphereDiameter: float):
layerDistance = math.sqrt(2.0 / 3.0) * sphereDiameter
rowShift = math.sqrt(3.0) / 2.0 * sphereDiameter
result = []
layer1Offset = (1.0 / 3.0) * (
Gf.Vec2f(0, 0) + Gf.Vec2f(0.5 * sphereDiameter, rowShift) + Gf.Vec2f(sphereDiameter, 0)
)
zIndex = 0
while True:
z = boxMin[2] + zIndex * layerDistance
if z > boxMax[2]:
break
yOffset = layer1Offset[1] if zIndex % 2 == 1 else 0
yIndex = 0
while True:
y = boxMin[1] + yIndex * rowShift + yOffset
if y > boxMax[1]:
break
xOffset = 0
if zIndex % 2 == 1:
xOffset += layer1Offset[0]
if yIndex % 2 == 1:
xOffset -= 0.5 * sphereDiameter
elif yIndex % 2 == 1:
xOffset += 0.5 * sphereDiameter
xIndex = 0
while True:
x = boxMin[0] + xIndex * sphereDiameter + xOffset
if x > boxMax[0]:
break
result.append(Gf.Vec3f(x, y, z))
xIndex += 1
yIndex += 1
zIndex += 1
return result
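# Quick sketch (not part of the original file): hexagonal-close-pack a 1 x 1 x 1 box with
# spheres of diameter 0.2 and count how many centres fit. The numbers are illustrative only.
def _example_hcp_box():
    samples = generate_hcp_samples(Gf.Vec3f(0, 0, 0), Gf.Vec3f(1, 1, 1), 0.2)
    return len(samples)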
def get_quat_from_extrinsic_xyz_rotation(angleXrad: float = 0.0, angleYrad: float = 0.0, angleZrad: float = 0.0):
def rotate_around_axis(x, y, z, angle):
s = math.sin(0.5 * angle)
return Gf.Quatf(math.cos(0.5 * angle), s * x, s * y, s * z)
# angles are in radians
rotX = rotate_around_axis(1, 0, 0, angleXrad)
rotY = rotate_around_axis(0, 1, 0, angleYrad)
rotZ = rotate_around_axis(0, 0, 1, angleZrad)
return rotZ * rotY * rotX
| 6,155 | Python | 31.230366 | 123 | 0.59805 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/fluid_setup.py |
import carb
import math
from pxr import Usd, UsdGeom, Sdf, Gf, Vt, UsdPhysics, PhysxSchema
import omni.timeline
import omni.physxdemos as demo
from .schemaHelpers import PhysxParticleInstancePrototype, addPhysxParticleSystem
ASYNC_SIMULATION = "/persistent/physics/asyncSimRender"
def setGridFilteringPass(gridFilteringFlags: int, passIndex: int, operation: int, numRepetitions: int = 1):
numRepetitions = max(0, numRepetitions - 1)
shift = passIndex * 4
gridFilteringFlags &= ~(3 << shift)
gridFilteringFlags |= (((operation) << 2) | numRepetitions) << shift
return gridFilteringFlags
class FluidFill(demo.Base):
def __init__(self, pos = Gf.Vec3f(0 , 20, 0.0)):
self.stage = omni.usd.get_context().get_stage()
self.pos = pos
xformCache = UsdGeom.XformCache()
pose = xformCache.GetLocalToWorldTransform(self.stage.GetPrimAtPath("/World/mobility/link_0"))
pos_link = Gf.Vec3f(pose.ExtractTranslation())
self.rot_link_init = Gf.Quatf(pose.ExtractRotationQuat())
# print("attributes: ", self.stage.GetPrimAtPath("/World/faucet/link_0").GetAttributes())
self.init_orient = self.stage.GetPrimAtPath("/World/mobility/link_0").GetAttribute("xformOp:orient").Get()
def point_sphere(self, samples, scale):
indices = [x + 0.5 for x in range(0, samples)]
phi = [math.acos(1 - 2 * x / samples) for x in indices]
theta = [math.pi * (1 + 5**0.5) * x for x in indices]
x = [math.cos(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
y = [math.sin(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
z = [math.cos(ph) * scale for ph in phi]
points = [Gf.Vec3f(x, y, z) for (x, y, z) in zip(x, y, z)]
return points
def create_ball(self, stage, pos, rate = 1):
# create sphere on points
# print("scale: ", rate)
points = self.point_sphere( 10+int(90 * rate), 1)
# points = self.point_sphere( int(80 * rate), 1)
# basePos = Gf.Vec3f(11.0, 12.0, 35.0) + pos
basePos = pos
positions = [Gf.Vec3f(x) + Gf.Vec3f(basePos) for x in points]
radius = 0.1
# particleSpacing = 2.0 * radius * 0.6
particleSpacing = 2.0 * radius * 0.6
positions_list = positions
velocities_list = [Gf.Vec3f(0.0, 0.0, 0.0)] * len(positions)
protoIndices_list = [0] * len(positions)
protoIndices = Vt.IntArray(protoIndices_list)
positions = Vt.Vec3fArray(positions_list)
velocities = Vt.Vec3fArray(velocities_list)
particleInstanceStr = "/particlesInstance" + str(self.it)
particleInstancePath = Sdf.Path(particleInstanceStr)
# Create point instancer
pointInstancer = UsdGeom.PointInstancer.Define(stage, particleInstancePath)
prototypeRel = pointInstancer.GetPrototypesRel()
# Create particle instance prototypes
particlePrototype = PhysxParticleInstancePrototype()
particlePrototype.selfCollision = True
particlePrototype.fluid = True
particlePrototype.collisionGroup = 0
particlePrototype.mass = 0.001
prototypePath = particleInstancePath.pathString + "/particlePrototype"
sphere = UsdGeom.Sphere.Define(stage, Sdf.Path(prototypePath))
spherePrim = sphere.GetPrim()
sphere.GetRadiusAttr().Set(particleSpacing)
# color_rgb = [0.0, 0.08, 0.30]
# color = Vt.Vec3fArray([Gf.Vec3f(color_rgb[0], color_rgb[1], color_rgb[2])])
# sphere.CreateDisplayColorAttr(color)
spherePrim = sphere.GetPrim()
spherePrim.GetAttribute('visibility').Set('invisible')
# spherePrim.GetVisibilityAttr().Set(False)
spherePrim.CreateAttribute("enableAnisotropy", Sdf.ValueTypeNames.Bool, True).Set(True)
particleInstanceApi = PhysxSchema.PhysxParticleAPI.Apply(spherePrim)
particleInstanceApi.CreateSelfCollisionAttr().Set(particlePrototype.selfCollision)
particleInstanceApi.CreateFluidAttr().Set(particlePrototype.fluid)
particleInstanceApi.CreateParticleGroupAttr().Set(particlePrototype.collisionGroup)
particleInstanceApi.CreateMassAttr().Set(particlePrototype.mass)
# Reference simulation owner using PhysxPhysicsAPI
physicsApi = PhysxSchema.PhysxPhysicsAPI.Apply(spherePrim)
physicsApi.CreateSimulationOwnerRel().SetTargets([self.particleSystemPath])
# add prototype references to point instancer
prototypeRel.AddTarget(Sdf.Path(prototypePath))
# Set active particle indices
activeIndices = []
for i in range(len(positions)):
activeIndices.append(protoIndices[i])
orientations = [Gf.Quath(1.0, Gf.Vec3h(0.0, 0.0, 0.0))] * len(positions)
angular_velocities = [Gf.Vec3f(0.0, 0.0, 0.0)] * len(positions)
pointInstancer.GetProtoIndicesAttr().Set(activeIndices)
pointInstancer.GetPositionsAttr().Set(positions)
pointInstancer.GetOrientationsAttr().Set(orientations)
pointInstancer.GetVelocitiesAttr().Set(velocities)
pointInstancer.GetAngularVelocitiesAttr().Set(angular_velocities)
def create(self, stage):
self._setup_callbacks()
self.stage = stage
self.it = 0
self.counter = 10
# set up axis to z
UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
UsdGeom.SetStageMetersPerUnit(stage, 0.01)
# light
# sphereLight = UsdLux.SphereLight.Define(stage, Sdf.Path("/SphereLight"))
# sphereLight.CreateRadiusAttr(150)
# sphereLight.CreateIntensityAttr(30000)
# sphereLight.AddTranslateOp().Set(Gf.Vec3f(650.0, 0.0, 1150.0))
# Physics scene
scenePath = Sdf.Path("/physicsScene")
scene = UsdPhysics.Scene.Define(stage, scenePath)
scene.CreateGravityDirectionAttr().Set(Gf.Vec3f(0.0, -1.0, 0.0))
scene.CreateGravityMagnitudeAttr().Set(9.81)
# Particle System
particleSystemPath = Sdf.Path("/particleSystem0")
self.particleSystemPath = particleSystemPath
particleSpacing = 0.2
restOffset = particleSpacing * 0.9
solidRestOffset = restOffset
fluidRestOffset = restOffset * 0.6
particleContactOffset = max(solidRestOffset + 0.001, fluidRestOffset / 0.6)
contactOffset = restOffset + 0.001
addPhysxParticleSystem(
stage,
particleSystemPath,
contactOffset,
restOffset,
particleContactOffset,
solidRestOffset,
fluidRestOffset,
4,
1,
Gf.Vec3f(0, 0, 0),
scenePath
)
particleSystem = stage.GetPrimAtPath(particleSystemPath)
# particle system settings
particleSystem.GetAttribute("cohesion").Set(0.002)
particleSystem.GetAttribute("smoothing").Set(0.8)
particleSystem.GetAttribute("anisotropyScale").Set(1.0)
particleSystem.GetAttribute("anisotropyMin").Set(0.2)
particleSystem.GetAttribute("anisotropyMax").Set(2.0)
particleSystem.GetAttribute("viscosity").Set(0.0091)
particleSystem.GetAttribute("surfaceTension").Set(0.0074)
particleSystem.GetAttribute("particleFriction").Set(0.1)
particleSystem.CreateAttribute("maxParticleNeighborhood", Sdf.ValueTypeNames.Int, True).Set(64)
particleSystem.GetAttribute("maxParticles").Set(20000)
# apply isoSurface params
particleSystem.CreateAttribute("enableIsosurface", Sdf.ValueTypeNames.Bool, True).Set(True)
particleSystem.CreateAttribute("maxIsosurfaceVertices", Sdf.ValueTypeNames.Int, True).Set(1024 * 1024)
particleSystem.CreateAttribute("maxIsosurfaceTriangles", Sdf.ValueTypeNames.Int, True).Set(2 * 1024 * 1024)
particleSystem.CreateAttribute("maxNumIsosurfaceSubgrids", Sdf.ValueTypeNames.Int, True).Set(1024 * 4)
particleSystem.CreateAttribute("isosurfaceGridSpacing", Sdf.ValueTypeNames.Float, True).Set(0.2)
filterSmooth = 1
filtering = 0
passIndex = 0
filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
passIndex = passIndex + 1
filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
passIndex = passIndex + 1
particleSystem.CreateAttribute("isosurfaceKernelRadius", Sdf.ValueTypeNames.Float, True).Set(0.5)
particleSystem.CreateAttribute("isosurfaceLevel", Sdf.ValueTypeNames.Float, True).Set(-0.3)
particleSystem.CreateAttribute("isosurfaceGridFilteringFlags", Sdf.ValueTypeNames.Int, True).Set(filtering)
particleSystem.CreateAttribute(
"isosurfaceGridSmoothingRadiusRelativeToCellSize", Sdf.ValueTypeNames.Float, True
).Set(0.3)
particleSystem.CreateAttribute("isosurfaceEnableAnisotropy", Sdf.ValueTypeNames.Bool, True).Set(False)
particleSystem.CreateAttribute("isosurfaceAnisotropyMin", Sdf.ValueTypeNames.Float, True).Set(0.1)
particleSystem.CreateAttribute("isosurfaceAnisotropyMax", Sdf.ValueTypeNames.Float, True).Set(2.0)
particleSystem.CreateAttribute("isosurfaceAnisotropyRadius", Sdf.ValueTypeNames.Float, True).Set(0.5)
particleSystem.CreateAttribute("numIsosurfaceMeshSmoothingPasses", Sdf.ValueTypeNames.Int, True).Set(5)
particleSystem.CreateAttribute("numIsosurfaceMeshNormalSmoothingPasses", Sdf.ValueTypeNames.Int, True).Set(5)
particleSystem.CreateAttribute("isosurfaceDoNotCastShadows", Sdf.ValueTypeNames.Bool, True).Set(True)
stage.SetInterpolationType(Usd.InterpolationTypeHeld)
def _setup_callbacks(self):
# callbacks
self._timeline = omni.timeline.get_timeline_interface()
stream = self._timeline.get_timeline_event_stream()
self._timeline_subscription = stream.create_subscription_to_pop(self._on_timeline_event)
# subscribe to Physics updates:
self._physics_update_subscription = omni.physx.get_physx_interface().subscribe_physics_step_events(
self.on_physics_step
)
def _on_timeline_event(self, e):
if e.type == int(omni.timeline.TimelineEventType.STOP):
self.it = 0
self.on_shutdown()
def step(self):
self.on_physics_step(None)
def on_physics_step(self, dt):
# import transforms3d
import math
xformCache = UsdGeom.XformCache()
# stop after 80 balls
# if (self.it > 80):
# return
pose = xformCache.GetLocalToWorldTransform(self.stage.GetPrimAtPath("/World/faucet/inflow"))
pos_faucet = Gf.Vec3f(pose.ExtractTranslation())
rot_faucet = Gf.Quatf(pose.ExtractRotationQuat())
pose = xformCache.GetLocalToWorldTransform(self.stage.GetPrimAtPath("/World/faucet/link_0"))
pos_link = Gf.Vec3f(pose.ExtractTranslation())
rot_link = Gf.Quatf(pose.ExtractRotationQuat())
diff = rot_link * self.rot_link_init.GetInverse()
real = diff.GetReal()
img = [diff.GetImaginary()[0],diff.GetImaginary()[1], diff.GetImaginary()[2] ]
#angle = transforms3d.euler.quat2euler([real, img[0], img[1], img[2]], axes='sxyz')
#sum_angle = abs(math.degrees(angle[0])) + abs(math.degrees(angle[1])) + abs(math.degrees(angle[2]))
rate = 1 #(sum_angle/30.0)
# print("pre rate:", rate)
if rate > 1:
rate = 1
# print("rate: ", rate)
# print("sum_angle", sum_angle)
if self.it == 0:
iso2Prim = self.stage.GetPrimAtPath("/particleSystem0/Isosurface")
rel = iso2Prim.CreateRelationship("material:binding", False)
rel.SetTargets([Sdf.Path("/World/Looks/OmniSurface_ClearWater")])
# rel.SetTargets([Sdf.Path("/World/Looks/OmniSurface_OrangeJuice")])
if self.it > 200:
return
# emit a ball roughly every 20 physics steps
if (self.counter < 20 - rate):
self.counter = self.counter + 1
return
self.counter = 0
self.it = self.it + 1
# print(faucet_prim.GetAttribute('xformOp:translate'))
# openness = 0.6 + 0.5 * rate
# print("openess", openness)
if rate < 0.1:
return
self.create_ball(self.stage, pos_faucet, rate)
def on_shutdown(self):
self._physics_update_subscription = None
self._timeline_subscription = None
# restore settings
# isregistry = carb.settings.acquire_settings_interface()
# isregistry.set_bool(ASYNC_SIMULATION, self._async_simulation)
def on_startup(self):
isregistry = carb.settings.acquire_settings_interface()
self._async_simulation = carb.settings.get_settings().get_as_bool(ASYNC_SIMULATION)
isregistry.set_bool(ASYNC_SIMULATION, True)
isregistry.set_int("persistent/simulation/minFrameRate", 60)
from omni.physx import acquire_physx_interface
physx = acquire_physx_interface()
physx.overwrite_gpu_setting(1)
physx.reset_simulation()
fluid_fill = FluidFill()
stage = omni.usd.get_context().get_stage()
fluid_fill.create(stage)
_timeline = omni.timeline.get_timeline_interface()
stream = _timeline.get_timeline_event_stream()
def _on_timeline_event(e):
if e.type == int(omni.timeline.TimelineEventType.STOP):
fluid_fill.on_shutdown()
_timeline_subscription = stream.create_subscription_to_pop(_on_timeline_event)
# for i in range(10):
# fluid_fill.step()
| 13,709 | Python | 39.56213 | 117 | 0.6584 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/schemaHelpers.py |
from pxr import Usd, UsdGeom, Sdf, Gf, Vt, PhysxSchema
class PhysxParticleInstancePrototype:
def __init__(self, mass=0.0, phase=0):
self.mass = mass
self.phase = phase
def addPhysxParticleSystem(
stage,
particle_system_path,
contact_offset,
rest_offset,
particle_contact_offset,
solid_rest_offset,
fluid_rest_offset,
solver_position_iterations,
solver_velocity_iterations,
wind,
scenePath,
):
particle_system = PhysxSchema.PhysxParticleSystem.Define(stage, particle_system_path)
if particle_system:
particle_system.CreateContactOffsetAttr().Set(contact_offset)
particle_system.CreateRestOffsetAttr().Set(rest_offset)
particle_system.CreateParticleContactOffsetAttr().Set(particle_contact_offset)
particle_system.CreateSolidRestOffsetAttr().Set(solid_rest_offset)
particle_system.CreateFluidRestOffsetAttr().Set(fluid_rest_offset)
particle_system.CreateSolverPositionIterationCountAttr().Set(solver_position_iterations)
particle_system.CreateSolverVelocityIterationCountAttr().Set(solver_velocity_iterations)
particle_system.CreateWindAttr().Set(wind)
# Reference simulation owner using PhysxPhysicsAPI
physics_api = PhysxSchema.PhysxPhysicsAPI.Apply(particle_system.GetPrim())
physics_api.CreateSimulationOwnerRel().SetTargets([scenePath])
return particle_system
else:
return None
def addPhysxParticlesSimple(stage, path, prototypes, prototype_indices, positions, velocities, particle_system_path):
prototype_base_path = path.pathString + "/particlePrototype"
# Create point instancer
shape_list = UsdGeom.PointInstancer.Define(stage, path)
mesh_list = shape_list.GetPrototypesRel()
# Create particle instance prototypes
for i in range(len(prototypes)):
prototype_path = prototype_base_path + str(i)
geom_sphere = UsdGeom.Sphere.Define(stage, Sdf.Path(prototype_path))
particle_instance_api = PhysxSchema.PhysxParticleAPI.Apply(geom_sphere.GetPrim())
particle_instance_api.CreateSelfCollisionAttr().Set(prototypes[i].selfCollision)
particle_instance_api.CreateFluidAttr().Set(prototypes[i].fluid)
particle_instance_api.CreateParticleGroupAttr().Set(prototypes[i].collisionGroup)
particle_instance_api.CreateMassAttr().Set(prototypes[i].mass)
# Reference simulation owner using PhysxPhysicsAPI
physics_api = PhysxSchema.PhysxPhysicsAPI.Apply(geom_sphere.GetPrim())
physics_api.CreateSimulationOwnerRel().SetTargets([particle_system_path])
# add mesh references to point instancer
mesh_list.AddTarget(Sdf.Path(prototype_path))
# Set particle instance data
mesh_indices = []
for i in range(len(positions)):
mesh_indices.append(prototype_indices[i])
orientations = [Gf.Quath(1.0, Gf.Vec3h(0.0, 0.0, 0.0))] * len(positions)
angular_velocities = [Gf.Vec3f(0.0, 0.0, 0.0)] * len(positions)
shape_list.GetProtoIndicesAttr().Set(mesh_indices)
shape_list.GetPositionsAttr().Set(positions)
shape_list.GetOrientationsAttr().Set(orientations)
shape_list.GetVelocitiesAttr().Set(velocities)
shape_list.GetAngularVelocitiesAttr().Set(angular_velocities)
def addPhysxClothWithConstraints(
stage,
path,
positions,
normals,
rest_positions,
velocities,
inv_masses,
triangle_indices,
spring_connections,
spring_stiffnesses,
spring_dampings,
spring_rest_lengths,
self_collision,
self_collision_filter,
inv_gravity,
particle_group,
particle_system_path,
):
mesh = UsdGeom.Mesh.Define(stage, path)
prim = mesh.GetPrim()
mesh.CreateDoubleSidedAttr().Set(True)
vertex_count_attr = mesh.CreateFaceVertexCountsAttr()
vertex_indices_attr = mesh.CreateFaceVertexIndicesAttr()
norm_attr = mesh.CreateNormalsAttr()
point_attr = mesh.CreatePointsAttr()
# Triangle array's vertex count per face is always 3
vertex_count = 3
array_size = int(len(triangle_indices) / 3)
index_array = Vt.IntArray(array_size, vertex_count)
vertex_count_attr.Set(index_array)
vertex_indices_attr.Set(triangle_indices)
norm_attr.Set(normals)
point_attr.Set(positions)
cloth_api = PhysxSchema.PhysxClothAPI.Apply(prim)
cloth_api.CreateSelfCollisionAttr().Set(self_collision)
cloth_api.CreateSelfCollisionFilterAttr().Set(self_collision_filter)
cloth_api.CreateParticleGroupAttr().Set(particle_group)
# Reference simulation owner using PhysxPhysicsAPI
physics_api = PhysxSchema.PhysxPhysicsAPI.Apply(prim)
physics_api.CreateSimulationOwnerRel().SetTargets([particle_system_path])
# Custom attributes
prim.CreateAttribute("invGravity", Sdf.ValueTypeNames.Bool).Set(inv_gravity)
prim.CreateAttribute("springConnections", Sdf.ValueTypeNames.Int2Array).Set(spring_connections)
prim.CreateAttribute("springStiffnesses", Sdf.ValueTypeNames.FloatArray).Set(spring_stiffnesses)
prim.CreateAttribute("springDampings", Sdf.ValueTypeNames.FloatArray).Set(spring_dampings)
prim.CreateAttribute("springRestLengths", Sdf.ValueTypeNames.FloatArray).Set(spring_rest_lengths)
prim.CreateAttribute("restPositions", Sdf.ValueTypeNames.Point3fArray).Set(rest_positions)
prim.CreateAttribute("velocities", Sdf.ValueTypeNames.Point3fArray).Set(velocities)
prim.CreateAttribute("inverseMasses", Sdf.ValueTypeNames.FloatArray).Set(inv_masses)
def addPhysxCloth(
stage,
path,
dynamic_mesh_path,
initial_velocity,
initial_mass,
stretch_stiffness,
bend_stiffness,
shear_stiffness,
self_collision,
self_collision_filter,
inv_gravity,
particle_group,
particle_system_path,
):
mesh = UsdGeom.Mesh.Define(stage, path)
prim = mesh.GetPrim()
if dynamic_mesh_path:
prim.GetReferences().AddReference(dynamic_mesh_path)
cloth_api = PhysxSchema.PhysxClothAPI.Apply(prim)
cloth_api.CreateDefaultParticleVelocityAttr().Set(initial_velocity)
cloth_api.CreateDefaultParticleMassAttr().Set(initial_mass)
cloth_api.CreateStretchStiffnessAttr().Set(stretch_stiffness)
cloth_api.CreateBendStiffnessAttr().Set(bend_stiffness)
cloth_api.CreateShearStiffnessAttr().Set(shear_stiffness)
cloth_api.CreateSelfCollisionAttr().Set(self_collision)
cloth_api.CreateSelfCollisionFilterAttr().Set(self_collision_filter)
cloth_api.CreateParticleGroupAttr().Set(particle_group)
# Reference simulation owner using PhysxPhysicsAPI
physics_api = PhysxSchema.PhysxPhysicsAPI.Apply(prim)
physics_api.CreateSimulationOwnerRel().SetTargets([particle_system_path])
# Custom attributes
prim.CreateAttribute("invGravity", Sdf.ValueTypeNames.Bool).Set(inv_gravity)
def applyInflatableApi(stage, path, pressure):
prim = stage.GetPrimAtPath(path)
# TODO: Add more checks here
if prim.IsValid():
inflatable_api = PhysxSchema.PhysxInflatableAPI.Apply(prim)
inflatable_api.CreatePressureAttr().Set(pressure)
def _get_rigid_attachments(stage, prim: Usd.Prim):
attachments = []
rigidAttachmentRel = prim.CreateRelationship("physxRigidAttachments")
for attachment_path in rigidAttachmentRel.GetTargets():
attachment = PhysxSchema.PhysxRigidAttachment.Get(stage, attachment_path)
if attachment:
attachments.append(attachment)
return attachments
# def _get_rigid_attachment_target(attachment: PhysxSchema.PhysxRigidAttachment):
# targets = attachment.GetRigidRel().GetTargets()
# if len(targets) <= 0:
# return Sdf.Path()
# else:
# return targets[0]
# def _create_rigid_attachment(
# stage, attachment_path: Sdf.Path, rigidbody_path: Sdf.Path, deformable_path: Sdf.Path
# ) -> PhysxSchema.PhysxRigidAttachment:
# attachment = PhysxSchema.PhysxRigidAttachment.Define(stage, attachment_path)
# attachment.GetRigidRel().SetTargets([rigidbody_path])
# attachment.GetDeformableRel().SetTargets([deformable_path])
# return attachment
# def add_deformable_to_rigid_body_attachment(
# stage, target_attachment_path: Sdf.Path, deformable_path: Sdf.Path, rigid_path: Sdf.Path
# ):
# deformable_prim = stage.GetPrimAtPath(deformable_path)
# softbody_xformable = UsdGeom.Xformable(deformable_prim)
# rigidbody_prim = stage.GetPrimAtPath(rigid_path)
# rigidbody_xformable = UsdGeom.Xformable(rigidbody_prim)
# attachments = _get_rigid_attachments(stage, deformable_prim)
# if any(_get_rigid_attachment_target(attachment) == rigid_path for attachment in attachments):
# return False
# # Create new attachment
# attachment = _create_rigid_attachment(stage, target_attachment_path, rigid_path, deformable_path)
# attachment_prim = attachment.GetPrim()
# attachment_prim.CreateAttribute("physxEnableHaloParticleFiltering", Sdf.ValueTypeNames.Bool).Set(True)
# attachment_prim.CreateAttribute("physxEnableVolumeParticleAttachments", Sdf.ValueTypeNames.Bool).Set(True)
# attachment_prim.CreateAttribute("physxEnableSurfaceTetraAttachments", Sdf.ValueTypeNames.Bool).Set(False)
# sb_bound = softbody_xformable.ComputeLocalBound(
# Usd.TimeCode.Default(), purpose1=softbody_xformable.GetPurposeAttr().Get()
# )
# sb_size = sb_bound.ComputeAlignedBox().GetSize()
# avg_dim = (sb_size[0] + sb_size[1] + sb_size[2]) / 3.0
# default_rad = avg_dim * 0.05
# attachment_prim.CreateAttribute("physxHaloParticleFilteringRadius", Sdf.ValueTypeNames.Float).Set(default_rad * 4)
# attachment_prim.CreateAttribute("physxVolumeParticleAttachmentRadius", Sdf.ValueTypeNames.Float).Set(default_rad)
# attachment_prim.CreateAttribute("physxSurfaceSamplingRadius", Sdf.ValueTypeNames.Float).Set(default_rad)
# # Update soft body relationship
# attachments.append(attachment)
# attachment_paths = [attachment.GetPath() for attachment in attachments]
# deformable_prim.CreateRelationship("physxRigidAttachments").SetTargets(attachment_paths)
# # Store the global xforms
# globalPose = rigidbody_xformable.ComputeLocalToWorldTransform(Usd.TimeCode.Default())
# attachment_prim.CreateAttribute("physxRigidBodyXform", Sdf.ValueTypeNames.Matrix4d).Set(globalPose)
# globalPose = softbody_xformable.ComputeLocalToWorldTransform(Usd.TimeCode.Default())
# attachment_prim.CreateAttribute("physxDeformableXform", Sdf.ValueTypeNames.Matrix4d).Set(globalPose)
| 10,557 | Python | 38.103704 | 120 | 0.735531 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/layout/fluid/faucet.py |
import carb
import math
from pathlib import Path
from pxr import Usd, UsdLux, UsdGeom, Sdf, Gf, Vt, UsdPhysics, PhysxSchema
import sys
# put schemaHelpers.py on the import path
from omni.kitchen.asset.layout.fluid.schemaHelpers import PhysxParticleInstancePrototype, addPhysxParticleSystem
import omni.timeline
from typing import List
from omni.kitchen.asset.task_check.newJointCheck import JointCheck
import math
ASYNC_SIMULATION = "/persistent/physics/asyncSimRender"
def setGridFilteringPass(gridFilteringFlags: int, passIndex: int, operation: int, numRepetitions: int = 1):
numRepetitions = max(0, numRepetitions - 1)
shift = passIndex * 4
gridFilteringFlags &= ~(3 << shift)
gridFilteringFlags |= (((operation) << 2) | numRepetitions) << shift
return gridFilteringFlags
def norm(a):
square_sum = 0
for item in a:
square_sum += item * item
return math.sqrt(square_sum)
# https://math.stackexchange.com/questions/2346982/slerp-inverse-given-3-quaternions-find-t
def quarternion_slerp_inverse(q0, q1, q):
q1_inv = q1.GetInverse()
q0_inv = q0.GetInverse()
q_inv = q.GetInverse()
tmp_1 = (q0_inv * q).GetNormalized()
real = tmp_1.GetReal()
img = [ tmp_1.GetImaginary()[0], tmp_1.GetImaginary()[1], tmp_1.GetImaginary()[2] ]
# print("1: ", real)
# print("term 1 cos: ", math.acos(real))
term21 = [ math.acos(real) / norm(img) * item for item in img]
log_tmp1 = [0, term21[0], term21[1], term21[2]]
tmp_2 = (q0_inv * q1).GetNormalized()
real = tmp_2.GetReal()
img = [ tmp_2.GetImaginary()[0], tmp_2.GetImaginary()[1], tmp_2.GetImaginary()[2] ]
# print("2: ", real)
# print("term 2 cos: ", math.acos(real))
term22 = [ math.acos(real) / norm(img) * item for item in img ]
log_tmp2 = [0, term22[0], term22[1], term22[2]]
rates = []
if abs(term21[0]) < 0.0001 and abs(term22[0]) < 0.0001:
rates.append(None)
else:
t1 = (term21[0] / term22[0])
rates.append(t1)
if abs(term21[1]) < 0.0001 and abs(term22[1]) < 0.0001:
rates.append(None)
else:
t2 = (term21[1] / term22[1])
rates.append(t2)
if abs(term21[2]) < 0.0001 and abs(term22[2]) < 0.0001:
rates.append(None)
else:
t3 = (term21[2] / term22[2])
rates.append(t3)
# print("rates pre: ", rates)
rates = list(filter(lambda x: x is not None, rates))
# print("rates post: ", rates)
# length = len(rates)
# for i in range(length):
# for j in range(i+1, length):
# if not abs(rates[i] - rates[j]) <= 0.001:
# raise Exception("not the same")
# print("rates: ", rates)
return max(rates)
# https://math.stackexchange.com/questions/167827/compute-angle-between-quaternions-in-matlab
def rotation_diff(q0, q1):
z = q0.GetNormalized() * q1.GetNormalized().GetConjugate()
z_real = abs(z.GetReal())
if z_real > 1:
z_real = 1
elif z_real < -1:
z_real = -1
angle = math.acos(abs(z_real)) * 2
return math.degrees(angle)
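# Small sketch (not part of the original file): angular difference between the identity and a
# 90-degree rotation about the Y axis, using the helper above. Values are illustrative only.
def _example_rotation_diff():
    q_identity = Gf.Quatf(1.0, Gf.Vec3f(0.0, 0.0, 0.0))
    q_90_about_y = Gf.Quatf(math.cos(math.pi / 4.0), Gf.Vec3f(0.0, math.sin(math.pi / 4.0), 0.0))
    return rotation_diff(q_identity, q_90_about_y)  # approximately 90.0 degrees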
class Faucet():
def __init__(self, particle_params = None, iso_surface_params = None,
liquid_material_path = "/World/Looks/OmniSurface_ClearWater", inflow_path:str = "/World/faucet/inflow",
link_paths:List[str] = ["/World/faucet/link_0"]
):
"""! Faucet class
@param particle_params : parameters for particles
@param iso_surface_params: parameters for iso_surface
@param liquid_material_path: path to the liquid material prim
@param inflow_path: used to compute the location of water drops
@param link_paths: used to compute the rotation of the faucet handle and determine the speed and size of water drops
@return an instance of Faucet class
"""
# particle Instance path
self.particleInstanceStr_tmp = "/particlesInstance"
self.particle_params = particle_params
self.iso_surface_params = iso_surface_params
self.liquid_material_path = liquid_material_path
#Not sure if the isregistry thing works
isregistry = carb.settings.acquire_settings_interface()
self._async_simulation = carb.settings.get_settings().get_as_bool(ASYNC_SIMULATION)
isregistry.set_bool(ASYNC_SIMULATION, True)
isregistry.set_int("persistent/simulation/minFrameRate", 30)
self.stage = omni.usd.get_context().get_stage()
self.inflow_path = inflow_path
self.link_paths = link_paths
self.list_of_point_instancers = []
self.active_indexes_for_point_instancers = []
self.rate_checkers = []
for link in link_paths:
path = Path(link)
self.rate_checkers.append(JointCheck( str(path.parent), str(path.name) ))
self.create()
def point_sphere(self, samples, scale):
"""! create locations for each particles
@param samples: the number of particles per sphere
@param scale: the scale(radius) of the water drop
"""
indices = [x + 0.5 for x in range(0, samples)]
phi = [math.acos(1 - 2 * x / samples) for x in indices]
theta = [math.pi * (1 + 5**0.5) * x for x in indices]
x = [math.cos(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
y = [math.sin(th) * math.sin(ph) * scale for (th, ph) in zip(theta, phi)]
z = [math.cos(ph) * scale for ph in phi]
points = [Gf.Vec3f(x, y, z) for (x, y, z) in zip(x, y, z)]
return points
def create_ball(self, pos, rate = 1):
"""! create a water drop
@param pos: the center of the water drop
@param rate: the number of particles for each water drop
"""
# create sphere on points
points = self.point_sphere( 10+int(90 * rate), 1)
# basePos = Gf.Vec3f(11.0, 12.0, 35.0) + pos
basePos = pos
positions = [Gf.Vec3f(x) + Gf.Vec3f(basePos) for x in points]
radius = 0.2
# particleSpacing = 2.0 * radius * 0.6
particleSpacing = 2.0 * radius * 0.6
positions_list = positions
velocities_list = [Gf.Vec3f(0.0, 0.0, 0.0)] * len(positions)
protoIndices_list = [0] * len(positions)
protoIndices = Vt.IntArray(protoIndices_list)
positions = Vt.Vec3fArray(positions_list)
velocities = Vt.Vec3fArray(velocities_list)
# particleInstanceStr = "/particlesInstance" + str(self.it)
particleInstanceStr = omni.usd.get_stage_next_free_path(self.stage, self.particleInstanceStr_tmp, False)
particleInstancePath = Sdf.Path(particleInstanceStr)
# Create point instancer
pointInstancer = UsdGeom.PointInstancer.Define(self.stage, particleInstancePath)
prototypeRel = pointInstancer.GetPrototypesRel()
# Create particle instance prototypes
particlePrototype = PhysxParticleInstancePrototype()
particlePrototype.selfCollision = True
particlePrototype.fluid = True
particlePrototype.collisionGroup = 0
particlePrototype.mass = 0.5e-5
prototypePath = particleInstancePath.pathString + "/particlePrototype"
sphere = UsdGeom.Sphere.Define(self.stage, Sdf.Path(prototypePath))
spherePrim = sphere.GetPrim()
sphere.GetRadiusAttr().Set(particleSpacing)
spherePrim = sphere.GetPrim()
spherePrim.GetAttribute('visibility').Set('invisible')
spherePrim.CreateAttribute("enableAnisotropy", Sdf.ValueTypeNames.Bool, True).Set(True)
particleInstanceApi = PhysxSchema.PhysxParticleAPI.Apply(spherePrim)
particleInstanceApi.CreateSelfCollisionAttr().Set(particlePrototype.selfCollision)
particleInstanceApi.CreateFluidAttr().Set(particlePrototype.fluid)
particleInstanceApi.CreateParticleGroupAttr().Set(particlePrototype.collisionGroup)
particleInstanceApi.CreateMassAttr().Set(particlePrototype.mass)
# Reference simulation owner using PhysxPhysicsAPI
physicsApi = PhysxSchema.PhysxPhysicsAPI.Apply(spherePrim)
physicsApi.CreateSimulationOwnerRel().SetTargets([self.particleSystemPath])
# add prototype references to point instancer
prototypeRel.AddTarget(Sdf.Path(prototypePath))
# Set active particle indices
activeIndices = []
for i in range(len(positions)):
activeIndices.append(protoIndices[i])
orientations = [Gf.Quath(1.0, Gf.Vec3h(0.0, 0.0, 0.0))] * len(positions)
angular_velocities = [Gf.Vec3f(0.0, 0.0, 0.0)] * len(positions)
pointInstancer.GetProtoIndicesAttr().Set(activeIndices)
pointInstancer.GetPositionsAttr().Set(positions)
pointInstancer.GetOrientationsAttr().Set(orientations)
pointInstancer.GetVelocitiesAttr().Set(velocities)
pointInstancer.GetAngularVelocitiesAttr().Set(angular_velocities)
self.list_of_point_instancers.append(pointInstancer)
self.active_indexes_for_point_instancers.append(activeIndices)
def create(self):
"""! initialize the related parameters for faucet
create physics scenes
create particle systems
create isosurface
"""
self._setup_callbacks()
self.it = 0
self.counter = 10
# Physics scene
scenePath = Sdf.Path("/physicsScene")
# Particle System
self.particleSystemPath = omni.usd.get_stage_next_free_path(self.stage, "/particleSystem", False)
# particleSystemPath = Sdf.Path("/particleSystem0")
self.particleSystemPath = self.particleSystemPath
_fluidSphereDiameter = 0.24
_solverPositionIterations = 10
_solverVelocityIterations = 1
_particleSystemSchemaParameters = {
"contact_offset": 0.3,
"particle_contact_offset": 0.25,
"rest_offset": 0.25,
"solid_rest_offset": 0,
"fluid_rest_offset": 0.5 * _fluidSphereDiameter + 0.03,
"solver_position_iterations": _solverPositionIterations,
"solver_velocity_iterations": _solverVelocityIterations,
"wind": Gf.Vec3f(0, 0, 0),
}
addPhysxParticleSystem(
self.stage,
self.particleSystemPath,
**_particleSystemSchemaParameters,
scenePath = scenePath
)
particleSystem = self.stage.GetPrimAtPath(self.particleSystemPath)
# particle system settings
if self.particle_params is not None:
for key,value in self.particle_params.items():
if isinstance(value, list):
particleSystem.CreateAttribute(key, value[0], value[1]).Set(value[2])
else:
particleSystem.GetAttribute(key).Set(value)
# apply isoSurface params
if self.iso_surface_params is not None:
particleSystem.CreateAttribute("enableIsosurface", Sdf.ValueTypeNames.Bool, True).Set(True)
for key,value in self.iso_surface_params.items():
if isinstance(value, list):
particleSystem.CreateAttribute(key, value[0], value[1]).Set(value[2])
else:
particleSystem.GetAttribute(key).Set(value)
self.stage.SetInterpolationType(Usd.InterpolationTypeHeld)
def _setup_callbacks(self):
"""! callbacks registered with timeline and physics steps to drop water
"""
# callbacks
self._timeline = omni.timeline.get_timeline_interface()
stream = self._timeline.get_timeline_event_stream()
self._timeline_subscription = stream.create_subscription_to_pop(self._on_timeline_event)
# subscribe to Physics updates:
self._physics_update_subscription = omni.physx.get_physx_interface().subscribe_physics_step_events(
self.on_physics_step
)
# events = omni.physx.get_physx_interface().get_simulation_event_stream()
# self._simulation_event_sub = events.create_subscription_to_pop(self._on_simulation_event)
def _on_timeline_event(self, e):
if e.type == int(omni.timeline.TimelineEventType.STOP):
self.it = 0
self._physics_update_subscription = None
self._timeline_subscription = None
def on_physics_step(self, dt):
xformCache = UsdGeom.XformCache()
# compute location to dispense water
pose = xformCache.GetLocalToWorldTransform(self.stage.GetPrimAtPath(self.inflow_path))
pos_faucet = Gf.Vec3f(pose.ExtractTranslation())
## TODO: handle multiple faucet handles
rate = self.rate_checkers[0].compute_distance()/100.0
if rate > 1:
rate = 1
if self.it == 0:
iso2Prim = self.stage.GetPrimAtPath(self.particleSystemPath+"/Isosurface")
rel = iso2Prim.CreateRelationship("material:binding", False)
# rel.SetTargets([Sdf.Path(self.liquid_material_path)])
# rel.SetTargets([Sdf.Path("/World/Looks/OmniSurface_OrangeJuice")])
# TODO: we could keep the water running indefinitely, but then we should delete particles that are too old and not inside a container.
# this implementation stops after 300 water drops
if self.it > 300:
return
if rate < 0.1:
return
# emit a ball based on rate
if (self.counter < 20 - rate):
self.counter = self.counter + 1
return
self.counter = 0
self.it = self.it + 1
self.create_ball( pos_faucet, rate)
def __del__(self):
self._physics_update_subscription = None
self._timeline_subscription = None
#TODO not sure if this works
isregistry = carb.settings.acquire_settings_interface()
isregistry.set_bool(ASYNC_SIMULATION, self._async_simulation)
# if __name__ == '__main__':
from omni.physx import acquire_physx_interface
physx = acquire_physx_interface()
physx.overwrite_gpu_setting(1)
physx.reset_simulation()
particle_params = {
"cohesion": 0.02,
"smoothing": 0.8,
"anisotropyScale": 1.0,
"anisotropyMin": 0.2,
"anisotropyMax": 2.0,
"viscosity": 0.0091,
"surfaceTension": 0.0074,
"particleFriction": 0.1,
"maxParticleNeighborhood": [ Sdf.ValueTypeNames.Int, True, 64],
"maxParticles": 20000
}
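# Note: list-valued entries in particle_params and iso_surface_params follow the convention
# [Sdf value type, custom-attribute flag, value] and are created on the particle system via
# CreateAttribute; plain scalar entries are Set on attributes that already exist
# (see the corresponding parameter loops in the Faucet class above).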
filterSmooth = 1
filtering = 0
passIndex = 0
filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
passIndex = passIndex + 1
filtering = setGridFilteringPass(filtering, passIndex, filterSmooth)
passIndex = passIndex + 1
iso_surface_params = {
"maxIsosurfaceVertices": [Sdf.ValueTypeNames.Int, True, 1024 * 1024],
"maxIsosurfaceTriangles": [Sdf.ValueTypeNames.Int, True, 2 * 1024 * 1024],
"maxNumIsosurfaceSubgrids": [Sdf.ValueTypeNames.Int, True, 1024 * 4],
"isosurfaceGridSpacing": [Sdf.ValueTypeNames.Float, True, 0.2],
"isosurfaceKernelRadius": [Sdf.ValueTypeNames.Float, True, 0.5 ],
"isosurfaceLevel": [ Sdf.ValueTypeNames.Float, True, -0.3 ],
"isosurfaceGridFilteringFlags": [Sdf.ValueTypeNames.Int, True, filtering ],
"isosurfaceGridSmoothingRadiusRelativeToCellSize": [Sdf.ValueTypeNames.Float, True, 0.3 ],
"isosurfaceEnableAnisotropy": [Sdf.ValueTypeNames.Bool, True, False ],
"isosurfaceAnisotropyMin": [ Sdf.ValueTypeNames.Float, True, 0.1 ],
"isosurfaceAnisotropyMax": [ Sdf.ValueTypeNames.Float, True, 2.0 ],
"isosurfaceAnisotropyRadius": [ Sdf.ValueTypeNames.Float, True, 0.5 ],
"numIsosurfaceMeshSmoothingPasses": [ Sdf.ValueTypeNames.Int, True, 5 ],
"numIsosurfaceMeshNormalSmoothingPasses": [ Sdf.ValueTypeNames.Int, True, 5 ],
"isosurfaceDoNotCastShadows": [Sdf.ValueTypeNames.Bool, True, True ]
}
# fluid_fill = Faucet(particle_params=particle_params, iso_surface_params=iso_surface_params,
# liquid_material_path = "/World/Looks/OmniSurface_ClearWater",
# inflow_path = "/World/faucet/inflow",
# link_paths = ["/World/faucet/link_1/joint_0"])
# fluid_fill = Faucet(particle_params=particle_params, iso_surface_params=iso_surface_params,
# liquid_material_path = "/World/Looks/OmniSurface_ClearWater",
# inflow_path = "/World/mobility/inflow",
# link_paths = ["/World/mobility/link_1/joint_0"])
| 16,733 | Python | 38.006993 | 125 | 0.633419 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/robot_setup/numpy_utils.py | import numpy as np
def orientation_error(desired, current):
cc = quat_conjugate(current)
q_r = quat_mul(desired, cc)
return q_r[:, 0:3] * np.sign(q_r[:, 3])[:, None]
def quat_mul(a, b):
assert a.shape == b.shape
shape = a.shape
a = a.reshape(-1, 4)
b = b.reshape(-1, 4)
x1, y1, z1, w1 = a[:, 0], a[:, 1], a[:, 2], a[:, 3]
x2, y2, z2, w2 = b[:, 0], b[:, 1], b[:, 2], b[:, 3]
ww = (z1 + x1) * (x2 + y2)
yy = (w1 - y1) * (w2 + z2)
zz = (w1 + y1) * (w2 - z2)
xx = ww + yy + zz
qq = 0.5 * (xx + (z1 - x1) * (x2 - y2))
w = qq - ww + (z1 - y1) * (y2 - z2)
x = qq - xx + (x1 + w1) * (x2 + w2)
y = qq - yy + (w1 - x1) * (y2 + z2)
z = qq - zz + (z1 + y1) * (w2 - x2)
quat = np.stack([x, y, z, w], axis=-1).reshape(shape)
return quat
def normalize(x, eps: float = 1e-9):
return x / np.clip(np.linalg.norm(x, axis=-1), a_min=eps, a_max=None)[:, None]
def quat_unit(a):
return normalize(a)
def quat_from_angle_axis(angle, axis):
theta = (angle / 2)[:, None]
xyz = normalize(axis) * np.sin(theta)
w = np.cos(theta)
return quat_unit(np.concatenate([xyz, w], axis=-1))
def quat_rotate(q, v):
shape = q.shape
q_w = q[:, -1]
q_vec = q[:, :3]
a = v * (2.0 * q_w ** 2 - 1.0)[:, None]
b = np.cross(q_vec, v) * q_w[:, None] * 2.0
c = q_vec * np.sum(q_vec * v, axis=1).reshape(shape[0], -1) * 2.0
return a + b + c
def quat_conjugate(a):
shape = a.shape
a = a.reshape(-1, 4)
return np.concatenate((-a[:, :3], a[:, -1:]), axis=-1).reshape(shape)
def quat_axis(q, axis=0):
basis_vec = np.zeros((q.shape[0], 3))
basis_vec[:, axis] = 1
return quat_rotate(q, basis_vec)
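# Minimal self-check of the quaternion helpers above (illustrative only, not used by the extension);
# quaternions follow the XYZW layout assumed throughout this module.
if __name__ == "__main__":
    identity = np.array([[0.0, 0.0, 0.0, 1.0]])
    # a 90-degree yaw about the z axis
    q = quat_from_angle_axis(np.array([np.pi / 2]), np.array([[0.0, 0.0, 1.0]]))
    # multiplying by the identity quaternion leaves the rotation unchanged
    assert np.allclose(quat_mul(q, identity), q)
    # rotating the x basis vector by a 90-degree yaw maps it onto the y axis
    assert np.allclose(quat_rotate(q, np.array([[1.0, 0.0, 0.0]])), np.array([[0.0, 1.0, 0.0]]), atol=1e-6)
    # the orientation error between a quaternion and itself is zero
    assert np.allclose(orientation_error(q, q), np.zeros((1, 3)), atol=1e-6)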
| 1,714 | Python | 24.984848 | 82 | 0.491832 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/robot_setup/controller.py | # controller
import carb
class Controller():
w = False
s = False
a = False
d = False
q = False
e = False
up = False
down = False
left = False
right = False
# Controller.scale = 0.1
left_control = False
def __init__(self) -> None:
self.user_control = 0.25
self.network_control = 0.25
Controller.reset_movement()
@classmethod
def reset_movement(cls):
Controller.w = False
Controller.s = False
Controller.a = False
Controller.d = False
Controller.q = False
Controller.e = False
Controller.up = False
Controller.down = False
Controller.left = False
Controller.right = False
# Controller.left_control = False
def handle_keyboard_event(self, event):
if (
event.type == carb.input.KeyboardEventType.KEY_PRESS
or event.type == carb.input.KeyboardEventType.KEY_REPEAT
):
# print("event input", event.input)
if event.input == carb.input.KeyboardInput.W:
Controller.w = True
if event.input == carb.input.KeyboardInput.S:
Controller.s = True
if event.input == carb.input.KeyboardInput.A:
Controller.a = True
if event.input == carb.input.KeyboardInput.D:
Controller.d = True
if event.input == carb.input.KeyboardInput.Q:
Controller.q = True
if event.input == carb.input.KeyboardInput.E:
Controller.e = True
if event.input == carb.input.KeyboardInput.UP:
Controller.up = True
if event.input == carb.input.KeyboardInput.DOWN:
Controller.down = True
if event.input == carb.input.KeyboardInput.LEFT:
Controller.left = True
if event.input == carb.input.KeyboardInput.RIGHT:
Controller.right = True
if event.input == carb.input.KeyboardInput.LEFT_CONTROL:
Controller.left_control = True
if event.type == carb.input.KeyboardEventType.KEY_RELEASE:
# print("event release", event.input)
if event.input == carb.input.KeyboardInput.W:
Controller.w = False
if event.input == carb.input.KeyboardInput.S:
Controller.s = False
if event.input == carb.input.KeyboardInput.A:
Controller.a = False
if event.input == carb.input.KeyboardInput.D:
Controller.d = False
if event.input == carb.input.KeyboardInput.Q:
Controller.q = False
if event.input == carb.input.KeyboardInput.E:
Controller.e = False
if event.input == carb.input.KeyboardInput.UP:
Controller.up = False
if event.input == carb.input.KeyboardInput.DOWN:
Controller.down = False
if event.input == carb.input.KeyboardInput.LEFT:
Controller.left = False
if event.input == carb.input.KeyboardInput.RIGHT:
Controller.right = False
if event.input == carb.input.KeyboardInput.LEFT_CONTROL:
Controller.left_control = False
def PoolUserControl(self):
return self.user_control
def PoolNetworkControl(self):
return 0.1 if Controller.w else 0.25
def QueryMove(self):
move = [0, 0, 0]
if Controller.w:
move[0] += 1
if Controller.s:
move[0] -= 1
if Controller.a:
move[1] += 1
if Controller.d:
move[1] -= 1
if Controller.q:
move[2] -= 1
if Controller.e:
move[2] += 1
return move
def QueryRotation(self):
rotation = [0, 0]
if Controller.up:
rotation[0] += 1
if Controller.down:
rotation[0] -= 1
if Controller.left:
rotation[1] += 1
if Controller.right:
rotation[1] -= 1
return rotation
def QueryGripper(self):
if not Controller.left_control:
return 1 # open
else:
return -1 # close | 4,343 | Python | 29.591549 | 68 | 0.534423 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/robot_setup/franka_tensor.py | from numpy.lib.index_tricks import fill_diagonal
import omni
import carb
import types
import numpy as np
import importlib
import os
import shutil
from ..param import IS_IN_CREAT, IS_IN_ISAAC_SIM, APP_VERION, SAVE_ROOT
from .controller import Controller
from .numpy_utils import orientation_error
from pxr import Usd, UsdGeom, Gf
class FrankaTensor():
def __init__(self, save_path, build_HUD = True):
"""
Franka tensor controller
::params:
save_path: path to save the recordings
build_HUD: build UI
"""
carb.log_info("Franks Tensor started (only in Create/Isaac-Sim >= 2022.1.0)")
self._is_stopped = True
self._tensor_started = False
self._tensor_api = None
self._flatcache_was_enabled = True
self._tensorapi_was_enabled = True
# stage
self.stage = omni.usd.get_context().get_stage()
self.franka_prim = self.stage.GetPrimAtPath("/World/game/franka")
# property
self.is_replay = False
self.is_record = False
# counting and index
self.count_down = 80
self.button_status = 0
self.npz_index = 0
self.is_start = True
# setup subscriptions:
self._setup_callbacks()
self._enable_tensor_api()
# task info
self.save_path = save_path
self.record_lines = []
# controller
self.controller = Controller()
def _enable_tensor_api(self):
manager = omni.kit.app.get_app().get_extension_manager()
self._tensorapi_was_enabled = manager.is_extension_enabled("omni.physx.tensors")
if not self._tensorapi_was_enabled:
manager.set_extension_enabled_immediate("omni.physx.tensors", True)
self._tensor_api = importlib.import_module("omni.physics.tensors")
# "PRIVATE" METHODS #
def _can_callback_physics_step(self) -> bool:
if self._is_stopped:
return False
if self._tensor_started or self._tensor_api is None:
return True
self._tensor_started = True
self.on_tensor_start(self._tensor_api)
return True
def on_tensor_start(self, tensorApi: types.ModuleType):
"""
This method is called when
1. the tensor API is enabled, and
2. when the simulation data is ready for the user to setup views using the tensor API.
"""
# if IS_IN_CREAT and APP_VERION >= "2022.1.1":
sim = tensorApi.create_simulation_view("numpy")
sim.set_subspace_roots("/World/game/*")
# franka view
self.frankas = sim.create_articulation_view("/World/game/franka")
self.franka_indices = np.arange(self.frankas.count, dtype=np.int32)
# !!!
# self.default_dof_pos = np.array([0.0, 0.0, 0.0, -0.95, 0.0, 1.12, 0.0, 0.02, 0.02])
self.default_dof_pos = np.array([1.2024134e-02, -5.6960440e-01, 7.3155526e-05, -2.8114836e+00,
-4.8544933e-03, 3.0270250e+00, 7.2893953e-01, 3.9919264e+00, 4.0000000e+00])
# set default dof pos:
init_dof_pos = np.stack(1 * [np.array(self.default_dof_pos, dtype=np.float32)])
self.frankas.set_dof_position_targets(init_dof_pos, self.franka_indices)
self.last_gripper_action = 1 # open as default
# end effector view
self.hands = sim.create_rigid_body_view("/World/game/franka/panda_hand")
# get initial hand transforms
# init_hand_transforms = self.hands.get_transforms().copy()
# self.hand_pos = init_hand_transforms[:, :3]
# self.hand_rot = init_hand_transforms[:, 3:]
# target
# self.target_pos = self.default_dof_pos[None, :]
# self.target_hand_transform = init_hand_transforms
def _setup_callbacks(self):
stream = omni.timeline.get_timeline_interface().get_timeline_event_stream()
self._timeline_sub = stream.create_subscription_to_pop(self._on_timeline_event)
# subscribe to Physics updates:
self._physics_update_sub = omni.physx.get_physx_interface().subscribe_physics_step_events(self._on_physics_step)
events = omni.physx.get_physx_interface().get_simulation_event_stream_v2()
self._simulation_event_subscription = events.create_subscription_to_pop(self.on_simulation_event)
# subscribute to keyboard
self._appwindow = omni.appwindow.get_default_app_window()
self._input = carb.input.acquire_input_interface()
self._keyboard = self._appwindow.get_keyboard()
self._sub_keyboard = self._input.subscribe_to_keyboard_events(self._keyboard, self._sub_keyboard_event)
def _sub_keyboard_event(self, event, *args, **kwargs):
self.controller.handle_keyboard_event(event)
def _on_timeline_event(self, e):
if e.type == int(omni.timeline.TimelineEventType.STOP):
self._is_stopped = True
self._tensor_started = False
# !!!
self._timeline_sub = None
self._simulation_event_subscription = None
self._physics_update_sub = None
self._input.unsubscribe_to_keyboard_events(self._keyboard, self._sub_keyboard)
if e.type == int(omni.timeline.TimelineEventType.PLAY):
self._is_stopped = False
# call user implementation
# self.on_timeline_event(e)
def _on_physics_step(self, dt):
if not self._can_callback_physics_step():
return
# call user implementation
self.on_physics_step(dt)
def on_simulation_event(self, e):
"""
This method is called on simulation events. See omni.physx.bindings._physx.SimulationEvent.
"""
pass
def on_physics_step(self, dt):
"""
This method is called on each physics step callback, and the first callback is issued
after the on_tensor_start method is called if the tensor API is enabled.
"""
self.count_down -= 1
# self.dof_pos = self.frankas.get_dof_positions()
# print("dof_pos", self.dof_pos)
# playing
if not self.is_replay:
if self.count_down == 0:
                self.count_down = 6 # TODO: unify count_down in play and replay
if self.is_record:
current_dof_pos = self.frankas.get_dof_positions()
with open(os.path.join(self.save_path, 'record.csv'), 'a') as f:
f.write(",".join(list([str(e) for e in current_dof_pos[0]] + [str(self.last_gripper_action)])) + '\n')
# get movement from keyboard
move_vec = self.controller.QueryMove()
query_move = move_vec != [0, 0, 0]
# get rotation from keyboard
rotation_vec = self.controller.QueryRotation()
query_rotation = rotation_vec != [0, 0]
# get gripper
gripper_val = self.controller.QueryGripper()
query_gripper = self.last_gripper_action != gripper_val
# get end effector transforms
hand_transforms = self.hands.get_transforms().copy()
current_hand_pos, current_hand_rot = hand_transforms[:, :3], hand_transforms[:, 3:]
# update record
if query_move or query_rotation or query_gripper or self.is_start:
self.hand_pos = current_hand_pos
self.hand_rot = current_hand_rot
self.last_gripper_action = gripper_val
self.is_start = False
# print("current_dof_pos", self.frankas.get_dof_positions())
# # if no input
# if not query_move and not query_rotation and not query_gripper:
# return
# get franka xform mat # FIXME: time code?
mat = UsdGeom.Xformable(self.franka_prim).ComputeLocalToWorldTransform(Usd.TimeCode.Default())
move_vec_4d = Gf.Vec4d(move_vec[0], move_vec[1], move_vec[2], 0)
hand_move = move_vec_4d * mat
hand_move_np = np.array([[hand_move[0], hand_move[1], hand_move[2]]])
target_pos = self.hand_pos + hand_move_np
target_rot = self.hand_rot
dof_target = self.move_to_target(target_pos, target_rot)
if query_rotation:
dof_target[...,5] += rotation_vec[0] * 0.1 # slowly but surely
dof_target[...,6] += rotation_vec[1] * 0.2
# print("last_gripper_action", self.last_gripper_action)
dof_target[...,[-2, -1]] = 5 if self.last_gripper_action > 0 else -1
self.frankas.set_dof_position_targets(dof_target, np.arange(1))
# replaying
else: # self.is_replay:
if self.count_down == 0:
self.count_down = 4
# pause when record not exist
if len(self.record_lines) == 0:
omni.timeline.get_timeline_interface().pause()
return
# load joint
record_line = self.record_lines.pop(0)
self.target_pos = np.array([record_line[:-1]])
self.last_gripper_action = record_line[-1]
# load discreet gripper
self.target_pos[...,[-2, -1]] = 5 if self.last_gripper_action > 0 else -1
# print("target_pos", self.target_pos)
self.frankas.set_dof_position_targets(self.target_pos, self.franka_indices)
def load_record(self):
if not os.path.exists(os.path.join(self.save_path, 'record.csv')):
carb.log_error( "please start & record first")
return
with open(os.path.join(self.save_path, 'record.csv'), 'r') as f:
for line in f.readlines():
self.record_lines.append([float(e) for e in line.split(",")])
######################################### robot control #########################################
def move_to_target(self, goal_pos, goal_rot):
"""
Move hand to target points
"""
# get end effector transforms
hand_transforms = self.hands.get_transforms().copy()
hand_pos, hand_rot = hand_transforms[:, :3], hand_transforms[:, 3:]
#hand_rot = hand_rot[:,[1,2,3,0]] # WXYZ
# get franka DOF states
dof_pos = self.frankas.get_dof_positions()
# compute position and orientation error
pos_err = goal_pos - hand_pos
orn_err = orientation_error(goal_rot, hand_rot)
dpose = np.concatenate([pos_err, orn_err], -1)[:, None].transpose(0, 2, 1)
jacobians = self.frankas.get_jacobians()
# jacobian entries corresponding to franka hand
franka_hand_index = 8 # !!!
j_eef = jacobians[:, franka_hand_index - 1, :]
# solve damped least squares
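        # damped least-squares (Levenberg-Marquardt style) IK step:
        # u = J^T (J J^T + lambda I)^(-1) dpose maps the 6D pose error onto the 9 joint deltas,
        # with the damping term keeping the solve well conditioned near singular configurations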
j_eef_T = np.transpose(j_eef, (0, 2, 1))
d = 0.05 # damping term
lmbda = np.eye(6) * (d ** 2)
u = (j_eef_T @ np.linalg.inv(j_eef @ j_eef_T + lmbda) @ dpose).reshape(1, 9)
# update position targets
pos_targets = dof_pos + u # * 0.3
return pos_targets | 11,463 | Python | 36.342019 | 126 | 0.565297 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/task_check/__init__.py | from .base_checker import BaseChecker
# from .grasp_checker import GraspChecker
# from .joint_checker import JointChecker
# from .orient_checker import OrientChecker
# from .container_checker import ContainerChecker
# from .water_checker import WaterChecker
# from .tap_water_checker import TapWaterChecker | 307 | Python | 42.999994 | 49 | 0.820847 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/vrkitchen/indoor/kit/task_check/base_checker.py | from lib2to3.pgen2.token import BACKQUOTE
import os
import json
from pxr import PhysxSchema, UsdPhysics
# task completion checking
import pxr
import omni
import carb
from omni.physx.scripts import physicsUtils
from ..param import DATA_PATH_NEW
from ..layout.randomizer import Randomizer
class BaseChecker():
SUCCESS_UI = None
IS_REPLAY = False
def __init__(self, task_type, task_id, robot_id, mission_id, annotator="Steven", run_time = True) -> None:
"""
::params:
:run_time: is run-time task checker or not
"""
# property
self.task_type = task_type
self.task_id = str(task_id)
self.mission_id = str(mission_id)
self.robot_id = str(robot_id)
self.data_path = DATA_PATH_NEW
self.annotator = annotator
# keep the old mission identifier temporarily
self.old_mission_identifier = self.task_type + " " + self.task_id + " " + self.robot_id + " " + self.mission_id
self.mission_identifier_prefix = self.task_type + " " + self.task_id + " "#+ self.robot_id + " " + self.mission_id
self.mission_identifier_suffix = self.mission_id
# scene
self.stage = omni.usd.get_context().get_stage()
self.default_prim_path_str = self.stage.GetDefaultPrim().GetPath().pathString
self.timeline = omni.timeline.get_timeline_interface()
self.current_mission = self.register_mission()
self.success_steps = 0
self.success = False
self.time = 0
# tasks
if run_time:
self.create_task_callback()
# log
self.total_step = 0
self.print_every = 240
self.checking_interval = 15
# get time per second
physicsScenePath = "/World/physicsScene"
scene = UsdPhysics.Scene.Get(self.stage, physicsScenePath)
if not scene:
carb.log_warn("physics scene not found")
physxSceneAPI = PhysxSchema.PhysxSceneAPI.Apply(scene.GetPrim())
self.steps_per_second = physxSceneAPI.GetTimeStepsPerSecondAttr().Get()
def register_mission(self):
"""
Register mission
"""
task_folder = os.path.join(self.data_path, self.annotator, "task", self.task_type, str(self.task_id))
if not os.path.exists(task_folder):
            carb.log_warn(f"Task folder does not exist at {task_folder}")
            raise FileNotFoundError(f"Task folder does not exist at {task_folder}")
self.mission_file_path = os.path.join(task_folder, "missions.json")
if os.path.exists(self.mission_file_path):
self.missions = json.load(open(self.mission_file_path))
carb.log_info(f"Loading missions.json at path {self.mission_file_path}")
else:
self.missions = {}
with open(self.mission_file_path, "w") as f:
json.dump(self.missions, f, indent = 4)
carb.log_info(f"Saving missions.json at path {self.mission_file_path}")
        for key in self.missions:
            if key.startswith(self.mission_identifier_prefix) and key.endswith(self.mission_identifier_suffix):
                return self.missions[key]
        return {}
def get_diff(self):
raise NotImplementedError
def create_task_callback(self):
stream = self.timeline.get_timeline_event_stream()
self._timeline_subscription = stream.create_subscription_to_pop(self._on_timeline_event)
# subscribe to Physics updates:
self._physics_update_subscription = omni.physx.get_physx_interface().subscribe_physics_step_events(
self._on_physics_step
)
def _on_timeline_event(self, e):
"""
set up timeline event
"""
if e.type == int(omni.timeline.TimelineEventType.STOP):
self.it = 0
self.time = 0
self.reset()
def reset(self):
"""
Reset event
"""
self._physics_update_subscription = None
self._timeline_subscription = None
# self._setup_callbacks()
def _on_success_hold(self):
try:
if (self.success_steps - 1) % 240 == 0:
carb.log_info("hold on")
BaseChecker.SUCCESS_UI.model.set_value("hold on")
except:
pass
def _on_success(self):
carb.log_info("task sucess")
self.success = True
try:
BaseChecker.SUCCESS_UI.model.set_value("task sucess")
if self.timeline.is_playing() and not BaseChecker.IS_REPLAY:
self.timeline.pause()
except:
pass
def _on_not_success(self):
# carb.log_info("task not sucess")
self.success_steps = 0
self.success = False
try:
BaseChecker.SUCCESS_UI.model.set_value("")
except:
pass
def _on_physics_step(self, dt):
"""
Physics event
"""
# print("timestep: ", self.time)
if self.time == 0:
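            # on the very first physics step, collect every generated isosurface mesh
            # and bind the water material obtained from the Randomizer to it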
stage = omni.usd.get_context().get_stage()
prim_list = list(stage.TraverseAll())
prim_list = [ item for item in prim_list if 'Isosurface' in item.GetPath().pathString and item.GetTypeName() == 'Mesh' ]
from pxr import Sdf
water_path = Randomizer(None, 1).get_water_material()
for iso2Prim in prim_list:
# omni.kit.commands.execute(
# "CreateAndBindMdlMaterialFromLibrary",
# mdl_name='/media/nikepupu/fast/omni_lib/lib_path/isaac_sim-2021.2.1/kit/mdl/core/Base/OmniSurfacePresets.mdl',
# mtl_name='OmniSurface_ClearWater',
# mtl_created_list=None,
# )
# water_path = '/World/Looks/OmniSurface_ClearWater'
rel = iso2Prim.CreateRelationship("material:binding", False)
rel.SetTargets([Sdf.Path(water_path)])
# Randomizer.get_water_material(iso2Prim)
self.time += 1
self.start_checking()
def start_checking(self):
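        # declare success only after the success counter has exceeded two seconds' worth of physics steps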
if self.success_steps > self.steps_per_second * 2:
self._on_success()
def save_mission(self):
"""
save mission
"""
self.missions[self.old_mission_identifier] = self.current_mission
with open(self.mission_file_path, "w") as f:
json.dump(self.missions, f, indent = 4)
carb.log_info(f"Saving missions.json at path {self.mission_file_path}")
| 6,634 | Python | 35.059782 | 132 | 0.572053 |
yizhouzhao/VRKitchen2.0-IndoorKit/exts/vrkitchen.indoor.kit/config/extension.toml | [package]
# Semantic Versionning is used: https://semver.org/
version = "1.0.0"
# The title and description fields are primarily for displaying extension info in UI
title = "Indoorkit"
description="The python extension with omniverse for robotic tasks"
# preview
preview_image = "icons/preview.png"
icon = "icons/logo_large.png"
# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"
# URL of the extension source repository.
repository = ""
# One of categories for UI.
category = "Example"
# Keywords for the extension
keywords = ["kit", "example", "robotics", "machine learning"]
# Use omni.ui to build simple UI
[dependencies]
"omni.kit.uiapp" = {}
"omni.syntheticdata" = {}
# Main python module this extension provides, it will be publicly available as "import omni.hello.world".
[[python.module]]
name = "vrkitchen.indoor.kit"
| 886 | TOML | 25.088235 | 105 | 0.733634 |
yizhouzhao/VRKitchen2.0-IndoorKit/data/readme.md | # data folder | 13 | Markdown | 12.999987 | 13 | 0.769231 |
isaac-orbit/orbit.ext_template/pyproject.toml | # This section defines the build system requirements
[build-system]
requires = ["setuptools >= 61.0"]
build-backend = "setuptools.build_meta"
# Project metadata
[project]
version = "0.1.0"
name = "ext_template" # TODO
description = "Extension Template for Orbit" # TODO
keywords = ["extension", "template", "orbit"] # TODO
readme = "README.md"
requires-python = ">=3.10"
license = {file = "LICENSE.txt"}
classifiers = [
"Programming Language :: Python :: 3",
]
authors = [
{name = "Nico Burger", email = "[email protected]"}, # TODO
]
maintainers = [
{name = "Nico Burger", email = "[email protected]"}, # TODO
]
# Tool dependent subtables
[tool.setuptools]
py-modules = [
'orbit'
] # TODO, add modules required for your extension
| 767 | TOML | 22.999999 | 71 | 0.67927 |
isaac-orbit/orbit.ext_template/README.md | # Extension Template for Orbit
[](https://docs.omniverse.nvidia.com/isaacsim/latest/overview.html)
[](https://isaac-orbit.github.io/orbit/)
[](https://docs.python.org/3/whatsnew/3.10.html)
[](https://releases.ubuntu.com/20.04/)
[](https://pre-commit.com/)
## Overview
This repository serves as a template for building projects or extensions based on Orbit. It allows you to develop in an isolated environment, outside of the core Orbit repository. Furthermore, this template serves three use cases:
- **Python Package**
  Can be installed into Isaac Sim's Python environment, making it suitable for users who want to integrate their extension into `Orbit` as a python package.
- **Project Template**
Ensures access to `Isaac Sim` and `Orbit` functionalities, which can be used as a project template.
- **Omniverse Extension**
Can be used as an Omniverse extension, ideal for projects that leverage the Omniverse platform's graphical user interface.
**Key Features:**
- `Isolation` Work outside the core Orbit repository, ensuring that your development efforts remain self-contained.
- `Flexibility` This template is set up to allow your code to be run as an extension in Omniverse.
**Keywords:** extension, template, orbit
### License
The source code is released under a [BSD 3-Clause license](https://opensource.org/licenses/BSD-3-Clause).
**Author: The ORBIT Project Developers<br />
Affiliation: [The AI Institute](https://theaiinstitute.com/)<br />
Maintainer: Nico Burger, [email protected]**
## Setup
Depending on the use case defined [above](#overview), follow the instructions to set up your extension template. Start with the [Basic Setup](#basic-setup), which is required for either use case.
### Basic Setup
#### Dependencies
This template depends on Isaac Sim and Orbit. For detailed instructions on how to install these dependencies, please refer to the [installation guide](https://isaac-orbit.github.io/orbit/source/setup/installation.html).
- [Isaac Sim](https://docs.omniverse.nvidia.com/isaacsim/latest/index.html)
- [Orbit](https://isaac-orbit.github.io/orbit/)
#### Configuration
Decide on a name for your project or extension. This guide will refer to this name as `<your_extension_name>`.
- Create a new repository based off this template [here](https://github.com/new?owner=isaac-orbit&template_name=orbit.ext_template&template_owner=isaac-orbit). Name your forked repository using the following convention: `"orbit.<your_extension_name>"`.
- Clone your forked repository to a location **outside** the orbit repository.
```bash
git clone <your_repository_url>
```
- Configure the template. Search for and replace **`TODO`**'s according to your extension's needs within the following files:
- `config/extension.toml`
- `pyproject.toml`
- Rename your source folder.
```bash
cd orbit.<your_extension_name>
mv orbit/ext_template orbit/<your_extension_name>
```
- Set up a symbolic link from Orbit to this directory.
This makes it convenient to index the python modules and look for extensions shipped with Isaac Sim and Orbit.
```bash
ln -s <your_orbit_path> _orbit
```
#### Environment (Optional)
For clarity, we will be using the `${ISAACSIM_PATH}/python.sh` command to call the Orbit specific python interpreter. However, you might be working from within a virtual environment, allowing you to use the `python` command directly, instead of `${ISAACSIM_PATH}/python.sh`. Information on setting up a virtual environment for Orbit can be found [here](https://isaac-orbit.github.io/orbit/source/setup/installation.html#setting-up-the-environment). The `ISAACSIM_PATH` should already be set from installing Orbit, see [here](https://isaac-orbit.github.io/orbit/source/setup/installation.html#configuring-the-environment-variables).
#### Configure Python Interpreter
In the provided configuration, we set the default Python interpreter to use the Python executable provided by Omniverse. This is specified in the `.vscode/settings.json` file:
```json
"python.defaultInterpreterPath": "${env:ISAACSIM_PATH}/python.sh"
```
This setup requires you to have set up the `ISAACSIM_PATH` environment variable. If you want to use a different Python interpreter, you need to change the Python interpreter used by selecting and activating the Python interpreter of your choice in the bottom left corner of VSCode, or opening the command palette (`Ctrl+Shift+P`) and selecting `Python: Select Interpreter`.
#### Set up IDE
To setup the IDE, please follow these instructions:
1. Open the `orbit.<your_extension_template>` directory on Visual Studio Code IDE
2. Run VSCode Tasks, by pressing Ctrl+Shift+P, selecting Tasks: Run Task and running the setup_python_env in the drop down menu.
If everything executes correctly, it should create a file .python.env in the .vscode directory. The file contains the python paths to all the extensions provided by Isaac Sim and Omniverse. This helps in indexing all the python modules for intelligent suggestions while writing code.
### Setup as Python Package / Project Template
From within this repository, install your extension as a Python package into the Python environment bundled with Isaac Sim.
```bash
${ISAACSIM_PATH}/python.sh -m pip install --upgrade pip
${ISAACSIM_PATH}/python.sh -m pip install -e .
```
### Setup as Omniverse Extension
To enable your extension, follow these steps:
1. **Add the search path of your repository** to the extension manager:
- Navigate to the extension manager using `Window` -> `Extensions`.
- Click on the **Hamburger Icon** (☰), then go to `Settings`.
- In the `Extension Search Paths`, enter the path that goes up to your repository's location without actually including the repository's own directory. For example, if your repository is located at `/home/code/orbit.ext_template`, you should add `/home/code` as the search path.
- If not already present, in the `Extension Search Paths`, enter the path that leads to your local Orbit directory. For example: `/home/orbit/source/extensions`
- Click on the **Hamburger Icon** (☰), then click `Refresh`.
2. **Search and enable your extension**:
- Find your extension under the `Third Party` category.
- Toggle it to enable your extension.
## Usage
### Python Package
Import your python package within `Isaac Sim` and `Orbit` using:
```python
import orbit.<your_extension_name>
```
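As a minimal sketch (not part of the template itself), once the package is installed the example tasks it registers can be created from a standalone script. Note that the simulation app must be launched before any Orbit task modules are imported, exactly as done in `scripts/rsl_rl/train.py`; the task id below is the example one shipped with this template and should be replaced by your own:
```python
from omni.isaac.orbit.app import AppLauncher
# launch the simulator before importing any Orbit task modules
app_launcher = AppLauncher(headless=True)
simulation_app = app_launcher.app
import gymnasium as gym
import orbit.ext_template.tasks  # noqa: F401  (side effect: registers the template's example tasks)
from omni.isaac.orbit_tasks.utils import parse_env_cfg
env_cfg = parse_env_cfg("Isaac-Velocity-Flat-Anymal-D-Template-Play-v0", num_envs=16)
env = gym.make("Isaac-Velocity-Flat-Anymal-D-Template-Play-v0", cfg=env_cfg)
env.close()
simulation_app.close()
```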
### Project Template
We provide an example for training and playing a policy for ANYmal on flat terrain. Install [RSL_RL](https://github.com/leggedrobotics/rsl_rl) outside of the orbit repository, e.g. `home/code/rsl_rl`.
```bash
git clone https://github.com/leggedrobotics/rsl_rl.git
cd rsl_rl
${ISAACSIM_PATH}/python.sh -m pip install -e .
```
Train a policy.
```bash
cd <path_to_your_extension>
${ISAACSIM_PATH}/python.sh scripts/rsl_rl/train.py --task Isaac-Velocity-Flat-Anymal-D-Template-v0 --num_envs 4096 --headless
```
Play the trained policy.
```bash
${ISAACSIM_PATH}/python.sh scripts/rsl_rl/play.py --task Isaac-Velocity-Flat-Anymal-D-Template-Play-v0 --num_envs 16
```
### Omniverse Extension
We provide an example UI extension that will load upon enabling your extension defined in `orbit/ext_template/ui_extension_example.py`. For more information on UI extensions, enable and check out the source code of the `omni.isaac.ui_template` extension and refer to the introduction on [Isaac Sim Workflows 1.2.3. GUI](https://docs.omniverse.nvidia.com/isaacsim/latest/introductory_tutorials/tutorial_intro_workflows.html#gui).
## Pre-Commit
Pre-committing involves using a framework to automate the process of enforcing code quality standards before code is actually committed to a version control system, like Git. This process involves setting up hooks that run automated checks, such as code formatting, linting (checking for programming errors, bugs, stylistic errors, and suspicious constructs), and running tests. If these checks pass, the commit is allowed; if not, the commit is blocked until the issues are resolved. This ensures that all code committed to the repository adheres to the defined quality standards, leading to a cleaner, more maintainable codebase. To do so, we use the [pre-commit](https://pre-commit.com/) module. Install the module using:
```bash
pip install pre-commit
```
Run the pre-commit with:
```bash
pre-commit run --all-files
```
## Finalize
You are all set and no longer need the template instructions.
- The `orbit/ext_template` and `scripts/rsl_rl` directories act as a reference template for your convenience. Delete them if no longer required.
- When ready, use this `README.md` as a template and customize where appropriate.
## Docker / Cluster
We are currently working on a docker and cluster setup for this template. In the meanwhile, please refer to the current setup provided in the Orbit [documentation](https://isaac-orbit.github.io/orbit/source/deployment/index.html).
## Troubleshooting
### Docker Container
When running within a docker container, the following error has been encountered: `ModuleNotFoundError: No module named 'orbit'`. To mitigate, please comment out the docker specific environment definitions in `.vscode/launch.json` and run the following:
```bash
echo -e "\nexport PYTHONPATH=\$PYTHONPATH:/workspace/orbit.<your_extension_name>" >> ~/.bashrc
source ~/.bashrc
```
## Bugs & Feature Requests
Please report bugs and request features using the [Issue Tracker](https://github.com/isaac-orbit/orbit.ext_template/issues).
| 9,898 | Markdown | 47.053398 | 724 | 0.761871 |
isaac-orbit/orbit.ext_template/scripts/rsl_rl/play.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Script to play a checkpoint if an RL agent from RSL-RL."""
from __future__ import annotations
"""Launch Isaac Sim Simulator first."""
import argparse
from omni.isaac.orbit.app import AppLauncher
# local imports
import cli_args # isort: skip
# add argparse arguments
parser = argparse.ArgumentParser(description="Train an RL agent with RSL-RL.")
parser.add_argument("--cpu", action="store_true", default=False, help="Use CPU pipeline.")
parser.add_argument("--num_envs", type=int, default=None, help="Number of environments to simulate.")
parser.add_argument("--task", type=str, default=None, help="Name of the task.")
parser.add_argument("--seed", type=int, default=None, help="Seed used for the environment")
# append RSL-RL cli arguments
cli_args.add_rsl_rl_args(parser)
# append AppLauncher cli args
AppLauncher.add_app_launcher_args(parser)
args_cli = parser.parse_args()
# launch omniverse app
app_launcher = AppLauncher(args_cli)
simulation_app = app_launcher.app
"""Rest everything follows."""
import os
import gymnasium as gym
import omni.isaac.contrib_tasks # noqa: F401
import omni.isaac.orbit_tasks # noqa: F401
import torch
from omni.isaac.orbit_tasks.utils import get_checkpoint_path, parse_env_cfg
from omni.isaac.orbit_tasks.utils.wrappers.rsl_rl import (
RslRlOnPolicyRunnerCfg,
RslRlVecEnvWrapper,
export_policy_as_onnx,
)
from rsl_rl.runners import OnPolicyRunner
# Import extensions to set up environment tasks
import orbit.ext_template.tasks # noqa: F401 TODO: import orbit.<your_extension_name>
def main():
"""Play with RSL-RL agent."""
# parse configuration
env_cfg = parse_env_cfg(args_cli.task, use_gpu=not args_cli.cpu, num_envs=args_cli.num_envs)
agent_cfg: RslRlOnPolicyRunnerCfg = cli_args.parse_rsl_rl_cfg(args_cli.task, args_cli)
# create isaac environment
env = gym.make(args_cli.task, cfg=env_cfg)
# wrap around environment for rsl-rl
env = RslRlVecEnvWrapper(env)
# specify directory for logging experiments
log_root_path = os.path.join("logs", "rsl_rl", agent_cfg.experiment_name)
log_root_path = os.path.abspath(log_root_path)
print(f"[INFO] Loading experiment from directory: {log_root_path}")
resume_path = get_checkpoint_path(log_root_path, agent_cfg.load_run, agent_cfg.load_checkpoint)
print(f"[INFO]: Loading model checkpoint from: {resume_path}")
# load previously trained model
ppo_runner = OnPolicyRunner(env, agent_cfg.to_dict(), log_dir=None, device=agent_cfg.device)
ppo_runner.load(resume_path)
print(f"[INFO]: Loading model checkpoint from: {resume_path}")
# obtain the trained policy for inference
policy = ppo_runner.get_inference_policy(device=env.unwrapped.device)
# export policy to onnx
export_model_dir = os.path.join(os.path.dirname(resume_path), "exported")
export_policy_as_onnx(ppo_runner.alg.actor_critic, export_model_dir, filename="policy.onnx")
# reset environment
obs, _ = env.get_observations()
# simulate environment
while simulation_app.is_running():
# run everything in inference mode
with torch.inference_mode():
# agent stepping
actions = policy(obs)
# env stepping
obs, _, _, _ = env.step(actions)
# close the simulator
env.close()
if __name__ == "__main__":
# run the main execution
main()
# close sim app
simulation_app.close()
| 3,566 | Python | 32.027777 | 101 | 0.706955 |
isaac-orbit/orbit.ext_template/scripts/rsl_rl/cli_args.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import annotations
import argparse
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from omni.isaac.orbit_tasks.utils.wrappers.rsl_rl import RslRlOnPolicyRunnerCfg
def add_rsl_rl_args(parser: argparse.ArgumentParser):
"""Add RSL-RL arguments to the parser.
Args:
parser: The parser to add the arguments to.
"""
# create a new argument group
arg_group = parser.add_argument_group("rsl_rl", description="Arguments for RSL-RL agent.")
# -- experiment arguments
arg_group.add_argument(
"--experiment_name", type=str, default=None, help="Name of the experiment folder where logs will be stored."
)
arg_group.add_argument("--run_name", type=str, default=None, help="Run name suffix to the log directory.")
# -- load arguments
arg_group.add_argument("--resume", type=bool, default=None, help="Whether to resume from a checkpoint.")
arg_group.add_argument("--load_run", type=str, default=None, help="Name of the run folder to resume from.")
arg_group.add_argument("--checkpoint", type=str, default=None, help="Checkpoint file to resume from.")
# -- logger arguments
arg_group.add_argument(
"--logger", type=str, default=None, choices={"wandb", "tensorboard", "neptune"}, help="Logger module to use."
)
arg_group.add_argument(
"--log_project_name", type=str, default=None, help="Name of the logging project when using wandb or neptune."
)
def parse_rsl_rl_cfg(task_name: str, args_cli: argparse.Namespace) -> RslRlOnPolicyRunnerCfg:
"""Parse configuration for RSL-RL agent based on inputs.
Args:
task_name: The name of the environment.
args_cli: The command line arguments.
Returns:
The parsed configuration for RSL-RL agent based on inputs.
"""
from omni.isaac.orbit_tasks.utils.parse_cfg import load_cfg_from_registry
# load the default configuration
rslrl_cfg: RslRlOnPolicyRunnerCfg = load_cfg_from_registry(task_name, "rsl_rl_cfg_entry_point")
# override the default configuration with CLI arguments
if args_cli.seed is not None:
rslrl_cfg.seed = args_cli.seed
if args_cli.resume is not None:
rslrl_cfg.resume = args_cli.resume
if args_cli.load_run is not None:
rslrl_cfg.load_run = args_cli.load_run
if args_cli.checkpoint is not None:
rslrl_cfg.load_checkpoint = args_cli.checkpoint
if args_cli.run_name is not None:
rslrl_cfg.run_name = args_cli.run_name
if args_cli.logger is not None:
rslrl_cfg.logger = args_cli.logger
# set the project name for wandb and neptune
if rslrl_cfg.logger in {"wandb", "neptune"} and args_cli.log_project_name:
rslrl_cfg.wandb_project = args_cli.log_project_name
rslrl_cfg.neptune_project = args_cli.log_project_name
return rslrl_cfg
| 2,981 | Python | 38.759999 | 117 | 0.688695 |
isaac-orbit/orbit.ext_template/scripts/rsl_rl/train.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Script to train RL agent with RSL-RL."""
from __future__ import annotations
"""Launch Isaac Sim Simulator first."""
import argparse
import os
from omni.isaac.orbit.app import AppLauncher
# local imports
import cli_args # isort: skip
# add argparse arguments
parser = argparse.ArgumentParser(description="Train an RL agent with RSL-RL.")
parser.add_argument("--video", action="store_true", default=False, help="Record videos during training.")
parser.add_argument("--video_length", type=int, default=200, help="Length of the recorded video (in steps).")
parser.add_argument("--video_interval", type=int, default=2000, help="Interval between video recordings (in steps).")
parser.add_argument("--cpu", action="store_true", default=False, help="Use CPU pipeline.")
parser.add_argument("--num_envs", type=int, default=None, help="Number of environments to simulate.")
parser.add_argument("--task", type=str, default=None, help="Name of the task.")
parser.add_argument("--seed", type=int, default=None, help="Seed used for the environment")
# append RSL-RL cli arguments
cli_args.add_rsl_rl_args(parser)
# append AppLauncher cli args
AppLauncher.add_app_launcher_args(parser)
args_cli = parser.parse_args()
# load cheaper kit config in headless
if args_cli.headless:
app_experience = f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.gym.headless.kit"
else:
app_experience = f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit"
# launch omniverse app
app_launcher = AppLauncher(args_cli, experience=app_experience)
simulation_app = app_launcher.app
"""Rest everything follows."""
import os
from datetime import datetime
import gymnasium as gym
import omni.isaac.orbit_tasks # noqa: F401
import torch
from omni.isaac.orbit.envs import RLTaskEnvCfg
from omni.isaac.orbit.utils.dict import print_dict
from omni.isaac.orbit.utils.io import dump_pickle, dump_yaml
from omni.isaac.orbit_tasks.utils import get_checkpoint_path, parse_env_cfg
from omni.isaac.orbit_tasks.utils.wrappers.rsl_rl import (
RslRlOnPolicyRunnerCfg,
RslRlVecEnvWrapper,
)
from rsl_rl.runners import OnPolicyRunner
# Import extensions to set up environment tasks
import orbit.ext_template.tasks # noqa: F401 TODO: import orbit.<your_extension_name>
torch.backends.cuda.matmul.allow_tf32 = True
torch.backends.cudnn.allow_tf32 = True
torch.backends.cudnn.deterministic = False
torch.backends.cudnn.benchmark = False
def main():
"""Train with RSL-RL agent."""
# parse configuration
env_cfg: RLTaskEnvCfg = parse_env_cfg(args_cli.task, use_gpu=not args_cli.cpu, num_envs=args_cli.num_envs)
agent_cfg: RslRlOnPolicyRunnerCfg = cli_args.parse_rsl_rl_cfg(args_cli.task, args_cli)
# specify directory for logging experiments
log_root_path = os.path.join("logs", "rsl_rl", agent_cfg.experiment_name)
log_root_path = os.path.abspath(log_root_path)
print(f"[INFO] Logging experiment in directory: {log_root_path}")
# specify directory for logging runs: {time-stamp}_{run_name}
log_dir = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
if agent_cfg.run_name:
log_dir += f"_{agent_cfg.run_name}"
log_dir = os.path.join(log_root_path, log_dir)
# create isaac environment
env = gym.make(args_cli.task, cfg=env_cfg, render_mode="rgb_array" if args_cli.video else None)
# wrap for video recording
if args_cli.video:
video_kwargs = {
"video_folder": os.path.join(log_dir, "videos"),
"step_trigger": lambda step: step % args_cli.video_interval == 0,
"video_length": args_cli.video_length,
"disable_logger": True,
}
print("[INFO] Recording videos during training.")
print_dict(video_kwargs, nesting=4)
env = gym.wrappers.RecordVideo(env, **video_kwargs)
# wrap around environment for rsl-rl
env = RslRlVecEnvWrapper(env)
# create runner from rsl-rl
runner = OnPolicyRunner(env, agent_cfg.to_dict(), log_dir=log_dir, device=agent_cfg.device)
# write git state to logs
runner.add_git_repo_to_log(__file__)
# save resume path before creating a new log_dir
if agent_cfg.resume:
# get path to previous checkpoint
resume_path = get_checkpoint_path(log_root_path, agent_cfg.load_run, agent_cfg.load_checkpoint)
print(f"[INFO]: Loading model checkpoint from: {resume_path}")
# load previously trained model
runner.load(resume_path)
# set seed of the environment
env.seed(agent_cfg.seed)
# dump the configuration into log-directory
dump_yaml(os.path.join(log_dir, "params", "env.yaml"), env_cfg)
dump_yaml(os.path.join(log_dir, "params", "agent.yaml"), agent_cfg)
dump_pickle(os.path.join(log_dir, "params", "env.pkl"), env_cfg)
dump_pickle(os.path.join(log_dir, "params", "agent.pkl"), agent_cfg)
# run training
runner.learn(num_learning_iterations=agent_cfg.max_iterations, init_at_random_ep_len=True)
# close the simulator
env.close()
if __name__ == "__main__":
# run the main execution
main()
# close sim app
simulation_app.close()
| 5,231 | Python | 36.640288 | 117 | 0.703307 |
isaac-orbit/orbit.ext_template/config/extension.toml | [package]
# Semantic Versioning is used: https://semver.org/
version = "0.1.0"
# Description
title = "Extension Template" # TODO: Please adapt to your title.
description="Extension Template for Orbit" #TODO: Please adapt to your description.
repository = "https://github.com/isaac-orbit/orbit.ext_template.git" # TODO: Please adapt to your repository.
keywords = ["extension", "template", "orbit"] # TODO: Please adapt to your keywords.
category = "orbit"
readme = "README.md"
[dependencies]
"omni.kit.uiapp" = {}
"omni.isaac.orbit" = {}
"omni.isaac.orbit_assets" = {}
"omni.isaac.orbit_tasks" = {}
"omni.isaac.core" = {}
"omni.isaac.gym" = {}
"omni.replicator.isaac" = {}
# Note: You can add additional dependencies here for your extension.
# For example, if you want to use the omni.kit module, you can add it as a dependency:
# "omni.kit" = {}
[[python.module]]
name = "orbit.ext_template" # TODO: Please adapt to your package name.
| 946 | TOML | 32.821427 | 110 | 0.700846 |
isaac-orbit/orbit.ext_template/orbit/ext_template/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""
Python module serving as a project/extension template.
"""
# Register Gym environments.
from .tasks import *
# Register UI extensions.
from .ui_extension_example import *
| 300 | Python | 19.066665 | 56 | 0.743333 |
isaac-orbit/orbit.ext_template/orbit/ext_template/ui_extension_example.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
import omni.ext
import omni.ui as ui
# Functions and vars are available to other extension as usual in python: `example.python_ext.some_public_function(x)`
def some_public_function(x: int):
print("[orbit.ext_template] some_public_function was called with x: ", x)
return x**x
# Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be
# instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled
# on_shutdown() is called.
class ExampleExtension(omni.ext.IExt):
# ext_id is current extension id. It can be used with extension manager to query additional information, like where
# this extension is located on filesystem.
def on_startup(self, ext_id):
print("[orbit.ext_template] startup")
self._count = 0
self._window = ui.Window("My Window", width=300, height=300)
with self._window.frame:
with ui.VStack():
label = ui.Label("")
def on_click():
self._count += 1
label.text = f"count: {self._count}"
def on_reset():
self._count = 0
label.text = "empty"
on_reset()
with ui.HStack():
ui.Button("Add", clicked_fn=on_click)
ui.Button("Reset", clicked_fn=on_reset)
def on_shutdown(self):
print("[orbit.ext_template] shutdown")
| 1,650 | Python | 33.395833 | 119 | 0.609697 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Package containing task implementations for various robotic environments."""
import os
import toml
# Conveniences to other module directories via relative paths
ORBIT_TASKS_EXT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))
"""Path to the extension source directory."""
ORBIT_TASKS_METADATA = toml.load(os.path.join(ORBIT_TASKS_EXT_DIR, "config", "extension.toml"))
"""Extension metadata dictionary parsed from the extension.toml file."""
# Configure the module-level variables
__version__ = ORBIT_TASKS_METADATA["package"]["version"]
##
# Register Gym environments.
##
from omni.isaac.orbit_tasks.utils import import_packages
# The blacklist is used to prevent importing configs from sub-packages
_BLACKLIST_PKGS = ["utils"]
# Import all configs in this package
import_packages(__name__, _BLACKLIST_PKGS)
| 969 | Python | 29.312499 | 95 | 0.744066 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Locomotion environments for legged robots."""
from .velocity import * # noqa
| 205 | Python | 21.888886 | 56 | 0.731707 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/velocity_env_cfg.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import annotations
import math
from dataclasses import MISSING
import omni.isaac.orbit.sim as sim_utils
from omni.isaac.orbit.assets import ArticulationCfg, AssetBaseCfg
from omni.isaac.orbit.envs import RLTaskEnvCfg
from omni.isaac.orbit.managers import CurriculumTermCfg as CurrTerm
from omni.isaac.orbit.managers import ObservationGroupCfg as ObsGroup
from omni.isaac.orbit.managers import ObservationTermCfg as ObsTerm
from omni.isaac.orbit.managers import RandomizationTermCfg as RandTerm
from omni.isaac.orbit.managers import RewardTermCfg as RewTerm
from omni.isaac.orbit.managers import SceneEntityCfg
from omni.isaac.orbit.managers import TerminationTermCfg as DoneTerm
from omni.isaac.orbit.scene import InteractiveSceneCfg
from omni.isaac.orbit.sensors import ContactSensorCfg, RayCasterCfg, patterns
from omni.isaac.orbit.terrains import TerrainImporterCfg
from omni.isaac.orbit.utils import configclass
from omni.isaac.orbit.utils.noise import AdditiveUniformNoiseCfg as Unoise
import orbit.ext_template.tasks.locomotion.velocity.mdp as mdp
##
# Pre-defined configs
##
from omni.isaac.orbit.terrains.config.rough import ROUGH_TERRAINS_CFG # isort: skip
##
# Scene definition
##
@configclass
class MySceneCfg(InteractiveSceneCfg):
"""Configuration for the terrain scene with a legged robot."""
# ground terrain
terrain = TerrainImporterCfg(
prim_path="/World/ground",
terrain_type="generator",
terrain_generator=ROUGH_TERRAINS_CFG,
max_init_terrain_level=5,
collision_group=-1,
physics_material=sim_utils.RigidBodyMaterialCfg(
friction_combine_mode="multiply",
restitution_combine_mode="multiply",
static_friction=1.0,
dynamic_friction=1.0,
),
visual_material=sim_utils.MdlFileCfg(
mdl_path="{NVIDIA_NUCLEUS_DIR}/Materials/Base/Architecture/Shingles_01.mdl",
project_uvw=True,
),
debug_vis=False,
)
# robots
robot: ArticulationCfg = MISSING
# sensors
height_scanner = RayCasterCfg(
prim_path="{ENV_REGEX_NS}/Robot/base",
offset=RayCasterCfg.OffsetCfg(pos=(0.0, 0.0, 20.0)),
attach_yaw_only=True,
pattern_cfg=patterns.GridPatternCfg(resolution=0.1, size=[1.6, 1.0]),
debug_vis=False,
mesh_prim_paths=["/World/ground"],
)
contact_forces = ContactSensorCfg(prim_path="{ENV_REGEX_NS}/Robot/.*", history_length=3, track_air_time=True)
# lights
light = AssetBaseCfg(
prim_path="/World/light",
spawn=sim_utils.DistantLightCfg(color=(0.75, 0.75, 0.75), intensity=3000.0),
)
sky_light = AssetBaseCfg(
prim_path="/World/skyLight",
spawn=sim_utils.DomeLightCfg(color=(0.13, 0.13, 0.13), intensity=1000.0),
)
##
# MDP settings
##
@configclass
class CommandsCfg:
"""Command specifications for the MDP."""
base_velocity = mdp.UniformVelocityCommandCfg(
asset_name="robot",
resampling_time_range=(10.0, 10.0),
rel_standing_envs=0.02,
rel_heading_envs=1.0,
heading_command=True,
heading_control_stiffness=0.5,
debug_vis=True,
ranges=mdp.UniformVelocityCommandCfg.Ranges(
lin_vel_x=(-1.0, 1.0), lin_vel_y=(-1.0, 1.0), ang_vel_z=(-1.0, 1.0), heading=(-math.pi, math.pi)
),
)
@configclass
class ActionsCfg:
"""Action specifications for the MDP."""
joint_pos = mdp.JointPositionActionCfg(asset_name="robot", joint_names=[".*"], scale=0.5, use_default_offset=True)
@configclass
class ObservationsCfg:
"""Observation specifications for the MDP."""
@configclass
class PolicyCfg(ObsGroup):
"""Observations for policy group."""
# observation terms (order preserved)
base_lin_vel = ObsTerm(func=mdp.base_lin_vel, noise=Unoise(n_min=-0.1, n_max=0.1))
base_ang_vel = ObsTerm(func=mdp.base_ang_vel, noise=Unoise(n_min=-0.2, n_max=0.2))
projected_gravity = ObsTerm(
func=mdp.projected_gravity,
noise=Unoise(n_min=-0.05, n_max=0.05),
)
velocity_commands = ObsTerm(func=mdp.generated_commands, params={"command_name": "base_velocity"})
joint_pos = ObsTerm(func=mdp.joint_pos_rel, noise=Unoise(n_min=-0.01, n_max=0.01))
joint_vel = ObsTerm(func=mdp.joint_vel_rel, noise=Unoise(n_min=-1.5, n_max=1.5))
actions = ObsTerm(func=mdp.last_action)
height_scan = ObsTerm(
func=mdp.height_scan,
params={"sensor_cfg": SceneEntityCfg("height_scanner")},
noise=Unoise(n_min=-0.1, n_max=0.1),
clip=(-1.0, 1.0),
)
def __post_init__(self):
self.enable_corruption = True
self.concatenate_terms = True
# observation groups
policy: PolicyCfg = PolicyCfg()
@configclass
class RandomizationCfg:
"""Configuration for randomization."""
# startup
physics_material = RandTerm(
func=mdp.randomize_rigid_body_material,
mode="startup",
params={
"asset_cfg": SceneEntityCfg("robot", body_names=".*"),
"static_friction_range": (0.8, 0.8),
"dynamic_friction_range": (0.6, 0.6),
"restitution_range": (0.0, 0.0),
"num_buckets": 64,
},
)
add_base_mass = RandTerm(
func=mdp.add_body_mass,
mode="startup",
params={"asset_cfg": SceneEntityCfg("robot", body_names="base"), "mass_range": (-5.0, 5.0)},
)
# reset
base_external_force_torque = RandTerm(
func=mdp.apply_external_force_torque,
mode="reset",
params={
"asset_cfg": SceneEntityCfg("robot", body_names="base"),
"force_range": (0.0, 0.0),
"torque_range": (-0.0, 0.0),
},
)
reset_base = RandTerm(
func=mdp.reset_root_state_uniform,
mode="reset",
params={
"pose_range": {"x": (-0.5, 0.5), "y": (-0.5, 0.5), "yaw": (-3.14, 3.14)},
"velocity_range": {
"x": (-0.5, 0.5),
"y": (-0.5, 0.5),
"z": (-0.5, 0.5),
"roll": (-0.5, 0.5),
"pitch": (-0.5, 0.5),
"yaw": (-0.5, 0.5),
},
},
)
reset_robot_joints = RandTerm(
func=mdp.reset_joints_by_scale,
mode="reset",
params={
"position_range": (0.5, 1.5),
"velocity_range": (0.0, 0.0),
},
)
# interval
push_robot = RandTerm(
func=mdp.push_by_setting_velocity,
mode="interval",
interval_range_s=(10.0, 15.0),
params={"velocity_range": {"x": (-0.5, 0.5), "y": (-0.5, 0.5)}},
)
@configclass
class RewardsCfg:
"""Reward terms for the MDP."""
# -- task
track_lin_vel_xy_exp = RewTerm(
func=mdp.track_lin_vel_xy_exp, weight=1.0, params={"command_name": "base_velocity", "std": math.sqrt(0.25)}
)
track_ang_vel_z_exp = RewTerm(
func=mdp.track_ang_vel_z_exp, weight=0.5, params={"command_name": "base_velocity", "std": math.sqrt(0.25)}
)
# -- penalties
lin_vel_z_l2 = RewTerm(func=mdp.lin_vel_z_l2, weight=-2.0)
ang_vel_xy_l2 = RewTerm(func=mdp.ang_vel_xy_l2, weight=-0.05)
dof_torques_l2 = RewTerm(func=mdp.joint_torques_l2, weight=-1.0e-5)
dof_acc_l2 = RewTerm(func=mdp.joint_acc_l2, weight=-2.5e-7)
action_rate_l2 = RewTerm(func=mdp.action_rate_l2, weight=-0.01)
feet_air_time = RewTerm(
func=mdp.feet_air_time,
weight=0.125,
params={
"sensor_cfg": SceneEntityCfg("contact_forces", body_names=".*FOOT"),
"command_name": "base_velocity",
"threshold": 0.5,
},
)
undesired_contacts = RewTerm(
func=mdp.undesired_contacts,
weight=-1.0,
params={"sensor_cfg": SceneEntityCfg("contact_forces", body_names=".*THIGH"), "threshold": 1.0},
)
# -- optional penalties
flat_orientation_l2 = RewTerm(func=mdp.flat_orientation_l2, weight=0.0)
dof_pos_limits = RewTerm(func=mdp.joint_pos_limits, weight=0.0)
@configclass
class TerminationsCfg:
"""Termination terms for the MDP."""
time_out = DoneTerm(func=mdp.time_out, time_out=True)
base_contact = DoneTerm(
func=mdp.illegal_contact,
params={"sensor_cfg": SceneEntityCfg("contact_forces", body_names="base"), "threshold": 1.0},
)
@configclass
class CurriculumCfg:
"""Curriculum terms for the MDP."""
terrain_levels = CurrTerm(func=mdp.terrain_levels_vel)
##
# Environment configuration
##
@configclass
class LocomotionVelocityRoughEnvCfg(RLTaskEnvCfg):
"""Configuration for the locomotion velocity-tracking environment."""
# Scene settings
scene: MySceneCfg = MySceneCfg(num_envs=4096, env_spacing=2.5)
# Basic settings
observations: ObservationsCfg = ObservationsCfg()
actions: ActionsCfg = ActionsCfg()
commands: CommandsCfg = CommandsCfg()
# MDP settings
rewards: RewardsCfg = RewardsCfg()
terminations: TerminationsCfg = TerminationsCfg()
randomization: RandomizationCfg = RandomizationCfg()
curriculum: CurriculumCfg = CurriculumCfg()
def __post_init__(self):
"""Post initialization."""
# general settings
self.decimation = 4
self.episode_length_s = 20.0
# simulation settings
self.sim.dt = 0.005
self.sim.disable_contact_processing = True
self.sim.physics_material = self.scene.terrain.physics_material
# update sensor update periods
# we tick all the sensors based on the smallest update period (physics update period)
if self.scene.height_scanner is not None:
self.scene.height_scanner.update_period = self.decimation * self.sim.dt
if self.scene.contact_forces is not None:
self.scene.contact_forces.update_period = self.sim.dt
# check if terrain levels curriculum is enabled - if so, enable curriculum for terrain generator
# this generates terrains with increasing difficulty and is useful for training
if getattr(self.curriculum, "terrain_levels", None) is not None:
if self.scene.terrain.terrain_generator is not None:
self.scene.terrain.terrain_generator.curriculum = True
else:
if self.scene.terrain.terrain_generator is not None:
self.scene.terrain.terrain_generator.curriculum = False
| 10,649 | Python | 32.596214 | 118 | 0.626538 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Locomotion environments with velocity-tracking commands.
These environments are based on the `legged_gym` environments provided by Rudin et al.
Reference:
https://github.com/leggedrobotics/legged_gym
"""
| 336 | Python | 24.923075 | 86 | 0.764881 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/mdp/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""This sub-module contains the functions that are specific to the locomotion environments."""
from omni.isaac.orbit.envs.mdp import * # noqa: F401, F403
from .curriculums import * # noqa: F401, F403
from .rewards import * # noqa: F401, F403
| 370 | Python | 29.916664 | 94 | 0.732432 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/mdp/curriculums.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Common functions that can be used to create curriculum for the learning environment.
The functions can be passed to the :class:`omni.isaac.orbit.managers.CurriculumTermCfg` object to enable
the curriculum introduced by the function.
"""
from __future__ import annotations
from collections.abc import Sequence
from typing import TYPE_CHECKING
import torch
from omni.isaac.orbit.assets import Articulation
from omni.isaac.orbit.managers import SceneEntityCfg
from omni.isaac.orbit.terrains import TerrainImporter
if TYPE_CHECKING:
from omni.isaac.orbit.envs import RLTaskEnv
def terrain_levels_vel(
env: RLTaskEnv, env_ids: Sequence[int], asset_cfg: SceneEntityCfg = SceneEntityCfg("robot")
) -> torch.Tensor:
"""Curriculum based on the distance the robot walked when commanded to move at a desired velocity.
This term is used to increase the difficulty of the terrain when the robot walks far enough and decrease the
difficulty when the robot walks less than half of the distance required by the commanded velocity.
.. note::
It is only possible to use this term with the terrain type ``generator``. For further information
on different terrain types, check the :class:`omni.isaac.orbit.terrains.TerrainImporter` class.
Returns:
The mean terrain level for the given environment ids.
"""
# extract the used quantities (to enable type-hinting)
asset: Articulation = env.scene[asset_cfg.name]
terrain: TerrainImporter = env.scene.terrain
command = env.command_manager.get_command("base_velocity")
# compute the distance the robot walked
distance = torch.norm(asset.data.root_pos_w[env_ids, :2] - env.scene.env_origins[env_ids, :2], dim=1)
# robots that walked far enough progress to harder terrains
move_up = distance > terrain.cfg.terrain_generator.size[0] / 2
# robots that walked less than half of their required distance go to simpler terrains
move_down = distance < torch.norm(command[env_ids, :2], dim=1) * env.max_episode_length_s * 0.5
move_down *= ~move_up
# update terrain levels
terrain.update_env_origins(env_ids, move_up, move_down)
# return the mean terrain level
return torch.mean(terrain.terrain_levels.float())
| 2,376 | Python | 41.446428 | 112 | 0.742424 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/mdp/rewards.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import annotations
from typing import TYPE_CHECKING
import torch
from omni.isaac.orbit.managers import SceneEntityCfg
from omni.isaac.orbit.sensors import ContactSensor
if TYPE_CHECKING:
from omni.isaac.orbit.envs import RLTaskEnv
def feet_air_time(env: RLTaskEnv, command_name: str, sensor_cfg: SceneEntityCfg, threshold: float) -> torch.Tensor:
"""Reward long steps taken by the feet using L2-kernel.
This function rewards the agent for taking steps that are longer than a threshold. This helps ensure
that the robot lifts its feet off the ground and takes steps. The reward is computed as the sum of
the time for which the feet are in the air.
If the commands are small (i.e. the agent is not supposed to take a step), then the reward is zero.
"""
# extract the used quantities (to enable type-hinting)
contact_sensor: ContactSensor = env.scene.sensors[sensor_cfg.name]
# compute the reward
first_contact = contact_sensor.compute_first_contact(env.step_dt)[:, sensor_cfg.body_ids]
last_air_time = contact_sensor.data.last_air_time[:, sensor_cfg.body_ids]
reward = torch.sum((last_air_time - threshold) * first_contact, dim=1)
# no reward for zero command
reward *= torch.norm(env.command_manager.get_command(command_name)[:, :2], dim=1) > 0.1
return reward
def feet_air_time_positive_biped(env, command_name: str, threshold: float, sensor_cfg: SceneEntityCfg) -> torch.Tensor:
"""Reward long steps taken by the feet for bipeds.
    This function rewards the agent for taking steps up to a specified threshold and also keeping one foot at
    a time in the air.
If the commands are small (i.e. the agent is not supposed to take a step), then the reward is zero.
"""
contact_sensor: ContactSensor = env.scene.sensors[sensor_cfg.name]
# compute the reward
air_time = contact_sensor.data.current_air_time[:, sensor_cfg.body_ids]
contact_time = contact_sensor.data.current_contact_time[:, sensor_cfg.body_ids]
in_contact = contact_time > 0.0
in_mode_time = torch.where(in_contact, contact_time, air_time)
single_stance = torch.sum(in_contact.int(), dim=1) == 1
reward = torch.min(torch.where(single_stance.unsqueeze(-1), in_mode_time, 0.0), dim=1)[0]
reward = torch.clamp(reward, max=threshold)
# no reward for zero command
reward *= torch.norm(env.command_manager.get_command(command_name)[:, :2], dim=1) > 0.1
return reward
| 2,595 | Python | 43.75862 | 119 | 0.717148 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
"""Configurations for velocity-based locomotion environments."""
# We leave this file empty since we don't want to expose any configs in this package directly.
# We still need this file to import the "config" module in the parent package.
| 363 | Python | 35.399996 | 94 | 0.763085 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/anymal_d/rough_env_cfg.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from omni.isaac.orbit.utils import configclass
from orbit.ext_template.tasks.locomotion.velocity.velocity_env_cfg import (
LocomotionVelocityRoughEnvCfg,
)
##
# Pre-defined configs
##
from omni.isaac.orbit_assets.anymal import ANYMAL_D_CFG # isort: skip
@configclass
class AnymalDRoughEnvCfg(LocomotionVelocityRoughEnvCfg):
def __post_init__(self):
# post init of parent
super().__post_init__()
# switch robot to anymal-d
self.scene.robot = ANYMAL_D_CFG.replace(prim_path="{ENV_REGEX_NS}/Robot")
@configclass
class AnymalDRoughEnvCfg_PLAY(AnymalDRoughEnvCfg):
def __post_init__(self):
# post init of parent
super().__post_init__()
# make a smaller scene for play
self.scene.num_envs = 50
self.scene.env_spacing = 2.5
# spawn the robot randomly in the grid (instead of their terrain levels)
self.scene.terrain.max_init_terrain_level = None
# reduce the number of terrains to save memory
if self.scene.terrain.terrain_generator is not None:
self.scene.terrain.terrain_generator.num_rows = 5
self.scene.terrain.terrain_generator.num_cols = 5
self.scene.terrain.terrain_generator.curriculum = False
# disable randomization for play
self.observations.policy.enable_corruption = False
# remove random pushing
self.randomization.base_external_force_torque = None
self.randomization.push_robot = None
| 1,617 | Python | 32.020408 | 81 | 0.683364 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/anymal_d/flat_env_cfg.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from omni.isaac.orbit.utils import configclass
from .rough_env_cfg import AnymalDRoughEnvCfg
@configclass
class AnymalDFlatEnvCfg(AnymalDRoughEnvCfg):
def __post_init__(self):
# post init of parent
super().__post_init__()
# override rewards
self.rewards.flat_orientation_l2.weight = -5.0
self.rewards.dof_torques_l2.weight = -2.5e-5
self.rewards.feet_air_time.weight = 0.5
# change terrain to flat
self.scene.terrain.terrain_type = "plane"
self.scene.terrain.terrain_generator = None
# no height scan
self.scene.height_scanner = None
self.observations.policy.height_scan = None
# no terrain curriculum
self.curriculum.terrain_levels = None
@configclass
class AnymalDFlatEnvCfg_PLAY(AnymalDFlatEnvCfg):
def __post_init__(self) -> None:
# post init of parent
super().__post_init__()
# make a smaller scene for play
self.scene.num_envs = 50
self.scene.env_spacing = 2.5
# disable randomization for play
self.observations.policy.enable_corruption = False
# remove random pushing
self.randomization.base_external_force_torque = None
self.randomization.push_robot = None
| 1,382 | Python | 30.431817 | 60 | 0.656295 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/anymal_d/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
import gymnasium as gym
from . import agents, flat_env_cfg, rough_env_cfg
##
# Register Gym environments.
##
gym.register(
id="Isaac-Velocity-Flat-Anymal-D-Template-v0",
entry_point="omni.isaac.orbit.envs:RLTaskEnv",
disable_env_checker=True,
kwargs={
"env_cfg_entry_point": flat_env_cfg.AnymalDFlatEnvCfg,
"rsl_rl_cfg_entry_point": agents.rsl_rl_cfg.AnymalDFlatPPORunnerCfg,
},
)
gym.register(
id="Isaac-Velocity-Flat-Anymal-D-Template-Play-v0",
entry_point="omni.isaac.orbit.envs:RLTaskEnv",
disable_env_checker=True,
kwargs={
"env_cfg_entry_point": flat_env_cfg.AnymalDFlatEnvCfg_PLAY,
"rsl_rl_cfg_entry_point": agents.rsl_rl_cfg.AnymalDFlatPPORunnerCfg,
},
)
gym.register(
id="Isaac-Velocity-Rough-Anymal-D-Template-v0",
entry_point="omni.isaac.orbit.envs:RLTaskEnv",
disable_env_checker=True,
kwargs={
"env_cfg_entry_point": rough_env_cfg.AnymalDRoughEnvCfg,
"rsl_rl_cfg_entry_point": agents.rsl_rl_cfg.AnymalDRoughPPORunnerCfg,
},
)
gym.register(
id="Isaac-Velocity-Rough-Anymal-D-Template-Play-v0",
entry_point="omni.isaac.orbit.envs:RLTaskEnv",
disable_env_checker=True,
kwargs={
"env_cfg_entry_point": rough_env_cfg.AnymalDRoughEnvCfg_PLAY,
"rsl_rl_cfg_entry_point": agents.rsl_rl_cfg.AnymalDRoughPPORunnerCfg,
},
)
| 1,498 | Python | 27.283018 | 77 | 0.688251 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/anymal_d/agents/rsl_rl_cfg.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from omni.isaac.orbit.utils import configclass
from omni.isaac.orbit_tasks.utils.wrappers.rsl_rl import (
RslRlOnPolicyRunnerCfg,
RslRlPpoActorCriticCfg,
RslRlPpoAlgorithmCfg,
)
@configclass
class AnymalDRoughPPORunnerCfg(RslRlOnPolicyRunnerCfg):
num_steps_per_env = 24
max_iterations = 1500
save_interval = 50
experiment_name = "anymal_d_rough"
empirical_normalization = False
policy = RslRlPpoActorCriticCfg(
init_noise_std=1.0,
actor_hidden_dims=[512, 256, 128],
critic_hidden_dims=[512, 256, 128],
activation="elu",
)
algorithm = RslRlPpoAlgorithmCfg(
value_loss_coef=1.0,
use_clipped_value_loss=True,
clip_param=0.2,
entropy_coef=0.005,
num_learning_epochs=5,
num_mini_batches=4,
learning_rate=1.0e-3,
schedule="adaptive",
gamma=0.99,
lam=0.95,
desired_kl=0.01,
max_grad_norm=1.0,
)
@configclass
class AnymalDFlatPPORunnerCfg(AnymalDRoughPPORunnerCfg):
def __post_init__(self):
super().__post_init__()
self.max_iterations = 300
self.experiment_name = "anymal_d_flat"
self.policy.actor_hidden_dims = [128, 128, 128]
self.policy.critic_hidden_dims = [128, 128, 128]
| 1,417 | Python | 26.26923 | 58 | 0.645025 |
isaac-orbit/orbit.ext_template/orbit/ext_template/tasks/locomotion/velocity/config/anymal_d/agents/__init__.py | # Copyright (c) 2022-2024, The ORBIT Project Developers.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
from . import rsl_rl_cfg # noqa: F401, F403
| 168 | Python | 23.142854 | 56 | 0.720238 |
isaac-orbit/orbit.ext_template/docs/CHANGELOG.rst | Changelog
---------
0.1.0 (2024-01-29)
~~~~~~~~~~~~~~~~~~
Added
^^^^^
* Created an initial template for building an extension or project based on Orbit
| 155 | reStructuredText | 13.181817 | 81 | 0.593548 |
MomentFactory/Omniverse-NDI-extension/README.md | # mf.ov.ndi
An extension to enable NDI® live video input in Omniverse.
## Getting started
- Requires Omniverse Kit >= 105
- Requires USD Composer > 2023.1.0
- Requires [NDI® 5.5.3 runtime for Windows](https://go.ndi.tv/tools-for-windows)
Previous releases should still be supported in USD Composer 2022.x.
This plugin leverages the `dynamic://` keyword which is currently a beta feature of Omniverse.
## Using the extension
⚠️ You should disable Render Settings > Raytracing > Eco Mode for the extension to work properly.
### Enable the extension
In USD Composer:
- Windows > Extensions.
- Switch to the THIRD PARTY tab.
- Install and enable the extension.
You may want to use [example.usda](./example.usda) in Create for your first test.
### Extension window
If not opened automatically: Windows > NDI®.

### Window header
- ➕**Create Dynamic Texture**: Creates a new material and shader under /Looks with the associated configuration for dynamic texture rendering (see the sketch after this list). The text field `myDynamicMaterial` lets you customize the identifier of the dynamic texture to register.
- 🔄**Discover Dynamic Textures** searches through the USD stage hierarchy for any material with a `dynamic://` asset source (like the one created by “Create Dynamic Texture”) and adds a collapsible section for each unique identifier found.
- ⏹️**Stop all streams** stops the reception of the video stream for every dynamic texture. A material with a dynamic texture source will still display the last frame it received.
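For reference, the snippet below is a rough sketch of the USD authoring that **Create Dynamic Texture** performs, mirroring the extension's `USDtools` module; `myDynamicMaterial` is only an example identifier, and the code must run inside Omniverse since it relies on `omni.usd`.
```python
# Sketch: author an OmniPBR material whose albedo reads from a dynamic:// texture.
import omni.usd
from pxr import Sdf, UsdGeom, UsdShade
stage = omni.usd.get_context().get_stage()
scope_path = f"{stage.GetDefaultPrim().GetPath()}/NDI_Looks"
UsdGeom.Scope.Define(stage, scope_path)
material = UsdShade.Material.Define(stage, f"{scope_path}/myDynamicMaterial")
shader = UsdShade.Shader.Define(stage, f"{scope_path}/myDynamicMaterial/Shader")
shader.SetSourceAsset("OmniPBR.mdl", "mdl")
shader.SetSourceAssetSubIdentifier("OmniPBR", "mdl")
shader.CreateIdAttr("OmniPBR")
# The dynamic:// asset source is what "Discover Dynamic Textures" looks for.
shader.CreateInput("diffuse_texture", Sdf.ValueTypeNames.Asset).Set("dynamic://myDynamicMaterial")
material.CreateSurfaceOutput().ConnectToSource(shader.ConnectableAPI(), "surface")
```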
### Dynamic material component
Each dynamic texture will be represented in a collapsible component.
The title is the name of your dynamic texture.
- ☑️ Indicates the health of the video feed.
- **NDI® feed combobox**: Selects which NDI® feed to use for this dynamic texture identifier. This value is saved in USD as a custom property on the shader prim under `ndi:source` (see the sketch after this list).
- ⏸️ Starts or stops the video feed.
- 🖼️ Switches the feed to low-bandwidth mode, saving performance by decreasing the resolution of that particular feed.
- 🗇 Copies the dynamic texture identifier to the clipboard, e.g. `dynamic://myDynamicMaterial`.
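For pipeline scripting, the per-texture settings above are stored as plain custom USD attributes on the shader prim (`ndi:source` and `ndi:lowbandwidth`). A minimal sketch, assuming the shader prim path from the sketch above and a placeholder NDI® source name:
```python
# Sketch: read/write the custom NDI® attributes the extension stores on the shader prim.
import omni.usd
from pxr import Sdf
stage = omni.usd.get_context().get_stage()
shader = stage.GetPrimAtPath("/World/NDI_Looks/myDynamicMaterial/Shader")  # placeholder path
shader.CreateAttribute("ndi:source", Sdf.ValueTypeNames.String).Set("MY-PC (Example Source)")  # placeholder source
shader.CreateAttribute("ndi:lowbandwidth", Sdf.ValueTypeNames.Bool).Set(False)
print(shader.GetAttribute("ndi:source").Get())
```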
## Resources
- Inspired by : [kit-extension-template](https://github.com/NVIDIA-Omniverse/kit-extension-template)
- [kit-cv-video-example](https://github.com/jshrake-nvidia/kit-cv-video-example)
- [kit-dynamic-texture-example](https://github.com/jshrake-nvidia/kit-dynamic-texture-example)
- [ndi-python](https://github.com/buresu/ndi-python)
## Known issues
- Currently implemented with Python, performance could be greatly improved with C++ (but limited by DynamicTextureProvider implementation)
- You can ignore warnings in the form of `[Warning] [omni.hydra] Material parameter '...' was assigned to incompatible texture: '...'`
- You can ignore warnings in the form of `[Warning] [omni.ext._impl._internal] mf.ov.ndi-... -> <class 'mf.ov.ndi...'>: extension object is still alive, something holds a reference on it...`
- You can ignore the first instance of `[Warning] Could not get stage`, because the extension loads before the stage is initialized.
MomentFactory/Omniverse-NDI-extension/tools/scripts/link_app.py | import argparse
import json
import os
import sys
import packmanapi
import urllib3
def find_omniverse_apps():
http = urllib3.PoolManager()
try:
r = http.request("GET", "http://127.0.0.1:33480/components")
except Exception as e:
print(f"Failed retrieving apps from an Omniverse Launcher, maybe it is not installed?\nError: {e}")
sys.exit(1)
apps = {}
for x in json.loads(r.data.decode("utf-8")):
latest = x.get("installedVersions", {}).get("latest", "")
if latest:
for s in x.get("settings", []):
if s.get("version", "") == latest:
root = s.get("launch", {}).get("root", "")
apps[x["slug"]] = (x["name"], root)
break
return apps
def create_link(src, dst):
print(f"Creating a link '{src}' -> '{dst}'")
packmanapi.link(src, dst)
APP_PRIORITIES = ["code", "create", "view"]
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Create folder link to Kit App installed from Omniverse Launcher")
parser.add_argument(
"--path",
help="Path to Kit App installed from Omniverse Launcher, e.g.: 'C:/Users/bob/AppData/Local/ov/pkg/create-2021.3.4'",
required=False,
)
parser.add_argument(
"--app", help="Name of Kit App installed from Omniverse Launcher, e.g.: 'code', 'create'", required=False
)
args = parser.parse_args()
path = args.path
if not path:
print("Path is not specified, looking for Omniverse Apps...")
apps = find_omniverse_apps()
if len(apps) == 0:
print(
"Can't find any Omniverse Apps. Use Omniverse Launcher to install one. 'Code' is the recommended app for developers."
)
sys.exit(0)
print("\nFound following Omniverse Apps:")
for i, slug in enumerate(apps):
name, root = apps[slug]
print(f"{i}: {name} ({slug}) at: '{root}'")
if args.app:
selected_app = args.app.lower()
if selected_app not in apps:
choices = ", ".join(apps.keys())
print(f"Passed app: '{selected_app}' is not found. Specify one of the following found Apps: {choices}")
sys.exit(0)
else:
selected_app = next((x for x in APP_PRIORITIES if x in apps), None)
if not selected_app:
selected_app = next(iter(apps))
print(f"\nSelected app: {selected_app}")
_, path = apps[selected_app]
if not os.path.exists(path):
print(f"Provided path doesn't exist: {path}")
else:
SCRIPT_ROOT = os.path.dirname(os.path.realpath(__file__))
create_link(f"{SCRIPT_ROOT}/../../app", path)
print("Success!")
| 2,814 | Python | 32.117647 | 133 | 0.562189 |
MomentFactory/Omniverse-NDI-extension/tools/packman/config.packman.xml | <config remotes="cloudfront">
<remote2 name="cloudfront">
<transport actions="download" protocol="https" packageLocation="d4i3qtqj3r0z5.cloudfront.net/${name}@${version}" />
</remote2>
</config>
| 211 | XML | 34.333328 | 123 | 0.691943 |
MomentFactory/Omniverse-NDI-extension/tools/packman/bootstrap/install_package.py | # Copyright 2019 NVIDIA CORPORATION
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import shutil
import sys
import tempfile
import zipfile
__author__ = "hfannar"
logging.basicConfig(level=logging.WARNING, format="%(message)s")
logger = logging.getLogger("install_package")
class TemporaryDirectory:
def __init__(self):
self.path = None
def __enter__(self):
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, type, value, traceback):
# Remove temporary data created
shutil.rmtree(self.path)
def install_package(package_src_path, package_dst_path):
with zipfile.ZipFile(package_src_path, allowZip64=True) as zip_file, TemporaryDirectory() as temp_dir:
zip_file.extractall(temp_dir)
# Recursively copy (temp_dir will be automatically cleaned up on exit)
try:
# Recursive copy is needed because both package name and version folder could be missing in
# target directory:
shutil.copytree(temp_dir, package_dst_path)
except OSError as exc:
logger.warning("Directory %s already present, packaged installation aborted" % package_dst_path)
else:
logger.info("Package successfully installed to %s" % package_dst_path)
install_package(sys.argv[1], sys.argv[2])
| 1,844 | Python | 33.166666 | 108 | 0.703362 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/NDItools.py | from .eventsystem import EventSystem
import carb.profiler
import logging
from .deps import NDIlib as ndi
import numpy as np
import omni.ui
import threading
import time
from typing import List
import warp as wp
class NDItools():
def __init__(self):
self._ndi_ok = False
self._ndi_find = None
self._ndi_init()
self._ndi_find_init()
self._finder = None
self._create_finder()
self._streams = []
stream = omni.kit.app.get_app().get_update_event_stream()
self._sub = stream.create_subscription_to_pop(self._on_update, name="update")
def destroy(self):
self._sub.unsubscribe()
self._sub = None
self._finder.destroy()
for stream in self._streams:
stream.destroy()
self._streams.clear()
if self._ndi_ok:
if self._ndi_find is not None:
ndi.find_destroy(self._ndi_find)
ndi.destroy()
self._ndi_ok = False
def is_ndi_ok(self) -> bool:
return self._ndi_ok
def _on_update(self, e):
to_remove = []
for stream in self._streams:
if not stream.is_running():
to_remove.append(stream)
for stream in to_remove:
self._streams.remove(stream)
EventSystem.send_event(EventSystem.STREAM_STOP_TIMEOUT_EVENT, payload={"dynamic_id": stream.get_id()})
stream.destroy()
def _ndi_init(self):
if not ndi.initialize():
logger = logging.getLogger(__name__)
logger.error("Could not initialize NDI®")
return
self._ndi_ok = True
def _ndi_find_init(self):
self._ndi_find = ndi.find_create_v2()
if self._ndi_find is None:
logger = logging.getLogger(__name__)
logger.error("Could not initialize NDI® find")
return
def _create_finder(self):
if self._ndi_find:
self._finder = NDIfinder(self)
def get_ndi_find(self):
return self._ndi_find
def get_stream(self, dynamic_id):
return next((x for x in self._streams if x.get_id() == dynamic_id), None)
def try_add_stream(self, dynamic_id: str, ndi_source: str, lowbandwidth: bool,
update_fps_fn, update_dimensions_fn) -> bool:
stream: NDIVideoStream = NDIVideoStream(dynamic_id, ndi_source, lowbandwidth, self,
update_fps_fn, update_dimensions_fn)
if not stream.is_ok:
logger = logging.getLogger(__name__)
logger.error(f"Error opening stream: {ndi_source}")
return False
self._streams.append(stream)
return True
def try_add_stream_proxy(self, dynamic_id: str, ndi_source: str, fps: float,
lowbandwidth: bool) -> bool:
stream: NDIVideoStreamProxy = NDIVideoStreamProxy(dynamic_id, ndi_source, fps, lowbandwidth)
if not stream.is_ok:
logger = logging.getLogger(__name__)
logger.error(f"Error opening stream: {ndi_source}")
return False
self._streams.append(stream)
return True
def stop_stream(self, dynamic_id: str):
stream = self.get_stream(dynamic_id)
if stream is not None:
self._streams.remove(stream)
stream.destroy()
def stop_all_streams(self):
for stream in self._streams:
stream.destroy()
self._streams.clear()
class NDIfinder():
SLEEP_INTERVAL: float = 2 # seconds
def __init__(self, tools: NDItools):
self._tools = tools
self._previous_sources: List[str] = []
self._is_running = True
self._thread = threading.Thread(target=self._search)
self._thread.start()
def destroy(self):
self._is_running = False
self._thread.join()
self._thread = None
def _search(self):
find = self._tools.get_ndi_find()
if find:
while self._is_running:
sources = ndi.find_get_current_sources(find)
result = [s.ndi_name for s in sources]
delta = set(result) ^ set(self._previous_sources)
if len(delta) > 0:
self._previous_sources = result
EventSystem.send_event(EventSystem.NDIFINDER_NEW_SOURCES, payload={"sources": result})
time.sleep(NDIfinder.SLEEP_INTERVAL)
self._is_running = False
class NDIVideoStream():
NO_FRAME_TIMEOUT = 5 # seconds
def __init__(self, dynamic_id: str, ndi_source: str, lowbandwidth: bool, tools: NDItools,
update_fps_fn, update_dimensions_fn):
wp.init()
self._dynamic_id = dynamic_id
self._ndi_source = ndi_source
self._lowbandwidth = lowbandwidth
self._thread: threading.Thread = None
self._ndi_recv = None
self._update_fps_fn = update_fps_fn
self._fps_current = 0.0
self._fps_avg_total = 0.0
self._fps_avg_count = 0
self._fps_expected = 0.0
self._update_dimensions_fn = update_dimensions_fn
self.is_ok = False
if not tools.is_ndi_ok():
return
ndi_find = tools.get_ndi_find()
source = None
sources = ndi.find_get_current_sources(ndi_find)
source_candidates = [s for s in sources if s.ndi_name == self._ndi_source]
if len(source_candidates) != 0:
source = source_candidates[0]
if source is None:
logger = logging.getLogger(__name__)
logger.error(f"TIMEOUT: Could not find source at \"{self._ndi_source}\".")
return
if lowbandwidth:
recv_create_desc = self.get_recv_low_bandwidth()
else:
recv_create_desc = self.get_recv_high_bandwidth()
self._ndi_recv = ndi.recv_create_v3(recv_create_desc)
if self._ndi_recv is None:
logger = logging.getLogger(__name__)
logger.error("Could not create NDI® receiver")
return
ndi.recv_connect(self._ndi_recv, source)
self._is_running = True
self._thread = threading.Thread(target=self._update_texture, args=(self._dynamic_id, ))
self._thread.start()
self.is_ok = True
def _update_fps(self):
self._update_fps_fn(self._fps_current, self._fps_avg_total / self._fps_avg_count if self._fps_avg_count != 0 else 0, self._fps_expected)
def destroy(self):
self._update_fps()
self._is_running = False
self._thread.join()
self._thread = None
ndi.recv_destroy(self._ndi_recv)
def get_id(self) -> str:
return self._dynamic_id
def is_running(self) -> bool:
return self._is_running
def get_recv_high_bandwidth(self):
recv_create_desc = ndi.RecvCreateV3()
recv_create_desc.color_format = ndi.RECV_COLOR_FORMAT_RGBX_RGBA
recv_create_desc.bandwidth = ndi.RECV_BANDWIDTH_HIGHEST
return recv_create_desc
def get_recv_low_bandwidth(self):
recv_create_desc = ndi.RecvCreateV3()
recv_create_desc.color_format = ndi.RECV_COLOR_FORMAT_RGBX_RGBA
recv_create_desc.bandwidth = ndi.RECV_BANDWIDTH_LOWEST
return recv_create_desc
@carb.profiler.profile
def _update_texture(self, dynamic_id: str):
carb.profiler.begin(0, 'Omniverse NDI®::Init')
dynamic_texture = omni.ui.DynamicTextureProvider(dynamic_id)
last_read = time.time() - 1 # Make sure we run on the first frame
fps = 120.0
no_frame_chances = NDIVideoStream.NO_FRAME_TIMEOUT * fps
index = 0
self._fps_avg_total = 0.0
self._fps_avg_count = 0
carb.profiler.end(0)
while self._is_running:
carb.profiler.begin(1, 'Omniverse NDI®::loop outer')
now = time.time()
time_delta = now - last_read
if (time_delta < 1.0 / fps):
carb.profiler.end(1)
continue
carb.profiler.begin(2, 'Omniverse NDI®::loop inner')
self._fps_current = 1.0 / time_delta
last_read = now
carb.profiler.begin(3, 'Omniverse NDI®::receive frame')
t, v, _, _ = ndi.recv_capture_v2(self._ndi_recv, 0)
carb.profiler.end(3)
if t == ndi.FRAME_TYPE_VIDEO:
carb.profiler.begin(3, 'Omniverse NDI®::prepare frame')
fps = v.frame_rate_N / v.frame_rate_D
self._fps_expected = fps
if (index == 0):
self._fps_current = fps
color_format = v.FourCC
frame = v.data
height, width, channels = frame.shape
isGPU = height == width
carb.profiler.end(3)
if isGPU:
carb.profiler.begin(3, 'Omniverse NDI®::begin gpu')
with wp.ScopedDevice("cuda"):
                        # CUDA doesn't handle non-square textures well, so we would need to resize if the texture is not square.
# We are keeping this code in case we find a workaround
#
# carb.profiler.begin(4, 'Omniverse NDI®::begin cpu resize')
# frame = np.resize(frame, (width, width, channels))
# carb.profiler.end(4)
# 38 ms
carb.profiler.begin(4, 'Omniverse NDI®::gpu uploading')
pixels_data = wp.from_numpy(frame, dtype=wp.uint8, device="cuda")
carb.profiler.end(4)
# 1 ms
carb.profiler.begin(4, 'Omniverse NDI®::create gpu texture')
self._update_dimensions_fn(width, height, str(color_format))
dynamic_texture.set_bytes_data_from_gpu(pixels_data.ptr, [width, width])
carb.profiler.end(4)
carb.profiler.end(3)
else:
carb.profiler.begin(3, 'Omniverse NDI®::begin cpu')
self._update_dimensions_fn(width, height, str(color_format))
dynamic_texture.set_data_array(frame, [width, height, channels])
carb.profiler.end(3)
ndi.recv_free_video_v2(self._ndi_recv, v)
carb.profiler.end(3)
self._fps_avg_total += self._fps_current
self._fps_avg_count += 1
self._update_fps()
index += 1
if t == ndi.FRAME_TYPE_NONE:
no_frame_chances -= 1
if (no_frame_chances <= 0):
self._is_running = False
else:
no_frame_chances = NDIVideoStream.NO_FRAME_TIMEOUT * fps
carb.profiler.end(2)
carb.profiler.end(1)
class NDIVideoStreamProxy():
def __init__(self, dynamic_id: str, ndi_source: str, fps: float, lowbandwidth: bool):
self._dynamic_id = dynamic_id
self._ndi_source = ndi_source
self._fps = fps
self._lowbandwidth = lowbandwidth
self._thread: threading.Thread = None
self.is_ok = False
denominator = 1
if lowbandwidth:
denominator = 3
w = int(1920 / denominator) # TODO: dimensions from name like for fps
h = int(1080 / denominator)
self._is_running = True
self._thread = threading.Thread(target=self._update_texture, args=(self._dynamic_id, self._fps, w, h, ))
self._thread.start()
self.is_ok = True
def destroy(self):
self._is_running = False
self._thread.join()
self._thread = None
def get_id(self) -> str:
return self._dynamic_id
def is_running(self) -> bool:
return self._is_running
@carb.profiler.profile
def _update_texture(self, dynamic_id: str, fps: float, width: float, height: float):
carb.profiler.begin(0, 'Omniverse NDI®::Init')
color = np.array([255, 0, 0, 255], np.uint8)
channels = len(color)
dynamic_texture = omni.ui.DynamicTextureProvider(dynamic_id)
frame = np.full((height, width, channels), color, dtype=np.uint8)
last_read = time.time() - 1
carb.profiler.end(0)
while self._is_running:
carb.profiler.begin(1, 'Omniverse NDI®::Proxy loop outer')
now = time.time()
time_delta = now - last_read
if (time_delta < 1.0 / fps):
carb.profiler.end(1)
continue
carb.profiler.begin(2, 'Omniverse NDI®::Proxy loop inner')
last_read = now
carb.profiler.begin(3, 'Omniverse NDI®::set_data')
dynamic_texture.set_data_array(frame, [width, height, channels])
carb.profiler.end(3)
carb.profiler.end(2)
carb.profiler.end(1)
| 13,024 | Python | 33.276316 | 144 | 0.552288 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/extension.py | from .window import Window
import asyncio
import omni.ext
import omni.kit.app
import omni.kit.ui
class MFOVNdiExtension(omni.ext.IExt):
MENU_PATH = f"Window/{Window.WINDOW_NAME}"
def on_startup(self, _):
self._menu = None
self._window: Window = None
editor_menu = omni.kit.ui.get_editor_menu()
if editor_menu:
self._menu = editor_menu.add_item(
MFOVNdiExtension.MENU_PATH, self._show_window, toggle=True, value=True
)
self._show_window(None, True)
def on_shutdown(self):
if self._menu:
self._menu = None
if self._window:
self._destroy_window()
def _destroy_window(self):
self._window.destroy()
self._window = None
def _set_menu(self, visible):
editor_menu = omni.kit.ui.get_editor_menu()
if editor_menu:
editor_menu.set_value(MFOVNdiExtension.MENU_PATH, visible)
async def _destroy_window_async(self):
await omni.kit.app.get_app().next_update_async()
if self._window:
self._destroy_window()
def _visibility_changed_fn(self, visible):
self._set_menu(visible)
if not visible:
asyncio.ensure_future(self._destroy_window_async())
def _show_window(self, _, value):
if value:
self._window = Window(width=800, height=275)
self._window.set_visibility_changed_fn(self._visibility_changed_fn)
elif self._window:
self._destroy_window()
| 1,541 | Python | 27.036363 | 86 | 0.595717 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/__init__.py | from .extension import *
| 25 | Python | 11.999994 | 24 | 0.76 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/model.py | from .bindings import Binding, BindingsModel
from .comboboxModel import ComboboxModel
from .NDItools import NDItools
from .USDtools import DynamicPrim, USDtools
import logging
import re
from typing import List
class Model():
def __init__(self):
self._bindings_model: BindingsModel = BindingsModel()
self._ndi: NDItools = NDItools()
def destroy(self):
self._ndi.destroy()
self._bindings_model.destroy()
# region bindings
def get_bindings_count(self) -> int:
return self._bindings_model.count()
def get_binding_data_from_index(self, index: int):
return self._bindings_model.get(index)
def get_ndi_source_list(self) -> List[str]:
return self._bindings_model.get_source_list()
def apply_new_binding_source(self, dynamic_id: str, new_source: str):
self._bindings_model.bind(dynamic_id, new_source)
def apply_lowbandwidth_value(self, dynamic_id: str, value: bool):
self._bindings_model.set_low_bandwidth(dynamic_id, value)
# endregion
# region dynamic
def create_dynamic_material(self, name: str):
safename = USDtools.make_name_valid(name)
if name != safename:
logger = logging.getLogger(__name__)
            logger.warning(f"Name \"{name}\" was not a valid USD identifier, changed it to \"{safename}\"")
final_name = safename
index = 1
while (self._bindings_model.find_binding_from_id(final_name) is not None):
suffix = str(index) if index >= 10 else "0" + str(index) # name, name_01, name_02, ..., name_99, name_100
final_name = safename + "_" + suffix
index += 1
USDtools.create_dynamic_material(final_name)
self.search_for_dynamic_material()
def search_for_dynamic_material(self):
result: List[DynamicPrim] = USDtools.find_all_dynamic_sources()
self._bindings_model.update_dynamic_prims(result)
def _get_prims_with_id(self, dynamic_id: str) -> List[DynamicPrim]:
prims: List[DynamicPrim] = self._bindings_model.get_prim_list()
return [x for x in prims if x.dynamic_id == dynamic_id]
def set_ndi_source_prim_attr(self, dynamic_id: str, source: str):
for prim in self._get_prims_with_id(dynamic_id):
USDtools.set_prim_ndi_attribute(prim.path, source)
def set_lowbandwidth_prim_attr(self, dynamic_id: str, value: bool):
for prim in self._get_prims_with_id(dynamic_id):
USDtools.set_prim_lowbandwidth_attribute(prim.path, value)
# endregion
# region stream
def try_add_stream(self, binding: Binding, lowbandwidth: bool, update_fps_fn, update_dimensions_fn) -> bool:
if self._ndi.get_stream(binding.dynamic_id) is not None:
logger = logging.getLogger(__name__)
logger.warning(f"There's already a stream running for {binding.dynamic_id}")
return False
if binding.ndi_source == ComboboxModel.NONE_VALUE:
logger = logging.getLogger(__name__)
logger.warning("Won't create stream without NDI® source")
return False
if binding.ndi_source == ComboboxModel.PROXY_VALUE:
            fps = float(re.search(r"\((.*)\)", binding.ndi_source).group(1).split("p")[1])
success: bool = self._ndi.try_add_stream_proxy(binding.dynamic_id, binding.ndi_source, fps, lowbandwidth)
return success
else:
success: bool = self._ndi.try_add_stream(binding.dynamic_id, binding.ndi_source, lowbandwidth,
update_fps_fn, update_dimensions_fn)
return success
def stop_stream(self, binding: Binding):
self._ndi.stop_stream(binding.dynamic_id)
def stop_all_streams(self):
self._ndi.stop_all_streams()
# endregion
| 3,820 | Python | 37.989796 | 118 | 0.638482 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/eventsystem.py | import carb.events
import omni.kit.app
class EventSystem():
BINDINGS_CHANGED_EVENT = carb.events.type_from_string("mf.ov.ndi.BINDINGS_CHANGED_EVENT")
COMBOBOX_CHANGED_EVENT = carb.events.type_from_string("mf.ov.ndi.COMBOBOX_CHANGED_EVENT")
NDIFINDER_NEW_SOURCES = carb.events.type_from_string("mf.ov.ndi.NDIFINDER_NEW_SOURCES")
COMBOBOX_SOURCE_CHANGE_EVENT = carb.events.type_from_string("mf.ov.ndi.COMBOBOX_SOURCE_CHANGE_EVENT")
NDI_STATUS_CHANGE_EVENT = carb.events.type_from_string("mf.ov.ndi.NDI_STATUS_CHANGE_EVENT")
STREAM_STOP_TIMEOUT_EVENT = carb.events.type_from_string("mf.ov.ndi.STREAM_STOP_TIMEOUT_EVENT")
def subscribe(event: int, cb: callable) -> carb.events.ISubscription:
bus = omni.kit.app.get_app().get_message_bus_event_stream()
return bus.create_subscription_to_push_by_type(event, cb)
def send_event(event: int, payload: dict = {}):
bus = omni.kit.app.get_app().get_message_bus_event_stream()
bus.push(event, payload=payload)
| 1,015 | Python | 49.799998 | 105 | 0.717241 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/USDtools.py | from .bindings import DynamicPrim
import logging
import numpy as np
import omni.ext
from pxr import Usd, UsdGeom, UsdShade, Sdf, UsdLux, Tf
from typing import List
from unidecode import unidecode
class USDtools():
ATTR_NDI_NAME = 'ndi:source'
ATTR_BANDWIDTH_NAME = "ndi:lowbandwidth"
PREFIX = "dynamic://"
SCOPE_NAME = "NDI_Looks"
def get_stage() -> Usd.Stage:
usd_context = omni.usd.get_context()
return usd_context.get_stage()
def make_name_valid(name: str) -> str:
return Tf.MakeValidIdentifier(unidecode(name))
def create_dynamic_material(safename: str):
stage = USDtools.get_stage()
if not stage:
logger = logging.getLogger(__name__)
logger.error("Could not get stage")
return
scope_path: str = f"{stage.GetDefaultPrim().GetPath()}/{USDtools.SCOPE_NAME}"
UsdGeom.Scope.Define(stage, scope_path)
USDtools._create_material_and_shader(stage, scope_path, safename)
USDtools._fill_dynamic_with_magenta(safename)
def _create_material_and_shader(stage: Usd.Stage, scope_path: str, safename: str):
material_path = f"{scope_path}/{safename}"
material: UsdShade.Material = UsdShade.Material.Define(stage, material_path)
shader: UsdShade.Shader = UsdShade.Shader.Define(stage, f"{material_path}/Shader")
shader.SetSourceAsset("OmniPBR.mdl", "mdl")
shader.SetSourceAssetSubIdentifier("OmniPBR", "mdl")
shader.CreateIdAttr("OmniPBR")
shader.CreateInput("diffuse_texture", Sdf.ValueTypeNames.Asset).Set(f"{USDtools.PREFIX}{safename}")
material.CreateSurfaceOutput().ConnectToSource(shader.ConnectableAPI(), "surface")
def _fill_dynamic_with_magenta(safename: str):
magenta = np.array([255, 0, 255, 255], np.uint8)
frame = np.full((1, 1, 4), magenta, dtype=np.uint8)
height, width, channels = frame.shape
dynamic_texture = omni.ui.DynamicTextureProvider(safename)
dynamic_texture.set_data_array(frame, [width, height, channels])
def find_all_dynamic_sources() -> List[DynamicPrim]:
stage = USDtools.get_stage()
if not stage:
logger = logging.getLogger(__name__)
logger.warning("Could not get stage")
return []
dynamic_sources: List[str] = []
dynamic_shaders, dynamic_sources = USDtools._find_all_dynamic_shaders(stage, dynamic_sources)
dynamic_lights, _ = USDtools._find_all_dynamic_lights(stage, dynamic_sources)
return dynamic_shaders + dynamic_lights
def _find_all_dynamic_shaders(stage: Usd.Stage, sources: List[str]):
shaders: List[UsdShade.Shader] = [UsdShade.Shader(x) for x in stage.Traverse() if x.IsA(UsdShade.Shader)]
result: List[DynamicPrim] = []
prefix_length: int = len(USDtools.PREFIX)
for shader in shaders:
albedo = shader.GetInput("diffuse_texture").Get()
# roughness = shader.GetInput("reflectionroughness_texture").Get()
# metallic = shader.GetInput("metallic_texture").Get()
# orm = shader.GetInput("ORM_texture").Get()
# ambient_occlusion = shader.GetInput("ao_texture").Get()
emissive = shader.GetInput("emissive_color_texture").Get()
# emissive_mask = shader.GetInput("emissive_mask_texture").Get()
# opacity = shader.GetInput("opacity_texture").Get()
# normal = shader.GetInput("normalmap_texture").Get()
# normal_detail = shader.GetInput("detail_normalmap_texture").Get()
values_set = set([albedo, emissive])
values_unique = list(values_set)
for texture_value in values_unique:
if texture_value:
path: str = texture_value.path
if len(path) > prefix_length:
candidate = path[:prefix_length]
if candidate == USDtools.PREFIX:
name = path[prefix_length:]
if name not in sources:
sources.append(name)
attr_ndi = shader.GetPrim().GetAttribute(USDtools.ATTR_NDI_NAME)
attr_ndi = attr_ndi.Get() if attr_ndi.IsValid() else None
attr_low = shader.GetPrim().GetAttribute(USDtools.ATTR_BANDWIDTH_NAME)
attr_low = attr_low.Get() if attr_low.IsValid() else False
p = DynamicPrim(shader.GetPath().pathString, name, attr_ndi, attr_low)
result.append(p)
return result, sources
def _find_all_dynamic_lights(stage: Usd.Stage, sources: List[str]):
        rect_lights: List[UsdLux.RectLight] = [UsdLux.RectLight(x) for x in stage.Traverse() if x.IsA(UsdLux.RectLight)]
result: List[DynamicPrim] = []
prefix_length: int = len(USDtools.PREFIX)
for rect_light in rect_lights:
# TODO: Filter those that have "isProjector" (the attribute doesn't exist)
attribute = rect_light.GetPrim().GetAttribute("texture:file").Get()
if attribute:
path: str = attribute.path
if len(path) > prefix_length:
candidate = path[:prefix_length]
if candidate == USDtools.PREFIX:
name = path[prefix_length:]
if name not in sources:
attr_ndi = rect_light.GetPrim().GetAttribute(USDtools.ATTR_NDI_NAME)
attr_ndi = attr_ndi.Get() if attr_ndi.IsValid() else None
attr_low = rect_light.GetPrim().GetAttribute(USDtools.ATTR_BANDWIDTH_NAME)
attr_low = attr_low.Get() if attr_low.IsValid() else False
p = DynamicPrim(rect_light.GetPath().pathString, name, attr_ndi, attr_low)
result.append(p)
return result, sources
def set_prim_ndi_attribute(path: str, value: str):
stage = USDtools.get_stage()
if not stage:
logger = logging.getLogger(__name__)
logger.error("Could not get stage")
return
prim: Usd.Prim = stage.GetPrimAtPath(path)
if not prim.IsValid():
logger = logging.getLogger(__name__)
logger.error(f"Could not set the ndi attribute of prim at {path}")
return
prim.CreateAttribute(USDtools.ATTR_NDI_NAME, Sdf.ValueTypeNames.String).Set(value)
def set_prim_lowbandwidth_attribute(path: str, value: bool):
stage = USDtools.get_stage()
if not stage:
logger = logging.getLogger(__name__)
logger.error("Could not get stage")
return
prim: Usd.Prim = stage.GetPrimAtPath(path)
if not prim.IsValid():
logger = logging.getLogger(__name__)
logger.error(f"Could not set the bandwidth attribute of prim at {path}")
prim.CreateAttribute(USDtools.ATTR_BANDWIDTH_NAME, Sdf.ValueTypeNames.Bool).Set(value)
# region stage events
def subscribe_to_stage_events(callback):
return (
omni.usd.get_context()
.get_stage_event_stream()
.create_subscription_to_pop(callback, name="mf.ov.ndi.STAGE_EVENT")
)
def is_StageEventType_OPENED(type) -> bool:
return type == int(omni.usd.StageEventType.OPENED)
def is_StageEventType_CLOSE(type) -> bool:
return type == int(omni.usd.StageEventType.CLOSING) or type == int(omni.usd.StageEventType.CLOSED)
# endregion
| 7,708 | Python | 44.081871 | 120 | 0.597172 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/comboboxModel.py | from .eventsystem import EventSystem
import omni.ui as ui
from typing import List
class ComboboxItem(ui.AbstractItem):
def __init__(self, value: str):
super().__init__()
self.model = ui.SimpleStringModel(value)
def value(self):
return self.model.get_value_as_string()
class ComboboxModel(ui.AbstractItemModel):
NONE_VALUE = "NONE"
PROXY_VALUE = "PROXY (1080p30) - RED"
def __init__(self, items: List[str], selected: str, name: str, index: int):
super().__init__()
self._name = name
self._index = index
# minimal model implementation
self._current_index = ui.SimpleIntModel()
self._current_index.add_value_changed_fn(lambda a: self._current_index_changed_fn())
self.set_items_and_current(items, selected)
def _current_index_changed_fn(self):
self._item_changed(None)
EventSystem.send_event(EventSystem.COMBOBOX_CHANGED_EVENT,
payload={"id": self._name, "index": self._index, "value": self._current_value()})
def set_items_and_current(self, items: List[str], current: str):
self._items = [ComboboxItem(text) for text in items]
self._set_current_from_value(current)
def _set_current_from_value(self, current: str):
index = next((i for i, item in enumerate(self._items) if item.value() == current), 0)
self._current_index.set_value(index)
self._item_changed(None)
def _current_value(self) -> str:
current_item = self._items[self._current_index.get_value_as_int()]
return current_item.value()
# minimal model implementation
def get_item_children(self, item):
return self._items
# minimal model implementation
def get_item_value_model(self, item, _):
if item is None:
return self._current_index
return item.model
| 1,893 | Python | 31.655172 | 112 | 0.628632 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/window.py | from .bindings import Binding
from .comboboxModel import ComboboxModel
from .eventsystem import EventSystem
from .model import Model
from .USDtools import USDtools
import asyncio
import carb.events
import omni.ui as ui
import omni.kit.app
import pyperclip
from typing import List
class Window(ui.Window):
WINDOW_NAME = "NDI®"
DEFAULT_TEXTURE_NAME = "myDynamicMaterial"
NEW_TEXTURE_BTN_TXT = "Create Dynamic Texture"
DISCOVER_TEX_BTN_TXT = "Discover Dynamic Textures"
STOP_STREAMS_BTN_TXT = "Stop all streams"
EMPTY_TEXTURE_LIST_TXT = "No dynamic texture found"
def __init__(self, delegate=None, **kwargs):
self._model: Model = Model()
self._bindingPanels: List[BindingPanel] = []
self._last_material_name = Window.DEFAULT_TEXTURE_NAME
super().__init__(Window.WINDOW_NAME, **kwargs)
self.frame.set_build_fn(self._build_fn)
self._subscribe()
self._model.search_for_dynamic_material()
def destroy(self):
for panel in self._bindingPanels:
panel.destroy()
self._model.destroy()
self._unsubscribe()
super().destroy()
def _subscribe(self):
self._sub: List[carb.events.ISubscription] = []
self._sub.append(EventSystem.subscribe(EventSystem.BINDINGS_CHANGED_EVENT, self._bindings_updated_evt_callback))
self._sub.append(EventSystem.subscribe(EventSystem.COMBOBOX_CHANGED_EVENT, self._combobox_changed_evt_callback))
self._sub.append(EventSystem.subscribe(EventSystem.COMBOBOX_SOURCE_CHANGE_EVENT,
self._ndi_sources_changed_evt_callback))
self._sub.append(EventSystem.subscribe(EventSystem.NDI_STATUS_CHANGE_EVENT,
self._ndi_status_change_evt_callback))
self._sub.append(EventSystem.subscribe(EventSystem.STREAM_STOP_TIMEOUT_EVENT,
self._stream_stop_timeout_evt_callback))
self._sub.append(USDtools.subscribe_to_stage_events(self._stage_event_evt_callback))
def _unsubscribe(self):
for sub in self._sub:
sub.unsubscribe()
sub = None
self._sub.clear()
def _build_fn(self):
with ui.VStack(style={"margin": 3}):
self._ui_section_header()
self._ui_section_bindings()
# region events callback
def _bindings_updated_evt_callback(self, e: carb.events.IEvent):
self.frame.rebuild()
def _combobox_changed_evt_callback(self, e: carb.events.IEvent):
value: str = e.payload["value"]
dynamic_id = e.payload["id"]
panel_index = e.payload["index"]
self._model.apply_new_binding_source(dynamic_id, value)
self._model.set_ndi_source_prim_attr(dynamic_id, value)
if (len(self._bindingPanels) > panel_index):
self._bindingPanels[panel_index].combobox_item_changed()
def _ndi_sources_changed_evt_callback(self, e: carb.events.IEvent):
for panel in self._bindingPanels:
panel.combobox_items_changed(e.payload["sources"])
def _ndi_status_change_evt_callback(self, e: carb.events.IEvent):
for panel in self._bindingPanels:
panel.check_for_ndi_status()
def _stream_stop_timeout_evt_callback(self, e: carb.events.IEvent):
panel: BindingPanel = next(x for x in self._bindingPanels if x.get_dynamic_id() == e.payload["dynamic_id"])
panel.on_stop_stream()
def _stage_event_evt_callback(self, e: carb.events.IEvent):
if USDtools.is_StageEventType_OPENED(e.type):
self._model.search_for_dynamic_material()
self._model.stop_all_streams()
if USDtools.is_StageEventType_CLOSE(e.type):
self._model.stop_all_streams()
# endregion
# region UI
def _ui_section_header(self):
button_style = {"Button": {"stack_direction": ui.Direction.LEFT_TO_RIGHT}}
with ui.HStack(height=0):
self._dynamic_name = ui.StringField()
self._dynamic_name.model.set_value(self._last_material_name)
ui.Button(Window.NEW_TEXTURE_BTN_TXT, image_url="resources/glyphs/menu_plus.svg", image_width=24,
style=button_style, clicked_fn=self._on_click_create_dynamic_material)
with ui.HStack(height=0):
ui.Button(Window.DISCOVER_TEX_BTN_TXT, image_url="resources/glyphs/menu_refresh.svg", image_width=24,
style=button_style, clicked_fn=self._on_click_refresh_materials)
ui.Button(Window.STOP_STREAMS_BTN_TXT, clicked_fn=self._on_click_stop_all_streams)
def _ui_section_bindings(self):
self._bindingPanels = []
with ui.ScrollingFrame():
with ui.VStack():
count: int = self._model.get_bindings_count()
if count == 0:
ui.Label(Window.EMPTY_TEXTURE_LIST_TXT)
else:
for i in range(count):
self._bindingPanels.append(BindingPanel(i, self, height=0))
# endregion
# region controls
def _on_click_create_dynamic_material(self):
self._stop_all_streams()
name: str = self._dynamic_name.model.get_value_as_string()
self._last_material_name = name
self._model.create_dynamic_material(name)
def _on_click_refresh_materials(self):
self._stop_all_streams()
self._model.search_for_dynamic_material()
def _on_click_stop_all_streams(self):
self._stop_all_streams()
def _stop_all_streams(self):
self._model.stop_all_streams()
for panel in self._bindingPanels:
panel.on_stop_stream()
# endregion
# region BindingPanel Callable
def get_binding_data_from_index(self, index: int):
return self._model.get_binding_data_from_index(index)
def get_choices_for_combobox(self) -> List[str]:
return self._model.get_ndi_source_list()
def apply_lowbandwidth_value(self, dynamic_id: str, value: bool):
self._model.apply_lowbandwidth_value(dynamic_id, value)
self._model.set_lowbandwidth_prim_attr(dynamic_id, value)
def try_add_stream(self, binding: Binding, lowbandwidth: bool, update_fps_fn, update_dimensions_fn) -> bool:
return self._model.try_add_stream(binding, lowbandwidth, update_fps_fn, update_dimensions_fn)
def stop_stream(self, binding: Binding):
return self._model.stop_stream(binding)
# endregion
class BindingPanel(ui.CollapsableFrame):
NDI_COLOR_STOPPED = "#E6E7E8"
NDI_COLOR_PLAYING = "#78B159"
NDI_COLOR_WARNING = "#F4900C"
NDI_COLOR_INACTIVE = "#DD2E45"
NDI_STATUS = "resources/glyphs/circle.svg"
PLAY_ICON = "resources/glyphs/timeline_play.svg"
PAUSE_ICON = "resources/glyphs/toolbar_pause.svg"
COPY_ICON = "resources/glyphs/copy.svg"
LOW_BANDWIDTH_ICON = "resources/glyphs/AOV_dark.svg"
PLAYPAUSE_BTN_NAME = "play_pause_btn"
BANDWIDTH_BTN_NAME = "low_bandwidth_btn"
COPYPATH_BTN_NAME = "copy_path_btn"
RUNNING_LABEL_SUFFIX = " - running"
def __init__(self, index: int, window: Window, **kwargs):
self._index = index
self._window = window
binding, _, ndi = self._get_data()
choices = self._get_choices()
self._dynamic_id = binding.dynamic_id
self._lowbandwidth_value = binding.lowbandwidth
self._is_playing = False
super().__init__(binding.dynamic_id, **kwargs)
self._info_window = None
with self:
with ui.HStack():
self._status_icon = ui.Image(BindingPanel.NDI_STATUS, width=20,
mouse_released_fn=self._show_info_window)
self._set_ndi_status_icon(ndi.active)
self._combobox_alt = ui.Label("")
self._set_combobox_alt_text(binding.ndi_source)
self._combobox_alt.visible = False
self._combobox = ComboboxModel(choices, binding.ndi_source, binding.dynamic_id, self._index)
self._combobox_ui = ui.ComboBox(self._combobox)
self.play_pause_toolbutton = ui.Button(text="", image_url=BindingPanel.PLAY_ICON, height=30,
width=30, clicked_fn=self._on_click_play_pause_ndi,
name=BindingPanel.PLAYPAUSE_BTN_NAME)
self._lowbandwidth_toolbutton = ui.ToolButton(image_url=BindingPanel.LOW_BANDWIDTH_ICON, width=30,
height=30, tooltip="Low bandwidth mode",
clicked_fn=self._set_low_bandwidth_value,
name=BindingPanel.BANDWIDTH_BTN_NAME)
self._lowbandwidth_toolbutton.model.set_value(self._lowbandwidth_value)
ui.Button("", image_url=BindingPanel.COPY_ICON, width=30, height=30, clicked_fn=self._on_click_copy,
tooltip="Copy dynamic texture path(dynamic://*)", name=BindingPanel.COPYPATH_BTN_NAME)
def destroy(self):
self._info_window_destroy()
# region Info Window
def _show_info_window(self, _x, _y, button, _modifier):
if (button == 0): # left click
binding, _, _ = self._get_data()
if not self._info_window:
self._info_window = StreamInfoWindow(f"{self._dynamic_id} info", binding.ndi_source,
width=280, height=200)
self._info_window.set_visibility_changed_fn(self._info_window_visibility_changed)
elif self._info_window:
self._info_window_destroy()
def _info_window_visibility_changed(self, visible):
if not visible:
asyncio.ensure_future(self._info_window_destroy_async())
def _info_window_destroy(self):
if self._info_window:
self._info_window.destroy()
self._info_window = None
async def _info_window_destroy_async(self):
await omni.kit.app.get_app().next_update_async()
if self._info_window:
self._info_window_destroy()
def update_fps(self, fps_current: float, fps_average: float, fps_expected: float):
if self._info_window:
self._info_window.set_fps_values(fps_current, fps_average, fps_expected)
def update_details(self, width: int, height: int, color_format: str):
if self._info_window:
self._info_window.set_stream_details(width, height, color_format)
# endregion
def combobox_items_changed(self, items: List[str]):
binding, _, _ = self._get_data()
self._combobox.set_items_and_current(items, binding.ndi_source)
def check_for_ndi_status(self):
_, _, ndi = self._get_data()
self._set_ndi_status_icon(ndi.active)
def combobox_item_changed(self):
binding, _, ndi = self._get_data()
self._set_combobox_alt_text(binding.ndi_source)
self._set_ndi_status_icon(ndi.active)
if self._info_window:
self._info_window.set_stream_name(binding.ndi_source)
def get_dynamic_id(self) -> str:
return self._dynamic_id
def _get_data(self):
return self._window.get_binding_data_from_index(self._index)
def _get_choices(self):
return self._window.get_choices_for_combobox()
def _on_click_copy(self):
pyperclip.copy(f"{USDtools.PREFIX}{self._dynamic_id}")
def _set_low_bandwidth_value(self):
self._lowbandwidth_value = not self._lowbandwidth_value
self._window.apply_lowbandwidth_value(self._dynamic_id, self._lowbandwidth_value)
def _on_play_stream(self):
self._is_playing = True
self.play_pause_toolbutton.image_url = BindingPanel.PAUSE_ICON
self._lowbandwidth_toolbutton.enabled = False
self._combobox_ui.visible = False
self._combobox_alt.visible = True
self.check_for_ndi_status()
def on_stop_stream(self):
self._is_playing = False
self.play_pause_toolbutton.image_url = BindingPanel.PLAY_ICON
self._lowbandwidth_toolbutton.enabled = True
self._combobox_ui.visible = True
self._combobox_alt.visible = False
self.check_for_ndi_status()
def _on_click_play_pause_ndi(self):
binding, _, _ = self._get_data()
if self._is_playing:
self._window.stop_stream(binding)
self.on_stop_stream()
else:
if self._window.try_add_stream(binding, self._lowbandwidth_value, self.update_fps, self.update_details):
self._on_play_stream()
def _set_combobox_alt_text(self, text: str):
self._combobox_alt.text = f"{text}{BindingPanel.RUNNING_LABEL_SUFFIX}"
def _set_ndi_status_icon(self, active: bool):
if active and self._is_playing:
self._status_icon.style = {"color": ui.color(BindingPanel.NDI_COLOR_PLAYING)}
elif active and not self._is_playing:
self._status_icon.style = {"color": ui.color(BindingPanel.NDI_COLOR_STOPPED)}
elif not active and self._is_playing:
self._status_icon.style = {"color": ui.color(BindingPanel.NDI_COLOR_WARNING)}
else: # not active and not self._is_playing
self._status_icon.style = {"color": ui.color(BindingPanel.NDI_COLOR_INACTIVE)}
class StreamInfoWindow(ui.Window):
def __init__(self, dynamic_id: str, ndi_id: str, delegate=None, **kwargs):
super().__init__(dynamic_id, **kwargs)
self.frame.set_build_fn(self._build_fn)
self._stream_name = ndi_id
def destroy(self):
super().destroy()
def _build_fn(self):
with ui.VStack(height=0):
with ui.HStack():
ui.Label("Stream name:")
self._stream_name_model = ui.StringField(enabled=False).model
self._stream_name_model.set_value(self._stream_name)
with ui.HStack():
ui.Label("Current fps:")
self._fps_current_model = ui.FloatField(enabled=False).model
self._fps_current_model.set_value(0.0)
with ui.HStack():
ui.Label("Average fps:")
self._fps_average_model = ui.FloatField(enabled=False).model
self._fps_average_model.set_value(0.0)
with ui.HStack():
ui.Label("Expected fps:")
self._fps_expected_model = ui.FloatField(enabled=False).model
self._fps_expected_model.set_value(0.0)
with ui.HStack():
ui.Label("Width:")
self._dimensions_width_model = ui.IntField(enabled=False).model
self._dimensions_width_model.set_value(0)
with ui.HStack():
ui.Label("Height:")
self._dimensions_height_model = ui.IntField(enabled=False).model
self._dimensions_height_model.set_value(0)
with ui.HStack():
ui.Label("Color format:")
self._color_format_model = ui.StringField(enabled=False).model
self._color_format_model.set_value("")
def set_fps_values(self, fps_current: float, fps_average: float, fps_expected: float):
        # If this property exists, all the others do as well since it's the last one to be initialized
if hasattr(self, "_fps_expected_model"):
self._fps_current_model.set_value(fps_current)
self._fps_average_model.set_value(fps_average)
self._fps_expected_model.set_value(fps_expected)
def set_stream_name(self, name: str):
# No need to check if attribute exists because no possibility of concurrency between build fn and caller
self._stream_name_model.set_value(name)
# Reset other values
self._fps_current_model.set_value(0.0)
self._fps_average_model.set_value(0.0)
self._fps_expected_model.set_value(0.0)
self._dimensions_width_model.set_value(0)
self._dimensions_height_model.set_value(0)
self._color_format_model.set_value("")
def set_stream_details(self, width: int, height: int, color_format: str):
if hasattr(self, "_color_format_model"):
self._dimensions_width_model.set_value(width)
self._dimensions_height_model.set_value(height)
# Original format is similar to FourCCVideoType.FOURCC_VIDEO_TYPE_RGBA, we want to display only "RGBA"
color_format_simple = color_format.split("_")[-1]
self._color_format_model.set_value(color_format_simple)
| 16,724 | Python | 41.128463 | 120 | 0.61032 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/bindings.py | from .comboboxModel import ComboboxModel
from .eventsystem import EventSystem
import carb.events
from dataclasses import dataclass
from typing import List
@dataclass
class DynamicPrim:
path: str
dynamic_id: str
ndi_source_attr: str
lowbandwidth_attr: bool
@dataclass
class Binding():
dynamic_id: str
ndi_source: str
lowbandwidth: bool
@dataclass
class NDIData():
source: str
active: bool
class BindingsModel():
NONE_DATA = NDIData(ComboboxModel.NONE_VALUE, False)
def __init__(self):
self._bindings: List[Binding] = []
self._dynamic_prims: List[DynamicPrim] = []
self._ndi_sources: List[NDIData] = []
self._ndi_sources.append(BindingsModel.NONE_DATA)
self._sub = EventSystem.subscribe(EventSystem.NDIFINDER_NEW_SOURCES, self._ndi_sources_change_evt_callback)
def destroy(self):
self._sub.unsubscribe()
self._sub = None
self._dynamic_prims = []
self._bindings = []
self._ndi_sources = []
def count(self):
return len(self._bindings)
def get(self, index: int) -> Binding:
binding: Binding = self._bindings[index]
prim: DynamicPrim = self.find_binding_from_id(binding.dynamic_id)
ndi: NDIData = self._find_ndi_from_source(binding.ndi_source)
return binding, prim, ndi
def get_source_list(self) -> List[str]:
return [x.source for x in self._ndi_sources]
def _get_non_static_source_list(self) -> List[NDIData]:
return self._ndi_sources[1:] # Excludes NONE_DATA
def get_prim_list(self) -> List[str]:
return [x for x in self._dynamic_prims]
def bind(self, dynamic_id, new_source):
binding: Binding = self.find_binding_from_id(dynamic_id)
binding.ndi_source = new_source
def set_low_bandwidth(self, dynamic_id: str, value: bool):
binding: Binding = self.find_binding_from_id(dynamic_id)
binding.lowbandwidth = value
def find_binding_from_id(self, dynamic_id: str) -> Binding:
return next((x for x in self._bindings if x.dynamic_id == dynamic_id), None)
def _find_binding_from_ndi(self, ndi_source: str) -> Binding:
return next((x for x in self._bindings if x.source == ndi_source), None)
def _find_ndi_from_source(self, ndi_source: str) -> NDIData:
if ndi_source is None:
return self._ndi_sources[0]
return next((x for x in self._ndi_sources if x.source == ndi_source), None)
def update_dynamic_prims(self, prims: List[DynamicPrim]):
self._dynamic_prims = prims
self._update_ndi_from_prims()
self._update_bindings_from_prims()
EventSystem.send_event(EventSystem.BINDINGS_CHANGED_EVENT)
def _update_ndi_from_prims(self):
for dynamic_prim in self._dynamic_prims:
ndi: NDIData = self._find_ndi_from_source(dynamic_prim.ndi_source_attr)
if ndi is None:
self._ndi_sources.append(NDIData(dynamic_prim.ndi_source_attr, False))
def _update_bindings_from_prims(self):
self._bindings.clear()
for dynamic_prim in self._dynamic_prims:
source_attr = dynamic_prim.ndi_source_attr
source: str = source_attr if source_attr is not None else BindingsModel.NONE_DATA.source
self._bindings.append(Binding(dynamic_prim.dynamic_id, source, dynamic_prim.lowbandwidth_attr))
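    # Callback for the NDI finder: flag known sources as active/inactive, append newly
    # discovered ones, then notify the UI through the combobox and status-change events.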
def _ndi_sources_change_evt_callback(self, e: carb.events.IEvent):
sources = e.payload["sources"]
self._update_ndi_new_and_active_sources(sources)
self._update_ndi_inactive_sources(sources)
EventSystem.send_event(EventSystem.COMBOBOX_SOURCE_CHANGE_EVENT,
payload={"sources": [x.source for x in self._ndi_sources]})
EventSystem.send_event(EventSystem.NDI_STATUS_CHANGE_EVENT)
def _update_ndi_new_and_active_sources(self, sources: List[str]):
for source in sources:
data: NDIData = self._find_ndi_from_source(source)
if data is None:
data = NDIData(source, True)
self._ndi_sources.append(data)
else:
data.active = True
def _update_ndi_inactive_sources(self, sources: List[str]):
for ndi in self._get_non_static_source_list():
is_active = next((x for x in sources if x == ndi.source), None)
if is_active is None:
ndi.active = False
| 4,477 | Python | 33.984375 | 115 | 0.635694 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/deps/NDIlib/__init__.py | import os
import sys
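# Python 3.8+ on Windows no longer searches PATH when resolving DLL dependencies,
# so the directory holding the bundled NDI runtime must be registered explicitly.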
if os.name == 'nt' and sys.version_info.major >= 3 and sys.version_info.minor >= 8:
os.add_dll_directory(os.path.dirname(__file__))
from .NDIlib import *
| 181 | Python | 21.749997 | 83 | 0.679558 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/tests/__init__.py | from .test_USDtools import *
from .test_ui import *
| 52 | Python | 16.666661 | 28 | 0.730769 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/tests/test_ui.py | import omni.kit.test
from ..window import Window, BindingPanel
from ..comboboxModel import ComboboxModel
from .test_utils import (make_stage, close_stage, get_window, DYNAMIC_ID1, DYNAMIC_ID2, create_dynamic_material,
create_dynamic_rectlight, refresh_dynamic_list, get_dynamic_material_prim, add_proxy_source)
class UITestsHeader(omni.kit.test.AsyncTestCase):
def setUp(self):
self._stage = make_stage()
self._window = get_window()
def tearDown(self):
close_stage()
async def test_create_material_button(self):
field = self._window.find("**/StringField[*]")
field.widget.model.set_value(DYNAMIC_ID1)
self.assertEqual(field.widget.model.get_value_as_string(), DYNAMIC_ID1)
button = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
await button.click()
prim = get_dynamic_material_prim(DYNAMIC_ID1)
        self.assertTrue(prim.IsValid())
async def test_texture_discovery(self):
create_dynamic_material()
create_dynamic_rectlight()
await refresh_dynamic_list(self._window)
panels = self._window.find_all("**/BindingPanel[*]")
self.assertEqual(len(panels), 2)
panel1_found = False
panel2_found = False
for panel in panels:
labels = panel.find_all("**/Label[*]")
for label in labels:
if label.widget.text == DYNAMIC_ID1:
panel1_found = True
elif label.widget.text == DYNAMIC_ID2:
panel2_found = True
self.assertTrue(panel1_found)
self.assertTrue(panel2_found)
class UITestsPanel(omni.kit.test.AsyncTestCase):
def setUp(self):
self._stage = make_stage()
self._window = get_window()
def tearDown(self):
close_stage()
async def test_no_panel_on_start(self):
await refresh_dynamic_list(self._window)
panel = self._window.find("**/BindingPanel[*]")
self.assertIsNone(panel)
label = self._window.find("**/Label[*]")
self.assertEqual(label.widget.text, Window.EMPTY_TEXTURE_LIST_TXT)
async def test_combobox_defaults(self):
await refresh_dynamic_list(self._window)
add_proxy_source(self._window.widget)
button = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
await button.click()
combobox = self._window.find("**/ComboBox[*]")
model = combobox.widget.model
self.assertEqual(model._current_value(), ComboboxModel.NONE_VALUE)
model._current_index.set_value(1)
        self.assertNotEqual(model._current_value(), ComboboxModel.NONE_VALUE)
model._current_index.set_value(0)
self.assertEqual(model._current_value(), ComboboxModel.NONE_VALUE)
async def test_low_bandwidth_btn(self):
await refresh_dynamic_list(self._window)
button = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
await button.click()
panel = self._window.find("**/BindingPanel[*]")
binding, _, _ = panel.widget._get_data()
self.assertFalse(binding.lowbandwidth)
button = panel.find(f"**/ToolButton[*].name=='{BindingPanel.BANDWIDTH_BTN_NAME}'")
await button.click()
binding, _, _ = panel.widget._get_data()
self.assertTrue(binding.lowbandwidth)
await button.click()
binding, _, _ = panel.widget._get_data()
self.assertFalse(binding.lowbandwidth)
async def test_low_bandwidth_stream(self):
await refresh_dynamic_list(self._window)
add_proxy_source(self._window.widget)
button = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
await button.click()
combobox = self._window.find("**/ComboBox[*]")
combobox.widget.model._set_current_from_value(ComboboxModel.PROXY_VALUE)
panel = self._window.find("**/BindingPanel[*]")
button_bandwidth = panel.find(f"**/ToolButton[*].name=='{BindingPanel.BANDWIDTH_BTN_NAME}'")
        button_playpause = panel.find(f"**/Button[*].name=='{BindingPanel.PLAYPAUSE_BTN_NAME}'")
self.assertTrue(panel.widget._lowbandwidth_toolbutton.enabled)
await button_playpause.click()
self.assertFalse(self._window.widget._model._ndi._streams[0]._lowbandwidth)
self.assertFalse(panel.widget._lowbandwidth_toolbutton.enabled)
await button_playpause.click()
self.assertTrue(panel.widget._lowbandwidth_toolbutton.enabled)
await button_bandwidth.click()
await button_playpause.click()
self.assertTrue(self._window.widget._model._ndi._streams[0]._lowbandwidth)
await button_playpause.click()
async def test_proxy_play_pause(self):
await refresh_dynamic_list(self._window)
add_proxy_source(self._window.widget)
button_create = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
await button_create.click()
combobox = self._window.find("**/ComboBox[*]")
combobox.widget.model._set_current_from_value(ComboboxModel.PROXY_VALUE)
panel = self._window.find("**/BindingPanel[*]")
button_playpause = panel.find(f"**/Button[*].name=='{BindingPanel.PLAYPAUSE_BTN_NAME}'")
self.assertTrue(panel.widget._combobox_ui.visible)
self.assertFalse(panel.widget._combobox_alt.visible)
await button_playpause.click()
self.assertGreater(len(self._window.widget._model._ndi._streams), 0)
self.assertFalse(panel.widget._combobox_ui.visible)
self.assertTrue(panel.widget._combobox_alt.visible)
await button_playpause.click()
        self.assertEqual(len(self._window.widget._model._ndi._streams), 0)
async def test_proxy_multiple(self):
await refresh_dynamic_list(self._window)
field = self._window.find("**/StringField[*]")
button_create = self._window.find(f"**/Button[*].text=='{Window.NEW_TEXTURE_BTN_TXT}'")
field.widget.model.set_value(DYNAMIC_ID1)
await button_create.click()
field.widget.model.set_value(DYNAMIC_ID2)
await button_create.click()
comboboxes = self._window.find_all("**/ComboBox[*]")
for combobox in comboboxes:
combobox.widget.model._set_current_from_value(ComboboxModel.PROXY_VALUE)
buttons_playpause = self._window.find_all(f"**/Button[*].name=='{BindingPanel.PLAYPAUSE_BTN_NAME}'")
for button_playpause in buttons_playpause:
await button_playpause.click()
        self.assertEqual(len(self._window.widget._model._ndi._streams), 2)
button_stopall = self._window.find(f"**/Button[*].text=='{Window.STOP_STREAMS_BTN_TXT}'")
await button_stopall.click()
        self.assertEqual(len(self._window.widget._model._ndi._streams), 0)
panels = self._window.find_all("**/BindingPanel[*]")
for panel in panels:
self.assertTrue(panel.widget._combobox_ui.visible)
self.assertFalse(panel.widget._combobox_alt.visible)
| 7,140 | Python | 38.672222 | 117 | 0.640196 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/tests/test_utils.py | import omni
import omni.kit.ui_test as ui_test
from pxr import Usd, UsdLux, UsdShade
from ..USDtools import USDtools
from ..window import Window
from ..eventsystem import EventSystem
from ..comboboxModel import ComboboxModel
SOURCE1 = "MY-PC (Test Pattern)"
SOURCE2 = "MY-PC (Test Pattern 2)"
DYNAMIC_ID1 = "myDynamicMaterial1"
DYNAMIC_ID2 = "myDynamicMaterial2"
DUMMY_PATH = "/path/to/dummy"
RECTLIGHT_NAME = "MyRectLight"
DEFAULT_PRIM_NAME = "World"
def make_stage() -> Usd.Stage:
usd_context = omni.usd.get_context()
usd_context.new_stage()
# self._stage = Usd.Stage.CreateInMemory()
stage = usd_context.get_stage()
prim = stage.DefinePrim(f"/{DEFAULT_PRIM_NAME}")
stage.SetDefaultPrim(prim)
return stage
def get_stage() -> Usd.Stage:
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
return stage
def close_stage():
usd_context = omni.usd.get_context()
assert usd_context.can_close_stage()
usd_context.close_stage()
def get_window():
return ui_test.find(Window.WINDOW_NAME)
def create_dynamic_material() -> UsdShade.Material:
USDtools.create_dynamic_material(DYNAMIC_ID1)
return get_dynamic_material_prim(DYNAMIC_ID1)
def create_dynamic_rectlight():
stage = get_stage()
path: str = f"{stage.GetDefaultPrim().GetPath()}/{RECTLIGHT_NAME}"
light = UsdLux.RectLight.Define(stage, path)
light.GetPrim().GetAttribute("texture:file").Set(f"{USDtools.PREFIX}{DYNAMIC_ID2}")
def get_dynamic_material_prim(name: str):
usd_context = omni.usd.get_context()
stage = usd_context.get_stage()
return stage.GetPrimAtPath(f"{stage.GetDefaultPrim().GetPath()}/{USDtools.SCOPE_NAME}/{name}")
async def refresh_dynamic_list(window):
button = window.find(f"**/Button[*].text=='{Window.DISCOVER_TEX_BTN_TXT}'")
await button.click()
def add_proxy_source(window):
EventSystem.send_event(EventSystem.NDIFINDER_NEW_SOURCES, payload={"sources": [ComboboxModel.PROXY_VALUE]})
| 1,995 | Python | 27.112676 | 111 | 0.707268 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/mf/ov/ndi/tests/test_USDtools.py | from ..USDtools import USDtools
from .test_utils import make_stage, close_stage, create_dynamic_material, create_dynamic_rectlight, SOURCE1
import omni.kit.test
class USDValidNameUnitTest(omni.kit.test.AsyncTestCase):
async def test_name_valid(self):
self.check_name_valid("myDynamicMaterial", "myDynamicMaterial")
self.check_name_valid("789testing123numbers456", "_89testing123numbers456")
self.check_name_valid("", "_")
self.check_name_valid("àâáäãåÀÂÁÃÅÄ", "aaaaaaAAAAAA")
self.check_name_valid("èêéëÈÊÉË", "eeeeEEEE")
self.check_name_valid("ìîíïÌÎÍÏ", "iiiiIIII")
self.check_name_valid("òôóöõøÒÔÓÕÖØ", "ooooooOOOOOO")
self.check_name_valid("ùûúüÙÛÚÜ", "uuuuUUUU")
self.check_name_valid("æœÆŒçǰðÐñÑýÝþÞÿß", "aeoeAEOEcCdegdDnNyYthThyss")
self.check_name_valid("!¡¿@#$%?&*()-_=+/`^~.,'\\<>`;:¤{}[]|\"¦¨«»¬¯±´·¸÷",
"___________________________________________________")
self.check_name_valid("¢£¥§©ªº®¹²³µ¶¼½¾×", "C_PSY_SS_c_ao_r_123uP_1_4_1_2_3_4x")
def check_name_valid(self, source, expected):
v: str = USDtools.make_name_valid(source)
self.assertEqual(v, expected, f"Expected \"{v}\", derived from \"{source}\", to equals \"{expected}\"")
class USDToolsUnitTest(omni.kit.test.AsyncTestCase):
def setUp(self):
self._stage = make_stage()
def tearDown(self):
close_stage()
async def test_create_dynamic_material(self):
material = create_dynamic_material()
prim = self._stage.GetPrimAtPath(material.GetPath())
self.assertIsNotNone(prim)
async def test_find_dynamic_sources(self):
create_dynamic_material()
create_dynamic_rectlight()
sources = USDtools.find_all_dynamic_sources()
self.assertEqual(len(sources), 2)
async def test_set_property_ndi(self):
material = create_dynamic_material()
path = material.GetPath()
USDtools.set_prim_ndi_attribute(path, SOURCE1)
attr = material.GetPrim().GetAttribute(USDtools.ATTR_NDI_NAME)
self.assertEqual(attr.Get(), SOURCE1)
async def test_set_property_bandwidth(self):
material = create_dynamic_material()
path = material.GetPath()
USDtools.set_prim_lowbandwidth_attribute(path, True)
attr = material.GetPrim().GetAttribute(USDtools.ATTR_BANDWIDTH_NAME)
self.assertTrue(attr.Get())
USDtools.set_prim_lowbandwidth_attribute(path, False)
self.assertFalse(attr.Get())
| 2,554 | Python | 38.921874 | 111 | 0.634299 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/config/extension.toml | [package]
version = "1.0.1"
title = "MF NDI® extension"
description = "An extension to enable NDI® live video input in Omniverse."
authors = ["Moment Factory","Frederic Lestage"]
readme = "docs/README.md"
changelog = "docs/CHANGELOG.md"
repository = "https://github.com/MomentFactory/Omniverse-NDI-extension"
category = "Services"
keywords = ["NDI®", "texture", "live-feed", "video", "broadcast", "audiovisual", "realtime","streaming","voip"]
preview_image = "data/preview.png"
icon = "data/mf-ov-extensions-icons.png"
[dependencies]
"omni.kit.uiapp" = {}
"omni.warp" = {}
[[python.module]]
name = "mf.ov.ndi"
[python.pipapi]
requirements = [
"unidecode"
]
use_online_index = true
[[test]]
args = [
"--/app/window/dpiScaleOverride=1.0",
"--/app/window/scaleToMonitor=false"
]
dependencies = [
"omni.kit.ui_test",
"omni.usd"
]
timeout = 60
[package.target]
kit = ["105.1"]
[package.writeTarget]
kit = true
| 936 | TOML | 18.93617 | 111 | 0.67094 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/docs/CHANGELOG.md | # Changelog
## [1.0.1] - 2023-12-21
### Fixed
- Streams are properly shut down when closing, opening, or reopening the stage
## [1.0.0] - 2023-07-18
### Added
- Compatibility with USD Composer 2023.1.1
### Changed
- Creating a texture with the same name doesn't block (will follow pattern: "name", "name_01", "name_02", ...)
- Bundle pip dependency `ndi-python` with the extension
- Search now looks for dynamic textures in the emissive texture field
## [0.12.0] - 2023-05-18
### Added
- Display stream statistics when running (fps, dimensions, color format)
- Opens as a new window when left-clicking on the status dot of a particular stream
### Changed
- Improved performance when using the GPU for texture copy when the stream source is square
### Fixed
- Streams stop when refreshing or adding a new dynamic texture to prevent ghost streams
- Removed the possibility of overwriting a stream when creating a new one with the same name
- Removed the double color conversion by requesting RGBA color profile from NDI®
### Removed
- Proxy stream no longer available in the list of streams
## [0.11.0] - 2023-04-20
### Changed
- NDI® status now displayed as a dot with colors
- red: NDI® source offline
- white: NDI® source online
- green: Stream playing
- orange: NDI® drops
- Code refactor
## [0.10.0] - 2023-04-12
### Added
- Unit and UI tests
## [0.9.0] - 2023-04-04
### Changed
- UI rework: less text, more icons
- Documentation and icons Overhaul
- Materials are now created in a scope under the default prim instead of an Xform
- Updated example.usd to reflect this change
- Material identifiers now keep the letter when it has an accent (i.e. é becomes e)
## [0.8.1] - 2023-03-29
### Changed
- Dedicated NDI® threads no longer daemonic
### Fixed
- Stream UI did not go back to its non-running state when using the "kill all streams" button
- Order of calls on application shutdown that could cause a crash on hot reload (affected developers)
- Status of NDI® source not properly reflected in the icons
## [0.8.0] - 2023-03-27
### Added
- Profiling in NDI® stream functions
### Changed
- When a stream is running, it is no longer possible to change its source; a running status is displayed instead
### Fixed
- Selected NDI® source in combobox doesn't change when sources are updated
- Make name USD safe when creating new material instead of throwing error
- NDI® status icon goes back to valid if a NDI® source is started after being closed
- Won't attempt to start a stream if it isn't valid (i.e. can't find the source)
## [0.7.0] - 2023-03-21
### Fixed
- Removed the parts of the extension that caused the app to freeze. Might still encounter low fps during the following:
- Starting a stream
- Closing a NDI® source while the stream is still running in the extension
- Using Remote Connection 1 or proxy as a stream source
## [0.6.0] - 2023-03-16
### Changed
- Stream Optimization (no need to flatten the NDI® frame)
- Individual streams now run in different thread
- Removed refresh NDI® feed button in favor of a watcher that runs on a second thread
- If a NDI® source closes while the stream is still running in the extension, it will automatically stop after a few seconds (5)
### Fixed
- Extension is now known as mf.ov.ndi
- Omniverse app won't crash when the NDI® source is closed and a stream is still running
- The app will still freeze for a few seconds
## [0.5.0] - 2023-03-07
### Added
- Support for receiving the low bandwidth version of a NDI® stream (this is a feature of NDI® that we now support)
## [0.4.0] - 2023-03-03
### Added
- Support for dynamic rect light (works when `IsProjector` is enabled to simulate a projector)
### Changed
- Now uses the [recommended logging](https://docs.omniverse.nvidia.com/kit/docs/kit-manual/latest/guide/logging.html) system
### Removed
- Obsolete pip dependency to open-cv
## [0.3.1] - 2023-03-02
### Fixed
- Crash when searching for dynamic textures and finding those that aren't dynamic
## [0.3.0] - 2023-03-01
### Added
- Can use a proxy feed which simulates a solid red color 1920x1080 at 30fps
### Fixed
- Filling dynamic texture with a default magenta color
- Fixes frame drop when assigning a dynamic texture without pushing data to it first
- Window management fixes for developers targeting hot reload
### Changed
- Menu element is now at "Window > NDI® Dynamic Texture" instead of "Window > NDI® > NDI® Dynamic Texture"
## [0.2.0] - 2023-02-28
### Added
- Support for managing multiple NDI® feeds
## [0.1.0] - 2023-02-22
### Added
- Initial version of extension
- Supports one NDI® feed
| 4,632 | Markdown | 30.09396 | 128 | 0.722582 |
MomentFactory/Omniverse-NDI-extension/exts/mf.ov.ndi/docs/README.md | # NDI® extension for Omniverse [mf.ov.ndi]
Copyright 2023 Moment Factory Studios Inc.
An extension to enable NDI® live video input in Omniverse.
- Requires Omniverse Kit >= 105
- Requires USD Composer > 2023.1.0
- Requires [NDI® 5.5.3 runtime for Windows](https://go.ndi.tv/tools-for-windows)
| 295 | Markdown | 31.888885 | 80 | 0.745763 |
MomentFactory/Omniverse-NDI-extension/PACKAGE-LICENSES/NDI-LICENSE.md | NewTek’s NDI® Software Development Kit (SDK) License Agreement
Please read this document carefully before proceeding. You (the undersigned Licensee) hereby agree to the terms of this
NDI® Software Development Kit (SDK) License Agreement (the "License") in order to use the SDK. NewTek, Inc.
(“NewTek”) agrees to license you certain rights as set forth herein under these terms.
1. Definitions
a. "SDK" means the entire NewTek NDI® Software Development Kit, including those portions pertaining to the
Specific SDK, provided to you pursuant to this License, including any source code, compiled executables or
libraries, and all documentation provided to you to assist you in building tools that use the NDI® Software for data
transfer over a local network.
b. "Products" means your software product(s) and/or service(s) that you develop or that are developed on your behalf
through the use of the SDK and that are designed to be, and/or are, used, sold and/or distributed to work closely
with other NewTek Products or Third Party Video Products.
c. “NewTek Products refers to NewTek’s video line of products distributed by NewTek and any upgrades.
d. “SDK Documentation” refers to the documentation included with the Software Development Kit including that
portion pertaining to the Specific SDK.
e. “Specific SDK” refers to the specific SDK for which you intend to use the NewTek SDK and this license (for example:
NDI® Send, NDI® Receive, NDI® Find, or other SDK’s that are available from time to time. These are examples
only and NewTek may add or subtract to this list at its discretion, and you agree to use them only in accordance
with this Agreement), and includes the documentation relating to it.
f. “Third Party Video Products” refers to products of third parties developed by or for them also using the NewTek
NDI® Software Development Kit in any way.
2. License
a. Pursuant to the terms, conditions and requirements of this License and the SDK Documentation, you are hereby
granted a nonexclusive royalty-free license to use the sample code, object code and documentation included in
the SDK for the sole purpose of developing Products using the Specific SDK, and to distribute, only in accordance
with the SDK Documentation requirements, object code included in the SDK solely as used by such Products (your
Product and compiled sample code referred to as the “Bundled Product”).
b. If you are making a product release you must use a version of the SDK that is less than thirty (30) days old if there
is one.
c. This is a License only, and no employment, joint venture, partnership, or other business venture is created by this
License.
d. Unless otherwise stated in the SDK, no files within the SDK and the Specific SDK may be distributed. Certain files
within the SDK or the Specific SDK may be distributed, said files and their respective distribution license are
individually identified within the SDK documentation. This is not a license to create revisions or other derivative
works of any NewTek software or technology.
e. You agree to comply with the steps outlined in the SDK Documentation, including the SDK manual for the Specific
SDK. Different obligations and restrictions may be imposed by NewTek with respect to different Specific SDK’s.
NewTek will not agree to sponsor your Product or show affiliation; however NewTek shall have the right to test the
Product, and if it does not work or operate to NewTek’s satisfaction, NewTek may terminate this license pursuant to
Section 10. Notwithstanding that NewTek may test the Product, it does not warrant the test; it is for NewTek’s
benefit, and you agree not to promote in your Product marketing or elsewhere any results or that NewTek has
tested the Product.
f. You acknowledge that information provided to NewTek to induce NewTek to enter into this license with you about
your experience in the industry, sales, distribution, SDK experience or otherwise, whether such information is
provided to NewTek verbally or in writing, is true.
g. NewTek makes the SDK available for developers for developing Products only, under these specific conditions
herein, and if any, or all, of the terms of this license are not enforceable within your legal jurisdiction in any way, or
if any clause is voided or modified in any way, then you may not enter into this agreement, any license and
permission granted herein is revoked and withdrawn as of the date you first downloaded and/or used the SDK, and
you are then unauthorized to copy, create derivative works, or otherwise use the SDK in any way.
3. Restrictions and Confidentiality.
a. “Confidential Information” includes the SDK and all specifications, source code, example code, tools and
documentation provided within the SDK, and any support thereof, and any other proprietary information provided
by NewTek and identified as Confidential in the course of assisting You with your NDI® implementation.
Confidential Information does not include information that: 1) is or becomes generally available to the public other
than as a result of a disclosure by You, or 2) becomes available to You on a non-confidential basis from a source
that was not prohibited from disclosing such information. Except as authorized herein, or in the SDK
Documentation, or as otherwise approved in writing by NewTek: 1) The disclosure to you of the SDK and all other
Confidential Information shall not be disclosed to any third party 2)You agree not to commercialize the
Confidential Information for yours or others benefit in any way; 3) You will not make or distribute copies of the
SDK, or other Confidential Information or electronically transfer the SDK to any individual within your business or
company who does not need to know or view the SDK, and under no circumstances shall you disclose it, or any
part of it, to any company, or any other individual not employed directly by the business or company you
represent, without express written permission of NewTek.
b. You will not modify, sell, rent, transfer, resell for profit, distribute, or create derivative works based upon the SDK or
any part thereof other than as set forth herein, and you will not allow independent contractors to create derivative
works; however, you may use the SDK to create your own program for the primary purpose of making it or your
Product compatible with the NDI® network APIs, a NewTek Product, or for other purposes expressly set forth by
you in advance in writing and agreed to in writing by NewTek. In the case of your derivative works based upon the
SDK, you may create and revise your Product using the SDK, and sell, rent, transfer, resell for profit and distribute,
so long as it is for the Principal objective for which you were provided the SDK and it otherwise complies with this
agreement, including the requirement that your Product or any other Third Party Product using any portion of the
SDK continues to use the current SDK as required herein and functions properly using the SDK. NewTek reserves
the right to determine at any time the compliance of your Product or any Third Party Product as properly using the
SDK including maintaining current and complete NDI® compatability. Notwithstanding anything to the contrary
herein, no intellectual property claim, whether in patent, trademark, copyright, or otherwise, is made by NewTek in
or to your Product (except as to the SDK including software code and/or Libraries, and copyright rights therein,
and any Confidential Information used in or with the Product).
c. You will comply with applicable export control and trade sanctions laws, rules, regulations and licenses and will not
export or re-export, directly or indirectly, the SDK into any country, to any organization or individual prohibited by
the United States Export Administration Act and the regulations thereunder.
d. Any direct or indirect distribution of your Product or any Bundled Products by you that include your Product, shall
be under the terms of a license agreement containing terms that: (i) prohibit any modifications to the SDK or any
part thereof, (ii) prohibit any reverse engineering, disassembly or recompilation of the the SDK or any part thereof,
or any protocols used in the SDK, and further prohibit any attempt to do so; (iii) disclaim any and all warranties on
behalf of NewTek and each of its licensors, (iv) disclaim, to the extent permitted by applicable law, liability of
NewTek and/or its licensors for any damages, whether direct, indirect, incidental or consequential, arising from the
use of the Product or Bundled Products, (v) comply fully with all relevant export laws and regulations of the United
States to assure that the Bundled Products or any part thereof is not exported, directly or indirectly, in violation of
United States law; (vi) include the appropriate copyright notice showing NewTek, Inc. as copyright owner; (vii)
require all third party developers using your Product to develop Third Party Products to comply with the terms of
the NewTek SDK license, including that such Third Party Products have current and complete NDI® compatability,
and further require such third party developers to include in their End User License Agreement the terms of this
paragraph 3d.
e. You agree not to use the SDK for any unlawful propose or in any way to cause injury, harm or damage to NewTek,
Inc., or its Products, trademarks, reputation and/or goodwill, or use information provided pursuant to the SDK, to
interfere with NewTek in the commercialization of NewTek Products.
f. You agree to use NewTek trademarks (NewTek trademarks include, but are not limited to NDI®, NDI|HX™,
NewTek™, TriCaster®, and LightWave 3D®), only in accordance with applicable policies of NewTek for such
trademark usage by software developers in effect from time to time, which policy may be amended at any time
with or without notice. NewTek’s trademarks shall not be utilized within the Product itself, or on the Product
packaging or promotion, or on websites, except to identify that the Product is compatible with NewTek’s pertinent Video Product, and in all cases where NewTek trademarks are utilized, special and clear notations shall be provided
that the marks are NewTek trademarks. Your Product is not a product of NewTek and no promotion, packaging, or
use of NewTek trademarks shall suggest sponsorship by NewTek of your Products, except where specifically
authorized by NewTek in writing. Any distribution of your Product in a fraudulent manner, or in any other manner
or method that violates any civil or criminal laws shall constitute a default under this agreement and result in
immediate revocation of any right to utilize NewTek’s marks.
g. NewTek owns or has licensed copyright rights to the SDK. To the extent any of the SDK is incorporated into your
Product, you agree to include all applicable copyright notices, along with yours, indicating NewTek’s copyright
rights as applicable and as requested by NewTek.
h. You agree that by using the SDK, or any portion or part of the NDI® Software, in your Products, that you shall not
at any time during the term create, use or distribute Products utilizing the NDI® SDK that are not interoperable
with, or have significantly degraded performance of functionality when working with, NewTek Products or Third
Party Video Products that are created with or utilize in whole or in part the SDK. Your Products and Third Party
Products must maintain current and complete NDI® compatability at all times.
i. You agree to not to reverse engineer, disassemble or recompile the SDK or any part thereof, or any protocols used
in the SDK, or attempt to do so.
j. You agree not to use the SDK, or cause the SDK to be used, for any purpose that it was not designed for, and in
particular, you agree not to use the SDK for any purpose but for the precise purposes as expressly identified to
NewTek in writing that is the basis of the SDK and this license, and you agree you will not attempt to violate any of
the foregoing, or encourage third parties to do so.
4. Software Defect Reporting
If you find software defects in the SDK, you agree to make reasonable effort to report them to NewTek in accordance
with the SDK documentation or in such other manner as NewTek directs in writing. NewTek will evaluate and, at its sole
discretion, may address them in a future revision of the SDK. NewTek does not warrant the SDK to be free of defects.
5. Updates
You understand and agree that NewTek may amend, modify, change, and/or cease distribution or production of the SDK
at any time. You understand that you are not entitled to receive any upgrades, updates, or future versions of the SDK
under this License. NewTek does not warrant or represent that its future updates and revisions will be compatible with
your Product, and NewTek does not warrant that its updates and/or revisions will allow your Product to be compatible
with or without modifications to your Product.
6. Ownership
Nothing herein is intended to convey to you any patent, trademark, copyright, trade secret or other Intellectual Property
owned by NewTek or its Licensors in the SDK or in any NewTek software, hardware, products, trade names, or
trademarks. NewTek and its suppliers or licensors shall retain all right, title, and interest to the foregoing Intellectual
Property and to the SDK. All rights not expressly granted herein are reserved by NewTek.
7. Indemnity and Limitations
You agree to indemnify and hold NewTek harmless from any third party claim, loss, or damage (including attorney's fees)
related to your use, sale or distribution of the SDK. THE SDK IS PROVIDED TO YOU FREE OF CHARGE, AND ON AN "AS
IS" BASIS AND "WITH ALL FAULTS", WITHOUT ANY TECHNICAL SUPPORT OR WARRANTY OF ANY KIND FROM
NEWTEK. YOU ASSUME ALL RISKS THAT THE SDK IS SUITABLE OR ACCURATE FOR YOUR NEEDS AND YOUR USE OF
THE SDK IS AT YOUR OWN DISCRETION AND RISK. NEWTEK AND ITS LICENSORS DISCLAIM ALL EXPRESS AND IMPLIED
WARRANTIES FOR THE SDK INCLUDING, WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY OR FITNESS
FOR A PARTICULAR PURPOSE. ALSO, THERE IS NO WARRANTY OF NON-INFRINGEMENT, TITLE OR QUIET ENJOYMENT.
SOME STATES DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO THE ABOVE EXCLUSION MAY NOT
APPLY TO YOU. YOU MAY ALSO HAVE OTHER LEGAL RIGHTS THAT VARY FROM STATE TO STATE.
8. Limitation of Damages
NEITHER NEWTEK NOR ITS SUPPLIERS OR LICENSORS SHALL BE LIABLE FOR ANY INDIRECT, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES OR LOSS (INCLUDING DAMAGES FOR LOSS OF BUSINESS, LOSS OF PROFITS, OR THE
LIKE), ARISING OUT OF THIS LICENSE WHETHER BASED ON BREACH OF CONTRACT, TORT (INCLUDING NEGLIGENCE),STRICT LIABILITY, PRODUCT LIABILITY OR OTHERWISE, EVEN IF NEWTEK OR ITS REPRESENTATIVES HAVE BEEN
ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. SOME STATES DO NOT ALLOW THE LIMITATION OR EXCLUSION OF
LIABILITY FOR INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS LIMITATION OR EXCLUSION MAY NOT APPLY TO
YOU. The limited warranty, exclusive remedies and limited liability set forth above are fundamental elements of the basis
of the bargain between NewTek and you. You agree that NewTek would not be able to provide the Software on an
economic basis without such limitations. IN NO EVENT WILL NEWTEK BE LIABLE FOR ANY AMOUNT GREATER THAN
WHAT YOU ACTUALLY PAID FOR THE SDK.
9. US Government - Restricted Rights
The SDK and accompanying materials are provided with Restricted Rights. Use, duplication, or disclosure by the U.S.
Government is subject to restrictions as set forth in this License and as provided in Federal Regulations, as applicable.
(Manufacturer: NewTek, Inc., 5131 Beckwith Blvd., San Antonio, TX 78249).
10. Termination
Either party may terminate this License upon thirty (30) days written notice. Either party may also terminate if the other
party materially defaults in the performance of any provision of this License, the non-defaulting party gives written notice
to the other party of such default, and the defaulting party fails to cure such default within ten (10) days after receipt of
such notice. Upon the termination of this License, the rights and licenses granted to you by NewTek pursuant to this
License will automatically cease. Nothing herein shall prevent either party from pursuing any injunctive relief at any time
if necessary, or seeking any other remedies available in equity. Each party reserves the right to pursue all legal and
equitable remedies available. Upon termination, all SDK materials shall be promptly returned to NewTek, and any and all
copies stored in electronic or other format shall be deleted and destroyed, and any rights to use NewTek’s trademarks
are revoked. If this License is terminated for any reason, the provisions of Sections 1, 3, 6, 7, 8, 9, 10, and 11 shall survive
such termination.
11. General
Notices given hereunder may be sent to either party at the address below by either overnight mail or by email and are
deemed effective when sent. This License shall be governed by the laws of the State of Texas, without regard to its choice
of law rules and you agree to exclusive jurisdiction therein. This License contains the complete agreement between you
and NewTek with respect to the subject matter (SDK) of this License, and supersedes all prior or contemporaneous
agreements or understandings, whether oral or written. It does not replace any licenses accompanying NewTek
Products. You may not assign this SDK License. | 17,449 | Markdown | 95.944444 | 229 | 0.808184 |
MomentFactory/Omniverse-NDI-extension/PACKAGE-LICENSES/USD-LICENSE.md | Universal Scene Description (USD) components are licensed under the following terms:
Modified Apache 2.0 License
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor
and its affiliates, except as required to comply with Section 4(c) of
the License and to reproduce the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
============================================================
RapidJSON
============================================================
Tencent is pleased to support the open source community by making RapidJSON available.
Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
If you have downloaded a copy of the RapidJSON binary from Tencent, please note that the RapidJSON binary is licensed under the MIT License.
If you have downloaded a copy of the RapidJSON source code from Tencent, please note that RapidJSON source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of RapidJSON into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within RapidJSON. To avoid the problematic JSON license in your own projects, it's sufficient to exclude the bin/jsonchecker/ directory, as it's the only code under the JSON license.
A copy of the MIT License is included in this file.
Other dependencies and licenses:
Open Source Software Licensed Under the BSD License:
--------------------------------------------------------------------
The msinttypes r29
Copyright (c) 2006-2013 Alexander Chemeris
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Open Source Software Licensed Under the JSON License:
--------------------------------------------------------------------
json.org
Copyright (c) 2002 JSON.org
All Rights Reserved.
JSON_checker
Copyright (c) 2002 JSON.org
All Rights Reserved.
Terms of the JSON License:
---------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Terms of the MIT License:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
============================================================
pygilstate_check
============================================================
The MIT License (MIT)
Copyright (c) 2014, Pankaj Pandey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================
double-conversion
============================================================
Copyright 2006-2011, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
============================================================
OpenEXR/IlmBase/Half
============================================================
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
============================================================
Apple Technical Q&A QA1361 - Detecting the Debugger
https://developer.apple.com/library/content/qa/qa1361/_index.html
============================================================
Sample code project: Detecting the Debugger
Version: 1.0
Abstract: Shows how to determine if code is being run under the debugger.
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
============================================================
LZ4
============================================================
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2017, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://www.lz4.org
- LZ4 source repository : https://github.com/lz4/lz4
============================================================
stb
============================================================
stb_image - v2.19 - public domain image loader - http://nothings.org/stb
no warranty implied; use at your own risk
stb_image_resize - v0.95 - public domain image resizing
by Jorge L Rodriguez (@VinoBS) - 2014
http://github.com/nothings/stb
stb_image_write - v1.09 - public domain - http://nothings.org/stb/stb_image_write.h
writes out PNG/BMP/TGA/JPEG/HDR images to C stdio - Sean Barrett 2010-2015
no warranty implied; use at your own risk
ALTERNATIVE B - Public Domain (www.unlicense.org)
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
software, either in source code form or as a compiled binary, for any purpose,
commercial or non-commercial, and by any means.
In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and to
the detriment of our heirs and successors. We intend this dedication to be an
overt act of relinquishment in perpetuity of all present and future rights to
this software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| 26,573 | Markdown | 57.792035 | 739 | 0.727317 |
MomentFactory/Omniverse-MPCDI-converter/bootstrap.py | # Copyright 2023 NVIDIA CORPORATION
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import io
import packmanapi
import os
import sys
REPO_ROOT = os.path.dirname(os.path.realpath(__file__))
REPO_DEPS_FILE = os.path.join(REPO_ROOT, "deps", "repo-deps.packman.xml")
if __name__ == "__main__":
# pull all repo dependencies first
# and add them to the python path
with contextlib.redirect_stdout(io.StringIO()):
deps = packmanapi.pull(REPO_DEPS_FILE)
for dep_path in deps.values():
if dep_path not in sys.path:
sys.path.append(dep_path)
sys.path.append(REPO_ROOT)
import omni.repo.usd
omni.repo.usd.bootstrap(REPO_ROOT) | 1,191 | Python | 31.216215 | 74 | 0.717044 |
MomentFactory/Omniverse-MPCDI-converter/repo.toml | # common settings for repo_usd for all USD plug-ins
[repo_usd]
usd_root = "${root}/_build/usd-deps/nv-usd/%{config}"
# usd_root = "${root}/../../USD_INSTALLATIONS/USD_22-11"
usd_python_root = "${root}/_build/usd-deps/python"
generate_plugin_buildfiles = true
plugin_buildfile_format = "cmake"
generate_root_buildfile = true
[repo_usd.plugin.mpcdiFileFormat]
plugin_dir = "${root}/src/usd-plugins/fileFormat/mpcdiFileFormat"
install_root = "${root}/_install/%{platform}/%{config}/mpcdiFileFormat"
include_dir = "include/mpcdiFileFormat"
additional_include_dirs = [
"../../../../_build/usd-deps/nv_usd/%{config}/include/tbb"
]
public_headers = [
"api.h",
"iMpcdiDataProvider.h",
"mpcdiDataProviderFactory.h"
]
private_headers = [
"mpcdiData.cpp",
"mpcdiPluginManager.h",
"mpcdiFileFormat.h",
"tinyxml2.h"
]
cpp_files = [
"mpcdiData.cpp",
"mpcdiDataProviderFactory.cpp",
"iMpcdiDataProvider.cpp",
"mpcdiPluginManager.cpp",
"mpcdiFileFormat.cpp",
"tinyxml2.cpp"
]
resource_files = [
"plugInfo.json"
]
usd_lib_dependencies = [
"arch",
"tf",
"plug",
"vt",
"gf",
"sdf",
"js",
"pcp",
"usdGeom",
"usd",
"usdLux"
]
| 1,211 | TOML | 21.867924 | 71 | 0.639967 |
MomentFactory/Omniverse-MPCDI-converter/README.md | # mf.ov.mpcdi_converter
An Omniverse extension for MPCDI files.
Supports MPCDI* to OpenUSD conversion, as well as referencing MPCDI files, through a native USD FileFormat plugin.
MPCDI* is a VESA interchange format for video projector technical data.
*Multiple Projection Common Data Interchange
MPCDIv2 is under Copyright © 2013 – 2015 Video Electronics Standards Association. All rights reserved.
## Requirements
- Requires Omniverse Kit >= 105.1
- Tested in USD Composer 2023.2.2 and 2023.2.0
## Build
The extension comes pre-built for Omniverse users, but here are the steps if you want to build it yourself.
### Build DLL for Omniverse
Just run `build.bat`.
### Test in Omniverse
1. `Window` > `Extensions`
2. ☰ > Settings
3. ✚ Add `_install\windows-x86_64\release` folder to the Extension Search Paths
4. The user extension should appear on the left
5. `Autoload` needs to be checked for the FileFormat plugin to be correctly loaded at USD Runtime.
### Build DLL for USDview
The dependency configuration is contained in the [usd-deps.packman.xml](deps/usd-deps.packman.xml) file.
To switch to the correct OpenUSD version for USDview compilation, edit the packman configuration file to:
```
<project toolsVersion="5.6">
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="usd.py310.${platform}.usdview.${config}" version="0.23.05-tc.47+v23.05.b53573ea" />
</dependency>
<dependency name="python" linkPath="../_build/usd-deps/python">
<package name="python" version="3.10.13+nv1-${platform}" />
</dependency>
</project>
```
Then build as usual with `./build.bat`
To run USDview:
- `source setenvwindows`
- `usdview resource/scene.usda`
### Other OpenUSD compatible platforms
Until the build process is improved, we have documented how to build for other platforms (Unreal, Blender) in [this repo](https://github.com/MomentFactory/Omniverse-MVR-GDTF-converter).
## Using the extension
Enable the Extension ( `Window` > `Extensions` from USD Composer ).
[A sample MPCDI file](./exts/mf.ov.mpcdi_converter/mf/ov/mpcdi_converter/sample/Cube-mapping.mpcdi.xml) is provided.
### Reference an MPCDI file
To reference an MPCDI file, just drag and drop the file on your viewport or your Stage Window.
### Convert an MPCDI file
Three ways to convert from USD Composer :
1. `File` > `Import`.
Or from the Content window :
2. `+Import` button.
3. Right click > `Convert to USD` on an `.mpcdi.xml` file.
## Implementation note
- Since there are no projectors in Omniverse, a projector is represented as (see the sketch after this list):
- A camera with the frustum of the projector
- A child `RectLight` with the correct frustum that represents the light emitted
- A simple mesh to represent the physical projector box
- Each buffer is represented as a scope in the scene tree with each projector as a child.
- MPCDI \<Extensions\> are currently ignored
- The frustum of each projector is currently calculated with a focus distance of 2 units and a focal length of 10.
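To make this mapping concrete, here is a minimal, illustrative sketch (not the converter's actual code; the prim paths and attribute values are made up) that builds the same Scope / Camera / RectLight / Mesh hierarchy with the OpenUSD C++ API:
```
// Illustrative only: one buffer ("Buffer1") containing one projector ("Projector1").
#include <cstdio>
#include <string>
#include <pxr/base/vt/value.h>
#include <pxr/usd/sdf/path.h>
#include <pxr/usd/usd/stage.h>
#include <pxr/usd/usdGeom/scope.h>
#include <pxr/usd/usdGeom/camera.h>
#include <pxr/usd/usdGeom/mesh.h>
#include <pxr/usd/usdLux/rectLight.h>
PXR_NAMESPACE_USING_DIRECTIVE
int main()
{
    UsdStageRefPtr stage = UsdStage::CreateInMemory();
    // Each MPCDI buffer becomes a Scope; its projectors are children of that Scope.
    UsdGeomScope::Define(stage, SdfPath("/Buffer1"));
    // The projector itself: a camera carrying the projector frustum.
    UsdGeomCamera projector = UsdGeomCamera::Define(stage, SdfPath("/Buffer1/Projector1"));
    projector.CreateFocusDistanceAttr(VtValue(2.0f)); // focus distance of 2 units
    projector.CreateFocalLengthAttr(VtValue(10.0f));  // focal length of 10
    // A child RectLight representing the light emitted by the projector.
    UsdLuxRectLight::Define(stage, SdfPath("/Buffer1/Projector1/Light"));
    // A simple mesh standing in for the physical projector box.
    UsdGeomMesh::Define(stage, SdfPath("/Buffer1/Projector1/Body"));
    // Dump the resulting layer so the hierarchy can be inspected.
    std::string usda;
    stage->GetRootLayer()->ExportToString(&usda);
    printf("%s\n", usda.c_str());
    return 0;
}
```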
## Resources
- Inspired by : [NVIDIA' usd-plugin-sample](https://github.com/NVIDIA-Omniverse/usd-plugin-samples)
- [MPCDI Christie Digital Github](https://github.com/ChristieDigital/mpcdi/blob/master/MPCDI_explained.md)
- MPCDIv2 standard can be downloaded from [the VESA website](https://vesa.org/vesa-standards/)
- MPCDIv2 is under Copyright © 2013 – 2015 Video Electronics Standards Association. All rights reserved.
## Known issues
- While USD Cameras support lens shift through the `offset`, the `RectLight` used to simulate the projector light does not offer such a feature yet.
- Does not yet support the full MPCDI zip archive, only `.mpcdi.xml`
- XML extension usage: the FileFormat plugin doesn't support compound extensions such as `.mpcdi.xml` (while Omniverse allows it). Currently this extension uses the `.xml` extension, which is not very convenient. | 3,903 | Markdown | 40.094736 | 211 | 0.755829 |
MomentFactory/Omniverse-MPCDI-converter/deps/target-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="libcurl" linkPath="../_build/target-deps/libcurl">
<package name="libcurl" version="8.1.2-3-${platform}-static-release"/>
</dependency>
<dependency name="zlib" linkPath="../_build/target-deps/zlib">
<package name="zlib" version="1.2.13+nv1-${platform}" />
</dependency>
<dependency name="openssl" linkPath="../_build/target-deps/openssl">
<package name="openssl" version="3.0.10-3-${platform}-static-release" />
</dependency>
<dependency name="omni-geospatial" linkPath="../_build/target-deps/omni-geospatial">
<package name="omni-geospatial" version="2.0.3-pxr_23_05+mr17.384.337fb43b.tc.${platform}.${config}" />
</dependency>
</project> | 718 | XML | 50.357139 | 107 | 0.683844 |
MomentFactory/Omniverse-MPCDI-converter/deps/repo-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="repo_usd" linkPath="../_repo/repo_usd">
<package name="repo_usd" version="4.0.1" />
</dependency>
</project> | 171 | XML | 33.399993 | 61 | 0.619883 |
MomentFactory/Omniverse-MPCDI-converter/deps/usd-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-win64_py310_${config}-dev_omniverse" platforms="windows-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux64_py310-centos_${config}-dev_omniverse" platforms="linux-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux-aarch64_py310_${config}-dev_omniverse" platforms="linux-aarch64" />
</dependency>
<dependency name="python" linkPath="../_build/usd-deps/python">
<package name="python" version="3.10.13+nv1-${platform}" />
</dependency>
</project>
| 682 | XML | 61.090904 | 136 | 0.693548 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/api.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_MPCDI_API_H_
#define OMNI_MPCDI_API_H_
#include "pxr/base/arch/export.h"
#if defined(PXR_STATIC)
# define MPCDI_API
# define MPCDI_API_TEMPLATE_CLASS(...)
# define MPCDI_API_TEMPLATE_STRUCT(...)
# define MPCDI_LOCAL
#else
# if defined(MPCDIFILEFORMAT_EXPORTS)
# define MPCDI_API ARCH_EXPORT
# define MPCDI_API_TEMPLATE_CLASS(...) ARCH_EXPORT_TEMPLATE(class, __VA_ARGS__)
# define MPCDI_API_TEMPLATE_STRUCT(...) ARCH_EXPORT_TEMPLATE(struct, __VA_ARGS__)
# else
# define MPCDI_API ARCH_IMPORT
# define MPCDI_API_TEMPLATE_CLASS(...) ARCH_IMPORT_TEMPLATE(class, __VA_ARGS__)
# define MPCDI_API_TEMPLATE_STRUCT(...) ARCH_IMPORT_TEMPLATE(struct, __VA_ARGS__)
# endif
# define MPCDI_LOCAL ARCH_HIDDEN
#endif
#endif | 1,368 | C | 35.026315 | 87 | 0.707602 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/iMpcdiDataProvider.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_MPCDI_IMPCDIDATAPROVIDER_H_
#define OMNI_MPCDI_IMPCDIDATAPROVIDER_H_
#include <unordered_map>
#include <functional>
#include <memory>
#include <pxr/pxr.h>
#include <pxr/base/tf/token.h>
#include <pxr/base/vt/value.h>
#include <pxr/usd/sdf/fileFormat.h>
#include <pxr/usd/sdf/specType.h>
#include <pxr/usd/sdf/primSpec.h>
#include "api.h"
PXR_NAMESPACE_OPEN_SCOPE
///
/// \struct EdfDataParameters
///
/// Represents a class used to hold the specific metadata
/// parameter values used to construct the dynamic layer.
///
struct EdfDataParameters
{
public:
std::string dataProviderId;
std::unordered_map<std::string, std::string> providerArgs;
// conversion functions to and from USD structures
static EdfDataParameters FromFileFormatArgs(const SdfFileFormat::FileFormatArguments& args);
};
///
/// \class IEdfSourceData
///
/// Interface for data providers to create prim / attribute information
/// and to read back attribute values as needed.
///
class IEdfSourceData
{
public:
MPCDI_API virtual ~IEdfSourceData();
/// Creates a new prim from data read from a back-end data source.
/// \param parentPath The prim path that will be the parent of the newly created prim.
/// \param name The name of the new prim. This must be a valid USD identifier.
/// \param specifier The spec type of the new prim (e.g., def, over, etc.).
/// \param typeName The name of the type of the prim.
///
MPCDI_API virtual void CreatePrim(const SdfPath& parentPath, const std::string& name, const SdfSpecifier& specifier,
const TfToken& typeName) = 0;
/// Creates a new attribute on the specified prim.
/// \param parentPrimPath The prim path of the prim that will contain the attribute.
/// \param name The name of the attribute.
/// \param typeName The name of the type of the attribute.
/// \param variability The variability of the attribute (e.g., uniform, varying, etc.).
/// \param value The default value of the new attribute.
///
MPCDI_API virtual void CreateAttribute(const SdfPath& parentPrimPath, const std::string& name, const SdfValueTypeName& typeName,
const SdfVariability& variability, const VtValue& value) = 0;
/// Sets the value of a field on a prim at the given path.
/// If the value exists, the current value will be overwritten.
/// \param primPath The full path of the prim to set the field value for.
/// \param fieldName The name of the field to set.
/// \param value The value to set.
///
MPCDI_API virtual void SetField(const SdfPath& primPath, const TfToken& fieldName, const VtValue& value) = 0;
/// Determines if the field fieldName exists on the given prim path.
/// If the field exists, the current value will be returned in value if value is valid.
/// \param primPath The full path of the prim to look for the field.
/// \param fieldName The name of the field to look for on the prim.
/// \param value A pointer to a VtValue object that will be filled with the value of
/// the field if it exists.
///
MPCDI_API virtual bool HasField(const SdfPath& primPath, const TfToken& fieldName, VtValue* value) = 0;
/// Determines if the attribute at the given path exists and if so, returns the default value.
/// \param attributePath The full path of the attribute (i.e., primPath + "." + attributeName).
/// \param defaultValue A pointer to a VtValue object that will be filled with the default value
/// of the attribute if it exists.
///
MPCDI_API virtual bool HasAttribute(const SdfPath& attributePath, VtValue* defaultValue) = 0;
};
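// ---------------------------------------------------------------------------
// Illustrative sketch (not part of this plugin's API): a hypothetical data
// provider named "MyProvider" could populate the layer from its Read()
// override roughly as follows, using only the IEdfSourceData calls declared
// above; the prim and attribute names are made-up examples.
//
//   bool MyProvider::Read(std::shared_ptr<IEdfSourceData> sourceData)
//   {
//       // create /Data/Buffer1 as a "def Scope" prim under the root
//       sourceData->CreatePrim(SdfPath("/Data"), "Buffer1", SdfSpecifierDef,
//                              TfToken("Scope"));
//       // add a uniform float attribute with a default value of 10
//       sourceData->CreateAttribute(SdfPath("/Data/Buffer1"), "focalLength",
//                                   SdfValueTypeNames->Float,
//                                   SdfVariabilityUniform, VtValue(10.0f));
//       return true;
//   }
// ---------------------------------------------------------------------------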
///
/// \class IEdfDataProvider
///
/// Interface for acquiring data from an external data system based on a set of
/// metadata parameters fed to a dynamic payload. This object is responsible for
/// acquiring the data from the external system and turning it into USD representations
/// that can be added to a layer.
///
class IEdfDataProvider
{
public:
MPCDI_API virtual ~IEdfDataProvider();
// disallow copies
IEdfDataProvider(const IEdfDataProvider&) = delete;
IEdfDataProvider& operator=(const IEdfDataProvider&) = delete;
/// Asks the data provider to read whatever information they would like to read
/// from the back-end system when a payload layer is first opened.
///
/// \param sourceData The source data interface which the data provider
/// can use to create prims / attributes as needed when
/// they read data from their back-end.
///
MPCDI_API virtual bool Read(std::shared_ptr<IEdfSourceData> sourceData) = 0;
/// Asks the data provider to read whatever would be considered the
/// children of the provided prim path. This gives the opportunity
/// for the data provider to defer reading hierarchical children
/// from their back-end store all at once when the data is large.
///
/// \param primPath The path of the prim to create children for.
/// This value is either "/Data", indicating the root
/// of the hierarchy, or the full path to a prim
/// that was created by the data provider previously
/// on a Read / ReadChildren call.
///
/// \param sourceData The source data interface which the data provider
/// can use to create prims / attributes as needed when
/// they read data from their back-end.
///
MPCDI_API virtual bool ReadChildren(const std::string& primPath, std::shared_ptr<IEdfSourceData> sourceData) = 0;
/// Asks the data provider whether all of its data was read on the initial
/// Read call (i.e. the data has been cached in the source) or not.
///
/// \returns True if all data was read on initial Read, false otherwise.
MPCDI_API virtual bool IsDataCached() const = 0;
protected:
MPCDI_API IEdfDataProvider(const EdfDataParameters& parameters);
MPCDI_API const EdfDataParameters& GetParameters() const;
private:
EdfDataParameters _parameters;
};
PXR_NAMESPACE_CLOSE_SCOPE
#endif | 6,567 | C | 38.329341 | 129 | 0.716004 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/tinyxml2.h | /*
Original code by Lee Thomason (www.grinninglizard.com)
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any
damages arising from the use of this software.
Permission is granted to anyone to use this software for any
purpose, including commercial applications, and to alter it and
redistribute it freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must
not claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product documentation
would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and
must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*/
#ifndef TINYXML2_INCLUDED
#define TINYXML2_INCLUDED
#if defined(ANDROID_NDK) || defined(__BORLANDC__) || defined(__QNXNTO__)
# include <ctype.h>
# include <limits.h>
# include <stdio.h>
# include <stdlib.h>
# include <string.h>
# if defined(__PS3__)
# include <stddef.h>
# endif
#else
# include <cctype>
# include <climits>
# include <cstdio>
# include <cstdlib>
# include <cstring>
#endif
#include <stdint.h>
/*
TODO: intern strings instead of allocation.
*/
/*
gcc:
g++ -Wall -DTINYXML2_DEBUG tinyxml2.cpp xmltest.cpp -o gccxmltest.exe
Formatting, Artistic Style:
AStyle.exe --style=1tbs --indent-switches --break-closing-brackets --indent-preprocessor tinyxml2.cpp tinyxml2.h
*/
#if defined( _DEBUG ) || defined (__DEBUG__)
# ifndef TINYXML2_DEBUG
# define TINYXML2_DEBUG
# endif
#endif
#ifdef _MSC_VER
# pragma warning(push)
# pragma warning(disable: 4251)
#endif
#ifdef _WIN32
# ifdef TINYXML2_EXPORT
# define TINYXML2_LIB __declspec(dllexport)
# elif defined(TINYXML2_IMPORT)
# define TINYXML2_LIB __declspec(dllimport)
# else
# define TINYXML2_LIB
# endif
#elif __GNUC__ >= 4
# define TINYXML2_LIB __attribute__((visibility("default")))
#else
# define TINYXML2_LIB
#endif
#if !defined(TIXMLASSERT)
#if defined(TINYXML2_DEBUG)
# if defined(_MSC_VER)
# // "(void)0," is for suppressing C4127 warning in "assert(false)", "assert(true)" and the like
# define TIXMLASSERT( x ) do { if ( !((void)0,(x))) { __debugbreak(); } } while(false)
# elif defined (ANDROID_NDK)
# include <android/log.h>
# define TIXMLASSERT( x ) do { if ( !(x)) { __android_log_assert( "assert", "grinliz", "ASSERT in '%s' at %d.", __FILE__, __LINE__ ); } } while(false)
# else
# include <assert.h>
# define TIXMLASSERT assert
# endif
#else
# define TIXMLASSERT( x ) do {} while(false)
#endif
#endif
/* Versioning, past 1.0.14:
http://semver.org/
*/
static const int TIXML2_MAJOR_VERSION = 9;
static const int TIXML2_MINOR_VERSION = 0;
static const int TIXML2_PATCH_VERSION = 0;
#define TINYXML2_MAJOR_VERSION 9
#define TINYXML2_MINOR_VERSION 0
#define TINYXML2_PATCH_VERSION 0
// A fixed element depth limit is problematic. There needs to be a
// limit to avoid a stack overflow. However, that limit varies per
// system, and the capacity of the stack. On the other hand, it's a trivial
// attack that can result from ill, malicious, or even correctly formed XML,
// so there needs to be a limit in place.
static const int TINYXML2_MAX_ELEMENT_DEPTH = 500;
namespace tinyxml2
{
class XMLDocument;
class XMLElement;
class XMLAttribute;
class XMLComment;
class XMLText;
class XMLDeclaration;
class XMLUnknown;
class XMLPrinter;
/*
A class that wraps strings. Normally stores the start and end
pointers into the XML file itself, and will apply normalization
and entity translation if actually read. Can also store (and memory
manage) a traditional char[]
Isn't clear why TINYXML2_LIB is needed; but seems to fix #719
*/
class TINYXML2_LIB StrPair
{
public:
enum Mode {
NEEDS_ENTITY_PROCESSING = 0x01,
NEEDS_NEWLINE_NORMALIZATION = 0x02,
NEEDS_WHITESPACE_COLLAPSING = 0x04,
TEXT_ELEMENT = NEEDS_ENTITY_PROCESSING | NEEDS_NEWLINE_NORMALIZATION,
TEXT_ELEMENT_LEAVE_ENTITIES = NEEDS_NEWLINE_NORMALIZATION,
ATTRIBUTE_NAME = 0,
ATTRIBUTE_VALUE = NEEDS_ENTITY_PROCESSING | NEEDS_NEWLINE_NORMALIZATION,
ATTRIBUTE_VALUE_LEAVE_ENTITIES = NEEDS_NEWLINE_NORMALIZATION,
COMMENT = NEEDS_NEWLINE_NORMALIZATION
};
StrPair() : _flags( 0 ), _start( 0 ), _end( 0 ) {}
~StrPair();
void Set( char* start, char* end, int flags ) {
TIXMLASSERT( start );
TIXMLASSERT( end );
Reset();
_start = start;
_end = end;
_flags = flags | NEEDS_FLUSH;
}
const char* GetStr();
bool Empty() const {
return _start == _end;
}
void SetInternedStr( const char* str ) {
Reset();
_start = const_cast<char*>(str);
}
void SetStr( const char* str, int flags=0 );
char* ParseText( char* in, const char* endTag, int strFlags, int* curLineNumPtr );
char* ParseName( char* in );
void TransferTo( StrPair* other );
void Reset();
private:
void CollapseWhitespace();
enum {
NEEDS_FLUSH = 0x100,
NEEDS_DELETE = 0x200
};
int _flags;
char* _start;
char* _end;
StrPair( const StrPair& other ); // not supported
void operator=( const StrPair& other ); // not supported, use TransferTo()
};
/*
A dynamic array of Plain Old Data. Doesn't support constructors, etc.
Has a small initial memory pool, so that low or no usage will not
cause a call to new/delete
*/
template <class T, int INITIAL_SIZE>
class DynArray
{
public:
DynArray() :
_mem( _pool ),
_allocated( INITIAL_SIZE ),
_size( 0 )
{
}
~DynArray() {
if ( _mem != _pool ) {
delete [] _mem;
}
}
void Clear() {
_size = 0;
}
void Push( T t ) {
TIXMLASSERT( _size < INT_MAX );
EnsureCapacity( _size+1 );
_mem[_size] = t;
++_size;
}
T* PushArr( int count ) {
TIXMLASSERT( count >= 0 );
TIXMLASSERT( _size <= INT_MAX - count );
EnsureCapacity( _size+count );
T* ret = &_mem[_size];
_size += count;
return ret;
}
T Pop() {
TIXMLASSERT( _size > 0 );
--_size;
return _mem[_size];
}
void PopArr( int count ) {
TIXMLASSERT( _size >= count );
_size -= count;
}
bool Empty() const {
return _size == 0;
}
T& operator[](int i) {
TIXMLASSERT( i>= 0 && i < _size );
return _mem[i];
}
const T& operator[](int i) const {
TIXMLASSERT( i>= 0 && i < _size );
return _mem[i];
}
const T& PeekTop() const {
TIXMLASSERT( _size > 0 );
return _mem[ _size - 1];
}
int Size() const {
TIXMLASSERT( _size >= 0 );
return _size;
}
int Capacity() const {
TIXMLASSERT( _allocated >= INITIAL_SIZE );
return _allocated;
}
void SwapRemove(int i) {
TIXMLASSERT(i >= 0 && i < _size);
TIXMLASSERT(_size > 0);
_mem[i] = _mem[_size - 1];
--_size;
}
const T* Mem() const {
TIXMLASSERT( _mem );
return _mem;
}
T* Mem() {
TIXMLASSERT( _mem );
return _mem;
}
private:
DynArray( const DynArray& ); // not supported
void operator=( const DynArray& ); // not supported
void EnsureCapacity( int cap ) {
TIXMLASSERT( cap > 0 );
if ( cap > _allocated ) {
TIXMLASSERT( cap <= INT_MAX / 2 );
const int newAllocated = cap * 2;
T* newMem = new T[newAllocated];
TIXMLASSERT( newAllocated >= _size );
memcpy( newMem, _mem, sizeof(T)*_size ); // warning: not using constructors, only works for PODs
if ( _mem != _pool ) {
delete [] _mem;
}
_mem = newMem;
_allocated = newAllocated;
}
}
T* _mem;
T _pool[INITIAL_SIZE];
int _allocated; // objects allocated
int _size; // number objects in use
};
/*
Parent virtual class of a pool for fast allocation
and deallocation of objects.
*/
class MemPool
{
public:
MemPool() {}
virtual ~MemPool() {}
virtual int ItemSize() const = 0;
virtual void* Alloc() = 0;
virtual void Free( void* ) = 0;
virtual void SetTracked() = 0;
};
/*
Template child class to create pools of the correct type.
*/
template< int ITEM_SIZE >
class MemPoolT : public MemPool
{
public:
MemPoolT() : _blockPtrs(), _root(0), _currentAllocs(0), _nAllocs(0), _maxAllocs(0), _nUntracked(0) {}
~MemPoolT() {
MemPoolT< ITEM_SIZE >::Clear();
}
void Clear() {
// Delete the blocks.
while( !_blockPtrs.Empty()) {
Block* lastBlock = _blockPtrs.Pop();
delete lastBlock;
}
_root = 0;
_currentAllocs = 0;
_nAllocs = 0;
_maxAllocs = 0;
_nUntracked = 0;
}
virtual int ItemSize() const {
return ITEM_SIZE;
}
int CurrentAllocs() const {
return _currentAllocs;
}
virtual void* Alloc() {
if ( !_root ) {
// Need a new block.
Block* block = new Block;
_blockPtrs.Push( block );
Item* blockItems = block->items;
for( int i = 0; i < ITEMS_PER_BLOCK - 1; ++i ) {
blockItems[i].next = &(blockItems[i + 1]);
}
blockItems[ITEMS_PER_BLOCK - 1].next = 0;
_root = blockItems;
}
Item* const result = _root;
TIXMLASSERT( result != 0 );
_root = _root->next;
++_currentAllocs;
if ( _currentAllocs > _maxAllocs ) {
_maxAllocs = _currentAllocs;
}
++_nAllocs;
++_nUntracked;
return result;
}
virtual void Free( void* mem ) {
if ( !mem ) {
return;
}
--_currentAllocs;
Item* item = static_cast<Item*>( mem );
#ifdef TINYXML2_DEBUG
memset( item, 0xfe, sizeof( *item ) );
#endif
item->next = _root;
_root = item;
}
void Trace( const char* name ) {
printf( "Mempool %s watermark=%d [%dk] current=%d size=%d nAlloc=%d blocks=%d\n",
name, _maxAllocs, _maxAllocs * ITEM_SIZE / 1024, _currentAllocs,
ITEM_SIZE, _nAllocs, _blockPtrs.Size() );
}
void SetTracked() {
--_nUntracked;
}
int Untracked() const {
return _nUntracked;
}
// This number is perf sensitive. 4k seems like a good tradeoff on my machine.
// The test file is large, 170k.
// Release: VS2010 gcc(no opt)
// 1k: 4000
// 2k: 4000
// 4k: 3900 21000
// 16k: 5200
// 32k: 4300
// 64k: 4000 21000
// Declared public because some compilers do not accept to use ITEMS_PER_BLOCK
// in private part if ITEMS_PER_BLOCK is private
enum { ITEMS_PER_BLOCK = (4 * 1024) / ITEM_SIZE };
private:
MemPoolT( const MemPoolT& ); // not supported
void operator=( const MemPoolT& ); // not supported
union Item {
Item* next;
char itemData[ITEM_SIZE];
};
struct Block {
Item items[ITEMS_PER_BLOCK];
};
DynArray< Block*, 10 > _blockPtrs;
Item* _root;
int _currentAllocs;
int _nAllocs;
int _maxAllocs;
int _nUntracked;
};
/**
Implements the interface to the "Visitor pattern" (see the Accept() method.)
If you call the Accept() method, it requires being passed a XMLVisitor
class to handle callbacks. For nodes that contain other nodes (Document, Element)
you will get called with a VisitEnter/VisitExit pair. Nodes that are always leafs
are simply called with Visit().
If you return 'true' from a Visit method, recursive parsing will continue. If you return
false, <b>no children of this node or its siblings</b> will be visited.
All flavors of Visit methods have a default implementation that returns 'true' (continue
visiting). You need to only override methods that are interesting to you.
Generally Accept() is called on the XMLDocument, although all nodes support visiting.
You should never change the document from a callback.
@sa XMLNode::Accept()
*/
class TINYXML2_LIB XMLVisitor
{
public:
virtual ~XMLVisitor() {}
/// Visit a document.
virtual bool VisitEnter( const XMLDocument& /*doc*/ ) {
return true;
}
/// Visit a document.
virtual bool VisitExit( const XMLDocument& /*doc*/ ) {
return true;
}
/// Visit an element.
virtual bool VisitEnter( const XMLElement& /*element*/, const XMLAttribute* /*firstAttribute*/ ) {
return true;
}
/// Visit an element.
virtual bool VisitExit( const XMLElement& /*element*/ ) {
return true;
}
/// Visit a declaration.
virtual bool Visit( const XMLDeclaration& /*declaration*/ ) {
return true;
}
/// Visit a text node.
virtual bool Visit( const XMLText& /*text*/ ) {
return true;
}
/// Visit a comment node.
virtual bool Visit( const XMLComment& /*comment*/ ) {
return true;
}
/// Visit an unknown node.
virtual bool Visit( const XMLUnknown& /*unknown*/ ) {
return true;
}
};
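// Illustrative sketch (not part of TinyXML-2 itself): a minimal visitor that
// prints the name of every element; "ElementNamePrinter" is a made-up example.
//
//   class ElementNamePrinter : public XMLVisitor
//   {
//   public:
//       virtual bool VisitEnter( const XMLElement& element, const XMLAttribute* ) {
//           printf( "%s\n", element.Name() );
//           return true;   // keep visiting children and siblings
//       }
//   };
//
//   // usage, after the document has been parsed:
//   //   ElementNamePrinter printer;
//   //   doc.Accept( &printer );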
// WARNING: must match XMLDocument::_errorNames[]
enum XMLError {
XML_SUCCESS = 0,
XML_NO_ATTRIBUTE,
XML_WRONG_ATTRIBUTE_TYPE,
XML_ERROR_FILE_NOT_FOUND,
XML_ERROR_FILE_COULD_NOT_BE_OPENED,
XML_ERROR_FILE_READ_ERROR,
XML_ERROR_PARSING_ELEMENT,
XML_ERROR_PARSING_ATTRIBUTE,
XML_ERROR_PARSING_TEXT,
XML_ERROR_PARSING_CDATA,
XML_ERROR_PARSING_COMMENT,
XML_ERROR_PARSING_DECLARATION,
XML_ERROR_PARSING_UNKNOWN,
XML_ERROR_EMPTY_DOCUMENT,
XML_ERROR_MISMATCHED_ELEMENT,
XML_ERROR_PARSING,
XML_CAN_NOT_CONVERT_TEXT,
XML_NO_TEXT_NODE,
XML_ELEMENT_DEPTH_EXCEEDED,
XML_ERROR_COUNT
};
/*
Utility functionality.
*/
class TINYXML2_LIB XMLUtil
{
public:
static const char* SkipWhiteSpace( const char* p, int* curLineNumPtr ) {
TIXMLASSERT( p );
while( IsWhiteSpace(*p) ) {
if (curLineNumPtr && *p == '\n') {
++(*curLineNumPtr);
}
++p;
}
TIXMLASSERT( p );
return p;
}
static char* SkipWhiteSpace( char* const p, int* curLineNumPtr ) {
return const_cast<char*>( SkipWhiteSpace( const_cast<const char*>(p), curLineNumPtr ) );
}
// Anything in the high order range of UTF-8 is assumed to not be whitespace. This isn't
// correct, but simple, and usually works.
static bool IsWhiteSpace( char p ) {
return !IsUTF8Continuation(p) && isspace( static_cast<unsigned char>(p) );
}
inline static bool IsNameStartChar( unsigned char ch ) {
if ( ch >= 128 ) {
// This is a heuristic guess in attempt to not implement Unicode-aware isalpha()
return true;
}
if ( isalpha( ch ) ) {
return true;
}
return ch == ':' || ch == '_';
}
inline static bool IsNameChar( unsigned char ch ) {
return IsNameStartChar( ch )
|| isdigit( ch )
|| ch == '.'
|| ch == '-';
}
inline static bool IsPrefixHex( const char* p) {
p = SkipWhiteSpace(p, 0);
return p && *p == '0' && ( *(p + 1) == 'x' || *(p + 1) == 'X');
}
inline static bool StringEqual( const char* p, const char* q, int nChar=INT_MAX ) {
if ( p == q ) {
return true;
}
TIXMLASSERT( p );
TIXMLASSERT( q );
TIXMLASSERT( nChar >= 0 );
return strncmp( p, q, nChar ) == 0;
}
inline static bool IsUTF8Continuation( const char p ) {
return ( p & 0x80 ) != 0;
}
static const char* ReadBOM( const char* p, bool* hasBOM );
// p is the starting location,
// the UTF-8 value of the entity will be placed in value, and length filled in.
static const char* GetCharacterRef( const char* p, char* value, int* length );
static void ConvertUTF32ToUTF8( unsigned long input, char* output, int* length );
// converts primitive types to strings
static void ToStr( int v, char* buffer, int bufferSize );
static void ToStr( unsigned v, char* buffer, int bufferSize );
static void ToStr( bool v, char* buffer, int bufferSize );
static void ToStr( float v, char* buffer, int bufferSize );
static void ToStr( double v, char* buffer, int bufferSize );
static void ToStr(int64_t v, char* buffer, int bufferSize);
static void ToStr(uint64_t v, char* buffer, int bufferSize);
// converts strings to primitive types
static bool ToInt( const char* str, int* value );
static bool ToUnsigned( const char* str, unsigned* value );
static bool ToBool( const char* str, bool* value );
static bool ToFloat( const char* str, float* value );
static bool ToDouble( const char* str, double* value );
static bool ToInt64(const char* str, int64_t* value);
static bool ToUnsigned64(const char* str, uint64_t* value);
// Changes what is serialized for a boolean value.
// Default to "true" and "false". Shouldn't be changed
// unless you have a special testing or compatibility need.
// Be careful: static, global, & not thread safe.
// Be sure to set static const memory as parameters.
static void SetBoolSerialization(const char* writeTrue, const char* writeFalse);
private:
static const char* writeBoolTrue;
static const char* writeBoolFalse;
};
/** XMLNode is a base class for every object that is in the
XML Document Object Model (DOM), except XMLAttributes.
Nodes have siblings, a parent, and children which can
be navigated. A node is always in a XMLDocument.
The type of a XMLNode can be queried, and it can
be cast to its more defined type.
A XMLDocument allocates memory for all its Nodes.
When the XMLDocument gets deleted, all its Nodes
will also be deleted.
@verbatim
A Document can contain: Element (container or leaf)
Comment (leaf)
Unknown (leaf)
Declaration( leaf )
An Element can contain: Element (container or leaf)
Text (leaf)
Attributes (not on tree)
Comment (leaf)
Unknown (leaf)
@endverbatim
*/
class TINYXML2_LIB XMLNode
{
friend class XMLDocument;
friend class XMLElement;
public:
/// Get the XMLDocument that owns this XMLNode.
const XMLDocument* GetDocument() const {
TIXMLASSERT( _document );
return _document;
}
/// Get the XMLDocument that owns this XMLNode.
XMLDocument* GetDocument() {
TIXMLASSERT( _document );
return _document;
}
/// Safely cast to an Element, or null.
virtual XMLElement* ToElement() {
return 0;
}
/// Safely cast to Text, or null.
virtual XMLText* ToText() {
return 0;
}
/// Safely cast to a Comment, or null.
virtual XMLComment* ToComment() {
return 0;
}
/// Safely cast to a Document, or null.
virtual XMLDocument* ToDocument() {
return 0;
}
/// Safely cast to a Declaration, or null.
virtual XMLDeclaration* ToDeclaration() {
return 0;
}
/// Safely cast to an Unknown, or null.
virtual XMLUnknown* ToUnknown() {
return 0;
}
virtual const XMLElement* ToElement() const {
return 0;
}
virtual const XMLText* ToText() const {
return 0;
}
virtual const XMLComment* ToComment() const {
return 0;
}
virtual const XMLDocument* ToDocument() const {
return 0;
}
virtual const XMLDeclaration* ToDeclaration() const {
return 0;
}
virtual const XMLUnknown* ToUnknown() const {
return 0;
}
/** The meaning of 'value' changes for the specific type.
@verbatim
Document: empty (NULL is returned, not an empty string)
Element: name of the element
Comment: the comment text
Unknown: the tag contents
Text: the text string
@endverbatim
*/
const char* Value() const;
/** Set the Value of an XML node.
@sa Value()
*/
void SetValue( const char* val, bool staticMem=false );
/// Gets the line number the node is in, if the document was parsed from a file.
int GetLineNum() const { return _parseLineNum; }
/// Get the parent of this node on the DOM.
const XMLNode* Parent() const {
return _parent;
}
XMLNode* Parent() {
return _parent;
}
/// Returns true if this node has no children.
bool NoChildren() const {
return !_firstChild;
}
/// Get the first child node, or null if none exists.
const XMLNode* FirstChild() const {
return _firstChild;
}
XMLNode* FirstChild() {
return _firstChild;
}
/** Get the first child element, or optionally the first child
element with the specified name.
*/
const XMLElement* FirstChildElement( const char* name = 0 ) const;
XMLElement* FirstChildElement( const char* name = 0 ) {
return const_cast<XMLElement*>(const_cast<const XMLNode*>(this)->FirstChildElement( name ));
}
/// Get the last child node, or null if none exists.
const XMLNode* LastChild() const {
return _lastChild;
}
XMLNode* LastChild() {
return _lastChild;
}
/** Get the last child element or optionally the last child
element with the specified name.
*/
const XMLElement* LastChildElement( const char* name = 0 ) const;
XMLElement* LastChildElement( const char* name = 0 ) {
return const_cast<XMLElement*>(const_cast<const XMLNode*>(this)->LastChildElement(name) );
}
/// Get the previous (left) sibling node of this node.
const XMLNode* PreviousSibling() const {
return _prev;
}
XMLNode* PreviousSibling() {
return _prev;
}
/// Get the previous (left) sibling element of this node, with an optionally supplied name.
const XMLElement* PreviousSiblingElement( const char* name = 0 ) const ;
XMLElement* PreviousSiblingElement( const char* name = 0 ) {
return const_cast<XMLElement*>(const_cast<const XMLNode*>(this)->PreviousSiblingElement( name ) );
}
/// Get the next (right) sibling node of this node.
const XMLNode* NextSibling() const {
return _next;
}
XMLNode* NextSibling() {
return _next;
}
/// Get the next (right) sibling element of this node, with an optionally supplied name.
const XMLElement* NextSiblingElement( const char* name = 0 ) const;
XMLElement* NextSiblingElement( const char* name = 0 ) {
return const_cast<XMLElement*>(const_cast<const XMLNode*>(this)->NextSiblingElement( name ) );
}
/**
Add a child node as the last (right) child.
If the child node is already part of the document,
it is moved from its old location to the new location.
Returns the addThis argument or 0 if the node does not
belong to the same document.
*/
XMLNode* InsertEndChild( XMLNode* addThis );
XMLNode* LinkEndChild( XMLNode* addThis ) {
return InsertEndChild( addThis );
}
/**
Add a child node as the first (left) child.
If the child node is already part of the document,
it is moved from its old location to the new location.
Returns the addThis argument or 0 if the node does not
belong to the same document.
*/
XMLNode* InsertFirstChild( XMLNode* addThis );
/**
Add a node after the specified child node.
If the child node is already part of the document,
it is moved from its old location to the new location.
Returns the addThis argument or 0 if the afterThis node
is not a child of this node, or if the node does not
belong to the same document.
*/
XMLNode* InsertAfterChild( XMLNode* afterThis, XMLNode* addThis );
/**
Delete all the children of this node.
*/
void DeleteChildren();
/**
Delete a child of this node.
*/
void DeleteChild( XMLNode* node );
/**
Make a copy of this node, but not its children.
You may pass in a Document pointer that will be
the owner of the new Node. If the 'document' is
null, then the node returned will be allocated
from the current Document. (this->GetDocument())
Note: if called on a XMLDocument, this will return null.
*/
virtual XMLNode* ShallowClone( XMLDocument* document ) const = 0;
/**
Make a copy of this node and all its children.
If the 'target' is null, then the nodes will
be allocated in the current document. If 'target'
is specified, the memory will be allocated is the
specified XMLDocument.
NOTE: This is probably not the correct tool to
copy a document, since XMLDocuments can have multiple
top level XMLNodes. You probably want to use
XMLDocument::DeepCopy()
*/
XMLNode* DeepClone( XMLDocument* target ) const;
/**
Test if 2 nodes are the same, but don't test children.
The 2 nodes do not need to be in the same Document.
Note: if called on a XMLDocument, this will return false.
*/
virtual bool ShallowEqual( const XMLNode* compare ) const = 0;
/** Accept a hierarchical visit of the nodes in the TinyXML-2 DOM. Every node in the
XML tree will be conditionally visited and the host will be called back
via the XMLVisitor interface.
This is essentially a SAX interface for TinyXML-2. (Note however it doesn't re-parse
the XML for the callbacks, so the performance of TinyXML-2 is unchanged by using this
interface versus any other.)
The interface has been based on ideas from:
- http://www.saxproject.org/
- http://c2.com/cgi/wiki?HierarchicalVisitorPattern
Which are both good references for "visiting".
An example of using Accept():
@verbatim
XMLPrinter printer;
tinyxmlDoc.Accept( &printer );
const char* xmlcstr = printer.CStr();
@endverbatim
*/
virtual bool Accept( XMLVisitor* visitor ) const = 0;
/**
Set user data into the XMLNode. TinyXML-2 in
no way processes or interprets user data.
It is initially 0.
*/
void SetUserData(void* userData) { _userData = userData; }
/**
Get user data set into the XMLNode. TinyXML-2 in
no way processes or interprets user data.
It is initially 0.
*/
void* GetUserData() const { return _userData; }
protected:
explicit XMLNode( XMLDocument* );
virtual ~XMLNode();
virtual char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr);
XMLDocument* _document;
XMLNode* _parent;
mutable StrPair _value;
int _parseLineNum;
XMLNode* _firstChild;
XMLNode* _lastChild;
XMLNode* _prev;
XMLNode* _next;
void* _userData;
private:
MemPool* _memPool;
void Unlink( XMLNode* child );
static void DeleteNode( XMLNode* node );
void InsertChildPreamble( XMLNode* insertThis ) const;
const XMLElement* ToElementWithName( const char* name ) const;
XMLNode( const XMLNode& ); // not supported
XMLNode& operator=( const XMLNode& ); // not supported
};
/** XML text.
Note that a text node can have child element nodes, for example:
@verbatim
<root>This is <b>bold</b></root>
@endverbatim
A text node can have 2 ways to output the text. "normal" output
and CDATA. It will default to the mode it was parsed from the XML file and
you generally want to leave it alone, but you can change the output mode with
SetCData() and query it with CData().
*/
class TINYXML2_LIB XMLText : public XMLNode
{
friend class XMLDocument;
public:
virtual bool Accept( XMLVisitor* visitor ) const;
virtual XMLText* ToText() {
return this;
}
virtual const XMLText* ToText() const {
return this;
}
/// Declare whether this should be CDATA or standard text.
void SetCData( bool isCData ) {
_isCData = isCData;
}
/// Returns true if this is a CDATA text element.
bool CData() const {
return _isCData;
}
virtual XMLNode* ShallowClone( XMLDocument* document ) const;
virtual bool ShallowEqual( const XMLNode* compare ) const;
protected:
explicit XMLText( XMLDocument* doc ) : XMLNode( doc ), _isCData( false ) {}
virtual ~XMLText() {}
char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr );
private:
bool _isCData;
XMLText( const XMLText& ); // not supported
XMLText& operator=( const XMLText& ); // not supported
};
/** An XML Comment. */
class TINYXML2_LIB XMLComment : public XMLNode
{
friend class XMLDocument;
public:
virtual XMLComment* ToComment() {
return this;
}
virtual const XMLComment* ToComment() const {
return this;
}
virtual bool Accept( XMLVisitor* visitor ) const;
virtual XMLNode* ShallowClone( XMLDocument* document ) const;
virtual bool ShallowEqual( const XMLNode* compare ) const;
protected:
explicit XMLComment( XMLDocument* doc );
virtual ~XMLComment();
char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr);
private:
XMLComment( const XMLComment& ); // not supported
XMLComment& operator=( const XMLComment& ); // not supported
};
/** In correct XML the declaration is the first entry in the file.
@verbatim
<?xml version="1.0" standalone="yes"?>
@endverbatim
TinyXML-2 will happily read or write files without a declaration,
however.
The text of the declaration isn't interpreted. It is parsed
and written as a string.
*/
class TINYXML2_LIB XMLDeclaration : public XMLNode
{
friend class XMLDocument;
public:
virtual XMLDeclaration* ToDeclaration() {
return this;
}
virtual const XMLDeclaration* ToDeclaration() const {
return this;
}
virtual bool Accept( XMLVisitor* visitor ) const;
virtual XMLNode* ShallowClone( XMLDocument* document ) const;
virtual bool ShallowEqual( const XMLNode* compare ) const;
protected:
explicit XMLDeclaration( XMLDocument* doc );
virtual ~XMLDeclaration();
char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr );
private:
XMLDeclaration( const XMLDeclaration& ); // not supported
XMLDeclaration& operator=( const XMLDeclaration& ); // not supported
};
/** Any tag that TinyXML-2 doesn't recognize is saved as an
unknown. It is a tag of text, but should not be modified.
It will be written back to the XML, unchanged, when the file
is saved.
DTD tags get thrown into XMLUnknowns.
*/
class TINYXML2_LIB XMLUnknown : public XMLNode
{
friend class XMLDocument;
public:
virtual XMLUnknown* ToUnknown() {
return this;
}
virtual const XMLUnknown* ToUnknown() const {
return this;
}
virtual bool Accept( XMLVisitor* visitor ) const;
virtual XMLNode* ShallowClone( XMLDocument* document ) const;
virtual bool ShallowEqual( const XMLNode* compare ) const;
protected:
explicit XMLUnknown( XMLDocument* doc );
virtual ~XMLUnknown();
char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr );
private:
XMLUnknown( const XMLUnknown& ); // not supported
XMLUnknown& operator=( const XMLUnknown& ); // not supported
};
/** An attribute is a name-value pair. Elements have an arbitrary
number of attributes, each with a unique name.
@note The attributes are not XMLNodes. You may only query the
Next() attribute in a list.
*/
class TINYXML2_LIB XMLAttribute
{
friend class XMLElement;
public:
/// The name of the attribute.
const char* Name() const;
/// The value of the attribute.
const char* Value() const;
/// Gets the line number the attribute is in, if the document was parsed from a file.
int GetLineNum() const { return _parseLineNum; }
/// The next attribute in the list.
const XMLAttribute* Next() const {
return _next;
}
/** IntValue interprets the attribute as an integer, and returns the value.
If the value isn't an integer, 0 will be returned. There is no error checking;
use QueryIntValue() if you need error checking.
*/
int IntValue() const {
int i = 0;
QueryIntValue(&i);
return i;
}
int64_t Int64Value() const {
int64_t i = 0;
QueryInt64Value(&i);
return i;
}
uint64_t Unsigned64Value() const {
uint64_t i = 0;
QueryUnsigned64Value(&i);
return i;
}
/// Query as an unsigned integer. See IntValue()
unsigned UnsignedValue() const {
unsigned i=0;
QueryUnsignedValue( &i );
return i;
}
/// Query as a boolean. See IntValue()
bool BoolValue() const {
bool b=false;
QueryBoolValue( &b );
return b;
}
/// Query as a double. See IntValue()
double DoubleValue() const {
double d=0;
QueryDoubleValue( &d );
return d;
}
/// Query as a float. See IntValue()
float FloatValue() const {
float f=0;
QueryFloatValue( &f );
return f;
}
/** QueryIntValue interprets the attribute as an integer, and returns the value
in the provided parameter. The function will return XML_SUCCESS on success,
and XML_WRONG_ATTRIBUTE_TYPE if the conversion is not successful.
*/
XMLError QueryIntValue( int* value ) const;
/// See QueryIntValue
XMLError QueryUnsignedValue( unsigned int* value ) const;
/// See QueryIntValue
XMLError QueryInt64Value(int64_t* value) const;
/// See QueryIntValue
XMLError QueryUnsigned64Value(uint64_t* value) const;
/// See QueryIntValue
XMLError QueryBoolValue( bool* value ) const;
/// See QueryIntValue
XMLError QueryDoubleValue( double* value ) const;
/// See QueryIntValue
XMLError QueryFloatValue( float* value ) const;
/// Set the attribute to a string value.
void SetAttribute( const char* value );
/// Set the attribute to value.
void SetAttribute( int value );
/// Set the attribute to value.
void SetAttribute( unsigned value );
/// Set the attribute to value.
void SetAttribute(int64_t value);
/// Set the attribute to value.
void SetAttribute(uint64_t value);
/// Set the attribute to value.
void SetAttribute( bool value );
/// Set the attribute to value.
void SetAttribute( double value );
/// Set the attribute to value.
void SetAttribute( float value );
private:
enum { BUF_SIZE = 200 };
XMLAttribute() : _name(), _value(),_parseLineNum( 0 ), _next( 0 ), _memPool( 0 ) {}
virtual ~XMLAttribute() {}
XMLAttribute( const XMLAttribute& ); // not supported
void operator=( const XMLAttribute& ); // not supported
void SetName( const char* name );
char* ParseDeep( char* p, bool processEntities, int* curLineNumPtr );
mutable StrPair _name;
mutable StrPair _value;
int _parseLineNum;
XMLAttribute* _next;
MemPool* _memPool;
};
/** The element is a container class. It has a value, the element name,
and can contain other elements, text, comments, and unknowns.
Elements also contain an arbitrary number of attributes.
*/
class TINYXML2_LIB XMLElement : public XMLNode
{
friend class XMLDocument;
public:
/// Get the name of an element (which is the Value() of the node.)
const char* Name() const {
return Value();
}
/// Set the name of the element.
void SetName( const char* str, bool staticMem=false ) {
SetValue( str, staticMem );
}
virtual XMLElement* ToElement() {
return this;
}
virtual const XMLElement* ToElement() const {
return this;
}
virtual bool Accept( XMLVisitor* visitor ) const;
/** Given an attribute name, Attribute() returns the value
for the attribute of that name, or null if none
exists. For example:
@verbatim
const char* value = ele->Attribute( "foo" );
@endverbatim
The 'value' parameter is normally null. However, if specified,
the attribute will only be returned if the 'name' and 'value'
match. This allow you to write code:
@verbatim
if ( ele->Attribute( "foo", "bar" ) ) callFooIsBar();
@endverbatim
rather than:
@verbatim
if ( ele->Attribute( "foo" ) ) {
if ( strcmp( ele->Attribute( "foo" ), "bar" ) == 0 ) callFooIsBar();
}
@endverbatim
*/
const char* Attribute( const char* name, const char* value=0 ) const;
/** Given an attribute name, IntAttribute() returns the value
of the attribute interpreted as an integer. The default
value will be returned if the attribute isn't present,
or if there is an error. (For a method with error
checking, see QueryIntAttribute()).
*/
int IntAttribute(const char* name, int defaultValue = 0) const;
/// See IntAttribute()
unsigned UnsignedAttribute(const char* name, unsigned defaultValue = 0) const;
/// See IntAttribute()
int64_t Int64Attribute(const char* name, int64_t defaultValue = 0) const;
/// See IntAttribute()
uint64_t Unsigned64Attribute(const char* name, uint64_t defaultValue = 0) const;
/// See IntAttribute()
bool BoolAttribute(const char* name, bool defaultValue = false) const;
/// See IntAttribute()
double DoubleAttribute(const char* name, double defaultValue = 0) const;
/// See IntAttribute()
float FloatAttribute(const char* name, float defaultValue = 0) const;
/** Given an attribute name, QueryIntAttribute() returns
XML_SUCCESS, XML_WRONG_ATTRIBUTE_TYPE if the conversion
can't be performed, or XML_NO_ATTRIBUTE if the attribute
doesn't exist. If successful, the result of the conversion
will be written to 'value'. If not successful, nothing will
        be written to 'value'. This allows you to provide a default
        value:
@verbatim
int value = 10;
QueryIntAttribute( "foo", &value ); // if "foo" isn't found, value will still be 10
@endverbatim
*/
XMLError QueryIntAttribute( const char* name, int* value ) const {
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return XML_NO_ATTRIBUTE;
}
return a->QueryIntValue( value );
}
/// See QueryIntAttribute()
XMLError QueryUnsignedAttribute( const char* name, unsigned int* value ) const {
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return XML_NO_ATTRIBUTE;
}
return a->QueryUnsignedValue( value );
}
/// See QueryIntAttribute()
XMLError QueryInt64Attribute(const char* name, int64_t* value) const {
const XMLAttribute* a = FindAttribute(name);
if (!a) {
return XML_NO_ATTRIBUTE;
}
return a->QueryInt64Value(value);
}
/// See QueryIntAttribute()
XMLError QueryUnsigned64Attribute(const char* name, uint64_t* value) const {
const XMLAttribute* a = FindAttribute(name);
if(!a) {
return XML_NO_ATTRIBUTE;
}
return a->QueryUnsigned64Value(value);
}
/// See QueryIntAttribute()
XMLError QueryBoolAttribute( const char* name, bool* value ) const {
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return XML_NO_ATTRIBUTE;
}
return a->QueryBoolValue( value );
}
/// See QueryIntAttribute()
XMLError QueryDoubleAttribute( const char* name, double* value ) const {
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return XML_NO_ATTRIBUTE;
}
return a->QueryDoubleValue( value );
}
/// See QueryIntAttribute()
XMLError QueryFloatAttribute( const char* name, float* value ) const {
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return XML_NO_ATTRIBUTE;
}
return a->QueryFloatValue( value );
}
/// See QueryIntAttribute()
XMLError QueryStringAttribute(const char* name, const char** value) const {
const XMLAttribute* a = FindAttribute(name);
if (!a) {
return XML_NO_ATTRIBUTE;
}
*value = a->Value();
return XML_SUCCESS;
}
/** Given an attribute name, QueryAttribute() returns
XML_SUCCESS, XML_WRONG_ATTRIBUTE_TYPE if the conversion
can't be performed, or XML_NO_ATTRIBUTE if the attribute
doesn't exist. It is overloaded for the primitive types,
and is a generally more convenient replacement of
QueryIntAttribute() and related functions.
If successful, the result of the conversion
will be written to 'value'. If not successful, nothing will
        be written to 'value'. This allows you to provide a default
        value:
@verbatim
int value = 10;
QueryAttribute( "foo", &value ); // if "foo" isn't found, value will still be 10
@endverbatim
*/
XMLError QueryAttribute( const char* name, int* value ) const {
return QueryIntAttribute( name, value );
}
XMLError QueryAttribute( const char* name, unsigned int* value ) const {
return QueryUnsignedAttribute( name, value );
}
XMLError QueryAttribute(const char* name, int64_t* value) const {
return QueryInt64Attribute(name, value);
}
XMLError QueryAttribute(const char* name, uint64_t* value) const {
return QueryUnsigned64Attribute(name, value);
}
XMLError QueryAttribute( const char* name, bool* value ) const {
return QueryBoolAttribute( name, value );
}
XMLError QueryAttribute( const char* name, double* value ) const {
return QueryDoubleAttribute( name, value );
}
XMLError QueryAttribute( const char* name, float* value ) const {
return QueryFloatAttribute( name, value );
}
XMLError QueryAttribute(const char* name, const char** value) const {
return QueryStringAttribute(name, value);
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, const char* value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, int value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, unsigned value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/// Sets the named attribute to value.
void SetAttribute(const char* name, int64_t value) {
XMLAttribute* a = FindOrCreateAttribute(name);
a->SetAttribute(value);
}
/// Sets the named attribute to value.
void SetAttribute(const char* name, uint64_t value) {
XMLAttribute* a = FindOrCreateAttribute(name);
a->SetAttribute(value);
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, bool value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, double value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/// Sets the named attribute to value.
void SetAttribute( const char* name, float value ) {
XMLAttribute* a = FindOrCreateAttribute( name );
a->SetAttribute( value );
}
/**
Delete an attribute.
*/
void DeleteAttribute( const char* name );
/// Return the first attribute in the list.
const XMLAttribute* FirstAttribute() const {
return _rootAttribute;
}
/// Query a specific attribute in the list.
const XMLAttribute* FindAttribute( const char* name ) const;
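    /* Example (illustrative): every attribute of an element can be visited with
       FirstAttribute() and XMLAttribute::Next(). 'ele' is a placeholder for any
       valid XMLElement pointer.
       @verbatim
       for ( const XMLAttribute* a = ele->FirstAttribute(); a; a = a->Next() ) {
           printf( "%s=\"%s\"\n", a->Name(), a->Value() );
       }
       @endverbatim
    */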
/** Convenience function for easy access to the text inside an element. Although easy
and concise, GetText() is limited compared to getting the XMLText child
and accessing it directly.
        If the first child of 'this' is an XMLText, GetText()
        returns the character string of the Text node; otherwise null is returned.
This is a convenient method for getting the text of simple contained text:
@verbatim
<foo>This is text</foo>
const char* str = fooElement->GetText();
@endverbatim
'str' will be a pointer to "This is text".
Note that this function can be misleading. If the element foo was created from
this XML:
@verbatim
<foo><b>This is text</b></foo>
@endverbatim
        then the value of str would be null. The first child node isn't a text node; it is
        another element. From this XML:
@verbatim
<foo>This is <b>text</b></foo>
@endverbatim
GetText() will return "This is ".
*/
const char* GetText() const;
/** Convenience function for easy access to the text inside an element. Although easy
and concise, SetText() is limited compared to creating an XMLText child
and mutating it directly.
        If the first child of 'this' is an XMLText, SetText() sets its value to
        the given string; otherwise it will create a first child that is an XMLText.
This is a convenient method for setting the text of simple contained text:
@verbatim
<foo>This is text</foo>
fooElement->SetText( "Hullaballoo!" );
<foo>Hullaballoo!</foo>
@endverbatim
Note that this function can be misleading. If the element foo was created from
this XML:
@verbatim
<foo><b>This is text</b></foo>
@endverbatim
        then it will not change "This is text", but rather prefix it with a text node:
@verbatim
<foo>Hullaballoo!<b>This is text</b></foo>
@endverbatim
For this XML:
@verbatim
<foo />
@endverbatim
SetText() will generate
@verbatim
<foo>Hullaballoo!</foo>
@endverbatim
*/
void SetText( const char* inText );
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText( int value );
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText( unsigned value );
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText(int64_t value);
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText(uint64_t value);
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText( bool value );
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText( double value );
/// Convenience method for setting text inside an element. See SetText() for important limitations.
void SetText( float value );
/**
Convenience method to query the value of a child text node. This is probably best
        shown by example. Given you have a document in this form:
@verbatim
<point>
<x>1</x>
<y>1.4</y>
</point>
@endverbatim
The QueryIntText() and similar functions provide a safe and easier way to get to the
"value" of x and y.
@verbatim
int x = 0;
float y = 0; // types of x and y are contrived for example
const XMLElement* xElement = pointElement->FirstChildElement( "x" );
const XMLElement* yElement = pointElement->FirstChildElement( "y" );
xElement->QueryIntText( &x );
yElement->QueryFloatText( &y );
@endverbatim
@returns XML_SUCCESS (0) on success, XML_CAN_NOT_CONVERT_TEXT if the text cannot be converted
to the requested type, and XML_NO_TEXT_NODE if there is no child text to query.
*/
XMLError QueryIntText( int* ival ) const;
/// See QueryIntText()
XMLError QueryUnsignedText( unsigned* uval ) const;
/// See QueryIntText()
XMLError QueryInt64Text(int64_t* uval) const;
/// See QueryIntText()
XMLError QueryUnsigned64Text(uint64_t* uval) const;
/// See QueryIntText()
XMLError QueryBoolText( bool* bval ) const;
/// See QueryIntText()
XMLError QueryDoubleText( double* dval ) const;
/// See QueryIntText()
XMLError QueryFloatText( float* fval ) const;
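    /// See QueryIntText()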
int IntText(int defaultValue = 0) const;
/// See QueryIntText()
unsigned UnsignedText(unsigned defaultValue = 0) const;
/// See QueryIntText()
int64_t Int64Text(int64_t defaultValue = 0) const;
/// See QueryIntText()
uint64_t Unsigned64Text(uint64_t defaultValue = 0) const;
/// See QueryIntText()
bool BoolText(bool defaultValue = false) const;
/// See QueryIntText()
double DoubleText(double defaultValue = 0) const;
/// See QueryIntText()
float FloatText(float defaultValue = 0) const;
/**
Convenience method to create a new XMLElement and add it as last (right)
child of this node. Returns the created and inserted element.
*/
XMLElement* InsertNewChildElement(const char* name);
/// See InsertNewChildElement()
XMLComment* InsertNewComment(const char* comment);
/// See InsertNewChildElement()
XMLText* InsertNewText(const char* text);
/// See InsertNewChildElement()
XMLDeclaration* InsertNewDeclaration(const char* text);
/// See InsertNewChildElement()
XMLUnknown* InsertNewUnknown(const char* text);
// internal:
enum ElementClosingType {
OPEN, // <foo>
CLOSED, // <foo/>
CLOSING // </foo>
};
ElementClosingType ClosingType() const {
return _closingType;
}
virtual XMLNode* ShallowClone( XMLDocument* document ) const;
virtual bool ShallowEqual( const XMLNode* compare ) const;
protected:
char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr );
private:
XMLElement( XMLDocument* doc );
virtual ~XMLElement();
XMLElement( const XMLElement& ); // not supported
void operator=( const XMLElement& ); // not supported
XMLAttribute* FindOrCreateAttribute( const char* name );
char* ParseAttributes( char* p, int* curLineNumPtr );
static void DeleteAttribute( XMLAttribute* attribute );
XMLAttribute* CreateAttribute();
enum { BUF_SIZE = 200 };
ElementClosingType _closingType;
// The attribute list is ordered; there is no 'lastAttribute'
// because the list needs to be scanned for dupes before adding
// a new attribute.
XMLAttribute* _rootAttribute;
};
enum Whitespace {
PRESERVE_WHITESPACE,
COLLAPSE_WHITESPACE
};
/** A Document binds together all the functionality.
It can be saved, loaded, and printed to the screen.
All Nodes are connected and allocated to a Document.
If the Document is deleted, all its Nodes are also deleted.
*/
class TINYXML2_LIB XMLDocument : public XMLNode
{
friend class XMLElement;
// Gives access to SetError and Push/PopDepth, but over-access for everything else.
// Wishing C++ had "internal" scope.
friend class XMLNode;
friend class XMLText;
friend class XMLComment;
friend class XMLDeclaration;
friend class XMLUnknown;
public:
/// constructor
XMLDocument( bool processEntities = true, Whitespace whitespaceMode = PRESERVE_WHITESPACE );
~XMLDocument();
virtual XMLDocument* ToDocument() {
TIXMLASSERT( this == _document );
return this;
}
virtual const XMLDocument* ToDocument() const {
TIXMLASSERT( this == _document );
return this;
}
/**
Parse an XML file from a character string.
Returns XML_SUCCESS (0) on success, or
an errorID.
You may optionally pass in the 'nBytes', which is
the number of bytes which will be parsed. If not
specified, TinyXML-2 will assume 'xml' points to a
null terminated string.
*/
XMLError Parse( const char* xml, size_t nBytes=static_cast<size_t>(-1) );
/**
Load an XML file from disk.
Returns XML_SUCCESS (0) on success, or
an errorID.
*/
XMLError LoadFile( const char* filename );
/**
Load an XML file from disk. You are responsible
for providing and closing the FILE*.
NOTE: The file should be opened as binary ("rb")
not text in order for TinyXML-2 to correctly
do newline normalization.
Returns XML_SUCCESS (0) on success, or
an errorID.
*/
XMLError LoadFile( FILE* );
/**
Save the XML file to disk.
Returns XML_SUCCESS (0) on success, or
an errorID.
*/
XMLError SaveFile( const char* filename, bool compact = false );
/**
Save the XML file to disk. You are responsible
for providing and closing the FILE*.
Returns XML_SUCCESS (0) on success, or
an errorID.
*/
XMLError SaveFile( FILE* fp, bool compact = false );
bool ProcessEntities() const {
return _processEntities;
}
Whitespace WhitespaceMode() const {
return _whitespaceMode;
}
/**
Returns true if this document has a leading Byte Order Mark of UTF8.
*/
bool HasBOM() const {
return _writeBOM;
}
/** Sets whether to write the BOM when writing the file.
*/
void SetBOM( bool useBOM ) {
_writeBOM = useBOM;
}
    /** Return the root element of the DOM. Equivalent to FirstChildElement().
To get the first node, use FirstChild().
*/
XMLElement* RootElement() {
return FirstChildElement();
}
const XMLElement* RootElement() const {
return FirstChildElement();
}
/** Print the Document. If the Printer is not provided, it will
        print to stdout. If you provide a Printer, this can print to a file:
@verbatim
XMLPrinter printer( fp );
doc.Print( &printer );
@endverbatim
Or you can use a printer to print to memory:
@verbatim
XMLPrinter printer;
doc.Print( &printer );
// printer.CStr() has a const char* to the XML
@endverbatim
*/
void Print( XMLPrinter* streamer=0 ) const;
virtual bool Accept( XMLVisitor* visitor ) const;
/**
Create a new Element associated with
this Document. The memory for the Element
is managed by the Document.
*/
XMLElement* NewElement( const char* name );
/**
Create a new Comment associated with
this Document. The memory for the Comment
is managed by the Document.
*/
XMLComment* NewComment( const char* comment );
/**
Create a new Text associated with
this Document. The memory for the Text
is managed by the Document.
*/
XMLText* NewText( const char* text );
/**
Create a new Declaration associated with
this Document. The memory for the object
is managed by the Document.
If the 'text' param is null, the standard
        declaration is used:
@verbatim
<?xml version="1.0" encoding="UTF-8"?>
@endverbatim
*/
XMLDeclaration* NewDeclaration( const char* text=0 );
/**
Create a new Unknown associated with
this Document. The memory for the object
is managed by the Document.
*/
XMLUnknown* NewUnknown( const char* text );
/**
Delete a node associated with this document.
It will be unlinked from the DOM.
*/
void DeleteNode( XMLNode* node );
/// Clears the error flags.
void ClearError();
/// Return true if there was an error parsing the document.
bool Error() const {
return _errorID != XML_SUCCESS;
}
/// Return the errorID.
XMLError ErrorID() const {
return _errorID;
}
const char* ErrorName() const;
static const char* ErrorIDToName(XMLError errorID);
/** Returns a "long form" error description. A hopefully helpful
diagnostic with location, line number, and/or additional info.
*/
const char* ErrorStr() const;
/// A (trivial) utility function that prints the ErrorStr() to stdout.
void PrintError() const;
/// Return the line where the error occurred, or zero if unknown.
int ErrorLineNum() const
{
return _errorLineNum;
}
/// Clear the document, resetting it to the initial state.
void Clear();
/**
Copies this document to a target document.
The target will be completely cleared before the copy.
If you want to copy a sub-tree, see XMLNode::DeepClone().
        NOTE: the 'target' must be non-null.
*/
void DeepCopy(XMLDocument* target) const;
// internal
char* Identify( char* p, XMLNode** node );
// internal
void MarkInUse(const XMLNode* const);
virtual XMLNode* ShallowClone( XMLDocument* /*document*/ ) const {
return 0;
}
virtual bool ShallowEqual( const XMLNode* /*compare*/ ) const {
return false;
}
private:
XMLDocument( const XMLDocument& ); // not supported
void operator=( const XMLDocument& ); // not supported
bool _writeBOM;
bool _processEntities;
XMLError _errorID;
Whitespace _whitespaceMode;
mutable StrPair _errorStr;
int _errorLineNum;
char* _charBuffer;
int _parseCurLineNum;
int _parsingDepth;
// Memory tracking does add some overhead.
// However, the code assumes that you don't
// have a bunch of unlinked nodes around.
// Therefore it takes less memory to track
// in the document vs. a linked list in the XMLNode,
// and the performance is the same.
DynArray<XMLNode*, 10> _unlinked;
MemPoolT< sizeof(XMLElement) > _elementPool;
MemPoolT< sizeof(XMLAttribute) > _attributePool;
MemPoolT< sizeof(XMLText) > _textPool;
MemPoolT< sizeof(XMLComment) > _commentPool;
static const char* _errorNames[XML_ERROR_COUNT];
void Parse();
void SetError( XMLError error, int lineNum, const char* format, ... );
// Something of an obvious security hole, once it was discovered.
// Either an ill-formed XML or an excessively deep one can overflow
// the stack. Track stack depth, and error out if needed.
class DepthTracker {
public:
explicit DepthTracker(XMLDocument * document) {
this->_document = document;
document->PushDepth();
}
~DepthTracker() {
_document->PopDepth();
}
private:
XMLDocument * _document;
};
void PushDepth();
void PopDepth();
template<class NodeType, int PoolElementSize>
NodeType* CreateUnlinkedNode( MemPoolT<PoolElementSize>& pool );
};
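/* Example (illustrative): a typical load-query-save round trip with XMLDocument.
   The file names and the element access are placeholders; error handling uses the
   Error()/ErrorStr() API above.
   @verbatim
   XMLDocument doc;
   if ( doc.LoadFile( "input.xml" ) != XML_SUCCESS ) {
       printf( "Load failed: %s\n", doc.ErrorStr() );
   }
   else {
       XMLElement* root = doc.RootElement();
       if ( root ) {
           printf( "Root element: %s\n", root->Name() );
       }
       doc.SaveFile( "output.xml" );
   }
   @endverbatim
*/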
template<class NodeType, int PoolElementSize>
inline NodeType* XMLDocument::CreateUnlinkedNode( MemPoolT<PoolElementSize>& pool )
{
TIXMLASSERT( sizeof( NodeType ) == PoolElementSize );
TIXMLASSERT( sizeof( NodeType ) == pool.ItemSize() );
NodeType* returnNode = new (pool.Alloc()) NodeType( this );
TIXMLASSERT( returnNode );
returnNode->_memPool = &pool;
_unlinked.Push(returnNode);
return returnNode;
}
/**
    An XMLHandle is a class that wraps a node pointer with null checks; this is
an incredibly useful thing. Note that XMLHandle is not part of the TinyXML-2
DOM structure. It is a separate utility class.
Take an example:
@verbatim
<Document>
<Element attributeA = "valueA">
<Child attributeB = "value1" />
<Child attributeB = "value2" />
</Element>
</Document>
@endverbatim
Assuming you want the value of "attributeB" in the 2nd "Child" element, it's very
easy to write a *lot* of code that looks like:
@verbatim
XMLElement* root = document.FirstChildElement( "Document" );
if ( root )
{
XMLElement* element = root->FirstChildElement( "Element" );
if ( element )
{
XMLElement* child = element->FirstChildElement( "Child" );
if ( child )
{
XMLElement* child2 = child->NextSiblingElement( "Child" );
if ( child2 )
{
// Finally do something useful.
@endverbatim
And that doesn't even cover "else" cases. XMLHandle addresses the verbosity
    of such code. An XMLHandle checks for null pointers so it is perfectly safe
and correct to use:
@verbatim
XMLHandle docHandle( &document );
XMLElement* child2 = docHandle.FirstChildElement( "Document" ).FirstChildElement( "Element" ).FirstChildElement().NextSiblingElement();
if ( child2 )
{
// do something useful
@endverbatim
Which is MUCH more concise and useful.
It is also safe to copy handles - internally they are nothing more than node pointers.
@verbatim
XMLHandle handleCopy = handle;
@endverbatim
See also XMLConstHandle, which is the same as XMLHandle, but operates on const objects.
*/
class TINYXML2_LIB XMLHandle
{
public:
/// Create a handle from any node (at any depth of the tree.) This can be a null pointer.
explicit XMLHandle( XMLNode* node ) : _node( node ) {
}
/// Create a handle from a node.
explicit XMLHandle( XMLNode& node ) : _node( &node ) {
}
/// Copy constructor
XMLHandle( const XMLHandle& ref ) : _node( ref._node ) {
}
/// Assignment
XMLHandle& operator=( const XMLHandle& ref ) {
_node = ref._node;
return *this;
}
/// Get the first child of this handle.
XMLHandle FirstChild() {
return XMLHandle( _node ? _node->FirstChild() : 0 );
}
/// Get the first child element of this handle.
XMLHandle FirstChildElement( const char* name = 0 ) {
return XMLHandle( _node ? _node->FirstChildElement( name ) : 0 );
}
/// Get the last child of this handle.
XMLHandle LastChild() {
return XMLHandle( _node ? _node->LastChild() : 0 );
}
/// Get the last child element of this handle.
XMLHandle LastChildElement( const char* name = 0 ) {
return XMLHandle( _node ? _node->LastChildElement( name ) : 0 );
}
/// Get the previous sibling of this handle.
XMLHandle PreviousSibling() {
return XMLHandle( _node ? _node->PreviousSibling() : 0 );
}
/// Get the previous sibling element of this handle.
XMLHandle PreviousSiblingElement( const char* name = 0 ) {
return XMLHandle( _node ? _node->PreviousSiblingElement( name ) : 0 );
}
/// Get the next sibling of this handle.
XMLHandle NextSibling() {
return XMLHandle( _node ? _node->NextSibling() : 0 );
}
/// Get the next sibling element of this handle.
XMLHandle NextSiblingElement( const char* name = 0 ) {
return XMLHandle( _node ? _node->NextSiblingElement( name ) : 0 );
}
/// Safe cast to XMLNode. This can return null.
XMLNode* ToNode() {
return _node;
}
/// Safe cast to XMLElement. This can return null.
XMLElement* ToElement() {
return ( _node ? _node->ToElement() : 0 );
}
/// Safe cast to XMLText. This can return null.
XMLText* ToText() {
return ( _node ? _node->ToText() : 0 );
}
/// Safe cast to XMLUnknown. This can return null.
XMLUnknown* ToUnknown() {
return ( _node ? _node->ToUnknown() : 0 );
}
/// Safe cast to XMLDeclaration. This can return null.
XMLDeclaration* ToDeclaration() {
return ( _node ? _node->ToDeclaration() : 0 );
}
private:
XMLNode* _node;
};
/**
A variant of the XMLHandle class for working with const XMLNodes and Documents. It is the
same in all regards, except for the 'const' qualifiers. See XMLHandle for API.
*/
class TINYXML2_LIB XMLConstHandle
{
public:
explicit XMLConstHandle( const XMLNode* node ) : _node( node ) {
}
explicit XMLConstHandle( const XMLNode& node ) : _node( &node ) {
}
XMLConstHandle( const XMLConstHandle& ref ) : _node( ref._node ) {
}
XMLConstHandle& operator=( const XMLConstHandle& ref ) {
_node = ref._node;
return *this;
}
const XMLConstHandle FirstChild() const {
return XMLConstHandle( _node ? _node->FirstChild() : 0 );
}
const XMLConstHandle FirstChildElement( const char* name = 0 ) const {
return XMLConstHandle( _node ? _node->FirstChildElement( name ) : 0 );
}
const XMLConstHandle LastChild() const {
return XMLConstHandle( _node ? _node->LastChild() : 0 );
}
const XMLConstHandle LastChildElement( const char* name = 0 ) const {
return XMLConstHandle( _node ? _node->LastChildElement( name ) : 0 );
}
const XMLConstHandle PreviousSibling() const {
return XMLConstHandle( _node ? _node->PreviousSibling() : 0 );
}
const XMLConstHandle PreviousSiblingElement( const char* name = 0 ) const {
return XMLConstHandle( _node ? _node->PreviousSiblingElement( name ) : 0 );
}
const XMLConstHandle NextSibling() const {
return XMLConstHandle( _node ? _node->NextSibling() : 0 );
}
const XMLConstHandle NextSiblingElement( const char* name = 0 ) const {
return XMLConstHandle( _node ? _node->NextSiblingElement( name ) : 0 );
}
const XMLNode* ToNode() const {
return _node;
}
const XMLElement* ToElement() const {
return ( _node ? _node->ToElement() : 0 );
}
const XMLText* ToText() const {
return ( _node ? _node->ToText() : 0 );
}
const XMLUnknown* ToUnknown() const {
return ( _node ? _node->ToUnknown() : 0 );
}
const XMLDeclaration* ToDeclaration() const {
return ( _node ? _node->ToDeclaration() : 0 );
}
private:
const XMLNode* _node;
};
/**
Printing functionality. The XMLPrinter gives you more
options than the XMLDocument::Print() method.
It can:
-# Print to memory.
-# Print to a file you provide.
    -# Print XML without an XMLDocument.
Print to Memory
@verbatim
XMLPrinter printer;
doc.Print( &printer );
SomeFunction( printer.CStr() );
@endverbatim
Print to a File
You provide the file pointer.
@verbatim
XMLPrinter printer( fp );
doc.Print( &printer );
@endverbatim
    Print without an XMLDocument
When loading, an XML parser is very useful. However, sometimes
when saving, it just gets in the way. The code is often set up
for streaming, and constructing the DOM is just overhead.
The Printer supports the streaming case. The following code
prints out a trivially simple XML file without ever creating
an XML document.
@verbatim
XMLPrinter printer( fp );
printer.OpenElement( "foo" );
printer.PushAttribute( "foo", "bar" );
printer.CloseElement();
@endverbatim
*/
class TINYXML2_LIB XMLPrinter : public XMLVisitor
{
public:
/** Construct the printer. If the FILE* is specified,
this will print to the FILE. Else it will print
to memory, and the result is available in CStr().
If 'compact' is set to true, then output is created
with only required whitespace and newlines.
*/
XMLPrinter( FILE* file=0, bool compact = false, int depth = 0 );
virtual ~XMLPrinter() {}
/** If streaming, write the BOM and declaration. */
void PushHeader( bool writeBOM, bool writeDeclaration );
/** If streaming, start writing an element.
The element must be closed with CloseElement()
*/
void OpenElement( const char* name, bool compactMode=false );
/// If streaming, add an attribute to an open element.
void PushAttribute( const char* name, const char* value );
void PushAttribute( const char* name, int value );
void PushAttribute( const char* name, unsigned value );
void PushAttribute( const char* name, int64_t value );
void PushAttribute( const char* name, uint64_t value );
void PushAttribute( const char* name, bool value );
void PushAttribute( const char* name, double value );
/// If streaming, close the Element.
virtual void CloseElement( bool compactMode=false );
/// Add a text node.
void PushText( const char* text, bool cdata=false );
/// Add a text node from an integer.
void PushText( int value );
/// Add a text node from an unsigned.
void PushText( unsigned value );
    /// Add a text node from a signed 64-bit integer.
void PushText( int64_t value );
    /// Add a text node from an unsigned 64-bit integer.
void PushText( uint64_t value );
/// Add a text node from a bool.
void PushText( bool value );
/// Add a text node from a float.
void PushText( float value );
/// Add a text node from a double.
void PushText( double value );
/// Add a comment
void PushComment( const char* comment );
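    /// Add a declaration.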
void PushDeclaration( const char* value );
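    /// Add an unknown.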
void PushUnknown( const char* value );
virtual bool VisitEnter( const XMLDocument& /*doc*/ );
virtual bool VisitExit( const XMLDocument& /*doc*/ ) {
return true;
}
virtual bool VisitEnter( const XMLElement& element, const XMLAttribute* attribute );
virtual bool VisitExit( const XMLElement& element );
virtual bool Visit( const XMLText& text );
virtual bool Visit( const XMLComment& comment );
virtual bool Visit( const XMLDeclaration& declaration );
virtual bool Visit( const XMLUnknown& unknown );
/**
If in print to memory mode, return a pointer to
the XML file in memory.
*/
const char* CStr() const {
return _buffer.Mem();
}
/**
If in print to memory mode, return the size
of the XML file in memory. (Note the size returned
includes the terminating null.)
*/
int CStrSize() const {
return _buffer.Size();
}
/**
If in print to memory mode, reset the buffer to the
beginning.
*/
void ClearBuffer( bool resetToFirstElement = true ) {
_buffer.Clear();
_buffer.Push(0);
_firstElement = resetToFirstElement;
}
protected:
virtual bool CompactMode( const XMLElement& ) { return _compactMode; }
/** Prints out the space before an element. You may override to change
the space and tabs used. A PrintSpace() override should call Print().
*/
virtual void PrintSpace( int depth );
virtual void Print( const char* format, ... );
virtual void Write( const char* data, size_t size );
virtual void Putc( char ch );
inline void Write(const char* data) { Write(data, strlen(data)); }
void SealElementIfJustOpened();
bool _elementJustOpened;
DynArray< const char*, 10 > _stack;
private:
/**
Prepares to write a new node. This includes sealing an element that was
just opened, and writing any whitespace necessary if not in compact mode.
*/
void PrepareForNewNode( bool compactMode );
void PrintString( const char*, bool restrictedEntitySet ); // prints out, after detecting entities.
bool _firstElement;
FILE* _fp;
int _depth;
int _textDepth;
bool _processEntities;
bool _compactMode;
enum {
ENTITY_RANGE = 64,
BUF_SIZE = 200
};
bool _entityFlag[ENTITY_RANGE];
bool _restrictedEntityFlag[ENTITY_RANGE];
DynArray< char, 20 > _buffer;
// Prohibit cloning, intentionally not implemented
XMLPrinter( const XMLPrinter& );
XMLPrinter& operator=( const XMLPrinter& );
};
} // tinyxml2
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
#endif // TINYXML2_INCLUDED | 71,400 | C | 29.00042 | 166 | 0.641106 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/iMpcdiDataProvider.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <pxr/pxr.h>
#include <pxr/base/tf/token.h>
#include <pxr/base/tf/type.h>
#include "iMpcdiDataProvider.h"
PXR_NAMESPACE_OPEN_SCOPE
IEdfDataProvider::IEdfDataProvider(const EdfDataParameters& parameters) : _parameters(parameters)
{
}
IEdfDataProvider::~IEdfDataProvider() = default;
IEdfSourceData::~IEdfSourceData() = default;
const EdfDataParameters& IEdfDataProvider::GetParameters() const
{
return this->_parameters;
}
TF_REGISTRY_FUNCTION(TfType)
{
TfType::Define<IEdfDataProvider>();
}
PXR_NAMESPACE_CLOSE_SCOPE | 1,127 | C++ | 26.512194 | 97 | 0.761313 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiDataProviderFactory.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_EDF_EDFDATAPROVIDERFACTORY_H_
#define OMNI_EDF_EDFDATAPROVIDERFACTORY_H_
#include <pxr/pxr.h>
#include <pxr/base/tf/registryManager.h>
#include <pxr/base/tf/type.h>
#include "api.h"
#include "iMpcdiDataProvider.h"
PXR_NAMESPACE_OPEN_SCOPE
#ifdef doxygen
#define EDF_DEFINE_DATAPROVIDER(ProviderClass, BaseClass1, ...)
#else
#define EDF_DEFINE_DATAPROVIDER(...) \
TF_REGISTRY_FUNCTION(TfType) { \
EdfDefineDataProvider<__VA_ARGS__>(); \
}
#endif
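// Example (illustrative): a concrete provider class (the name below is a
// placeholder) would typically register itself in its translation unit with:
//
// EDF_DEFINE_DATAPROVIDER(MpcdiDataProvider, IEdfDataProvider)
//
// which expands to a TF_REGISTRY_FUNCTION(TfType) block that calls
// EdfDefineDataProvider<MpcdiDataProvider, IEdfDataProvider>().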
class EdfDataProviderFactoryBase : public TfType::FactoryBase
{
public:
MPCDI_API virtual ~EdfDataProviderFactoryBase();
MPCDI_API virtual IEdfDataProvider* New(const EdfDataParameters& parameters) const = 0;
};
template <class T>
class EdfDataProviderFactory : public EdfDataProviderFactoryBase
{
public:
virtual IEdfDataProvider* New(const EdfDataParameters& parameters) const override
{
return new T(parameters);
}
};
template <class DataProvider, class ...Bases>
void EdfDefineDataProvider()
{
TfType::Define<DataProvider, TfType::Bases<Bases...>>().template SetFactory<EdfDataProviderFactory<DataProvider> >();
}
PXR_NAMESPACE_CLOSE_SCOPE
#endif | 1,735 | C | 27.459016 | 118 | 0.757349 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/tinyxml2.cpp | /*
Original code by Lee Thomason (www.grinninglizard.com)
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any
damages arising from the use of this software.
Permission is granted to anyone to use this software for any
purpose, including commercial applications, and to alter it and
redistribute it freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must
not claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product documentation
would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and
must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*/
#include "tinyxml2.h"
#include <new>   // yes, this one new-style header is in the Android SDK.
#if defined(ANDROID_NDK) || defined(__BORLANDC__) || defined(__QNXNTO__)
# include <stddef.h>
# include <stdarg.h>
#else
# include <cstddef>
# include <cstdarg>
#endif
#if defined(_MSC_VER) && (_MSC_VER >= 1400 ) && (!defined WINCE)
// Microsoft Visual Studio, version 2005 and higher. Not WinCE.
/*int _snprintf_s(
char *buffer,
size_t sizeOfBuffer,
size_t count,
const char *format [,
argument] ...
);*/
static inline int TIXML_SNPRINTF( char* buffer, size_t size, const char* format, ... )
{
va_list va;
va_start( va, format );
const int result = vsnprintf_s( buffer, size, _TRUNCATE, format, va );
va_end( va );
return result;
}
static inline int TIXML_VSNPRINTF( char* buffer, size_t size, const char* format, va_list va )
{
const int result = vsnprintf_s( buffer, size, _TRUNCATE, format, va );
return result;
}
#define TIXML_VSCPRINTF _vscprintf
#define TIXML_SSCANF sscanf_s
#elif defined _MSC_VER
// Microsoft Visual Studio 2003 and earlier or WinCE
#define TIXML_SNPRINTF _snprintf
#define TIXML_VSNPRINTF _vsnprintf
#define TIXML_SSCANF sscanf
#if (_MSC_VER < 1400 ) && (!defined WINCE)
// Microsoft Visual Studio 2003 and not WinCE.
#define TIXML_VSCPRINTF _vscprintf // VS2003's C runtime has this, but VC6 C runtime or WinCE SDK doesn't.
#else
// Microsoft Visual Studio 2003 and earlier or WinCE.
static inline int TIXML_VSCPRINTF( const char* format, va_list va )
{
int len = 512;
for (;;) {
len = len*2;
char* str = new char[len]();
const int required = _vsnprintf(str, len, format, va);
delete[] str;
if ( required != -1 ) {
TIXMLASSERT( required >= 0 );
len = required;
break;
}
}
TIXMLASSERT( len >= 0 );
return len;
}
#endif
#else
// GCC version 3 and higher
//#warning( "Using sn* functions." )
#define TIXML_SNPRINTF snprintf
#define TIXML_VSNPRINTF vsnprintf
static inline int TIXML_VSCPRINTF( const char* format, va_list va )
{
int len = vsnprintf( 0, 0, format, va );
TIXMLASSERT( len >= 0 );
return len;
}
#define TIXML_SSCANF sscanf
#endif
#if defined(_WIN64)
#define TIXML_FSEEK _fseeki64
#define TIXML_FTELL _ftelli64
#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__NetBSD__) || defined(__DragonFly__) || defined(__CYGWIN__)
#define TIXML_FSEEK fseeko
#define TIXML_FTELL ftello
#elif defined(__ANDROID__)
#if __ANDROID_API__ > 24
#define TIXML_FSEEK fseeko64
#define TIXML_FTELL ftello64
#else
#define TIXML_FSEEK fseeko
#define TIXML_FTELL ftello
#endif
#elif defined(__unix__) && defined(__x86_64__)
#define TIXML_FSEEK fseeko64
#define TIXML_FTELL ftello64
#else
#define TIXML_FSEEK fseek
#define TIXML_FTELL ftell
#endif
static const char LINE_FEED = static_cast<char>(0x0a); // all line endings are normalized to LF
static const char LF = LINE_FEED;
static const char CARRIAGE_RETURN = static_cast<char>(0x0d); // CR gets filtered out
static const char CR = CARRIAGE_RETURN;
static const char SINGLE_QUOTE = '\'';
static const char DOUBLE_QUOTE = '\"';
// Bunch of unicode info at:
// http://www.unicode.org/faq/utf_bom.html
// ef bb bf (Microsoft "lead bytes") - designates UTF-8
static const unsigned char TIXML_UTF_LEAD_0 = 0xefU;
static const unsigned char TIXML_UTF_LEAD_1 = 0xbbU;
static const unsigned char TIXML_UTF_LEAD_2 = 0xbfU;
namespace tinyxml2
{
struct Entity {
const char* pattern;
int length;
char value;
};
static const int NUM_ENTITIES = 5;
static const Entity entities[NUM_ENTITIES] = {
{ "quot", 4, DOUBLE_QUOTE },
{ "amp", 3, '&' },
{ "apos", 4, SINGLE_QUOTE },
{ "lt", 2, '<' },
{ "gt", 2, '>' }
};
StrPair::~StrPair()
{
Reset();
}
void StrPair::TransferTo( StrPair* other )
{
if ( this == other ) {
return;
}
// This in effect implements the assignment operator by "moving"
// ownership (as in auto_ptr).
TIXMLASSERT( other != 0 );
TIXMLASSERT( other->_flags == 0 );
TIXMLASSERT( other->_start == 0 );
TIXMLASSERT( other->_end == 0 );
other->Reset();
other->_flags = _flags;
other->_start = _start;
other->_end = _end;
_flags = 0;
_start = 0;
_end = 0;
}
void StrPair::Reset()
{
if ( _flags & NEEDS_DELETE ) {
delete [] _start;
}
_flags = 0;
_start = 0;
_end = 0;
}
void StrPair::SetStr( const char* str, int flags )
{
TIXMLASSERT( str );
Reset();
size_t len = strlen( str );
TIXMLASSERT( _start == 0 );
_start = new char[ len+1 ];
memcpy( _start, str, len+1 );
_end = _start + len;
_flags = flags | NEEDS_DELETE;
}
char* StrPair::ParseText( char* p, const char* endTag, int strFlags, int* curLineNumPtr )
{
TIXMLASSERT( p );
TIXMLASSERT( endTag && *endTag );
TIXMLASSERT(curLineNumPtr);
char* start = p;
const char endChar = *endTag;
size_t length = strlen( endTag );
// Inner loop of text parsing.
while ( *p ) {
if ( *p == endChar && strncmp( p, endTag, length ) == 0 ) {
Set( start, p, strFlags );
return p + length;
} else if (*p == '\n') {
++(*curLineNumPtr);
}
++p;
TIXMLASSERT( p );
}
return 0;
}
char* StrPair::ParseName( char* p )
{
if ( !p || !(*p) ) {
return 0;
}
if ( !XMLUtil::IsNameStartChar( (unsigned char) *p ) ) {
return 0;
}
char* const start = p;
++p;
while ( *p && XMLUtil::IsNameChar( (unsigned char) *p ) ) {
++p;
}
Set( start, p, 0 );
return p;
}
void StrPair::CollapseWhitespace()
{
// Adjusting _start would cause undefined behavior on delete[]
TIXMLASSERT( ( _flags & NEEDS_DELETE ) == 0 );
// Trim leading space.
_start = XMLUtil::SkipWhiteSpace( _start, 0 );
if ( *_start ) {
const char* p = _start; // the read pointer
char* q = _start; // the write pointer
while( *p ) {
if ( XMLUtil::IsWhiteSpace( *p )) {
p = XMLUtil::SkipWhiteSpace( p, 0 );
if ( *p == 0 ) {
break; // don't write to q; this trims the trailing space.
}
*q = ' ';
++q;
}
*q = *p;
++q;
++p;
}
*q = 0;
}
}
const char* StrPair::GetStr()
{
TIXMLASSERT( _start );
TIXMLASSERT( _end );
if ( _flags & NEEDS_FLUSH ) {
*_end = 0;
_flags ^= NEEDS_FLUSH;
if ( _flags ) {
const char* p = _start; // the read pointer
char* q = _start; // the write pointer
while( p < _end ) {
if ( (_flags & NEEDS_NEWLINE_NORMALIZATION) && *p == CR ) {
// CR-LF pair becomes LF
// CR alone becomes LF
// LF-CR becomes LF
if ( *(p+1) == LF ) {
p += 2;
}
else {
++p;
}
*q = LF;
++q;
}
else if ( (_flags & NEEDS_NEWLINE_NORMALIZATION) && *p == LF ) {
if ( *(p+1) == CR ) {
p += 2;
}
else {
++p;
}
*q = LF;
++q;
}
else if ( (_flags & NEEDS_ENTITY_PROCESSING) && *p == '&' ) {
// Entities handled by tinyXML2:
// - special entities in the entity table [in/out]
// - numeric character reference [in]
                    // &#20013; or &#x4e2d;
if ( *(p+1) == '#' ) {
const int buflen = 10;
char buf[buflen] = { 0 };
int len = 0;
const char* adjusted = const_cast<char*>( XMLUtil::GetCharacterRef( p, buf, &len ) );
if ( adjusted == 0 ) {
*q = *p;
++p;
++q;
}
else {
TIXMLASSERT( 0 <= len && len <= buflen );
TIXMLASSERT( q + len <= adjusted );
p = adjusted;
memcpy( q, buf, len );
q += len;
}
}
else {
bool entityFound = false;
for( int i = 0; i < NUM_ENTITIES; ++i ) {
const Entity& entity = entities[i];
if ( strncmp( p + 1, entity.pattern, entity.length ) == 0
&& *( p + entity.length + 1 ) == ';' ) {
// Found an entity - convert.
*q = entity.value;
++q;
p += entity.length + 2;
entityFound = true;
break;
}
}
if ( !entityFound ) {
// fixme: treat as error?
++p;
++q;
}
}
}
else {
*q = *p;
++p;
++q;
}
}
*q = 0;
}
// The loop below has plenty going on, and this
// is a less useful mode. Break it out.
if ( _flags & NEEDS_WHITESPACE_COLLAPSING ) {
CollapseWhitespace();
}
_flags = (_flags & NEEDS_DELETE);
}
TIXMLASSERT( _start );
return _start;
}
// --------- XMLUtil ----------- //
const char* XMLUtil::writeBoolTrue = "true";
const char* XMLUtil::writeBoolFalse = "false";
void XMLUtil::SetBoolSerialization(const char* writeTrue, const char* writeFalse)
{
static const char* defTrue = "true";
static const char* defFalse = "false";
writeBoolTrue = (writeTrue) ? writeTrue : defTrue;
writeBoolFalse = (writeFalse) ? writeFalse : defFalse;
}
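// Example (illustrative): callers may override how booleans are serialized,
// e.g. XMLUtil::SetBoolSerialization( "yes", "no" ); passing null for either
// argument falls back to the default "true"/"false" strings.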
const char* XMLUtil::ReadBOM( const char* p, bool* bom )
{
TIXMLASSERT( p );
TIXMLASSERT( bom );
*bom = false;
const unsigned char* pu = reinterpret_cast<const unsigned char*>(p);
// Check for BOM:
if ( *(pu+0) == TIXML_UTF_LEAD_0
&& *(pu+1) == TIXML_UTF_LEAD_1
&& *(pu+2) == TIXML_UTF_LEAD_2 ) {
*bom = true;
p += 3;
}
TIXMLASSERT( p );
return p;
}
void XMLUtil::ConvertUTF32ToUTF8( unsigned long input, char* output, int* length )
{
const unsigned long BYTE_MASK = 0xBF;
const unsigned long BYTE_MARK = 0x80;
const unsigned long FIRST_BYTE_MARK[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC };
if (input < 0x80) {
*length = 1;
}
else if ( input < 0x800 ) {
*length = 2;
}
else if ( input < 0x10000 ) {
*length = 3;
}
else if ( input < 0x200000 ) {
*length = 4;
}
else {
*length = 0; // This code won't convert this correctly anyway.
return;
}
output += *length;
// Scary scary fall throughs are annotated with carefully designed comments
// to suppress compiler warnings such as -Wimplicit-fallthrough in gcc
switch (*length) {
case 4:
--output;
*output = static_cast<char>((input | BYTE_MARK) & BYTE_MASK);
input >>= 6;
//fall through
case 3:
--output;
*output = static_cast<char>((input | BYTE_MARK) & BYTE_MASK);
input >>= 6;
//fall through
case 2:
--output;
*output = static_cast<char>((input | BYTE_MARK) & BYTE_MASK);
input >>= 6;
//fall through
case 1:
--output;
*output = static_cast<char>(input | FIRST_BYTE_MARK[*length]);
break;
default:
TIXMLASSERT( false );
}
}
const char* XMLUtil::GetCharacterRef( const char* p, char* value, int* length )
{
// Presume an entity, and pull it out.
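    // Example (illustrative): for p pointing at "&#65;" this stores the single
    // byte 'A' in 'value', sets *length to 1, and returns the character after ';'.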
*length = 0;
if ( *(p+1) == '#' && *(p+2) ) {
unsigned long ucs = 0;
TIXMLASSERT( sizeof( ucs ) >= 4 );
ptrdiff_t delta = 0;
unsigned mult = 1;
static const char SEMICOLON = ';';
if ( *(p+2) == 'x' ) {
// Hexadecimal.
const char* q = p+3;
if ( !(*q) ) {
return 0;
}
q = strchr( q, SEMICOLON );
if ( !q ) {
return 0;
}
TIXMLASSERT( *q == SEMICOLON );
delta = q-p;
--q;
while ( *q != 'x' ) {
unsigned int digit = 0;
if ( *q >= '0' && *q <= '9' ) {
digit = *q - '0';
}
else if ( *q >= 'a' && *q <= 'f' ) {
digit = *q - 'a' + 10;
}
else if ( *q >= 'A' && *q <= 'F' ) {
digit = *q - 'A' + 10;
}
else {
return 0;
}
TIXMLASSERT( digit < 16 );
TIXMLASSERT( digit == 0 || mult <= UINT_MAX / digit );
const unsigned int digitScaled = mult * digit;
TIXMLASSERT( ucs <= ULONG_MAX - digitScaled );
ucs += digitScaled;
TIXMLASSERT( mult <= UINT_MAX / 16 );
mult *= 16;
--q;
}
}
else {
// Decimal.
const char* q = p+2;
if ( !(*q) ) {
return 0;
}
q = strchr( q, SEMICOLON );
if ( !q ) {
return 0;
}
TIXMLASSERT( *q == SEMICOLON );
delta = q-p;
--q;
while ( *q != '#' ) {
if ( *q >= '0' && *q <= '9' ) {
const unsigned int digit = *q - '0';
TIXMLASSERT( digit < 10 );
TIXMLASSERT( digit == 0 || mult <= UINT_MAX / digit );
const unsigned int digitScaled = mult * digit;
TIXMLASSERT( ucs <= ULONG_MAX - digitScaled );
ucs += digitScaled;
}
else {
return 0;
}
TIXMLASSERT( mult <= UINT_MAX / 10 );
mult *= 10;
--q;
}
}
// convert the UCS to UTF-8
ConvertUTF32ToUTF8( ucs, value, length );
return p + delta + 1;
}
return p+1;
}
void XMLUtil::ToStr( int v, char* buffer, int bufferSize )
{
TIXML_SNPRINTF( buffer, bufferSize, "%d", v );
}
void XMLUtil::ToStr( unsigned v, char* buffer, int bufferSize )
{
TIXML_SNPRINTF( buffer, bufferSize, "%u", v );
}
void XMLUtil::ToStr( bool v, char* buffer, int bufferSize )
{
TIXML_SNPRINTF( buffer, bufferSize, "%s", v ? writeBoolTrue : writeBoolFalse);
}
/*
ToStr() of a number is a very tricky topic.
https://github.com/leethomason/tinyxml2/issues/106
*/
void XMLUtil::ToStr( float v, char* buffer, int bufferSize )
{
TIXML_SNPRINTF( buffer, bufferSize, "%.8g", v );
}
void XMLUtil::ToStr( double v, char* buffer, int bufferSize )
{
TIXML_SNPRINTF( buffer, bufferSize, "%.17g", v );
}
void XMLUtil::ToStr( int64_t v, char* buffer, int bufferSize )
{
// horrible syntax trick to make the compiler happy about %lld
TIXML_SNPRINTF(buffer, bufferSize, "%lld", static_cast<long long>(v));
}
void XMLUtil::ToStr( uint64_t v, char* buffer, int bufferSize )
{
// horrible syntax trick to make the compiler happy about %llu
    TIXML_SNPRINTF(buffer, bufferSize, "%llu", static_cast<unsigned long long>(v));
}
bool XMLUtil::ToInt(const char* str, int* value)
{
if (IsPrefixHex(str)) {
unsigned v;
if (TIXML_SSCANF(str, "%x", &v) == 1) {
*value = static_cast<int>(v);
return true;
}
}
else {
if (TIXML_SSCANF(str, "%d", value) == 1) {
return true;
}
}
return false;
}
bool XMLUtil::ToUnsigned(const char* str, unsigned* value)
{
if (TIXML_SSCANF(str, IsPrefixHex(str) ? "%x" : "%u", value) == 1) {
return true;
}
return false;
}
bool XMLUtil::ToBool( const char* str, bool* value )
{
int ival = 0;
if ( ToInt( str, &ival )) {
*value = (ival==0) ? false : true;
return true;
}
static const char* TRUE_VALS[] = { "true", "True", "TRUE", 0 };
static const char* FALSE_VALS[] = { "false", "False", "FALSE", 0 };
for (int i = 0; TRUE_VALS[i]; ++i) {
if (StringEqual(str, TRUE_VALS[i])) {
*value = true;
return true;
}
}
for (int i = 0; FALSE_VALS[i]; ++i) {
if (StringEqual(str, FALSE_VALS[i])) {
*value = false;
return true;
}
}
return false;
}
bool XMLUtil::ToFloat( const char* str, float* value )
{
if ( TIXML_SSCANF( str, "%f", value ) == 1 ) {
return true;
}
return false;
}
bool XMLUtil::ToDouble( const char* str, double* value )
{
if ( TIXML_SSCANF( str, "%lf", value ) == 1 ) {
return true;
}
return false;
}
bool XMLUtil::ToInt64(const char* str, int64_t* value)
{
if (IsPrefixHex(str)) {
unsigned long long v = 0; // horrible syntax trick to make the compiler happy about %llx
if (TIXML_SSCANF(str, "%llx", &v) == 1) {
*value = static_cast<int64_t>(v);
return true;
}
}
else {
long long v = 0; // horrible syntax trick to make the compiler happy about %lld
if (TIXML_SSCANF(str, "%lld", &v) == 1) {
*value = static_cast<int64_t>(v);
return true;
}
}
return false;
}
bool XMLUtil::ToUnsigned64(const char* str, uint64_t* value) {
unsigned long long v = 0; // horrible syntax trick to make the compiler happy about %llu
if(TIXML_SSCANF(str, IsPrefixHex(str) ? "%llx" : "%llu", &v) == 1) {
*value = (uint64_t)v;
return true;
}
return false;
}
char* XMLDocument::Identify( char* p, XMLNode** node )
{
TIXMLASSERT( node );
TIXMLASSERT( p );
char* const start = p;
int const startLine = _parseCurLineNum;
p = XMLUtil::SkipWhiteSpace( p, &_parseCurLineNum );
if( !*p ) {
*node = 0;
TIXMLASSERT( p );
return p;
}
// These strings define the matching patterns:
static const char* xmlHeader = { "<?" };
static const char* commentHeader = { "<!--" };
static const char* cdataHeader = { "<![CDATA[" };
static const char* dtdHeader = { "<!" };
static const char* elementHeader = { "<" }; // and a header for everything else; check last.
static const int xmlHeaderLen = 2;
static const int commentHeaderLen = 4;
static const int cdataHeaderLen = 9;
static const int dtdHeaderLen = 2;
static const int elementHeaderLen = 1;
TIXMLASSERT( sizeof( XMLComment ) == sizeof( XMLUnknown ) ); // use same memory pool
TIXMLASSERT( sizeof( XMLComment ) == sizeof( XMLDeclaration ) ); // use same memory pool
XMLNode* returnNode = 0;
if ( XMLUtil::StringEqual( p, xmlHeader, xmlHeaderLen ) ) {
returnNode = CreateUnlinkedNode<XMLDeclaration>( _commentPool );
returnNode->_parseLineNum = _parseCurLineNum;
p += xmlHeaderLen;
}
else if ( XMLUtil::StringEqual( p, commentHeader, commentHeaderLen ) ) {
returnNode = CreateUnlinkedNode<XMLComment>( _commentPool );
returnNode->_parseLineNum = _parseCurLineNum;
p += commentHeaderLen;
}
else if ( XMLUtil::StringEqual( p, cdataHeader, cdataHeaderLen ) ) {
XMLText* text = CreateUnlinkedNode<XMLText>( _textPool );
returnNode = text;
returnNode->_parseLineNum = _parseCurLineNum;
p += cdataHeaderLen;
text->SetCData( true );
}
else if ( XMLUtil::StringEqual( p, dtdHeader, dtdHeaderLen ) ) {
returnNode = CreateUnlinkedNode<XMLUnknown>( _commentPool );
returnNode->_parseLineNum = _parseCurLineNum;
p += dtdHeaderLen;
}
else if ( XMLUtil::StringEqual( p, elementHeader, elementHeaderLen ) ) {
returnNode = CreateUnlinkedNode<XMLElement>( _elementPool );
returnNode->_parseLineNum = _parseCurLineNum;
p += elementHeaderLen;
}
else {
returnNode = CreateUnlinkedNode<XMLText>( _textPool );
returnNode->_parseLineNum = _parseCurLineNum; // Report line of first non-whitespace character
p = start; // Back it up, all the text counts.
_parseCurLineNum = startLine;
}
TIXMLASSERT( returnNode );
TIXMLASSERT( p );
*node = returnNode;
return p;
}
bool XMLDocument::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
if ( visitor->VisitEnter( *this ) ) {
for ( const XMLNode* node=FirstChild(); node; node=node->NextSibling() ) {
if ( !node->Accept( visitor ) ) {
break;
}
}
}
return visitor->VisitExit( *this );
}
// --------- XMLNode ----------- //
XMLNode::XMLNode( XMLDocument* doc ) :
_document( doc ),
_parent( 0 ),
_value(),
_parseLineNum( 0 ),
_firstChild( 0 ), _lastChild( 0 ),
_prev( 0 ), _next( 0 ),
_userData( 0 ),
_memPool( 0 )
{
}
XMLNode::~XMLNode()
{
DeleteChildren();
if ( _parent ) {
_parent->Unlink( this );
}
}
const char* XMLNode::Value() const
{
// Edge case: XMLDocuments don't have a Value. Return null.
if ( this->ToDocument() )
return 0;
return _value.GetStr();
}
void XMLNode::SetValue( const char* str, bool staticMem )
{
if ( staticMem ) {
_value.SetInternedStr( str );
}
else {
_value.SetStr( str );
}
}
XMLNode* XMLNode::DeepClone(XMLDocument* target) const
{
XMLNode* clone = this->ShallowClone(target);
if (!clone) return 0;
for (const XMLNode* child = this->FirstChild(); child; child = child->NextSibling()) {
XMLNode* childClone = child->DeepClone(target);
TIXMLASSERT(childClone);
clone->InsertEndChild(childClone);
}
return clone;
}
void XMLNode::DeleteChildren()
{
while( _firstChild ) {
TIXMLASSERT( _lastChild );
DeleteChild( _firstChild );
}
_firstChild = _lastChild = 0;
}
void XMLNode::Unlink( XMLNode* child )
{
TIXMLASSERT( child );
TIXMLASSERT( child->_document == _document );
TIXMLASSERT( child->_parent == this );
if ( child == _firstChild ) {
_firstChild = _firstChild->_next;
}
if ( child == _lastChild ) {
_lastChild = _lastChild->_prev;
}
if ( child->_prev ) {
child->_prev->_next = child->_next;
}
if ( child->_next ) {
child->_next->_prev = child->_prev;
}
child->_next = 0;
child->_prev = 0;
child->_parent = 0;
}
void XMLNode::DeleteChild( XMLNode* node )
{
TIXMLASSERT( node );
TIXMLASSERT( node->_document == _document );
TIXMLASSERT( node->_parent == this );
Unlink( node );
TIXMLASSERT(node->_prev == 0);
TIXMLASSERT(node->_next == 0);
TIXMLASSERT(node->_parent == 0);
DeleteNode( node );
}
XMLNode* XMLNode::InsertEndChild( XMLNode* addThis )
{
TIXMLASSERT( addThis );
if ( addThis->_document != _document ) {
TIXMLASSERT( false );
return 0;
}
InsertChildPreamble( addThis );
if ( _lastChild ) {
TIXMLASSERT( _firstChild );
TIXMLASSERT( _lastChild->_next == 0 );
_lastChild->_next = addThis;
addThis->_prev = _lastChild;
_lastChild = addThis;
addThis->_next = 0;
}
else {
TIXMLASSERT( _firstChild == 0 );
_firstChild = _lastChild = addThis;
addThis->_prev = 0;
addThis->_next = 0;
}
addThis->_parent = this;
return addThis;
}
XMLNode* XMLNode::InsertFirstChild( XMLNode* addThis )
{
TIXMLASSERT( addThis );
if ( addThis->_document != _document ) {
TIXMLASSERT( false );
return 0;
}
InsertChildPreamble( addThis );
if ( _firstChild ) {
TIXMLASSERT( _lastChild );
TIXMLASSERT( _firstChild->_prev == 0 );
_firstChild->_prev = addThis;
addThis->_next = _firstChild;
_firstChild = addThis;
addThis->_prev = 0;
}
else {
TIXMLASSERT( _lastChild == 0 );
_firstChild = _lastChild = addThis;
addThis->_prev = 0;
addThis->_next = 0;
}
addThis->_parent = this;
return addThis;
}
XMLNode* XMLNode::InsertAfterChild( XMLNode* afterThis, XMLNode* addThis )
{
TIXMLASSERT( addThis );
if ( addThis->_document != _document ) {
TIXMLASSERT( false );
return 0;
}
TIXMLASSERT( afterThis );
if ( afterThis->_parent != this ) {
TIXMLASSERT( false );
return 0;
}
if ( afterThis == addThis ) {
// Current state: BeforeThis -> AddThis -> OneAfterAddThis
        // Now AddThis must disappear from its location and then
// reappear between BeforeThis and OneAfterAddThis.
// So just leave it where it is.
return addThis;
}
if ( afterThis->_next == 0 ) {
// The last node or the only node.
return InsertEndChild( addThis );
}
InsertChildPreamble( addThis );
addThis->_prev = afterThis;
addThis->_next = afterThis->_next;
afterThis->_next->_prev = addThis;
afterThis->_next = addThis;
addThis->_parent = this;
return addThis;
}
const XMLElement* XMLNode::FirstChildElement( const char* name ) const
{
for( const XMLNode* node = _firstChild; node; node = node->_next ) {
const XMLElement* element = node->ToElementWithName( name );
if ( element ) {
return element;
}
}
return 0;
}
const XMLElement* XMLNode::LastChildElement( const char* name ) const
{
for( const XMLNode* node = _lastChild; node; node = node->_prev ) {
const XMLElement* element = node->ToElementWithName( name );
if ( element ) {
return element;
}
}
return 0;
}
const XMLElement* XMLNode::NextSiblingElement( const char* name ) const
{
for( const XMLNode* node = _next; node; node = node->_next ) {
const XMLElement* element = node->ToElementWithName( name );
if ( element ) {
return element;
}
}
return 0;
}
const XMLElement* XMLNode::PreviousSiblingElement( const char* name ) const
{
for( const XMLNode* node = _prev; node; node = node->_prev ) {
const XMLElement* element = node->ToElementWithName( name );
if ( element ) {
return element;
}
}
return 0;
}
char* XMLNode::ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr )
{
// This is a recursive method, but thinking about it "at the current level"
// it is a pretty simple flat list:
// <foo/>
// <!-- comment -->
//
// With a special case:
// <foo>
// </foo>
// <!-- comment -->
//
// Where the closing element (/foo) *must* be the next thing after the opening
// element, and the names must match. BUT the tricky bit is that the closing
// element will be read by the child.
//
    // 'endTag' is the end tag for this node; it is returned by a call to a child.
    // 'parentEndTag' is the end tag for the parent, which is filled in and returned.
XMLDocument::DepthTracker tracker(_document);
if (_document->Error())
return 0;
while( p && *p ) {
XMLNode* node = 0;
p = _document->Identify( p, &node );
TIXMLASSERT( p );
if ( node == 0 ) {
break;
}
const int initialLineNum = node->_parseLineNum;
StrPair endTag;
p = node->ParseDeep( p, &endTag, curLineNumPtr );
if ( !p ) {
_document->DeleteNode( node );
if ( !_document->Error() ) {
_document->SetError( XML_ERROR_PARSING, initialLineNum, 0);
}
break;
}
const XMLDeclaration* const decl = node->ToDeclaration();
if ( decl ) {
// Declarations are only allowed at document level
//
// Multiple declarations are allowed but all declarations
// must occur before anything else.
//
// Optimized due to a security test case. If the first node is
// a declaration, and the last node is a declaration, then only
// declarations have so far been added.
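            // Illustrative examples: "<?xml ...?><?xml-stylesheet ...?><root/>" is
            // accepted (every declaration precedes the first element), while
            // "<root/><?xml ...?>" or a declaration nested inside <root/> fails here.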
bool wellLocated = false;
if (ToDocument()) {
if (FirstChild()) {
                    wellLocated =
                        FirstChild()->ToDeclaration() &&
                        LastChild() &&
                        LastChild()->ToDeclaration();
}
else {
wellLocated = true;
}
}
if ( !wellLocated ) {
_document->SetError( XML_ERROR_PARSING_DECLARATION, initialLineNum, "XMLDeclaration value=%s", decl->Value());
_document->DeleteNode( node );
break;
}
}
XMLElement* ele = node->ToElement();
if ( ele ) {
// We read the end tag. Return it to the parent.
if ( ele->ClosingType() == XMLElement::CLOSING ) {
if ( parentEndTag ) {
ele->_value.TransferTo( parentEndTag );
}
node->_memPool->SetTracked(); // created and then immediately deleted.
DeleteNode( node );
return p;
}
// Handle an end tag returned to this level.
// And handle a bunch of annoying errors.
bool mismatch = false;
if ( endTag.Empty() ) {
if ( ele->ClosingType() == XMLElement::OPEN ) {
mismatch = true;
}
}
else {
if ( ele->ClosingType() != XMLElement::OPEN ) {
mismatch = true;
}
else if ( !XMLUtil::StringEqual( endTag.GetStr(), ele->Name() ) ) {
mismatch = true;
}
}
if ( mismatch ) {
_document->SetError( XML_ERROR_MISMATCHED_ELEMENT, initialLineNum, "XMLElement name=%s", ele->Name());
_document->DeleteNode( node );
break;
}
}
InsertEndChild( node );
}
return 0;
}
/*static*/ void XMLNode::DeleteNode( XMLNode* node )
{
if ( node == 0 ) {
return;
}
TIXMLASSERT(node->_document);
if (!node->ToDocument()) {
node->_document->MarkInUse(node);
}
MemPool* pool = node->_memPool;
node->~XMLNode();
pool->Free( node );
}
void XMLNode::InsertChildPreamble( XMLNode* insertThis ) const
{
TIXMLASSERT( insertThis );
TIXMLASSERT( insertThis->_document == _document );
if (insertThis->_parent) {
insertThis->_parent->Unlink( insertThis );
}
else {
insertThis->_document->MarkInUse(insertThis);
insertThis->_memPool->SetTracked();
}
}
const XMLElement* XMLNode::ToElementWithName( const char* name ) const
{
const XMLElement* element = this->ToElement();
if ( element == 0 ) {
return 0;
}
if ( name == 0 ) {
return element;
}
if ( XMLUtil::StringEqual( element->Name(), name ) ) {
return element;
}
return 0;
}
// --------- XMLText ---------- //
char* XMLText::ParseDeep( char* p, StrPair*, int* curLineNumPtr )
{
if ( this->CData() ) {
p = _value.ParseText( p, "]]>", StrPair::NEEDS_NEWLINE_NORMALIZATION, curLineNumPtr );
if ( !p ) {
_document->SetError( XML_ERROR_PARSING_CDATA, _parseLineNum, 0 );
}
return p;
}
else {
int flags = _document->ProcessEntities() ? StrPair::TEXT_ELEMENT : StrPair::TEXT_ELEMENT_LEAVE_ENTITIES;
if ( _document->WhitespaceMode() == COLLAPSE_WHITESPACE ) {
flags |= StrPair::NEEDS_WHITESPACE_COLLAPSING;
}
p = _value.ParseText( p, "<", flags, curLineNumPtr );
if ( p && *p ) {
return p-1;
}
if ( !p ) {
_document->SetError( XML_ERROR_PARSING_TEXT, _parseLineNum, 0 );
}
}
return 0;
}
XMLNode* XMLText::ShallowClone( XMLDocument* doc ) const
{
if ( !doc ) {
doc = _document;
}
XMLText* text = doc->NewText( Value() ); // fixme: this will always allocate memory. Intern?
text->SetCData( this->CData() );
return text;
}
bool XMLText::ShallowEqual( const XMLNode* compare ) const
{
TIXMLASSERT( compare );
const XMLText* text = compare->ToText();
return ( text && XMLUtil::StringEqual( text->Value(), Value() ) );
}
bool XMLText::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
return visitor->Visit( *this );
}
// --------- XMLComment ---------- //
XMLComment::XMLComment( XMLDocument* doc ) : XMLNode( doc )
{
}
XMLComment::~XMLComment()
{
}
char* XMLComment::ParseDeep( char* p, StrPair*, int* curLineNumPtr )
{
// Comment parses as text.
p = _value.ParseText( p, "-->", StrPair::COMMENT, curLineNumPtr );
if ( p == 0 ) {
_document->SetError( XML_ERROR_PARSING_COMMENT, _parseLineNum, 0 );
}
return p;
}
XMLNode* XMLComment::ShallowClone( XMLDocument* doc ) const
{
if ( !doc ) {
doc = _document;
}
XMLComment* comment = doc->NewComment( Value() ); // fixme: this will always allocate memory. Intern?
return comment;
}
bool XMLComment::ShallowEqual( const XMLNode* compare ) const
{
TIXMLASSERT( compare );
const XMLComment* comment = compare->ToComment();
return ( comment && XMLUtil::StringEqual( comment->Value(), Value() ));
}
bool XMLComment::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
return visitor->Visit( *this );
}
// --------- XMLDeclaration ---------- //
XMLDeclaration::XMLDeclaration( XMLDocument* doc ) : XMLNode( doc )
{
}
XMLDeclaration::~XMLDeclaration()
{
//printf( "~XMLDeclaration\n" );
}
char* XMLDeclaration::ParseDeep( char* p, StrPair*, int* curLineNumPtr )
{
// Declaration parses as text.
p = _value.ParseText( p, "?>", StrPair::NEEDS_NEWLINE_NORMALIZATION, curLineNumPtr );
if ( p == 0 ) {
_document->SetError( XML_ERROR_PARSING_DECLARATION, _parseLineNum, 0 );
}
return p;
}
XMLNode* XMLDeclaration::ShallowClone( XMLDocument* doc ) const
{
if ( !doc ) {
doc = _document;
}
XMLDeclaration* dec = doc->NewDeclaration( Value() ); // fixme: this will always allocate memory. Intern?
return dec;
}
bool XMLDeclaration::ShallowEqual( const XMLNode* compare ) const
{
TIXMLASSERT( compare );
const XMLDeclaration* declaration = compare->ToDeclaration();
return ( declaration && XMLUtil::StringEqual( declaration->Value(), Value() ));
}
bool XMLDeclaration::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
return visitor->Visit( *this );
}
// --------- XMLUnknown ---------- //
XMLUnknown::XMLUnknown( XMLDocument* doc ) : XMLNode( doc )
{
}
XMLUnknown::~XMLUnknown()
{
}
char* XMLUnknown::ParseDeep( char* p, StrPair*, int* curLineNumPtr )
{
// Unknown parses as text.
p = _value.ParseText( p, ">", StrPair::NEEDS_NEWLINE_NORMALIZATION, curLineNumPtr );
if ( !p ) {
_document->SetError( XML_ERROR_PARSING_UNKNOWN, _parseLineNum, 0 );
}
return p;
}
XMLNode* XMLUnknown::ShallowClone( XMLDocument* doc ) const
{
if ( !doc ) {
doc = _document;
}
XMLUnknown* text = doc->NewUnknown( Value() ); // fixme: this will always allocate memory. Intern?
return text;
}
bool XMLUnknown::ShallowEqual( const XMLNode* compare ) const
{
TIXMLASSERT( compare );
const XMLUnknown* unknown = compare->ToUnknown();
return ( unknown && XMLUtil::StringEqual( unknown->Value(), Value() ));
}
bool XMLUnknown::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
return visitor->Visit( *this );
}
// --------- XMLAttribute ---------- //
const char* XMLAttribute::Name() const
{
return _name.GetStr();
}
const char* XMLAttribute::Value() const
{
return _value.GetStr();
}
char* XMLAttribute::ParseDeep( char* p, bool processEntities, int* curLineNumPtr )
{
// Parse using the name rules: bug fix, was using ParseText before
p = _name.ParseName( p );
if ( !p || !*p ) {
return 0;
}
// Skip white space before =
p = XMLUtil::SkipWhiteSpace( p, curLineNumPtr );
if ( *p != '=' ) {
return 0;
}
++p; // move up to opening quote
p = XMLUtil::SkipWhiteSpace( p, curLineNumPtr );
if ( *p != '\"' && *p != '\'' ) {
return 0;
}
const char endTag[2] = { *p, 0 };
++p; // move past opening quote
p = _value.ParseText( p, endTag, processEntities ? StrPair::ATTRIBUTE_VALUE : StrPair::ATTRIBUTE_VALUE_LEAVE_ENTITIES, curLineNumPtr );
return p;
}
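// Illustrative example (not part of the library): for the input
//   name = 'value' ...
// ParseDeep leaves Name() == "name" and Value() == "value" and returns a pointer
// just past the closing quote.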
void XMLAttribute::SetName( const char* n )
{
_name.SetStr( n );
}
XMLError XMLAttribute::QueryIntValue( int* value ) const
{
if ( XMLUtil::ToInt( Value(), value )) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryUnsignedValue( unsigned int* value ) const
{
if ( XMLUtil::ToUnsigned( Value(), value )) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryInt64Value(int64_t* value) const
{
if (XMLUtil::ToInt64(Value(), value)) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryUnsigned64Value(uint64_t* value) const
{
if(XMLUtil::ToUnsigned64(Value(), value)) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryBoolValue( bool* value ) const
{
if ( XMLUtil::ToBool( Value(), value )) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryFloatValue( float* value ) const
{
if ( XMLUtil::ToFloat( Value(), value )) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
XMLError XMLAttribute::QueryDoubleValue( double* value ) const
{
if ( XMLUtil::ToDouble( Value(), value )) {
return XML_SUCCESS;
}
return XML_WRONG_ATTRIBUTE_TYPE;
}
void XMLAttribute::SetAttribute( const char* v )
{
_value.SetStr( v );
}
void XMLAttribute::SetAttribute( int v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
_value.SetStr( buf );
}
void XMLAttribute::SetAttribute( unsigned v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
_value.SetStr( buf );
}
void XMLAttribute::SetAttribute(int64_t v)
{
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
_value.SetStr(buf);
}
void XMLAttribute::SetAttribute(uint64_t v)
{
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
_value.SetStr(buf);
}
void XMLAttribute::SetAttribute( bool v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
_value.SetStr( buf );
}
void XMLAttribute::SetAttribute( double v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
_value.SetStr( buf );
}
void XMLAttribute::SetAttribute( float v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
_value.SetStr( buf );
}
// --------- XMLElement ---------- //
XMLElement::XMLElement( XMLDocument* doc ) : XMLNode( doc ),
_closingType( OPEN ),
_rootAttribute( 0 )
{
}
XMLElement::~XMLElement()
{
while( _rootAttribute ) {
XMLAttribute* next = _rootAttribute->_next;
DeleteAttribute( _rootAttribute );
_rootAttribute = next;
}
}
const XMLAttribute* XMLElement::FindAttribute( const char* name ) const
{
for( XMLAttribute* a = _rootAttribute; a; a = a->_next ) {
if ( XMLUtil::StringEqual( a->Name(), name ) ) {
return a;
}
}
return 0;
}
const char* XMLElement::Attribute( const char* name, const char* value ) const
{
const XMLAttribute* a = FindAttribute( name );
if ( !a ) {
return 0;
}
if ( !value || XMLUtil::StringEqual( a->Value(), value )) {
return a->Value();
}
return 0;
}
int XMLElement::IntAttribute(const char* name, int defaultValue) const
{
int i = defaultValue;
QueryIntAttribute(name, &i);
return i;
}
unsigned XMLElement::UnsignedAttribute(const char* name, unsigned defaultValue) const
{
unsigned i = defaultValue;
QueryUnsignedAttribute(name, &i);
return i;
}
int64_t XMLElement::Int64Attribute(const char* name, int64_t defaultValue) const
{
int64_t i = defaultValue;
QueryInt64Attribute(name, &i);
return i;
}
uint64_t XMLElement::Unsigned64Attribute(const char* name, uint64_t defaultValue) const
{
uint64_t i = defaultValue;
QueryUnsigned64Attribute(name, &i);
return i;
}
bool XMLElement::BoolAttribute(const char* name, bool defaultValue) const
{
bool b = defaultValue;
QueryBoolAttribute(name, &b);
return b;
}
double XMLElement::DoubleAttribute(const char* name, double defaultValue) const
{
double d = defaultValue;
QueryDoubleAttribute(name, &d);
return d;
}
float XMLElement::FloatAttribute(const char* name, float defaultValue) const
{
float f = defaultValue;
QueryFloatAttribute(name, &f);
return f;
}
const char* XMLElement::GetText() const
{
/* skip comment node */
const XMLNode* node = FirstChild();
while (node) {
if (node->ToComment()) {
node = node->NextSibling();
continue;
}
break;
}
if ( node && node->ToText() ) {
return node->Value();
}
return 0;
}
void XMLElement::SetText( const char* inText )
{
if ( FirstChild() && FirstChild()->ToText() )
FirstChild()->SetValue( inText );
else {
XMLText* theText = GetDocument()->NewText( inText );
InsertFirstChild( theText );
}
}
void XMLElement::SetText( int v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
SetText( buf );
}
void XMLElement::SetText( unsigned v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
SetText( buf );
}
void XMLElement::SetText(int64_t v)
{
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
SetText(buf);
}
void XMLElement::SetText(uint64_t v) {
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
SetText(buf);
}
void XMLElement::SetText( bool v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
SetText( buf );
}
void XMLElement::SetText( float v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
SetText( buf );
}
void XMLElement::SetText( double v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
SetText( buf );
}
XMLError XMLElement::QueryIntText( int* ival ) const
{
if ( FirstChild() && FirstChild()->ToText() ) {
const char* t = FirstChild()->Value();
if ( XMLUtil::ToInt( t, ival ) ) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryUnsignedText( unsigned* uval ) const
{
if ( FirstChild() && FirstChild()->ToText() ) {
const char* t = FirstChild()->Value();
if ( XMLUtil::ToUnsigned( t, uval ) ) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryInt64Text(int64_t* ival) const
{
if (FirstChild() && FirstChild()->ToText()) {
const char* t = FirstChild()->Value();
if (XMLUtil::ToInt64(t, ival)) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryUnsigned64Text(uint64_t* uval) const
{
if(FirstChild() && FirstChild()->ToText()) {
const char* t = FirstChild()->Value();
if(XMLUtil::ToUnsigned64(t, uval)) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryBoolText( bool* bval ) const
{
if ( FirstChild() && FirstChild()->ToText() ) {
const char* t = FirstChild()->Value();
if ( XMLUtil::ToBool( t, bval ) ) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryDoubleText( double* dval ) const
{
if ( FirstChild() && FirstChild()->ToText() ) {
const char* t = FirstChild()->Value();
if ( XMLUtil::ToDouble( t, dval ) ) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
XMLError XMLElement::QueryFloatText( float* fval ) const
{
if ( FirstChild() && FirstChild()->ToText() ) {
const char* t = FirstChild()->Value();
if ( XMLUtil::ToFloat( t, fval ) ) {
return XML_SUCCESS;
}
return XML_CAN_NOT_CONVERT_TEXT;
}
return XML_NO_TEXT_NODE;
}
int XMLElement::IntText(int defaultValue) const
{
int i = defaultValue;
QueryIntText(&i);
return i;
}
unsigned XMLElement::UnsignedText(unsigned defaultValue) const
{
unsigned i = defaultValue;
QueryUnsignedText(&i);
return i;
}
int64_t XMLElement::Int64Text(int64_t defaultValue) const
{
int64_t i = defaultValue;
QueryInt64Text(&i);
return i;
}
uint64_t XMLElement::Unsigned64Text(uint64_t defaultValue) const
{
uint64_t i = defaultValue;
QueryUnsigned64Text(&i);
return i;
}
bool XMLElement::BoolText(bool defaultValue) const
{
bool b = defaultValue;
QueryBoolText(&b);
return b;
}
double XMLElement::DoubleText(double defaultValue) const
{
double d = defaultValue;
QueryDoubleText(&d);
return d;
}
float XMLElement::FloatText(float defaultValue) const
{
float f = defaultValue;
QueryFloatText(&f);
return f;
}
XMLAttribute* XMLElement::FindOrCreateAttribute( const char* name )
{
XMLAttribute* last = 0;
XMLAttribute* attrib = 0;
for( attrib = _rootAttribute;
attrib;
last = attrib, attrib = attrib->_next ) {
if ( XMLUtil::StringEqual( attrib->Name(), name ) ) {
break;
}
}
if ( !attrib ) {
attrib = CreateAttribute();
TIXMLASSERT( attrib );
if ( last ) {
TIXMLASSERT( last->_next == 0 );
last->_next = attrib;
}
else {
TIXMLASSERT( _rootAttribute == 0 );
_rootAttribute = attrib;
}
attrib->SetName( name );
}
return attrib;
}
void XMLElement::DeleteAttribute( const char* name )
{
XMLAttribute* prev = 0;
for( XMLAttribute* a=_rootAttribute; a; a=a->_next ) {
if ( XMLUtil::StringEqual( name, a->Name() ) ) {
if ( prev ) {
prev->_next = a->_next;
}
else {
_rootAttribute = a->_next;
}
DeleteAttribute( a );
break;
}
prev = a;
}
}
char* XMLElement::ParseAttributes( char* p, int* curLineNumPtr )
{
XMLAttribute* prevAttribute = 0;
// Read the attributes.
while( p ) {
p = XMLUtil::SkipWhiteSpace( p, curLineNumPtr );
if ( !(*p) ) {
_document->SetError( XML_ERROR_PARSING_ELEMENT, _parseLineNum, "XMLElement name=%s", Name() );
return 0;
}
// attribute.
if (XMLUtil::IsNameStartChar( (unsigned char) *p ) ) {
XMLAttribute* attrib = CreateAttribute();
TIXMLASSERT( attrib );
attrib->_parseLineNum = _document->_parseCurLineNum;
const int attrLineNum = attrib->_parseLineNum;
p = attrib->ParseDeep( p, _document->ProcessEntities(), curLineNumPtr );
if ( !p || Attribute( attrib->Name() ) ) {
DeleteAttribute( attrib );
_document->SetError( XML_ERROR_PARSING_ATTRIBUTE, attrLineNum, "XMLElement name=%s", Name() );
return 0;
}
            // Note: a duplicated attribute in the source document is rejected by the
            // Attribute( attrib->Name() ) check above. Tracking 'prevAttribute' lets a
            // new attribute be appended without re-scanning the whole list.
if ( prevAttribute ) {
TIXMLASSERT( prevAttribute->_next == 0 );
prevAttribute->_next = attrib;
}
else {
TIXMLASSERT( _rootAttribute == 0 );
_rootAttribute = attrib;
}
prevAttribute = attrib;
}
// end of the tag
else if ( *p == '>' ) {
++p;
break;
}
// end of the tag
else if ( *p == '/' && *(p+1) == '>' ) {
_closingType = CLOSED;
return p+2; // done; sealed element.
}
else {
_document->SetError( XML_ERROR_PARSING_ELEMENT, _parseLineNum, 0 );
return 0;
}
}
return p;
}
void XMLElement::DeleteAttribute( XMLAttribute* attribute )
{
if ( attribute == 0 ) {
return;
}
MemPool* pool = attribute->_memPool;
attribute->~XMLAttribute();
pool->Free( attribute );
}
XMLAttribute* XMLElement::CreateAttribute()
{
TIXMLASSERT( sizeof( XMLAttribute ) == _document->_attributePool.ItemSize() );
XMLAttribute* attrib = new (_document->_attributePool.Alloc() ) XMLAttribute();
TIXMLASSERT( attrib );
attrib->_memPool = &_document->_attributePool;
attrib->_memPool->SetTracked();
return attrib;
}
XMLElement* XMLElement::InsertNewChildElement(const char* name)
{
XMLElement* node = _document->NewElement(name);
return InsertEndChild(node) ? node : 0;
}
XMLComment* XMLElement::InsertNewComment(const char* comment)
{
XMLComment* node = _document->NewComment(comment);
return InsertEndChild(node) ? node : 0;
}
XMLText* XMLElement::InsertNewText(const char* text)
{
XMLText* node = _document->NewText(text);
return InsertEndChild(node) ? node : 0;
}
XMLDeclaration* XMLElement::InsertNewDeclaration(const char* text)
{
XMLDeclaration* node = _document->NewDeclaration(text);
return InsertEndChild(node) ? node : 0;
}
XMLUnknown* XMLElement::InsertNewUnknown(const char* text)
{
XMLUnknown* node = _document->NewUnknown(text);
return InsertEndChild(node) ? node : 0;
}
//
// <ele></ele>
// <ele>foo<b>bar</b></ele>
//
char* XMLElement::ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr )
{
// Read the element name.
p = XMLUtil::SkipWhiteSpace( p, curLineNumPtr );
// The closing element is the </element> form. It is
// parsed just like a regular element then deleted from
// the DOM.
if ( *p == '/' ) {
_closingType = CLOSING;
++p;
}
p = _value.ParseName( p );
if ( _value.Empty() ) {
return 0;
}
p = ParseAttributes( p, curLineNumPtr );
if ( !p || !*p || _closingType != OPEN ) {
return p;
}
p = XMLNode::ParseDeep( p, parentEndTag, curLineNumPtr );
return p;
}
XMLNode* XMLElement::ShallowClone( XMLDocument* doc ) const
{
if ( !doc ) {
doc = _document;
}
XMLElement* element = doc->NewElement( Value() ); // fixme: this will always allocate memory. Intern?
for( const XMLAttribute* a=FirstAttribute(); a; a=a->Next() ) {
element->SetAttribute( a->Name(), a->Value() ); // fixme: this will always allocate memory. Intern?
}
return element;
}
bool XMLElement::ShallowEqual( const XMLNode* compare ) const
{
TIXMLASSERT( compare );
const XMLElement* other = compare->ToElement();
if ( other && XMLUtil::StringEqual( other->Name(), Name() )) {
const XMLAttribute* a=FirstAttribute();
const XMLAttribute* b=other->FirstAttribute();
while ( a && b ) {
if ( !XMLUtil::StringEqual( a->Value(), b->Value() ) ) {
return false;
}
a = a->Next();
b = b->Next();
}
if ( a || b ) {
// different count
return false;
}
return true;
}
return false;
}
bool XMLElement::Accept( XMLVisitor* visitor ) const
{
TIXMLASSERT( visitor );
if ( visitor->VisitEnter( *this, _rootAttribute ) ) {
for ( const XMLNode* node=FirstChild(); node; node=node->NextSibling() ) {
if ( !node->Accept( visitor ) ) {
break;
}
}
}
return visitor->VisitExit( *this );
}
// --------- XMLDocument ----------- //
// Warning: List must match 'enum XMLError'
const char* XMLDocument::_errorNames[XML_ERROR_COUNT] = {
"XML_SUCCESS",
"XML_NO_ATTRIBUTE",
"XML_WRONG_ATTRIBUTE_TYPE",
"XML_ERROR_FILE_NOT_FOUND",
"XML_ERROR_FILE_COULD_NOT_BE_OPENED",
"XML_ERROR_FILE_READ_ERROR",
"XML_ERROR_PARSING_ELEMENT",
"XML_ERROR_PARSING_ATTRIBUTE",
"XML_ERROR_PARSING_TEXT",
"XML_ERROR_PARSING_CDATA",
"XML_ERROR_PARSING_COMMENT",
"XML_ERROR_PARSING_DECLARATION",
"XML_ERROR_PARSING_UNKNOWN",
"XML_ERROR_EMPTY_DOCUMENT",
"XML_ERROR_MISMATCHED_ELEMENT",
"XML_ERROR_PARSING",
"XML_CAN_NOT_CONVERT_TEXT",
"XML_NO_TEXT_NODE",
"XML_ELEMENT_DEPTH_EXCEEDED"
};
XMLDocument::XMLDocument( bool processEntities, Whitespace whitespaceMode ) :
XMLNode( 0 ),
_writeBOM( false ),
_processEntities( processEntities ),
_errorID(XML_SUCCESS),
_whitespaceMode( whitespaceMode ),
_errorStr(),
_errorLineNum( 0 ),
_charBuffer( 0 ),
_parseCurLineNum( 0 ),
_parsingDepth(0),
_unlinked(),
_elementPool(),
_attributePool(),
_textPool(),
_commentPool()
{
// avoid VC++ C4355 warning about 'this' in initializer list (C4355 is off by default in VS2012+)
_document = this;
}
XMLDocument::~XMLDocument()
{
Clear();
}
void XMLDocument::MarkInUse(const XMLNode* const node)
{
TIXMLASSERT(node);
TIXMLASSERT(node->_parent == 0);
for (int i = 0; i < _unlinked.Size(); ++i) {
if (node == _unlinked[i]) {
_unlinked.SwapRemove(i);
break;
}
}
}
void XMLDocument::Clear()
{
DeleteChildren();
while( _unlinked.Size()) {
DeleteNode(_unlinked[0]); // Will remove from _unlinked as part of delete.
}
#ifdef TINYXML2_DEBUG
const bool hadError = Error();
#endif
ClearError();
delete [] _charBuffer;
_charBuffer = 0;
_parsingDepth = 0;
#if 0
_textPool.Trace( "text" );
_elementPool.Trace( "element" );
_commentPool.Trace( "comment" );
_attributePool.Trace( "attribute" );
#endif
#ifdef TINYXML2_DEBUG
if ( !hadError ) {
TIXMLASSERT( _elementPool.CurrentAllocs() == _elementPool.Untracked() );
TIXMLASSERT( _attributePool.CurrentAllocs() == _attributePool.Untracked() );
TIXMLASSERT( _textPool.CurrentAllocs() == _textPool.Untracked() );
TIXMLASSERT( _commentPool.CurrentAllocs() == _commentPool.Untracked() );
}
#endif
}
void XMLDocument::DeepCopy(XMLDocument* target) const
{
TIXMLASSERT(target);
if (target == this) {
return; // technically success - a no-op.
}
target->Clear();
for (const XMLNode* node = this->FirstChild(); node; node = node->NextSibling()) {
target->InsertEndChild(node->DeepClone(target));
}
}
XMLElement* XMLDocument::NewElement( const char* name )
{
XMLElement* ele = CreateUnlinkedNode<XMLElement>( _elementPool );
ele->SetName( name );
return ele;
}
XMLComment* XMLDocument::NewComment( const char* str )
{
XMLComment* comment = CreateUnlinkedNode<XMLComment>( _commentPool );
comment->SetValue( str );
return comment;
}
XMLText* XMLDocument::NewText( const char* str )
{
XMLText* text = CreateUnlinkedNode<XMLText>( _textPool );
text->SetValue( str );
return text;
}
XMLDeclaration* XMLDocument::NewDeclaration( const char* str )
{
XMLDeclaration* dec = CreateUnlinkedNode<XMLDeclaration>( _commentPool );
dec->SetValue( str ? str : "xml version=\"1.0\" encoding=\"UTF-8\"" );
return dec;
}
XMLUnknown* XMLDocument::NewUnknown( const char* str )
{
XMLUnknown* unk = CreateUnlinkedNode<XMLUnknown>( _commentPool );
unk->SetValue( str );
return unk;
}
static FILE* callfopen( const char* filepath, const char* mode )
{
TIXMLASSERT( filepath );
TIXMLASSERT( mode );
#if defined(_MSC_VER) && (_MSC_VER >= 1400 ) && (!defined WINCE)
FILE* fp = 0;
const errno_t err = fopen_s( &fp, filepath, mode );
if ( err ) {
return 0;
}
#else
FILE* fp = fopen( filepath, mode );
#endif
return fp;
}
void XMLDocument::DeleteNode( XMLNode* node ) {
TIXMLASSERT( node );
TIXMLASSERT(node->_document == this );
if (node->_parent) {
node->_parent->DeleteChild( node );
}
else {
// Isn't in the tree.
// Use the parent delete.
// Also, we need to mark it tracked: we 'know'
// it was never used.
node->_memPool->SetTracked();
// Call the static XMLNode version:
XMLNode::DeleteNode(node);
}
}
XMLError XMLDocument::LoadFile( const char* filename )
{
if ( !filename ) {
TIXMLASSERT( false );
SetError( XML_ERROR_FILE_COULD_NOT_BE_OPENED, 0, "filename=<null>" );
return _errorID;
}
Clear();
FILE* fp = callfopen( filename, "rb" );
if ( !fp ) {
SetError( XML_ERROR_FILE_NOT_FOUND, 0, "filename=%s", filename );
return _errorID;
}
LoadFile( fp );
fclose( fp );
return _errorID;
}
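// Typical usage sketch (illustrative; "test.xml" is a placeholder file name):
//   XMLDocument doc;
//   if ( doc.LoadFile( "test.xml" ) != XML_SUCCESS ) {
//       printf( "Load failed: %s\n", doc.ErrorStr() );
//   }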
XMLError XMLDocument::LoadFile( FILE* fp )
{
Clear();
TIXML_FSEEK( fp, 0, SEEK_SET );
if ( fgetc( fp ) == EOF && ferror( fp ) != 0 ) {
SetError( XML_ERROR_FILE_READ_ERROR, 0, 0 );
return _errorID;
}
TIXML_FSEEK( fp, 0, SEEK_END );
unsigned long long filelength;
{
const long long fileLengthSigned = TIXML_FTELL( fp );
TIXML_FSEEK( fp, 0, SEEK_SET );
if ( fileLengthSigned == -1L ) {
SetError( XML_ERROR_FILE_READ_ERROR, 0, 0 );
return _errorID;
}
TIXMLASSERT( fileLengthSigned >= 0 );
filelength = static_cast<unsigned long long>(fileLengthSigned);
}
const size_t maxSizeT = static_cast<size_t>(-1);
// We'll do the comparison as an unsigned long long, because that's guaranteed to be at
// least 8 bytes, even on a 32-bit platform.
if ( filelength >= static_cast<unsigned long long>(maxSizeT) ) {
// Cannot handle files which won't fit in buffer together with null terminator
SetError( XML_ERROR_FILE_READ_ERROR, 0, 0 );
return _errorID;
}
if ( filelength == 0 ) {
SetError( XML_ERROR_EMPTY_DOCUMENT, 0, 0 );
return _errorID;
}
const size_t size = static_cast<size_t>(filelength);
TIXMLASSERT( _charBuffer == 0 );
_charBuffer = new char[size+1];
const size_t read = fread( _charBuffer, 1, size, fp );
if ( read != size ) {
SetError( XML_ERROR_FILE_READ_ERROR, 0, 0 );
return _errorID;
}
_charBuffer[size] = 0;
Parse();
return _errorID;
}
XMLError XMLDocument::SaveFile( const char* filename, bool compact )
{
if ( !filename ) {
TIXMLASSERT( false );
SetError( XML_ERROR_FILE_COULD_NOT_BE_OPENED, 0, "filename=<null>" );
return _errorID;
}
FILE* fp = callfopen( filename, "w" );
if ( !fp ) {
SetError( XML_ERROR_FILE_COULD_NOT_BE_OPENED, 0, "filename=%s", filename );
return _errorID;
}
SaveFile(fp, compact);
fclose( fp );
return _errorID;
}
XMLError XMLDocument::SaveFile( FILE* fp, bool compact )
{
// Clear any error from the last save, otherwise it will get reported
// for *this* call.
ClearError();
XMLPrinter stream( fp, compact );
Print( &stream );
return _errorID;
}
XMLError XMLDocument::Parse( const char* xml, size_t nBytes )
{
Clear();
if ( nBytes == 0 || !xml || !*xml ) {
SetError( XML_ERROR_EMPTY_DOCUMENT, 0, 0 );
return _errorID;
}
if ( nBytes == static_cast<size_t>(-1) ) {
nBytes = strlen( xml );
}
TIXMLASSERT( _charBuffer == 0 );
_charBuffer = new char[ nBytes+1 ];
memcpy( _charBuffer, xml, nBytes );
_charBuffer[nBytes] = 0;
Parse();
if ( Error() ) {
// clean up now essentially dangling memory.
// and the parse fail can put objects in the
// pools that are dead and inaccessible.
DeleteChildren();
_elementPool.Clear();
_attributePool.Clear();
_textPool.Clear();
_commentPool.Clear();
}
return _errorID;
}
void XMLDocument::Print( XMLPrinter* streamer ) const
{
if ( streamer ) {
Accept( streamer );
}
else {
XMLPrinter stdoutStreamer( stdout );
Accept( &stdoutStreamer );
}
}
void XMLDocument::ClearError() {
_errorID = XML_SUCCESS;
_errorLineNum = 0;
_errorStr.Reset();
}
void XMLDocument::SetError( XMLError error, int lineNum, const char* format, ... )
{
TIXMLASSERT( error >= 0 && error < XML_ERROR_COUNT );
_errorID = error;
_errorLineNum = lineNum;
_errorStr.Reset();
const size_t BUFFER_SIZE = 1000;
char* buffer = new char[BUFFER_SIZE];
TIXMLASSERT(sizeof(error) <= sizeof(int));
TIXML_SNPRINTF(buffer, BUFFER_SIZE, "Error=%s ErrorID=%d (0x%x) Line number=%d", ErrorIDToName(error), int(error), int(error), lineNum);
if (format) {
size_t len = strlen(buffer);
TIXML_SNPRINTF(buffer + len, BUFFER_SIZE - len, ": ");
len = strlen(buffer);
va_list va;
va_start(va, format);
TIXML_VSNPRINTF(buffer + len, BUFFER_SIZE - len, format, va);
va_end(va);
}
_errorStr.SetStr(buffer);
delete[] buffer;
}
/*static*/ const char* XMLDocument::ErrorIDToName(XMLError errorID)
{
TIXMLASSERT( errorID >= 0 && errorID < XML_ERROR_COUNT );
const char* errorName = _errorNames[errorID];
TIXMLASSERT( errorName && errorName[0] );
return errorName;
}
const char* XMLDocument::ErrorStr() const
{
return _errorStr.Empty() ? "" : _errorStr.GetStr();
}
void XMLDocument::PrintError() const
{
printf("%s\n", ErrorStr());
}
const char* XMLDocument::ErrorName() const
{
return ErrorIDToName(_errorID);
}
void XMLDocument::Parse()
{
TIXMLASSERT( NoChildren() ); // Clear() must have been called previously
TIXMLASSERT( _charBuffer );
_parseCurLineNum = 1;
_parseLineNum = 1;
char* p = _charBuffer;
p = XMLUtil::SkipWhiteSpace( p, &_parseCurLineNum );
p = const_cast<char*>( XMLUtil::ReadBOM( p, &_writeBOM ) );
if ( !*p ) {
SetError( XML_ERROR_EMPTY_DOCUMENT, 0, 0 );
return;
}
ParseDeep(p, 0, &_parseCurLineNum );
}
void XMLDocument::PushDepth()
{
_parsingDepth++;
if (_parsingDepth == TINYXML2_MAX_ELEMENT_DEPTH) {
SetError(XML_ELEMENT_DEPTH_EXCEEDED, _parseCurLineNum, "Element nesting is too deep." );
}
}
void XMLDocument::PopDepth()
{
TIXMLASSERT(_parsingDepth > 0);
--_parsingDepth;
}
XMLPrinter::XMLPrinter( FILE* file, bool compact, int depth ) :
_elementJustOpened( false ),
_stack(),
_firstElement( true ),
_fp( file ),
_depth( depth ),
_textDepth( -1 ),
_processEntities( true ),
_compactMode( compact ),
_buffer()
{
for( int i=0; i<ENTITY_RANGE; ++i ) {
_entityFlag[i] = false;
_restrictedEntityFlag[i] = false;
}
for( int i=0; i<NUM_ENTITIES; ++i ) {
const char entityValue = entities[i].value;
const unsigned char flagIndex = static_cast<unsigned char>(entityValue);
TIXMLASSERT( flagIndex < ENTITY_RANGE );
_entityFlag[flagIndex] = true;
}
_restrictedEntityFlag[static_cast<unsigned char>('&')] = true;
_restrictedEntityFlag[static_cast<unsigned char>('<')] = true;
_restrictedEntityFlag[static_cast<unsigned char>('>')] = true; // not required, but consistency is nice
_buffer.Push( 0 );
}
void XMLPrinter::Print( const char* format, ... )
{
va_list va;
va_start( va, format );
if ( _fp ) {
vfprintf( _fp, format, va );
}
else {
const int len = TIXML_VSCPRINTF( format, va );
// Close out and re-start the va-args
va_end( va );
TIXMLASSERT( len >= 0 );
va_start( va, format );
TIXMLASSERT( _buffer.Size() > 0 && _buffer[_buffer.Size() - 1] == 0 );
char* p = _buffer.PushArr( len ) - 1; // back up over the null terminator.
TIXML_VSNPRINTF( p, len+1, format, va );
}
va_end( va );
}
void XMLPrinter::Write( const char* data, size_t size )
{
if ( _fp ) {
fwrite ( data , sizeof(char), size, _fp);
}
else {
char* p = _buffer.PushArr( static_cast<int>(size) ) - 1; // back up over the null terminator.
memcpy( p, data, size );
p[size] = 0;
}
}
void XMLPrinter::Putc( char ch )
{
if ( _fp ) {
fputc ( ch, _fp);
}
else {
char* p = _buffer.PushArr( sizeof(char) ) - 1; // back up over the null terminator.
p[0] = ch;
p[1] = 0;
}
}
void XMLPrinter::PrintSpace( int depth )
{
for( int i=0; i<depth; ++i ) {
Write( " " );
}
}
void XMLPrinter::PrintString( const char* p, bool restricted )
{
// Look for runs of bytes between entities to print.
const char* q = p;
if ( _processEntities ) {
const bool* flag = restricted ? _restrictedEntityFlag : _entityFlag;
while ( *q ) {
TIXMLASSERT( p <= q );
// Remember, char is sometimes signed. (How many times has that bitten me?)
if ( *q > 0 && *q < ENTITY_RANGE ) {
// Check for entities. If one is found, flush
// the stream up until the entity, write the
// entity, and keep looking.
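                    // e.g. while printing "a<b", the run "a" is flushed first, the
                    // "&lt;" entity is written, and scanning resumes at "b".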
if ( flag[static_cast<unsigned char>(*q)] ) {
while ( p < q ) {
const size_t delta = q - p;
const int toPrint = ( INT_MAX < delta ) ? INT_MAX : static_cast<int>(delta);
Write( p, toPrint );
p += toPrint;
}
bool entityPatternPrinted = false;
for( int i=0; i<NUM_ENTITIES; ++i ) {
if ( entities[i].value == *q ) {
Putc( '&' );
Write( entities[i].pattern, entities[i].length );
Putc( ';' );
entityPatternPrinted = true;
break;
}
}
if ( !entityPatternPrinted ) {
// TIXMLASSERT( entityPatternPrinted ) causes gcc -Wunused-but-set-variable in release
TIXMLASSERT( false );
}
++p;
}
}
++q;
TIXMLASSERT( p <= q );
}
// Flush the remaining string. This will be the entire
// string if an entity wasn't found.
if ( p < q ) {
const size_t delta = q - p;
const int toPrint = ( INT_MAX < delta ) ? INT_MAX : static_cast<int>(delta);
Write( p, toPrint );
}
}
else {
Write( p );
}
}
void XMLPrinter::PushHeader( bool writeBOM, bool writeDec )
{
if ( writeBOM ) {
static const unsigned char bom[] = { TIXML_UTF_LEAD_0, TIXML_UTF_LEAD_1, TIXML_UTF_LEAD_2, 0 };
Write( reinterpret_cast< const char* >( bom ) );
}
if ( writeDec ) {
PushDeclaration( "xml version=\"1.0\"" );
}
}
void XMLPrinter::PrepareForNewNode( bool compactMode )
{
SealElementIfJustOpened();
if ( compactMode ) {
return;
}
if ( _firstElement ) {
PrintSpace (_depth);
} else if ( _textDepth < 0) {
Putc( '\n' );
PrintSpace( _depth );
}
_firstElement = false;
}
void XMLPrinter::OpenElement( const char* name, bool compactMode )
{
PrepareForNewNode( compactMode );
_stack.Push( name );
Write ( "<" );
Write ( name );
_elementJustOpened = true;
++_depth;
}
void XMLPrinter::PushAttribute( const char* name, const char* value )
{
TIXMLASSERT( _elementJustOpened );
Putc ( ' ' );
Write( name );
Write( "=\"" );
PrintString( value, false );
Putc ( '\"' );
}
void XMLPrinter::PushAttribute( const char* name, int v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
PushAttribute( name, buf );
}
void XMLPrinter::PushAttribute( const char* name, unsigned v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
PushAttribute( name, buf );
}
void XMLPrinter::PushAttribute(const char* name, int64_t v)
{
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
PushAttribute(name, buf);
}
void XMLPrinter::PushAttribute(const char* name, uint64_t v)
{
char buf[BUF_SIZE];
XMLUtil::ToStr(v, buf, BUF_SIZE);
PushAttribute(name, buf);
}
void XMLPrinter::PushAttribute( const char* name, bool v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
PushAttribute( name, buf );
}
void XMLPrinter::PushAttribute( const char* name, double v )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( v, buf, BUF_SIZE );
PushAttribute( name, buf );
}
void XMLPrinter::CloseElement( bool compactMode )
{
--_depth;
const char* name = _stack.Pop();
if ( _elementJustOpened ) {
Write( "/>" );
}
else {
if ( _textDepth < 0 && !compactMode) {
Putc( '\n' );
PrintSpace( _depth );
}
Write ( "</" );
Write ( name );
Write ( ">" );
}
if ( _textDepth == _depth ) {
_textDepth = -1;
}
if ( _depth == 0 && !compactMode) {
Putc( '\n' );
}
_elementJustOpened = false;
}
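// Usage sketch for the printer API above (illustrative; the element, attribute and
// text values are placeholders). Constructed without a FILE*, output accumulates in
// the internal buffer and is available via CStr():
//   XMLPrinter printer;
//   printer.OpenElement( "screen" );
//   printer.PushAttribute( "id", 1 );
//   printer.PushText( "hello" );
//   printer.CloseElement();
//   // buffer now holds: <screen id="1">hello</screen> (plus a trailing newline)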
void XMLPrinter::SealElementIfJustOpened()
{
if ( !_elementJustOpened ) {
return;
}
_elementJustOpened = false;
Putc( '>' );
}
void XMLPrinter::PushText( const char* text, bool cdata )
{
_textDepth = _depth-1;
SealElementIfJustOpened();
if ( cdata ) {
Write( "<![CDATA[" );
Write( text );
Write( "]]>" );
}
else {
PrintString( text, true );
}
}
void XMLPrinter::PushText( int64_t value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushText( uint64_t value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr(value, buf, BUF_SIZE);
PushText(buf, false);
}
void XMLPrinter::PushText( int value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushText( unsigned value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushText( bool value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushText( float value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushText( double value )
{
char buf[BUF_SIZE];
XMLUtil::ToStr( value, buf, BUF_SIZE );
PushText( buf, false );
}
void XMLPrinter::PushComment( const char* comment )
{
PrepareForNewNode( _compactMode );
Write( "<!--" );
Write( comment );
Write( "-->" );
}
void XMLPrinter::PushDeclaration( const char* value )
{
PrepareForNewNode( _compactMode );
Write( "<?" );
Write( value );
Write( "?>" );
}
void XMLPrinter::PushUnknown( const char* value )
{
PrepareForNewNode( _compactMode );
Write( "<!" );
Write( value );
Putc( '>' );
}
bool XMLPrinter::VisitEnter( const XMLDocument& doc )
{
_processEntities = doc.ProcessEntities();
if ( doc.HasBOM() ) {
PushHeader( true, false );
}
return true;
}
bool XMLPrinter::VisitEnter( const XMLElement& element, const XMLAttribute* attribute )
{
const XMLElement* parentElem = 0;
if ( element.Parent() ) {
parentElem = element.Parent()->ToElement();
}
const bool compactMode = parentElem ? CompactMode( *parentElem ) : _compactMode;
OpenElement( element.Name(), compactMode );
while ( attribute ) {
PushAttribute( attribute->Name(), attribute->Value() );
attribute = attribute->Next();
}
return true;
}
bool XMLPrinter::VisitExit( const XMLElement& element )
{
CloseElement( CompactMode(element) );
return true;
}
bool XMLPrinter::Visit( const XMLText& text )
{
PushText( text.Value(), text.CData() );
return true;
}
bool XMLPrinter::Visit( const XMLComment& comment )
{
PushComment( comment.Value() );
return true;
}
bool XMLPrinter::Visit( const XMLDeclaration& declaration )
{
PushDeclaration( declaration.Value() );
return true;
}
bool XMLPrinter::Visit( const XMLUnknown& unknown )
{
PushUnknown( unknown.Value() );
return true;
}
} // namespace tinyxml2 | 75,034 | C++ | 24.06179 | 140 | 0.559013 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiData.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <pxr/base/plug/plugin.h>
#include <pxr/base/plug/registry.h>
#include <pxr/base/tf/token.h>
#include <pxr/usd/sdf/schema.h>
#include "mpcdiData.h"
#include "mpcdiDataProviderFactory.h"
#include "mpcdiPluginManager.h"
#include <iostream>
PXR_NAMESPACE_OPEN_SCOPE
static const SdfPath ROOT_PATH("/");
static const SdfPath DATA_ROOT_PATH("/Data");
TF_DEFINE_PUBLIC_TOKENS(
EdfDataParametersTokens,
// plugin metadata to specify an id for a specific data provider
(dataProviderId)
// plugin metadata describing the arguments for the provider to use
// to load the layer
(providerArgs)
);
EdfDataParameters EdfDataParameters::FromFileFormatArgs(const SdfFileFormat::FileFormatArguments& args)
{
EdfDataParameters parameters;
parameters.dataProviderId = *(TfMapLookupPtr(args, EdfDataParametersTokens->dataProviderId));
// unpack the file format argument representation of the provider arguments
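	// e.g. (illustrative argument names) { "dataProviderId" : "mpcdi",
	// "providerArgs:fileName" : "show.mpcdi" } yields dataProviderId == "mpcdi"
	// and providerArgs == { "fileName" : "show.mpcdi" }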
std::string prefix = EdfDataParametersTokens->providerArgs.GetString() + ":";
size_t prefixLength = prefix.length();
for (SdfFileFormat::FileFormatArguments::const_iterator it = args.begin(); it != args.end(); it++)
{
size_t index = it->first.find(prefix);
if (index == 0)
{
// this is an unpacked prefixed provider argument
parameters.providerArgs[it->first.substr(prefixLength)] = it->second;
}
}
return parameters;
}
EdfSourceData::EdfSourceData(EdfData* data)
{
this->_data = data;
}
EdfSourceData::~EdfSourceData()
{
this->_data = nullptr;
}
void EdfSourceData::CreatePrim(const SdfPath& parentPath, const std::string& name, const SdfSpecifier& specifier,
const TfToken& typeName)
{
if (this->_data != nullptr)
{
this->_data->_CreatePrim(parentPath, name, specifier, typeName);
}
}
void EdfSourceData::CreateAttribute(const SdfPath& parentPrimPath, const std::string& name, const SdfValueTypeName& typeName,
const SdfVariability& variability, const VtValue& value)
{
if (this->_data != nullptr)
{
this->_data->_CreateAttribute(parentPrimPath, name, typeName, variability, value);
}
}
void EdfSourceData::SetField(const SdfPath& primPath, const TfToken& fieldName, const VtValue& value)
{
if (this->_data != nullptr)
{
this->_data->_SetFieldValue(primPath, fieldName, value);
}
}
bool EdfSourceData::HasField(const SdfPath& primPath, const TfToken& fieldName, VtValue* value)
{
if (this ->_data != nullptr)
{
return this->_data->Has(primPath, fieldName, value);
}
return false;
}
bool EdfSourceData::HasAttribute(const SdfPath& attributePath, VtValue* defaultValue)
{
if (this->_data != nullptr)
{
return this->_data->Has(attributePath, SdfFieldKeys->Default, defaultValue);
}
return false;
}
EdfData::EdfData(std::unique_ptr<IEdfDataProvider> dataProvider)
{
this->_dataProvider = std::move(dataProvider);
this->_sourceData = std::make_shared<EdfSourceData>(this);
}
EdfDataRefPtr EdfData::CreateFromParameters(const EdfDataParameters& parameters)
{
std::unique_ptr<IEdfDataProvider> dataProvider = MPCDIPluginManager::GetInstance().CreateDataProvider(parameters.dataProviderId, parameters);
if (dataProvider == nullptr)
{
// there was no provider responsible for this data or it didn't load properly,
		// so the best we can do is provide an empty EdfData object with no backing provider;
		// this will load nothing except an empty default Root prim
return TfCreateRefPtr(new EdfData(nullptr));
}
return TfCreateRefPtr(new EdfData(std::move(dataProvider)));
}
void EdfData::CreateSpec(const SdfPath& path, SdfSpecType specType)
{
// not supported in this PoC
// the data provider can create new prim / property specs
// via the callbacks, but the external public API cannot
// but if it were, here's how it would be
// done concurrently
/*
this->_CreateSpec(path, specType);
*/
}
void EdfData::Erase(const SdfPath& path, const TfToken& fieldName)
{
// not supported in this PoC
// but if it were, here's how it would be
// done concurrently
/*
SpecData::accessor accessor;
if (_specData.find(accessor, path))
{
_SpecData& spec = accessor->second;
size_t fieldSize = spec.fields.size();
for (size_t i = 0; i < fieldSize; i++)
{
if (spec.fields[i].first == fieldName)
{
spec.fields.erase(spec.fields.begin() + i);
accessor.release();
return;
}
}
}
accessor.release();
*/
}
void EdfData::EraseSpec(const SdfPath& path)
{
// not supported in this PoC
	// but if it were, here's how we'd do it
// with the concurrent hash
/*
SpecData::const_accessor accessor;
if (_specData.find(accessor, path))
{
_specData.erase(accessor);
}
accessor.release();
*/
}
VtValue EdfData::Get(const SdfPath& path, const TfToken& fieldName) const
{
VtValue val;
this->Has(path, fieldName, &val);
return val;
}
SdfSpecType EdfData::GetSpecType(const SdfPath& path) const
{
	// in all cases we either already have the spec data available
	// (because we created e.g. the root during Read, or because the data provider
	// created prims / properties when it performed its Read), or we don't know
	// the spec type at all
SpecData::const_accessor accessor;
if (_specData.find(accessor, path))
{
return accessor->second.specType;
}
accessor.release();
return SdfSpecType::SdfSpecTypeUnknown;
}
bool EdfData::Has(const SdfPath& path, const TfToken& fieldName, SdfAbstractDataValue* value) const
{
if (value != nullptr)
{
VtValue val;
if (this->Has(path, fieldName, &val))
{
return value->StoreValue(val);
}
}
else
{
VtValue val;
return this->Has(path, fieldName, &val);
}
return false;
}
bool EdfData::Has(const SdfPath& path, const TfToken& fieldName, VtValue* value) const
{
// in general, we can just get the value for whatever is being asked for
// from the hash (and know whether it was there or not)
// children are a special case, because those we want to ask the back-end
// provider to load - one tricky bit is understanding when we want the data provider
// to load and when we want to use the cached value
// as a general rule, if SdfChildrenKeys isn't present in the list of fields
// for the prim, but we have the prim, we need to ask the data provider to load
	// for simplicity's sake, this is a one-time load -> the data provider will use
// the callbacks to insert the children prims / attributes
// if we asked the data provider to load the children, and after that the field
// still isn't present, then we insert the field with an empty list since
// the provider never created any children (maybe the back-end query returned nothing)
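	// Rough flow sketch (provider rooted under /Data; names illustrative):
	//   Has("/Data", primChildren) -> not cached -> _dataProvider->ReadChildren("/Data")
	//   the provider calls back into _CreatePrim(...), which caches the children fields
	//   re-check the cache -> hit, or cache an empty child list if nothing was created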
bool hasValue = this->_GetFieldValue(path, fieldName, value);
if (!hasValue && fieldName == SdfChildrenKeys->PrimChildren &&
this->_dataProvider != nullptr)
{
// give the data provider an opportunity to load their children
this->_dataProvider->ReadChildren(path.GetAsString(), this->_sourceData);
// after the read call, we check again to see if it's present
hasValue = this->_GetFieldValue(path, fieldName, value);
if (!hasValue)
{
// if it still doesn't exist, we assume that there were no children
// and we cache that fact now
TfTokenVector primChildren;
VtValue primChildrenValue(primChildren);
this->_SetFieldValue(path, SdfChildrenKeys->PrimChildren, primChildrenValue);
if(value != nullptr)
{
*value = primChildrenValue;
}
hasValue = true;
}
}
return hasValue;
}
bool EdfData::HasSpec(const SdfPath& path) const
{
return this->GetSpecType(path) != SdfSpecType::SdfSpecTypeUnknown;
}
bool EdfData::IsEmpty() const
{
return false;
}
std::vector<TfToken> EdfData::List(const SdfPath& path) const
{
TfTokenVector names;
SpecData::const_accessor accessor;
if (_specData.find(accessor, path))
{
size_t numFields = accessor->second.fields.size();
names.resize(numFields);
for (size_t i = 0; i < numFields; i++)
{
names[i] = accessor->second.fields[i].first;
}
}
accessor.release();
return names;
}
void EdfData::MoveSpec(const SdfPath& oldPath, const SdfPath& newPath)
{
// not supported in this PoC
	// but if it were, here's how we'd do it
// with the concurrent hash
/*
SpecData::accessor accessor;
	if (_specData.find(accessor, oldPath))
{
SpecData::accessor writeAccessor;
_specData.insert(writeAccessor, newPath);
writeAccessor->second = accessor->second;
writeAccessor.release();
_specData.erase(accessor);
}
accessor.release();
*/
}
void EdfData::Set(const SdfPath& path, const TfToken& fieldName, const VtValue& value)
{
// not supported in this PoC
	// but if it were, here's how we'd do it
// with the concurrent hash
/*
this->_SetFieldValue(path, fieldName, value);
*/
}
void EdfData::Set(const SdfPath& path, const TfToken& fieldName, const SdfAbstractDataConstValue& value)
{
// not supported in this PoC
	// but if it were, here's how we'd do it
// with the concurrent hash
/*
VtValue wrappedValue;
value.GetValue(&wrappedValue);
this->_SetFieldValue(path, fieldName, wrappedValue);
*/
}
bool EdfData::StreamsData() const
{
// by default, we assume the backing provider will stream data
// but it will tell us whether it has cached that data or not later
return true;
}
bool EdfData::IsDetached() const
{
if (this->_dataProvider != nullptr)
{
return this->_dataProvider->IsDataCached();
}
else
{
return SdfAbstractData::IsDetached();
}
}
std::set<double> EdfData::ListAllTimeSamples() const
{
// not supported in this POC
return std::set<double>();
}
std::set<double> EdfData::ListTimeSamplesForPath(const SdfPath& path) const
{
// not supported in this POC
return std::set<double>();
}
bool EdfData::GetBracketingTimeSamples(double time, double* tLower, double* tUpper) const
{
// not supported in this POC
return false;
}
size_t EdfData::GetNumTimeSamplesForPath(const SdfPath& path) const
{
// not supported in this POC
return 0;
}
bool EdfData::GetBracketingTimeSamplesForPath(const SdfPath& path, double time, double* tLower, double* tUpper) const
{
// not supported in this POC
return false;
}
bool EdfData::QueryTimeSample(const SdfPath& path, double time, VtValue* optionalValue) const
{
// not supported in this POC
return false;
}
bool EdfData::QueryTimeSample(const SdfPath& path, double time, SdfAbstractDataValue* optionalValue) const
{
// not supported in this POC
return false;
}
void EdfData::SetTimeSample(const SdfPath& path, double time, const VtValue& value)
{
// not supported in this POC
}
void EdfData::EraseTimeSample(const SdfPath& path, double time)
{
// not supported in this POC
}
void EdfData::_VisitSpecs(SdfAbstractDataSpecVisitor* visitor) const
{
// not supported in this POC
}
bool EdfData::Read()
{
// on first read, create the specs for the absolute root path and
// for the /Data path where the provider will root their data
SpecData::accessor accessor;
_specData.insert(accessor, SdfPath::AbsoluteRootPath());
accessor->second.specType = SdfSpecType::SdfSpecTypePseudoRoot;
accessor.release();
// insert known field names for the root path
// this includes at minimum:
// SdfFieldKeys->DefaultPrim
// SdfChildrenKeys->PrimChildren
TfTokenVector rootChildren({DATA_ROOT_PATH.GetNameToken()});
VtValue primChildrenValue(rootChildren);
VtValue defaultPrimValue(DATA_ROOT_PATH.GetNameToken());
this->_SetFieldValue(ROOT_PATH, SdfChildrenKeys->PrimChildren, primChildrenValue);
this->_SetFieldValue(ROOT_PATH, SdfFieldKeys->DefaultPrim, defaultPrimValue);
// insert the data root path
_specData.insert(accessor, DATA_ROOT_PATH);
accessor->second.specType = SdfSpecType::SdfSpecTypePrim;
accessor.release();
// insert known field names for the data root path
// this includes at minimum:
// SdfFieldKeys->Specifier
// SdfFieldKeys->TypeName
// SdfFieldKeys->PrimChildren
// SdfFieldKeys->PropertyChildren
// prim children is loaded on demand during both deferred
// and non-deferred reads, so we don't set it here
TfTokenVector dataRootPropertyChildren;
VtValue specifierValue(SdfSpecifier::SdfSpecifierDef);
VtValue typeNameValue;
VtValue dataRootPropertyChildrenValue(dataRootPropertyChildren);
this->_SetFieldValue(DATA_ROOT_PATH, SdfFieldKeys->Specifier, specifierValue);
this->_SetFieldValue(DATA_ROOT_PATH, SdfFieldKeys->TypeName, typeNameValue);
this->_SetFieldValue(DATA_ROOT_PATH, SdfChildrenKeys->PropertyChildren, dataRootPropertyChildrenValue);
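	// At this point the cached skeleton is roughly (sketch only):
	//   pseudo-root: defaultPrim = "Data", primChildren = ["Data"]
	//   /Data      : def prim, empty typeName, no properties yet; its prim children
	//                are loaded on demand through the provider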
	// if we have a valid provider, ask it to read its data based on what parameters
// it was initialized with, otherwise just return true because we only have an empty
// root in the default implementation
bool readResult = true;
if (this->_dataProvider != nullptr)
{
readResult = this->_dataProvider->Read(this->_sourceData);
}
return readResult;
}
void EdfData::_CreatePrim(const SdfPath& parentPath, const std::string& name,
const SdfSpecifier& specifier, const TfToken& typeName)
{
SdfPath primPath = SdfPath(parentPath.GetAsString() + "/" + name);
this->_CreateSpec(primPath, SdfSpecType::SdfSpecTypePrim);
this->_SetFieldValue(primPath, SdfFieldKeys->TypeName, VtValue(typeName));
this->_SetFieldValue(primPath, SdfFieldKeys->Specifier, VtValue(specifier));
// add this prim to the PrimChildren property of parentPath
VtValue existingPrimChildrenValue;
if (this->_GetFieldValue(parentPath, SdfChildrenKeys->PrimChildren, &existingPrimChildrenValue))
{
// there are already children present, so append to the list
TfTokenVector existingChildren = existingPrimChildrenValue.UncheckedGet<TfTokenVector>();
existingChildren.push_back(TfToken(name));
// set the value back
this->_SetFieldValue(parentPath, SdfChildrenKeys->PrimChildren, VtValue(existingChildren));
}
else
{
// no children present yet
TfTokenVector children;
children.push_back(TfToken(name));
this->_SetFieldValue(parentPath, SdfChildrenKeys->PrimChildren, VtValue(children));
}
}
void EdfData::_CreateAttribute(const SdfPath& primPath, const std::string& name,
const SdfValueTypeName& typeName, const SdfVariability& variability, const VtValue& value)
{
// creating an attribute means setting the attribute path
// which is a combination of the prim path and the attribute name
// the type name field key of the attribute
// the variability field key of the attribute
// and a default field key holding its value
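	// e.g. (illustrative values) _CreateAttribute("/Data/Screen", "width",
	// SdfValueTypeNames->Float, SdfVariabilityVarying, VtValue(2.5f)) creates the
	// spec "/Data/Screen.width" with its typeName, variability, and default set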
SdfPath attributePath = SdfPath(primPath.GetAsString() + "." + name);
this->_CreateSpec(attributePath, SdfSpecType::SdfSpecTypeAttribute);
this->_SetFieldValue(attributePath, SdfFieldKeys->TypeName, VtValue(typeName));
this->_SetFieldValue(attributePath, SdfFieldKeys->Variability, VtValue(variability));
this->_SetFieldValue(attributePath, SdfFieldKeys->Default, value);
// add this attribute to PropertyChildren of primPath
VtValue existingPropertyChildrenValue;
if (this->_GetFieldValue(primPath, SdfChildrenKeys->PropertyChildren, &existingPropertyChildrenValue))
{
// there are already children present, so append to the list
TfTokenVector existingChildren = existingPropertyChildrenValue.UncheckedGet<TfTokenVector>();
existingChildren.push_back(TfToken(name));
// set the value back
this->_SetFieldValue(primPath, SdfChildrenKeys->PropertyChildren, VtValue(existingChildren));
}
else
{
// no children present yet
TfTokenVector children;
children.push_back(TfToken(name));
this->_SetFieldValue(primPath, SdfChildrenKeys->PropertyChildren, VtValue(children));
}
}
void EdfData::_CreateSpec(const SdfPath& path, const SdfSpecType& specType)
{
SpecData::accessor accessor;
if (_specData.find(accessor, path))
{
accessor->second.specType = specType;
}
else
{
_specData.insert(accessor, path);
accessor->second.specType = specType;
}
accessor.release();
}
bool EdfData::_GetSpecTypeAndFieldValue(const SdfPath& path,
const TfToken& fieldName, SdfSpecType* specType, VtValue* value) const
{
// specType and value can be nullptrs here - this just means
// we want to know if we have the field at all for a possible
// subsequent call in the future
if (specType != nullptr)
{
*specType = SdfSpecTypeUnknown;
}
SpecData::const_accessor accessor;
if (_specData.find(accessor, path))
{
const _SpecData &spec = accessor->second;
if (specType != nullptr)
{
*specType = spec.specType;
}
for (auto const& f: spec.fields)
{
if (f.first == fieldName)
{
// copy so that we don't give
// back a direct pointer to a released
// accessor
if (value != nullptr)
{
					*value = f.second;
}
accessor.release();
return true;
}
}
}
accessor.release();
return false;
}
bool EdfData::_GetFieldValue(const SdfPath& path,
const TfToken& fieldName, VtValue* value) const
{
// value can be a nullptr here - this just means
// we want to know if we have the field at all for a
// possible subsequent call in the future
SpecData::const_accessor accessor;
if (_specData.find(accessor, path))
{
const _SpecData &spec = accessor->second;
for (auto const& f: spec.fields)
{
if (f.first == fieldName)
{
// copy so that we don't give
// back a direct pointer to a released
// accessor
if (value != nullptr)
{
*value = f.second;
}
accessor.release();
return true;
}
}
}
accessor.release();
return false;
}
void EdfData::_SetFieldValue(const SdfPath& path, const TfToken& fieldName, const VtValue& value)
{
// NOTE: if we ever wanted to add support for querying whether
// the backend data provider could support writes, we should
// query that here and ask them to write to their backing data store
SpecData::accessor accessor;
if (_specData.find(accessor, path))
{
_SpecData& spec = accessor->second;
for (auto &f: spec.fields)
{
if (f.first == fieldName)
{
f.second = value;
accessor.release();
return;
}
}
// if we get here, we didn't have the field yet so create it
spec.fields.emplace_back(std::piecewise_construct,
std::forward_as_tuple(fieldName),
std::forward_as_tuple());
spec.fields.back().second = value;
accessor.release();
return;
}
accessor.release();
}
void EdfData::_SetFieldValue(const SdfPath& path, const TfToken& fieldName, const VtValue& value) const
{
// NOTE: if we ever wanted to add support for querying whether
// the backend data provider could support writes, we should
// query that here and ask them to write to their backing data store
SpecData::accessor accessor;
if (_specData.find(accessor, path))
{
_SpecData& spec = accessor->second;
for (auto& f : spec.fields)
{
if (f.first == fieldName)
{
f.second = value;
accessor.release();
return;
}
}
// if we get here, we didn't have the field yet so create it
spec.fields.emplace_back(std::piecewise_construct,
std::forward_as_tuple(fieldName),
std::forward_as_tuple());
spec.fields.back().second = value;
accessor.release();
return;
}
accessor.release();
}
PXR_NAMESPACE_CLOSE_SCOPE
| 20,789 | C++ | 28.240506 | 142 | 0.68416 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiFileFormat.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mpcdiFileFormat.h"
#include "tinyxml2.h"
#include <pxr/pxr.h>
#include <pxr/base/tf/diagnostic.h>
#include <pxr/base/tf/stringUtils.h>
#include <pxr/base/tf/token.h>
#include <pxr/usd/usd/prim.h>
#include <pxr/usd/usd/stage.h>
#include <pxr/usd/usd/usdaFileFormat.h>
#include <pxr/usd/usdGeom/mesh.h>
#include <pxr/usd/usdGeom/scope.h>
#include <pxr/usd/usdGeom/camera.h>
#include <pxr/usd/usdGeom/cube.h>
#include <pxr/usd/usdGeom/xformable.h>
#include <pxr/usd/usdGeom/xform.h>
#include <pxr/usd/usdLux/rectLight.h>
#include <pxr/base/gf/matrix3f.h>
#include <pxr/base/gf/vec3f.h>
#include <fstream>
#include <cmath>
PXR_NAMESPACE_OPEN_SCOPE
MpcdiFileFormat::MpcdiFileFormat() : SdfFileFormat(
MpcdiFileFormatTokens->Id,
MpcdiFileFormatTokens->Version,
MpcdiFileFormatTokens->Target,
MpcdiFileFormatTokens->Extension)
{
}
MpcdiFileFormat::~MpcdiFileFormat()
{
}
static const double defaultSideLengthValue = 1.0;
static double _ExtractSideLengthFromContext(const PcpDynamicFileFormatContext& context)
{
// Default sideLength.
double sideLength = defaultSideLengthValue;
VtValue value;
if (!context.ComposeValue(MpcdiFileFormatTokens->SideLength,
&value) ||
value.IsEmpty()) {
return sideLength;
}
if (!value.IsHolding<double>()) {
return sideLength;
}
return value.UncheckedGet<double>();
}
static double
_ExtractSideLengthFromArgs(const SdfFileFormat::FileFormatArguments& args)
{
// Default sideLength.
double sideLength = defaultSideLengthValue;
// Find "sideLength" file format argument.
auto it = args.find(MpcdiFileFormatTokens->SideLength);
if (it == args.end()) {
return sideLength;
}
// Try to convert the string value to the actual output value type.
double extractVal;
bool success = true;
extractVal = TfUnstringify<double>(it->second, &success);
if (!success) {
return sideLength;
}
sideLength = extractVal;
return sideLength;
}
bool MpcdiFileFormat::CanRead(const std::string& filePath) const
{
return true;
}
static float GetXMLFloat(tinyxml2::XMLElement* node, const std::string key)
{
return std::stof(node->FirstChildElement(key.c_str())->GetText());
}
static std::string CleanNameForUSD(const std::string& name)
{
std::string cleanedName = name;
if(cleanedName.size() == 0)
{
return "Default";
}
if(cleanedName.size() == 1 && !TfIsValidIdentifier(cleanedName))
{
// If we have an index as a name, we only need to add _ beforehand.
return CleanNameForUSD("_" + cleanedName);
}
return TfMakeValidIdentifier(cleanedName);
}
bool MpcdiFileFormat::Read(SdfLayer* layer, const std::string& resolvedPath, bool metadataOnly) const
{
// these macros emit methods defined in the Pixar namespace
// but not properly scoped, so we have to use the namespace
// locally here - note this isn't strictly true since we had to open
// the namespace scope anyway because the macros won't allow non-Pixar namespaces
// to be used because of some auto-generated content
// Read file, file exists?
const std::ifstream filePath(resolvedPath);
if(!filePath.good())
{
TF_CODING_ERROR("File doesn't exist with resolved path: " + resolvedPath);
return false;
}
// Read XML file
tinyxml2::XMLDocument doc;
const tinyxml2::XMLError xmlReadSuccess = doc.LoadFile(resolvedPath.c_str());
if(xmlReadSuccess != 0)
{
TF_CODING_ERROR("Failed to load xml file: " + resolvedPath);
return false;
}
// Parsing of MPCDI data
tinyxml2::XMLElement* rootNode = doc.RootElement();
if(rootNode == nullptr)
{
TF_CODING_ERROR("XML Root node is null: " + resolvedPath);
return false;
}
// Create a new anonymous layer and wrap a stage around it.
SdfLayerRefPtr newLayer = SdfLayer::CreateAnonymous(".usd");
UsdStageRefPtr stage = UsdStage::Open(newLayer);
const auto& xformPath = SdfPath("/mpcdi_payload");
auto mpdiScope = UsdGeomXform::Define(stage, xformPath);
stage->SetDefaultPrim(mpdiScope.GetPrim());
auto displayNode = rootNode->FirstChildElement("display");
for(auto* buffer = displayNode->FirstChildElement("buffer"); buffer != nullptr; buffer = buffer->NextSiblingElement("buffer"))
{
const std::string bufferId = std::string(buffer->Attribute("id"));
std::string bufferIdentifier = CleanNameForUSD(bufferId);
SdfPath bufferPath = xformPath.AppendChild(TfToken(bufferIdentifier));
auto bufferScope = UsdGeomScope::Define(stage, bufferPath);
// Get region
for(auto* regionNode = buffer->FirstChildElement("region"); regionNode != nullptr; regionNode = regionNode->NextSiblingElement("region"))
{
const std::string regionId = std::string(regionNode->Attribute("id"));
const std::string cleanedRegionId = CleanNameForUSD(regionId);
SdfPath regionPath = bufferPath.AppendChild(TfToken(cleanedRegionId));
// Get Frustum
auto frustumNode = regionNode->FirstChildElement("frustum");
const auto frustumYaw = GetXMLFloat(frustumNode, "yaw") * -1.0f;
const auto frustumPitch = GetXMLFloat(frustumNode, "pitch");
const auto frustumRoll = GetXMLFloat(frustumNode, "roll");
const auto frustumRightAngle = GetXMLFloat(frustumNode, "rightAngle");
const auto frustumLeftAngle = GetXMLFloat(frustumNode, "leftAngle");
const auto frustumUpAngle = GetXMLFloat(frustumNode, "upAngle");
const auto frustumDownAngle = GetXMLFloat(frustumNode, "downAngle");
constexpr const float toRad = 3.14159265358979323846 / 180.0;
constexpr const float focalLength = 10.0f;
constexpr const float focusDistance = 2000.0f;
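			// Derive a pinhole-camera film back from the frustum half-angles: the tangents of the
			// left/right/up/down angles give the image-plane extents at unit distance, scaled by the
			// fixed focal length to get horizontal/vertical apertures; the asymmetry between opposite
			// angles becomes a lens shift, expressed as an aperture offset.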
const float tanRight = std::tan(frustumRightAngle * toRad);
const float tanLeft = std::tan(frustumLeftAngle * toRad);
const float tanUp = std::tan(frustumUpAngle * toRad);
const float tanDown = std::tan(frustumDownAngle * toRad);
const float apertureH = (std::abs(tanRight) + std::abs(tanLeft)) * focalLength;
const float apertureV = (std::abs(tanUp) + std::abs(tanDown)) * focalLength;
const float lightWidth = std::abs(tanRight) + std::abs(tanLeft);
const float lightHeight = std::abs(tanUp) + std::abs(tanDown);
const float lensShiftH = (tanLeft + tanRight) / (tanLeft - tanRight);
const float lensShiftV = (tanUp + tanDown) / (tanUp - tanDown);
const float apertureOffsetH = lensShiftH * apertureH / 2.0;
const float apertureOffsetV = lensShiftV * apertureV / 2.0;
// Coordinate frame
const float posScaling = 10.0f;
auto coordFrameNode = regionNode->FirstChildElement("coordinateFrame");
const auto posX = GetXMLFloat(coordFrameNode, "posx") * posScaling;
const auto posY = GetXMLFloat(coordFrameNode, "posy") * posScaling;
const auto posZ = GetXMLFloat(coordFrameNode, "posz") * posScaling;
const auto yawX = GetXMLFloat(coordFrameNode, "yawx");
const auto yawY = GetXMLFloat(coordFrameNode, "yawy");
const auto yawZ = GetXMLFloat(coordFrameNode, "yawz");
const auto pitchX = GetXMLFloat(coordFrameNode, "pitchx");
const auto pitchY = GetXMLFloat(coordFrameNode, "pitchy");
const auto pitchZ = GetXMLFloat(coordFrameNode, "pitchz");
const auto rollX = GetXMLFloat(coordFrameNode, "rollx");
const auto rollY = GetXMLFloat(coordFrameNode, "rolly");
const auto rollZ = GetXMLFloat(coordFrameNode, "rollz");
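			// The pitch/yaw/roll vectors describe the source coordinate frame; build a rotation from
			// that frame into the standard MPCDI frame, then flip the Y and Z components of the
			// resulting position to land in the Omniverse convention (mirrors the equivalent logic
			// in the extension's Python importer).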
GfMatrix3f sourceToStandard = GfMatrix3f(pitchX, pitchY, pitchZ, yawX, yawY, yawZ, rollX, rollY, rollZ);
auto newPosition = sourceToStandard * GfVec3f(posX, posY, posZ);
newPosition[1] = -newPosition[1];
newPosition[2] = -newPosition[2];
// Camera
UsdGeomCamera camera = UsdGeomCamera::Define(stage, regionPath);
// Camera transform
auto cameraXform = UsdGeomXformable(camera);
auto translateOperation = cameraXform.AddTranslateOp(UsdGeomXformOp::PrecisionFloat);
translateOperation.Set<GfVec3f>(newPosition * 10.0);
cameraXform.AddRotateYOp().Set(frustumYaw);
cameraXform.AddRotateXOp().Set(frustumPitch);
cameraXform.AddRotateZOp().Set(frustumRoll);
// Set camera attributes
camera.GetFocalLengthAttr().Set(focalLength);
camera.GetFocusDistanceAttr().Set(focusDistance);
camera.GetHorizontalApertureAttr().Set(apertureH);
camera.GetHorizontalApertureOffsetAttr().Set(apertureOffsetH);
camera.GetVerticalApertureAttr().Set(apertureV);
camera.GetVerticalApertureOffsetAttr().Set(apertureOffsetV);
// Light
SdfPath lightPath = regionPath.AppendChild(TfToken("RectLight"));
auto rectLight = UsdLuxRectLight::Define(stage, lightPath);
auto lightXform = UsdGeomXformable(rectLight);
auto lightTranslateOperation = lightXform.AddTranslateOp(UsdGeomXformOp::PrecisionFloat);
rectLight.GetPrim().CreateAttribute(
TfToken("isProjector"),
SdfValueTypeNames->Bool
).Set(true);
rectLight.GetPrim().CreateAttribute(
TfToken("exposure"),
SdfValueTypeNames->Float
).Set(5.0f);
rectLight.GetPrim().CreateAttribute(
TfToken("intensity"),
SdfValueTypeNames->Float
).Set(15000.0f);
rectLight.GetWidthAttr().Set(lightWidth);
rectLight.GetHeightAttr().Set(lightHeight);
// Projector box
SdfPath cubePath = regionPath.AppendChild(TfToken("ProjectorBox"));
UsdGeomCube projectorBoxMesh = UsdGeomCube::Define(stage, cubePath);
const auto projectorBoxSize = GfVec3f(50, 15, 40);
const auto projectorBoxOffset = GfVec3f(0, 0, 42);
auto projectorBoxXform = UsdGeomXformable(projectorBoxMesh);
projectorBoxXform.AddTranslateOp(UsdGeomXformOp::PrecisionFloat).Set<GfVec3f>(projectorBoxOffset);
projectorBoxXform.AddScaleOp(UsdGeomXformOp::PrecisionFloat).Set<GfVec3f>(projectorBoxSize);
}
}
// Copy contents into output layer.
layer->TransferContent(newLayer);
return true;
}
bool MpcdiFileFormat::WriteToString(const SdfLayer& layer, std::string* str, const std::string& comment) const
{
// this POC doesn't support writing
return false;
}
bool MpcdiFileFormat::WriteToStream(const SdfSpecHandle& spec, std::ostream& out, size_t indent) const
{
// this POC doesn't support writing
return false;
}
/*
void MpcdiFileFormat::ComposeFieldsForFileFormatArguments(const std::string& assetPath, const PcpDynamicFileFormatContext& context, FileFormatArguments* args, VtValue* contextDependencyData) const
{
// Default sideLength.
double sideLength = 1.0;
VtValue value;
if (!context.ComposeValue(MpcdiFileFormatTokens->SideLength,
&value) ||
value.IsEmpty()) {
}
if (!value.IsHolding<double>()) {
// error;
}
double length;
(*args)[MpcdiFileFormatTokens->SideLength] = TfStringify(sideLength);
}
bool MpcdiFileFormat::CanFieldChangeAffectFileFormatArguments(const TfToken& field, const VtValue& oldValue, const VtValue& newValue, const VtValue& contextDependencyData) const
{
// Check if the "sideLength" argument changed.
double oldLength = oldValue.IsHolding<double>()
? oldValue.UncheckedGet<double>()
: 1.0;
double newLength = newValue.IsHolding<double>()
? newValue.UncheckedGet<double>()
: 1.0;
return oldLength != newLength;
}
*/
// these macros emit methods defined in the Pixar namespace
// but not properly scoped, so we have to use the namespace
// locally here
TF_DEFINE_PUBLIC_TOKENS(
MpcdiFileFormatTokens,
((Id, "mpcdiFileFormat"))
((Version, "1.0"))
((Target, "usd"))
((Extension, "xml"))
((SideLength, "Usd_Triangle_SideLength"))
);
TF_REGISTRY_FUNCTION(TfType)
{
SDF_DEFINE_FILE_FORMAT(MpcdiFileFormat, SdfFileFormat);
}
PXR_NAMESPACE_CLOSE_SCOPE | 12,291 | C++ | 32.493188 | 196 | 0.717842 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiFileFormat.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_MPCDI_MPCDIFILEFORMAT_H_
#define OMNI_MPCDI_MPCDIFILEFORMAT_H_
#define NOMINMAX
#include <pxr/base/tf/staticTokens.h>
#include <pxr/pxr.h>
#include <pxr/base/tf/token.h>
#include <pxr/usd/sdf/fileFormat.h>
#include <pxr/usd/sdf/layer.h>
#include <pxr/usd/pcp/dynamicFileFormatInterface.h>
#include <pxr/usd/pcp/dynamicFileFormatContext.h>
#include "api.h"
PXR_NAMESPACE_OPEN_SCOPE
/// \class MpcdiFileFormat
///
/// Represents the file format plugin responsible for reading
/// MPCDI (.mpcdi.xml) files and translating their display, buffer,
/// and region definitions into USD layer content.
///
class MPCDI_API MpcdiFileFormat : public SdfFileFormat
{
public:
// SdfFileFormat overrides
bool CanRead(const std::string& filePath) const override;
bool Read(SdfLayer* layer, const std::string& resolvedPath, bool metadataOnly) const override;
bool WriteToString(const SdfLayer& layer, std::string* str, const std::string& comment = std::string()) const override;
bool WriteToStream(const SdfSpecHandle& spec, std::ostream& out, size_t indent) const override;
// PcpDynamicFileFormatInterface overrides
//void ComposeFieldsForFileFormatArguments(const std::string& assetPath, const PcpDynamicFileFormatContext& context, FileFormatArguments* args, VtValue* contextDependencyData) const override;
//bool CanFieldChangeAffectFileFormatArguments(const TfToken& field, const VtValue& oldValue, const VtValue& newValue, const VtValue& contextDependencyData) const override;
protected:
SDF_FILE_FORMAT_FACTORY_ACCESS;
virtual ~MpcdiFileFormat();
MpcdiFileFormat();
};
TF_DECLARE_PUBLIC_TOKENS(
MpcdiFileFormatTokens,
((Id, "mpcdiFileFormat"))
((Version, "1.0"))
((Target, "usd"))
((Extension, "xml"))
((SideLength, "Usd_Triangle_SideLength"))
);
TF_DECLARE_WEAK_AND_REF_PTRS(MpcdiFileFormat);
PXR_NAMESPACE_CLOSE_SCOPE
#endif | 2,459 | C | 32.69863 | 192 | 0.764538 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiPluginManager.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_MPCDI_MPCDIPLUGINMANAGER_H_
#define OMNI_MPCDI_MPCDIPLUGINMANAGER_H_
#include <string>
#include <unordered_map>
#include <pxr/pxr.h>
#include <pxr/base/tf/singleton.h>
#include <pxr/base/tf/type.h>
#include <pxr/base/plug/plugin.h>
#include "iMpcdiDataProvider.h"
PXR_NAMESPACE_OPEN_SCOPE
struct _DataProviderInfo
{
public:
PlugPluginPtr plugin;
TfType dataProviderType;
};
/// \class MPCDIPluginManager
///
/// Singleton object responsible for managing the different data provider
/// plugins registered for use by the MPCDI file format provider.
///
class MPCDIPluginManager
{
public:
static MPCDIPluginManager& GetInstance()
{
return TfSingleton<MPCDIPluginManager>::GetInstance();
}
// prevent copying and assignment
MPCDIPluginManager(const MPCDIPluginManager&) = delete;
MPCDIPluginManager& operator=(const MPCDIPluginManager&) = delete;
std::unique_ptr<IEdfDataProvider> CreateDataProvider(const std::string& dataProviderId, const EdfDataParameters& parameters);
private:
MPCDIPluginManager();
~MPCDIPluginManager();
void _GetDataProviders();
friend class TfSingleton<MPCDIPluginManager>;
private:
bool _pluginsLoaded;
std::unordered_map<std::string, _DataProviderInfo> _dataProviderPlugins;
};
PXR_NAMESPACE_CLOSE_SCOPE
#endif | 1,869 | C | 24.616438 | 126 | 0.76886 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiPluginManager.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <pxr/base/tf/instantiateSingleton.h>
#include <pxr/base/plug/registry.h>
#include <pxr/base/js/value.h>
#include <pxr/base/js/utils.h>
#include "mpcdiPluginManager.h"
#include "mpcdiDataProviderFactory.h"
PXR_NAMESPACE_OPEN_SCOPE
TF_INSTANTIATE_SINGLETON(MPCDIPluginManager);
TF_DEFINE_PRIVATE_TOKENS(
MPCDIDataProviderPlugInTokens,
// metadata describing a unique id for the data provider plugin
(dataProviderId)
);
MPCDIPluginManager::MPCDIPluginManager()
{
this->_pluginsLoaded = false;
}
MPCDIPluginManager::~MPCDIPluginManager()
{
}
std::unique_ptr<IEdfDataProvider> MPCDIPluginManager::CreateDataProvider(const std::string& dataProviderId, const EdfDataParameters& parameters)
{
// load the plugins if not already loaded
this->_GetDataProviders();
// attempt to find the plugin responsible for the data provider id
const std::unordered_map<std::string, _DataProviderInfo>::iterator it = this->_dataProviderPlugins.find(dataProviderId);
if (it == this->_dataProviderPlugins.end())
{
TF_CODING_ERROR("Failed to find plugin for %s", dataProviderId.c_str());
return nullptr;
}
// load the corresponding plugin if not already loaded
if (!it->second.plugin->Load())
{
TF_CODING_ERROR("Failed to load plugin %s for %s", it->second.plugin->GetName().c_str(), it->second.dataProviderType.GetTypeName().c_str());
return nullptr;
}
std::unique_ptr<IEdfDataProvider> dataProvider;
EdfDataProviderFactoryBase* factory = it->second.dataProviderType.GetFactory<EdfDataProviderFactoryBase>();
if (factory != nullptr)
{
dataProvider.reset(factory->New(parameters));
}
if (dataProvider == nullptr)
{
TF_CODING_ERROR("Failed to create data provider %s from plugin %s", it->second.dataProviderType.GetTypeName().c_str(), it->second.plugin->GetName().c_str());
}
return dataProvider;
}
void MPCDIPluginManager::_GetDataProviders()
{
	// this uses the standard Pixar plug-in mechanism to load and discover
// plug-ins of a certain type
if (!this->_pluginsLoaded)
{
std::set<TfType> dataProviderTypes;
PlugRegistry::GetAllDerivedTypes(TfType::Find<IEdfDataProvider>(), &dataProviderTypes);
for (const TfType dataProviderType : dataProviderTypes)
{
// get the plugin for the specified type from the plugin registry
const PlugPluginPtr plugin = PlugRegistry::GetInstance().GetPluginForType(dataProviderType);
if (plugin == nullptr)
{
TF_CODING_ERROR("Failed to find plugin for %s", dataProviderType.GetTypeName().c_str());
continue;
}
std::string dataProviderId;
const JsOptionalValue dataProviderIdVal = JsFindValue(plugin->GetMetadataForType(dataProviderType), MPCDIDataProviderPlugInTokens->dataProviderId.GetString());
if (!dataProviderIdVal.has_value() || !dataProviderIdVal->Is<std::string>())
{
TF_CODING_ERROR("'%s' metadata for '%s' must be specified!", MPCDIDataProviderPlugInTokens->dataProviderId.GetText(), dataProviderType.GetTypeName().c_str());
continue;
}
dataProviderId = dataProviderIdVal->GetString();
// store the map between the data provider id and the plugin
_DataProviderInfo providerInfo;
providerInfo.plugin = plugin;
providerInfo.dataProviderType = dataProviderType;
this->_dataProviderPlugins[dataProviderId] = providerInfo;
}
this->_pluginsLoaded = true;
}
}
PXR_NAMESPACE_CLOSE_SCOPE | 3,936 | C++ | 32.084033 | 162 | 0.748476 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiData.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_EDF_EDFDATA_H_
#define OMNI_EDF_EDFDATA_H_
#include <string>
#include <set>
#include <pxr/pxr.h>
#include <pxr/base/tf/declarePtrs.h>
#include <pxr/usd/sdf/abstractData.h>
#include <pxr/usd/sdf/fileFormat.h>
#include <tbb/concurrent_hash_map.h>
#include "iMpcdiDataProvider.h"
PXR_NAMESPACE_OPEN_SCOPE
TF_DECLARE_PUBLIC_TOKENS(
EdfDataParametersTokens,
(dataProviderId)
(providerArgs)
);
TF_DECLARE_WEAK_AND_REF_PTRS(EdfData);
/// \class EdfSourceData
///
/// Serves as a wrapper around EdfData for data providers to populate
/// information into.
///
class EdfSourceData : public IEdfSourceData
{
public:
EdfSourceData(EdfData* data);
virtual ~EdfSourceData();
virtual void CreatePrim(const SdfPath& parentPath, const std::string& name, const SdfSpecifier& specifier,
const TfToken& typeName) override;
virtual void CreateAttribute(const SdfPath& parentPrimPath, const std::string& name, const SdfValueTypeName& typeName,
const SdfVariability& variability, const VtValue& value) override;
virtual void SetField(const SdfPath& primPath, const TfToken& fieldName, const VtValue& value) override;
virtual bool HasField(const SdfPath& primPath, const TfToken& fieldName, VtValue* value) override;
virtual bool HasAttribute(const SdfPath& attributePath, VtValue* defaultValue) override;
private:
EdfData* _data;
};
/// \class EdfData
///
/// This class is used to hold the data required to open
/// a layer from files of "edf" format. This data is initialized
/// by metadata unique to the prim the payload is attached to
/// and turned into file format args to create the appropriate
/// layer identifier for USD.
///
class EdfData : public SdfAbstractData
{
public:
static EdfDataRefPtr CreateFromParameters(const EdfDataParameters& parameters);
// SdfAbstractData overrides
void CreateSpec(const SdfPath& path, SdfSpecType specType) override;
void Erase(const SdfPath& path, const TfToken& fieldName) override;
void EraseSpec(const SdfPath& path) override;
VtValue Get(const SdfPath& path, const TfToken& fieldName) const override;
SdfSpecType GetSpecType(const SdfPath& path) const override;
bool Has(const SdfPath& path, const TfToken& fieldName, SdfAbstractDataValue* value) const override;
bool Has(const SdfPath& path, const TfToken& fieldName, VtValue* value = nullptr) const override;
bool HasSpec(const SdfPath& path) const override;
bool IsEmpty() const override;
std::vector<TfToken> List(const SdfPath& path) const override;
void MoveSpec(const SdfPath& oldPath, const SdfPath& newPath) override;
void Set(const SdfPath& path, const TfToken& fieldName, const VtValue& value) override;
void Set(const SdfPath& path, const TfToken& fieldName, const SdfAbstractDataConstValue& value) override;
bool StreamsData() const override;
bool IsDetached() const override;
std::set<double> ListAllTimeSamples() const override;
std::set<double> ListTimeSamplesForPath(const SdfPath& path) const override;
bool GetBracketingTimeSamples(double time, double* tLower, double* tUpper) const override;
size_t GetNumTimeSamplesForPath(const SdfPath& path) const override;
bool GetBracketingTimeSamplesForPath(const SdfPath& path, double time, double* tLower, double* tUpper) const override;
bool QueryTimeSample(const SdfPath& path, double time, VtValue* optionalValue = nullptr) const override;
bool QueryTimeSample(const SdfPath& path, double time, SdfAbstractDataValue* optionalValue) const override;
void SetTimeSample(const SdfPath& path, double time, const VtValue& value) override;
void EraseTimeSample(const SdfPath& path, double time) override;
virtual bool Read();
protected:
// SdfAbstractDataOverrides
void _VisitSpecs(SdfAbstractDataSpecVisitor* visitor) const override;
private:
friend class EdfSourceData;
// can only be constructed via CreateFromParameters
EdfData(std::unique_ptr<IEdfDataProvider> dataProvider);
// helper methods for retrieving spec properties
// modeled after SdfData
bool _GetSpecTypeAndFieldValue(const SdfPath& path,
const TfToken& fieldName, SdfSpecType* specType, VtValue* value) const;
bool _GetFieldValue(const SdfPath& path,
const TfToken& fieldName, VtValue* value) const;
// helper methods for setting properties for the root
// we don't have functionality for the public Set API
// but we need to do it internally - if we ever added
// support for the set API (i.e. the backend provider
// supported writes), we could call this internally
void _SetFieldValue(const SdfPath& path, const TfToken& fieldName, const VtValue& value);
void _SetFieldValue(const SdfPath& path, const TfToken& fieldName, const VtValue& value) const;
void _CreateSpec(const SdfPath& path, const SdfSpecType& specType);
// instance methods for callbacks on context
void _CreatePrim(const SdfPath& parentPath, const std::string& name,
const SdfSpecifier& specifier, const TfToken& typeName);
void _CreateAttribute(const SdfPath& primPath, const std::string& name,
const SdfValueTypeName& typeName, const SdfVariability& variability, const VtValue& value);
private:
// holds a pointer to the specific data provider to use
// to query back-end data
std::unique_ptr<IEdfDataProvider> _dataProvider;
// holds a shared pointer to the source data object
// used to callback on to create prims / attributes
std::shared_ptr<IEdfSourceData> _sourceData;
// mimic the storage structure of SdfData, just put it
// in a concurrent_hash_map rather than a TfHashMap
// the downside here is if we lock one field value for a write
// the whole prim gets locked, but for our purposes
// here that should be ok - the advantage we get is
// that on deferred reads we should be able to multithread
// the back-end object acquisition during prim indexing
typedef std::pair<TfToken, VtValue> _FieldValuePair;
struct _SpecData {
_SpecData() : specType(SdfSpecTypeUnknown) {}
SdfSpecType specType;
std::vector<_FieldValuePair> fields;
};
// Hash structure consistent with what TBB expects
// but forwarded to what's already in USD
struct SdfPathHash {
static size_t hash(const SdfPath& path)
{
return path.GetHash();
}
static bool equal(const SdfPath& path1, const SdfPath& path2)
{
return path1 == path2;
}
};
typedef tbb::concurrent_hash_map<SdfPath, _SpecData, SdfPathHash> SpecData;
mutable SpecData _specData;
};
PXR_NAMESPACE_CLOSE_SCOPE
#endif | 7,126 | C | 37.317204 | 119 | 0.753999 |
MomentFactory/Omniverse-MPCDI-converter/src/usd-plugins/fileFormat/mpcdiFileFormat/mpcdiDataProviderFactory.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <pxr/pxr.h>
#include "mpcdiDataProviderFactory.h"
PXR_NAMESPACE_OPEN_SCOPE
EdfDataProviderFactoryBase::~EdfDataProviderFactoryBase() = default;
PXR_NAMESPACE_CLOSE_SCOPE | 775 | C++ | 32.739129 | 75 | 0.766452 |
MomentFactory/Omniverse-MPCDI-converter/tools/scripts/link_app.py | import argparse
import json
import os
import sys
import packmanapi
import urllib3
def find_omniverse_apps():
http = urllib3.PoolManager()
try:
r = http.request("GET", "http://127.0.0.1:33480/components")
except Exception as e:
print(f"Failed retrieving apps from an Omniverse Launcher, maybe it is not installed?\nError: {e}")
sys.exit(1)
apps = {}
for x in json.loads(r.data.decode("utf-8")):
latest = x.get("installedVersions", {}).get("latest", "")
if latest:
for s in x.get("settings", []):
if s.get("version", "") == latest:
root = s.get("launch", {}).get("root", "")
apps[x["slug"]] = (x["name"], root)
break
return apps
def create_link(src, dst):
print(f"Creating a link '{src}' -> '{dst}'")
packmanapi.link(src, dst)
APP_PRIORITIES = ["code", "create", "view"]
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Create folder link to Kit App installed from Omniverse Launcher")
parser.add_argument(
"--path",
help="Path to Kit App installed from Omniverse Launcher, e.g.: 'C:/Users/bob/AppData/Local/ov/pkg/create-2021.3.4'",
required=False,
)
parser.add_argument(
"--app", help="Name of Kit App installed from Omniverse Launcher, e.g.: 'code', 'create'", required=False
)
args = parser.parse_args()
path = args.path
if not path:
print("Path is not specified, looking for Omniverse Apps...")
apps = find_omniverse_apps()
if len(apps) == 0:
print(
"Can't find any Omniverse Apps. Use Omniverse Launcher to install one. 'Code' is the recommended app for developers."
)
sys.exit(0)
print("\nFound following Omniverse Apps:")
for i, slug in enumerate(apps):
name, root = apps[slug]
print(f"{i}: {name} ({slug}) at: '{root}'")
if args.app:
selected_app = args.app.lower()
if selected_app not in apps:
choices = ", ".join(apps.keys())
print(f"Passed app: '{selected_app}' is not found. Specify one of the following found Apps: {choices}")
sys.exit(0)
else:
selected_app = next((x for x in APP_PRIORITIES if x in apps), None)
if not selected_app:
selected_app = next(iter(apps))
print(f"\nSelected app: {selected_app}")
_, path = apps[selected_app]
if not os.path.exists(path):
print(f"Provided path doesn't exist: {path}")
else:
SCRIPT_ROOT = os.path.dirname(os.path.realpath(__file__))
create_link(f"{SCRIPT_ROOT}/../../app", path)
print("Success!")
| 2,814 | Python | 32.117647 | 133 | 0.562189 |
MomentFactory/Omniverse-MPCDI-converter/tools/packman/packmanconf.py | # Use this file to bootstrap packman into your Python environment (3.7.x). Simply
# add the path by doing sys.insert to where packmanconf.py is located and then execute:
#
# >>> import packmanconf
# >>> packmanconf.init()
#
# It will use the configured remote(s) and the version of packman in the same folder,
# giving you full access to the packman API via the following module
#
# >> import packmanapi
# >> dir(packmanapi)
import os
import platform
import sys
def init():
"""Call this function to initialize the packman configuration.
Calls to the packman API will work after successfully calling this function.
Note:
This function only needs to be called once during the execution of your
program. Calling it repeatedly is harmless but wasteful.
Compatibility with your Python interpreter is checked and upon failure
the function will report what is required.
Example:
>>> import packmanconf
>>> packmanconf.init()
>>> import packmanapi
>>> packmanapi.set_verbosity_level(packmanapi.VERBOSITY_HIGH)
"""
major = sys.version_info[0]
minor = sys.version_info[1]
if major != 3 or minor != 10:
raise RuntimeError(
f"This version of packman requires Python 3.10.x, but {major}.{minor} was provided"
)
conf_dir = os.path.dirname(os.path.abspath(__file__))
os.environ["PM_INSTALL_PATH"] = conf_dir
packages_root = get_packages_root(conf_dir)
version = get_version(conf_dir)
module_dir = get_module_dir(conf_dir, packages_root, version)
sys.path.insert(1, module_dir)
def get_packages_root(conf_dir: str) -> str:
root = os.getenv("PM_PACKAGES_ROOT")
if not root:
platform_name = platform.system()
if platform_name == "Windows":
drive, _ = os.path.splitdrive(conf_dir)
root = os.path.join(drive, "packman-repo")
elif platform_name == "Darwin":
# macOS
root = os.path.join(
os.path.expanduser("~"), "/Library/Application Support/packman-cache"
)
elif platform_name == "Linux":
try:
                cache_root = os.environ["XDG_CACHE_HOME"]
except KeyError:
cache_root = os.path.join(os.path.expanduser("~"), ".cache")
return os.path.join(cache_root, "packman")
else:
raise RuntimeError(f"Unsupported platform '{platform_name}'")
# make sure the path exists:
os.makedirs(root, exist_ok=True)
return root
def get_module_dir(conf_dir, packages_root: str, version: str) -> str:
module_dir = os.path.join(packages_root, "packman-common", version)
if not os.path.exists(module_dir):
import tempfile
tf = tempfile.NamedTemporaryFile(delete=False)
target_name = tf.name
tf.close()
url = f"http://bootstrap.packman.nvidia.com/packman-common@{version}.zip"
print(f"Downloading '{url}' ...")
import urllib.request
urllib.request.urlretrieve(url, target_name)
from importlib.machinery import SourceFileLoader
# import module from path provided
script_path = os.path.join(conf_dir, "bootstrap", "install_package.py")
ip = SourceFileLoader("install_package", script_path).load_module()
print("Unpacking ...")
ip.install_package(target_name, module_dir)
os.unlink(tf.name)
return module_dir
def get_version(conf_dir: str):
path = os.path.join(conf_dir, "packman")
if not os.path.exists(path): # in dev repo fallback
path += ".sh"
with open(path, "rt", encoding="utf8") as launch_file:
for line in launch_file.readlines():
if line.startswith("PM_PACKMAN_VERSION"):
_, value = line.split("=")
return value.strip()
raise RuntimeError(f"Unable to find 'PM_PACKMAN_VERSION' in '{path}'")
| 3,932 | Python | 35.416666 | 95 | 0.632503 |
MomentFactory/Omniverse-MPCDI-converter/tools/packman/config.packman.xml | <config remotes="cloudfront">
<remote2 name="cloudfront">
<transport actions="download" protocol="https" packageLocation="d4i3qtqj3r0z5.cloudfront.net/${name}@${version}" />
</remote2>
</config>
| 211 | XML | 34.333328 | 123 | 0.691943 |
MomentFactory/Omniverse-MPCDI-converter/tools/packman/bootstrap/install_package.py | # Copyright 2019 NVIDIA CORPORATION
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import shutil
import sys
import tempfile
import zipfile
__author__ = "hfannar"
logging.basicConfig(level=logging.WARNING, format="%(message)s")
logger = logging.getLogger("install_package")
class TemporaryDirectory:
def __init__(self):
self.path = None
def __enter__(self):
self.path = tempfile.mkdtemp()
return self.path
def __exit__(self, type, value, traceback):
# Remove temporary data created
shutil.rmtree(self.path)
def install_package(package_src_path, package_dst_path):
with zipfile.ZipFile(package_src_path, allowZip64=True) as zip_file, TemporaryDirectory() as temp_dir:
zip_file.extractall(temp_dir)
# Recursively copy (temp_dir will be automatically cleaned up on exit)
try:
# Recursive copy is needed because both package name and version folder could be missing in
# target directory:
shutil.copytree(temp_dir, package_dst_path)
except OSError as exc:
            logger.warning("Directory %s already present, package installation aborted" % package_dst_path)
else:
logger.info("Package successfully installed to %s" % package_dst_path)
install_package(sys.argv[1], sys.argv[2])
| 1,844 | Python | 33.166666 | 108 | 0.703362 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/mf/ov/mpcdi_converter/extension.py | import os
import time
from typing import List
import omni.ext
import omni.client
import carb
import omni.kit.notification_manager as nm
from omni.kit.notification_manager import NotificationStatus
from omni.kit.menu import utils
from omni.kit.tool.asset_importer.file_picker import FilePicker
from omni.kit.tool.asset_importer.filebrowser import FileBrowserMode, FileBrowserSelectionType
import omni.ui as ui
import omni.kit.tool.asset_importer as ai
import omni.kit.window.content_browser as content
from .omni_client_wrapper import OmniClientWrapper
import xml.etree.ElementTree as ET
from pxr import UsdGeom, Sdf, Gf, Tf
import math
import logging
class MPCDIConverterContext:
usd_reference_path = ""
class MPCDIConverterHelper:
def __init__(self):
pass
def _cleanNameForUSD(self, strIn: str) -> str:
strOut = strIn
# Do not allow for a blank name
if len(strOut) == 0:
return "Default"
elif len(strOut) == 1 and strIn.isnumeric():
# If we have an index as a name, we only need to add _ beforehand.
return "_" + strIn
return Tf.MakeValidIdentifier(strIn)
def _convert_xml_to_usd(self, absolute_path_xml):
result = 0
try:
_, _, content = omni.client.read_file(absolute_path_xml)
data = memoryview(content).tobytes()
# Read xml file here
root = ET.fromstring(data)
hasLensShifting = False
stage = omni.usd.get_context().get_stage()
mpcdiId = "/MPCDI"
stage.DefinePrim(mpcdiId, "Xform")
# Create usd content here
for display in root:
if display.tag != 'display':
continue
for buffer in display:
bufferId = buffer.attrib['id']
bufferPath = mpcdiId + '/' + self._cleanNameForUSD(bufferId)
stage.DefinePrim(bufferPath, "Scope")
# A region is a projector
for region in buffer:
                        # Get coordinate frame
coordinateFrame = region.find('coordinateFrame')
# Get Position
posX = float(coordinateFrame.find('posx').text) * 10
posY = float(coordinateFrame.find('posy').text) * 10
posZ = float(coordinateFrame.find('posz').text) * 10
# Get Axis up
upX = float(coordinateFrame.find('yawx').text)
upY = float(coordinateFrame.find('yawy').text)
upZ = float(coordinateFrame.find('yawz').text)
# Get Axis right
rightX = float(coordinateFrame.find('pitchx').text)
rightY = float(coordinateFrame.find('pitchy').text)
rightZ = float(coordinateFrame.find('pitchz').text)
# Get Axis down
forwardX = float(coordinateFrame.find('rollx').text)
forwardY = float(coordinateFrame.find('rolly').text)
forwardZ = float(coordinateFrame.find('rollz').text)
                        # The "coordinateFrame" provided in the MPCDI comes with three vectors to resolve any
                        # coordinate-system ambiguity. We need to convert the position from the "source"
                        # coordinate system to the standard MPCDI system, and then from the standard system
                        # to the Omniverse system.
sourceToStandard = Gf.Matrix3f(
rightX, rightY, rightZ,
upX, upY, upZ,
forwardX, forwardY, forwardZ)
                        # Omniverse uses the same Roll/Pitch/Yaw axes as the standard, so the conversion
                        # matrix is diagonal, BUT the Y and Z axes point in the opposite direction, so we
                        # would need to negate them in the matrix. Here we avoid a second matrix product
                        # and simply negate Y and Z of the resulting vector instead.
newPos = sourceToStandard * Gf.Vec3f(posX, posY, posZ)
newPos[1] = newPos[1] * -1.0
newPos[2] = newPos[2] * -1.0
frustum = region.find('frustum')
yaw = float(frustum.find('yaw').text) * -1
pitch = float(frustum.find('pitch').text)
roll = float(frustum.find('roll').text)
# For the moment we do not support lens shifting, so we simply add the two angles and assume
# They are the same on both sides of the angle.
fovRight = float(frustum.find('rightAngle').text)
fovLeft = float(frustum.find('leftAngle').text)
fovTop = float(frustum.find('upAngle').text)
fovBottom = float(frustum.find('downAngle').text)
focalLength = 10 # We chose a fixed focal length.
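                        # Derive the camera film back from the frustum half-angles: the tangents give
                        # the image-plane extents at unit distance, scaled by the fixed focal length,
                        # and the asymmetry between opposite angles becomes an aperture (lens-shift)
                        # offset.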
tanRight = math.tan(math.radians(fovRight))
tanLeft = math.tan(math.radians(fovLeft))
tanUp = math.tan(math.radians(fovTop))
tanDown = math.tan(math.radians(fovBottom))
apertureH = (abs(tanRight) + abs(tanLeft)) * focalLength
apertureV = (abs(tanUp) + abs(tanDown)) * focalLength
lightWidth = abs(tanRight) + abs(tanLeft)
lightHeight = abs(tanUp) + abs(tanDown)
horizLensShiftAmount = (tanLeft + tanRight) / (tanLeft - tanRight)
vertLensShiftAmount = (tanUp + tanDown) / (tanUp - tanDown)
horizApertureOffset = horizLensShiftAmount * apertureH / 2.0
vertApertureOffset = vertLensShiftAmount * apertureV / 2.0
if fovRight != fovLeft or fovTop != fovBottom:
hasLensShifting = True
regionId = region.attrib['id']
primPath = bufferPath + '/' + self._cleanNameForUSD(regionId)
prim = stage.DefinePrim(primPath, "Camera")
prim.GetAttribute('focalLength').Set(focalLength)
prim.GetAttribute('focusDistance').Set(2000.0)
prim.GetAttribute('horizontalAperture').Set(apertureH)
prim.GetAttribute('horizontalApertureOffset').Set(horizApertureOffset)
prim.GetAttribute('verticalAperture').Set(apertureV)
prim.GetAttribute('verticalApertureOffset').Set(vertApertureOffset)
primXform = UsdGeom.Xformable(prim)
                        # This prevents trying to add another xform op when overwriting existing nodes.
primXform.ClearXformOpOrder()
primXform.AddTranslateOp().Set(value=(newPos * 10.0))
primXform.AddRotateYOp().Set(value=yaw)
primXform.AddRotateXOp().Set(value=pitch)
primXform.AddRotateZOp().Set(value=roll)
# Create rectLight node
rectLightpath = primPath + '/ProjectLight'
rectLight = stage.DefinePrim(rectLightpath, 'RectLight')
                        # We need to create these attributes ourselves: they are not standard USD attributes
                        # but Omniverse-specific ones, and at this point in time Omniverse hasn't added them
                        # to a schema.
rectLight.CreateAttribute('isProjector', Sdf.ValueTypeNames.Bool).Set(True)
rectLight.CreateAttribute('intensity', Sdf.ValueTypeNames.Float).Set(15000)
rectLight.CreateAttribute('exposure', Sdf.ValueTypeNames.Float).Set(5)
rectLight.GetAttribute('inputs:width').Set(lightWidth)
rectLight.GetAttribute('inputs:height').Set(lightHeight)
# Creating projector box mesh to simulate the space a projector takes in the space
projectorBoxPath = primPath + '/ProjectorBox'
projector = stage.DefinePrim(projectorBoxPath, 'Cube')
projectorXform = UsdGeom.Xformable(projector)
projectorXform.ClearXformOpOrder()
projectorXform.AddTranslateOp().Set(value=(0, 0, 42.0))
projectorXform.AddScaleOp().Set(value=(50.0, 15, 40.0))
except Exception as e:
logger = logging.getLogger(__name__)
logger.error(f"Failed to parse MPCDI file. Make sure it is not corrupt. {e}")
return -1
if hasLensShifting:
message = "Lens shifting detected in MPCDI. Lens shifting is not supported."
logger = logging.getLogger(__name__)
logger.warn(message)
nm.post_notification(message, status=NotificationStatus.WARNING)
return result
def _create_import_task(self, absolute_path, relative_path, export_folder, _):
stage = omni.usd.get_context().get_stage()
usd_path = ""
# If the stage is not saved save the imported USD next to the original asset.
if not stage or stage.GetRootLayer().anonymous:
now = time.localtime()
ext = time.strftime("_%H%M%S", now)
basename = relative_path[:relative_path.rfind(".")]
no_folder_name = absolute_path[:absolute_path.find("/" + relative_path)]
host_dir = os.path.join(no_folder_name, "convertedAssets", basename + ext).replace("\\", "/")
# Save the imported USD next to the saved stage.
path_out = omni.usd.get_context().get_stage_url()
# If user makes a selection for the output folder use it.
if export_folder is not None:
path_out = export_folder
path_out_index = path_out.rfind("/")
success = self._convert_xml_to_usd(absolute_path) # self._hi.convert_cad_file_to_usd(absolute_path, path_out[:path_out_index])
ext_index = relative_path.rfind(".")
relative_path = self._cleanNameForUSD(relative_path[:ext_index]) + ".usd"
usd_path = os.path.join(path_out[:path_out_index], relative_path).replace("\\", "/")
logger = logging.getLogger(__name__)
if success == 0:
            message = "Import successful"
logger.info(message)
nm.post_notification(message)
return usd_path
elif success == -10002:
            # TODO: this happens when we have a problem reading the file from OV; we might need to download it locally
            logger.info("NOT IMPLEMENTED: Failed to load model from Omniverse server, please select a file from local disk.")
nm.post_notification(
f"Failed to convert file {os.path.basename(absolute_path)}.\n"
"Please check console for more details.",
status=nm.NotificationStatus.WARNING,
)
return None
else:
logger.info("IMPORT FAILED")
nm.post_notification(
f"Failed to convert file {os.path.basename(absolute_path)}.\n"
"Please check console for more details.",
status=nm.NotificationStatus.WARNING,
)
return None
async def create_import_task(self, absolute_paths, relative_paths, export_folder, hoops_context):
converted_assets = {}
for i in range(len(absolute_paths)):
converted_assets[absolute_paths[i]] = self._create_import_task(absolute_paths[i], relative_paths[i],
export_folder, hoops_context)
return converted_assets
class MPCDIConverterOptions:
def __init__(self):
self.cad_converter_context = MPCDIConverterContext()
self.export_folder: str = None
class MPCDIConverterOptionsBuilder:
def __init__(self, usd_context):
super().__init__()
self._file_picker = None
self._usd_context = usd_context
self._export_context = MPCDIConverterOptions()
self._folder_button = None
self._refresh_default_folder = False
self._default_folder = None
self._clear()
def _clear(self):
self._built = False
self._export_folder_field = None
if self._folder_button:
self._folder_button.set_clicked_fn(None)
self._folder_button = None
def set_default_target_folder(self, folder: str):
self._default_folder = folder
self._refresh_default_folder = True
def build_pane(self, asset_paths: List[str]):
self._export_context = self.get_import_options()
if self._refresh_default_folder:
self._export_context.export_folder = self._default_folder
self._default_folder = None
self._refresh_default_folder = False
self._built = True
OPTIONS_STYLE = {
"Rectangle::hovering": {"background_color": 0x0, "border_radius": 2, "margin": 0, "padding": 0},
"Rectangle::hovering:hovered": {"background_color": 0xFF9E9E9E},
"Button.Image::folder": {"image_url": Icons().get("folder")},
"Button.Image::folder:checked": {"image_url": Icons().get("folder")},
"Button::folder": {"background_color": 0x0, "margin": 0},
"Button::folder:checked": {"background_color": 0x0, "margin": 0},
"Button::folder:pressed": {"background_color": 0x0, "margin": 0},
"Button::folder:hovered": {"background_color": 0x0, "margin": 0},
}
with ui.VStack(height=0, style=OPTIONS_STYLE):
ui.Spacer(width=0, height=5)
with ui.HStack(height=0):
ui.Label("Convert To:", width=0)
ui.Spacer(width=3)
with ui.VStack(height=0):
ui.Spacer(height=4)
self._export_folder_field = ui.StringField(height=20, width=ui.Fraction(1), read_only=False)
                    self._export_folder_field.set_tooltip(
                        "Leaving this empty will export the USD to the folder that the assets are under."
                    )
ui.Spacer(height=4)
with ui.VStack(height=0, width=0):
ui.Spacer(height=4)
with ui.ZStack(width=20, height=20):
ui.Rectangle(name="hovering")
self._folder_button = ui.Button(name="folder", width=24, height=24)
self._folder_button.set_tooltip("Choose folder")
ui.Spacer(height=4)
ui.Spacer(width=2)
self._folder_button.set_clicked_fn(self._show_file_picker)
ui.Spacer(width=0, height=10)
if self._export_context.export_folder:
self._export_folder_field.model.set_value(self._export_context.export_folder)
else:
self._export_folder_field.model.set_value("")
def _select_picked_folder_callback(self, paths):
if paths:
self._export_folder_field.model.set_value(paths[0])
def _cancel_picked_folder_callback(self):
pass
def _show_file_picker(self):
if not self._file_picker:
mode = FileBrowserMode.OPEN
file_type = FileBrowserSelectionType.DIRECTORY_ONLY
filters = [(".*", "All Files (*.*)")]
self._file_picker = FilePicker("Select Folder", mode=mode, file_type=file_type, filter_options=filters)
self._file_picker.set_file_selected_fn(self._select_picked_folder_callback)
self._file_picker.set_cancel_fn(self._cancel_picked_folder_callback)
folder = self._export_folder_field.model.get_value_as_string()
if utils.is_folder(folder):
self._file_picker.show(folder)
else:
self._file_picker.show(self._get_current_dir_in_content_window())
def _get_current_dir_in_content_window(self):
content_window = content.get_content_window()
return content_window.get_current_directory()
def get_import_options(self):
context = MPCDIConverterOptions()
# TODO enable this after the filepicker bugfix: OM-47383
# if self._built:
# context.export_folder = str.strip(self._export_folder_field.model.get_value_as_string())
# context.export_folder = context.export_folder.replace("\\", "/")
return context
def destroy(self):
self._clear()
if self._file_picker:
self._file_picker.destroy()
class MPCDIConverterDelegate(ai.AbstractImporterDelegate):
def __init__(self, usd_context, name, filters, descriptions):
super().__init__()
self._hoops_options_builder = MPCDIConverterOptionsBuilder(usd_context)
self._hoops_converter = MPCDIConverterHelper()
self._name = name
self._filters = filters
self._descriptions = descriptions
def destroy(self):
if self._hoops_converter:
self._hoops_converter.destroy()
self._hoops_converter = None
if self._hoops_options_builder:
self._hoops_options_builder.destroy()
self._hoops_options_builder = None
@property
def name(self):
return self._name
@property
def filter_regexes(self):
return self._filters
@property
def filter_descriptions(self):
return self._descriptions
def build_options(self, paths):
pass
# TODO enable this after the filepicker bugfix: OM-47383
# self._hoops_options_builder.build_pane(paths)
async def convert_assets(self, paths):
context = self._hoops_options_builder.get_import_options()
hoops_context = context.cad_converter_context
absolute_paths = []
relative_paths = []
for file_path in paths:
if self.is_supported_format(file_path):
absolute_paths.append(file_path)
filename = os.path.basename(file_path)
relative_paths.append(filename)
converted_assets = await self._hoops_converter.create_import_task(
absolute_paths, relative_paths, context.export_folder, hoops_context
)
return converted_assets
_global_instance = None
# Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be
# instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled
# on_shutdown() is called.
class MfMpcdiConverterExtension(omni.ext.IExt):
# ext_id is current extension id. It can be used with extension manager to query additional information, like where
# this extension is located on filesystem.
def on_startup(self, ext_id):
global _global_instance
_global_instance = self
self._usd_context = omni.usd.get_context()
self.delegate_mpcdi = MPCDIConverterDelegate(
self._usd_context,
"MPCDI Converter",
["(.*\\.mpcdi\\.xml$)"],
            ["mpcdi XML Files (*.mpcdi.xml)"]
)
ai.register_importer(self.delegate_mpcdi)
def on_shutdown(self):
global _global_instance
_global_instance = None
ai.remove_importer(self.delegate_mpcdi)
self.delegate_mpcdi = None
| 19,870 | Python | 43.354911 | 135 | 0.573125 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/mf/ov/mpcdi_converter/__init__.py | import os
from pxr import Plug
pluginsRoot = os.path.join(os.path.dirname(__file__), '../../../plugin/resources')
Plug.Registry().RegisterPlugins(pluginsRoot)
from .extension import *
| 191 | Python | 20.333331 | 82 | 0.706806 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/mf/ov/mpcdi_converter/omni_client_wrapper.py | import os
import traceback
import asyncio
import carb
import omni.client
def _encode_content(content):
if type(content) == str:
payload = bytes(content.encode("utf-8"))
elif type(content) != type(None):
payload = bytes(content)
else:
payload = bytes()
return payload
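# Thin, static async wrappers around omni.client file operations (stat/read/write/
# copy/create_folder) that normalize error handling into booleans/None and log
# failures through carb instead of raising.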
class OmniClientWrapper:
@staticmethod
async def exists(path):
try:
result, entry = await omni.client.stat_async(path)
return result == omni.client.Result.OK
except Exception as e:
traceback.print_exc()
carb.log_error(str(e))
return False
@staticmethod
def exists_sync(path):
try:
result, entry = omni.client.stat(path)
return result == omni.client.Result.OK
except Exception as e:
traceback.print_exc()
carb.log_error(str(e))
return False
@staticmethod
async def write(path: str, content):
carb.log_info(f"Writing {path}...")
try:
result = await omni.client.write_file_async(path, _encode_content(content))
if result != omni.client.Result.OK:
carb.log_error(f"Cannot write {path}, error code: {result}.")
return False
except Exception as e:
traceback.print_exc()
carb.log_error(str(e))
return False
finally:
carb.log_info(f"Writing {path} done...")
return True
@staticmethod
async def copy(src_path: str, dest_path: str):
        carb.log_info(f"Copying from {src_path} to {dest_path}...")
try:
await omni.client.delete_async(dest_path)
result = await omni.client.copy_async(src_path, dest_path)
if result != omni.client.Result.OK:
carb.log_error(f"Cannot copy from {src_path} to {dest_path}, error code: {result}.")
return False
else:
return True
except Exception as e:
traceback.print_exc()
carb.log_error(str(e))
return False
@staticmethod
async def read(src_path: str):
carb.log_info(f"Reading {src_path}...")
try:
result, version, content = await omni.client.read_file_async(src_path)
if result == omni.client.Result.OK:
return memoryview(content).tobytes()
else:
carb.log_error(f"Cannot read {src_path}, error code: {result}.")
except Exception as e:
traceback.print_exc()
carb.log_error(str(e))
finally:
carb.log_info(f"Reading {src_path} done...")
return None
@staticmethod
async def create_folder(path):
carb.log_info(f"Creating dir {path}...")
result = await omni.client.create_folder_async(path)
return result == omni.client.Result.OK or result == omni.client.Result.ERROR_ALREADY_EXISTS
@staticmethod
def create_folder_sync(path):
carb.log_info(f"Creating dir {path}...")
result = omni.client.create_folder(path)
return result == omni.client.Result.OK or result == omni.client.Result.ERROR_ALREADY_EXISTS | 3,226 | Python | 30.637255 | 100 | 0.574086 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/config/extension.toml | [package]
version = "1.1.1"
title = "MF MPCDI converter"
description="Brings support for MPCDI videoprojector files."
authors = ["Moment Factory","Antoine Pilote"]
readme = "docs/README.md"
changelog="docs/CHANGELOG.md"
repository = "https://github.com/MomentFactory/Omniverse-MPCDI-converter"
category = "Simulation"
keywords = ["videoprojector", "MPCDI", "audiovisual", "video", "projection", "videomapping"]
preview_image = "data/preview.png"
icon = "data/mf-ov-extensions-icons.png"
toggleable = false
[core]
reloadable = false
# Load at the start, load all schemas with order -100 (with order -1000 the USD libs are loaded)
order = -100
[dependencies]
"omni.kit.uiapp" = {}
"omni.kit.tool.asset_importer" = {}
[[python.module]]
name = "mf.ov.mpcdi_converter"
[package.target]
kit = ["105.1"]
[package.writeTarget]
kit = true
python = false
| 861 | TOML | 21.102564 | 96 | 0.716609 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/docs/CHANGELOG.md | # Changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [1.1.1] - 2023-12-02
- Deprecated kit 104 and 105.0
- Monorepo for the USD fileformat plugin
- Procedure to build for USDView
## [1.1.0] - 2023-10-04
- Added native USD file format plugin for payload support.
## [1.0.0] - 2023-07-19
### Added
- Compatibility with USD Composer 2023.1.1
### Changed
- More explicit error message when failing to import
## [0.1.0] - 2023-03-30
### Added
- Initial version of Omniverse MPCDI extension
| 536 | Markdown | 20.479999 | 80 | 0.697761 |
MomentFactory/Omniverse-MPCDI-converter/exts/mf.ov.mpcdi_converter/docs/README.md | # MPCDI converter for Omniverse [mf.ov.mpcdi_converter]
An Omniverse extension for MPDCI files.
Supports MPCDI* to OpenUSD conversion as well as references to MPCDI files through a native USD FileFormat plugin.
MPCDI* is a VESA interchange format for videoprojector technical data.
*Multiple Projection Common Data Interchange
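A minimal usage sketch in Python, assuming the extension (and therefore the file format plugin) is loaded, and using `projectors.mpcdi.xml` as a hypothetical file path:
```python
from pxr import Usd

# Payload the MPCDI file directly; the file format plugin expands it into
# Xform/Camera/RectLight prims when the stage composes.
stage = Usd.Stage.CreateInMemory()
prim = stage.DefinePrim("/Projectors", "Xform")
prim.GetPayloads().AddPayload("projectors.mpcdi.xml")
print(stage.ExportToString())
```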
MPCDIv2 is under Copyright © 2013 – 2015 Video Electronics Standards Association. All rights reserved.
| 434 | Markdown | 38.545451 | 113 | 0.815668 |
MomentFactory/Omniverse-MPCDI-converter/PACKAGE-LICENSES/USD-LICENSE.md | Universal Scene Description (USD) components are licensed under the following terms:
Modified Apache 2.0 License
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor
and its affiliates, except as required to comply with Section 4(c) of
the License and to reproduce the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
============================================================
RapidJSON
============================================================
Tencent is pleased to support the open source community by making RapidJSON available.
Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
If you have downloaded a copy of the RapidJSON binary from Tencent, please note that the RapidJSON binary is licensed under the MIT License.
If you have downloaded a copy of the RapidJSON source code from Tencent, please note that RapidJSON source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of RapidJSON into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within RapidJSON. To avoid the problematic JSON license in your own projects, it's sufficient to exclude the bin/jsonchecker/ directory, as it's the only code under the JSON license.
A copy of the MIT License is included in this file.
Other dependencies and licenses:
Open Source Software Licensed Under the BSD License:
--------------------------------------------------------------------
The msinttypes r29
Copyright (c) 2006-2013 Alexander Chemeris
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Open Source Software Licensed Under the JSON License:
--------------------------------------------------------------------
json.org
Copyright (c) 2002 JSON.org
All Rights Reserved.
JSON_checker
Copyright (c) 2002 JSON.org
All Rights Reserved.
Terms of the JSON License:
---------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Terms of the MIT License:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
============================================================
pygilstate_check
============================================================
The MIT License (MIT)
Copyright (c) 2014, Pankaj Pandey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================
double-conversion
============================================================
Copyright 2006-2011, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
============================================================
OpenEXR/IlmBase/Half
============================================================
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
============================================================
Apple Technical Q&A QA1361 - Detecting the Debugger
https://developer.apple.com/library/content/qa/qa1361/_index.html
============================================================
Sample code project: Detecting the Debugger
Version: 1.0
Abstract: Shows how to determine if code is being run under the debugger.
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
============================================================
LZ4
============================================================
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2017, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://www.lz4.org
- LZ4 source repository : https://github.com/lz4/lz4
============================================================
stb
============================================================
stb_image - v2.19 - public domain image loader - http://nothings.org/stb
no warranty implied; use at your own risk
stb_image_resize - v0.95 - public domain image resizing
by Jorge L Rodriguez (@VinoBS) - 2014
http://github.com/nothings/stb
stb_image_write - v1.09 - public domain - http://nothings.org/stb/stb_image_write.h
writes out PNG/BMP/TGA/JPEG/HDR images to C stdio - Sean Barrett 2010-2015
no warranty implied; use at your own risk
ALTERNATIVE B - Public Domain (www.unlicense.org)
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
software, either in source code form or as a compiled binary, for any purpose,
commercial or non-commercial, and by any means.
In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and to
the detriment of our heirs and successors. We intend this dedication to be an
overt act of relinquishment in perpetuity of all present and future rights to
this software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| 26,573 | Markdown | 57.792035 | 739 | 0.727317 |
MomentFactory/Omniverse-MVR-GDTF-converter/bootstrap.py | # Copyright 2023 NVIDIA CORPORATION
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import io
import packmanapi
import os
import sys
REPO_ROOT = os.path.dirname(os.path.realpath(__file__))
REPO_DEPS_FILE = os.path.join(REPO_ROOT, "deps", "repo-deps.packman.xml")
if __name__ == "__main__":
# pull all repo dependencies first
# and add them to the python path
with contextlib.redirect_stdout(io.StringIO()):
deps = packmanapi.pull(REPO_DEPS_FILE)
for dep_path in deps.values():
if dep_path not in sys.path:
sys.path.append(dep_path)
sys.path.append(REPO_ROOT)
import omni.repo.usd
omni.repo.usd.bootstrap(REPO_ROOT) | 1,191 | Python | 31.216215 | 74 | 0.717044 |
MomentFactory/Omniverse-MVR-GDTF-converter/repo.toml | # common settings for repo_usd for all USD plug-ins
[repo_usd]
usd_root = "${root}/_build/usd-deps/nv-usd/%{config}"
usd_python_root = "${root}/_build/usd-deps/python"
generate_plugin_buildfiles = true
plugin_buildfile_format = "cmake"
generate_root_buildfile = true
[repo_usd.plugin.gdtfFileFormat]
plugin_dir = "${root}/src/usd-plugins/fileFormat/gdtfFileFormat"
install_root = "${root}/_install/%{platform}/%{config}/gdtfFileFormat"
include_dir = "include/gdtfFileFormat"
additional_include_dirs = [
"../../../../_build/usd-deps/nv_usd/%{config}/include/tbb",
"${root}/src/usd-plugins/fileFormat/mvrFileFormat/assimp/include"
]
public_headers = [
"api.h"
]
private_headers = [
"gdtfFileFormat.h",
"gdtfUsdConverter.h",
"../mvrFileFormat/gdtfParser/GdtfParser.h",
"tinyxml2.h"
]
preprocessor_defines = [
"GDTF_FILEFORMAT"
]
cpp_files = [
"gdtfFileFormat.cpp",
"gdtfUsdConverter.cpp",
"../mvrFileFormat/gdtfParser/GdtfParser.cpp",
"tinyxml2.cpp"
]
resource_files = [
"plugInfo.json"
]
additional_library_dirs = [
"${root}/src/usd-plugins/fileFormat/mvrFileFormat"
]
additional_static_libs = [
"assimp",
"zlibstatic"
]
usd_lib_dependencies = [
"arch",
"tf",
"plug",
"vt",
"gf",
"sdf",
"js",
"pcp",
"usdGeom",
"usd",
"usdLux"
]
[repo_usd.plugin.mvrFileFormat]
plugin_dir = "${root}/src/usd-plugins/fileFormat/mvrFileFormat"
install_root = "${root}/_install/%{platform}/%{config}/mvrFileFormat"
include_dir = "include/mvrFileFormat"
additional_include_dirs = [
"../../../../_build/usd-deps/nv_usd/%{config}/include/tbb",
"${root}/src/usd-plugins/fileFormat/mvrFileFormat/assimp/include"
]
public_headers = [
"api.h"
]
private_headers = [
"mvrFileFormat.h",
"mvrParser/tinyxml2.h",
"mvrParser/Fixture.h",
"mvrParser/FixtureFactory.h",
"mvrParser/LayerFactory.h",
"mvrParser/MVRParser.h",
"gdtfParser/GdtfParser.h",
"gdtfParser/ModelSpecification.h",
"gdtfParser/Device.h",
"../gdtfFileFormat/gdtfUsdConverter.h"
]
cpp_files = [
"mvrFileFormat.cpp",
"mvrParser/Fixture.cpp",
"mvrParser/FixtureFactory.cpp",
"mvrParser/LayerFactory.cpp",
"mvrParser/MVRParser.cpp",
"mvrParser/tinyxml2.cpp",
"gdtfParser/GdtfParser.cpp",
"../gdtfFileFormat/gdtfUsdConverter.cpp"
]
resource_files = [
"plugInfo.json"
]
additional_library_dirs = [
"${root}/src/usd-plugins/fileFormat/mvrFileFormat"
]
additional_static_libs = [
"assimp",
"zlibstatic"
]
usd_lib_dependencies = [
"arch",
"tf",
"plug",
"vt",
"gf",
"sdf",
"js",
"pcp",
"usdGeom",
"usd",
"usdLux"
]
| 2,700 | TOML | 20.782258 | 70 | 0.644444 |
MomentFactory/Omniverse-MVR-GDTF-converter/README.md | # MF.OV.MVR and MF.OV.GDTF
Brings support of MVR and GDTF files to Omniverse and USD.
This repository contains two different extensions.
GDTF (General Device Type Format) defines an asset format that collects technical information about audiovisual devices. It is currently centered on lighting fixtures and provides accurate digital twins of lighting devices from 100+ manufacturers.
MVR (My Virtual Rig) is a scene format that can describe a complete rig of lights, using GDTF assets at its core while adding capabilities to define groups, layers, DMX addresses and more, allowing lighting designers to build virtual replicas of their lighting rigs and enforce a single file format from show design to previz to operation.
This repository contains two separate extensions :
- [MVR extension](./exts/mf.ov.mvr/)
- [GDTF extension](./exts/mf.ov.gdtf/)
# Requirements
- Requires Omniverse Kit >= 105
- Tested in USD Composer 2023.2.2 and 2023.2.0
# Build
## Build for Omniverse
- Just run `build.bat`
- Once the build is complete, the FileFormat dlls should be located under : `_install/windows-x86_64/release`
## Build for USDView
The dependency configuration is contained in the [usd-deps.packman.xml](deps/usd-deps.packman.xml) file
To switch to the correct OpenUSD version for USDview compilation, you may need to edit the packman configuration file as follows:
```
<project toolsVersion="5.6">
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="usd.py310.${platform}.usdview.${config}" version="0.23.05-tc.47+v23.05.b53573ea" />
</dependency>
<dependency name="python" linkPath="../_build/usd-deps/python">
<package name="python" version="3.10.13+nv1-${platform}" />
</dependency>
</project>
```
`source setenvwindows`
Test with `usdview resources/scene.usda`
Note : light meshes will not show up unless you have a GLTF FileFormat plugin enabled.
## Alternate builds
At the bottom of this Readme, you will find alternative ways of building for Unreal 5.3 and Blender 4.0.
# Using the extension
To enable the extensions in USD Composer:
- `Window` > `Extensions`
- Search for `MF GDTF Converter` or `MF MVR Converter` in the community tab and enable them with the "autoload" checkbox.
- Restart USD composer.
## Sample files
MVR samples :
- [7-fixtures-samples.mvr](./exts/mf.ov.mvr/sample/7-fixtures-sample.mvr/)
- [fixture-line-gltf.mvr](./exts/mf.ov.mvr/sample/fixture-line-gltf.mvr/)
GDTF sample
- [Robin_MMX_Blade](./exts/mf.ov.gdtf/sample/Robe_Lighting@Robin_MMX_Blade@2023-07-25__Beam_revision.gdtf)
Thousands of GDTF files are available on [GDTF-share](https://gdtf-share.com/).
For example, the latest version of the GDTF sample file we provide can be downloaded from [here](https://gdtf-share.com/share.php?page=home&manu.=Robe%20Lighting&fix=Robin%20MMX%20Blade)
## Reference MVR/GDTF files
To reference an MVR or a GDTF file, just drag and drop the file on your viewport or your Stage Window.
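Because MVR and GDTF support comes from native USD FileFormat plugins, the same reference can also be authored from Python. Below is a minimal sketch, assuming the extensions are enabled and the `7-fixtures-sample.mvr` sample sits next to a new stage; the stage name and the `/LightingRig` prim are just examples:

```python
from pxr import Usd, Sdf

# Create a small working stage next to the sample MVR file.
stage = Usd.Stage.CreateNew("mvr_reference_example.usda")

# Any prim can reference the .mvr file directly; the MVR FileFormat plugin
# resolves it like a regular USD layer.
rig = stage.DefinePrim(Sdf.Path("/LightingRig"), "Xform")
rig.GetReferences().AddReference("./7-fixtures-sample.mvr")

stage.GetRootLayer().Save()
```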
## Convert MVR/GDTF files
Note: to properly work with MVR files, both extensions have to be enabled.
1. In the content tab, browse to the folder where you want to import your `MVR` or `GDTF` files.
2. Click the `+Import` button and select "External Assets (FBX, OBJ...)".
3. Choose a `MVR` or `GDTF` file and wait for it to import.
- MVR import
- The import result will be stored in a folder with the same name as the imported file in the current content browser directory.
- If `GDTF` files are referenced, they will be converted to `USD` in a subfolder.
- GDTF import
- The import result will be stored in a folder with the same name as the imported file in the current content browser directory.
4. To finalize the import, drag the freshly converted `USD` file in your project or open it.
# Implementation notes
## `MVR.USD` USD schema
Note : not every aspect of the MVR specification is currently implemented for USD, as we focused on the ability to retrieve the lighting fixture information.
1. Under the Root, you'll find `Scope` prims representing the different `Layers` of the MVR scene.
2. Inside them you'll find each GDTF fixture represented by an `Xform` pointing to a USD payload.
3. `Xform` prims are named using their name and their UUID to ensure unique naming.
4. `Xform` prims also have custom properties (see Raw USD Properties) using the following convention: `mf:mvr:property`.
```
Root/
├─📁MVR-Layer1 (Scope)
| ├─💠Make_Model_UID1 (Xform with payload)
| └─💠Make_Model_UID2 (Xform with payload)
└─📁MVR-Layer2 (Scope)
  ├─💠Make_Model_UID1 (Xform with payload)
  └─💠Make_Model_UID2 (Xform with payload)
```
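As a rough illustration of this layout, the sketch below walks a converted stage and prints each layer with its fixtures. The stage path is hypothetical, and the root lookup is deliberately defensive since the layers may sit under a default prim:

```python
from pxr import Usd, UsdGeom

# Hypothetical output of the MVR converter.
stage = Usd.Stage.Open("7-fixtures-sample/7-fixtures-sample.usda")

# Layers may be authored under a default prim or directly under the root.
root = stage.GetDefaultPrim() or stage.GetPseudoRoot()

for layer in root.GetChildren():
    if layer.GetTypeName() != "Scope":      # MVR layers are Scope prims
        continue
    print(f"Layer: {layer.GetName()}")
    for fixture in layer.GetChildren():     # fixtures are Xforms with payloads
        if fixture.IsA(UsdGeom.Xform):
            print(f"  Fixture: {fixture.GetName()}")
```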
## MVR Raw USD Properties
When importing an MVR file, some properties specific to MVR and not compatible with USD will be imported as raw USD properties on the Xform holding a lighting fixture:
| Property | Type | Description |
|--- |--- |--- |
|`mf:mvr:name` |[🔗String](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The name of the object. |
|`mf:mvr:uuid` |[🔗UUID](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The unique identifier of the object. |
|`mf:mvr:Classing` |[🔗UUID](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The class the object belongs to |
|`mf:mvr:GDTFMode` |[🔗String](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The name of the used DMX mode. This has to match the name of a DMXMode in the GDTF file.|
|`mf:mvr:GDTFSpec` |[🔗FileName](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The name of the file containing the GDTF information for this light fixture. |
|`mf:mvr:CastShadow` |[🔗Bool](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | Whether the fixture casts shadows or not. |
|`mf:mvr:UnitNumber` |[🔗Integer](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) |The unit number of the lighting fixture in a position. |
|`mf:mvr:Addresses` |[🔗Addresses](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#node-definition-addresses)| The DMX address of the fixture. |
|`mf:mvr:CustomCommands` |[🔗CustomCommands](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#node-definition-customcommands)| Custom commands that should be executed on the fixture |
|`mf:mvr:CIEColor` |[🔗CIE Color](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#user-content-attrtype-ciecolor)| A color assigned to a fixture. If it is not defined, there is no color for the fixture.|
|`mf:mvr:FixtureTypeId` |[🔗Integer](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) | The Fixture Type ID is a value that can be used as a short name of the Fixture Type. |
|`mf:mvr:CustomId` |[🔗Integer](https://github.com/mvrdevelopment/spec/blob/main/mvr-spec.md#generic-value-types) |The Custom ID is a value that can be used as a short name of the Fixture Instance. |
Example

## `GDTF.USD` USD Schema
GDTF can have multiple structure type, but here is a typical example for a moving light.
```
Root/
└─💠 Base (Xform)
├─💠model (Xform)
│ └─🧊 mesh (Mesh)
├─💠Yoke (Xform)
│ ├─💠model (Xform)
│ │ └─🧊 mesh (Mesh)
| └──💠Head (Xform)
│ └─💠model (Xform)
│ └─🧊 mesh (Mesh)
└─📁Looks (Scope)
```
## GDTF Raw USD Properties
### Properties affecting the USD Prims properties
| Property | Type | Description
|--- |--- |---
|`mf:gdtf:BeamAngle` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types)| Will affect the USD Light's `Cone Angle`.
|`mf:gdtf:ColorTemperature` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types)| Color temperature; Unit: kelvin. Will affect the USD Light's `Color Temperature`.
|`mf:gdtf:LuminousFlux` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types)| Intensity of all the represented light emitters; Unit: lumen. Will affect the USD Light's `intensity`.
### Fixture
| Property | Type | Description
|--- |--- |---
|`mf:gdtf:OperatingTemperature:High`|[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Highest temperature at which the device can be operated. Unit: °C.
|`mf:gdtf:OperatingTemperature:Low` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Lowest temperature at which the device can be operated. Unit: °C.
|`mf:gdtf:Weight` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Weight of the device including all accessories. Unit: kilogram.
|`mf:gdtf:LegHeight` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Defines height of the legs - distance between the floor and the bottom base plate. Unit: meter.
### Beam (Light)
| Property | Type | Description
|--- |--- |---
|`mf:gdtf:BeamType` |[🔗Enum](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Beam Type; Specified values: "Wash", "Spot", "None", "Rectangle", "PC", "Fresnel", "Glow".
|`mf:gdtf:ColorRenderingIndex`|[🔗Uint](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | The CRI according to TM-30 is a quantitative measure of the ability of the light source to render object colors naturally, compared to a daylight reference.
|`mf:gdtf:FieldAngle` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types)| Field angle; Unit: degree.
|`mf:gdtf:LampType` |[🔗Enum](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types) | Defines the type of the light source; the currently defined types are: Discharge, Tungsten, Halogen, LED.
|`mf:gdtf:PowerConsumption` |[🔗Float](https://github.com/mvrdevelopment/spec/blob/main/gdtf-spec.md#table-1--xml-attribute-value-types)| Power consumption; Unit: Watt.
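To inspect these values on a converted fixture, one option is simply to scan the stage for prims carrying `mf:gdtf:` attributes. The sketch below does that; the stage path is hypothetical and the attribute names are taken from the tables above:

```python
from pxr import Usd

# Hypothetical output of the GDTF converter.
stage = Usd.Stage.Open("Robin_MMX_Blade/Robin_MMX_Blade.usda")

RAW_PROPS = ("mf:gdtf:BeamAngle", "mf:gdtf:ColorTemperature", "mf:gdtf:LuminousFlux",
             "mf:gdtf:BeamType", "mf:gdtf:LampType", "mf:gdtf:PowerConsumption")

for prim in stage.Traverse():
    values = {}
    for name in RAW_PROPS:
        attr = prim.GetAttribute(name)
        if attr and attr.HasValue():
            values[name] = attr.Get()
    if values:
        print(prim.GetPath(), values)
```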
### XML Notes
Example of a fixture defined in a MVR file (contains some, but not all properties):
```xml
<Fixture name="Sharpy" uuid="C63B1F8D-6DAD-438C-9228-E33C6EF2947E">
<Matrix>{-1.000000,0.000000,0.000000}{0.000000,-1.000000,-0.000000}{0.000000,0.000000,1.000000}{-766.333333,4572.000000,7620.000000}</Matrix>
<GDTFSpec>Clay Paky@Sharpy [Bulb=MSD Platinum 5R 189W].gdtf</GDTFSpec>
<GDTFMode>Vect</GDTFMode>
<Addresses>
<Address break="0">21</Address>
</Addresses>
<FixtureID>102</FixtureID>
<UnitNumber>0</UnitNumber>
<FixtureTypeId>0</FixtureTypeId>
<CustomId>0</CustomId>
<Color>0.312712,0.329008,100.000000</Color>
<CastShadow>false</CastShadow>
<Mappings/>
</Fixture>
```
Some notes on the properties:
- Matrix is in millimeters (applies to the last part, the translation).
- Color is in [CIE 1931 color space](https://en.wikipedia.org/wiki/CIE_1931_color_space) and represents the color of a color gel or similar apparatus, not of the fixture itself (see the sketch below).
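As a quick illustration of those two conventions, here is a small sketch that interprets the `<Matrix>` and `<Color>` values from the fixture above when reading the MVR XML directly. It assumes a centimeter-based target scene for the millimeter conversion, and uses the standard CIE xyY → XYZ → linear sRGB formulas, which are not specific to this converter:

```python
def parse_mvr_matrix(text: str):
    """Split the MVR <Matrix> string into three basis vectors and a translation (in cm)."""
    rows = [list(map(float, part.split(","))) for part in text.strip("{}").split("}{")]
    u, v, w, o = rows
    translation_cm = [c / 10.0 for c in o]   # MVR translations are in millimeters
    return u, v, w, translation_cm

def cie_xyY_to_linear_srgb(x: float, y: float, Y: float):
    """Convert a CIE 1931 xyY color (Y normalized to 1.0) to linear sRGB."""
    X = x * Y / y
    Z = (1.0 - x - y) * Y / y
    r = 3.2406 * X - 1.5372 * Y - 0.4986 * Z
    g = -0.9689 * X + 1.8758 * Y + 0.0415 * Z
    b = 0.0557 * X - 0.2040 * Y + 1.0570 * Z
    return r, g, b

matrix = ("{-1.000000,0.000000,0.000000}{0.000000,-1.000000,-0.000000}"
          "{0.000000,0.000000,1.000000}{-766.333333,4572.000000,7620.000000}")
print(parse_mvr_matrix(matrix)[3])                        # translation in centimeters
print(cie_xyY_to_linear_srgb(0.312712, 0.329008, 1.0))    # ~white (D65)
```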
# Alternate builds
## Build for Unreal 5.3
Unreal 5.3 uses USD 23.02
Use the following dependency
```
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="usd.py310.${platform}.usdview.${config}" version="0.23.02-tc.1+pxr-23.02-build-patch.9ed269df" />
</dependency>
```
- In your Unreal project, enable the USD Importer plugin
- Create a subfolder in your Unreal Project ex : `MyProject/Content/USDPlugins`
- Copy the plugInfo.json and the mvrFileFormat.dll at the root of this folder
- Adapt the plugInfo.json :
- `"LibraryPath": "mvrFileFormat.dll"`
- `"ResourcePath": "."`,
- `"Root": "."`
- Add the `MyProject/Content/USDPlugins` in Edit > Project Settings > USDImporter > Additional Plugin Directories
Note:
Unreal will complain about missing dlls.
A quick fix is to copy the following dlls (take them from packman) into `...UE_5.3\Engine\Binaries\Win64`:
- `boost_python310-vc142-mt-x64-1_78.dll`
- `python310.dll`
## Build for Blender 3.6.x or 4.0
While waiting for a cleaner way to provide build support for Blender, here is a step-by-step guide.
Use the following dependency.
```
<dependency name="blender_usd" linkPath="../_build/usd-deps/nv-usd">
<package name="blender_usd" version="63380-py310-usd23.05-windows-x86_64"/>
</dependency>
```
In the `repo.toml`
Modify the USD dependencies.
```
usd_lib_dependencies = [
"ms"
]
```
Remove `%{config}` after `usd_root`
```
usd_root = "${root}/_build/usd-deps/nv-usd"
```
Copy the Plugin folder : `omniverse-mvr-extension/_install/windows-x86_64/release/mvrFileFormat`
into your Blender Plugin folder `BLENDER_ROOT/blender.shared/usd`
# Resources
- Inspired by : [NVIDIA's usd-plugin-sample](https://github.com/NVIDIA-Omniverse/usd-plugin-samples/)
- [MVR and GDTF homepage with Fixture Library](https://gdtf-share.com/)
- [Specifications Github repostory](https://github.com/mvrdevelopment/spec)
- [Gdtf.eu](https://gdtf.eu/docs/list-of-projects/)
# Known limitations
- GDTF files using 3ds models are supported but require the Python 3.10 CLI to be installed on the host computer.
- Some MVR or GDTF files might fail to convert due to invalid or incomplete files.
- Only lighting devices are supported; prefer importing MVR files that do not contain trusses, for instance, as they could lead to strange behaviors and crashes. | 15,288 | Markdown | 55.003663 | 336 | 0.652276 |
MomentFactory/Omniverse-MVR-GDTF-converter/deps/target-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="libcurl" linkPath="../_build/target-deps/libcurl">
<package name="libcurl" version="8.1.2-3-${platform}-static-release"/>
</dependency>
<dependency name="zlib" linkPath="../_build/target-deps/zlib">
<package name="zlib" version="1.2.13+nv1-${platform}" />
</dependency>
<dependency name="openssl" linkPath="../_build/target-deps/openssl">
<package name="openssl" version="3.0.10-3-${platform}-static-release" />
</dependency>
<dependency name="omni-geospatial" linkPath="../_build/target-deps/omni-geospatial">
<package name="omni-geospatial" version="2.0.3-pxr_23_05+mr17.384.337fb43b.tc.${platform}.${config}" />
</dependency>
</project> | 718 | XML | 50.357139 | 107 | 0.683844 |
MomentFactory/Omniverse-MVR-GDTF-converter/deps/usd-deps.packman - Copy.xml | <project toolsVersion="5.6">
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-win64_py310_${config}-dev_omniverse" platforms="windows-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux64_py310-centos_${config}-dev_omniverse" platforms="linux-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux-aarch64_py310_${config}-dev_omniverse" platforms="linux-aarch64" />
</dependency>
<dependency name="python" linkPath="../_build/usd-deps/python">
<package name="python" version="3.10.10+nv1-${platform}" />
</dependency>
</project> | 681 | XML | 67.199993 | 136 | 0.694567 |
MomentFactory/Omniverse-MVR-GDTF-converter/deps/repo-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="repo_usd" linkPath="../_repo/repo_usd">
<package name="repo_usd" version="4.0.1" />
</dependency>
</project> | 171 | XML | 33.399993 | 61 | 0.619883 |
MomentFactory/Omniverse-MVR-GDTF-converter/deps/usd-deps.packman.xml | <project toolsVersion="5.6">
<dependency name="nv-usd" linkPath="../_build/usd-deps/nv-usd/${config}">
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-win64_py310_${config}-dev_omniverse" platforms="windows-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux64_py310-centos_${config}-dev_omniverse" platforms="linux-x86_64" />
<package name="nv-usd" version="22.11.nv.0.2.1071.7d2f59ad-linux-aarch64_py310_${config}-dev_omniverse" platforms="linux-aarch64" />
</dependency>
<dependency name="python" linkPath="../_build/usd-deps/python">
<package name="python" version="3.10.10+nv1-${platform}" />
</dependency>
</project> | 681 | XML | 67.199993 | 136 | 0.694567 |
MomentFactory/Omniverse-MVR-GDTF-converter/PACKAGE-LICENSE/USD-LICENSE.md | Universal Scene Description (USD) components are licensed under the following terms:
Modified Apache 2.0 License
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor
and its affiliates, except as required to comply with Section 4(c) of
the License and to reproduce the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
============================================================
RapidJSON
============================================================
Tencent is pleased to support the open source community by making RapidJSON available.
Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
If you have downloaded a copy of the RapidJSON binary from Tencent, please note that the RapidJSON binary is licensed under the MIT License.
If you have downloaded a copy of the RapidJSON source code from Tencent, please note that RapidJSON source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of RapidJSON into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within RapidJSON. To avoid the problematic JSON license in your own projects, it's sufficient to exclude the bin/jsonchecker/ directory, as it's the only code under the JSON license.
A copy of the MIT License is included in this file.
Other dependencies and licenses:
Open Source Software Licensed Under the BSD License:
--------------------------------------------------------------------
The msinttypes r29
Copyright (c) 2006-2013 Alexander Chemeris
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Open Source Software Licensed Under the JSON License:
--------------------------------------------------------------------
json.org
Copyright (c) 2002 JSON.org
All Rights Reserved.
JSON_checker
Copyright (c) 2002 JSON.org
All Rights Reserved.
Terms of the JSON License:
---------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Terms of the MIT License:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
============================================================
pygilstate_check
============================================================
The MIT License (MIT)
Copyright (c) 2014, Pankaj Pandey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================
double-conversion
============================================================
Copyright 2006-2011, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
============================================================
OpenEXR/IlmBase/Half
============================================================
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
============================================================
Apple Technical Q&A QA1361 - Detecting the Debugger
https://developer.apple.com/library/content/qa/qa1361/_index.html
============================================================
Sample code project: Detecting the Debugger
Version: 1.0
Abstract: Shows how to determine if code is being run under the debugger.
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
============================================================
LZ4
============================================================
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2017, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://www.lz4.org
- LZ4 source repository : https://github.com/lz4/lz4
============================================================
stb
============================================================
stb_image - v2.19 - public domain image loader - http://nothings.org/stb
no warranty implied; use at your own risk
stb_image_resize - v0.95 - public domain image resizing
by Jorge L Rodriguez (@VinoBS) - 2014
http://github.com/nothings/stb
stb_image_write - v1.09 - public domain - http://nothings.org/stb/stb_image_write.h
writes out PNG/BMP/TGA/JPEG/HDR images to C stdio - Sean Barrett 2010-2015
no warranty implied; use at your own risk
ALTERNATIVE B - Public Domain (www.unlicense.org)
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
software, either in source code form or as a compiled binary, for any purpose,
commercial or non-commercial, and by any means.
In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and to
the detriment of our heirs and successors. We intend this dedication to be an
overt act of relinquishment in perpetuity of all present and future rights to
this software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| 26,573 | Markdown | 57.792035 | 739 | 0.727317 |
MomentFactory/Omniverse-MVR-GDTF-converter/src/usd-plugins/fileFormat/mvrFileFormat/mvrFileFormat.cpp | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mvrFileFormat.h"
#include <pxr/base/tf/diagnostic.h>
#include <pxr/base/tf/stringUtils.h>
#include <pxr/base/tf/token.h>
#include <pxr/usd/usd/prim.h>
#include <pxr/usd/usd/stage.h>
#include <pxr/usd/usd/usdaFileFormat.h>
#include <pxr/usd/usdGeom/mesh.h>
#include <pxr/usd/usdGeom/scope.h>
#include <pxr/usd/usdGeom/camera.h>
#include <pxr/usd/usdGeom/cube.h>
#include <pxr/usd/usdGeom/xformable.h>
#include <pxr/usd/usdGeom/xform.h>
#include <pxr/usd/usdLux/rectLight.h>
#include <pxr/base/gf/matrix3f.h>
#include <pxr/base/gf/rotation.h>
#include <pxr/base/gf/vec3f.h>
#include <pxr/usd/usd/payloads.h>
#include "mvrParser/MVRParser.h"
#include "../gdtfFileFormat/gdtfUsdConverter.h"
#include <iostream>
#define _SILENCE_EXPERIMENTAL_FILESYSTEM_DEPRECATION_WARNING
#include <experimental/filesystem>
PXR_NAMESPACE_OPEN_SCOPE
MvrFileFormat::MvrFileFormat() : SdfFileFormat(
MvrFileFormatTokens->Id,
MvrFileFormatTokens->Version,
MvrFileFormatTokens->Target,
MvrFileFormatTokens->Extension)
{
}
MvrFileFormat::~MvrFileFormat()
{
}
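// Accept every candidate path; any real validation is left to Read().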
bool MvrFileFormat::CanRead(const std::string& filePath) const
{
return true;
}
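// Turn an arbitrary MVR name into a valid USD prim identifier,
// e.g. an empty name becomes "Default" and a bare index such as "3" becomes "_3".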
static std::string CleanNameForUSD(const std::string& name)
{
std::string cleanedName = name;
if(cleanedName.size() == 0)
{
return "Default";
}
if(cleanedName.size() == 1 && !TfIsValidIdentifier(cleanedName))
{
// If we have an index as a name, we only need to add _ beforehand.
return CleanNameForUSD("_" + cleanedName);
}
return TfMakeValidIdentifier(cleanedName);
}
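// Build the USD scene for an .mvr archive: parse the archive, convert each
// referenced GDTF fixture, and transfer the generated content into the target layer.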
bool MvrFileFormat::Read(SdfLayer* layer, const std::string& resolvedPath, bool metadataOnly) const
{
	// these macros emit methods defined in the Pixar namespace but not
	// properly scoped, so we use the namespace directive locally here.
	// Strictly speaking the Pixar namespace scope is already open above,
	// because the macros' auto-generated content cannot be used from
	// non-Pixar namespaces.
PXR_NAMESPACE_USING_DIRECTIVE
if (!TF_VERIFY(layer))
{
return false;
}
// Parse MVR file
// ---------------------
using namespace MVR;
auto parser = MVRParser();
auto layers = parser.ParseMVRFile(resolvedPath);
// Create USD Schema
// ------------------------
SdfLayerRefPtr newLayer = SdfLayer::CreateAnonymous(".usd");
UsdStageRefPtr stage = UsdStage::Open(newLayer);
auto xformPath = SdfPath("/mvr_payload");
auto defaultPrim = UsdGeomXform::Define(stage, xformPath);
stage->SetDefaultPrim(defaultPrim.GetPrim());
for(const auto& layer : layers)
{
const std::string cleanName = CleanNameForUSD(layer.name);
const auto& layerPath = xformPath.AppendChild(TfToken(CleanNameForUSD(layer.name)));
auto layerUsd = UsdGeomScope::Define(stage, layerPath);
for(const auto& fixture : layer.fixtures)
{
const std::string cleanFixtureName = CleanNameForUSD(fixture.Name + fixture.UUID);
const auto& fixturePath = layerPath.AppendChild(TfToken(cleanFixtureName));
const auto& fixtureUsd = UsdGeomXform::Define(stage, fixturePath);
GDTF::GDTFSpecification gdtfSpec = parser.GetGDTFSpecification(fixture.GDTFSpec);
GDTF::ConvertToUsd(gdtfSpec, stage, fixturePath.GetAsString());
GfMatrix4d transform = GfMatrix4d(
fixture.Matrix[0][0], fixture.Matrix[0][1], fixture.Matrix[0][2], 0,
fixture.Matrix[1][0], fixture.Matrix[1][1], fixture.Matrix[1][2], 0,
fixture.Matrix[2][0], fixture.Matrix[2][1], fixture.Matrix[2][2], 0,
fixture.Matrix[3][0], fixture.Matrix[3][1], fixture.Matrix[3][2], 1
);
// Offset matrix
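			// rotateMinus90deg is a -90 degree rotation about X; presumably this maps
			// the Z-up MVR coordinate system onto USD's default Y-up convention.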
GfMatrix3d rotateMinus90deg = GfMatrix3d(1, 0, 0,
0, 0, 1,
0, -1, 0);
// Translation
//transform = transform.GetTranspose();
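			// Assumption: the 0.1 factor rescales the MVR translation (millimetres in
			// the MVR spec) into the converter's working units.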
GfVec3d translation = rotateMinus90deg * transform.ExtractTranslation() * 0.1;
// Rotation
GfRotation rotation = transform.ExtractRotation();
GfVec3d euler = rotation.Decompose(GfVec3f::XAxis(), GfVec3f::YAxis(), GfVec3f::ZAxis());
GfVec3d rotate = rotateMinus90deg * euler; // we somehow have a complete 180 offset here.
// Set transform
auto fixtureXform = UsdGeomXformable(fixtureUsd);
fixtureXform.ClearXformOpOrder();
fixtureXform.AddTranslateOp().Set(translation);
fixtureXform.AddScaleOp().Set(GfVec3f(100, 100, 100));
fixtureXform.AddRotateYXZOp(UsdGeomXformOp::PrecisionDouble).Set(rotate);
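			// Carry the raw MVR fixture metadata over as custom attributes in the
			// "mf:mvr" namespace.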
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:name"), pxr::SdfValueTypeNames->String).Set(fixture.Name);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:uuid"), pxr::SdfValueTypeNames->String).Set(fixture.UUID);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:GDTFSpec"), pxr::SdfValueTypeNames->String).Set(fixture.GDTFSpec);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:GDTFMode"), pxr::SdfValueTypeNames->String).Set(fixture.GDTFMode);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:Classing"), pxr::SdfValueTypeNames->String).Set(fixture.Classing);
			std::string allCommands = "[";
			for(auto cc : fixture.CustomCommands)
			{
				allCommands += cc + ",";
			}
			allCommands += "]";
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:CustomCommands"), pxr::SdfValueTypeNames->String).Set(allCommands);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:CIEColor"), pxr::SdfValueTypeNames->String).Set(fixture.CieColor);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:FixtureID"), pxr::SdfValueTypeNames->UInt).Set(fixture.FixtureID);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:UnitNumber"), pxr::SdfValueTypeNames->UInt).Set(fixture.UnitNumber);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:FixtureTypeId"), pxr::SdfValueTypeNames->UInt).Set(fixture.FixtureTypeID);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:CustomId"), pxr::SdfValueTypeNames->UInt).Set(fixture.CustomId);
fixtureUsd.GetPrim().CreateAttribute(TfToken("mf:mvr:CastShadow"), pxr::SdfValueTypeNames->Bool).Set(fixture.CastShadows);
}
}
layer->TransferContent(newLayer);
return true;
}
bool MvrFileFormat::WriteToString(const SdfLayer& layer, std::string* str, const std::string& comment) const
{
// this POC doesn't support writing
return false;
}
bool MvrFileFormat::WriteToStream(const SdfSpecHandle& spec, std::ostream& out, size_t indent) const
{
// this POC doesn't support writing
return false;
}
bool MvrFileFormat::_ShouldSkipAnonymousReload() const
{
return false;
}
bool MvrFileFormat::_ShouldReadAnonymousLayers() const
{
return true;
}
void MvrFileFormat::ComposeFieldsForFileFormatArguments(const std::string& assetPath, const PcpDynamicFileFormatContext& context, FileFormatArguments* args, VtValue* contextDependencyData) const
{
return;
}
bool MvrFileFormat::CanFieldChangeAffectFileFormatArguments(const TfToken& field, const VtValue& oldValue, const VtValue& newValue, const VtValue& contextDependencyData) const
{
return true;
}
// these macros emit methods defined in the Pixar namespace
// but not properly scoped, so we have to use the namespace
// locally here
TF_DEFINE_PUBLIC_TOKENS(
MvrFileFormatTokens,
((Id, "mvrFileFormat"))
((Version, "1.0"))
((Target, "usd"))
((Extension, "mvr"))
);
TF_REGISTRY_FUNCTION(TfType)
{
SDF_DEFINE_FILE_FORMAT(MvrFileFormat, SdfFileFormat);
}
PXR_NAMESPACE_CLOSE_SCOPE | 7,940 | C++ | 32.506329 | 194 | 0.732116 |
MomentFactory/Omniverse-MVR-GDTF-converter/src/usd-plugins/fileFormat/mvrFileFormat/api.h | // Copyright 2023 NVIDIA CORPORATION
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef OMNI_MVR_API_H_
#define OMNI_MVR_API_H_
#include "pxr/base/arch/export.h"
#if defined(PXR_STATIC)
# define MVR_API
# define MVR_API_TEMPLATE_CLASS(...)
# define MVR_API_TEMPLATE_STRUCT(...)
# define EDF_LOCAL
#else
# if defined(MVRFILEFORMAT_EXPORTS)
# define MVR_API ARCH_EXPORT
# define MVR_API_TEMPLATE_CLASS(...) ARCH_EXPORT_TEMPLATE(class, __VA_ARGS__)
# define MVR_API_TEMPLATE_STRUCT(...) ARCH_EXPORT_TEMPLATE(struct, __VA_ARGS__)
# else
# define MVR_API ARCH_IMPORT
# define MVR_API_TEMPLATE_CLASS(...) ARCH_IMPORT_TEMPLATE(class, __VA_ARGS__)
# define MVR_API_TEMPLATE_STRUCT(...) ARCH_IMPORT_TEMPLATE(struct, __VA_ARGS__)
# endif
# define EDF_LOCAL ARCH_HIDDEN
#endif
#endif | 1,340 | C | 34.289473 | 85 | 0.701493 |