eliabntt/GRADE-RR/additional_scripts/zebra_filter_compress.sh | #!/bin/bash
set -e
# --help: first arg is the input folder, second is the output folder
if [ "$1" == "--help" ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# Check that exactly two args were given
if [ $# -ne 2 ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# input path is the first argument
input_path=$1
# output path is the second argument
output_main_path=$2
# loop through all the folders in the input directory
for folder in $input_path/*; do
# create the output directory if it doesn't exist
output_path=$output_main_path/$(basename $folder)
mkdir -p $output_path
# copy all npy, usd, png, yaml, log files from the input directory to the output directory
find $folder -maxdepth 1 -type f -name "*.npy" -exec cp {} $output_path \;
find $folder -maxdepth 1 -type f -name "*.usd" -exec cp {} $output_path \;
find $folder -maxdepth 1 -type f -name "*.png" -exec cp {} $output_path \;
find $folder -maxdepth 1 -type f -name "*.yaml" -exec cp {} $output_path \;
find $folder -maxdepth 1 -type f -name "*.log" -exec cp {} $output_path \;
# list all Viewport directories in $folder and store them in the viewports variable
viewports=$(find $folder -maxdepth 1 -type d -name "Viewport*")
# for every viewport in viewports mkdir in $output_path
for viewport in $viewports;
do
mkdir -p "$output_path/$(basename $viewport)/rgb"
# for every file in rgb, if it is a .png file, copy it to the rgb directory while converting it to jpg
mkdir -p "$output_path/$(basename $viewport)"
# copy all the folders except for rgb
find $viewport -maxdepth 1 -type d ! -type l -not -name "rgb" ! -path $viewport -exec cp {} "$output_path/$(basename $viewport)" -r \;
# create the rgb directory
for file in $viewport/rgb/*; do
if [[ $file == *.png ]]
then
# get the file name
file_name=$(basename $file)
# convert the file to jpg
convert $file "$output_path/$(basename $viewport)/rgb/${file_name%.png}.jpg"
fi
done
done
# compress the output directory into a tar.gz file and delete the output directory
tar -czvf $output_path.tar.gz $output_path --remove-files
done |
eliabntt/GRADE-RR/additional_scripts/process_dataset.sh | #!/bin/bash
# --help: first arg is the input folder, second is the output folder
if [ "$1" == "--help" ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# Check if number of args is two
if [ $# -ne 2 ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# input path is the first argument
input_path=$1
# output path is the second argument
output_path=$2
# for all and only the folders in $input_path run filter_compress.sh
for folder in $input_path/*; do
if [ -d $folder ]; then
echo "Processing $folder"
./filter_compress.sh $folder $output_path
fi
done
|
eliabntt/GRADE-RR/additional_scripts/pixel_to_world.py | """
This code serve as an example to project the points from the pixel coordinates to the world coordinates.
You need the camera pose and projection matrix, as well as clearly the pixel depth.
Those are available in the viewport folder, for example:
Viewport0/camera
Viewport0/depth (or depthLinear)
You will load the camera viewport_mat from the camera folder.
This dictionary will have the view projection matrix and the global camera pose
They use a near/far clipping plane model, and not a focal length model.
At the end of the file you can also check how to use the focal length model, but you need to know the focal length of the camera
"""
import os

import numpy as np

# viewport is the path to the Viewport folder, i is the frame index
viewport_mat = np.load(os.path.join(viewport, 'camera', f'{i}.npy'), allow_pickle=True)
# in Isaac view_projection is np.dot(view_matrix, proj_matrix)
# view_matrix is local to world, i.e. the inverse of the pose matrix
# the proj_matrix use the near far clipping plane model
# a = -1.0 / np.tan(np.radians(fov / 2))
# b = -a * aspect_ratio
# c = z_far / (z_far - z_near)
# d = z_near * z_far / (z_far - z_near)
# Construct the camera projection matrix
# projection_matrix = np.array([
# [a, 0.0, 0.0, 0.0],
# [0.0, b, 0.0, 0.0],
# [0.0, 0.0, c, 1.0],
# [0.0, 0.0, d, 0.0]
# ])
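# A runnable version of the projection model described in the comments above
# (a sketch; fov is the field of view in degrees, the other names are as above):
def build_projection_matrix(fov, aspect_ratio, z_near, z_far):
    a = -1.0 / np.tan(np.radians(fov / 2))
    b = -a * aspect_ratio
    c = z_far / (z_far - z_near)
    d = z_near * z_far / (z_far - z_near)
    return np.array([
        [a, 0.0, 0.0, 0.0],
        [0.0, b, 0.0, 0.0],
        [0.0, 0.0, c, 1.0],
        [0.0, 0.0, d, 0.0]])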
view_mat = viewport_mat.item()["view_projection_matrix"]
pose_mat = viewport_mat.item()["pose"]
inv_VP = np.linalg.inv(view_mat)
pixel_x = ....
pixel_y = ....
pixel_d = ....
width = viewport_mat.item()['resolution']['width']
height = viewport_mat.item()['resolution']['height']
F = viewport_mat.item()['clipping_range'][1]
N = viewport_mat.item()['clipping_range'][0]
W = -pixel_d
ndc_x = (2 * pixel_x) / width - 1
ndc_y = 1 - (2 * pixel_y) / height
Z = ( (W*F/(F-N)) + N*F/(F-N) )/(W)
xyz = np.array([ndc_x, ndc_y, Z, 1]) * W
xyz = np.dot(xyz, inv_VP)
# alternatively consider that a = -fx, b = fy, cx = width / 2, cy = height / 2
# and that the pose_mat has the translation in the last ROW (in unit coordinates, so mind the scale)
tmp = np.dot(pose_mat, view_mat)
fx = -tmp[0,0]
fy = tmp[1,1]
cx = width / 2
cy = height / 2
x = (pixel_x - cx) * pixel_d / fx
y = (pixel_y - cy) * pixel_d / fy
# note: depending on the camera convention, the sign of the z component may need flipping
pt = [x, y, pixel_d, 1]
xyz = np.dot(pose_mat.T, pt)[:3]
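# A minimal, self-contained sketch bundling the NDC route above into one function.
# It assumes the row-vector convention used above (point @ matrix) and a positive
# pixel depth; the parameter names are illustrative.
def pixel_to_world(pixel_x, pixel_y, pixel_d, view_projection, width, height, z_near, z_far):
    """Back-project a pixel with depth into world coordinates."""
    inv_vp = np.linalg.inv(view_projection)
    w = -pixel_d  # the camera looks down -Z, so the view-space depth is negative
    ndc_x = (2.0 * pixel_x) / width - 1.0
    ndc_y = 1.0 - (2.0 * pixel_y) / height
    ndc_z = (w * z_far / (z_far - z_near) + z_near * z_far / (z_far - z_near)) / w
    clip = np.array([ndc_x, ndc_y, ndc_z, 1.0]) * w  # undo the perspective divide
    return np.dot(clip, inv_vp)[:3]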
|
eliabntt/GRADE-RR/additional_scripts/filter_compress.sh | #!/bin/bash
set -e
# --help: first arg is the input folder, second is the output folder
if [ "$1" == "--help" ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# Check that exactly two args were given
if [ $# -ne 2 ]; then
echo "Usage: $0 <input_folder> <output_folder>"
exit 1
fi
# input path is the first argument
input_path=$1
# output path is the second argument
output_path=$2
# create the output directory if it doesn't exist
mkdir -p $output_path
# copy Viewport0_occluded from the input directory to the output directory
# cp $input_path/Viewport0_occluded $output_path/Viewport0_occluded -r
# copy all npy, usd, png, yaml, log files from the input directory to the output directory
find $input_path -maxdepth 1 -type f -name "*.npy" -exec cp {} $output_path \;
find $input_path -maxdepth 1 -type f -name "*.usd" -exec cp {} $output_path \;
find $input_path -maxdepth 1 -type f -name "*.png" -exec cp {} $output_path \;
find $input_path -maxdepth 1 -type f -name "*.yaml" -exec cp {} $output_path \;
find $input_path -maxdepth 1 -type f -name "*.log" -exec cp {} $output_path \;
# copy the folders whose names do not start with Viewport0 from the input directory to the output directory
find $input_path -maxdepth 1 -type d ! -type l -not -name "Viewport0*" ! -path $input_path -exec cp {} $output_path -r \;
# copy all the .bag* files from the input directory to the output directory
find $input_path -maxdepth 1 -type f -name "*.bag*" -exec cp {} $output_path \;
# for every *.bag* file with .active in the name, rename it, run rosbag reindex, and remove the .orig backup
for file in $output_path/*.bag*; do
if [[ $file == *.active ]]
then
echo "Reindexing $file"
# get substring of $file without the .active extension
file_new=${file%.active}
mv $file $file_new
rosbag reindex $file_new
rm ${file_new%.bag}.orig.bag
fi
done
# for each .bag file in the output directory, filter it down to the topics of interest, compress it, and delete the .orig backup
for bag in $output_path/*.bag; do
echo "Compressing $bag"
# get the file name
file_name=$(basename $bag)
# move $bag to the same directory with old_ prepended to its name
mv $bag $output_path/old_$file_name
# bag now points to the renamed original
bag=$output_path/old_$file_name
# newbag reuses the original filename for the filtered bag
newbag=$output_path/$file_name
rosbag filter $bag $newbag "'joint' in topic or \
'tf' in topic or \
'imu' in topic or \
'odom' in topic or \
'pose' in topic or \
'camera_link/1' in topic or \
'clock' in topic or \
'command' in topic or \
'exploration_node' in topic or \
'predicted_state' in topic or \
'reference_trajectory' in topic"
rm $bag
rosbag compress $newbag
rm ${newbag%.bag}.orig.bag
done |
eliabntt/GRADE-RR/additional_scripts/get_benchbot.sh | #!/bin/bash
if [ "$#" -eq 1 ]; then
cd $1
fi
if [ "$#" -gt 1 ]; then
echo "illegal number of params"
exit
fi
wget https://cloudstor.aarnet.edu.au/plus/s/n9PDshcQZiCc1h0/download -O challenge.zip
mkdir bb_challenge
unzip challenge.zip -d bb_challenge
cd bb_challenge
rm *.yaml
mv .sim_data/* ./
cd ..
#rm challenge.zip
wget https://cloudstor.aarnet.edu.au/plus/s/7lEK6dBl0zVvA5D/download -O develop.zip
mkdir bb_develop
unzip develop.zip -d bb_develop
cd bb_develop
rm *.yaml
mv .sim_data/* ./
cd ..
|
eliabntt/GRADE-RR/additional_scripts/process_bag.sh | #!/bin/bash
set -e
# get input main folder
input_main_folder=$1
# get the temp folder
temp_folder=$2
# if the temp folder does not exist create it
mkdir -p $temp_folder
shouldIprocess=false
# b0fe48b1-d6b1-4854-ba04-111d22289522 - tmp
# e2104869-2823-4736-9e92-bc25fd7c9502 - tmp2
# for each folder in input_main_folder echo the name
for folder in $input_main_folder/* ; do
folder_name=$(basename $folder)
# when we reach this folder name, set shouldIprocess to true (resume point)
if [ $folder_name == "e3080420-c235-480d-8122-9ba120001e5e" ]; then
shouldIprocess=true
fi
# if shouldIprocess is false continue
if [ $shouldIprocess == false ]; then
echo "not processing $folder_name"
continue
fi
echo "processing $folder_name"
# create a folder in temp_folder with the folder name
mkdir -p $temp_folder/$folder_name
# copy the bag files to the temp folder
cp $folder/*.bag* $temp_folder/$folder_name
# run the GRADE_tools preprocessing: first reindex the bags, then extract the data
/media/ebonetto/WindowsData/GRADE_tools/preprocessing/process_data.sh -t bag -p $temp_folder/$folder_name
/media/ebonetto/WindowsData/GRADE_tools/preprocessing/process_data.sh -t extract -p $temp_folder/$folder_name/reindex_bags
# make a static_bag and a dynamic_bag folder in $folder
mkdir -p $folder/static_bag
mkdir -p $folder/dynamic_bag
mkdir -p $temp_folder/$folder_name/static_bag
mkdir -p $temp_folder/$folder_name/dynamic_bag
reindex_folder=$temp_folder/$folder_name/reindex_bags
for bag in $reindex_folder/*.bag; do
file_name=$(basename $bag)
newbag=$temp_folder/$folder_name/static_bag/$file_name
rosbag filter $bag $newbag "'camera_link/1' not in topic"
rosbag compress $newbag
rm ${newbag%.bag}.orig.bag
newbag=$temp_folder/$folder_name/dynamic_bag/$file_name
rosbag filter $bag $newbag "'camera_link/0' not in topic"
rosbag compress $newbag
rm ${newbag%.bag}.orig.bag
done
# compress each remaining .bag in the temp folder and copy it back to $folder
for bag in $temp_folder/$folder_name/*.bag; do
rosbag compress $bag
rm ${bag%.bag}.orig.bag
cp $bag $folder
done
# rm *.bag.active in $folder if they exist
rm $folder/*.bag.active || true
# mv all the static_bag and dynamic_bag to the $folder
mv $temp_folder/$folder_name/static_bag/* $folder/static_bag
mv $temp_folder/$folder_name/dynamic_bag/* $folder/dynamic_bag
mv $temp_folder/$folder_name/reindex_bags/data $folder/exp_data
# remove the temp folder
rm -rf $temp_folder/$folder_name
done |
eliabntt/GRADE-RR/additional_scripts/check_folders.py | """
Use this to check if all the files/folders are there
"""
import os
import ipdb
mainpath = "/ps/project/irotate/"
folders = ["DE_lot_obs_cam0"]
tocheck = ["bbox_2d_loose","bbox_2d_tight","bbox_3d","camera","depthLinear","instance","poses","rgb"]
for mainfolder in folders:
for folder in os.listdir(os.path.join(mainpath, mainfolder)):
for subfolder in [os.path.join(mainpath, mainfolder, folder, "Viewport0"), os.path.join(mainpath, mainfolder, folder, "Viewport0_occluded")]:
print(subfolder)
data = os.listdir(subfolder)
if len(data) > len(tocheck):
print("More folders than expected")
print(subfolder)
ipdb.set_trace()
if len(data) < len(tocheck):
print("Fewer folders than expected")
print(subfolder)
ipdb.set_trace()
for f in data:
if f not in tocheck:
continue
if len(os.listdir(os.path.join(subfolder, f))) != 1801: # expected number of files per sensor folder
print("Unexpected number of files in folder")
print(os.path.join(subfolder, f))
ipdb.set_trace()
|
eliabntt/GRADE-RR/additional_scripts/in_place_filter_compress.sh | #!/bin/bash
# get the input dir
input_path=$1
for folder_base in $input_path; do
# for each of the D, DH, WOH, WO, F, FH subfolders (these keep the camera_link/1 topics)
for folder in $folder_base"/D" $folder_base"/DH" $folder_base"/WOH" $folder_base"/WO" $folder_base"/F" $folder_base"/FH" ; do
echo $folder
# set output_path as folder
output_path=$folder
# for each .bag file in the output directory, filter it down to the topics of interest, compress it, and delete the .orig backup
for bag in $output_path/*.bag; do
echo "Compressing $bag"
# get the file name
file_name=$(basename $bag)
# move $bag to the same directory with old_ prepended to its name
mv $bag $output_path/old_$file_name
# bag now points to the renamed original
bag=$output_path/old_$file_name
# newbag reuses the original filename for the filtered bag
newbag=$output_path/$file_name
rosbag filter $bag $newbag "'joint' in topic or \
'tf' in topic or \
'imu' in topic or \
'odom' in topic or \
'pose' in topic or \
'camera_link/1' in topic or \
'clock' in topic or \
'command' in topic or \
'exploration_node' in topic or \
'predicted_state' in topic or \
'reference_trajectory' in topic"
rm $bag
rosbag compress $newbag
rm ${newbag%.bag}.orig.bag
done
done
for folder in $folder_base"/S" $folder_base"/SH" ; do
echo $folder
# set output_path as folder
output_path=$folder
# for each .bag file in the output directory, filter it down to the topics of interest, compress it, and delete the .orig backup
for bag in $output_path/*.bag; do
echo "Compressing $bag"
# get the file name
file_name=$(basename $bag)
# move $bag to the same directory with old_ prepended to its name
mv $bag $output_path/old_$file_name
# bag now points to the renamed original
bag=$output_path/old_$file_name
# newbag reuses the original filename for the filtered bag
newbag=$output_path/$file_name
rosbag filter $bag $newbag "'joint' in topic or \
'tf' in topic or \
'imu' in topic or \
'odom' in topic or \
'pose' in topic or \
'camera_link/0' in topic or \
'clock' in topic or \
'command' in topic or \
'exploration_node' in topic or \
'predicted_state' in topic or \
'reference_trajectory' in topic"
rm $bag
rosbag compress $newbag
rm ${newbag%.bag}.orig.bag
done
done
done
|
eliabntt/GRADE-RR/additional_scripts/process_paths/parser_config.yaml | cc_path: "../.." # set your cc_texture path
prefix_cc: ""
front3d_path: "../.." # set your global 3d_front path
prefix_front3d: ""
cloth3d_path: "../../.."
prefix_cloth3d: ""
surreal_path: "../.."
prefix_surreal: ""
normpath: True
|
eliabntt/GRADE-RR/additional_scripts/process_paths/change_paths.py | import argparse
import confuse
import os
def change_path(c_line, prefix, my_cc_path, match_str, normpath, remove_prefix=True):
if remove_prefix:
offset = len(match_str)
else:
offset = -1
path = os.path.join(my_cc_path + c_line[c_line.find(match_str) + offset:])
if normpath:
path = os.path.normpath(path[:path.rfind("@")].replace('\\',"/")) + path[path.rfind("@"):]
new_path = c_line[:c_line.find("@") + 1] + prefix + path
return new_path
parser = argparse.ArgumentParser(description="USD reference changer")
parser.add_argument("--config_file", type=str, default="parser_config.yaml")
parser.add_argument("--input", type=str)
parser.add_argument("--output_name", type=str, default="")
parser.add_argument("--output_dir", type=str, default="")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("USDRefChanger", __name__)
config.set_file(args.config_file)
config.set_args(args)
filename = config["input"].get()
output_loc = config["output_dir"].get()
if output_loc == "":
output_loc = os.path.dirname(config["input"].get())
out_name = config["output_name"].get()
if out_name == "":
out_name = os.path.basename(config["input"].get())[:-4] + "_proc.usda"
else:
if out_name[-4:] != "usda":
out_name += ".usda"
out_file_path = os.path.join(output_loc, out_name)
prefix_cc = config["prefix_cc"].get()
my_cc_path = config["cc_path"].get()
prefix_3dfront = config["prefix_front3d"].get()
my_front_path = config["front3d_path"].get()
prefix_cloth3d = config["prefix_cloth3d"].get()
my_cloth_path = config["cloth3d_path"].get()
prefix_surreal = config["prefix_surreal"].get()
my_surr_path = config["surreal_path"].get()
normpath = config["normpath"].get()
with open(out_file_path, "w") as o_file, open(filename, "r") as i_file:
lines = i_file.readlines()
for line in lines:
c_line = line
if ".png" in line or ".jpg" in line or ".jpeg" in line or ".tga" in line or ".tif" in line or ".bmp" in line and "cc_textures" not in line:
# remove 3D-FUTURE-model
if "3D-FUTURE-model" in line:
# import ipdb; ipdb.set_trace()
c_line = line.replace("3D-FUTURE-model/", "")
if "cc_textures" not in line: # and "../../" in line:
# import ipdb; ipdb.set_trace()
# add after ../../ 3D-FUTURE-model
l_index = c_line.find("../../")
c_line = c_line[:l_index+6] + "3D-FUTURE-model/" + c_line[l_index+6:]
if "opacity_constant" in line or "reflection_roughness_constant" in line or "metallic_constant" in line:
tmp = c_line.split(" ")
tmp[-1] = tmp[-1].replace("\n", "")
if "int" in tmp:
tmp[tmp.index("int")] = "float"
if float(tmp[-1]) == 0:
tmp[-1] = str(0.00001)
try:
tmp[-1] = str(format(float(tmp[-1])))
except:
import ipdb; ipdb.set_trace()
c_line = " ".join(tmp)+"\n"
elif "cc_textures" in line:
c_line = change_path(c_line, prefix_cc, my_cc_path, "cc_textures", normpath, remove_prefix=False)
elif "3DFRONT" in line or "3D-FUTURE" in line:
if "future" in line.lower():
c_line = change_path(c_line, prefix_3dfront, my_front_path, "3D-FUTURE-model", normpath)
else:
import ipdb;
ipdb.set_trace()
c_line = change_path(c_line, prefix_3dfront, my_front_path, "3DFRONT", normpath)
elif "cloth3d" in line:
c_line = change_path(c_line, prefix_cloth3d, my_cloth_path, "cloth_3d", normpath)
elif "surreal" in line:
c_line = change_path(c_line, prefix_surreal, my_surr_path, "surreal", normpath)
o_file.write(c_line)
|
eliabntt/GRADE-RR/additional_scripts/process_paths/get_human_skins.sh | #!/usr/bin/env bash
set -e
# get input main folder
input_main_folder=$1
out_main_folder=$2
# set output folder as Desktop
output_folder=/home/ebonetto/Desktop/output
# expand PATH and PYTHONPATH with
PATH=$PATH:/media/ebonetto/WindowsData/USD/install/bin
PYTHONPATH=$PYTHONPATH:/media/ebonetto/WindowsData/USD/install/lib/python
#if out_main_folder does not exist, create it
if [ ! -d "$out_main_folder" ]; then
mkdir -p $out_main_folder
fi
# for each folder in input_main_folder echo the name
for folder in $input_main_folder/* ; do
echo $folder
# get the folder name
folder_name=$(basename $folder)
# run usdcat on the usd file with the same name as the folder
usdcat $folder/$folder_name.usd -o $output_folder/$folder_name.usda
# remove _with_cache from folder_name
new_folder_name=${folder_name/_with_cache/}
echo $new_folder_name
mapping_folder=$out_main_folder/$new_folder_name
# if mapping_folder does not exist, create it
if [ ! -d "$mapping_folder" ]; then
mkdir -p $mapping_folder
fi
# extract the skin texture name (first 'grey' match) and write it to the mapping file
cat $output_folder/$folder_name.usda | grep grey | head -n 1 | cut -d '.' -f 1 | rev | cut -d '\' -f 1 | rev > $mapping_folder/$new_folder_name.txt
# rm the usda file
rm $output_folder/$folder_name.usda
done
|
eliabntt/GRADE-RR/additional_scripts/process_paths/process_env_paths.sh | #!/usr/bin/env bash
set -e
# get input main folder
input_main_folder=$1
# set output folder as Desktop
output_folder=/home/ebonetto/Desktop/output
# expand PATH and PYTHONPATH with
PATH=$PATH:/media/ebonetto/WindowsData/USD/install/bin
PYTHONPATH=$PYTHONPATH:/media/ebonetto/WindowsData/USD/install/lib/python
# set shouldIprocess to false
shouldIprocess=false
# for each folder in input_main_folder echo the name
for folder in $input_main_folder/* ; do
echo $folder
# get the folder name
folder_name=$(basename $folder)
# when we reach this folder name, set shouldIprocess to true (resume point)
if [ $folder_name == "36810ab3-d383-431d-9cda-f58c70c83c5e" ]; then
shouldIprocess=true
fi
# if not shouldIprocess continue
if [ $shouldIprocess == false ]; then
echo "not processing $folder_name"
continue
fi
# run usdcat on the usd file with the same name as the folder
usdcat $folder/$folder_name.usd -o $output_folder/$folder_name.usda
# run the python script to change the paths
python3 ./change_paths.py --input $output_folder/$folder_name.usda --output_dir $output_folder --output_name ${folder_name}_proc
# run usdcat on the usda file to convert it to usd
usdcat $output_folder/${folder_name}_proc.usda -o $output_folder/$folder_name.usd
# remove the usda files
rm $output_folder/$folder_name.usda
rm $output_folder/${folder_name}_proc.usda
# mv the new usd file to the input folder
mv $output_folder/$folder_name.usd $folder/$folder_name.usd
done
|
eliabntt/GRADE-RR/additional_scripts/process_paths/README.md | Requirements:
Please install the following packages:
[OpenUSD](https://github.com/PixarAnimationStudios/OpenUSD)
These files are useful to automatically change some text (e.g. asset paths) in the USD files.
In short, you edit the `change_paths.py` script to your needs and configure the `parser_config.yaml` config file.
Then you can run `process_paths.sh` to process the USD file.
The processing works as follows: USD -> convert to USDA -> process -> convert back to USD.
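For example, a typical invocation (the paths here are illustrative) is `./process_paths.sh /data/envs/my_env/my_env.usd my_env_proc /data/envs/my_env`, which writes the result to `/data/envs/my_env/my_env_proc.usd`; with only the input argument, the output lands next to the input as `<name>_proc.usd`.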
|
eliabntt/GRADE-RR/additional_scripts/process_paths/process_paths.sh | #!/bin/bash
set -e
if [ -z "$1" ]
then
echo "at least the file to process needs to be given as arg"
exit 1
fi
echo "Going to process file ${1}. This will create tmp/tmp.usda, a temporary processing usda, which will then be converted in a usd file"
echo
echo "We STRONGLY suggest that you review this script, it may end up overwrite or delete your files."
echo "Note that, except for that, you can run this script safely"
read -p "Are you sure you want to run this? " -n 1 -r
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
exit 1
fi
mkdir -p tmp
usdcat -o tmp/tmp.usda $1
if [ -z "$2" ]
then
name=$(dirname $1)
base=$(basename $1 .usd)
echo "No argument specified. The resulting file will be in ${name}/${base}_proc.usd"
python ./change_paths.py --input tmp/tmp.usda --output_dir ${name} --output_name ${base}_proc
usdcat -o ${name}/${base}_proc.usd ${name}/${base}_proc.usda
rm ${name}/${base}_proc.usda
rm tmp/tmp.usda
elif [ -z "$3" ]
then
name=$(dirname $1)
base=$2
basename=$(basename $1 .usd)
echo "No output directory specified. The resulting file will be in ${name}/${2}.usd"
python ./change_paths.py --input $1 --output_name $2 --output_dir ${name}
usdcat -o ${name}/${2}.usd ${name}/${2}.usda
rm tmp/tmp.usda
rm ${name}/${2}.usda
elif [ -z "$4" ]
then
echo "The resulting file will be in ${3}/{2}.usd"
mkdir -p $3
python ./change_paths.py --input $1 --output_name $2 --output_dir $3
usdcat ${3}/${2}.usda -o ${3}/${2}.usd
rm tmp/tmp.usda
rm ${3}/${2}.usda
fi
|
eliabntt/GRADE-RR/scripts/kill.sh | pkill -f -9 exploration_node
pkill -f -9 mav_nonlinear_mpc
pkill -f -9 waypoint_generator
pkill -f -9 custom_joint_controller_ros
pkill -f -9 nonlinear_mpc_node
pkill -f -9 rosout
pkill -f -9 my_robot
pkill -f -9 static_transform_publisher
pkill -f -9 move_group
pkill -f -9 robot_state_publisher
pkill -f -9 rviz
pkill -f -9 joint_state_publisher
pkill -f -9 collision_check
pkill -f -9 custom_joint_controller_ros_node
pkill -f -9 simulator_ros
pkill -f -9 paper_simulation
sleep 1
|
eliabntt/GRADE-RR/simulator/people_and_objects.py | import argparse
import confuse
import time
import os
import numpy as np
# base_env_path and other settings are in the config file
out_dir = "" # set this to a temporary empty dir
from omni.isaac.kit import SimulationApp
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
parser = argparse.ArgumentParser(description="Your second IsaacSim run")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False, help="Use rtx when True, use path tracing when False")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("world_and_robot", __name__)
config.set_file(args.config_file)
config.set_args(args)
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
import omni.graph.core as og
import omni.physx
import omni.usd
from omni.isaac.core import SimulationContext
from omni.isaac.core.utils.stage import is_stage_loading
omni.usd.get_context().open_stage(config["base_env_path"].get(), None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
meters_per_unit = config["meters_per_unit"].get()
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(), rendering_dt=1.0 / config["render_hz"].get(), stage_units_in_meters=meters_per_unit, backend='torch')
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
print("Adding ROS clock, you can check with rostopic echo /clock")
_clock_graph = add_clock()
simulation_context.play()
for _ in range(10):
simulation_context.step()
og.Controller.evaluate_sync(_clock_graph)
simulation_context.stop()
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.human_utils import *
simulation_environment_setup(need_ros = True)
if base_world_path != "":
from utils.environment_utils import *
print("Loading environment...")
environment = environment(config, meters_per_unit=meters_per_unit)
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
print("Visualization...")
for _ in range(1000):
simulation_context.render()
simulation_context.step(render=False)
print("Environment loading done...")
add_colliders(env_prim_path)
print("Colliders added..")
simulation_context.play()
x, y, z = 0, 0, 0
if out_dir != "":
environment.generate_map(out_dir, origin=[x,y,z])
print("Map generated..")
simulation_context.stop()
ros_transform_components = []
camera_list = []
viewport_list = []
camera_pose, camera_pose_pub = [], []
imus,imu_pubs = [], []
lidars = []
odoms, odom_pubs = [], []
from omni.isaac.sensor import _sensor
_is = _sensor.acquire_imu_sensor_interface()
old_h_ape, old_v_ape = [], []
_dc = dynamic_control_interface()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path)
x, y, z, yaw = np.random.randint(-100,100,4)
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}",
[x / meters_per_unit, y / meters_per_unit, z / meters_per_unit],
[0, 0, np.deg2rad(yaw)],
upper_zlim = z * 2,
lower_zlim = -z * 2
)
print("Adding ROS components")
add_ros_components(robot_base_prim_path, n, ros_transform_components, camera_list, viewport_list,
camera_pose, camera_pose_pub, imu_pubs, imus,
odoms, odom_pubs, lidars,
[], config, old_h_ape, old_v_ape, _is, simulation_context, _clock_graph, irotate=False)
kit.update()
timeline = setup_timeline(config) # setup the timeline before adding anything animated
print("Loading people")
n = 0
human_base_prim_path = config["human_base_prim_path"].get()
while n < config["num_humans"].get():
folder = rng.choice(human_folders)
random_name = rng.choice(os.listdir(os.path.join(human_export_folder, folder)))
asset_path = os.path.join(human_export_folder, folder, random_name, random_name + ".usd")
print("Loading human {} from {}".format(random_name, folder))
tmp_pkl = pkl.load(open(os.path.join(human_export_folder, folder, random_name, random_name + ".pkl"), 'rb'))
used_ob_stl_paths.append(os.path.join(human_export_folder, folder, random_name, random_name + ".stl"))
load_human(human_base_prim_path, n, asset_path)
stl_path = os.path.join(human_export_folder, folder, random_name, random_name + ".stl")
x = np.random.randint(environment.env_limits_shifted[0], environment.env_limits_shifted[3])
y = np.random.randint(environment.env_limits_shifted[1], environment.env_limits_shifted[4])
z = 0
yaw = np.random.randint(0,360)
# position the mesh
set_translate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"),
[x / meters_per_unit, y / meters_per_unit, z / meters_per_unit])
set_scale(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"), 1 / meters_per_unit)
set_rotate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"), [0, 0, np.deg2rad(yaw)])
n += 1
print("Load objects")
google_ob_used, shapenet_ob_used = load_objects(config, environment, np.random.default_rng(), [], 1/meters_per_unit)
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
print("Note that the rendering is now blocking until finished")
for i in range(100):
print(f"Iteration {i}/100", end="\r")
sleeping(simulation_context, viewport_list, raytracing=config["rtx_mode"].get())
# deselect all objects
omni.usd.get_context().get_selection().clear_selected_prim_paths()
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
timeline.set_current_time(0)
timeline.set_auto_update(False) # this no longer works as expected.
# Theoretically, once this is set and the timeline plays, rendering will not advance the timeline
# this is no longer the case. Thus, keep track of the ctime (as we do within sleeping function)
# the simulation context can be kept stopped, but that will prevent physics and time to advance.
# https://forums.developer.nvidia.com/t/the-timeline-set-auto-update-false-no-longer-works/253504/10
simulation_context.play()
for i in range(2000):
simulation_context.step(render=False)
og.Controller.evaluate_sync(_clock_graph)
time.sleep(0.2)
simulation_context.render()
# publish IMU
print("Publishing IMU...")
pub_imu(_is, imu_pubs, robot_imu_frames, meters_per_unit)
# the ratio_* values (physics-to-sensor rate ratios) come from the config
if i % ratio_joints == 0:
for js in joint_states:
og.Controller.set(og.Controller.attribute(f"{js}/OnImpulseEvent.state:enableImpulse"), True)
if i % ratio_tf == 0:
for tf in tf_trees:
og.Controller.set(og.Controller.attribute(f"{tf}/OnImpulseEvent.state:enableImpulse"), True)
if i % ratio_odom == 0:
c_pose, _ = pub_odom(odoms, odom_pubs, _dc, meters_per_unit)
pub_cam_pose(camera_pose, camera_pose_pub, _dc, meters_per_unit)
if i % ratio_camera == 0:
# The RTX LiDAR is still a fuzzy component. The "normal" LiDAR is more stable, but won't see non-colliding objects
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(1)
ctime = timeline.get_current_time()
simulation_context.render()
timeline.set_current_time(ctime)
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(0)
pub_and_write_images(simulation_context, viewport_list, camera_list, config["rtx_mode"].get()) # clearly not writing anything here
timeline.forward_one_frame() # advancing the timeline
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/smpl_and_bbox.py | import argparse
import carb
import confuse
import ipdb
import math
import numpy as np
import os
import roslaunch
import rospy
import scipy.spatial.transform as tf
import sys
import time
import traceback
import trimesh
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
from omni.syntheticdata import sensors, helpers as generic_helper_lib
def get_obj_pose(time):
"""Get pose of all objects with a semantic label.
"""
stage = omni.usd.get_context().get_stage()
mappings = generic_helper_lib.get_instance_mappings()
pose = []
for m in mappings:
prim_path = m[1]
prim = stage.GetPrimAtPath(prim_path)
prim_tf = omni.usd.get_world_transform_matrix(prim, time)
pose.append((str(prim_path), m[2], str(m[3]), np.array(prim_tf)))
return pose
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
"""
Exported information will have the shape of
[[prim_asset_path, bbox] [prim_asset_path,skel] [prim_asset_path, init_tf, init_rot]]
prim_asset_path is the string path of the asset in the simulation.
They are processed in order, so expect groups of human, cloth --- possibly reversed
All is output in WORLD frame. Please check the notes regarding projection in camera frame.
bbox will be of shape (ef, 8, 3) if only one bbox is saved or (ef, 2, 8, 3) if both are saved
ef will be either the last animated frame (given the simulated environment) or the last frame of the animations + 1
if you need to access the bbox of the mesh after that just use [-1]
skel is the smpl skeleton info
use the flags below to export only the skeleton, only the garments or only the body or any combination
init_rot is the same as in the info file
init_tf is equal, except that here we account for the small vertical translation that is added to meshes very close to the ground
-- this was a bug during the data generation which actually has very little influence (< 0.1 cm in vertical displacement)
-- the design choice was to save the placement value and then have always a way to recover the eventual vertical displacement which is anyway based on a rule (check human_utils.py:move_humans_to_ground)
everything is in meters
NOTE: We start writing images from timeline.frame = 1 (1/fps) since the "forward_timeline" call has been placed _before_ the publishing
"""
try:
parser = argparse.ArgumentParser(description="Get Bounding Boxes")
parser.add_argument("--experiment_folder", type=str,
help="The experiment folder with the USD file and the info file")
parser.add_argument("--body", type=boolean_string, default=True, help="When true process the bodies")
parser.add_argument("--garments", type=boolean_string, default=True, help="When true process the garments")
parser.add_argument("--base_path", type=str, default="my_human_", help="Human prim base path")
parser.add_argument("--headless", type=boolean_string, default=False, help="Whether run this headless or not")
parser.add_argument("--write", type=boolean_string, default=True, help="Whether to write results")
parser.add_argument("--both", type=boolean_string, default=False,
help="Whether to write both vertex types -- preference in code is both - fast - slow")
parser.add_argument("--fast", type=boolean_string, default=True,
help="Whether to write only the axis-aligned box or the oriented one")
parser.add_argument("--only_exp", type=boolean_string, default=True,
help="Whether to export only the experiment (considering the reverse strategy) or the whole sequences")
parser.add_argument("--get_skel", type=boolean_string, default=True, help="Whether to include the skeleton info")
parser.add_argument("--skel_root", type=str, default="avg_root",
help="This is a recognizable last part of the root of the skeleton prim, in our case _avg_root "
+ "It will process ONLY the path of which the last part is this root")
parser.add_argument("--correct_poses", type=boolean_string, default=False)
args, unknown = parser.parse_known_args()
config = confuse.Configuration("BoundingBoxes", __name__)
config.set_args(args)
exp_info = np.load(os.path.join(config["experiment_folder"].get(), "experiment_info.npy"), allow_pickle=True)
exp_info = exp_info.item()
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.environment_utils import *
from utils.human_utils import *
simulation_environment_setup()
local_file_prefix = "my-computer://"
omni.usd.get_context().open_stage(local_file_prefix + config["experiment_folder"].get() + "/loaded_stage.usd", None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
simulation_context = SimulationContext(physics_dt=1.0 / exp_info["config"]["physics_hz"].get(),
rendering_dt=1.0 / exp_info["config"]["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.start_simulation()
meters_per_unit = UsdGeom.GetStageMetersPerUnit(stage)
set_raytracing_settings(exp_info["config"]["physics_hz"].get())
timeline = setup_timeline(exp_info["config"])
base_path = config["base_path"].get()
fast, both, slow = False, False, False
if config["both"].get():
both = True
elif config["fast"].get():
fast = True
else:
slow = True
get_skel = config["get_skel"]
only_exp = config["only_exp"].get()
humans_info = exp_info["humans"]
write = config["write"].get()
if write:
results = []
stime = time.time()
helper_list_global = []
helper_list_skel = []
skel_root = config["skel_root"].get()
smpl_info_path = ""
for prim in stage.Traverse():
prim_path = str(prim.GetPath()).lower()
if base_path in prim_path:
if (get_skel and skel_root in prim_path and prim_path[:prim_path.find(skel_root)] not in helper_list_skel) or \
(str(prim.GetTypeName()).lower() == "mesh" and "points" in prim.GetPropertyNames()):
print(f"Processing {prim}")
parent = prim.GetParent()
refs = omni.usd.get_composed_references_from_prim(parent)
while len(refs) == 0:
parent = parent.GetParent()
refs = omni.usd.get_composed_references_from_prim(parent)
human_global_path = str(omni.usd.get_composed_references_from_prim(parent)[0][0].assetPath)
human_global_path = human_global_path[len(local_file_prefix):]
index = humans_info['folders'].index(human_global_path[:-3] + "stl")
init_tf = np.array(parent.GetAttribute("xformOp:translate").Get())
init_rot = parent.GetAttribute("xformOp:orient").Get()
init_rot = np.array([init_rot.GetImaginary()[0], init_rot.GetImaginary()[1], init_rot.GetImaginary()[2],
init_rot.GetReal()])
init_rot_mat = tf.Rotation.from_quat(init_rot).as_matrix()
if write and str(parent.GetPath()) not in helper_list_global:
results.append([str(parent.GetPath()), init_tf, init_rot])
helper_list_global.append(str(parent.GetPath()))
if human_global_path[:-3] + "pkl" != smpl_info_path:
smpl_info_path = human_global_path[:-3] + "pkl"
smpl_anim_info = pkl.load(open(smpl_info_path, 'rb'))
smpl_info = smpl_anim_info["info"]
r = smpl_info["zrot"]
rot_mat = tf.Rotation.from_euler('z', r).as_matrix()
ef = int(math.ceil(smpl_anim_info["ef"] * exp_info["config"]["fps"].get() / 24))
if only_exp:
ef = min(ef, int(math.ceil(
exp_info["config"]["experiment_length"].get() / exp_info['reversing_timeline_ratio'])))
if (get_skel and skel_root in prim_path):
helper_list_skel.append(prim_path[:prim_path.find(skel_root)])
skeleton, joint_token = AnimationSchema.SkelJoint(prim).GetJoint()
skel_cache = UsdSkel.Cache()
skel_query = skel_cache.GetSkelQuery(UsdSkel.Skeleton(skeleton.GetPrim()))
xfCache = UsdGeom.XformCache()
skeleton_info = np.empty((ef, 3), dtype=object)
for i in range(0, ef):
xfCache.SetTime(i)
transforms = skel_query.ComputeJointWorldTransforms(xfCache)
translates, rotations, scales = UsdSkel.DecomposeTransforms(transforms)
skeleton_info[i] = [np.array(translates) * meters_per_unit, np.array(rotations),
np.array(scales) * meters_per_unit]
if write:
results.append([str(prim.GetPath()), np.array(skeleton_info)])
else:
points = UsdGeom.PointBased(prim)
if both:
bounds = np.zeros((ef, 2, 8, 3))
else:
bounds = np.zeros((ef, 8, 3))
for i in range(0, ef):
points_in_mesh = points.ComputePointsAtTime(i, Usd.TimeCode(i))
points_in_mesh = np.array(points_in_mesh)
# bound = points.ComputeWorldBound(i, "default")
# for j in range(8):
# print(bound.ComputeAlignedRange().GetCorner(j))
points_in_mesh = ((points_in_mesh @ rot_mat.T @ init_rot_mat.T) + init_tf * meters_per_unit)
# normals = prim.GetAttribute("normals").Get(i)
# normals = np.array(normals)
mymesh = trimesh.PointCloud(points_in_mesh)
if fast:
temp_bounds = mymesh.bounding_box.vertices
elif slow:
temp_bounds = mymesh.bounding_box_oriented.vertices
elif both:
temp_bounds = [mymesh.bounding_box.vertices, mymesh.bounding_box_oriented.vertices]
bounds[i] = temp_bounds
if write:
results.append([str(prim.GetPath()), bounds])
results = np.array(results, dtype=object)
print(f"etime {time.time() - stime}")
if write:
np.save(os.path.join(config["experiment_folder"].get(), "bboxes.npy"), results)
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
import ipdb
ipdb.set_trace()
finally:
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/replay_experiment.py | import argparse
import carb
import confuse
import cv2
import ipdb
import math
import numpy as np
import os
import rosbag
import roslaunch
import rospy
import scipy.spatial.transform as tf
import sys
import time
import traceback
import trimesh
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
"""
Suppose you want a stereo camera,
optical flow,
and LiDAR (not fully supported yet) for your experiments.
This is a way in which you can re-process your data and get those results.
Suggestion: teleport is much more precise (sub-mm difference). Working with velocities is fishy.
This code is a bit hard-coded, as it is demonstration code.
"""
try:
parser = argparse.ArgumentParser(description="Get Bounding Boxes")
parser.add_argument("--experiment_folder", type=str,
help="The experiment folder with the USD file and the info file")
parser.add_argument("--headless", type=boolean_string, default=False, help="Whether run this headless or not")
parser.add_argument("--write", type=boolean_string, default=False, help="Whether to write new cameras results")
parser.add_argument("--write_flow", type=boolean_string, default=False, help="Whether to write optical flow")
parser.add_argument("--write_normals", type=boolean_string, default=False, help="Whether to write normals")
parser.add_argument("--use_teleport", type=boolean_string, default=False,
help="Whether to use teleport or force joint vel, both have adv and disadv")
parser.add_argument("--use_reindex", type=boolean_string, default=False, help="Whether to use reindexed bags")
parser.add_argument("--bag_basename", type=str, default="7659a6c9-9fc7-4be5-bc93-5b202ff2a22b")
parser.add_argument("--out_folder_npy", type=str, default='additional_data')
parser.add_argument("--bag_subpath", type=str, default="")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("NewSensor", __name__)
config.set_args(args)
exp_info = np.load(os.path.join(config["experiment_folder"].get(), "experiment_info.npy"), allow_pickle=True)
exp_info = exp_info.item()
poses_path = os.path.join(config["experiment_folder"].get(), "Viewport0", "camera")
write_flow = config["write_flow"].get()
write_normals = config["write_normals"].get()
write = config["write"].get()
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.environment_utils import *
from utils.human_utils import *
simulation_environment_setup()
rospy.init_node("new_sensor_publisher", anonymous=True, disable_signals=True, log_level=rospy.ERROR)
local_file_prefix = "my-computer://"
omni.usd.get_context().open_stage(local_file_prefix + config["experiment_folder"].get() + "/loaded_stage.usd", None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
simulation_context = SimulationContext(physics_dt=1.0 / exp_info["config"]["physics_hz"].get(),
rendering_dt=1.0 / exp_info["config"]["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.initialize_physics()
meters_per_unit = UsdGeom.GetStageMetersPerUnit(stage)
set_raytracing_settings(exp_info["config"]["physics_hz"].get())
timeline = setup_timeline(exp_info["config"])
reversing_timeline_ratio = exp_info['reversing_timeline_ratio']
experiment_length = exp_info['config']['experiment_length'].get()
ratio_camera = exp_info['config']['ratio_camera'].get()
cnt_reversal = 1
simulation_context.stop()
### here we add the new camera to the robot. It will be located 5 cm to the right w.r.t. the original one
old_h_ape = []
old_v_ape = []
viewport_window_list = []
ros_camera_list = []
# omni.kit.commands.execute('CopyPrim',
# path_from='/my_robot_0/camera_link/Camera',
# path_to='/my_robot_0/camera_link/Camera_stereo',
# exclusive_select=False)
# set_translate(stage.GetPrimAtPath('/my_robot_0/camera_link/Camera_stereo'), [1, 0, 0])
# component, viewport = add_camera_and_viewport("/my_robot_0/camera_link",
# exp_info["config"]["robot_sensor_size"].get(), old_h_ape, old_v_ape,
# simulation_context, 0, 0, camera_path="Camera_stereo")
# cam_outputs = control_camera(viewport, simulation_context)
# ros_camera_list.append([0, component, cam_outputs])
# viewport_window_list.append(viewport)
# omni.kit.commands.execute('CopyPrim',
# path_from='/my_robot_0/camera_link/Camera_npy',
# path_to='/my_robot_0/camera_link/Camera_npy_stereo',
# exclusive_select=False)
#
# set_translate(stage.GetPrimAtPath('/my_robot_0/camera_link/Camera_npy_stereo'), [1, 0, 0])
# viewport_npy, _ = create_viewport("/my_robot_0/camera_link/Camera_npy_stereo", config["headless"].get(),
# 0, exp_info["config"]["npy_sensor_size"].get(), old_h_ape, old_v_ape, simulation_context)
# viewport_window_list.append(viewport_npy)
viewport_npy, _ = create_viewport("/my_robot_0/camera_link/Camera_npy", config["headless"].get(),
0, exp_info["config"]["npy_sensor_size"].get(), old_h_ape, old_v_ape, simulation_context)
viewport_window_list.append(viewport_npy)
is_rtx = exp_info["config"]["rtx_mode"].get()
if is_rtx:
set_raytracing_settings(exp_info["config"]["physics_hz"].get())
else:
set_pathtracing_settings(exp_info["config"]["physics_hz"].get())
simulation_context.play()
for _ in range(5): simulation_context.render()
old_v_ape = [2.32] * len(old_v_ape) # todo: this is hardcoded
for index, cam in enumerate(viewport_window_list):
simulation_context.step(render=False)
simulation_context.render()
camera = stage.GetPrimAtPath(cam.get_active_camera())
camera.GetAttribute("horizontalAperture").Set(old_h_ape[index])
camera.GetAttribute("verticalAperture").Set(old_v_ape[index])
simulation_context.stop()
_clock_graph = add_clock() # add ROS clock
og.Controller.evaluate_sync(_clock_graph)
# add a new sensor
lidars = []
# sensor = add_lidar(f"/my_robot_0/yaw_link", [0, 0, -.1], [0, 0, 0], is_3d=True, is_2d=False)
# lidars.append(sensor)
kit.update()
cnt_tf = -1
use_teleport = config["use_teleport"].get()
use_reindex = config["use_reindex"].get()
id_bag = 0
bag_path = os.path.join(config["experiment_folder"].get(), config['bag_subpath'].get(),
f"{config['bag_basename'].get()}_{id_bag}.bag")
joint_order = ['x_joint', 'y_joint', 'z_joint', 'roll_joint', 'pitch_joint', 'yaw_joint']
joint_position = []
joint_velocity = []
joint_time = []
robot_pose = []
started = use_reindex
while os.path.exists(bag_path):
bag = rosbag.Bag(bag_path)
for topic, msg, t in bag.read_messages(
topics=["/my_robot_0/joint_states", "/my_robot_0/odom", "/starting_experiment"]):
if not started:
if topic == "/starting_experiment":
started = True
continue
else:
continue
if 'joint' in topic:
joint_position.append(msg.position)
joint_velocity.append(msg.velocity)
joint_time.append(msg.header.stamp)
else:
robot_pose.append([msg.pose.pose.position, msg.pose.pose.orientation])
id_bag += 1
bag_path = os.path.join(config["experiment_folder"].get(), config['bag_subpath'].get(),
f"{config['bag_basename'].get()}_{id_bag}.bag")
if len(joint_position) == 0:
print("No bag found")
sys.exit(-1)
ratio_tf = exp_info['config']['ratio_tf'].get()
init_x, init_y, init_z, init_roll, init_pitch, init_yaw = get_robot_joint_init_loc('/my_robot_0')
init_pos = np.array([init_x, init_y, init_z])
init_rot = np.array([init_roll, init_pitch, init_yaw])
change_collision_at_path(False,paths=['/my_robot_0/camera_link/Cube.physics:collisionEnabled','/my_robot_0/yaw_link/visuals.physics:collisionEnabled'])
kit.update()
set_drone_joints_init_loc('/my_robot_0', [0, 0, 0], [0,0,0], 300, lower_zlim=0) # todo use actual limit from simulation
kit.update()
simulation_context.play()
for _ in range(5):
simulation_context.step(render=False)
simulation_context.render()
timeline.set_auto_update(False)
timeline.set_current_time(min(- 1 / (exp_info['config']["physics_hz"].get() / ratio_camera),
-abs(exp_info['config']["bootstrap_exploration"].get())))
simulation_step = int(timeline.get_current_time() * exp_info['config']["physics_hz"].get()) - 1
out_dir_npy = os.path.join(config['experiment_folder'].get(), config['out_folder_npy'].get())
if write_flow:
_tmp = extension_custom.MyRecorder()
_tmp.on_startup()
_settings = _tmp.get_default_settings()
_settings["rgb"]["enabled"] = False
_settings["motion-vector"]["enabled"] = write_flow
_settings["motion-vector"]["colorize"] = False
_settings["motion-vector"]["npy"] = True
my_recorder_flow = recorder_setup(_settings, out_dir_npy, True, 0)
my_recorder_flow._enable_record = False
if write_normals:
_tmp = extension_custom.MyRecorder()
_tmp.on_startup()
_settings = _tmp.get_default_settings()
_settings["rgb"]["enabled"] = True
_settings["normals"]["enabled"] = write_normals
_settings["motion-vector"]["colorize"] = False
_settings["motion-vector"]["npy"] = True
my_recorder_normals = recorder_setup(_settings, out_dir_npy, True, 0)
my_recorder_normals._enable_record = False
if write:
_tmp = exp_info['config']['_recorder_settings'].get()
_tmp["depth"]["enabled"] = False
_tmp["depthLinear"]["enabled"] = False
_tmp["semantic"]["enabled"] = False
_tmp["normals"]["enabled"] = False
_tmp["bbox_2d_loose"]["enabled"] = False
_tmp["bbox_2d_tight"]["enabled"] = False
_tmp["bbox_3d"]["enabled"] = False
my_recorder = recorder_setup(_tmp, out_dir_npy, True, 0)
my_recorder._enable_record = False
# how to hide dynamic content
dynamicprims = []
for prim in stage.Traverse():
if 'my_human' in str(prim.GetPath()).lower():
dynamicprims.append(prim)
for prim in stage.GetPrimAtPath("/World").GetChildren()[6:]:
dynamicprims.append(prim)
toggle_dynamic_objects(dynamicprims, False)
forward = True
while kit.is_running():
simulation_step += 1
if simulation_step == 0:
_dc = dynamic_control_interface()
handle = _dc.get_rigid_body('/my_robot_0/yaw_link')
if not use_teleport:
art = _dc.get_articulation('/my_robot_0')
joints = []
_dc.wake_up_articulation(art)
for joint in joint_order:
joints.append(_dc.find_articulation_dof(art, joint))
change_collision_at_path(True,paths=['/my_robot_0/camera_link/Cube.physics:collisionEnabled','/my_robot_0/yaw_link/visuals.physics:collisionEnabled'])
og.Controller.evaluate_sync(_clock_graph)
# since the first image generated is at time=1/30, we add 7/240
prev_time = timeline.get_current_time() + 7 / 240 * (simulation_step == 0)
timeline.set_current_time(prev_time)
simulation_step += 8
sleeping(simulation_context, viewport_window_list, is_rtx)
try:
if write:
my_recorder._update()
my_recorder._enable_record = True
if write_flow:
my_recorder_flow._update()
my_recorder_flow._enable_record = True
if write_normals:
my_recorder_normals._update()
my_recorder_normals._enable_record = True
except:
sleeping(simulation_context, viewport_window_list, is_rtx)
if write:
my_recorder._update()
my_recorder._enable_record = True
if write_flow:
my_recorder_flow._update()
my_recorder_flow._enable_record = True
if write_normals:
my_recorder_normals._update()
my_recorder_normals._enable_record = True
simulation_context.render()
simulation_context.render()
timeline.set_current_time(prev_time)
if simulation_step < 0:
simulation_context.step(render=False)
if (simulation_step % ratio_camera == 0):
timeline.forward_one_frame()
continue
if use_teleport:
if simulation_step % ratio_tf == 0:
cnt_tf += 1
teleport("/my_robot_0", np.array(joint_position[cnt_tf][:3]) / meters_per_unit + init_pos
, tf.Rotation.from_euler('XYZ', joint_position[cnt_tf][3:] + init_rot).as_quat())
if (simulation_step % (ratio_tf * 2) == 0): # odom is published at half the rate of the tf
myp = _dc.get_rigid_body_pose(handle)
print(
f"pose diff {np.array(_dc.get_rigid_body_pose(handle).p) / 100 - np.array([robot_pose[int(cnt_tf / 2)][0].x, robot_pose[int(cnt_tf / 2)][0].y, robot_pose[int(cnt_tf / 2)][0].z])}")
else:
vel = np.array(joint_velocity[
cnt_tf]) # or average position between the two, or use the IMU to interpolate also which has 240 hz
pos = (np.array(joint_position[cnt_tf][:3]) + vel[:3] * 1 / 240) / meters_per_unit + init_pos
ori = (np.array(joint_position[cnt_tf][3:]) + vel[3:] * 1 / 240) + init_rot
teleport("/my_robot_0", pos, tf.Rotation.from_euler('XYZ', ori).as_quat())
else:
_dc.wake_up_articulation(art)
if simulation_step % ratio_tf == 0:
cnt_tf += 1
vel = np.array(joint_velocity[cnt_tf])
next_vel = vel
if cnt_tf < len(joint_position) - 1:
next_vel = np.array(joint_velocity[cnt_tf + 1])
if cnt_tf == 0:
pos = np.append(np.array(joint_position[cnt_tf][:3]) / meters_per_unit + init_pos - vel[:3] * 1 / 240,
joint_position[cnt_tf][3:] + init_rot - vel[3:] * 1 / 240)
for idx, joint in enumerate(joints):
_dc.set_dof_position(joint, pos[idx] * (-1 if idx == 1 else 1))
cvel = (vel + next_vel) / 2
cvel[:3] = cvel[:3] / meters_per_unit
_dc.set_articulation_dof_velocity_targets(art, list(cvel))
for idx, joint in enumerate(joints):
_dc.set_dof_velocity(joint, cvel[idx] * (-1 if idx == 1 else 1))
if (simulation_step % (ratio_tf * 2) == 0):
myp = _dc.get_rigid_body_pose(handle)
print(
f"pose diff {np.array(_dc.get_rigid_body_pose(handle).p) / 100 - np.array([robot_pose[int(cnt_tf / 2)][0].x, robot_pose[int(cnt_tf / 2)][0].y, robot_pose[int(cnt_tf / 2)][0].z])}")
if simulation_step % 8 == 0:
# tmp = np.load(
# f'/ps/project/irotate/GRADE_DATA/DE/7659a6c9-9fc7-4be5-bc93-5b202ff2a22b/Viewport0/camera/{int(simulation_step/8)}.npy',
# allow_pickle=True).item()
prim_tf = omni.usd.get_world_transform_matrix(stage.GetPrimAtPath('/my_robot_0/camera_link/Camera'))
# in v2022 this is the only viable option to control time since timeline.set_auto_update=False is not working
timeline.set_current_time(prev_time + 1 / 240 * (1 if forward else -1))
prev_time = timeline.get_current_time()
simulation_context.step(render=False)
simulation_context.render()
print("Clocking...")
# NOTE THAT THIS MIGHT GET CONFUSING -- reindexing/retiming is needed for sure. Tests need to be careful!
og.Controller.evaluate_sync(_clock_graph)
if simulation_step == 0:
og.Controller.evaluate_sync(_clock_graph)
time.sleep(0.2)
if simulation_step % ratio_camera == 0:
if (simulation_step + ratio_camera) / ratio_camera < (experiment_length / reversing_timeline_ratio) * (
cnt_reversal):
forward = True
else:
if (simulation_step + ratio_camera) / ratio_camera >= ((experiment_length - 1) / reversing_timeline_ratio) * (
cnt_reversal + 1) or \
(timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds()) < 0:
cnt_reversal += 2
forward = True
else:
forward = False
if write_flow:
if my_recorder_flow._enable_record:
simulation_context.render()
my_recorder_flow._counter += 1
time.sleep(1.5) # this seems necessary
my_recorder_flow._update()
# you have two ways to proceed here. the sleeping performs just the rendering and then you manually toggle the recorder below
# otherwise use pub_and_write_images which automatically calls it if necessary. In the latter case, remember to increase the counter
sleeping(simulation_context, viewport_window_list, is_rtx)
# if write:
# if my_recorder._enable_record:
# my_recorder._counter += 1
# pub_and_write_images(simulation_context, viewport_window_list, ros_camera_list, is_rtx, my_recorder)
if write:
if my_recorder._enable_record:
my_recorder._counter += 1
my_recorder._update()
if write_normals:
if my_recorder_normals._enable_record:
my_recorder_normals._counter += 1
my_recorder_normals._update()
# new sensor here -- imagine 30 fps -- in that case I need to publish
# if you need sensors in the middle you need to interpolate
# using IMU and TF readings
# you can access those from the rosbags
# note you might need to work with the timeline times if the rate that you want is different
# if simulation_step % ratio_camera == 0:
# for lidar in lidars:
# og.Controller.attribute(lidar + ".inputs:step").set(1)
# ctime = timeline.get_current_time()
# simulation_context.render()
# # point_cloud = og.Controller().node("/Render/PostProcess/SDGPipeline/RenderProduct_Replicator_RtxSensorCpuIsaacComputeRTXLidarPointCloud").get_attribute("outputs:pointCloudData").get()
# # laser_scan = og.Controller().node("/Render/PostProcess/SDGPipeline/RenderProduct_Replicator_RtxSensorCpuIsaacComputeRTXLidarFlatScan").get_attribute("outputs:linearDepthData").get()
# timeline.set_current_time(ctime)
# for lidar in lidars:
# og.Controller.attribute(lidar+".inputs:step").set(0)
if simulation_step % ratio_camera == 0 and simulation_step / ratio_camera == experiment_length:
print("End of experiment!!!")
simulation_context.pause()
break
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
import ipdb
ipdb.set_trace()
finally:
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/zebra_datagen.py | import argparse
import carb
import confuse
import ipdb
import numpy as np
import os
import sys
import time
import traceback
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
def compute_points(skel_root_path, prim, ef, stage):
usdSkelRoot = UsdSkel.Root.Get(stage, skel_root_path)
UsdSkel.BakeSkinning(usdSkelRoot, Gf.Interval(0, ef))
prim = UsdGeom.PointBased(prim)
xformCache = UsdGeom.XformCache()
final_points = np.zeros((ef, len(prim.GetPointsAttr().Get()), 3))
for prim in Usd.PrimRange(usdSkelRoot.GetPrim()):
if prim.GetTypeName() != "Mesh":
continue
localToWorld = xformCache.GetLocalToWorldTransform(prim)
for t in range(ef): # avoid naming the loop variable "time", which would shadow the time module
points = UsdGeom.Mesh(prim).GetPointsAttr().Get(t)
for index in range(len(points)):
points[index] = localToWorld.Transform(points[index])
points = np.array(points)
final_points[t] = points
return final_points
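# Example usage (illustrative): bake a clip of zebra_seq_lengths[i] frames and get
# world-space vertices shaped (n_frames, n_points, 3) in stage units; multiply by
# meters_per_unit to obtain meters, as done below when caching the *_points.npy files.
# pts = compute_points(skel_root_path, prim, zebra_seq_lengths[i], stage) * meters_per_unit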
def randomize_floor_position(floor_data, floor_translation, scale, meters_per_unit, env_name, rng):
floor_points = np.zeros((len(floor_data), 3))
if env_name == "Windmills":
yaw = np.deg2rad(-155)
rot = np.array([[np.cos(yaw), -np.sin(yaw), 0], [np.sin(yaw), np.cos(yaw), 0], [0, 0, 1]])
floor_translation = np.matmul(floor_translation, rot)
if env_name == "L_Terrain":
meters_per_unit = 1
for i in range(len(floor_data)):
floor_points[i, 0] = floor_data[i][0] * scale[0] * meters_per_unit + floor_translation[0] * meters_per_unit
floor_points[i, 1] = floor_data[i][1] * scale[1] * meters_per_unit + floor_translation[1] * meters_per_unit
floor_points[i, 2] = floor_data[i][2] * scale[2] * meters_per_unit + floor_translation[2] * meters_per_unit
if env_name == "L_Terrain":
meters_per_unit = 0.01
max_floor_x = max(floor_points[:, 0])
min_floor_x = min(floor_points[:, 0])
max_floor_y = max(floor_points[:, 1])
min_floor_y = min(floor_points[:, 1])
if env_name == "Windmills":
min_floor_x = -112
max_floor_x = 161
min_floor_y = -209
max_floor_y = 63
rows = np.where((floor_points[:, 0] > min_floor_x) & (floor_points[:, 0] < max_floor_x) & (floor_points[:, 1] > min_floor_y) & (floor_points[:, 1] < max_floor_y))[0]
floor_points = floor_points[rows]
rows = []
while (len(rows) == 0):
size_x = rng.integers(40, 120)
size_y = rng.integers(40, 120)
# get all floor_points within a size x size square randomly centered
min_x = rng.uniform(min(floor_points[:, 0]), max(floor_points[:, 0]))
max_x = min_x + min(size_x, max(floor_points[:, 0]) - min(floor_points[:, 0]))
while max_x > max(floor_points[:, 0]):
min_x = rng.uniform(min(floor_points[:, 0]), max(floor_points[:, 0]))
max_x = min_x + min(size_x, max(floor_points[:, 0]) - min(floor_points[:, 0]))
min_y = rng.uniform(min(floor_points[:, 1]), max(floor_points[:, 1]))
max_y = min_y + min(size_y, max(floor_points[:, 1]) - min(floor_points[:, 1]))
while max_y > max(floor_points[:, 1]):
min_y = rng.uniform(min(floor_points[:, 1]), max(floor_points[:, 1]))
max_y = min_y + min(size_y, max(floor_points[:, 1]) - min(floor_points[:, 1]))
# FIXME this is just an approximation which MAY NOT WORK ALWAYS!
rows = np.where((min_x <= floor_points[:,0]) & (floor_points[:,0] <= max_x) & (floor_points[:,1]<=max_y) & (floor_points[:,1]>= min_y))[0]
floor_points = floor_points[rows]
shape = (len(np.unique(floor_points[:, 0])), -1, 3)
floor_points = floor_points.reshape(shape)
if (floor_points[0, 1, 0] - floor_points[0, 0, 0]) > 1:
zoom_factor = int(floor_points[0, 1, 0] - floor_points[0, 0, 0])
import scipy.ndimage.interpolation as interpolation
floor_points = interpolation.zoom(floor_points, (zoom_factor, zoom_factor, 1))
return floor_points, max_floor_x, min_floor_x, max_floor_y, min_floor_y
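# The returned floor_points grid has shape (nx, ny, 3), so a spawn location can be
# sampled directly from it, e.g. (illustrative):
# ix, iy = rng.integers(floor_points.shape[0]), rng.integers(floor_points.shape[1])
# spawn_xyz = floor_points[ix, iy]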
try:
parser = argparse.ArgumentParser(description="Dynamic Worlds Simulator")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False,
help="Use rtx when True, use path tracing when False")
parser.add_argument("--record", type=boolean_string, default=False, help="Writing data to the disk")
parser.add_argument("--debug_vis", type=boolean_string, default=False,
help="When true continuosly loop the rendering")
parser.add_argument("--neverending", type=boolean_string, default=False, help="Never stop the main loop")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("DynamicWorlds", __name__)
config.set_file(args.config_file)
config.set_args(args)
can_start = True
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# Cannot move before SimApp is launched
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.environment_utils import *
from pxr import UsdGeom, UsdLux, Gf, Vt, UsdPhysics, PhysxSchema, Usd, UsdShade, Sdf, UsdSkel
simulation_environment_setup(need_ros=False)
all_env_names = ["Bliss", "Forest", "Grasslands", "Iceland", "L_Terrain", "Meadow",
"Moorlands", "Nature_1", 'Nature_2', "Savana", "Windmills", "Woodland"]
ground_area_name = ["Landscape_1", "Landscape_1", "Landscape_1", "Landscape_0", "Terrain_5", "Landscape_0",
"Landscape_2", "Ground", "Ground", "Landscape_1", "Landscape_0", "Landscape_1"]
need_sky = [True] * len(all_env_names)
env_id = all_env_names.index(config["fix_env"].get())
rng = np.random.default_rng()
rng_state = np.random.get_state()
local_file_prefix = ""
# setup environment variables
environment = environment(config, rng, local_file_prefix)
out_dir = os.path.join(config['out_folder'].get(), environment.env_name)
out_dir_npy = os.path.join(config['out_folder_npy'].get(), environment.env_name)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
omni.usd.get_context().open_stage(local_file_prefix + config["base_env_path"].get(), None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
omni.kit.commands.execute("DeletePrimsCommand", paths=["/World/GroundPlane"])
# do this AFTER loading the world
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(),
rendering_dt=1.0 / config["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.initialize_physics()
simulation_context.play()
simulation_context.stop()
kit.update()
meters_per_unit = 0.01
# use rtx while setting up!
set_raytracing_settings(config["physics_hz"].get())
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get(), "World")
if all_env_names[env_id] == "L_Terrain":
set_scale(stage.GetPrimAtPath(f"/World/home"), 100)
while is_stage_loading():
kit.update()
floor_data = stage.GetPrimAtPath(f"/World/home/{ground_area_name[env_id]}/{ground_area_name[env_id]}").GetProperty(
'points').Get()
floor_translation = np.array(stage.GetPrimAtPath(f"/World/home/{ground_area_name[env_id]}").GetProperty(
'xformOp:translate').Get())
scale = np.array(stage.GetPrimAtPath(f"/World/home/{ground_area_name[env_id]}").GetProperty("xformOp:scale").Get())
# i need to consider that z has a bounding box and that the position is on the top corner
for _ in range(50):
simulation_context.render()
floor_points, max_floor_x, min_floor_x, max_floor_y, min_floor_y = randomize_floor_position(floor_data,
floor_translation, scale,
meters_per_unit, all_env_names[env_id], rng)
add_semantics(stage.GetPrimAtPath("/World/home"), "world")
# set timeline of the experiment
timeline = setup_timeline(config)
viewport_window_list = []
dynamic_prims = []
first = True
simulation_context.stop()
simulation_context.play()
for _ in range(10):
simulation_context.step()
_dc = dynamic_control_interface()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
old_h_ap = []
old_v_ap = []
simulation_context.stop()
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path, local_file_prefix)
change_prim_collision(False, robot_base_prim_path + str(n))
set_drone_joints_init_loc(robot_base_prim_path + str(n), [0, 0, 0], [0, 0, 0], 10e15)
kit.update()
for n in range(config["num_robots"].get()):
add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ap, old_v_ap, config,simulation_context, tot_num_ros_cam=0)
kit.update()
for _ in range(5):
simulation_context.render()
for index, cam in enumerate(viewport_window_list):
camera = stage.GetPrimAtPath(cam.get_active_camera())
camera.GetAttribute("horizontalAperture").Set(old_h_ap[index])
camera.GetAttribute("verticalAperture").Set(old_v_ap[index])
print("Loading robot complete")
print("Loading zebras..")
zebra_anims_loc = config["zebra_anims_loc"].get()
# get a list of .usd file in the folder
import glob
zebra_files = glob.glob(f"{zebra_anims_loc}/*.usd")
from utils.zebra_utils import *
from omni.kit.window.sequencer.scripts import sequencer_drop_controller
_, sequence = omni.kit.commands.execute("SequencerCreateSequenceCommand")
sequence_path = sequence.GetPrim().GetPath()
kit.update()
zebra_anim_names = ["Attack", "Attack01", "Attack02", "Eating", "Gallop", "Hit_Back", "Hit_Front", "Hit_Left",
"Hit_Right", "Idle", "Idle2", "Idle3", "Idle4", "Jump", "Tarsus", "Trot", "Walkback"]
zebra_seq_lengths = [27, 54, 32, 133, 12, 15, 17, 20, 15, 48, 72, 119, 201, 43, 29, 24, 27]
zebra_mesh_paths = [
"Attack/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0_001",
"Attack01/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0_001",
"Attack02/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0_001",
"Eating/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Gallop/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Hit_Back/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Hit_Front/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Hit_Left/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Hit_Right/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Idle/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Idle2/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Idle3/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Idle4/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Jump/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Tarsus/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Trot/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0",
"Walkback/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6/Object_45/Zebra_SHP2_0_Zebra_Mat_0"]
zebra_info = {}
for i, v in enumerate(zebra_anim_names):
zebra_info[v] = {"path": zebra_mesh_paths[i], "length": zebra_seq_lengths[i], "mesh_path": zebra_mesh_paths[i]}
for zebra_file in zebra_files:
if not os.path.exists(zebra_file[:-4] + "_points.npy"):
zebra_name = zebra_file.split("/")[-1].split(".")[0]
zebra_index = zebra_anim_names.index(zebra_name)
zebra_path = load_zebra("/zebra_", zebra_index, zebra_file)
kit.update()
kit.update()
zebra_name = zebra_file.split("/")[-1].split(".")[0]
zebra_index = zebra_anim_names.index(zebra_name)
prim = stage.GetPrimAtPath(zebra_path + zebra_mesh_paths[zebra_index][len(zebra_name):])
skel_root_path = zebra_path + "/Zebra_motions/African_Animal___Zebra/_Object_Pivot_Node_/Object_6"
points = compute_points(skel_root_path, prim, zebra_seq_lengths[zebra_index], stage) * meters_per_unit
np.save(zebra_file[:-4] + "_points.npy", points)
zebra_info[zebra_name]["points"] = points
omni.kit.commands.execute("DeletePrimsCommand", paths=[zebra_path])
else:
zebra_name = zebra_file.split("/")[-1].split(".")[0]
zebra_index = zebra_anim_names.index(zebra_name)
zebra_info[zebra_name]["points"] = np.load(zebra_file[:-4] + "_points.npy")
max_anim_length = max(zebra_seq_lengths)
# IT IS OF CRUCIAL IMPORTANCE THAT AFTER THIS POINT THE RENDER GETS DONE WITH THE SLEEPING CALL! OTHERWISE PATH TRACING SPP WILL GET RUINED
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
for _ in range(5):
simulation_context.step(render=False)
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
timeline.set_current_time(0)
simulation_step = 0 # this is NOT the frame, this is the "step" (related to physics_hz)
my_recorder = recorder_setup(config['_recorder_settings'].get(), out_dir_npy, config['record'].get(), 0)
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
timeline.set_auto_update(False)
# two times, this will ensure that totalSpp is reached
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
my_recorder._enable_record = False
exp_len = config["anim_exp_len"].get()
my_recorder._enable_record = False
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
if config["rtx_mode"].get():
my_recorder._update()
hidden_position = [min_floor_x / meters_per_unit, min_floor_y / meters_per_unit, -10e5]
all_zebras = preload_all_zebras(config, rng, zebra_files, zebra_info, simulation_context, sequencer_drop_controller,
max_anim_length, hidden_position)
substep = 3
simulation_context.play()
# import ipdb; ipdb.set_trace() # debug breakpoint, enable when inspecting the setup
while kit.is_running():
if simulation_step > 0:
for zebra in all_zebras:
set_translate(stage.GetPrimAtPath(zebra), list(hidden_position))
floor_points, max_floor_x, min_floor_x, max_floor_y, min_floor_y = randomize_floor_position(floor_data,
floor_translation,
scale,
meters_per_unit,
all_env_names[env_id], rng)
frame_info = place_zebras(all_zebras, rng, floor_points, meters_per_unit, hidden_position, config, max_anim_length,
zebra_info)
for c_substep in range(substep):
average_zebra_x = 0
average_zebra_y = 0
average_zebra_z = 0
max_zebra_x = -1e10
max_zebra_y = -1e10
min_zebra_x = 1e10
min_zebra_y = 1e10
counter = 0
for prim in frame_info:
if "zebra" in prim:
average_zebra_x += frame_info[prim]["position"][0]
average_zebra_y += frame_info[prim]["position"][1]
average_zebra_z += frame_info[prim]["position"][2]
max_zebra_x = max(max_zebra_x, frame_info[prim]["position"][0])
max_zebra_y = max(max_zebra_y, frame_info[prim]["position"][1])
min_zebra_x = min(min_zebra_x, frame_info[prim]["position"][0])
min_zebra_y = min(min_zebra_y, frame_info[prim]["position"][1])
counter += 1
average_zebra_x /= counter
average_zebra_y /= counter
average_zebra_z /= counter
delta_x = max_zebra_x - min_zebra_x
delta_y = max_zebra_y - min_zebra_y
used_x = []
used_y = []
used_z = []
for n in range(config["num_robots"].get()):
safe = False
while not safe:
# sample the robot position around the zebra herd bounding box, padded by 5
random_x = rng.uniform(average_zebra_x - delta_x/2 - 5, average_zebra_x + delta_x/2 + 5)
# keep random_x within max_floor_x min_floor_x
random_x = max(random_x, min_floor_x)
random_x = min(random_x, max_floor_x)
random_y = rng.uniform(average_zebra_y - delta_y/2 -5, average_zebra_y + delta_y/2 + 5)
# keep random_y within max_floor_y min_floor_y
random_y = max(random_y, min_floor_y)
random_y = min(random_y, max_floor_y)
random_z = rng.uniform(average_zebra_z + 5, average_zebra_z + 20)
if len(used_x) > 0:
for i in range(len(used_x)):
safe = True
if np.sqrt((used_x[i] - random_x) ** 2 + (used_y[i] - random_y) ** 2 + (used_z[i] - random_z) ** 2) < .5:
safe = False
break
else:
safe = True
if safe:
used_x.append(random_x)
used_y.append(random_y)
used_z.append(random_z)
# get angle between robot and average_zebra
angle = np.arctan2(average_zebra_y - random_y, average_zebra_x - random_x)
# randomize yaw +- 15 degrees around the direction towards the herd
yaw = rng.uniform(-np.pi / 12, np.pi / 12) + angle
# get pitch + 15 degrees (camera already pitched)
# with a weight based on the average zebra location
pitch = - np.arctan2(average_zebra_z - random_z, np.sqrt(
(average_zebra_x - random_x) ** 2 + (average_zebra_y - random_y) ** 2))
# roll minimal -10, 10 degrees
roll = rng.uniform(-np.pi / 18, np.pi / 18)
rot = Rotation.from_euler('xyz', [roll, pitch, yaw])
teleport(robot_base_prim_path + str(n),
[random_x / meters_per_unit, random_y / meters_per_unit, random_z / meters_per_unit],
rot.as_quat())
frame_info[f"{robot_base_prim_path}{n}"] = {"position": [random_x, random_y, random_z],
"rotation": [roll, pitch, yaw]}
simulation_context.step(render=False)
simulation_context.step(render=False)
for _ in range(3):
simulation_context.step(render=False)
simulation_context.render()
sleep(0.5)
# two frames with the same animation point
# todo fix the time
# import ipdb; ipdb.set_trace() # debug breakpoint, enable when inspecting the loop
timeline.set_current_time(max_anim_length / timeline.get_time_codes_per_seconds())
if need_sky[env_id]:
# with probability 0.9 during day hours
stage.GetPrimAtPath("/World/Looks/SkyMaterial/Shader").GetAttribute("inputs:SunPositionFromTOD").Set(True)
if rng.uniform() < 0.9:
stage.GetPrimAtPath("/World/Looks/SkyMaterial/Shader").GetAttribute("inputs:TimeOfDay").Set(
rng.uniform(5, 20))
else:
if rng.uniform() < 0.5:
stage.GetPrimAtPath("/World/Looks/SkyMaterial/Shader").GetAttribute("inputs:TimeOfDay").Set(
rng.uniform(0, 5))
else:
stage.GetPrimAtPath("/World/Looks/SkyMaterial/Shader").GetAttribute("inputs:TimeOfDay").Set(
rng.uniform(20, 24))
print("Publishing cameras...")
my_recorder._enable_record = True
frame_info["step"] = simulation_step
frame_info["substep"] = c_substep
pub_try_cnt = 0
success_pub = False
while not success_pub and pub_try_cnt < 3:
try:
pub_and_write_images(simulation_context, viewport_window_list, [],
config["rtx_mode"].get(), my_recorder)
success_pub = True
except:
print("Error publishing camera")
pub_try_cnt += 1
import ipdb; ipdb.set_trace()
# simulation_context.stop()
# simulation_context.play()
sleep(0.5)
simulation_context.render()
simulation_context.render()
if not success_pub:
frame_info["error"] = True
else:
frame_info["error"] = False
np.save(out_dir_npy + f"/frame_{simulation_step}_{c_substep}.npy", frame_info)
simulation_context.stop()
# clips = [f"/World/Sequence{k}{k}_Clip" for k in frame_info.keys() if k.startswith("/zebra")]
# remove targets from clips
# for clip in clips:
# relationship = stage.GetPrimAtPath(clip).GetProperty("animation")
# relationship.RemoveTarget(relationship.GetTargets()[0])
# relationship = stage.GetPrimAtPath(clip).GetProperty("assetPrim")
# asset = relationship.GetTargets()[0]
# relationship.RemoveTarget(asset)
# omni.kit.commands.execute("DeletePrimsCommand",
# paths=clips)
# omni.kit.commands.execute("DeletePrimsCommand",
# paths=
# [f"/World/Sequence{k}" for k in frame_info.keys() if k.startswith("/zebra")])
# omni.kit.commands.execute("DeletePrimsCommand", paths=[k for k in frame_info.keys() if k.startswith("/zebra")])
timeline.set_current_time(0)
my_recorder._counter += 1
simulation_step += 1
if simulation_step >= exp_len:
break
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
ipdb.post_mortem(tb)
finally:
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/FUEL_indoor_simulation.py | import argparse
import carb
import confuse
import ipdb
import numpy as np
import os
import roslaunch
import rospy
import sys
import time
import traceback
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
try:
parser = argparse.ArgumentParser(description="Dynamic Worlds Simulator")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False,
help="Use rtx when True, use path tracing when False")
parser.add_argument("--record", type=boolean_string, default=True, help="Writing data to the disk")
parser.add_argument("--debug_vis", type=boolean_string, default=False,
help="When true continuosly loop the rendering")
parser.add_argument("--neverending", type=boolean_string, default=False, help="Never stop the main loop")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("DynamicWorlds", __name__)
config.set_file(args.config_file)
config.set_args(args)
os.environ["SHAPENET_LOCAL_DIR"] = config["shapenet_local_dir"].get()
experiment_length = config["experiment_length"].get()
can_start = True
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# Cannot move before SimApp is launched
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.environment_utils import *
from utils.human_utils import *
def monitor_movement(msg, args):
global second_start
global last_check_time
global c_pose
global old_pose
global rng
global env_prim_path
wait_time = rospy.Duration(1)
index, environment = args[0], args[1]
if second_start and rospy.Time.now() > last_check_time + wait_time:
last_check_time = rospy.Time.now()
diff_x = abs(old_pose[index][0] - c_pose[index][0]) ** 2
diff_y = abs(old_pose[index][1] - c_pose[index][1]) ** 2
diff_z = abs(old_pose[index][2] - c_pose[index][2]) ** 2
dist = (diff_x + diff_y + diff_z) ** 0.5
if (dist) < 0.1:
my_pose = PoseStamped()
if (rng.uniform() > .9):
x, y, z, yaw = position_object(environment, type=0)
x = x[0]
y = y[0]
z = z[0]
yaw = yaw[0] + rng.uniform(0, 2 * np.pi)
else:
yaw = get_robot_yaw(c_pose[index][0], c_pose[index][1], c_pose[index][2],
environment.env_mesh, environment.shifts)
x = c_pose[index][0] + 0.2 * np.cos(yaw)
y = c_pose[index][1] + 0.2 * np.sin(yaw)
z = c_pose[index][2]
yaw += rng.uniform(0, 2 * np.pi)
my_pose.pose.position.x = x
my_pose.pose.position.y = y
my_pose.pose.position.z = z
rot = np.array(yaw) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot)
).GetQuat()
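# equivalent scipy form (illustrative): quat = Rotation.from_euler('z', yaw).as_quat() -> [x, y, z, w]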
my_pose.pose.orientation.x = quat.imaginary[0]
my_pose.pose.orientation.y = quat.imaginary[1]
my_pose.pose.orientation.z = quat.imaginary[2]
my_pose.pose.orientation.w = quat.real
print(
f"Publishing random goal since robot {index} stuck [{x},{y},{z}, {yaw} ({yaw * 180 / 3.14})].")
my_pose.header.frame_id = "world"
my_pose.header.stamp = rospy.Time.now()
movement_monitor_pubs[index].publish(my_pose)
if (dist) < 0.05:
set_colliders(env_prim_path, True)
else:
old_pose[index] = c_pose[index]
set_colliders(env_prim_path, True)
def autostart_exploration(msg, index):
global first_start
global second_start
global can_start
global can_change_second_start
global last_pub_time
if (msg.data == "PUB_FIRST_360"):
can_change_second_start = True
wait_time = rospy.Duration(0, 500000000) if second_start else rospy.Duration(1)
if (msg.data == "WAIT_TRIGGER" or (
msg.data == "PUB_360" and not second_start) and rospy.Time.now() > last_pub_time + wait_time):
if can_start:
if not first_start:
first_start = True
elif can_change_second_start:
second_start = True
print("Exploration will start at the end of this movement")
default_pose = PoseStamped()
default_pose.header.frame_id = "world"
default_pose.header.stamp = rospy.Time.now()
start_explorer_pubs[index].publish(default_pose)
last_pub_time = rospy.Time.now()
def publish_random_goal(msg, args):
global last_pub_time
global first_start
global second_start
global can_start
global can_change_second_start
index, environment = args[0], args[1]
if (msg.data == "PUB_FIRST_360"):
can_change_second_start = True
if (msg.data == "WAIT_TRIGGER" or (
msg.data == "PUB_360" and not second_start) and rospy.Time.now() > last_pub_time + rospy.Duration(0,
500000000)):
if can_start:
if not first_start:
first_start = True
elif can_change_second_start:
second_start = True
my_pose = PoseStamped()
x, y, z, yaw = position_object(environment, type=0)
my_pose.pose.position.x = x[0]
my_pose.pose.position.y = y[0]
my_pose.pose.position.z = z[0]
rot = np.array(yaw[0]) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot)
).GetQuat()
my_pose.pose.orientation.x = quat.imaginary[0]
my_pose.pose.orientation.y = quat.imaginary[1]
my_pose.pose.orientation.z = quat.imaginary[2]
my_pose.pose.orientation.w = quat.real
print(f"Publishing random goal [{x[0]},{y[0]},{z[0]}, {yaw[0]} ({yaw[0] * 180 / 3.14})] for robot {index}")
my_pose.header.frame_id = "fixing_manual"
my_pose.header.stamp = rospy.Time.now()
send_waypoint_pubs[index].publish(my_pose)
last_pub_time = rospy.Time.now()
simulation_environment_setup()
# set timeline of the experiment
timeline = setup_timeline(config)
rospy.init_node("my_isaac_ros_app", anonymous=True, disable_signals=True, log_level=rospy.ERROR)
starting_pub = rospy.Publisher('starting_experiment', String, queue_size=10)
rng = np.random.default_rng()
rng_state = np.random.get_state()
local_file_prefix = "" # if something is broken try my-computer://
# setup environment variables
meters_per_unit = config["meters_per_unit"].get()
environment = environment(config, rng, local_file_prefix, meters_per_unit)
uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)
out_dir = os.path.join(config['out_folder'].get(), environment.env_name)
out_dir_npy = os.path.join(config['out_folder_npy'].get(), environment.env_name)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
os.environ["ROS_LOG_DIR"] = out_dir
roslaunch.configure_logging(uuid)
launch_files = ros_launchers_setup(roslaunch, environment.env_limits_shifted, config)
parent = roslaunch.parent.ROSLaunchParent(uuid, launch_files, force_log=True)
omni.usd.get_context().open_stage(local_file_prefix + config["base_env_path"].get(), None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
# do this AFTER loading the world
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(),
rendering_dt=1.0 / config["render_hz"].get(),
stage_units_in_meters=meters_per_unit, backend='torch')
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
_clock_graph = add_clock() # add ROS clock
simulation_context.play()
for _ in range(10):
simulation_context.step()
og.Controller.evaluate_sync(_clock_graph)
last_pub_time = rospy.Time.now()
simulation_context.stop()
# fixme IDK why this is necessary sometimes
try:
parent.start()
except:
print("Failed to start roslaunch, retry")
try:
parent.start()
except:
print("Failed to start roslaunch, exit")
exit(1)
print("ros node launched")
kit.update()
# use rtx while setting up!
set_raytracing_settings(config["physics_hz"].get())
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1] - 0.2,
meters_per_unit, is_rtx=config["rtx_mode"].get())
randomize_roughness(config["_random_roughness"].get(), rng, env_prim_path)
ros_camera_list = []
ros_transform_components = [] # list of tf and joint components, one (of each) for each robot
viewport_window_list = []
dynamic_prims = []
imus_handle_list = []
robot_odom_frames = []
robot_imu_frames = []
camera_pose_frames = []
imu_pubs = []
odom_pubs = []
cam_pose_pubs = []
first = True
simulation_context.play()
for _ in range(100):
og.Controller.evaluate_sync(_clock_graph)
simulation_context.step()
last_pub_time = rospy.Time.now()
simulation_context.stop()
print("Generating map...")
if add_colliders(env_prim_path):
simulation_context.play()
x, y, z, yaw = position_object(environment, type=3)
environment.generate_map(out_dir, origin=[x[0], y[0], 0])
for _ in range(10):
simulation_context.step()
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
else:
simulation_context.play()
for _ in range(10):
simulation_context.step()
print("Error generating collisions", file=sys.stderr)
simulation_context.play()
_dc = dynamic_control_interface()
print("Loading robots..")
from omni.isaac.sensor import _sensor
_is = _sensor.acquire_imu_sensor_interface()
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
c_pose = []
old_pose = []
old_h_ap = []
old_v_ap = []
lidars = []
simulation_context.stop()
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path, local_file_prefix)
x, y, z, yaw = get_valid_robot_location(environment, first)
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}", [x / meters_per_unit, y / meters_per_unit, z / meters_per_unit], [0,0,yaw],
(environment.env_limits[5]) / meters_per_unit, 0.3/meters_per_unit, irotate=config["is_iRotate"].get())
c_pose.append([x, y, z])
old_pose.append([x, y, z])
# todo make a comment about this and the number of cameras
add_ros_components(robot_base_prim_path, n, ros_transform_components, ros_camera_list, viewport_window_list,
camera_pose_frames, cam_pose_pubs, imu_pubs, robot_imu_frames,
robot_odom_frames, odom_pubs, lidars,
dynamic_prims, config, old_h_ap, old_v_ap, _is, simulation_context, _clock_graph)
kit.update()
first = False
for n in range(config["num_robots"].get()):
add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ap, old_v_ap, config, simulation_context,
config["num_robots"].get() * 1)
for _ in range(50):
simulation_context.render()
print("Loading robot complete")
print("WARNING: CAMERA APERTURE MANUAL SET NO LONGER WORKS, NEEDS TO BE FIXED BY NVIDIA!!!!")
time.sleep(5)
# # legacy code
# for index, cam in enumerate(viewport_window_list):
# camera = stage.GetPrimAtPath(cam.get_active_camera())
# camera.GetAttribute("horizontalAperture").Set(old_h_ap[index])
# camera.GetAttribute("verticalAperture").Set(old_v_ap[index])
print("Starting FSM - setting up topics...")
start_explorer_pubs = []
send_waypoint_pubs = []
movement_monitor_pubs = []
for index, _ in enumerate(robot_odom_frames):
print("Waiting for fsm to start for robot {}".format(index))
my_topic = f"{robot_base_prim_path}{index}/exploration_node/fsm_exploration/state"
if config["autonomous"].get():
rospy.Subscriber(my_topic, String, callback=autostart_exploration, callback_args=index)
start_explorer_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/traj_start_trigger", PoseStamped, queue_size=10))
else:
rospy.Subscriber(my_topic, String, callback=publish_random_goal, callback_args=(index, environment))
send_waypoint_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/exploration_node/manual_goal", PoseStamped,
queue_size=10))
rospy.Subscriber(my_topic, String, callback=monitor_movement, callback_args=(index, environment))
movement_monitor_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/command/pose", PoseStamped, queue_size=10))
print("fsm management for robot {} setted up".format(index))
print("FSM setted up")
print("Loading humans..")
my_humans = []
my_humans_heights = []
human_export_folder = config["human_path"].get()
human_folders = os.listdir(human_export_folder)
tot_area = 0
areas = []
initial_dynamics = len(dynamic_prims)
used_ob_stl_paths = []
## todo cycle to complete area, need to update the service probably
n = 0
human_anim_len = []
added_prims = []
human_base_prim_path = config["human_base_prim_path"].get()
while n < rng.integers(7, 1 + max(7, config["num_humans"].get())):
anim_len = 0
# the animation needs to be shorter than config["max_anim_len"].get() and longer than 0/min_len
while anim_len < max(config["min_human_anim_len"].get(), 0) or anim_len > config["max_human_anim_len"].get():
folder = rng.choice(human_folders)
while "old_textures" in folder:
folder = rng.choice(human_folders)
random_name = rng.choice(os.listdir(os.path.join(human_export_folder, folder)))
asset_path = local_file_prefix + os.path.join(human_export_folder, folder, random_name,
random_name + ".usd")
tmp_pkl = pkl.load(open(os.path.join(human_export_folder, folder, random_name, random_name + ".pkl"), 'rb'))
anim_len = tmp_pkl['ef']
print("Loading human {} from {}".format(random_name, folder))
used_ob_stl_paths.append(os.path.join(human_export_folder, folder, random_name, random_name + ".stl"))
human_anim_len.append(tmp_pkl['ef'])
if "verts" in tmp_pkl.keys():
my_humans_heights.append(tmp_pkl['verts'][:, :, 2])
else:
my_humans_heights.append(None)
my_humans.append(random_name)
load_human(human_base_prim_path, n, asset_path, dynamic_prims, added_prims)
stl_path = os.path.join(human_export_folder, folder, random_name, random_name + ".stl")
this_mesh = mesh.Mesh.from_file(stl_path)
areas.append((this_mesh.x.max() - this_mesh.x.min()) * (this_mesh.y.max() - this_mesh.y.min()))
tot_area += areas[-1]
n += 1
x, y, z, yaw = position_object(environment, type=1, objects=my_humans, ob_stl_paths=used_ob_stl_paths, max_collisions=int(config["allow_collision"].get()))
to_be_removed = []
human_prim_list = []
body_origins = []
for n, human in enumerate(my_humans):
if z[n] < 0:
to_be_removed.append(n)
tot_area -= areas[n]
else:
set_translate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"),
[x[n] / meters_per_unit, y[n] / meters_per_unit, z[n] / meters_per_unit])
set_scale(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"), 1 / meters_per_unit)
set_rotate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"), [0, 0, yaw[n]])
human_prim_list.append(f"{human_base_prim_path}{n}")
body_origins.append([x[n], y[n], z[n], yaw[n]])
if len(to_be_removed) > 0:
print("Removing humans that are out of the environment")
to_be_removed.reverse()
cumsum = np.cumsum(added_prims)
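# added_prims[n] is how many prims human n contributed to dynamic_prims; the cumulative
# sum locates each human's block so the right entries are popped below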
for n in to_be_removed:
my_humans.pop(n)
used_ob_stl_paths.pop(n)
my_humans_heights.pop(n)
for _ in range(added_prims[n]):
if n > 0:
dynamic_prims.pop(cumsum[n - 1] + initial_dynamics)
else:
dynamic_prims.pop(initial_dynamics)
human_anim_len.pop(n)
omni.kit.commands.execute("DeletePrimsCommand", paths=[f"{human_base_prim_path}{n}" for n in to_be_removed])
print("Loading human complete")
google_ob_used, shapenet_ob_used = load_objects(config, environment, rng, dynamic_prims, 1/meters_per_unit)
# IT IS OF CRUCIAL IMPORTANCE THAT AFTER THIS POINT THE RENDER GETS DONE WITH THE SLEEPING CALL! OTHERWISE PATH TRACING SPP WILL GET RUINED
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
omni.usd.get_context().get_selection().clear_selected_prim_paths()
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
for _ in range(5):
simulation_context.step(render=False)
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
timeline.set_current_time(0)
simulation_step = 0 # this is NOT the frame, this is the "step" (related to physics_hz)
my_recorder = recorder_setup(config['_recorder_settings'].get(), out_dir_npy, config['record'].get(), skip_cameras=1)
simulation_context.stop()
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
timeline.set_auto_update(False)
for _ in range(5):
kit.update()
simulation_context.play()
timeline.set_auto_update(False)
first_start = False
second_start = False
can_change_second_start = False
# two times, this will ensure that totalSpp is reached
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
last_pub_time = rospy.Time.now()
last_check_time = rospy.Time.now()
if config['debug_vis'].get():
cnt = 0
while 1:
cnt += 1
if cnt % 10000 == 0:
import ipdb
ipdb.set_trace()
print("Debug vis")
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
reversing_timeline_ratio = compute_timeline_ratio(human_anim_len, config["reverse_strategy"].get(),
experiment_length)
print(f"The reversing ratio is {reversing_timeline_ratio}.\n"
f"This implies that that every {experiment_length / reversing_timeline_ratio} frames we reverse the animations")
cnt_reversal = 1
# example
# exp length: 600, ratio: 4
# forward 0-150, 151-300 backward, 300-450 forward, 450-600 backward (so 4 slots)
# exp length: 1200, ratio: 4
# forward 0-300, 301-600 backward, 601-900 forward, 901-1200 backward (so 4 slots)
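# a compact, equivalent way to read the schedule above (illustrative only, mirrors the checks below):
# slot = experiment_length / reversing_timeline_ratio # frames per direction slot
# forward = int((simulation_step / ratio_camera) // slot) % 2 == 0 # even slots forward, odd backward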
ratio_camera = config["ratio_camera"].get()
ratio_odom = config["ratio_odom"].get()
ratio_tf = config["ratio_tf"].get()
starting_to_pub = False
my_recorder._enable_record = False
status = True
while kit.is_running():
# NOTE EVERYTHING THAT NEEDS TO BE RENDERED NEEDS TO BE MOVED AFTER THE TIMELINE UPDATE CONSISTENTLY
if can_start:
last_check_time = rospy.Time.now()
if second_start:
if config['record'].get():
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
my_recorder._update()
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
starting_to_pub = True
timeline.set_current_time(min(- 1 / (config["physics_hz"].get() / ratio_camera),
-abs(config["bootstrap_exploration"].get())))
simulation_step = int(timeline.get_current_time() * config["physics_hz"].get()) - 1
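# i.e. start from a negative time so the bootstrap steps run before t=0 and the
# recording begins exactly at the first frame of the timeline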
# reset_physics(timeline, simulation_context)
print("Bootstrap started")
can_start = False
simulation_step += 1
if starting_to_pub and simulation_step == 0:
timeline.set_current_time(0)
# reset_physics(timeline, simulation_context)
move_humans_to_ground(my_humans_heights, human_prim_list, simulation_step / ratio_camera, meters_per_unit,
config["max_distance_human_ground"].get())
print("Starting recording NOW!")
msg = String("starting")
starting_pub.publish(msg)
starting_to_pub = False
time.sleep(0.5)
if config['record'].get():
my_recorder._enable_record = True
last_check_time = rospy.Time.now()
if (config["_random_light"].get()["during_experiment"]):
if (simulation_step % config["_random_light"].get()["n-frames"] == 0):
# fixme todo smooth change, idea get max-min and time window
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1],
environment.meters_per_unit, is_rtx=config["rtx_mode"].get())
# step the physics
simulation_context.step(render=False)
# get the current time in ROS
print("Clocking...")
og.Controller.evaluate_sync(_clock_graph)
time.sleep(0.1)
ctime = timeline.get_current_time()
simulation_context.render()
timeline.set_current_time(ctime)
# publish IMU
print("Publishing IMU...")
pub_imu(_is, imu_pubs, robot_imu_frames, meters_per_unit)
# publish joint status (ca 120 Hz)
if simulation_step % ratio_tf == 0:
print("Publishing joint/tf status...")
for component in ros_transform_components:
og.Controller.set(og.Controller.attribute(f"{component}/OnImpulseEvent.state:enableImpulse"), True)
# publish odometry (60 hz)
if simulation_step % ratio_odom == 0:
print("Publishing odometry...")
c_pose, _ = pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit)
pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit)
# we consider ratio_camera to forward the animation.
# If you want it different ratio_animation < ratio_camera to avoid
# two frames with the same animation point
if second_start:
if simulation_step % ratio_camera == 0:
if my_recorder._enable_record:
# update the image counter externally so that we can use it in the recorder and all images have the same index
my_recorder._counter += 1
if simulation_step / ratio_camera < (experiment_length / reversing_timeline_ratio) * (
cnt_reversal):
timeline.forward_one_frame()
else:
if simulation_step / ratio_camera >= ((experiment_length - 1) / reversing_timeline_ratio) * (
cnt_reversal + 1) or \
(timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds()) < 0:
cnt_reversal += 2
timeline.forward_one_frame()
else:
timeline.rewind_one_frame()
if simulation_step % ratio_camera == 0:
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(1)
ctime = timeline.get_current_time()
simulation_context.render()
timeline.set_current_time(ctime)
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(0)
# publish camera (30 hz)
if simulation_step % ratio_camera == 0:
ctime = timeline.get_current_time()
print("Publishing cameras...")
# FIRST ONE WRITTEN IS AT 1/30 on the timeline
pub_and_write_images(simulation_context, viewport_window_list, ros_camera_list, config["rtx_mode"].get(), my_recorder, second_start)
timeline.set_current_time(ctime)
if simulation_step % ratio_camera == 0 and simulation_step / ratio_camera == experiment_length \
and not config["neverending"].get():
print("End of experiment!!!")
simulation_context.pause()
if my_recorder.data_writer is not None:
my_recorder.data_writer.stop_threads()
timeline.set_current_time(0)
context.save_as_stage(os.path.join(out_dir, "loaded_stage.usd"))
experiment_info = {}
experiment_info["config"] = config
experiment_info["reversing_timeline_ratio"] = reversing_timeline_ratio
experiment_info["humans"] = {}
experiment_info["humans"]["ids"] = my_humans
experiment_info["humans"]["folders"] = used_ob_stl_paths
experiment_info["humans"]["origins"] = body_origins # x y z yaw
experiment_info["google_obs"] = google_ob_used
experiment_info["shapenet_obs"] = shapenet_ob_used
experiment_info["environment"] = {}
experiment_info["environment"]["id"] = environment.env_name
experiment_info["environment"]["folder"] = environment.env_path
experiment_info["environment"]["shifts"] = environment.shifts
experiment_info["rng_state"] = rng_state
np.save(os.path.join(out_dir, "experiment_info.npy"), experiment_info)
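# the saved metadata can be reloaded later with (illustrative):
# info = np.load(os.path.join(out_dir, "experiment_info.npy"), allow_pickle=True).item()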
break
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
ipdb.post_mortem(tb)
finally:
for pub in odom_pubs:
pub.unregister()
for pub in imu_pubs:
pub.unregister()
for pub in cam_pose_pubs:
pub.unregister()
for pub in start_explorer_pubs:
pub.unregister()
for pub in send_waypoint_pubs:
pub.unregister()
parent.shutdown()
rospy.signal_shutdown("my_simulation complete")
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/robot_with_ros.py | import argparse
import confuse
import time
import os
import numpy as np
# base_env_path and other settings are in the config file
out_dir = "" # set this to a temporary empty dir
from omni.isaac.kit import SimulationApp
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
parser = argparse.ArgumentParser(description="Your second IsaacSim run")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False, help="Use rtx when True, use path tracing when False")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("world_and_robot", __name__)
config.set_file(args.config_file)
config.set_args(args)
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# these modules are only importable once the SimulationApp is running
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
omni.usd.get_context().open_stage(config["base_env_path"].get(), None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
meters_per_unit = config["meters_per_unit"].get()
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(), rendering_dt=1.0 / config["render_hz"].get(), stage_units_in_meters=meters_per_unit, backend='torch')
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
print("Adding ROS clock, you can check with rostopic echo /clock")
_clock_graph = add_clock() # add ROS clock
simulation_context.play()
for _ in range(10):
simulation_context.step() # remember that this step also the physics
og.Controller.evaluate_sync(_clock_graph)
simulation_context.stop()
simulation_environment_setup(need_ros = True)
base_world_path = config["base_env_path"].get() # assumption: gate the env loading on the configured base environment
if base_world_path != "":
from utils.environment_utils import *
print("Loading environment...")
environment = environment(config, meters_per_unit=meters_per_unit)
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
print("Visualization...")
for _ in range(1000):
simulation_context.render()
simulation_context.step(render=False)
print("Environment loading done...")
add_colliders(env_prim_path)
print("Colliders added..")
simulation_context.play()
x, y, z = 0, 0, 0
if out_dir != "":
environment.generate_map(out_dir, origin=[x,y,z])
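# generate_map writes a 2D map of the collider geometry (used e.g. by the exploration
# stack) to out_dir, centered at origin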
print("Map generated..")
simulation_context.stop()
# prepare some containers
joint_states = []
tf_trees = []
camera_list = []
viewport_list = []
camera_pose, camera_pose_pub = [], []
imus,imu_pubs = [], []
lidars = []
odoms, odom_pubs = [], []
# get the interface to add imu sensors
from omni.isaac.sensor import _sensor
_is = _sensor.acquire_imu_sensor_interface()
# these are kept because IsaacSim resets the vertical aperture based on the horizontal one.
# In v2021 this could be reverted; in v2022 it cannot.
old_h_ape, old_v_ape = [], []
# get the interface to access dynamics of the assets
_dc = dynamic_control_interface()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path)
x, y, z, yaw = np.random.randint(-100,100,4)
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}",
[x / meters_per_unit, y / meters_per_unit, z / meters_per_unit],
[0, 0, np.deg2rad(yaw)],
upper_zlim = z * 2,
lower_zlim = -z * 2
)
print("Adding ROS components")
joint_states.append(add_joint_state(f"{robot_base_prim_path}{n}"))
tf_trees.append(add_pose_tree(f"{robot_base_prim_path}{n}"))
# create the viewport, the camera component
component, viewport = add_camera_and_viewport(f"{robot_base_prim_path}{n}/camera_link",
config["robot_sensor_size"].get(),
old_h_ape, old_v_ape, simulation_context,
0, n, cam_per_robot=1) # cam index is useful if you want multiple cameras
cam_outputs = control_camera(viewport, simulation_context)
camera_list.append([n + 0, component, cam_outputs])
viewport_list.append(viewport)
omni.kit.app.get_app().update()
camera_pose.append(f"{robot_base_prim_path}{n}/camera_link")
camera_pose_pub.append(rospy.Publisher(f"{robot_base_prim_path}{n}/camera/pose", PoseStamped, queue_size=10))
setup_imu_sensor(_is, config, f"{robot_base_prim_path}{n}/imu_link")
imu_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/imu_body", Imu, queue_size=10))
imus.append(f"{robot_base_prim_path}{n}/imu_link")
odoms.append(f"{robot_base_prim_path}{n}/yaw_link")
odom_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/odom", Odometry, queue_size=10))
sensor = add_lidar(f"{robot_base_prim_path}{n}/yaw_link", [0, 0, -.1], [0, 0, 0], is_3d=True, is_2d=True)
lidars.append(sensor)
# alternatively
# add_ros_components(robot_base_prim_path, n, ros_transform_components, camera_list, viewport_list,
# camera_pose, camera_pose_pub, imu_pubs, imus,
# odoms, odom_pubs, lidars,
# [], config, old_h_ape, old_v_ape, _is, simulation_context, _clock, irotate=False):
print("Loading robots done")
# set some settings for the rendering
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
print("Note that the rendering is now blocking until finished")
for i in range(100):
print(f"Iteration {i}/100", end="\r")
sleeping(simulation_context, viewport_list, raytracing=config["rtx_mode"].get())
# deselect all objects
omni.usd.get_context().get_selection().clear_selected_prim_paths()
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
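# the publication ratios and the timeline are needed in the loop below; they are read
# from the config as in the other simulator scripts (ratio_joints is an assumption,
# the other keys appear in the configs used by FUEL_indoor_simulation)
ratio_camera = config["ratio_camera"].get()
ratio_odom = config["ratio_odom"].get()
ratio_tf = config["ratio_tf"].get()
ratio_joints = config["ratio_joints"].get() # assumption: defined alongside the other ratios
timeline = setup_timeline(config) # used below to freeze time around the lidar render ticks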
simulation_context.play()
for i in range(2000):
simulation_context.step(render=False)
og.Controller.evaluate_sync(_clock_graph)
time.sleep(0.2)
simulation_context.render()
# publish IMU
print("Publishing IMU...")
pub_imu(_is, imu_pubs, imus, meters_per_unit)
if i % ratio_joints == 0:
for js in joint_states:
og.Controller.set(og.Controller.attribute(f"{js}/OnImpulseEvent.state:enableImpulse"), True)
if i % ratio_tf == 0:
for tf in tf_trees:
og.Controller.set(og.Controller.attribute(f"{tf}/OnImpulseEvent.state:enableImpulse"), True)
if i % ratio_odom == 0:
c_pose, _ = pub_odom(odoms, odom_pubs, _dc, meters_per_unit)
pub_cam_pose(camera_pose, camera_pose_pub, _dc, meters_per_unit)
if i % ratio_camera == 0:
# The RTX LiDAR is still a fuzzy component. The "normal" LiDAR is more stable, but won't see non-colliding objects
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(1)
ctime = timeline.get_current_time()
simulation_context.render()
timeline.set_current_time(ctime)
for lidar in lidars:
og.Controller.attribute(lidar+".inputs:step").set(0)
pub_and_write_images(simulation_context, viewport_list, camera_list, config["rtx_mode"].get()) # clearly not writing anything here, no recorder is passed
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/world_and_robot.py | import argparse
import confuse
import ipdb
import numpy as np
import os
# base_env_path and other settings are in the config file
out_dir = "" # set this to a temporary empty dir
from omni.isaac.kit import SimulationApp
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
parser = argparse.ArgumentParser(description="Your second IsaacSim run")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False, help="Use rtx when True, use path tracing when False")
# new options
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("world_and_robot", __name__)
# load the config file specified
config.set_file(args.config_file)
config.set_args(args)
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# these modules are only importable once the SimulationApp is running
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
omni.usd.get_context().open_stage(config["base_env_path"].get(), None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
meters_per_unit = config["meters_per_unit"].get()
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(), rendering_dt=1.0 / config["render_hz"].get(), stage_units_in_meters=meters_per_unit, backend='torch')
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
for _ in range(100):
simulation_context.render()
simulation_context.step(render=False)
simulation_environment_setup(need_ros = False) # enable some extensions; automatically checks whether ROS is running
base_world_path = config["base_env_path"].get() # assumption: gate the env loading on the configured base environment
if base_world_path != "":
from utils.environment_utils import *
print("Loading environment...")
environment = environment(config, meters_per_unit=meters_per_unit) # setup the class
env_prim_path = environment.load_and_center(config["env_prim_path"].get()) # actually load the env
process_semantics(config["env_prim_path"].get()) # add semantic information based either on a label you provide or on fields of the objects. This applies semantics to all children
print("Visualization...")
for _ in range(1000):
simulation_context.render()
simulation_context.step(render=False)
print("Environment loading done...")
print("Add colliders to the environment, if the environment is big this could take ages..")
add_colliders(env_prim_path) # add colliders to the environment
print("Colliders added..")
print("For the next step please check out the code and set x, y, z manually to test them out..")
print()
ipdb.set_trace()
simulation_context.play()
x, y, z = 0, 0, 0
if out_dir == "":
print("Change out_dir")
environment.generate_map(out_dir, origin=[x,y,z])
print("Map generated..")
simulation_context.stop()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path)
x, y, z, yaw = np.random.randint(-100,100,4)
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}",
[x / meters_per_unit, y / meters_per_unit, z / meters_per_unit],
[0, 0, np.deg2rad(yaw)],
upper_zlim = z * 2,
lower_zlim = -z * 2
)
print("Loading robots done")
simulation_context.play()
for _ in range(2000):
simulation_context.render()
simulation_context.step(render=False)
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/irotate_simulation.py | import argparse
import carb
import confuse
import ipdb
import numpy as np
import os
import roslaunch
import rospy
import sys
import time
import traceback
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
try:
parser = argparse.ArgumentParser(description="Dynamic Worlds Simulator")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False,
help="Use rtx when True, use path tracing when False")
parser.add_argument("--record", type=boolean_string, default=True, help="Writing data to the disk")
parser.add_argument("--debug_vis", type=boolean_string, default=False,
help="When true continuosly loop the rendering")
parser.add_argument("--neverending", type=boolean_string, default=False, help="Never stop the main loop")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("DynamicWorlds", __name__)
config.set_file(args.config_file)
config.set_args(args)
experiment_length = config["experiment_length"].get()
can_start = True
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# Cannot move before SimApp is launched
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.environment_utils import *
simulation_environment_setup()
rospy.init_node("my_isaac_ros_app", anonymous=True, disable_signals=True, log_level=rospy.ERROR)
starting_pub = rospy.Publisher('starting_experiment', String, queue_size=10)
rng = np.random.default_rng()
rng_state = np.random.get_state()
local_file_prefix = "my-computer://"
# setup environment variables
environment = environment(config, rng, local_file_prefix)
uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)
out_dir = os.path.join(config['out_folder'].get(), environment.env_name)
out_dir_npy = os.path.join(config['out_folder_npy'].get(), environment.env_name)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
os.environ["ROS_LOG_DIR"] = out_dir
roslaunch.configure_logging(uuid)
launch_files = ros_launchers_setup(roslaunch, environment.env_limits_shifted, config)
parent = roslaunch.parent.ROSLaunchParent(uuid, launch_files, force_log=True)
omni.usd.get_context().open_stage(local_file_prefix + config["base_env_path"].get(), None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
# do this AFTER loading the world
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(),
rendering_dt=1.0 / config["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.start_simulation()
add_clock() # add ROS clock
simulation_context.play()
for _ in range(100):
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
simulation_context.step()
last_pub_time = rospy.Time.now()
simulation_context.stop()
# fixme IDK why this is necessary sometimes
try:
parent.start()
except:
print("Failed to start roslaunch, retry")
try:
parent.start()
except:
print("Failed to start roslaunch, exit")
exit(1)
print("ros node launched")
kit.update()
meters_per_unit = UsdGeom.GetStageMetersPerUnit(stage)
# use rtx while setting up!
set_raytracing_settings(config["physics_hz"].get())
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1] - 0.2,
environment.meters_per_unit, is_rtx=config["rtx_mode"].get())
randomize_roughness(config["_random_roughness"].get(), rng, env_prim_path)
# set timeline of the experiment
timeline = setup_timeline(config)
ros_camera_list = []
ros_transform_components = [] # list of tf and joint components, one (of each) for each robot
viewport_window_list = []
dynamic_prims = []
imus_handle_list = []
robot_odom_frames = []
robot_imu_frames = []
camera_pose_frames = []
imu_pubs = []
odom_pubs = []
cam_pose_pubs = []
camera_odom_pubs = []
camera_odom_frames = []
lidar_components = []
first = True
imu_sensor, imu_props = setup_imu_sensor(config)
simulation_context.play()
for _ in range(100):
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
simulation_context.step()
last_pub_time = rospy.Time.now()
simulation_context.stop()
print("Generating map...")
if add_colliders(env_prim_path):
simulation_context.play()
x, y, z, yaw = position_object(environment, type=3)
environment.generate_map(out_dir, origin=[x[0], y[0], 0])
for _ in range(10):
simulation_context.step()
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
else:
simulation_context.play()
for _ in range(10):
simulation_context.step()
print("Error generating collisions", file=sys.stderr)
simulation_context.play()
_dc = dynamic_control_interface()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
c_pose = []
old_pose = []
old_h_ap = []
old_v_ap = []
for n in range(config["num_robots"].get()):
simulation_context.stop()
import_robot(robot_base_prim_path, n, usd_robot_path, local_file_prefix)
x, y, z, yaw = 0, 0, 0, 0
simulation_context.stop()
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}", [x / meters_per_unit, y / meters_per_unit, z / meters_per_unit],
[0, 0, yaw],
(environment.env_limits[5]) / meters_per_unit, irotate=config["is_iRotate"].get())
c_pose.append([x, y, z])
old_pose.append([x, y, z])
kit.update()
simulation_context.play()
kit.update()
add_ros_components(robot_base_prim_path, n, ros_transform_components, ros_camera_list, viewport_window_list,
camera_pose_frames, cam_pose_pubs, imus_handle_list, imu_pubs, robot_imu_frames,
robot_odom_frames, odom_pubs,
dynamic_prims, config, imu_sensor, imu_props, old_h_ap, old_v_ap, config["is_iRotate"].get())
add_irotate_ros_components(camera_odom_frames, camera_odom_pubs, lidar_components, robot_base_prim_path, n)
kit.update()
first = False
for n in range(config["num_robots"].get()):
add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ap, old_v_ap, config,
config["num_robots"].get() * 1)
kit.update()
for _ in range(50):
simulation_context.render()
print("Loading robot complete")
for index, cam in enumerate(viewport_window_list):
camera = stage.GetPrimAtPath(cam.get_active_camera())
camera.GetAttribute("horizontalAperture").Set(old_h_ap[index])
camera.GetAttribute("verticalAperture").Set(old_v_ap[index])
# setup manual ticks for all components (just to be sure)
# IMU not necessary as it is NOT a ROS component itself
for component in ros_camera_list:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
for component in ros_transform_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
# IT IS OF CRUCIAL IMPORTANCE THAT AFTER THIS POINT THE RENDER GETS DONE WITH THE SLEEPING CALL! OTHERWISE PATH TRACING SPP WILL GET RUINED
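# (path tracing accumulates samples across successive renders: sleeping()
# keeps rendering until the configured totalSpp is reached, so any other
# render call in between would publish frames with incomplete accumulation)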
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
simulation_context.stop()
simulation_context.play()
for _ in range(5):
simulation_context.step(render=False)
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
timeline.set_current_time(0)
simulation_step = 0 # this is NOT the frame, this is the "step" (related to physics_hz)
my_recorder = recorder_setup(config['_recorder_settings'].get(), out_dir_npy, config['record'].get())
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
timeline.set_auto_update(False)
omni.kit.commands.execute("RosBridgeUseSimTime", use_sim_time=True)
omni.kit.commands.execute("RosBridgeUsePhysicsStepSimTime", use_physics_step_sim_time=True)
# two times, this will ensure that totalSpp is reached
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
last_pub_time = rospy.Time.now()
last_check_time = rospy.Time.now()
if config['debug_vis'].get():
cnt = 0
while True:
cnt += 1
if cnt % 10000 == 0:
import ipdb
ipdb.set_trace()
print("DEBUGGING VIS")
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
for i, cam in enumerate(ros_camera_list):
omni.kit.commands.execute("RosBridgeTickComponent", path=str(cam.GetPath()))
reversing_timeline_ratio = 1
print(
f"The reversing ratio is {reversing_timeline_ratio}.\n"
f"This implies that every {experiment_length / reversing_timeline_ratio} frames we reverse the animations")
cnt_reversal = 1
ratio_camera = config["ratio_camera"].get()
ratio_odom = config["ratio_odom"].get()
ratio_tf = config["ratio_tf"].get()
starting_to_pub = False
my_recorder._enable_record = False
second_start = False
while kit.is_running():
if can_start:
last_check_time = rospy.Time.now()
if config['record'].get():
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
my_recorder._update()
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
starting_to_pub = True
timeline.set_current_time(min(- 1 / (config["physics_hz"].get() / ratio_camera),
-abs(config["bootstrap_exploration"].get())))
simulation_step = int(timeline.get_current_time() * config["physics_hz"].get()) - 1
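# the timeline starts at a negative time (at least one camera frame, or the
# configured bootstrap window) so exploration can warm up before t=0;
# simulation_step is initialized so that it reaches 0 exactly when recording starts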
print("Bootstrap started")
can_start = False
second_start = True
simulation_step += 1
if starting_to_pub and simulation_step == 0:
print("Starting recording NOW!")
msg = String("starting")
starting_pub.publish(msg)
starting_to_pub = False
time.sleep(0.5)
if config['record'].get():
my_recorder._enable_record = True
last_check_time = rospy.Time.now()
if (config["_random_light"].get()["during_experiment"]):
if (simulation_step % config["_random_light"].get()["n-frames"] == 0):
# fixme todo smooth change, idea get max-min and time window
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1],
environment.meters_per_unit, is_rtx=config["rtx_mode"].get())
# step the physics
simulation_context.step(render=False)
# get the current time in ROS
print("Clocking...")
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
time.sleep(0.2)
# publish IMU
print("Publishing IMU...")
pub_imu(imus_handle_list, imu_sensor, imu_pubs, robot_imu_frames, meters_per_unit)
# publish joint status (ca 120 Hz)
if simulation_step % ratio_tf == 0:
print("Publishing joint/tf status...")
for component in ros_transform_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
# publish odometry (60 hz)
if simulation_step % ratio_odom == 0:
print("Publishing odometry...")
pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit)
c_pose, _ = pub_odom(camera_odom_frames, camera_odom_pubs, _dc, meters_per_unit, robot_odom_frames)
c_pose, _ = pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit)
for component in lidar_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
# we consider ratio_camera to forward the animation.
# If you want it different, use ratio_animation < ratio_camera to avoid
# two frames with the same animation point
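# with reversing_timeline_ratio fixed to 1 (above), the first branch below
# holds until the final frame, so in this script the animation effectively
# only plays forward; the rewind branch mirrors multi_robot_sim.py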
if simulation_step % ratio_camera == 0:
if my_recorder._enable_record:
# update the image counter externally so that we can use it in the recorder and all images have the same index
my_recorder._counter += 1
if simulation_step / ratio_camera < (experiment_length / reversing_timeline_ratio) * (
cnt_reversal):
timeline.forward_one_frame()
else:
if simulation_step / ratio_camera >= ((experiment_length - 1) / reversing_timeline_ratio) * (
cnt_reversal + 1) or \
(timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds()) < 0:
cnt_reversal += 2
timeline.forward_one_frame()
else:
timeline.rewind_one_frame()
# publish camera (30 hz)
if simulation_step % ratio_camera == 0:
print("Publishing cameras...")
# getting skel pose for each joint
# get_skeleton_info(meters_per_unit, body_origins, body_list)
# FIRST ONE WRITTEN IS AT 1/30 on the timeline
pub_and_write_images(simulation_context, viewport_window_list,
ros_camera_list, config["rtx_mode"].get(),
my_recorder, second_start)
if simulation_step % ratio_camera == 0 and simulation_step / ratio_camera == experiment_length \
and not config["neverending"].get():
print("End of experiment!!!")
simulation_context.pause()
if my_recorder.data_writer is not None:
my_recorder.data_writer.stop_threads()
timeline.set_current_time(0)
context.save_as_stage(os.path.join(out_dir, "loaded_stage.usd"))
experiment_info = {}
experiment_info["config"] = config
experiment_info["reversing_timeline_ratio"] = reversing_timeline_ratio
experiment_info["environment"] = {}
experiment_info["environment"]["id"] = environment.env_name
experiment_info["environment"]["folder"] = environment.env_path
experiment_info["environment"]["shifts"] = environment.shifts
experiment_info["rng_state"] = rng_state
np.save(os.path.join(out_dir, "experiment_info.npy"), experiment_info)
break
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
# ipdb.post_mortem(tb)
finally:
for pub in odom_pubs:
pub.unregister()
for pub in imu_pubs:
pub.unregister()
for pub in cam_pose_pubs:
pub.unregister()
parent.shutdown()
rospy.signal_shutdown("my_simulation complete")
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/multi_robot_sim.py | import argparse
import carb
import confuse
import ipdb
import numpy as np
import os
import roslaunch
import rospy
import sys
import time
import traceback
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
try:
parser = argparse.ArgumentParser(description="Dynamic Worlds Simulator")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False,
help="Use rtx when True, use path tracing when False")
parser.add_argument("--record", type=boolean_string, default=True, help="Writing data to the disk")
parser.add_argument("--debug_vis", type=boolean_string, default=False,
help="When true continuosly loop the rendering")
parser.add_argument("--neverending", type=boolean_string, default=False, help="Never stop the main loop")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("DynamicWorlds", __name__)
config.set_file(args.config_file)
config.set_args(args)
os.environ["SHAPENET_LOCAL_DIR"] = config["shapenet_local_dir"].get()
experiment_length = config["experiment_length"].get()
can_start = True
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# Cannot move before SimApp is launched
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.environment_utils import *
from utils.human_utils import *
def monitor_movement(msg, args):
global second_start
global last_check_time
global c_pose
global old_pose
global rng
global env_prim_path
wait_time = rospy.Duration(1)
index, environment = args[0], args[1]
if second_start and rospy.Time.now() > last_check_time + wait_time:
last_check_time = rospy.Time.now()
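# recovery logic: if the robot moved less than 10 cm since the last check,
# publish a new goal -- 10% of the time a fully random pose, otherwise a
# ~20 cm nudge along a feasible yaw computed from the environment mesh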
diff_x = (old_pose[index][0] - c_pose[index][0]) ** 2
diff_y = (old_pose[index][1] - c_pose[index][1]) ** 2
diff_z = (old_pose[index][2] - c_pose[index][2]) ** 2
dist = (diff_x + diff_y + diff_z) ** 0.5
if dist < 0.1:
my_pose = PoseStamped()
if (rng.uniform() > .9):
x, y, z, yaw = position_object(environment, type=0)
x = x[0]
y = y[0]
z = z[0]
yaw = yaw[0] + rng.uniform(0, 2 * np.pi)
else:
yaw = get_robot_yaw(c_pose[index][0], c_pose[index][1], c_pose[index][2],
environment.env_mesh, environment.shifts)
x = c_pose[index][0] + 0.2 * np.cos(yaw)
y = c_pose[index][1] + 0.2 * np.sin(yaw)
z = c_pose[index][2]
yaw += rng.uniform(0, 2 * np.pi)
my_pose.pose.position.x = x
my_pose.pose.position.y = y
my_pose.pose.position.z = z
rot = np.array(yaw) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot)
).GetQuat()
my_pose.pose.orientation.x = quat.imaginary[0]
my_pose.pose.orientation.y = quat.imaginary[1]
my_pose.pose.orientation.z = quat.imaginary[2]
my_pose.pose.orientation.w = quat.real
print(
f"Publishing random goal since robot {index} is stuck [{x},{y},{z}, {yaw} ({yaw * 180 / np.pi})].")
my_pose.header.frame_id = "world"
my_pose.header.stamp = rospy.Time.now()
movement_monitor_pubs[index].publish(my_pose)
if dist < 0.05:
set_colliders(env_prim_path, True)
else:
old_pose[index] = c_pose[index]
set_colliders(env_prim_path, True)
def autostart_exploration(msg, index):
global first_start
global second_start
global can_start
global can_change_second_start
global last_pub_time
if (msg.data == "PUB_FIRST_360"):
can_change_second_start = True
wait_time = rospy.Duration(0, 500000000) if second_start else rospy.Duration(1)
if (msg.data == "WAIT_TRIGGER" or (
msg.data == "PUB_360" and not second_start) and rospy.Time.now() > last_pub_time + wait_time):
if can_start:
if not first_start:
first_start = True
elif can_change_second_start:
second_start = True
print("Exploration will start at the end of this movement")
default_pose = PoseStamped()
default_pose.header.frame_id = "world"
default_pose.header.stamp = rospy.Time.now()
start_explorer_pubs[index].publish(default_pose)
last_pub_time = rospy.Time.now()
def publish_random_goal(msg, args):
global last_pub_time
global first_start
global second_start
global can_start
global can_change_second_start
index, environment = args[0], args[1]
if (msg.data == "PUB_FIRST_360"):
can_change_second_start = True
if (msg.data == "WAIT_TRIGGER" or (
msg.data == "PUB_360" and not second_start) and rospy.Time.now() > last_pub_time + rospy.Duration(0,
500000000)):
if can_start:
if not first_start:
first_start = True
elif can_change_second_start:
second_start = True
my_pose = PoseStamped()
x, y, z, yaw = position_object(environment, type=0)
my_pose.pose.position.x = x[0]
my_pose.pose.position.y = y[0]
my_pose.pose.position.z = z[0]
rot = np.array(yaw[0]) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot)
).GetQuat()
my_pose.pose.orientation.x = quat.imaginary[0]
my_pose.pose.orientation.y = quat.imaginary[1]
my_pose.pose.orientation.z = quat.imaginary[2]
my_pose.pose.orientation.w = quat.real
print(f"Publishing random goal [{x[0]},{y[0]},{z[0]}, {yaw[0]} ({yaw[0] * 180 / 3.14})] for robot {index}")
my_pose.header.frame_id = "fixing_manual"
my_pose.header.stamp = rospy.Time.now()
send_waypoint_pubs[index].publish(my_pose)
last_pub_time = rospy.Time.now()
simulation_environment_setup()
rospy.init_node("my_isaac_ros_app", anonymous=True, disable_signals=True, log_level=rospy.ERROR)
starting_pub = rospy.Publisher('starting_experiment', String, queue_size=10)
rng = np.random.default_rng()
rng_state = np.random.get_state()
local_file_prefix = "my-computer://"
# setup environment variables
environment = environment(config, rng, local_file_prefix)
uuid = roslaunch.rlutil.get_or_generate_uuid(None, False)
out_dir = os.path.join(config['out_folder'].get(), environment.env_name)
out_dir_npy = os.path.join(config['out_folder_npy'].get(), environment.env_name)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
os.environ["ROS_LOG_DIR"] = out_dir
roslaunch.configure_logging(uuid)
launch_files = ros_launchers_setup(roslaunch, environment.env_limits_shifted, config)
parent = roslaunch.parent.ROSLaunchParent(uuid, launch_files, force_log=True)
omni.usd.get_context().open_stage(local_file_prefix + config["base_env_path"].get(), None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
# do this AFTER loading the world
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(),
rendering_dt=1.0 / config["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.start_simulation()
add_clock() # add ROS clock
simulation_context.play()
for _ in range(100):
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
simulation_context.step()
last_pub_time = rospy.Time.now()
simulation_context.stop()
# fixme IDK why this is necessary sometimes
try:
parent.start()
except:
print("Failed to start roslaunch, retry")
try:
parent.start()
except:
print("Failed to start roslaunch, exit")
exit(1)
print("ros node launched")
kit.update()
meters_per_unit = UsdGeom.GetStageMetersPerUnit(stage)
# use rtx while setting up!
set_raytracing_settings(config["physics_hz"].get())
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1] - 0.2,
environment.meters_per_unit, is_rtx=config["rtx_mode"].get())
randomize_roughness(config["_random_roughness"].get(), rng, env_prim_path)
# set timeline of the experiment
timeline = setup_timeline(config)
ros_camera_list = []
ros_transform_components = [] # list of tf and joint components, one (of each) for each robot
viewport_window_list = []
dynamic_prims = []
imus_handle_list = []
robot_odom_frames = []
robot_imu_frames = []
camera_pose_frames = []
imu_pubs = []
odom_pubs = []
cam_pose_pubs = []
irotate_cam_odom_pubs = []
irotate_cam_odom_frames = []
irotate_differential_odom_frames = []
lidar_components = []
first = True
imu_sensor, imu_props = setup_imu_sensor(config)
simulation_context.play()
for _ in range(100):
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
simulation_context.step()
last_pub_time = rospy.Time.now()
simulation_context.stop()
print("Generating map...")
if add_colliders(env_prim_path):
simulation_context.play()
x, y, z, yaw = position_object(environment, type=3)
environment.generate_map(out_dir, origin=[x[0], y[0], 0])
for _ in range(10):
simulation_context.step()
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
else:
simulation_context.play()
for _ in range(10):
simulation_context.step()
print("Error generating collisions", file=sys.stderr)
simulation_context.play()
_dc = dynamic_control_interface()
print("Loading robots..")
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = [str(i) for i in config["usd_robot_path"].get()]
c_pose = []
old_pose = []
old_h_ap = []
old_v_ap = []
is_irotate = np.array(config["is_iRotate"].get())
for n in range(config["num_robots"].get()):
simulation_context.stop()
import_robot(robot_base_prim_path, n, usd_robot_path[n], local_file_prefix)
if is_irotate[n]:
x, y, z, yaw = 0, 0, 0, 0
else:
x, y, z, yaw = get_valid_robot_location(environment, first)
simulation_context.stop()
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}", [x / meters_per_unit, y / meters_per_unit, z / meters_per_unit],
[0, 0, yaw],
(environment.env_limits[5]) / meters_per_unit, 0.3/meters_per_unit, is_irotate[n])
c_pose.append([x, y, z])
old_pose.append([x, y, z])
kit.update()
simulation_context.play()
kit.update()
add_ros_components(robot_base_prim_path, n, ros_transform_components, ros_camera_list, viewport_window_list,
camera_pose_frames, cam_pose_pubs, imus_handle_list, imu_pubs, robot_imu_frames,
robot_odom_frames, odom_pubs,
dynamic_prims, config, imu_sensor, imu_props, old_h_ap, old_v_ap, is_irotate[n])
if is_irotate[n]:
add_irotate_ros_components(irotate_cam_odom_frames, irotate_cam_odom_pubs, lidar_components, robot_base_prim_path,
n)
irotate_differential_odom_frames.append(robot_odom_frames[-1])
kit.update()
first = False
for n in range(config["num_robots"].get()):
add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ap, old_v_ap, config, config["num_robots"].get()*1)
for _ in range(50):
simulation_context.render()
print("Loading robot complete")
for index, cam in enumerate(viewport_window_list):
camera = stage.GetPrimAtPath(cam.get_active_camera())
camera.GetAttribute("horizontalAperture").Set(old_h_ap[index])
camera.GetAttribute("verticalAperture").Set(old_v_ap[index])
# IMU not necessary as it is NOT a ROS component itself
for component in ros_camera_list:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
for component in ros_transform_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
print("Starting FSM - setting up topics...")
start_explorer_pubs = []
send_waypoint_pubs = []
movement_monitor_pubs = []
for index, _ in enumerate(robot_odom_frames):
print("Waiting for fsm to start for robot {}".format(index))
my_topic = f"{robot_base_prim_path}{index}/exploration_node/fsm_exploration/state"
if config["autonomous"].get():
rospy.Subscriber(my_topic, String, callback=autostart_exploration, callback_args=index)
start_explorer_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/traj_start_trigger", PoseStamped, queue_size=10))
else:
rospy.Subscriber(my_topic, String, callback=publish_random_goal, callback_args=(index, environment))
send_waypoint_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/exploration_node/manual_goal", PoseStamped,
queue_size=10))
rospy.Subscriber(my_topic, String, callback=monitor_movement, callback_args=(index, environment))
movement_monitor_pubs.append(
rospy.Publisher(f"{robot_base_prim_path}{index}/command/pose", PoseStamped, queue_size=10))
print("fsm management for robot {} setted up".format(index))
print("FSM setted up")
print("Loading humans..")
my_humans = []
my_humans_heights = []
human_export_folder = config["human_path"].get()
human_folders = os.listdir(human_export_folder)
tot_area = 0
areas = []
initial_dynamics = len(dynamic_prims)
used_ob_stl_paths = []
## todo cycle to complete area, need to update the service probably
n = 0
human_anim_len = []
added_prims = []
human_base_prim_path = config["human_base_prim_path"].get()
n_humans_loading = rng.integers(7, 1 + max(7, config["num_humans"].get()))
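# rng.integers(low, high) samples from [low, high), so this loads between 7
# and max(7, config["num_humans"]) humans (both bounds inclusive)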
while n < n_humans_loading:
anim_len = 0
# the animation needs to be shorter than config["max_human_anim_len"] and longer than config["min_human_anim_len"] (at least 0)
while anim_len < max(config["min_human_anim_len"].get(), 0) or anim_len > config["max_human_anim_len"].get():
folder = rng.choice(human_folders)
random_name = rng.choice(os.listdir(os.path.join(human_export_folder, folder)))
asset_path = local_file_prefix + os.path.join(human_export_folder, folder, random_name,
random_name + ".usd")
tmp_pkl = pkl.load(open(os.path.join(human_export_folder, folder, random_name, random_name + ".pkl"), 'rb'))
anim_len = tmp_pkl['ef']
print("Loading human {} from {}".format(random_name, folder))
used_ob_stl_paths.append(os.path.join(human_export_folder, folder, random_name, random_name + ".stl"))
human_anim_len.append(tmp_pkl['ef'])
if "verts" in tmp_pkl.keys():
my_humans_heights.append(tmp_pkl['verts'][:, :, 2])
else:
my_humans_heights.append(None)
my_humans.append(random_name)
load_human(human_base_prim_path, n, asset_path, dynamic_prims, added_prims)
stl_path = os.path.join(human_export_folder, folder, random_name, random_name + ".stl")
this_mesh = mesh.Mesh.from_file(stl_path)
areas.append((this_mesh.x.max() - this_mesh.x.min()) * (this_mesh.y.max() - this_mesh.y.min()))
tot_area += areas[-1]
# if not config["use_area"].get():
n += 1
# if env_area / area_polygon * 100 > config["area_percentage"].get():
# break
x, y, z, yaw = position_object(environment, type=1, objects=my_humans, ob_stl_paths=used_ob_stl_paths,
max_collisions=int(config["allow_collision"].get()))
to_be_removed = []
human_prim_list = []
body_origins = []
for n, human in enumerate(my_humans):
if z[n] < 0:
to_be_removed.append(n)
tot_area -= areas[n]
else:
set_translate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"),
[x[n] / meters_per_unit, y[n] / meters_per_unit, z[n] / meters_per_unit])
set_rotate(stage.GetPrimAtPath(f"{human_base_prim_path}{n}"), [0, 0, yaw[n]])
human_prim_list.append(f"{human_base_prim_path}{n}")
body_origins.append([x[n], y[n], z[n], yaw[n]])
if len(to_be_removed) > 0:
print("Removing humans that are out of the environment")
to_be_removed.reverse()
cumsum = np.cumsum(added_prims)
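# load_human appended added_prims[n] entries to dynamic_prims for human n,
# so its first entry sits at cumsum[n-1] (0 for the first human) after the
# initial_dynamics offset; popping that same index added_prims[n] times
# removes exactly that human's prims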
for n in to_be_removed:
my_humans.pop(n)
used_ob_stl_paths.pop(n)
my_humans_heights.pop(n)
for _ in range(added_prims[n]):
if n > 0:
dynamic_prims.pop(cumsum[n - 1] + initial_dynamics)
else:
dynamic_prims.pop(initial_dynamics)
human_anim_len.pop(n)
omni.kit.commands.execute("DeletePrimsCommand", paths=[f"{human_base_prim_path}{n}" for n in to_be_removed])
print("Loading human complete")
google_ob_used, shapenet_ob_used = load_objects(config, environment, rng, dynamic_prims)
# IT IS OF CRUCIAL IMPORTANCE THAT AFTER THIS POINT THE RENDER GETS DONE WITH THE SLEEPING CALL! OTHERWISE PATH TRACING SPP WILL GET RUINED
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
simulation_context.stop()
simulation_context.play()
for _ in range(5):
simulation_context.step(render=False)
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
timeline.set_current_time(0)
simulation_step = 0 # this is NOT the frame, this is the "step" (related to physics_hz)
my_recorder = recorder_setup(config['_recorder_settings'].get(), out_dir_npy, config['record'].get(),
config["num_robots"].get() * 1)
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
timeline.set_auto_update(False)
first_start = False
second_start = False
can_change_second_start = False
omni.kit.commands.execute("RosBridgeUseSimTime", use_sim_time=True)
omni.kit.commands.execute("RosBridgeUsePhysicsStepSimTime", use_physics_step_sim_time=True)
# two times, this will ensure that totalSpp is reached
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
last_pub_time = rospy.Time.now()
last_check_time = rospy.Time.now()
if config['debug_vis'].get():
cnt = 0
while True:
cnt += 1
if cnt % 10000 == 0:
import ipdb
ipdb.set_trace()
print("DEBUGGING VIS")
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
for i, cam in enumerate(ros_camera_list):
omni.kit.commands.execute("RosBridgeTickComponent", path=str(cam.GetPath()))
reversing_timeline_ratio = compute_timeline_ratio(human_anim_len, config["reverse_strategy"].get(),
experiment_length)
print(
f"The reversing ratio is {reversing_timeline_ratio}.\n"
f"This implies that every {experiment_length / reversing_timeline_ratio} frames we reverse the animations")
cnt_reversal = 1
ratio_camera = config["ratio_camera"].get()
ratio_odom = config["ratio_odom"].get()
ratio_tf = config["ratio_tf"].get()
starting_to_pub = False
my_recorder._enable_record = False
while kit.is_running():
if can_start:
last_check_time = rospy.Time.now()
if second_start:
if config['record'].get():
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
my_recorder._update()
sleeping(simulation_context, viewport_window_list, raytracing=config["rtx_mode"].get())
starting_to_pub = True
timeline.set_current_time(min(- 1 / (config["physics_hz"].get() / ratio_camera),
-abs(config["bootstrap_exploration"].get())))
simulation_step = int(timeline.get_current_time() * config["physics_hz"].get()) - 1
print("Bootstrap started")
can_start = False
simulation_step += 1
if starting_to_pub and simulation_step == 0:
move_humans_to_ground(my_humans_heights, human_prim_list, simulation_step / ratio_camera, meters_per_unit,
config["max_distance_human_ground"].get())
print("Starting recording NOW!")
msg = String("starting")
starting_pub.publish(msg)
starting_to_pub = False
time.sleep(0.5)
if config['record'].get():
my_recorder._enable_record = True
last_check_time = rospy.Time.now()
if (config["_random_light"].get()["during_experiment"]):
if (simulation_step % config["_random_light"].get()["n-frames"] == 0):
# fixme todo smooth change, idea get max-min and time window
randomize_and_fix_lights(config["_random_light"].get(), rng, env_prim_path, environment.env_limits[-1],
environment.meters_per_unit, is_rtx=config["rtx_mode"].get())
# step the physics
simulation_context.step(render=False)
# get the current time in ROS
print("Clocking...")
omni.kit.commands.execute("RosBridgeTickComponent", path="/ROS_Clock")
time.sleep(0.2)
# publish IMU
print("Publishing IMU...")
pub_imu(imus_handle_list, imu_sensor, imu_pubs, robot_imu_frames, meters_per_unit)
# publish joint status (ca 120 Hz)
if simulation_step % ratio_tf == 0:
print("Publishing joint/tf status...")
for component in ros_transform_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
# publish odometry (60 hz)
if simulation_step % ratio_odom == 0:
print("Publishing odometry...")
c_pose, _ = pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit)
pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit)
_, _ = pub_odom(irotate_cam_odom_frames, irotate_cam_odom_pubs, _dc, meters_per_unit,
irotate_differential_odom_frames)
for component in lidar_components:
omni.kit.commands.execute("RosBridgeTickComponent", path=str(component.GetPath()))
# we consider ratio_camera to forward the animation.
# If you want it different, use ratio_animation < ratio_camera to avoid
# two frames with the same animation point
if second_start:
if simulation_step % ratio_camera == 0:
if my_recorder._enable_record:
# update the image counter externally so that we can use it in the recorder and all images have the same index
my_recorder._counter += 1
if simulation_step / ratio_camera < (experiment_length / reversing_timeline_ratio) * (
cnt_reversal):
timeline.forward_one_frame()
else:
if simulation_step / ratio_camera >= ((experiment_length - 1) / reversing_timeline_ratio) * (
cnt_reversal + 1) or \
(timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds()) < 0:
cnt_reversal += 2
timeline.forward_one_frame()
else:
timeline.rewind_one_frame()
# publish camera (30 hz)
if simulation_step % ratio_camera == 0:
print("Publishing cameras...")
# getting skel pose for each joint
# get_skeleton_info(meters_per_unit, body_origins, body_list)
# FIRST ONE WRITTEN IS AT 1/30 on the timeline
pub_and_write_images(simulation_context, viewport_window_list,
ros_camera_list, config["rtx_mode"].get(),
my_recorder, second_start)
if simulation_step % ratio_camera == 0 and simulation_step / ratio_camera == experiment_length \
and not config["neverending"].get():
print("End of experiment!!!")
simulation_context.pause()
if my_recorder.data_writer is not None:
my_recorder.data_writer.stop_threads()
timeline.set_current_time(0)
context.save_as_stage(os.path.join(out_dir, "loaded_stage.usd"))
experiment_info = {}
experiment_info["config"] = config
experiment_info["reversing_timeline_ratio"] = reversing_timeline_ratio
experiment_info["humans"] = {}
experiment_info["humans"]["ids"] = my_humans
experiment_info["humans"]["folders"] = used_ob_stl_paths
experiment_info["humans"]["origins"] = body_origins # x y z yaw
experiment_info["google_obs"] = google_ob_used
experiment_info["shapenet_obs"] = shapenet_ob_used
experiment_info["environment"] = {}
experiment_info["environment"]["id"] = environment.env_name
experiment_info["environment"]["folder"] = environment.env_path
experiment_info["environment"]["shifts"] = environment.shifts
experiment_info["rng_state"] = rng_state
np.save(os.path.join(out_dir, "experiment_info.npy"), experiment_info)
break
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
# ipdb.post_mortem(tb)
finally:
for pub in odom_pubs:
pub.unregister()
for pub in imu_pubs:
pub.unregister()
for pub in cam_pose_pubs:
pub.unregister()
for pub in start_explorer_pubs:
pub.unregister()
for pub in send_waypoint_pubs:
pub.unregister()
parent.shutdown()
rospy.signal_shutdown("my_simulation complete")
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/first_run.py | import argparse
import confuse
import os
base_environment_path = "" # please edit this e.g. GRADE-RR/usds/env_base.usd
# necessary import
from omni.isaac.kit import SimulationApp
# simply use this to correctly parse booleans
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
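# e.g. boolean_string("True") -> True, boolean_string("false") -> False;
# anything else raises ValueError (argparse's type=bool would instead treat
# any non-empty string, including "False", as True)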
parser = argparse.ArgumentParser(description="Your first IsaacSim run")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False, help="Use rtx when True, use path tracing when False")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("first_run", __name__)
config.set_args(args)
# create a kit object which is your Simulation App
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# !!! you can ONLY load Isaac modules AFTER this point !!!
import omni
import omni.usd
import omni.physx
from omni.isaac.core import SimulationContext
from omni.isaac.core.utils.stage import is_stage_loading
# after here you can do everything that you desire
# first step is usually opening a basic stage, perhaps with some assets already in as the sky
omni.usd.get_context().open_stage(base_environment_path, None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage() # used to access the elements of the simulation
simulation_context = SimulationContext(physics_dt=1.0 / 60, rendering_dt=1.0 / 60, stage_units_in_meters=0.01, backend='torch')
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
for _ in range(100):
simulation_context.render()
simulation_context.step(render=False)
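# the loop above warms up both pipelines: render() draws frames without
# advancing physics, while step(render=False) advances physics without drawing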
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/correct_data.py | import argparse
import carb
import confuse
import ipdb
import math
import numpy as np
import os
import roslaunch
import rospy
import scipy.spatial.transform as tf
import sys
import time
import traceback
import trimesh
import yaml
from omni.isaac.kit import SimulationApp
from time import sleep
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
"""
Exported information will have the shape of
[[prim_asset_path, bbox] [prim_asset_path,skel] [prim_asset_path, init_tf, init_rot]]
prim_asset_path is the string path of the asset in the simulation.
They will be processed in order, so expect groups of human,cloth --- possibly reversed
All is output in WORLD frame. Please check the notes regarding projection in camera frame.
bbox will be of shape (ef, 8, 3) if only one bbox is saved or (ef, 2, 8, 3) if both are saved
ef will be either the last animated frame (given the simulated environment) or the last frame of the animations + 1
if you need to access the bbox of the mesh after that just use [-1]
skel is the smpl skeleton info
use the flags below to export only the skeleton, only the garments or only the body or any combination
init_rot is the same of the info file
init_tf is equal, except that here we account for the small vertical translation that is added to meshes very close to the ground
-- this was a bug during the data generation which actually has very little influence (< 0.1 cm in vertical displacement)
-- the design choice was to save the placement value and then have always a way to recover the eventual vertical displacement which is anyway based on a rule (check human_utils.py:move_humans_to_ground)
everything is in meters
NOTE: We start writing images from timeline.frame = 1 (1/fps) since the "forward_timeline" call has been placed _before_ the publishing
"""
try:
parser = argparse.ArgumentParser(description="Get Bounding Boxes")
parser.add_argument("--experiment_folder", type=str,
help="The experiment folder with the USD file and the info file")
parser.add_argument("--body", type=boolean_string, default=True, help="When true process the bodies")
parser.add_argument("--garments", type=boolean_string, default=True, help="When true process the garments")
parser.add_argument("--base_human_path", type=str, default="my_human_", help="Human prim base path")
parser.add_argument("--headless", type=boolean_string, default=False, help="Whether run this headless or not")
parser.add_argument("--write", type=boolean_string, default=True, help="Whether to write results")
parser.add_argument("--both", type=boolean_string, default=False,
help="Whether to write both vertex types -- preference in code is both - fast - slow")
parser.add_argument("--fast", type=boolean_string, default=True,
help="Whether to write only the axis-aligned box or the oriented one")
parser.add_argument("--get_skel", type=boolean_string, default=True, help="Whether to include the skeleton info")
parser.add_argument("--skel_root", type=str, default="avg_root",
help="This is a recognizable last part of the root of the skeleton prim, in our case _avg_root "
+ "It will process ONLY the path of which the last part is this root")
parser.add_argument("--correct_poses", type=boolean_string, default=True)
parser.add_argument("--old_poses", type=str, default='')
parser.add_argument("--decimate", type=int, default=0, help="Decimate the mesh by this factor")
parser.add_argument("--output_dir_humans", type=str)
parser.add_argument("--output_dir_poses", type=str)
args, unknown = parser.parse_known_args()
config = confuse.Configuration("BoundingBoxes", __name__)
config.set_args(args)
exp_info = np.load(os.path.join(config["experiment_folder"].get(), "experiment_info.npy"), allow_pickle=True)
exp_info = exp_info.item()
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.objects_utils import *
from utils.environment_utils import *
from utils.human_utils import *
from omni.syntheticdata import sensors, helpers
def get_obj_poses(time, mappings=None):
"""Get pose of all objects with a semantic label."""
stage = omni.usd.get_context().get_stage()
if mappings is None or len(mappings) == 0:
mappings = helpers.get_instance_mappings()
pose = []
for m in mappings:
prim_path = m[1]
prim = stage.GetPrimAtPath(prim_path)
prim_tf = omni.usd.get_world_transform_matrix(prim, time)
pose.append((str(prim_path), m[2], str(m[3]), np.array(prim_tf)))
return pose
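# get_obj_poses(Usd.TimeCode(t)) returns one tuple per semantically-labelled
# prim: (prim_path, two label fields taken from the instance mapping entry,
# and the prim's 4x4 world transform at that time code)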
simulation_environment_setup(need_ros=False)
local_file_prefix = "my-computer://"
omni.usd.get_context().open_stage(local_file_prefix + config["experiment_folder"].get() + "/loaded_stage.usd", None)
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
simulation_context = SimulationContext(physics_dt=1.0 / exp_info["config"]["physics_hz"].get(),
rendering_dt=1.0 / exp_info["config"]["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.start_simulation()
meters_per_unit = UsdGeom.GetStageMetersPerUnit(stage)
set_raytracing_settings(exp_info["config"]["physics_hz"].get())
timeline = setup_timeline(exp_info["config"])
base_human_path = config["base_human_path"].get()
fast, both, slow = False, False, False
if config["both"].get():
both = True
elif config["fast"].get():
fast = True
else:
slow = True
get_skel = config["get_skel"]
human_prims = [x for x in stage.GetPrimAtPath('/').GetAllChildren() if base_human_path in x.GetName()]
humans_info = exp_info["humans"]
for prim in stage.Traverse():
if "human" in prim.GetName():
imageable = UsdGeom.Imageable(prim)
imageable.MakeVisible()
for id, folder in enumerate(humans_info['folders']):
# if folder does not exist, remove the _with_cache from ids, folder
if not os.path.exists(folder):
humans_info['ids'][id] = humans_info['ids'][id].replace("_with_cache", "")
humans_info['folders'][id] = humans_info['folders'][id].replace("_with_cache", "")
human_prim = human_prims[id]
human_global_path = str(omni.usd.get_composed_references_from_prim(human_prim)[0][0].assetPath)
human_global_path = human_global_path.replace("_with_cache", "")
human_prim.GetReferences().SetReferences([Sdf.Reference(assetPath=human_global_path)])
for _ in range(100):
kit.update()
write = config["write"].get()
if write:
results = []
decimate = config["decimate"].get()
helper_list_global = []
helper_list_skel = []
skel_root = config["skel_root"].get()
smpl_info_path = ""
experiment_length = exp_info["config"]["experiment_length"].get()
reversing_timeline_ratio = exp_info['reversing_timeline_ratio']
fn = 0
forward = True
cnt_reversal = 1
mapping = []
if config["correct_poses"].get():
if config["old_poses"].get() != '':
old_poses = np.load(config["old_poses"].get(), allow_pickle=True)
for i in range(len(old_poses)):
mapping.append([i, old_poses[i][0], old_poses[i][1], old_poses[i][2]])
else:
print("Using local mapping")
sleep(10)
i = 0
out_dir_humans = config["output_dir_humans"].get()
out_dir_poses = config["output_dir_poses"].get()
for folder in [out_dir_humans, out_dir_poses]:
if not os.path.exists(folder):
os.makedirs(folder)
for simulation_step in range(experiment_length+1):
fn += 1
stime = time.time()
if simulation_step < (experiment_length / reversing_timeline_ratio) * (
cnt_reversal):
forward = True
timeline.forward_one_frame() # or you can advance time directly here as it was done previously, note that you need to remove the "+ratio_camera" above
else:
if simulation_step >= ((experiment_length - 1) / reversing_timeline_ratio) * (
cnt_reversal + 1) or \
(timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds()) < 0:
cnt_reversal += 2
forward = True
timeline.forward_one_frame()
else:
forward = False
timeline.rewind_one_frame()
i = i + 1 if forward else i - 1
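# fn counts the written frames, while i replays the exact forward/rewind
# pattern used during recording, i.e. the timeline frame that was rendered
# at step fn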
print(f"Processing frame {fn} which correspond to fram number {i}-th in the timeline at ctim {timeline.get_current_time()}")
print("You can check the time with the information in Vieport/camera/i-th.npy. Field 'ctime'.")
results = {'bbox3d':{},'skel':{},'verts':{},'init_pose':{}}
if config["correct_poses"].get():
poses = get_obj_poses(Usd.TimeCode(i),mapping)
if write:
try:
np.save(os.path.join(out_dir_poses, f"{fn}.npy"), poses)
except:
print("Error saving poses")
import ipdb; ipdb.set_trace()
for prim in stage.Traverse():
prim_path = str(prim.GetPath()).lower()
if base_human_path in prim_path:
if (get_skel and skel_root in prim_path and prim_path[:prim_path.find(skel_root)] not in helper_list_skel) or \
(str(prim.GetTypeName()).lower() == "mesh" and "points" in prim.GetPropertyNames()):
parent = prim.GetParent()
refs = omni.usd.get_composed_references_from_prim(parent)
while len(refs) == 0:
parent = parent.GetParent()
refs = omni.usd.get_composed_references_from_prim(parent)
human_global_path = str(omni.usd.get_composed_references_from_prim(parent)[0][0].assetPath)
human_global_path = human_global_path[len(local_file_prefix):]
index = humans_info['folders'].index(human_global_path[:-3] + "stl")
init_tf = np.array(parent.GetAttribute("xformOp:translate").Get())
init_rot = parent.GetAttribute("xformOp:orient").Get()
init_rot = np.array([init_rot.GetImaginary()[0], init_rot.GetImaginary()[1], init_rot.GetImaginary()[2],
init_rot.GetReal()])
init_rot_mat = tf.Rotation.from_quat(init_rot).as_matrix()
if write and str(parent.GetPath()):
results['init_pose'][str(parent.GetPath())] = [init_tf, init_rot]
if human_global_path[:-3] + "pkl" != smpl_info_path:
smpl_info_path = human_global_path[:-3] + "pkl"
# (re)load the SMPL animation info only when it differs from the cached one
smpl_anim_info = pkl.load(open(smpl_info_path, 'rb'))
smpl_info = smpl_anim_info["info"]
if 'zrot' in smpl_info.keys():
r = smpl_info["zrot"]
else:
r = smpl_info['poses'][0, :3][2]
rot_mat = tf.Rotation.from_euler('z', r).as_matrix()
if (get_skel and skel_root in prim_path):
helper_list_skel.append(prim_path[:prim_path.find(skel_root)])
skeleton, joint_token = AnimationSchema.SkelJoint(prim).GetJoint()
skel_cache = UsdSkel.Cache()
skel_query = skel_cache.GetSkelQuery(UsdSkel.Skeleton(skeleton.GetPrim()))
xfCache = UsdGeom.XformCache()
skeleton_info = np.empty((1, 3), dtype=object)
xfCache.SetTime(i)
transforms = skel_query.ComputeJointWorldTransforms(xfCache)
translates, rotations, scales = UsdSkel.DecomposeTransforms(transforms)
skeleton_info[0] = [np.array(translates) * meters_per_unit, np.array(rotations),
np.array(scales) * meters_per_unit]
if write:
results['skel'][str(prim.GetPath())] = np.array(skeleton_info)
else:
points = UsdGeom.PointBased(prim)
if both:
bounds = np.zeros((1, 2, 8, 3))
else:
bounds = np.zeros((1, 8, 3))
points_in_mesh = points.ComputePointsAtTime(i, Usd.TimeCode(i))
points_in_mesh = np.array(points_in_mesh)
# bound = points.ComputeWorldBound(i, "default")
# for j in range(8):
# print(bound.ComputeAlignedRange().GetCorner(j))
points_in_mesh = ((points_in_mesh @ rot_mat.T @ init_rot_mat.T) + init_tf * meters_per_unit)
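# points are row vectors, so V @ R.T applies rotation R: first the animation
# z-rotation (rot_mat), then the prim's initial orientation (init_rot_mat),
# then the initial translation, yielding vertices in world frame (meters)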
# normals = prim.GetAttribute("normals").Get(i)
# normals = np.array(normals)
results['verts'][str(prim.GetPath())] = points_in_mesh
oldmesh = trimesh.PointCloud(points_in_mesh)
# decimate points_in_mesh
if decimate > 1:
points_in_mesh = points_in_mesh[::decimate]
mymesh = trimesh.PointCloud(points_in_mesh)
if fast:
temp_bounds = oldmesh.bounding_box.vertices
elif slow:
temp_bounds = mymesh.bounding_box_oriented.vertices
elif both:
temp_bounds = [oldmesh.bounding_box.vertices, mymesh.bounding_box_oriented.vertices]
if write:
if both:
results['bbox3d'][str(prim.GetPath())] = {}
results['bbox3d'][str(prim.GetPath())]['aligned'] = np.array(temp_bounds[0])
results['bbox3d'][str(prim.GetPath())]['oriented'] = np.array(temp_bounds[1])
else:
results['bbox3d'][str(prim.GetPath())] = np.array(temp_bounds)
humans = []
# for each results['bbox3d'] get the human (/my_human_x) and combine its bounding boxes
dic_keys = list(results['bbox3d'].keys())
for key in dic_keys:
newkey = key[:key[1:].find("/")+1]
if newkey not in humans:
humans.append(newkey)
if newkey not in results['bbox3d'].keys():
results['bbox3d'][newkey] = results['bbox3d'][key]
else:
# extend
if both:
results['bbox3d'][newkey]['aligned'] = np.concatenate(
(results['bbox3d'][newkey]['aligned'], results['bbox3d'][key]['aligned']))
results['bbox3d'][newkey]['oriented'] = np.concatenate(
(results['bbox3d'][newkey]['oriented'], results['bbox3d'][key]['oriented']))
else:
results['bbox3d'][newkey] = np.concatenate((results['bbox3d'][newkey], results['bbox3d'][key]))
# merge the boxes
for key in humans:
if both:
points_in_mesh = results['bbox3d'][key]['aligned']
mymesh = trimesh.PointCloud(points_in_mesh)
temp_bounds = mymesh.bounding_box.vertices
results['bbox3d'][key]['aligned'] = np.array(temp_bounds)
points_in_mesh = results['bbox3d'][key]['oriented']
mymesh = trimesh.PointCloud(points_in_mesh)
temp_bounds = mymesh.bounding_box_oriented.vertices
results['bbox3d'][key]['oriented'] = np.array(temp_bounds)
else:
if slow:
points_in_mesh = results['bbox3d'][key]
mymesh = trimesh.PointCloud(points_in_mesh)
temp_bounds = mymesh.bounding_box_oriented.vertices
results['bbox3d'][key] = np.array(temp_bounds)
else:
points_in_mesh = results['bbox3d'][key]
mymesh = trimesh.PointCloud(points_in_mesh)
temp_bounds = mymesh.bounding_box.vertices
results['bbox3d'][key] = np.array(temp_bounds)
results = np.array(results, dtype=object)
print(f"etime {time.time() - stime}")
if write:
try:
np.save(os.path.join(config["output_dir_humans"].get(), f"{fn}.npy"), results)
except:
import ipdb; ipdb.set_trace()
np.save(os.path.join(config["output_dir_humans"].get(), f"{fn}.npy"), results)
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
import ipdb
ipdb.set_trace()
finally:
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/savana_simulation.py | import carb
import rospy
from omni.isaac.kit import SimulationApp
import argparse
import os
import time
import numpy as np
import roslaunch
from time import sleep
import yaml
import confuse
import ipdb, traceback, sys
def boolean_string(s):
if s.lower() not in {'false', 'true'}:
raise ValueError('Not a valid boolean string')
return s.lower() == 'true'
try:
parser = argparse.ArgumentParser(description="Dynamic Worlds Simulator")
parser.add_argument("--config_file", type=str, default="config.yaml")
parser.add_argument("--headless", type=boolean_string, default=True, help="Wheter to run it in headless mode or not")
parser.add_argument("--rtx_mode", type=boolean_string, default=False, help="Use rtx when True, use path tracing when False")
parser.add_argument("--record", type=boolean_string, default=True, help="Writing data to the disk")
parser.add_argument("--debug_vis", type=boolean_string, default=False, help="When true continuosly loop the rendering")
parser.add_argument("--neverending", type=boolean_string, default=False, help="Never stop the main loop")
parser.add_argument("--fix_env", type=str, default="",
help="leave it empty to have a random env, fix it to use a fixed one. Useful for loop processing")
args, unknown = parser.parse_known_args()
config = confuse.Configuration("DynamicWorlds", __name__)
config.set_file(args.config_file)
config.set_args(args)
can_start = True
CONFIG = {"display_options": 3286, "width": 1280, "height": 720, "headless": config["headless"].get()}
kit = SimulationApp(launch_config=CONFIG, experience=f"{os.environ['EXP_PATH']}/omni.isaac.sim.python.kit")
# Cannot move before SimApp is launched
import utils.misc_utils
from utils.misc_utils import *
from utils.robot_utils import *
from utils.simulation_utils import *
from utils.environment_utils import *
simulation_environment_setup()
# set timeline of the experiment
timeline = setup_timeline(config)
rospy.init_node("my_isaac_ros_app", anonymous=True, disable_signals=True, log_level=rospy.ERROR)
starting_pub = rospy.Publisher('starting_experiment', String, queue_size=10)
rng = np.random.default_rng()
rng_state = np.random.get_state()
local_file_prefix = ""
# setup environment variables
meters_per_unit = config["meters_per_unit"].get()
environment = environment(config, rng, local_file_prefix, meters_per_unit)
out_dir = os.path.join(config['out_folder'].get(), environment.env_name)
out_dir_npy = os.path.join(config['out_folder_npy'].get(), environment.env_name)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
os.environ["ROS_LOG_DIR"] = out_dir
omni.usd.get_context().open_stage(local_file_prefix + config["base_env_path"].get(), None)
# Wait two frames so that stage starts loading
kit.update()
kit.update()
print("Loading stage...")
while is_stage_loading():
kit.update()
print("Loading Complete")
context = omni.usd.get_context()
stage = context.get_stage()
set_stage_up_axis("Z")
if config["clean_base_env"].get():
omni.kit.commands.execute("DeletePrimsCommand", paths=["/World/GroundPlane"])
# do this AFTER loading the world
simulation_context = SimulationContext(physics_dt=1.0 / config["physics_hz"].get(),
rendering_dt=1.0 / config["render_hz"].get(),
stage_units_in_meters=0.01)
simulation_context.initialize_physics()
physx_interface = omni.physx.acquire_physx_interface()
physx_interface.start_simulation()
_clock_graph = add_clock() # add ROS clock
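# in this script the ROS clock is an OmniGraph graph ticked via
# og.Controller.evaluate_sync, replacing the RosBridgeTickComponent command
# used in the older scripts above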
simulation_context.play()
for _ in range(10):
simulation_context.step()
og.Controller.evaluate_sync(_clock_graph)
last_pub_time = rospy.Time.now()
simulation_context.stop()
kit.update()
# use rtx while setting up!
set_raytracing_settings(config["physics_hz"].get())
env_prim_path = environment.load_and_center(config["env_prim_path"].get())
process_semantics(config["env_prim_path"].get())
ros_camera_list = []
ros_transform_components = [] # list of tf and joint components, one (of each) for each robot
viewport_window_list = []
dynamic_prims = []
imus_handle_list = []
robot_odom_frames = []
robot_imu_frames = []
camera_pose_frames = []
imu_pubs = []
odom_pubs = []
cam_pose_pubs = []
simulation_context.play()
for _ in range(100):
og.Controller.evaluate_sync(_clock_graph)
simulation_context.step()
print("Loading robots..")
from omni.isaac.sensor import _sensor
_is = _sensor.acquire_imu_sensor_interface()
_dc = dynamic_control_interface()
robot_base_prim_path = config["robot_base_prim_path"].get()
usd_robot_path = str(config["usd_robot_path"].get())
old_h_ap = []
old_v_ap = []
robot_init_loc = []
robot_init_ang = []
simulation_context.stop()
for n in range(config["num_robots"].get()):
import_robot(robot_base_prim_path, n, usd_robot_path, local_file_prefix)
if config["init_loc"].get()["use"]:
# assumes init_loc.use is set and that each init_loc entry is a per-robot list indexed by n
x = config["init_loc"].get()["x"][n]
y = config["init_loc"].get()["y"][n]
z = config["init_loc"].get()["z"][n]
yaw = np.deg2rad(config["init_loc"].get()["yaw"][n])
roll = np.deg2rad(config["init_loc"].get()["roll"][n])
pitch = np.deg2rad(config["init_loc"].get()["pitch"][n])
robot_init_loc.append([x,y,z])
robot_init_ang.append([roll, pitch, yaw])
set_drone_joints_init_loc(f"{robot_base_prim_path}{n}", [x / meters_per_unit, y / meters_per_unit, z / meters_per_unit], [roll, pitch, yaw],
(environment.env_limits[5]) / meters_per_unit)
add_ros_components(robot_base_prim_path, n, ros_transform_components, ros_camera_list, viewport_window_list,
camera_pose_frames, cam_pose_pubs, imu_pubs, robot_imu_frames,
robot_odom_frames, odom_pubs, None, #lidars = None
dynamic_prims, config, old_h_ap, old_v_ap, _is, simulation_context, _clock_graph)
kit.update()
if config["use_robot_traj"].get():
add_robot_traj(f"{robot_base_prim_path}{n}",config,meters_per_unit,timeline.get_time_codes_per_seconds())
for n in range(config["num_robots"].get()):
add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ap, old_v_ap, config, simulation_context,
config["num_robots"].get())
for _ in range(50):
simulation_context.render()
print("Loading robot complete")
print("WARNING: CAMERA APERTURE MANUAL SET NO LONGER WORKS, NEEDS TO BE FIXED BY NVIDIA!!!!")
time.sleep(5)
for index, cam in enumerate(viewport_window_list):
camera = stage.GetPrimAtPath(cam.get_active_camera())
camera.GetAttribute("horizontalAperture").Set(old_h_ap[index])
camera.GetAttribute("verticalAperture").Set(old_v_ap[index])
# IT IS OF CRUCIAL IMPORTANCE THAT AFTER THIS POINT THE RENDER GETS DONE WITH THE SLEEPING CALL! OTHERWISE PATH TRACING SPP WILL GET RUINED
if (config["rtx_mode"].get()):
set_raytracing_settings(config["physics_hz"].get())
else:
set_pathtracing_settings(config["physics_hz"].get())
omni.usd.get_context().get_selection().set_selected_prim_paths([], False)
simulation_context.stop()
simulation_context.play()
for _ in range(5):
simulation_context.step(render=False)
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
timeline.set_current_time(0)
simulation_step = 0 # this is NOT the frame, this is the "step" (related to physics_hz)
my_recorder = recorder_setup(config['_recorder_settings'].get(), out_dir_npy, config['record'].get())
timeline.set_current_time(0) # set to 0 to be sure that the first frame is recorded
timeline.set_auto_update(False)
# sleep twice: this ensures that totalSpp is reached
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
if config['debug_vis'].get():
cnt = 0
while True:
cnt += 1
if cnt % 10000 == 0:
import ipdb
ipdb.set_trace()
print("DEBUGGING VIS")
simulation_context.step(render=False)
simulation_context.step(render=True)
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
for i, cam in enumerate(ros_camera_list):
omni.kit.commands.execute("RosBridgeTickComponent", path=str(cam.GetPath()))
ratio_camera = config["ratio_camera"].get()
ratio_odom = config["ratio_odom"].get()
ratio_tf = config["ratio_tf"].get()
starting_to_pub = False
my_recorder._enable_record = False
forward = True
goal_list = []
exp_len = config["anim_exp_len"].get()
if not config["use_robot_traj"].get() and config["use_joint_traj"].get():
for elem in config["robot_traj"].get():
goal_list.append([elem["pose"]["x"], elem["pose"]["y"], elem["pose"]["z"],
elem["pose"]["roll"], elem["pose"]["pitch"], elem["pose"]["yaw"]])
while kit.is_running():
if can_start:
if config['record'].get():
# reload_references("/World/home")
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
my_recorder._update()
sleeping(simulation_context, viewport_window_list, config["rtx_mode"].get())
starting_to_pub = True
timeline.set_current_time(min(- 1 / (config["physics_hz"].get() / ratio_camera),
-abs(config["bootstrap_exploration"].get())))
simulation_step = int(timeline.get_current_time() * config["physics_hz"].get()) - 1
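# e.g. with physics_hz=240, ratio_camera=8 and bootstrap_exploration=1 the timeline
# starts at min(-1/30, -1) = -1 s, i.e. roughly 240 physics steps run before
# recording starts at simulation_step == 0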
print("Bootstrap started")
can_start = False
simulation_step += 1
if starting_to_pub and simulation_step == 0:
print("Starting recording NOW!")
msg = String("starting")
starting_pub.publish(msg)
time.sleep(0.5)
starting_to_pub = False
if config['record'].get():
my_recorder._enable_record = True
# step the physics
simulation_context.step(render=False)
# get the current time in ROS
print("Clocking...")
og.Controller.evaluate_sync(_clock_graph)
ctime = timeline.get_current_time()
simulation_context.render()
timeline.set_current_time(ctime)
# publish IMU
print("Publishing IMU...")
pub_imu(_is, imu_pubs, robot_imu_frames, meters_per_unit)
# publish joint/tf status (approx. 120 Hz with the default physics_hz/ratio_tf)
if simulation_step % ratio_tf == 0:
print("Publishing joint/tf status...")
for component in ros_transform_components:
og.Controller.set(og.Controller.attribute(f"{component}/OnImpulseEvent.state:enableImpulse"), True)
# publish odometry (60 Hz)
if simulation_step % ratio_odom == 0:
print("Publishing odometry...")
c_pose, c_angle = pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit)
pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit)
if config["use_joint_traj"].get():
if len(goal_list) > 0 and simulation_step >= 0:
# this needs to be expanded to multiple robots
goal_list = check_pose_and_goals(robot_init_loc[0], robot_init_ang[0], c_pose[0], c_angle[0], "/my_robot_0", goal_list, meters_per_unit, simulation_step == 0)
if len(goal_list) == 0:
break
# we use ratio_camera to advance the animation.
# If you want a different rate, keep ratio_animation < ratio_camera to avoid
# two frames with the same animation point
if simulation_step % ratio_camera == 0:
if my_recorder._enable_record:
# update the image counter externally so that we can use it in the recorder and all images have the same index
my_recorder._counter += 1
if (simulation_step > 0 and (simulation_step / ratio_camera + 1) % exp_len == 0):
forward = not forward
if (timeline.get_current_time() - 1 / timeline.get_time_codes_per_seconds() < 0):
forward = True
if forward:
timeline.forward_one_frame()
else:
timeline.rewind_one_frame()
# publish camera (30 hz)
if simulation_step % ratio_camera == 0:
ctime = timeline.get_current_time()
print("Publishing cameras...")
pub_and_write_images(simulation_context, viewport_window_list,
ros_camera_list, config["rtx_mode"].get(), my_recorder)
timeline.set_current_time(ctime)
except:
extype, value, tb = sys.exc_info()
traceback.print_exc()
# ipdb.post_mortem(tb)
finally:
for pub in odom_pubs:
pub.unregister()
for pub in imu_pubs:
pub.unregister()
for pub in cam_pose_pubs:
pub.unregister()
parent.shutdown()
rospy.signal_shutdown("my_simulation complete")
simulation_context.stop()
try:
kit.close()
except:
pass
|
eliabntt/GRADE-RR/simulator/configs/config_multi_robot.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "/media/ebonetto/WindowsData/Demo_GRADE"
use_stl: True # setting this to False disables STL loading, and thus placement AND robot heading
human_path: "/ps/project/irotate/cloth3d/exported_usd/"
base_env_path: "/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/usds/empty.usd"
clean_base_env: False
only_placement: False
robot_mesh_path: ["/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/meshes/robotino.dae",
"/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/meshes/drone.dae","/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/meshes/drone.dae"]
usd_robot_path: ["/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/multi_robot/usds/robotino.usd",
"/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/usds/drone_paper.usd",
"/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/isaac_sim_manager/usds/drone_paper.usd"]
out_folder: "/media/ebonetto/WindowsData/exp_out2"
out_folder_npy: "/media/ebonetto/WindowsData/exp_out2"
#out_folder: "/home/ebonetto/exp_out"
fps: 30
num_robots: 3
num_humans: 40
max_distance_human_ground: 0.1 # max human-to-ground distance considered when forcing the first-frame grounding of animations
allow_collision: 200
experiment_length: 3600 # camera frames length
autonomous: True # true -> FUEL, false -> random goals
obstacles: {
"shapenet": 0,
"google": 0
}
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
bootstrap_exploration: 1 # seconds to bootstrap exploration; effective duration is max(abs(this_value), ratio_camera/physics_hz)
reverse_strategy: "avg" # in [min, max, avg, half, none], works only with animated sequences
robot_sensor_size: [ 640, 480 ]
npy_sensor_size: [ 640, 480]
_random_light: { "intensity": True,
"color": True,
"intensity_interval": [ 600, 4500 ], # [min, max], for rtx
"during_experiment": False,
"n-frames": 24,
"smooth": False }
_random_roughness: {
"enabled": True,
"intensity_interval": [ 0.1, 0.9 ]
}
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
human_base_prim_path: "/my_human_"
max_human_anim_len: 500 # max human anim length to be considered in frames
min_human_anim_len: 50 # min human anim length to be loaded in frames
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": False, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": False, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": False, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": True, "npy": True },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
google_obj_folder: "/ps/project/irotate/google_scanned_objects"
google_obj_shortlist: ""
shapenet_local_dir: "/ps/project/irotate/ShapeNet"
shapenet_username: ""
shapenet_password: ""
synsetId: "random"
modelId: "random"
is_iRotate: [True, False, False] |
eliabntt/GRADE-RR/simulator/configs/config_paper.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "/ps/project/irotate/3DFRONT/USD-exports"
use_stl: True # setting this to False disables STL loading, and thus placement AND initial robot heading
use_npy: True # use the limits of the environment saved in the npy file located in the same folder as the environment
meters_per_unit: 0.01
human_path: "/ps/project/irotate/cloth3d/exported_usd/"
base_env_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/env_base.usd"
reload_references: False
generate_map: True
clean_base_env: False
only_placement: False
robot_mesh_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/meshes/drone.dae"
usd_robot_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/drone_2022.usd"
out_folder: "/media/ebonetto/WindowsData/exp_out2/"
out_folder_npy: "/media/ebonetto/WindowsData/exp_out2/"
fps: 30
num_robots: 1
num_humans: 40
max_distance_human_ground: 0.1 # max human-to-ground distance considered when forcing the first-frame grounding of animations
allow_collision: 200
experiment_length: 1800 # camera frames length
autonomous: True # true -> FUEL, false -> random goals
obstacles: {
"shapenet": 0, # 5, 10
"google": 1
}
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
bootstrap_exploration: 1 # seconds to bootstrap exploration; effective duration is max(abs(this_value), ratio_camera/physics_hz)
reverse_strategy: "avg" # in [min, max, avg, half, none], works only with animated sequences
robot_sensor_size: [ 640, 480 ]
npy_sensor_size: [ 1920, 1080]
_random_light: { "intensity": True,
"color": True,
"intensity_interval": [ 600, 4500 ], # [min, max], for rtx
"during_experiment": False,
"n-frames": 24,
"smooth": False }
_random_roughness: {
"enabled": True,
"intensity_interval": [ 0.1, 0.9 ]
}
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
human_base_prim_path: "/my_human_"
max_human_anim_len: 500 # max human anim length to be considered in frames
min_human_anim_len: 50 # min human anim length to be loaded in frames
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": False, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": False, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": False, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": True, "npy": True },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
google_obj_folder: "/ps/project/irotate/google_scanned_objects"
google_obj_shortlist: ""
shapenet_local_dir: "/ps/project/irotate/ShapeNet"
shapenet_username: ""
shapenet_password: ""
synsetId: "random"
modelId: "random"
is_iRotate: False
|
eliabntt/GRADE-RR/simulator/configs/config_savana.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "/media/ebonetto/WindowsData/Demo_GRADE"
use_stl: False # setting this to False disables STL loading, and thus placement AND robot heading
base_env_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/env_base.usd"
clean_base_env: True
only_placement: True
robot_mesh_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/meshes/drone.dae"
usd_robot_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/drone_2022.usd"
out_folder: "/media/ebonetto/WindowsData/exp_out2022"
out_folder_npy: "/media/ebonetto/WindowsData/exp_out2022"
fps: 30
num_robots: 1
anim_exp_len: 450 # after how many frames we should roll back the animations
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
bootstrap_exploration: 1 # seconds to bootstrap exploration; effective duration is max(abs(this_value), ratio_camera/physics_hz)
robot_sensor_size: [ 640, 480 ]
npy_sensor_size: [ 1920,1080]
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
use_npy: True
meters_per_unit: 0.01
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": True, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": False, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": False, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": True, "npy": True },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
# the following cannot be both true at the same time
# if so, only the robot traj will be executed
# if both false we assume an external source is publishing something to your robot (in our case on /my_robot_0/joint_commands)
use_robot_traj: False # this is an absolute value. Note that the main root link and the actual position of the robot may differ based on the initial shift (which remains constant)
use_joint_traj: True # this is a relative value w.r.t. the starting location
robot_traj: # remember that movement will be linear and instantaneous, with no acceleration; this implies no odom or IMU data. If you want those, please add the same trajectory to a joint publisher.
- {pose: {x: 0, y: 0, z: 0, roll: 0, pitch: 0, yaw: 0}, time: 0}
- {pose: {x: -16.61, y: 4.4, z: 6.55, roll: 20, pitch: -10, yaw: 15}, time: 2}
- {pose: {x: -58.83, y: 11.00, z: -2.67, roll: 4, pitch: -22, yaw: 60}, time: 3}
- {pose: {x: 56.38, y: -55.85, z: 45.23, roll: -10, pitch: 30, yaw: 120}, time: 7}
- {pose: {x: -20.95, y: -37.64, z: -4.46, roll: 10, pitch: 50, yaw: 240}, time: 10}
- {pose: {x: 0, y: 0, z: 0, roll: 0, pitch: 0, yaw: 0}, time: 0}
init_loc: {
"use": [True],
"x": [26.35],
"y": [241.43],
"z": [8.57],
"yaw": [271],
"roll": [0],
"pitch": [0]
} |
eliabntt/GRADE-RR/simulator/configs/humans_and_objects.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
physics_hz: 240 # the size of a single physics step
render_hz: 240 # not influencing anything for now
base_env_path: "" # the base environment, e.g. GRADE-RR/usds/env_base.usd
env_path: "" # the parent folder that contains your environments, in subfolders
use_stl: True # use the environment STL for placement AND robot heading
use_npy: True # use the environment limits saved in the npy file located in the same folder as the environment
meters_per_unit: 0.01 # how many meters are in one single unit in the simulation. In this case x=1 will be 1 cm.
usd_robot_path: "" # the usd path of the robot, e.g. GRADE-RR/usds/drone_2022.usd
num_robots: 1 # how many robots we want to load
robot_sensor_size: [640,480]
ratio_joints: 2
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
# prefixes where to load the prims
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_" # 0, 1, 2 ... num_robots
obstacles: {
"shapenet": 0, # 5, 10
"google": 1
}
human_base_prim_path: "/my_human_"
max_human_anim_len: 500 # max human anim length to be considered in frames
min_human_anim_len: 50 # min human anim length to be loaded in frames
google_obj_folder: "" # the main folder of the google_scanned_objects models
google_obj_shortlist: "" # check out here https://github.com/eliabntt/GRADE-RR/blob/064c1b888727c6faa191f88519184dc272a8b950/simulator/utils/objects_utils.py#L55
shapenet_local_dir: "" # the main folder of the shapenet predownloaded models
shapenet_username: "" # leave empty, or find a way to download from the website dynamically
shapenet_password: ""
synsetId: "random" # check out here https://github.com/eliabntt/GRADE-RR/blob/064c1b888727c6faa191f88519184dc272a8b950/simulator/utils/objects_utils.py#L22
modelId: "random" |
eliabntt/GRADE-RR/simulator/configs/world_and_robot.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
physics_hz: 240 # the size of a single physics step
render_hz: 240 # not influencing anything for now
base_env_path: "" # the base environment, e.g. GRADE-RR/usds/env_base.usd
env_path: "" # the parent folder that contains your environments, in subfolders
use_stl: True # use the environment STL for placement AND robot heading
use_npy: True # use the environment limits saved in the npy file located in the same folder as the environment
meters_per_unit: 0.01 # how many meters are in one single unit in the simulation. In this case x=1 will be 1 cm.
usd_robot_path: "" # the usd path of the robot, e.g. GRADE-RR/usds/drone_2022.usd
num_robots: 1 # how many robots we want to load
# prefixes where to load the prims
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_" # 0, 1, 2 ... num_robots |
eliabntt/GRADE-RR/simulator/configs/config.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "3DFRONT/USD-exports"
#env_path: "/media/ebonetto/WindowsData/Demo_GRADE"
use_stl: True # setting this to False disables STL loading, and thus placement AND robot heading
human_path: "cloth3d/exported_usd/"
base_env_path: "../usds/env_base.usd"
meters_per_unit: 1
clean_base_env: False
robot_mesh_path: "../meshes/drone.dae"
usd_robot_path: "../usds/drone.usd"
out_folder: "exp_out"
out_folder_npy: "exp_out"
num_robots: 1
num_humans: 40 # min 5, if 0 change the main code
max_distance_human_ground: 0.1 # max human-to-ground distance considered when forcing the first-frame grounding of animations
allow_collision: 200
experiment_length: 1800 # camera frames length
autonomous: True # true -> FUEL, false -> random goals
obstacles: {
"shapenet": 0,
"google": 0
}
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
bootstrap_exploration: 1 # seconds to bootstrap exploration; effective duration is max(abs(this_value), ratio_camera/physics_hz)
reverse_strategy: "avg" # in [min, max, avg, half, none], works only with animated sequences
robot_sensor_size: [ 640, 480 ]
npy_sensor_size: [ 1920, 1080]
_random_light: { "intensity": True,
"color": True,
"intensity_interval": [ 600, 4500 ], # [min, max], for rtx
"during_experiment": False,
"n-frames": 24,
"smooth": False }
_random_roughness: {
"enabled": True,
"intensity_interval": [ 0.1, 0.9 ]
}
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
human_base_prim_path: "/my_human_"
max_human_anim_len: 500 # max human anim length to be considered in frames
min_human_anim_len: 50 # min human anim length to be loaded in frames
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": True, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": True, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": False, "npy": False },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
google_obj_folder: "google_scanned_objects"
google_obj_shortlist: ""
shapenet_local_dir: "ShapeNet"
shapenet_username: ""
shapenet_password: ""
synsetId: "random"
modelId: "random"
is_iRotate: False
# the following cannot be both true at the same time
# if so, only the robot traj will be executed
# if both false we assume an external source is publishing something to your robot (in our case on /my_robot_0/joint_commands)
use_robot_traj: False # this is an absolute value. Note that the main root link and the actual position of the robot may differ based on the initial shift (which remains constant)
use_joint_traj: False # this is a relative value w.r.t. the starting location
robot_traj: # remember that movement will be linear and instantaneous, with no acceleration; this implies no odom or IMU data. If you want those, please add the same trajectory to a joint publisher.
- {pose: {x: 0, y: 0, z: 0, roll: 0, pitch: 0, yaw: 0}, time: 0}
- {pose: {x: -16.61, y: 4.4, z: 6.55, roll: 20, pitch: -15, yaw: 15}, time: 2}
- {pose: {x: -58.83, y: 11.00, z: -2.67, roll: 4, pitch: -27, yaw: 60}, time: 3}
- {pose: {x: 56.38, y: -55.85, z: 45.23, roll: -10, pitch: 40, yaw: 120}, time: 7}
- {pose: {x: -20.95, y: -37.64, z: -4.46, roll: 27, pitch: 15, yaw: 240}, time: 10}
init_loc: {
"use": False,
"x": 26.35,
"y": 241.43,
"z": 8.57,
"yaw": 271,
"roll": 0,
"pitch": 0
} |
eliabntt/GRADE-RR/simulator/configs/config_zebra_datagen.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "/media/ebonetto/WindowsData/Zebra_envs/Usable"
use_stl: False # setting this to False disables STL loading, and thus placement AND robot heading
use_npy: False
base_env_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/env_base.usd"
zebra_anims_loc: "/media/ebonetto/WindowsData/Zebra_anims"
randomize_sky: True # only if allowed
robot_mesh_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2021.2.1/GRADE-RR/meshes/drone.dae"
usd_robot_path: "/media/ebonetto/WindowsData/ov/isaac_sim-2022.2.1/GRADE-RR/usds/drone_2022.usd"
out_folder: "/media/ebonetto/WindowsData/zebra_out_close"
out_folder_npy: "/media/ebonetto/WindowsData/zebra_out_close"
fps: 30
num_robots: 3
experiment_length: 220
anim_exp_len: 200
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
npy_sensor_size: [1920, 1080]
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": False, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": False, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": False, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": True, "npy": True },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
min_number_zebras: 2
max_number_zebras: 5
|
eliabntt/GRADE-RR/simulator/configs/config_irotate.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
env_path: "/media/ebonetto/WindowsData/Demo_GRADE"
use_stl: True # setting this to False disables STL loading, and thus placement AND robot heading
base_env_path: "/home/ebonetto/Desktop/empty.usd"
clean_base_env: False
only_placement: False
robot_mesh_path: "/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/main_repo/meshes/robotino.dae"
usd_robot_path: "/home/ebonetto/.local/share/ov/pkg/isaac_sim-2021.2.1/main_repo/usds/robotino.usd"
out_folder: "/media/ebonetto/WindowsData/exp_out2/"
out_folder_npy: "/media/ebonetto/WindowsData/exp_out2/"
fps: 30
num_robots: 1
experiment_length: 180 # camera frames length
autonomous: True # true -> FUEL, false -> random goals
physics_hz: 240 # NOTE THAT THIS IS THE RATE OF CLOCK AND IMU
render_hz: 240 # LEAVE IT EQUAL TO PHYSICS HZ
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
bootstrap_exploration: 1 # seconds to bootstrap exploration; effective duration is max(abs(this_value), ratio_camera/physics_hz)
robot_sensor_size: [ 640, 480 ]
npy_sensor_size: [ 1920, 1080]
_random_light: { "intensity": True,
"color": True,
"intensity_interval": [ 600, 4500 ], # [min, max], for rtx
"during_experiment": False,
"n-frames": 24,
"smooth": False }
_random_roughness: {
"enabled": True,
"intensity_interval": [ 0.1, 0.9 ]
}
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_"
_recorder_settings: {
"rgb": { "enabled": True },
"depth": { "enabled": False, "colorize": False, "npy": True },
"depthLinear": { "enabled": True, "colorize": False, "npy": True },
"instance": { "enabled": True, "colorize": False, "npy": True, "mappings": True },
"semantic": { "enabled": False, "colorize": False, "npy": True, "mappings": True },
"bbox_2d_tight": { "enabled": True, "colorize": False, "npy": True },
"bbox_2d_loose": { "enabled": True, "colorize": False, "npy": True },
"normals": { "enabled": True, "colorize": False, "npy": True },
"motion-vector": { "enabled": False, "colorize": True, "npy": True },
"bbox_3d": { "enabled": True, "colorize": False, "npy": True },
"camera": { "enabled": True, "colorize": True, "npy": True },
"poses": { "enabled": True, "colorize": True, "npy": True },
}
is_iRotate: True |
eliabntt/GRADE-RR/simulator/configs/robot_with_ros.yaml | # GENERAL NOTE the paths need to be ABSOLUTE!
physics_hz: 240 # the size of a single physics step
render_hz: 240 # not influencing anything for now
base_env_path: "" # the base environment, e.g. GRADE-RR/usds/env_base.usd
env_path: "" # the parent folder that contains your environments, in subfolders
use_stl: True # use the environment STL for placement AND robot heading
use_npy: True # use the environment limits saved in the npy file located in the same folder as the environment
meters_per_unit: 0.01 # how many meters are in one single unit in the simulation. In this case x=1 will be 1 cm.
usd_robot_path: "" # the usd path of the robot, e.g. GRADE-RR/usds/drone_2022.usd
num_robots: 1 # how many robots we want to load
robot_sensor_size: [640,480]
ratio_joints: 2
ratio_tf: 2 # physics_hz/ratio_tf = tf publish hz
ratio_odom: 4 # physics_hz/ratio_odom = odom publish hz
ratio_camera: 8 # physics_hz/ratio_cam = imgs publish hz
# prefixes where to load the prims
env_prim_path: "/World/home"
robot_base_prim_path: "/my_robot_" # 0, 1, 2 ... num_robots
|
eliabntt/GRADE-RR/simulator/utils/robot_utils.py | import utils.misc_utils
from omni.isaac.core.utils.prims import set_targets
from scipy.spatial.transform import Rotation
from utils.misc_utils import *
from omni.isaac.core.utils.render_product import create_hydra_texture
def create_odom_message(_dc, robot_body_ptr, handle, meters_per_unit):
"""
Create odometry message for the robot_body_ptr.
Converts the readings from IsaacSim units to m/s where necessary.
Gets the current rostime.
The header frame is "world" and the child frame is derived from the "handle".
"""
lin_vel = _dc.get_rigid_body_local_linear_velocity(robot_body_ptr)
ang_vel = _dc.get_rigid_body_angular_velocity(robot_body_ptr)
pose = _dc.get_rigid_body_pose(robot_body_ptr)
odom_msg = Odometry()
odom_msg.header.frame_id = "world"
odom_msg.header.stamp = rospy.Time.now()
odom_msg.child_frame_id = handle[1:] if handle.startswith("/") else handle
odom_msg.pose.pose.position.x = pose.p.x * meters_per_unit
odom_msg.pose.pose.position.y = pose.p.y * meters_per_unit
odom_msg.pose.pose.position.z = pose.p.z * meters_per_unit
odom_msg.pose.pose.orientation.x = pose.r.x
odom_msg.pose.pose.orientation.y = pose.r.y
odom_msg.pose.pose.orientation.z = pose.r.z
odom_msg.pose.pose.orientation.w = pose.r.w
odom_msg.twist.twist.linear.x = lin_vel.x * meters_per_unit
odom_msg.twist.twist.linear.y = lin_vel.y * meters_per_unit
odom_msg.twist.twist.linear.z = lin_vel.z * meters_per_unit
odom_msg.twist.twist.angular.x = ang_vel.x
odom_msg.twist.twist.angular.y = ang_vel.y
odom_msg.twist.twist.angular.z = ang_vel.z
p_cov = np.array([0.0] * 36).reshape(6, 6)
p_cov[0:2, 0:2] = 0.00
p_cov[5, 5] = 0.00
odom_msg.pose.covariance = tuple(p_cov.ravel().tolist())
odom_msg.twist.covariance = tuple(p_cov.ravel().tolist())
return odom_msg
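# Hypothetical usage sketch (frame and scale as in the main scripts):
#   robot_body_ptr = _dc.get_rigid_body("/my_robot_0/yaw_link")
#   odom_msg = create_odom_message(_dc, robot_body_ptr, "/my_robot_0/yaw_link", meters_per_unit)
#   odom_pub.publish(odom_msg)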
def create_diff_odom_message(_dc, robot_body_ptr, handle, meters_per_unit, base_body_ptr, base_handle):
"""
Create a differential odometry message for the robot_body_ptr, expressed relative to the base_body_ptr.
Converts the readings from IsaacSim units to m/s where necessary.
Gets the current rostime.
The header frame is the "base_handle" and the child frame is derived from the "handle".
"""
lin_vel = _dc.get_rigid_body_local_linear_velocity(robot_body_ptr)
ang_vel = _dc.get_rigid_body_angular_velocity(robot_body_ptr)
pose = _dc.get_rigid_body_pose(robot_body_ptr)
base_lin_vel = _dc.get_rigid_body_local_linear_velocity(base_body_ptr)
base_ang_vel = _dc.get_rigid_body_angular_velocity(base_body_ptr)
base_pose = _dc.get_rigid_body_pose(base_body_ptr)
odom_msg = Odometry()
odom_msg.header.frame_id = base_handle
odom_msg.header.stamp = rospy.Time.now()
odom_msg.child_frame_id = handle[1:] if handle.startswith("/") else handle
odom_msg.pose.pose.position.x = (pose.p.x - base_pose.p.x) * meters_per_unit
odom_msg.pose.pose.position.y = (pose.p.y - base_pose.p.y) * meters_per_unit
odom_msg.pose.pose.position.z = (pose.p.z - base_pose.p.z) * meters_per_unit
q1 = Quaternion(base_pose.r.w, base_pose.r.x, base_pose.r.y, base_pose.r.z)
q2 = Quaternion(pose.r.w, pose.r.x, pose.r.y, pose.r.z)
q = q1.conjugate * q2
odom_msg.pose.pose.orientation.x = q.x
odom_msg.pose.pose.orientation.y = q.y
odom_msg.pose.pose.orientation.z = q.z
odom_msg.pose.pose.orientation.w = q.w
odom_msg.twist.twist.linear.x = (lin_vel.x - base_lin_vel.x) * meters_per_unit
odom_msg.twist.twist.linear.y = (lin_vel.y - base_lin_vel.y) * meters_per_unit
odom_msg.twist.twist.linear.z = (lin_vel.z - base_lin_vel.z) * meters_per_unit
odom_msg.twist.twist.angular.x = (ang_vel.x - base_ang_vel.x)
odom_msg.twist.twist.angular.y = (ang_vel.y - base_ang_vel.y)
odom_msg.twist.twist.angular.z = (ang_vel.z - base_ang_vel.z)
p_cov = np.array([0.0] * 36).reshape(6, 6)
p_cov[0:2, 0:2] = 0.00
p_cov[5, 5] = 0.00
odom_msg.pose.covariance = tuple(p_cov.ravel().tolist())
odom_msg.twist.covariance = tuple(p_cov.ravel().tolist())
return odom_msg
def create_camera_pose_message(_dc, camera_body_ptr, handle, meters_per_unit):
"""
Similar to the odometry message, but it is just a PoseStamped, in this case for the camera.
"""
pose = _dc.get_rigid_body_pose(camera_body_ptr)
camera_pose = PoseStamped()
camera_pose.header.frame_id = "world"
camera_pose.header.stamp = rospy.Time.now()
camera_pose.pose.position.x = pose.p.x * meters_per_unit
camera_pose.pose.position.y = pose.p.y * meters_per_unit
camera_pose.pose.position.z = pose.p.z * meters_per_unit
camera_pose.pose.orientation.x = pose.r.x
camera_pose.pose.orientation.y = pose.r.y
camera_pose.pose.orientation.z = pose.r.z
camera_pose.pose.orientation.w = pose.r.w
return camera_pose
def add_pose_tree(path: str, irotate: bool=False):
"""
Add the tf publisher to the desired path.
This path should be the robot itself.
Each robot has a pose tree.
"""
if path.startswith("/"):
path = path[1:]
og.Controller.edit(
{"graph_path": f"/{path}/TFActionGraph", "evaluator_name": "execution"},
{
og.Controller.Keys.CREATE_NODES: [
("ReadSimTime", "omni.isaac.core_nodes.IsaacReadSimulationTime"),
("OnImpulseEvent", "omni.graph.action.OnImpulseEvent"),
("PublishTF", "omni.isaac.ros_bridge.ROS1PublishTransformTree"),
],
og.Controller.Keys.CONNECT: [
("OnImpulseEvent.outputs:execOut", "PublishTF.inputs:execIn"),
("ReadSimTime.outputs:simulationTime", "PublishTF.inputs:timeStamp"),
],
og.Controller.Keys.SET_VALUES: [
("PublishTF.inputs:nodeNamespace", f"/{path}"),
]
},
)
# fixme
if irotate:
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path('/my_robot_0/ROS_PoseTree.poseTreePubTopic'),
value='/tf2',
prev='/tf')
set_target_prims(primPath=f"/{path}/TFActionGraph/PublishTF", inputName="inputs:targetPrims",
targetPrimPaths=[f"/{path}"])
return f"/{path}/TFActionGraph"
def add_camera_and_viewport(path: str, resolution: list, old_h_ape, old_v_ape, sc, index=0,
robot_index=0, cam_per_robot=1, camera_path="Camera"):
"""
The function create first the ROSBridge Camera and then the corresponding viewport.
index is the number of the camera for the given robot.
headless is a boolean that indicates if the simulation is headless or not (i.e. create a visual viewport or not).
robot_index correspond to the n-th robot in the scene.
"""
resolution = tuple(resolution)
camera_path = path + f"/{camera_path}"
index = robot_index * cam_per_robot + index
stage = omni.usd.get_context().get_stage()
camera = stage.GetPrimAtPath(camera_path)
old_h_ape.append(camera.GetAttribute("horizontalAperture").Get())
old_v_ape.append(camera.GetAttribute("verticalAperture").Get())
viewport_name = "Viewport" + (f" {index + 1}" if str(index + 1) != "0" and str(index + 1) != "1" else "")
sc.step()
keys = og.Controller.Keys
(camera_graph, _, _, _) = og.Controller.edit(
{
"graph_path": f"{path}/ROSCamera_{index}_Graph",
"evaluator_name": "push",
"pipeline_stage": og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_ONDEMAND,
},
{
keys.CREATE_NODES: [
("OnTick", "omni.graph.action.OnTick"),
("createViewport", "omni.isaac.core_nodes.IsaacCreateViewport"),
("setViewportResolution", "omni.isaac.core_nodes.IsaacSetViewportResolution"),
("getRenderProduct", "omni.isaac.core_nodes.IsaacGetViewportRenderProduct"),
("setCamera", "omni.isaac.core_nodes.IsaacSetCameraOnRenderProduct"),
("cameraHelperRgb", "omni.isaac.ros_bridge.ROS1CameraHelper"),
("cameraHelperInfo", "omni.isaac.ros_bridge.ROS1CameraHelper"),
("cameraHelperDepth", "omni.isaac.ros_bridge.ROS1CameraHelper"),
],
keys.CONNECT: [
("OnTick.outputs:tick", "createViewport.inputs:execIn"),
("createViewport.outputs:execOut", "getRenderProduct.inputs:execIn"),
("createViewport.outputs:viewport", "getRenderProduct.inputs:viewport"),
("createViewport.outputs:execOut", "setViewportResolution.inputs:execIn"),
("createViewport.outputs:viewport", "setViewportResolution.inputs:viewport"),
("getRenderProduct.outputs:execOut", "setCamera.inputs:execIn"),
("getRenderProduct.outputs:renderProductPath", "setCamera.inputs:renderProductPath"),
("setCamera.outputs:execOut", "cameraHelperRgb.inputs:execIn"),
("setCamera.outputs:execOut", "cameraHelperInfo.inputs:execIn"),
("setCamera.outputs:execOut", "cameraHelperDepth.inputs:execIn"),
("getRenderProduct.outputs:renderProductPath", "cameraHelperRgb.inputs:renderProductPath"),
("getRenderProduct.outputs:renderProductPath", "cameraHelperInfo.inputs:renderProductPath"),
("getRenderProduct.outputs:renderProductPath", "cameraHelperDepth.inputs:renderProductPath"),
],
og.Controller.Keys.SET_VALUES: [
("createViewport.inputs:viewportId", index),
("setViewportResolution.inputs:height", int(resolution[1])),
("setViewportResolution.inputs:width", int(resolution[0])),
("cameraHelperRgb.inputs:frameId", path[1:]),
("cameraHelperRgb.inputs:topicName", path + f"/{index}/rgb/image_raw"),
("cameraHelperRgb.inputs:type", "rgb"),
("cameraHelperDepth.inputs:frameId", path[1:]),
("cameraHelperDepth.inputs:topicName", path + f"/{index}/depth/image_raw"),
("cameraHelperDepth.inputs:type", "depth"),
("cameraHelperInfo.inputs:frameId", path[1:]),
("cameraHelperInfo.inputs:topicName", path + f"/{index}/camera_info"),
("cameraHelperInfo.inputs:type", "camera_info"),
],
},
)
set_targets(
prim=omni.usd.get_context().get_stage().GetPrimAtPath(f"{path}/ROSCamera_{index}_Graph/setCamera"),
attribute="inputs:cameraPrim",
target_prim_paths=[camera_path],
)
og.Controller.evaluate_sync(camera_graph)
for _ in range(5):
sc.step()
omni.kit.app.get_app().update()
viewport_handle = [x for x in omni.kit.viewport.window.get_viewport_window_instances()][-1].viewport_api
viewport_handle.set_texture_resolution((resolution[0], resolution[1]))
for _ in range(5):
sc.step()
omni.kit.app.get_app().update()
return camera_graph.get_path_to_graph(), viewport_handle
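# With path="/my_robot_0/camera_link", robot_index=0 and index=0 this publishes on:
#   /my_robot_0/camera_link/0/rgb/image_raw, /my_robot_0/camera_link/0/depth/image_raw
#   and /my_robot_0/camera_link/0/camera_info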
def add_joint_state(path: str):
if path.startswith("/"):
path = path[1:]
og.Controller.edit(
{"graph_path": f"/{path}/JointActionGraph", "evaluator_name": "execution"},
{
og.Controller.Keys.CREATE_NODES: [
("ReadSimTime", "omni.isaac.core_nodes.IsaacReadSimulationTime"),
("OnImpulseEvent", "omni.graph.action.OnImpulseEvent"),
("PublishJointState", "omni.isaac.ros_bridge.ROS1PublishJointState"),
("SubscribeJointState", "omni.isaac.ros_bridge.ROS1SubscribeJointState"),
("ArticulationController", "omni.isaac.core_nodes.IsaacArticulationController"),
],
og.Controller.Keys.CONNECT: [
("OnImpulseEvent.outputs:execOut", "PublishJointState.inputs:execIn"),
("OnImpulseEvent.outputs:execOut", "SubscribeJointState.inputs:execIn"),
("OnImpulseEvent.outputs:execOut", "ArticulationController.inputs:execIn"),
("ReadSimTime.outputs:simulationTime", "PublishJointState.inputs:timeStamp"),
("SubscribeJointState.outputs:jointNames", "ArticulationController.inputs:jointNames"),
("SubscribeJointState.outputs:positionCommand", "ArticulationController.inputs:positionCommand"),
("SubscribeJointState.outputs:velocityCommand", "ArticulationController.inputs:velocityCommand"),
("SubscribeJointState.outputs:effortCommand", "ArticulationController.inputs:effortCommand"),
],
og.Controller.Keys.SET_VALUES: [
# Providing path to Articulation Controller node
# Providing the robot path is equivalent to setting the targetPrim in Articulation Controller node
("ArticulationController.inputs:usePath", True),
("ArticulationController.inputs:robotPath", "/" + path),
# Assigning topic names to the joint state publisher and subscriber
("PublishJointState.inputs:topicName", "/" + path + "/joint_states"),
("SubscribeJointState.inputs:topicName", "/" + path + "/joint_commands"),
],
},
)
# set_target_prims(primPath=f"/{path}/JointActionGraph/SubscribeJointState", targetPrimPaths=[f"/{path}"])
set_target_prims(primPath=f"/{path}/JointActionGraph/PublishJointState", targetPrimPaths=[f"/{path}"])
return f"/{path}/JointActionGraph"
def add_clock():
(_clock_graph, _, _, _) = og.Controller.edit(
{"graph_path": "/ClockActionGraph", "evaluator_name": "push",
"pipeline_stage": og.GraphPipelineStage.GRAPH_PIPELINE_STAGE_ONDEMAND, },
{
og.Controller.Keys.CREATE_NODES: [
("ReadSimTime", "omni.isaac.core_nodes.IsaacReadSimulationTime"),
("OnTick", "omni.graph.action.OnTick"),
("PublishManualClock", "omni.isaac.ros_bridge.ROS1PublishClock"),
],
og.Controller.Keys.CONNECT: [
# Connecting execution of the OnTick node to PublishManualClock so it publishes at every tick
("OnTick.outputs:tick", "PublishManualClock.inputs:execIn"),
# Connecting simulationTime data of ReadSimTime to the clock publisher nodes
("ReadSimTime.outputs:simulationTime", "PublishManualClock.inputs:timeStamp"),
],
og.Controller.Keys.SET_VALUES: [
# Assigning topic names to clock publishers
("PublishManualClock.inputs:topicName", "/clock"),
],
},
)
return _clock_graph
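# Typical usage (as in the main scripts): evaluate the graph explicitly whenever
# /clock should be published, e.g. once per physics step:
#   _clock_graph = add_clock()
#   og.Controller.evaluate_sync(_clock_graph)
#   simulation_context.step()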
def get_robot_yaw(x, y, z, env_mesh, shifts):
"""
Computes the best robot yaw angle for the given position.
Casts rays from the robot position to the environment mesh and returns the angle.
It considers 36 rays.
For each ray we compute the distance to the nearest point on the mesh.
Infinite distances are interpolated from the neighboring rays.
We compute a rolling-window average (window of 4 rays, i.e. ~40 degrees) of the distances.
Returns the best yaw angle in RADIANS.
"""
checking_steps = 36
angles = [[np.cos(np.pi * 2.0 / checking_steps * c_step), np.sin(np.pi * 2.0 / checking_steps * c_step), 0] for
c_step in range(checking_steps)]
positions = [[x + shifts[0], y + shifts[1], z + shifts[2]] for _ in range(checking_steps)]
checking_rays = trimesh.proximity.longest_ray(env_mesh, positions, angles)
checking_rays[checking_rays < 0] = 0
nans, idx = inf_helper(checking_rays)
checking_rays[nans] = np.interp(idx(nans), idx(~nans), checking_rays[~nans])
checking_rays[checking_rays > 8] = 8
rolling_rays = int(40 / (360 / checking_steps))
checking_rays = np.append(checking_rays, checking_rays[:rolling_rays - 1])
checking_rays = np.convolve(checking_rays, np.ones(rolling_rays, dtype=int), 'valid') / rolling_rays
return (np.argmax(checking_rays) + rolling_rays / 2) * 2 * np.pi / checking_steps
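# Worked example: with checking_steps=36 the rays are 10 deg apart and
# rolling_rays = int(40/10) = 4, so each candidate yaw scores the average free
# distance over a ~40 deg sector; the returned angle points at the widest one.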
def get_vp_list():
from omni.kit.viewport.window import get_viewport_window_instances
return [x for x in get_viewport_window_instances()]
def create_viewport(camera_path, is_headless, index, resolution, old_h_ape, old_v_ape, sc):
"""
The function creates the viewport for the given camera.
Creates a handle and a viewport, and sets the window position/size if the system is not headless.
"""
stage = omni.usd.get_context().get_stage()
camera = stage.GetPrimAtPath(camera_path)
old_h_ape.append(camera.GetAttribute("horizontalAperture").Get())
old_v_ape.append(camera.GetAttribute("verticalAperture").Get())
index += 1 # omniverse starts from 1
viewport_name = "Viewport" + (f" {index}" if str(index) != "0" and str(index) != "1" else "")
viewport = omni.kit.viewport.utility.get_active_viewport_window(window_name=viewport_name)
viewport_handle = omni.kit.viewport.utility.get_viewport_from_window_name(viewport_name)
if not viewport_handle:
viewport = omni.kit.viewport.utility.create_viewport_window(name=viewport_name)
viewport_handle = omni.kit.viewport.utility.get_viewport_from_window_name(viewport.name)
if not is_headless:
viewport.setPosition(1000, 400)
viewport.height, viewport.width = 300, 300
viewport_handle.set_active_camera(camera_path)
for _ in range(10):
sc.step()
viewport_handle.set_texture_resolution((resolution[0], resolution[1]))
sc.step()
return viewport_handle, viewport.name
def ros_launchers_setup(roslaunch, env_limits_shifted, config):
"""
Setup the ros launchers for the simulation.
We need an exploration manager for every robot, and a collision checking service to place the objects.
"""
roslaunch_files = []
roslaunch_args = []
launch_files = []
print("launching ros nodes...")
if not config["only_placement"].get():
for i in range(config["num_robots"].get()):
# TODO hack to be compatible with the old version
if type(config["is_iRotate"].get()) == list:
is_irotate = config["is_iRotate"].get()[i]
else:
is_irotate = config["is_iRotate"].get()
if not is_irotate:
cli_args1 = ["exploration_manager", "my_exploration.launch",
# cli_args1 = ["/home/ebonetto/catkin_ws/src/FUEL/fuel_planner/exploration_manager/launch/my_exploration.launch",
"box_min_x:={:.4f}".format(env_limits_shifted[0] - 0.2),
"box_min_y:={:.4f}".format(env_limits_shifted[1] - 0.2),
"box_min_z:={:.4f}".format(env_limits_shifted[2]),
"box_max_x:={:.4f}".format(env_limits_shifted[3] + 0.2),
"box_max_y:={:.4f}".format(env_limits_shifted[4] + 0.2),
"box_max_z:={:.4f}".format(min(3, env_limits_shifted[5] - 0.1)),
f"mav_name:={config['robot_base_prim_path'].get()}{i}"]
roslaunch_files.append(roslaunch.rlutil.resolve_launch_arguments(cli_args1)[0])
roslaunch_args.append(cli_args1[2:])
launch_files.append((roslaunch_files[-1], roslaunch_args[-1]))
else:
cli_args1 = ["custom_joint_controller_ros_irotate", "publish_joint_commands_node.launch",
"position_limit_x:={:.4f}".format(env_limits_shifted[3] + 0.2),
"position_limit_y:={:.4f}".format(env_limits_shifted[4] + 0.2),
"position_limit_z:={:.4f}".format(3),
"robot_id:=1", "frame_id:='base'"]
roslaunch_files.append(roslaunch.rlutil.resolve_launch_arguments(cli_args1)[0])
roslaunch_args.append(cli_args1[2:])
launch_files.append((roslaunch_files[-1], roslaunch_args[-1]))
# TODO hack because we pre-cache the robot mesh
if type(config["robot_mesh_path"].get()) == list:
mesh_path = config["robot_mesh_path"].get()[0]
else:
mesh_path = config["robot_mesh_path"].get()
cli_args2 = ["collision_check", "collision_check.launch",
"robot_mesh_path:={}".format(mesh_path)]
roslaunch_file2 = roslaunch.rlutil.resolve_launch_arguments(cli_args2)[0]
roslaunch_args2 = cli_args2[2:]
launch_files.append((roslaunch_file2, roslaunch_args2))
return launch_files
def create_imu_message(frame, last_reading, meters_per_unit):
"""
Create the IMU message from the last reading.
"""
imu_msg = Imu()
imu_msg.header.frame_id = frame[1:] if frame.startswith("/") else frame
imu_msg.header.stamp = rospy.Time.now()
imu_msg.angular_velocity.x = last_reading.ang_vel_x
imu_msg.angular_velocity.y = last_reading.ang_vel_y
imu_msg.angular_velocity.z = last_reading.ang_vel_z
imu_msg.linear_acceleration.x = last_reading.lin_acc_x * meters_per_unit * meters_per_unit
imu_msg.linear_acceleration.y = last_reading.lin_acc_y * meters_per_unit * meters_per_unit
imu_msg.linear_acceleration.z = last_reading.lin_acc_z * meters_per_unit * meters_per_unit
imu_msg.angular_velocity_covariance = [0, 0, 0, 0, 0, 0, 0, 0, 0]
imu_msg.linear_acceleration_covariance = [0, 0, 0, 0, 0, 0, 0, 0, 0]
return imu_msg
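# Hypothetical usage sketch (the sensor is created by setup_imu_sensor below, so the
# reading path is "<frame>/imu_sensor"):
#   last_reading = _is.get_sensor_sim_reading("/my_robot_0/imu_link/imu_sensor")
#   imu_pub.publish(create_imu_message("/my_robot_0/imu_link", last_reading, meters_per_unit))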
def setup_imu_sensor(_is, config, imu_sensor_path):
"""
Setup the IMU sensor config.
Keep in mind that this is relative to the parent body, so any transform the parent has is already reflected.
"""
add_imu_sensor, sensor = omni.kit.commands.execute(
"IsaacSensorCreateImuSensor",
path="/imu_sensor",
parent=imu_sensor_path,
sensor_period=1 / config["physics_hz"].get(),
orientation=Gf.Quatd(1, 0, 0, 0),
visualize=False,
)
if not add_imu_sensor:
raise Exception("Failed to add IMU sensor")
return sensor
def pub_imu(_is, imu_pubs, robot_imu_frames, meters_per_unit):
"""
Simple message publisher
"""
for index, handle in enumerate(robot_imu_frames):
last_reading = _is.get_sensor_sim_reading(handle + "/imu_sensor")
imu_pubs[index].publish(create_imu_message(handle, last_reading, meters_per_unit))
def pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit):
"""
Simple message publisher
"""
for index, handle in enumerate(camera_pose_frames):
camera_body_ptr = _dc.get_rigid_body(handle)
cam_pose_pubs[index].publish(create_camera_pose_message(_dc, camera_body_ptr, handle, meters_per_unit))
def pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit, diff_odom_frames=[]):
"""
Simple message publisher
"""
odoms = []
angles = []
if len(diff_odom_frames) == 0:
for index, handle in enumerate(robot_odom_frames):
robot_body_ptr = _dc.get_rigid_body(handle)
odom = create_odom_message(_dc, robot_body_ptr, handle, meters_per_unit)
odoms.append([odom.pose.pose.position.x, odom.pose.pose.position.y, odom.pose.pose.position.z])
angles.append(Rotation.from_quat(
[odom.pose.pose.orientation.x, odom.pose.pose.orientation.y, odom.pose.pose.orientation.z,
odom.pose.pose.orientation.w]).as_euler("XYZ"))
odom_pubs[index].publish(odom)
else:
for index, handle in enumerate(robot_odom_frames):
robot_body_ptr = _dc.get_rigid_body(handle)
diff_body_ptr = _dc.get_rigid_body(diff_odom_frames[index])
diff_handle = diff_odom_frames[index][1:] if diff_odom_frames[index].startswith("/") else diff_odom_frames[
index]
odom = create_diff_odom_message(_dc, robot_body_ptr, handle, meters_per_unit, diff_body_ptr, diff_handle)
odoms.append([odom.pose.pose.position.x, odom.pose.pose.position.y, odom.pose.pose.position.z])
angles.append(Rotation.from_quat(
[odom.pose.pose.orientation.x, odom.pose.pose.orientation.y, odom.pose.pose.orientation.z,
odom.pose.pose.orientation.w]).as_euler("XYZ"))
odom_pubs[index].publish(odom)
return odoms, angles
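# In the main loop these publishers are rate-gated by the step counter, e.g.:
#   if simulation_step % ratio_odom == 0:
#       c_pose, c_angle = pub_odom(robot_odom_frames, odom_pubs, _dc, meters_per_unit)
#       pub_cam_pose(camera_pose_frames, cam_pose_pubs, _dc, meters_per_unit)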
def import_robot(robot_base_prim_path, n, usd_robot_path, local_file_prefix=''):
"""
Add the robot to the stage.
Add semantics.
"""
stage = omni.usd.get_context().get_stage()
res, _ = omni.kit.commands.execute("CreateReferenceCommand",
usd_context=omni.usd.get_context(),
path_to=f"{robot_base_prim_path}{n}",
asset_path=local_file_prefix + usd_robot_path,
instanceable=False)
if res:
clear_properties(f"{robot_base_prim_path}{n}")
add_semantics(stage.GetPrimAtPath(f"{robot_base_prim_path}{n}"), "robot")
else:
raise Exception("Failed to import robot")
def get_valid_robot_location(environment, first):
"""
Query the service to place the robot in a free space AND compute an initial good yaw.
"""
x, y, z, _ = position_object(environment, type=0, reset=first)
# robot is nearly circular so I do not have to worry about collisions
if environment.env_mesh is not None:
yaw = get_robot_yaw(x[0], y[0], z[0], environment.env_mesh, environment.shifts)
print(f"Initial yaw: {yaw}")
return x[0], y[0], z[0], yaw
def control_camera(viewport, sc):
sc.step()
if viewport is not None:
import omni.syntheticdata._syntheticdata as sd
stage = omni.usd.get_context().get_stage()
# Required for editing the SDGPipeline graph which exists in the Session Layer
with Usd.EditContext(stage, stage.GetSessionLayer()):
# Get name of rendervar for RGB sensor type
rv_rgb = omni.syntheticdata.SyntheticData.convert_sensor_type_to_rendervar(sd.SensorType.Rgb.name)
# Get path to IsaacSimulationGate node in RGB pipeline
rgb_camera_gate_path = omni.syntheticdata.SyntheticData._get_node_path(
rv_rgb + "IsaacSimulationGate", viewport.get_render_product_path()
)
# Get name of rendervar for DistanceToImagePlane sensor type
rv_depth = omni.syntheticdata.SyntheticData.convert_sensor_type_to_rendervar(
sd.SensorType.DistanceToImagePlane.name)
# Get path to IsaacSimulationGate node in Depth pipeline
depth_camera_gate_path = omni.syntheticdata.SyntheticData._get_node_path(
rv_depth + "IsaacSimulationGate", viewport.get_render_product_path()
)
# Get path to IsaacSimulationGate node in CameraInfo pipeline
camera_info_gate_path = omni.syntheticdata.SyntheticData._get_node_path(
"PostProcessDispatch" + "IsaacSimulationGate", viewport.get_render_product_path()
)
return rgb_camera_gate_path, depth_camera_gate_path, camera_info_gate_path
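# Note: the returned IsaacSimulationGate paths can be used to gate/throttle the rgb,
# depth and camera_info pipelines independently of the render loop; the main scripts
# keep them per camera in ros_camera_list.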
def add_ros_components(robot_base_prim_path, n, ros_transform_components, ros_camera_list, viewport_window_list,
camera_pose_frames, cam_pose_pubs, imu_pubs, robot_imu_frames,
robot_odom_frames, odom_pubs, lidars,
dynamic_prims, config, old_h_ape, old_v_ape, _is, simulation_context, _clock, irotate=False):
"""
Add the ROS components to the robot.
This is done because we need different topics for each robot.
Components added:
- joint_states (publisher and subscriber)
- tf broadcaster
- camera
- camera pose
- imu
- odom
When necessary, we also create the corresponding publisher (whenever the RosBridge component is not available).
Publishers created:
- imu
- odom
- camera pose
"""
ros_transform_components.append(add_joint_state(f"{robot_base_prim_path}{n}"))
ros_transform_components.append(add_pose_tree(f"{robot_base_prim_path}{n}", irotate))
# create camera
component, viewport = add_camera_and_viewport(f"{robot_base_prim_path}{n}/camera_link",
config["robot_sensor_size"].get(),
old_h_ape, old_v_ape, simulation_context,
0, n, cam_per_robot=1) # cam index is useful if you want multiple cameras
cam_outputs = control_camera(viewport, simulation_context)
ros_camera_list.append([n + 0, component, cam_outputs])
viewport_window_list.append(viewport)
# component, viewport = add_camera_and_viewport(f"{robot_base_prim_path}{n}/camera_link",
# config["robot_sensor_size"].get(),
# old_h_ape, old_v_ape, simulation_context,
# 1, n, cam_per_robot=2) # cam index is useful if you want multiple cameras
# cam_outputs = control_camera(viewport, simulation_context)
# ros_camera_list.append([n + 1, component, cam_outputs])
# viewport_window_list.append(viewport)
omni.kit.app.get_app().update()
# append camera pose frame (we need only one) and pubs
camera_pose_frames.append(f"{robot_base_prim_path}{n}/camera_link")
cam_pose_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/camera/pose", PoseStamped, queue_size=10))
for _ in range(10):
og.Controller.set(og.Controller.attribute(f"{ros_transform_components[-1]}/OnImpulseEvent.state:enableImpulse"),
True)
og.Controller.set(og.Controller.attribute(f"{ros_transform_components[-2]}/OnImpulseEvent.state:enableImpulse"),
True)
og.Controller.evaluate_sync(_clock)
simulation_context.step()
# attach IMU sensor to the robot
if irotate:
setup_imu_sensor(_is, config, f"{robot_base_prim_path}{n}/imu_link")
imu_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/imu_cam", Imu, queue_size=10))
robot_imu_frames.append(f"{robot_base_prim_path}{n}/imu_link")
setup_imu_sensor(_is, config, f"{robot_base_prim_path}{n}/base_link")
imu_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/imu_body", Imu, queue_size=10))
robot_imu_frames.append(f"{robot_base_prim_path}{n}/base_link")
robot_odom_frames.append(f"{robot_base_prim_path}{n}/base_link")
else:
setup_imu_sensor(_is, config, f"{robot_base_prim_path}{n}/imu_link")
imu_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/imu_body", Imu, queue_size=10))
robot_imu_frames.append(f"{robot_base_prim_path}{n}/imu_link")
setup_imu_sensor(_is, config, f"{robot_base_prim_path}{n}/camera_link")
imu_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/imu_camera", Imu, queue_size=10))
robot_imu_frames.append(f"{robot_base_prim_path}{n}/camera_link")
robot_odom_frames.append(f"{robot_base_prim_path}{n}/yaw_link")
odom_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/odom", Odometry, queue_size=10))
stage = omni.usd.get_context().get_stage()
dynamic_prims.append(stage.GetPrimAtPath(f"{robot_base_prim_path}{n}"))
if lidars:
stage = omni.usd.get_context().get_stage()
dynamic_prims.append(stage.GetPrimAtPath(f"{robot_base_prim_path}{n}"))
sensor = add_lidar(f"{robot_base_prim_path}{n}/yaw_link", [0, 0, -.1], [0, 0, 0], is_3d=True, is_2d=True)
lidars.append(sensor)
def get_robot_joint_init_loc(name):
"""
It gets the initial location of the robot's joints
:param name: The name of the robot
:return: The initial location of the robot's joints.
"""
stage = omni.usd.get_context().get_stage()
x = UsdPhysics.Joint.Get(stage, name + '/base_link/x_joint').GetLocalPos0Attr().Get()[0]
y = UsdPhysics.Joint.Get(stage, name + '/x_link/y_joint').GetLocalPos0Attr().Get()[1]
z = UsdPhysics.Joint.Get(stage, name + '/y_link/z_joint').GetLocalPos0Attr().Get()[2]
roll = UsdPhysics.RevoluteJoint.Get(stage, name + '/z_link/roll_joint').GetLocalRot0Attr().Get()
roll = Rotation.from_quat([roll.imaginary[0], roll.imaginary[1], roll.imaginary[2], roll.real]).as_euler('XYZ')[0]
pitch = UsdPhysics.RevoluteJoint.Get(stage, name + '/roll_link/pitch_joint').GetLocalRot0Attr().Get()
pitch = Rotation.from_quat([pitch.imaginary[0], pitch.imaginary[1], pitch.imaginary[2], pitch.real]).as_euler('XYZ')[
1]
yaw = UsdPhysics.RevoluteJoint.Get(stage, name + '/pitch_link/yaw_joint').GetLocalRot0Attr().Get()
yaw = Rotation.from_quat([yaw.imaginary[0], yaw.imaginary[1], yaw.imaginary[2], yaw.real]).as_euler('XYZ')[2]
return x, y, z, roll, pitch, yaw
def set_drone_joints_init_loc(name: str, pos: [], orientation: [], upper_zlim: float=100, lower_zlim: float=0, irotate=False):
"""
Move the drone to the specified location by acting on the JOINTS.
PLEASE NOTE: the initial joint position published by joint_states will strangely be 0,0,0. #IsaacBug
The joints should be named as follows:
- base_link/x_joint
- x_link/y_joint
- y_link/z_joint
- z_link/roll_joint
- roll_link/pitch_joint
- pitch_link/yaw_joint
name: the name of the robot (e.g. "my_robot_0", the prim path)
pos: the position of the robot (x,y,z)
orientation: the orientation of the robot (roll,pitch,yaw), in rad
upper_zlim: the upper z limit of the robot (z)
lower_zlim: the lower z limit of the robot (z)
irotate: if True, the joints considered are the iRotate ones
"""
x, y, z = pos
upper_zlim = max(upper_zlim, z)
roll, pitch, yaw = orientation
stage = omni.usd.get_context().get_stage()
if irotate:
UsdPhysics.Joint.Get(stage, name + '/x_link/x_joint').GetLocalPos0Attr().Set(Gf.Vec3f(x, 0, 0))
UsdPhysics.Joint.Get(stage, name + '/y_link/y_joint').GetLocalPos0Attr().Set(Gf.Vec3f(0, y, 0))
yaw = np.rad2deg(yaw)
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), yaw)
)
UsdPhysics.RevoluteJoint.Get(stage, name + '/yaw_link/yaw_joint').GetLocalRot1Attr().Set(Gf.Quatf(quat.GetQuat()))
else:
UsdPhysics.Joint.Get(stage, name + '/base_link/x_joint').GetLocalPos0Attr().Set(Gf.Vec3f(x, 0, 0))
UsdPhysics.Joint.Get(stage, name + '/x_link/y_joint').GetLocalPos0Attr().Set(Gf.Vec3f(0, y, 0))
UsdPhysics.Joint.Get(stage, name + '/y_link/z_joint').GetLocalPos0Attr().Set(Gf.Vec3f(0, 0, z))
stage.GetPrimAtPath(name + '/y_link/z_joint').GetAttribute('physics:lowerLimit').Set(-z + lower_zlim)
stage.GetPrimAtPath(name + '/y_link/z_joint').GetAttribute('physics:upperLimit').Set(upper_zlim - z)
roll = np.rad2deg(roll)
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), roll)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), 0)
)
UsdPhysics.RevoluteJoint.Get(stage, name + '/z_link/roll_joint').GetLocalRot0Attr().Set(Gf.Quatf(quat.GetQuat()))
pitch = np.rad2deg(pitch)
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), pitch)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), 90)
)
UsdPhysics.RevoluteJoint.Get(stage, name + '/roll_link/pitch_joint').GetLocalRot0Attr().Set(
Gf.Quatf(quat.GetQuat()))
yaw = np.rad2deg(yaw)
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), 0)
* Gf.Rotation(Gf.Vec3d.YAxis(), 0)
* Gf.Rotation(Gf.Vec3d.ZAxis(), yaw)
)
UsdPhysics.RevoluteJoint.Get(stage, name + '/pitch_link/yaw_joint').GetLocalRot0Attr().Set(Gf.Quatf(quat.GetQuat()))
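# A minimal usage sketch (hypothetical prim path and pose): teleport a drone to
# (1.0, 2.0, 1.5) with a 45 deg yaw by acting on its joints, assuming the joint
# naming convention listed in the docstring above:
# set_drone_joints_init_loc("/World/my_robot_0", [1.0, 2.0, 1.5],
#                           [0.0, 0.0, np.pi / 4], upper_zlim=3.0, lower_zlim=0.0)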
def add_robot_traj(path: str, config, meters_per_unit, time_codes_per_second):
"""
It adds a translation and rotation animation to the given path,
using the given configuration, meters per unit, and time codes per second
:param path: The path to the USD stage
:type path: str
:param config: The configuration file that contains the robot trajectory
:param meters_per_unit: The scale of the scene
:param time_codes_per_second: This is the number of time codes per second. This is the same as the frame rate of the
animation
"""
clear_properties(path)
for entry in config["robot_traj"].get():
add_translate_anim(path, Gf.Vec3d(entry["pose"]["x"] / meters_per_unit, entry["pose"]["y"] / meters_per_unit,
entry["pose"]["z"] / meters_per_unit),
entry["time"] * time_codes_per_second)
add_rotation_anim(path, Gf.Vec3d(entry["pose"]["roll"], entry["pose"]["pitch"], entry["pose"]["yaw"]),
entry["time"] * time_codes_per_second, use_double=True)
def diff_angle(alpha, beta):
dist = (alpha - beta + np.pi + 2 * np.pi) % (2 * np.pi) - np.pi
return dist
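# Worked example: diff_angle wraps the difference into [-pi, pi), so the distance
# is always the short way around the circle (values are approximate):
# diff_angle(3.1, -3.1)  # ~ -0.083 rad, not 6.2 rad
# diff_angle(0.1, -0.1)  # ~  0.2 rad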
# assume position control
def check_pose_and_goals(init_loc, init_angle, c_pose, c_angle, path, goal_list, meters_per_unit, first):
"""
It sets the target position of the joints to the next goal in the list
:param init_loc: the initial location of the robot
:param init_angle: the initial orientation of the robot
:param c_pose: current pose of the robot
:param c_angle: current angle of the robot
:param path: the path to the robot in the simulation
:param goal_list: a list of goals, each goal is a list of 6 elements: x, y, z, roll, pitch, yaw
:param meters_per_unit: This is the scale of the robot
:param first: whether this is the first time the function is called
:return: The goal list is being returned.
"""
dist_roll = abs(diff_angle(np.deg2rad(goal_list[0][3]), diff_angle(c_angle[0], init_angle[0])))
dist_pitch = abs(diff_angle(np.deg2rad(goal_list[0][4]), diff_angle(c_angle[1], init_angle[1])))
dist_yaw = abs(diff_angle(np.deg2rad(goal_list[0][5]), diff_angle(c_angle[2], init_angle[2])))
sum_dist = dist_roll + dist_pitch + dist_yaw
if not first and \
(np.linalg.norm(np.array([goal_list[0][0], goal_list[0][1], goal_list[0][2]]) - np.array(c_pose) + np.array(
init_loc[0:3])) > 0.8 \
or sum_dist > 0.6):
return goal_list
if not first:
goal_list.pop(0)
if len(goal_list) == 0:
return []
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/base_link/x_joint.drive:linear:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/base_link/x_joint.drive:linear:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/base_link/x_joint.drive:linear:physics:maxForce'),
value=500.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/base_link/x_joint.physxJoint:maxJointVelocity'),
value=200.0, # cm/s
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/base_link/x_joint.drive:linear:physics:targetPosition'),
value=(goal_list[0][0]) / meters_per_unit,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/x_link/y_joint.drive:linear:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/x_link/y_joint.drive:linear:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/x_link/y_joint.drive:linear:physics:maxForce'),
value=500.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/x_link/y_joint.physxJoint:maxJointVelocity'),
value=200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/x_link/y_joint.drive:linear:physics:targetPosition'),
value=(goal_list[0][1]) / meters_per_unit,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/y_link/z_joint.drive:linear:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/y_link/z_joint.drive:linear:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/y_link/z_joint.drive:linear:physics:maxForce'),
value=500.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/y_link/z_joint.physxJoint:maxJointVelocity'),
value=200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/y_link/z_joint.drive:linear:physics:targetPosition'),
value=(goal_list[0][2]) / meters_per_unit,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/z_link/roll_joint.drive:angular:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/z_link/roll_joint.drive:angular:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/z_link/roll_joint.drive:angular:physics:maxForce'),
value=300.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/z_link/roll_joint.physxJoint:maxJointVelocity'),
value=0.2,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/z_link/roll_joint.drive:angular:physics:targetPosition'),
value=(goal_list[0][3]),
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/roll_link/pitch_joint.drive:angular:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/roll_link/pitch_joint.drive:angular:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/roll_link/pitch_joint.drive:angular:physics:maxForce'),
value=300.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/roll_link/pitch_joint.physxJoint:maxJointVelocity'),
value=0.2,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/roll_link/pitch_joint.drive:angular:physics:targetPosition'),
value=(goal_list[0][4]),
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/pitch_link/yaw_joint.drive:angular:physics:stiffness'),
value=1200.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/pitch_link/yaw_joint.drive:angular:physics:damping'),
value=1000.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/pitch_link/yaw_joint.drive:angular:physics:maxForce'),
value=300.0,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/pitch_link/yaw_joint.physxJoint:maxJointVelocity'),
value=1.3,
prev=0.0)
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{path}/pitch_link/yaw_joint.drive:angular:physics:targetPosition'),
value=(goal_list[0][5]),
prev=0.0)
return goal_list
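# The repeated blocks above could be condensed with a table-driven helper. The
# following is a minimal sketch (hypothetical, not used by the rest of the code):
# it applies the same ChangeProperty pattern to a single joint drive.
def _set_joint_drive_sketch(path, joint, drive, stiffness, damping, max_force, max_vel, target):
    base = f'{path}/{joint}.drive:{drive}:physics:'
    props = [(base + 'stiffness', stiffness),
             (base + 'damping', damping),
             (base + 'maxForce', max_force),
             (f'{path}/{joint}.physxJoint:maxJointVelocity', max_vel),
             (base + 'targetPosition', target)]
    for prop_path, value in props:
        omni.kit.commands.execute('ChangeProperty', prop_path=Sdf.Path(prop_path),
                                  value=value, prev=0.0)
# e.g. _set_joint_drive_sketch(path, 'base_link/x_joint', 'linear', 1200.0, 1000.0,
#                              500.0, 200.0, goal_list[0][0] / meters_per_unit)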
def add_irotate_ros_components(camera_odom_frames, camera_odom_pubs, lidar_components, robot_base_prim_path, n):
"""
Add the irotate-specific ros-components to the robot.
"""
camera_odom_frames.append(f"{robot_base_prim_path}{n}/cameraholder_link")
camera_odom_pubs.append(rospy.Publisher(f"{robot_base_prim_path}{n}/camera_odom", Odometry, queue_size=10))
lidar_components.append(add_lidar(f"{robot_base_prim_path}{n}/lasersensor_link", is_2d=True, is_3d=False))
def add_lidar(path, translation=[0, 0, 0], orientation=[0, 0, 0], is_2d=True, is_3d=False, degrees=True):
# drive sim applies 0.5,-0.5,-0.5,w(-0.5), we have to apply the reverse
base_or = tf.Rotation.from_quat([0.5, -0.5, -0.5, -0.5])
orientation = tf.Rotation.from_euler('XYZ', orientation, degrees=degrees)
orientation = (base_or * orientation).as_quat()
success, sensor = omni.kit.commands.execute(
"IsaacSensorCreateRtxLidar",
path="/RTX_Lidar",
parent=path,
config="Example_Rotary",
translation=(translation[0], translation[1], translation[2]),
orientation=Gf.Quatd(orientation[3], orientation[0], orientation[1], orientation[2]), # Gf.Quatd is w,i,j,k
)
omni.kit.app.get_app().update()
omni.kit.app.get_app().update()
omni.kit.app.get_app().update()
render_product_path = rep.create.render_product(sensor.GetPath().pathString, resolution=(1, 1))
# _, render_product_path = create_hydra_texture([1, 1], sensor.GetPath().pathString)
omni.kit.app.get_app().update()
omni.kit.app.get_app().update()
# add the lidar to the graph
# config is isaac_sim-2022.2.1/exts/omni.sensors.nv.lidar/data/Example_Rotary.json
if is_3d:
writer = rep.writers.get("RtxLidar" + "ROS1PublishPointCloud")
writer.initialize(topicName=f"{path}/lidar/point_cloud", frameId=path[1:])
writer.attach([render_product_path])
if is_2d:
writer = rep.writers.get("RtxLidar" + "ROS1PublishLaserScan")
writer.initialize(topicName=f"{path}/lidar/laser_scan", frameId=path[1:], rotationRate=100,
horizontalFov=360, depthRange=[0.1,10000], horizontalResolution=0.1)
writer.attach([render_product_path])
# todo for lidar one can change directly /Render/PostProcess/SDGPipeline/RenderProduct_Isaac_RtxSensorCpuIsaacComputeRTXLidarFlatScan
# but NOT for the 3d lidar
# todo: theoretically I can avoid returning anything, just making sure that I render at each loop
return omni.syntheticdata.SyntheticData._get_node_path(
"PostProcessDispatch" + "IsaacSimulationGate", render_product_path
)
def add_npy_viewport(viewport_window_list, robot_base_prim_path, n, old_h_ape, old_v_ape, config, sc,
tot_num_ros_cam=1):
viewport_npy, _ = create_viewport(f"{robot_base_prim_path}{n}/camera_link/Camera_npy", config["headless"].get(),
tot_num_ros_cam + 1 * n, config["npy_sensor_size"].get(), old_h_ape, old_v_ape, sc)
viewport_window_list.append(viewport_npy)
def change_joint_limit(joint: str, limit):
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(f'{joint}'),
value=(limit),
prev=0.0)
|
eliabntt/GRADE-RR/simulator/utils/environment_utils.py | """
Use this class to load the environment and the related information.
The init function should be used to load the environment.
It will get the environment from a given folder and create the necessary support variables.
"""
from omni.isaac.occupancy_map import _occupancy_map
from omni.isaac.occupancy_map.scripts.utils import update_location, compute_coordinates, generate_image
import utils.misc_utils
from utils.misc_utils import *
class environment:
def __init__(self, config, rng = np.random.default_rng(), local_file_prefix = '', meters_per_unit=0.01):
self.get_environment(config, rng, local_file_prefix)
self.meters_per_unit = meters_per_unit
def set_meters_per_unit(self, meters_per_unit):
self.meters_per_unit = meters_per_unit
def get_environment(self, config, rng: np.random.default_rng, local_file_prefix: str):
"""
If the name is not specified the environment will be taken at random using the rng.
Based on the config one can decide whether to:
1. load the stl of the environment
2. load the environment limits from the npy file [note that this is preferable; otherwise default values will be used]
3. Using the limits, the system will then compute the necessary translations to center the environment in 0,0,0
config: the configuration processed by the main algorithm
rng: global rng
local_file_prefix: necessary to access the local storage from isaacsim
"""
self.env_usd_export_folder = config["env_path"].get()
if config["fix_env"].get() != "":
self.env_name = config["fix_env"].get()
else:
self.env_name = rng.choice([f for f in os.listdir(self.env_usd_export_folder) if not f.startswith('.')])
self.env_path = local_file_prefix + os.path.join(self.env_usd_export_folder, self.env_name, self.env_name + ".usd")
if config["use_stl"].get():
self.env_stl_path = os.path.join(self.env_usd_export_folder, self.env_name, self.env_name + ".stl")
self.env_mesh = trimesh.load(os.path.join(self.env_usd_export_folder, self.env_name, self.env_name + ".stl"))
else:
self.env_stl_path = None
self.env_mesh = None
if config["use_npy"].get():
self.env_info = np.load(os.path.join(self.env_usd_export_folder, self.env_name, self.env_name + ".npy"),
allow_pickle=True)
self.env_info = self.env_info.tolist()
else:
self.env_info = [0, 0, 0, 0, 0, 0, np.array([[-1000, -1000], [-1000, 1000], [1000, 1000], [1000, -1000]])]
self.env_limits = self.env_info[0:6]
self.shifts = [(self.env_limits[0] + self.env_limits[3]) / 2, (self.env_limits[1] + self.env_limits[4]) / 2,
self.env_limits[2]]
self.env_limits_shifted = [self.env_limits[i] - self.shifts[i % 3] for i, _ in enumerate(self.env_limits)]
self.area_polygon = get_area(self.env_info[6])
self.env_polygon = [Point(i[0], i[1], 0) for i in self.env_info[-1]]
def generate_map(self, out_path: str, zlim=[0, 1], cell_size = 0.05, origin=[0, 0, 0]):
"""
WARNING: HACK! ALL UNKNOWN CELLS ARE WHITE!
Generates a map of the environment and saves it to the out_path location on disk.
First it searches for a non-colliding location.
Then it creates a map of the environment.
We override the unknown color to be "white" (i.e. free) as the system marks unknown, unreachable areas.
out_path: the folder where to save the map
zlim: height interval to consider for the projection
cell_size: size of a single cell in the map (cm)
origin: computed origin. Must be a free cell
"""
bound = int(
max(abs(self.env_limits_shifted[0]) + abs(self.env_limits_shifted[3]),
abs(self.env_limits_shifted[1]) + abs(self.env_limits_shifted[4])) / self.meters_per_unit * 1.5)
_om = _occupancy_map.acquire_occupancy_map_interface()
lower_bound = [-bound, -bound, zlim[0]/ self.meters_per_unit]
lower_bound = np.array(lower_bound) - np.array(origin) / self.meters_per_unit
upper_bound = [bound, bound, zlim[1]/ self.meters_per_unit *.8]
upper_bound = np.array(upper_bound) - np.array(origin) / self.meters_per_unit
center = np.array(origin) / self.meters_per_unit
center[2] += 0.1 / self.meters_per_unit # 10 cm above the floor
update_location(_om, center, lower_bound, upper_bound)
_om.set_cell_size(cell_size/self.meters_per_unit)
_om.generate()
image_buffer = generate_image(_om, [0, 0, 0, 255], [255, 255, 255, 255], [255, 255, 255, 255])
dims = _om.get_dimensions()
_im = Image.frombytes("RGBA", (dims.x, dims.y), bytes(image_buffer))
image_width = _im.width
image_height = _im.height
size = [0, 0, 0]
size[0] = image_width * cell_size
size[1] = image_height * cell_size
scale_to_meters = 1.0 / self.meters_per_unit
default_image_name = os.path.join(out_path, "map.png")
top_left, top_right, bottom_left, bottom_right, image_coords = compute_coordinates(_om, cell_size)
ros_yaml_file_text = "image: " + default_image_name
ros_yaml_file_text += f"\nresolution: {float(cell_size / scale_to_meters)}"
ros_yaml_file_text += (
f"\norigin: [{float(bottom_left[0] / scale_to_meters)}, {float(bottom_left[1] / scale_to_meters)}, 0.0000]"
)
ros_yaml_file_text += "\nnegate: 0"
ros_yaml_file_text += f"\noccupied_thresh: {0.65}"
ros_yaml_file_text += "\nfree_thresh: 0.196"
_im.save(default_image_name)
with open(default_image_name[:-3] + "yaml", 'w') as f:
f.write(ros_yaml_file_text)
center = lower_bound
center[2] = -100000000.0
update_location(_om, center, [0, 0, 0], [0, 0, 0])
_om.generate()
# disable_extension('omni.isaac.occupancy_map')
def load_and_center(self, prim_path: str = "/World/home", correct_paths_req: bool = False, push_in_floor: bool = False):
"""
Load the environment from the usd path env_path
Center it wrt the world coordinate frames
The environment is loaded at prim_path
prim_path: path that the environment should have in the prim tree
correct_paths_req: if True, corrects the paths of the assets in the environment
push_in_floor: if True, pushes the environment into the floor a bit. Useful for thin meshes that sometimes are not correctly visualized (flickering)
"""
stage = omni.usd.get_context().get_stage()
print("loading environment {}".format(self.env_name))
# from omni.isaac.core.utils.nucleus import find_nucleus_server
# result, nucleus_server = find_nucleus_server()
res, _ = omni.kit.commands.execute('CreateReferenceCommand',
usd_context=omni.usd.get_context(),
path_to=prim_path,
asset_path=self.env_path,
# asset_path= nucleus_server + "/Isaac/Environments/Simple_Warehouse/warehouse.usd",
instanceable=True)
if res:
clear_properties(prim_path)
if correct_paths_req:
print("Correcting paths... --- note that you might want to change utils/misc_utils.py:correct_paths")
try:
correct_paths(prim_path)
except:
print("Failed to correct paths for {}".format(prim_path))
time.sleep(10)
else:
print("Not correcting paths --- check that all textures are visibile and the reflection maps are correct")
# center the home in the middle of the environment
set_translate(stage.GetPrimAtPath(prim_path), list(- np.array(self.shifts) / self.meters_per_unit))
for child in stage.GetPrimAtPath(prim_path).GetAllChildren():
if "xform" == child.GetTypeName().lower():
clear_properties(str(child.GetPath()))
if push_in_floor and "floor" not in str(child.GetPath()).lower():
myold = child.GetProperty('xformOp:translate').Get()
myold = [myold[0], myold[1], myold[2] - 0.04]
set_translate(child, list(np.array(myold)))
return prim_path
else:
raise Exception("Failed to load environment {}".format(self.env_name))
|
eliabntt/GRADE-RR/simulator/utils/UTILS.md | ## Environment utils
Used to manage the environment.
With these functions you can load and center the environment and create a 2D occupancy map (only if collisions are turned on).
This is where you want to act if you want to remove the centering of the environment, create a different kind of occupancy map, or do something specific while loading.
Nothing super-fancy here.
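A minimal usage sketch (assuming a parsed `config`, a seeded `rng`, and a running simulation with collisions enabled; `out_path` is hypothetical):

```python
from utils.environment_utils import *

env = environment(config, rng, meters_per_unit=0.01)
prim_path = env.load_and_center("/World/home")
env.generate_map(out_path, zlim=[0, 3], cell_size=0.05, origin=[0, 0, 0])
```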
## Human utils
Human management functions.
They can load the human, correct the asset paths if necessary, and move it to the ground. These are mainly here to showcase some useful functions.
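A usage sketch (hypothetical asset path):

```python
from utils.human_utils import load_human

dynamic_prims, added_prims = [], []
load_human("/World/human_", 0, "/data/humans/walking_0.usd",
           dynamic_prims, added_prims, correct_texture_paths=True)
```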
## Misc utils
Used as a collage library.
There are tools to add semantic information, change the path of the textures, add colliders (or unset them), randomize lights and roughness of the materials, add translate and rotate animations, the service to position objects (that works with ROS), tools to rotate/translate objects, teleport the prim.
This is the main file you want to edit, for example if you want to change the placement strategy. Our placement strategy uses the FCL library from MoveIt and checks collisions between two STL meshes. The system caches the environment and robot STLs at the beginning. We have different placement strategies for different objects (e.g. humans, the robot, and generic objects follow different rules).
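Sampling collision-free poses could look like this (a sketch; `env` is the loaded environment object and the external collision-check service must be running):

```python
from utils.misc_utils import position_object

# type=0 samples camera-like poses with z roughly in [0.8, 1.8]
x, y, z, yaw = position_object(env, type=0, reset=True)
```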
## Objects utils
Used to load the objects in the simulation. It will automatically convert the objects to the USD format to cache them. The objects are converted in local directories located in the GSO/shapenet folders. Semantics and collisions can be added to objects using these utilities. Everything can be expanded easily by adding new object types.
The `shapenet` and `google_scanned_objects` folders are set up at runtime for example through the `os.environ["SHAPENET_LOCAL_DIR"]`.
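A sketch of the expected setup (hypothetical local path and credentials):

```python
import os
os.environ["SHAPENET_LOCAL_DIR"] = "/data/shapenet"  # local cache for the converted USDs

from utils.objects_utils import setup_shapenet, load_object
setup_shapenet(username, password, csv_location)  # populates the shapenet database
obj = load_object(rng, obj_name="shapenet", config=config)  # path plus synsetId/modelId
```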
## Robot utils
Various ways to create messages, add sensors, differentiate between poses (`create_diff_odom_message`), create viewports and publish data. Moreover, you want to use this to load your robot, set its initial joint locations, and manage the trajectory. In general, each component is loaded with auto-publishing disabled and needs to be explicitly ticked or published. Some things like the odometry do not have a specific sensor, but you can publish all the data that you want.
Edit this if you need new sensors, want to publish different data, or want to remove sensors. This is also the place to clean up the code or to add noise directly to the data.
## Simulation utils
Mainly used for configuration settings (enable/disable extensions, change raytracing/pathtracing options), to check that nucleus is powered up and ROS is working, and to manage the timeline.
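Typical startup order (a sketch):

```python
from utils.simulation_utils import *

simulation_environment_setup(need_ros=True)  # enable extensions, check nucleus and ros
set_raytracing_settings(physics_hz=60)       # or set_pathtracing_settings(60)
timeline = setup_timeline(config)
```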
|
eliabntt/GRADE-RR/simulator/utils/zebra_utils.py | import utils.misc_utils
from omni.kit.sequencer.usd import SequenceSchema, usd_sequencer
from utils.misc_utils import *
def load_zebra(zebra_base_prim_path, n, asset_path):
stage = omni.usd.get_context().get_stage()
res, _ = omni.kit.commands.execute("CreateReferenceCommand",
usd_context=omni.usd.get_context(),
path_to=f"{zebra_base_prim_path}{n}",
asset_path=asset_path,
instanceable=False)
clear_properties(f"{zebra_base_prim_path}{n}")
return f"{zebra_base_prim_path}{n}"
def place_zebras(frame_info, rng, floor_points, meters_per_unit, hidden_position, config, max_anim_len, zebra_info):
stage = omni.usd.get_context().get_stage()
# create bool array as big as floor_points
occupied = np.zeros((floor_points.shape[0]-2, floor_points.shape[1]-2), dtype=bool)
deleted_zebras = []
out_frame_info = {}
min_number_zebras = config["min_number_zebras"].get()
max_number_zebras = config["max_number_zebras"].get()
selected_zebras = rng.choice(list(frame_info.keys()), size=int(rng.uniform(min_number_zebras, max_number_zebras)),
replace=False)
for zebra in selected_zebras:
out_frame_info[zebra] = frame_info[zebra].copy()
out_frame_info[zebra] = randomize_frame(out_frame_info[zebra], rng, max_anim_len, zebra_info)
# process the box and extract xmin xmax ymin ymax
box = np.array(out_frame_info[zebra]["box"])
xmin = np.min(box[:, 0])
xmax = np.max(box[:, 0])
ymin = np.min(box[:, 1])
ymax = np.max(box[:, 1])
# box is the 2D box
box = np.array([[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax]])
# random yaw rotation of the box
yaw = rng.uniform(0, 2 * np.pi)
# create a rotation matrix
rot = np.array([[np.cos(yaw), -np.sin(yaw)], [np.sin(yaw), np.cos(yaw)]])
# rotate the box
box = np.matmul(box, rot)
positioned = False
newbox = []
# get intermediate int points
for i in range(4):
p1 = np.round(box[i]).astype(int)
p2 = np.round(box[(i + 1) % 4]).astype(int)
# compute all int numbers between p1 and p2
dx = p2[0] - p1[0]
dy = p2[1] - p1[1]
# note: the degenerate p1 == p2 case must be checked first, otherwise it is shadowed by dx == 0
if dx == 0 and dy == 0:
newbox.append([p1[0], p1[1]])
elif dx == 0:
x = p1[0]
y = np.arange(min(p1[1], p2[1]), max(p1[1], p2[1]) + 1 if max(p1[1], p2[1]) >= 0 else -1)
for j in range(len(y)):
newbox.append([x, y[j]])
elif dy == 0:
x = np.arange(min(p1[0], p2[0]), max(p1[0], p2[0]) + 1 if max(p1[0], p2[0]) >= 0 else -1)
y = p1[1]
for j in range(len(x)):
newbox.append([x[j], y])
else:
x = np.arange(min(p1[0], p2[0]), max(p1[0], p2[0]) + 1 if max(p1[0], p2[0]) >= 0 else -1)
y = p1[1] + (x - p1[0]) * dy / dx
for j in range(len(x)):
newbox.append([x[j], y[j]])
newbox = np.unique(np.array(newbox).astype(int), axis=0).astype(int)
for _ in range(100):
# get a random location in occupied -- this will be my center
center = np.array([rng.integers(0, occupied.shape[1]), rng.integers(0, occupied.shape[0])])
# check if all the cells covered by the box in occupied are free -- not only the boundaries
collision = False
for x_coor, y_coor in newbox:
try:
if occupied[center[0] - y_coor, center[1] + x_coor]:
collision = True
break
except IndexError:
collision = True
break
if collision:
break
if not collision:
tmp_floor_points = []
newcenter = np.array([center[0] + 1, center[1] + 1])
# if there is no collision, set the cells covered by the box to occupied
for x_coor, y_coor in newbox:
occupied[center[0] - y_coor, center[1] + x_coor] = True
# get the corresponding floor point given the center and x_coor and col
# NOTE THAT Y IS OPPOSITE SIGN
tmp_floor_points.append(floor_points[newcenter[0] - y_coor, newcenter[1] + x_coor])
# set the position of the zebra to the center
loc = np.mean(tmp_floor_points, axis=0) / meters_per_unit
loc = np.array(floor_points[newcenter[0], newcenter[1]]) / meters_per_unit
set_translate(stage.GetPrimAtPath(zebra), list(loc))
# set the rotation of the zebra to the roll, pitch, yaw
# lower_point = np.min(tmp_floor_points, axis=0)
# upper_point = np.max(tmp_floor_points, axis=0)
# vector = np.array(upper_point) - np.array(lower_point)
# compute roll pitch and yaw of vector
# roll, pitch, yaw = Rotation.from_rotvec(vector).as_euler("XYZ")
# transform = Rotation.from_matrix(
# trimesh.PointCloud(tmp_floor_points).bounding_box_oriented.transform[:3, :3]).as_euler("XYZ")
out_frame_info[zebra]["position"] = loc * meters_per_unit
out_frame_info[zebra]["rotation"] = [0, 0, yaw]
out_frame_info[zebra]["center"] = newcenter
out_frame_info[zebra]["box"] = box
set_rotate(stage.GetPrimAtPath(zebra), [0, 0, yaw]) # todo refine this to account for terrain
positioned = True
break
if not positioned:
print("Could not position zebra", zebra)
# delete the zebra
deleted_zebras.append(zebra)
set_translate(stage.GetPrimAtPath(zebra), list(hidden_position))
for zebra in deleted_zebras:
del out_frame_info[zebra]
return out_frame_info
def randomize_frame(zebra, rng, max_anim_len, zebra_info):
stage = omni.usd.get_context().get_stage()
zebra_path = zebra["path"]
scale = rng.integers(40, 100)
set_scale(stage.GetPrimAtPath(zebra_path), scale)
zebra_name = zebra["name"]
prim = stage.GetPrimAtPath(f"/World/Sequence{zebra_path}{zebra_path}_Clip")
anim_len = zebra_info[zebra_name]["length"]
timeslot = max_anim_len - rng.integers(0, anim_len)
prim.GetAttribute("startTime").Set(Sdf.TimeCode(timeslot * 1.0))
prim.GetAttribute("endTime").Set(
Sdf.TimeCode(float(max(timeslot + zebra_info[zebra_name]["length"], max_anim_len))))
points_in_mesh = zebra_info[zebra_name]["points"][max_anim_len - timeslot] * scale / 100
zebra = {"name": zebra_name, "time": timeslot, "used_frame": max_anim_len - timeslot + 1,
"scale": scale, "box": trimesh.PointCloud(points_in_mesh).bounding_box.vertices,
"path": zebra_path}
return zebra
def preload_all_zebras(config, rng, zebra_files, zebra_info, simulation_context, sequencer_drop_controller, max_anim_len,
hidden_position):
stage = omni.usd.get_context().get_stage()
# load a random number of zebras between min_number_zebra and max_number_zebra
num_zebras = config["max_number_zebras"].get()
frame_info = {}
for n in range(num_zebras):
# load a random zebra
zebra_file = rng.choice(zebra_files)
# load the zebra
zebra_path = load_zebra("/zebra_", n, zebra_file)
scale = rng.integers(40, 100)
set_scale(stage.GetPrimAtPath(zebra_path), scale)
zebra_name = zebra_file.split("/")[-1].split(".")[0]
add_semantics(stage.GetPrimAtPath(zebra_path), "zebra")
timeslot = max_anim_len - rng.integers(0, zebra_info[zebra_name]["length"])
sequencer_drop_controller.sequencer_drop(stage.GetPrimAtPath("/World/Sequence"), zebra_path, float(timeslot))
prim = stage.GetPrimAtPath(f"/World/Sequence{zebra_path}{zebra_path}_Clip")
prim.GetAttribute("startTime").Set(Sdf.TimeCode(timeslot * 1.0))
prim.GetAttribute("endTime").Set(
Sdf.TimeCode(float(max(timeslot + zebra_info[zebra_name]["length"], max_anim_len))))
points_in_mesh = zebra_info[zebra_name]["points"][max_anim_len - timeslot] * scale / 100
frame_info[zebra_path] = {"name": zebra_name, "time": timeslot, "used_frame": max_anim_len - timeslot + 1,
"scale": scale, "box": trimesh.PointCloud(points_in_mesh).bounding_box.vertices,
"path": zebra_path}
simulation_context.step(render=False)
simulation_context.render()
set_translate(stage.GetPrimAtPath(zebra_path), hidden_position)
return frame_info
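# Typical flow (a sketch): preload the maximum number of zebras once, park them in a
# hidden position, then re-sample pose/animation/scale for a random subset each capture:
# frame_info = preload_all_zebras(config, rng, zebra_files, zebra_info,
#                                 simulation_context, sequencer_drop_controller,
#                                 max_anim_len, hidden_position)
# placed = place_zebras(frame_info, rng, floor_points, meters_per_unit,
#                       hidden_position, config, max_anim_len, zebra_info)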
|
eliabntt/GRADE-RR/simulator/utils/misc_utils.py | import asyncio
import carb
import ipdb
import json
import ntpath
import numpy as np
import os
import pickle as pkl
from PIL import Image
from pyquaternion import Quaternion
import scipy.spatial.transform as tf
from stl import mesh
import time
import trimesh
from typing import Dict, Optional, Union
# ros
import rospy, rosgraph
from geometry_msgs.msg import PoseStamped, Point
from nav_msgs.msg import Odometry
from sensor_msgs.msg import Imu
from std_msgs.msg import String
# omni
import omni.isaac.shapenet as shapenet
import omni.kit
from omni.isaac import RangeSensorSchema
from omni.isaac.core import SimulationContext, PhysicsContext
import omni.replicator.core as rep
from omni.isaac.core.prims import XFormPrim
from omni.isaac.core.utils.carb import set_carb_setting
from omni.isaac.core.utils.extensions import enable_extension, disable_extension
from omni.isaac.core.utils.stage import is_stage_loading, set_stage_up_axis
from omni.isaac.dynamic_control import _dynamic_control
import omni.isaac.IsaacSensorSchema as IsaacSensorSchema
from omni.isaac.synthetic_recorder import extension_custom
from omni.physxcommands import SetStaticColliderCommand, RemoveStaticColliderCommand
from pxr import UsdGeom, Gf, Usd, UsdSkel, AnimationSchema, Semantics, UsdPhysics, Sdf, UsdShade
from pxr.Usd import Prim
# 2022 edits
import omni.graph.core as og
from omni.isaac.core_nodes.scripts.utils import set_target_prims
def add_semantics(prim: Prim, semantic_label: str):
"""
Adds semantic labels to the prim.
prim: the prim to add the semantic label to
semantic_label: the semantic label to add
"""
if not prim.HasAPI(Semantics.SemanticsAPI):
sem = Semantics.SemanticsAPI.Apply(prim, "Semantics")
sem.CreateSemanticTypeAttr()
sem.CreateSemanticDataAttr()
else:
sem = Semantics.SemanticsAPI.Get(prim, "Semantics")
sem.GetSemanticTypeAttr().Set("class")
sem.GetSemanticDataAttr().Set(str(semantic_label))
def correct_paths(parent_name: str):
"""
Helper function to correct the paths of the world's materials (as they come from Windows).
parent_name: the prim path of the father.
"""
stage = omni.usd.get_context().get_stage()
for prim in stage.Traverse():
shader_path = prim.GetPath()
if parent_name.lower() in str(shader_path).lower():
if prim.GetTypeName().lower() == "mesh":
prim.GetProperty('doubleSided').Set(False)
if prim.GetTypeName().lower() == "shader":
try:
change_shader_path(shader_path)
except:
print(f"Error changing shader of in {shader_path}")
time.sleep(5)
def change_shader_path(shader_path: str):
"""
Changes the texture paths of the given shader.
shader_path: the prim path of the shader (e.g. a shader under "/World/my_robot_0/materials" or "/World/home/materials")
"""
stage = omni.usd.get_context().get_stage()
shader = stage.GetPrimAtPath(shader_path)
if 'inputs:diffuse_texture' in shader.GetPropertyNames():
old_path = str(shader.GetAttribute('inputs:diffuse_texture').Get().resolvedPath)
new_path = old_path.replace("@", "")
# print(f"Changing path {old_path}")
if "something" in old_path or "P:" in old_path:
new_path = old_path.replace(ntpath.sep, os.sep).replace('P:/', '').replace("@", "")
elif "somethingelse" in old_path.lower():
splitted = old_path.split(ntpath.sep)
tmp_path = ""
for i in splitted:
tmp_path += i + ntpath.sep
if "something" in i:
break
tmp_path = tmp_path.replace(ntpath.sep, os.sep)
new_path = old_path.replace(ntpath.sep, os.sep).replace(tmp_path, '').replace(
"@", "")
shader.GetAttribute('inputs:diffuse_texture').Set(new_path)
if 'inputs:reflectionroughness_texture' in shader.GetPropertyNames():
old_path = str(shader.GetAttribute('inputs:reflectionroughness_texture').Get().resolvedPath)
new_path = old_path.replace("@", "")
# print(f"Changing path {old_path}")
if "something" in old_path or "P:" in old_path:
new_path = old_path.replace(ntpath.sep, os.sep).replace('P:/', '').replace("@", "")
elif "somethingelse" in old_path.lower():
splitted = old_path.split(ntpath.sep)
tmp_path = ""
for i in splitted:
tmp_path += i + ntpath.sep
if "something" in i:
break
tmp_path = tmp_path.replace(ntpath.sep, os.sep)
new_path = old_path.replace(ntpath.sep, os.sep).replace(tmp_path, '').replace(
"@", "")
shader.GetAttribute('inputs:reflectionroughness_texture').Set(new_path)
def set_colliders(path_main_asset: str, value: bool):
"""
It takes a path to a main asset, and a boolean value, and sets the physics:collisionEnabled attribute to the boolean
value for all children of the main asset. This effectively enable or disable collisions.
:param path_main_asset: The path to the main asset in the USD file
:type path_main_asset: str
:param value: bool
:type value: bool
"""
stage = omni.usd.get_context().get_stage()
for j in stage.GetPrimAtPath(path_main_asset).GetAllChildren():
for i in j.GetAllChildren():
if "physics:collisionEnabled" in i.GetPropertyNames():
if i.GetProperty("physics:collisionEnabled").Get() == value:
continue
i.GetProperty("physics:collisionEnabled").Set(value)
def add_colliders(path_main_asset: str):
"""
Adds the colliders to the main asset. This allows the object to have collisions or not (if supported).
Return True if the colliders were added, False otherwise.
path_main_asset: the path of the prim asset whose childs need to be processed
"""
stage = omni.usd.get_context().get_stage()
fres = True
for prim in stage.Traverse():
prim_path = prim.GetPath()
if path_main_asset.lower() in str(prim_path).lower():
if prim.GetTypeName().lower() == "mesh" or prim.GetTypeName().lower() == "xform":
res, _ = SetStaticColliderCommand.execute(str(prim.GetPath()))
fres = res and fres
return fres
def process_semantics(parent_name: str, name_to_label: str = None):
"""
Processes the semantics of the world.
In case the name_to_label is specified (not coming from Front3D), it will be set to the name_to_label param.
parent_name: the prim path of the father.
label: the eventual label to give to the set of assets
"""
for prim in omni.usd.get_context().get_stage().Traverse():
primpath = prim.GetPath()
if parent_name.lower() in str(primpath).lower():
if prim.GetTypeName().lower() == "mesh" or prim.GetTypeName().lower() == "xform":
if name_to_label == None:
# tmp = prim.GetAttribute('userProperties:category_id')
tmp = prim.GetAttribute('userProperties:semantic')
if tmp.Get() != None:
add_semantics(prim, str(tmp.Get()))
else:
add_semantics(prim, name_to_label)
def randomize_and_fix_lights(config: dict, rng: np.random.default_rng, parent_name: str, z_lim, meters_per_unit,
is_rtx: bool = False):
"""
Randomize the lights within an environment
config: the configuration dict with the parameters and enabled/disabled config for intensity/color
rng: global rng
parent_name: parent whose childs need to be considered to change the lights
"""
stage = omni.usd.get_context().get_stage()
if not (config["intensity"] or config["color"]):
return
min_int = config.get("intensity_interval", [0.0, 1.0])[0]
max_int = config.get("intensity_interval", [0.0, 1.0])[1]
for prim in stage.Traverse():
path = prim.GetPath()
if parent_name.lower() in str(path).lower():
if "light" in prim.GetTypeName().lower():
if "environment" in str(path).lower():
continue
if config["intensity"]:
prim.GetAttribute('intensity').Set(rng.uniform(low=min_int, high=max_int))
if config["color"]:
col = rng.random(size=3)
prim.GetAttribute('color').Set(Gf.Vec3f(col[0], col[1], col[2]))
if not is_rtx:
prim.GetAttribute('diffuse').Set(4)
prim.GetAttribute('specular').Set(4)
# FIXME: no actual check that we are not moving other stuff, but this should work based on the existence of segmentation info and on the fact that the light itself does not have a translation attribute
z_lamp = omni.usd.get_world_transform_matrix(prim)[3, 2] * meters_per_unit
if z_lamp > z_lim - 0.08:
diff = z_lamp - z_lim - 0.08
while not prim.HasAttribute('xformOp:translate'):
prim = prim.GetParent()
# while (not "semantic:Semantics:params:semanticData" in parent.GetPropertyNames()):
# parent = parent.GetParent()
p_lamp = prim.GetAttribute('xformOp:translate').Get()
p_lamp[2] -= diff
prim.GetAttribute('xformOp:translate').Set(p_lamp)
# move the light if it is too high
def randomize_roughness(config: dict, rng: np.random.default_rng, parent_name: str):
"""
Randomize the roughness (reflectivity) of assets within an environment
config: the configuration dict with the parameters and enabled/disabled config for intensity/color
rng: global rng
parent_name: parent whose childs need to be considered to change the lights
"""
stage = omni.usd.get_context().get_stage()
if not (config["enabled"]):
return
min_int = config.get("intensity_interval", [0.0, 1.0])[0]
max_int = config.get("intensity_interval", [0.0, 1.0])[1]
for prim in stage.Traverse():
path = prim.GetPath()
if parent_name.lower() in str(path).lower():
if prim.GetTypeName().lower() == "material" or prim.GetTypeName().lower() == "shader":
if "inputs:RoughnessMin" in prim.GetPropertyNames():
val = rng.uniform(low=min_int, high=max_int)
prim.GetAttribute('inputs:RoughnessMin').Set(val)
prim.GetAttribute('inputs:RoughnessMax').Set(val)
def get_area(polygon):
"""
Computes the area of a polygon.
"""
x = polygon[:, 0]
y = polygon[:, 1]
return .5 * np.absolute(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))
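# Worked example: the shoelace formula above on a unit square returns 1.0, e.g.
# get_area(np.array([[0, 0], [1, 0], [1, 1], [0, 1]]))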
def change_prim_collision(enable, prim_path):
for j in omni.usd.get_context().get_stage().Traverse():
if str(j.GetPath()).startswith(prim_path):
if 'physics:collisionEnabled' in j.GetPropertyNames():
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(str(j.GetPath())+'.physics:collisionEnabled'),
value=enable,
prev=None)
def change_collision_at_path(enable, paths=['/my_robot_0/camera_link/Cube.physics:collisionEnabled','/my_robot_0/yaw_link/visuals.physics:collisionEnabled']):
"""
It enables or disables collisions for the paths
:param enable: True or False
"""
for path in paths:
omni.kit.commands.execute('ChangeProperty',
prop_path=Sdf.Path(path),
value=enable,
prev=None)
def add_translate_anim(prim_path: str, pos: Gf.Vec3d, time: float = 0.0):
"""
Add a goal location at a given timecode. The object will EVENTUALLY move there with a smooth movement.
prim_path: the path of the asset to be moved
pos: the final position
time: the time in FRAME
"""
omni.kit.commands.execute('ChangePropertyCommand',
prop_path=prim_path + '.xformOp:translate',
value=pos,
prev=Gf.Vec3d(0, 0, 0),
type_to_create_if_not_exist=UsdGeom.XformOp.TypeTranslate,
timecode=Usd.TimeCode(time))
def add_rotation_anim(prim_path: str, rot: list, time: float = 0.0, use_double=False):
"""
Add a goal rotation at a given timecode. The object will EVENTUALLY move there with a smooth movement.
EXPECT ROT IN RAD!
prim_path: the path of the asset to be moved
rot: the final position
time: the time in FRAME
"""
rot = np.array(rot) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), rot[0])
* Gf.Rotation(Gf.Vec3d.YAxis(), rot[1])
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot[2])
)
omni.kit.commands.execute('ChangePropertyCommand',
prop_path=prim_path + ".xformOp:orient",
value=Gf.Quatf(quat.GetQuat()) if not use_double else Gf.Quatd(quat.GetQuat()),
prev=Gf.Quatf(0, 0, 0, 1) if not use_double else Gf.Quatd(0, 0, 0, 1),
type_to_create_if_not_exist=UsdGeom.XformOp.TypeOrient,
timecode=Usd.TimeCode(time))
def inf_helper(y: np.array):
"""Helper to handle indices and logical indices of NaNs.
Input:
- y, 1d numpy array with possible NaNs
Output:
- nans, logical indices of NaNs
- index, a function, with signature indices= index(logical_indices),
to convert logical indices of NaNs to 'equivalent' indices
"""
return np.isinf(y), lambda z: z.nonzero()[0]
def position_object(environment, type: int, objects: list = [], ob_stl_paths: list = [], reset: bool = False,
max_collisions: int = 200):
"""
type = 0 -> camera z_lim = [0.8 - 1.8] using camera stl
type = 1 -> humans z_lim = [0 - 0] using human stl
type = 2 -> shapenet z_lim = [0 - 1.8] using camera stl
type = 3 -> origin z_lim = [0 - 0] using camera stl
note: when min == max we apply a small offset to the max to address shifts in the z-axis and allow small collisions.
However, the result will still be published at the wanted height.
environment: the environment object
type: see above
objects: the list of objects to be placed
ob_stl_paths: the corresponding stls
reset: if the collision checker needs to be forcefully reset
"""
# this import will work if you compile our https://github.com/eliabntt/moveit_based_collision_checker_and_placement/tree/main
# and add the catkin source command to isaac_X_X/setup_python_env.sh
from collision_check.srv import *
if environment.env_stl_path == None:
print(
"No stl is being loaded for the environment, please pre-fix all objects locations or implement your own strategy")
environment.env_stl_path = ""
print("Wait for service")
rospy.wait_for_service("/fake/collision_checker/check")
print("Service loaded")
try:
check_collision = rospy.ServiceProxy("/fake/collision_checker/check", collision_check_srv)
req = collision_check_srvRequest()
req.env_stl_path = environment.env_stl_path
req.env_polygon = environment.env_polygon
req.reset = reset
if type == 1:
for ob in objects:
req.ob_names.append(ob)
req.ob_stl_paths = ob_stl_paths
req.is_cam = True if type != 1 else False
min_z = (0.8 + environment.env_limits[2]) if type == 0 else environment.env_limits[2]
max_z = environment.env_limits[2] if (type == 1 or type == 3) else min(1.8 + environment.env_limits[2],
environment.env_limits[5] - 0.5)
if type == 4:
min_z = environment.env_limits[2]
max_z = environment.env_limits[2]
has_forced_z = -1
if min_z == max_z:
max_z += 0.5
has_forced_z = min_z
req.min_limits = [environment.env_limits[0] + 0.5, environment.env_limits[1] + 0.5, min_z]
req.max_limits = [environment.env_limits[3] - 0.5, environment.env_limits[4] - 0.5, max_z]
req.limit_collision = 0 if type != 1 else max_collisions
req.forced_z = has_forced_z
res = check_collision.call(req)
if has_forced_z != -1:
res.z = [min(has_forced_z, z) for z in res.z]
return np.array(res.x) - environment.shifts[0], np.array(res.y) - environment.shifts[1], np.array(res.z) - \
environment.shifts[2], res.yaw
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
return [-1] * len(objects), [-1] * len(objects), [-1] * len(objects), [0] * len(objects)
def set_scale(prim: Prim, scale: float = 1.0):
"""
Set the scale of a Prim
prim: the prim
scale: the scale
"""
prop_names = prim.GetPropertyNames()
if "xformOp:scale" not in prop_names:
xformable = UsdGeom.Xformable(prim)
xform_op_scale = xformable.AddXformOp(UsdGeom.XformOp.TypeScale, UsdGeom.XformOp.PrecisionDouble, "")
else:
xform_op_scale = UsdGeom.XformOp(prim.GetAttribute("xformOp:scale"))
xform_op_scale.Set(Gf.Vec3d([scale, scale, scale]))
def clear_properties(path: str):
"""
The function clears all the POSE properties of the given prim.
This is to ensure a consistent way of setting those properties for different objects.
This should be called with ALL loaded objects so that we have consistent xformOp:trans/Orient
"""
current_position, current_orientation = XFormPrim(path).get_world_pose()
def set_translate(prim: Prim, new_loc: list):
"""
prim: must be prim type, the prim to be moved
new_loc: list [x-y-z] for the single prim
"""
properties = prim.GetPropertyNames()
if "xformOp:translate" in properties:
translate_attr = prim.GetAttribute("xformOp:translate")
translate_attr.Set(Gf.Vec3d(new_loc))
elif "xformOp:transform" in properties:
transform_attr = prim.GetAttribute("xformOp:transform")
matrix = prim.GetAttribute("xformOp:transform").Get()
matrix.SetTranslateOnly(Gf.Vec3d(new_loc))
transform_attr.Set(matrix)
else:
xform = UsdGeom.Xformable(prim)
xform_op = xform.AddXformOp(UsdGeom.XformOp.TypeTranslate, UsdGeom.XformOp.PrecisionDouble, "")
xform_op.Set(Gf.Vec3d(new_loc))
def set_rotate(prim: XFormPrim, rot: list):
"""
expects rot in rad
prim: The prim to be rotated
rot: roll-pitch-yaw in RAD
"""
properties = prim.GetPropertyNames()
rot = np.array(rot) * 180 / np.pi
quat = (
Gf.Rotation(Gf.Vec3d.XAxis(), rot[0])
* Gf.Rotation(Gf.Vec3d.YAxis(), rot[1])
* Gf.Rotation(Gf.Vec3d.ZAxis(), rot[2])
)
if "xformOp:orient" in properties:
rotation = prim.GetAttribute("xformOp:orient")
rotation.Set(Gf.Quatd(quat.GetQuat()))
else:
xform = UsdGeom.Xformable(prim)
xform_op = xform.AddXformOp(UsdGeom.XformOp.TypeOrient, UsdGeom.XformOp.PrecisionDouble, "")
xform_op.Set(Gf.Quatd(quat.GetQuat()))
def dynamic_control_interface():
"""
This is placed here as the extension is not loaded in the main script.
"""
return _dynamic_control.acquire_dynamic_control_interface()
def reload_references(path):
"""
It reloads all the references and payloads of a given prim
:param path: The path to the prim you want to reload references for
"""
stage = omni.usd.get_context().get_stage()
prim_list = []
for j in stage.GetPrimAtPath(path).GetAllChildren():
prim_list.append(j)
layers = set()
for prim in prim_list:
for (ref, intro_layer) in omni.usd.get_composed_references_from_prim(prim):
layer = Sdf.Find(intro_layer.ComputeAbsolutePath(ref.assetPath)) if ref.assetPath else None
if layer:
layers.add(layer)
for (ref, intro_layer) in omni.usd.get_composed_payloads_from_prim(prim):
layer = Sdf.Find(intro_layer.ComputeAbsolutePath(ref.assetPath)) if ref.assetPath else None
if layer:
layers.add(layer)
for l in layers:
l.Reload(force=True)
def teleport(path, loc, rot):
"""
It teleports the object at the given path to the given location and rotation
:param path: The path to the object you want to teleport
:param loc: (x, y, z)
:param rot: (x, y, z, w)
"""
omni.kit.commands.execute(
"IsaacSimTeleportPrim",
prim_path=path,
translation=(loc[0], loc[1], loc[2]),
rotation=(rot[0], rot[1], rot[2], rot[3]),
)
def toggle_dynamic_objects(dynamic_prims: list, status: bool):
"""
It toggles the visibility of the dynamic objects in the scene
:param dynamic_prims: a list of prims that you want to toggle
:type dynamic_prims: list
"""
# print("Toggling environment...")
for _ in range(3):
for prim in dynamic_prims:
imageable = UsdGeom.Imageable(prim)
if status:
imageable.MakeVisible()
else:
imageable.MakeInvisible()
imageable = []
def reset_physics(timeline, simulation_context):
timeline.stop()
simulation_context.reset()
timeline.play() |
eliabntt/GRADE-RR/simulator/utils/simulation_utils.py | import time
import utils.misc_utils
from utils.misc_utils import *
GRAPH_PATH = "/Render/PostProcess/SDGPipeline"
def set_common_stage_properties(rate):
"""
Note: some properties as of now can only be set with the general environment USD file.
"""
_desired_render_settings: Dict[str, Union[bool, int]] = {
"/app/asyncRendering": False,
"/app/renderer/skipWhileMinimized": False,
"/app/renderer/sleepMsOnFocus": 0,
"/app/renderer/sleepMsOutOfFocus": 0,
"/app/runLoops/main/rateLimitEnabled": True,
"/app/runLoops/main/rateLimitFrequency": rate,
"/persistent/simulation/minFrameRate": rate,
"/app/runLoops/main/rateLimitUseBusyLoop": True,
"/app/runLoops/rendering_0/rateLimitEnabled": True,
"/app/viewport/showSettingMenu": True,
"/app/viewport/showCameraMenu": True,
"/app/viewport/showRendererMenu": True,
"/app/viewport/showHideMenu": True,
"/app/viewport/showLayerMenu": True,
"/app/viewport/grid/showOrigin": False,
"/app/viewport/grid/enabled": False, ## this does not work
"/persistent/app/viewport/grid/lineWidth": 0,
"/rtx/multiThreading/enabled": True,
"/app/asyncRenderingLowLatency": False,
# "/persistent/app/captureFrame/viewport": True,
}
for setting_key, desired_value in _desired_render_settings.items():
set_carb_setting(carb.settings.get_settings(), setting_key, desired_value)
def simulation_environment_setup(need_ros = True):
"""
Enable the necessary extensions that will be used within the simulation
"""
enable_extension("omni.isaac.ros_bridge")
enable_extension("omni.isaac.physics_inspector")
enable_extension("omni.isaac.physics_utilities")
enable_extension("omni.anim.skelJoint")
enable_extension("omni.kit.window.sequencer")
enable_extension("omni.isaac.dynamic_control")
enable_extension("omni.isaac.shapenet")
enable_extension("semantics.schema.editor")
enable_extension("omni.hydra.iray")
enable_extension("omni.iray.settings.core")
enable_extension('omni.isaac.occupancy_map')
enable_extension('omni.isaac.shapenet')
enable_extension('omni.isaac.range_sensor')
disable_extension('omni.isaac.sun_study')
enable_extension('omni.isaac.core_nodes')
enable_extension('omni.isaac.sensor')
# Necessary ONLY if using NUCLEUS
# Locate /Isaac folder on nucleus server to load sample
from omni.isaac.core.utils.nucleus import get_assets_root_path
nucleus_server = get_assets_root_path()
if nucleus_server is None:
carb.log_error("Could not find nucleus server with /Isaac folder, exiting")
exit()
if need_ros:
if not rosgraph.is_master_online():
carb.log_error("Please run roscore before executing this script")
exit()
def set_raytracing_settings(physics_hz):
set_common_stage_properties(physics_hz)
settings = carb.settings.get_settings()
settings.set("/app/hydraEngine/waitIdle", True)
settings.set_string("/rtx/rendermode", "RayTracing")
settings.set_int('/rtx/post/aa/op', 2)
def set_pathtracing_settings(physics_hz):
set_common_stage_properties(physics_hz)
settings = carb.settings.get_settings()
settings.set_string("/rtx/rendermode", "PathTracing")
settings.set_int('/rtx/post/aa/op', 1)
# settings.set_int('/rtx/multiThreading/enabled', True)
# settings.set_bool('/rtx/multiThreading/enabled', True)
settings.set_int('/rtx/post/histogram/filterType', 1)
settings.set_int('/rtx/post/histogram/tau', 100)
settings.set_float('/rtx/post/histogram/minEV', 2)
settings.set_float('/rtx/post/histogram/maxEV', 50)
settings.set_bool('/rtx/post/histogram/enabled', True)
settings.set_int('/rtx/post/tonemap/filmIso', 100) # 400
settings.set_int('/rtx/post/tonemap/cameraShutter', 30)
settings.set_int('/rtx/post/tonemap/fStop', 4)
settings.set_int("/rtx/pathtracing/maxBounces", 6) # 6
settings.set_int("/rtx/pathtracing/maxSpecularAndTransmissionBounces", 6)
# settings.set_int("/rtx/pathtracing/maxDiffuseBounces", 10)
settings.set_int("/rtx/pathtracing/spp", 1)
settings.set_int("/rtx/pathtracing/totalSpp", 64)
settings.set_int("/rtx/pathtracing/clampSpp", 64)
settings.set_int("/rtx/pathtracing/cached/enabled", False)
settings.set_bool("/rtx/pathtracing/cached/enabled", False)
settings.set_int("/rtx/pathtracing/lightcache/cached/enabled", False)
settings.set_bool("/rtx/pathtracing/lightcache/cached/enabled", False)
settings.set("/app/hydraEngine/waitIdle", False)
def compute_timeline_ratio(human_anim_len, reverse_strategy, experiment_length):
"""
Based on the reverse strategy, compute how the system should roll back the animations.
This might be counter-productive in some instances.
"""
if len(human_anim_len) == 0:
return 1
if reverse_strategy == "avg":
return float(experiment_length) / (sum(human_anim_len) / len(human_anim_len))
elif reverse_strategy == "min":
return float(experiment_length) / min(human_anim_len)
elif reverse_strategy == "max":
return float(experiment_length) / max(human_anim_len)
elif reverse_strategy == "half":
return 2
elif reverse_strategy == "none":
return 1
else:
return 1
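# Worked example: with human animation lengths [10, 20] and experiment_length 30,
# "avg" gives 30 / 15 = 2.0, "min" gives 3.0, "max" gives 1.5, while "half" always
# returns 2 and "none" (or anything else) returns 1.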
def pub_and_write_images(simulation_context, viewport_window_list, ros_camera_list, raytracing, my_recorder=None, enable_recorder=True):
sleeping(simulation_context, viewport_window_list, raytracing)
ctime = omni.timeline.get_timeline_interface().get_current_time()
for i, cam, outs in ros_camera_list:
print(f"Publishing camera {cam}...")
for output in outs:
og.Controller.attribute(output+ ".inputs:step").set(1)
simulation_context.render()
for i, cam, outs in ros_camera_list:
for output in outs:
og.Controller.attribute(output+ ".inputs:step").set(0)
omni.timeline.get_timeline_interface().set_current_time(ctime)
if my_recorder and my_recorder._enable_record and enable_recorder:
my_recorder._update()
print("Writing")
def sleeping(simulation_context, viewport_window_list, raytracing, totalSpp=64, spp=1):
"""
Sleeps the simulation to be sure that the whole frame has been rendered and updated.
First we render a couple of frames.
In rtx mode we need to wait for the viewport fps to be reached.
In pathtracing mode we need to do "/rtx/pathtracing/spp" rendering steps.
e.g.
carb.settings.get_settings().get("/rtx/pathtracing/totalSpp")
carb.settings.get_settings().get("/rtx/pathtracing/spp")
"""
# todo: is there a better way? I don't think so, this is variable
# fixme making sure timeline does not advance
timeline = omni.timeline.get_timeline_interface()
mytime = timeline.get_current_time()
if raytracing:
sleep_time = 0
start = time.time()
for _ in range(100):
for vp in viewport_window_list:
if vp.fps == 0: continue
sleep_time = max(1 / vp.fps * 1.1, sleep_time)
if sleep_time != 0 and time.time() - start > sleep_time * 2: # overly cautious
break
simulation_context.render()
timeline.set_current_time(mytime)
else:
cnt = totalSpp
increase = spp
while cnt >= 0:
simulation_context.render()
timeline.set_current_time(mytime)
cnt -= increase
simulation_context.render()
timeline.set_current_time(mytime)
simulation_context.render()
timeline.set_current_time(mytime)
time.sleep(0.2)
def recorder_setup(_recorder_settings, out_path, enabled, skip_cameras=1):
my_recorder = extension_custom.MyRecorder()
my_recorder.on_startup()
my_recorder.set_single_settings(_recorder_settings)
my_recorder._dir_name = os.path.join(out_path)
my_recorder._enable_record = enabled
my_recorder.skip_cameras = skip_cameras
return my_recorder
def setup_timeline(config):
"""
It sets up the timeline to have a start time of 0.0, an end time of the experiment length * 2, and a time code per
second of the fps
:param config: a dictionary of parameters that are used to configure the experiment
:return: timeline
"""
timeline = omni.timeline.get_timeline_interface()
timeline.set_start_time(0.0)
if "fps" not in config:
fps = 30
else:
fps = config['fps'].get()
if "experiment_length" in config:
timeline.set_end_time(config["experiment_length"].get() * 2 / fps) # *2 to have room
else:
print("No experiment length found, setting it to 3600")
timeline.set_end_time(3600 / fps)
timeline.set_time_codes_per_second(fps)
return timeline
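# A sketch of how these pieces compose in a main loop (hypothetical variables):
# timeline = setup_timeline(config)
# my_recorder = recorder_setup(_recorder_settings, out_path, enabled=True)
# while experiment_running:
#     simulation_context.step(render=False)
#     pub_and_write_images(simulation_context, viewport_window_list,
#                          ros_camera_list, raytracing, my_recorder)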
|
eliabntt/GRADE-RR/simulator/utils/human_utils.py | import utils.misc_utils
from utils.misc_utils import *
def move_humans_to_ground(my_humans_heights: list, body_lists: list, frame: float, meters_per_unit: float,
max_height: float):
"""
Function to keep the human at ground level (0 for now, but can be elaborated)
my_humans_heights: list of [animation_frames, [vertices, z_loc]]; for every frame of the animation, the z location of every vertex
body_lists: list of prim paths, used to access the prims
frame: the simulation frame we are in (float or int will be cast to int)
meters_per_unit: meters per unit of distance in the simulation
max_height: height threshold below which the human is snapped back to the ground
"""
stage = omni.usd.get_context().get_stage()
for index, height in enumerate(my_humans_heights):
z_min = None
if height is None:
context = omni.usd.get_context()
stage = context.get_stage()
prim = stage.GetPrimAtPath(body_lists[index])
for i in prim.GetAllChildren():
if "armature" in str(i.GetPath()).lower():
prim = i
for i in prim.GetAllChildren():
if "body" in str(i.GetPath()).lower():
prim = i
for i in prim.GetAllChildren():
if "mesh" in str(i.GetPath()).lower():
prim = i
l = prim.GetPropertyNames()
if "points" in l:
k = prim.GetAttribute("points").Get()
if k is not None:
k = np.array(k)
z_min = min(k[:, 2])
else:
z_min = min(height[int(min(max(frame - 1, 0), len(height) - 1))]) / meters_per_unit
if z_min is None:
continue
if z_min < max_height:
loc = stage.GetPrimAtPath(body_lists[index]).GetProperty('xformOp:translate').Get()
loc = [loc[0], loc[1], loc[2] - z_min]
set_translate(stage.GetPrimAtPath(body_lists[index]), loc)
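# Sketch of the intended per-frame call; everything except the function's own
# parameters (simulation loop, environment object, threshold) is an assumption:
#
#   for frame in range(n_frames):
#       simulation_context.step(render=False)
#       move_humans_to_ground(my_humans_heights, human_prim_paths, frame,
#                             environment.meters_per_unit, max_height=0.5)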
def load_human(human_base_prim_path, n, asset_path, dynamic_prims=[], added_prims=[], correct_texture_paths=False):
"""
Load the human based on the usd path and add it to the dynamic prims list
Follow prim naming convention /human_base_prim_path+n
Add also the semantic with the label "human"
human_base_prim_path: the base path to which we add the n of the n-th human as per the prim path
n: the number of the human
asset_path: the path of the ussd of the human
dynamic_prims: the list of dynamic prims in the world. Only the body, and the clothes will be added (not the armature) as separate objects
added_prims: the list of the number of prims added to the world
correct_texture_paths: if True, correct the texture paths to the correct path
"""
stage = omni.usd.get_context().get_stage()
res, _ = omni.kit.commands.execute("CreateReferenceCommand",
usd_context=omni.usd.get_context(),
path_to=f"{human_base_prim_path}{n}",
asset_path=asset_path,
instanceable=False)
cnt = 0
if res:
for child in stage.GetPrimAtPath(f"{human_base_prim_path}{n}").GetAllChildren():
if "armature" in child.GetName().lower():
for sub_child in child.GetAllChildren():
if "armature" not in sub_child.GetName().lower():
dynamic_prims.append(sub_child)
cnt += 1
added_prims.append(cnt)
clear_properties(f"{human_base_prim_path}{n}")
if correct_texture_paths:
print("Correcting texture paths, you might want to change utils/misc_utils.py:correct_paths")
correct_paths(f"{human_base_prim_path}{n}")
else:
print("Not correcting texture paths, you might want to check the textures")
process_semantics(f"{human_base_prim_path}{n}", "human")
else:
raise Exception(f"Failed to load human {n} from {asset_path}")
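# Example call, assuming the stage default prim is /World and the asset is an
# animated human exported to USD (all paths below are placeholders):
#
#   dynamic_prims, added_prims = [], []
#   load_human("/World/human", 0, "/data/humans/subject_01.usd",
#              dynamic_prims, added_prims, correct_texture_paths=True)
#   # -> creates /World/human0 and appends its body/cloth prims to dynamic_prims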
|
eliabntt/GRADE-RR/simulator/utils/objects_utils.py | import utils.misc_utils
from utils.misc_utils import *
mtl_created_list = []
def setup_shapenet(username, password, csv_location):
global database
shapenet.settings.ShapenetSettings()
if not os.path.exists(csv_location):
logged_in = shapenet.login.save_v1_csvs(username, password, csv_location)
database = shapenet.globals.get_database()
return database
def load_object(rng=np.random.default_rng(), obj_name="shapenet", config=None, scale=1):
if obj_name == "shapenet":
return load_shapenet_object(rng, config, scale)
elif obj_name == "google":
return load_google_obj(rng, config, scale)
def load_shapenet_object(rng=np.random.default_rng(), config=None, scale=1):
"""
It loads a random object from the ShapeNet database
:param rng: a random number generator. If you don't have one, you can use np.random.default_rng()
:param config: a dictionary of parameters that can be set by the user
:param scale: The scale of the object, defaults to 1 (optional)
:return: The path to the object and the synsetId and modelId of the object.
"""
global database
scale /= 100
synsetId = rng.choice(list(database)) if config["synsetId"].get() == "random" else config["synsetId"].get()
modelId = rng.choice(list(database[synsetId])) if config["modelId"].get() == "random" else config["modelId"].get()
_settings = carb.settings.get_settings()
prim = shapenet.shape.addShapePrim(_settings.get("/isaac/shapenet/omniverseServer"), synsetId, modelId,
Gf.Vec3d(0, 0, 0),
Gf.Rotation(Gf.Vec3d(1, 0, 0), 0),
scale, True, True)
if type(prim) == str:
raise Exception(prim)
return str(prim.GetPath()), [synsetId, modelId]
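# Hedged sketch of the two config keys read above; "random" triggers a uniform
# draw from the local ShapeNet database (the explicit ids are placeholders):
#
#   synsetId: random   # or an explicit synset id such as "02691156"
#   modelId: random    # or an explicit model hash from that synset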
def load_google_obj(rng=np.random.default_rng(), config=None, scale = 1):
"""
It loads a random Google 3D asset from the Google Scanned Object, converts it to USD, and then creates a reference to it
in the current stage
:param rng: a random number generator
:param config: a dictionary of the config file
:return: The prim path of the asset and the name of the asset
"""
google_obj_folder = config['google_obj_folder'].get()
if config['google_obj_shortlist'].get() == "":
asset = rng.choice(os.listdir(google_obj_folder))
else:
with (open(config['google_obj_shortlist'].get(), 'r')) as f:
asset = rng.choice(f.read().splitlines())
if not os.path.exists(f"{google_obj_folder}/exported_usd/{asset}/"):
os.makedirs(f"{google_obj_folder}/exported_usd/{asset}/")
usd_asset_path = f"{google_obj_folder}/exported_usd/{asset}/{asset}.usd"
obj_asset_path = f"{google_obj_folder}/{asset}/meshes/model.obj"
print(f"Converting {obj_asset_path} to {usd_asset_path}")
if not os.path.exists(usd_asset_path):
success = asyncio.new_event_loop().run_until_complete(convert_google_obj(obj_asset_path, usd_asset_path))
if not success:
raise Exception("Failed to convert obj to usd")
stage = omni.usd.get_context().get_stage()
prim_path = str(stage.GetDefaultPrim().GetPath()) + "/" + asset
insta_count = 0
prim_path_len = len(prim_path)
while stage.GetPrimAtPath(prim_path):
insta_count += 1
prim_path = f"{prim_path[:prim_path_len]}_{insta_count}"
omni.kit.commands.execute('CreateReferenceCommand',
usd_context=omni.usd.get_context(),
path_to=prim_path,
asset_path=usd_asset_path,
instanceable=True)
texture_list = os.listdir(f"{google_obj_folder}/{asset}/materials/textures")[0]
# shader = UsdShade.Shader(stage.GetPrimAtPath(f"{prim_path}/Looks/material_0/material_0"))
# shader.CreateInput("diffuse_texture", Sdf.ValueTypeNames.Asset)
# omni.kit.commands.execute('ChangePropertyCommand',
# prop_path=f'{prim_path}/Looks/material_0/material_0.inputs:diffuse_texture',
# value=f"{google_obj_folder}/{asset}/materials/textures/{texture_list}",
# prev=None)
global mtl_created_list
omni.kit.commands.execute(
"CreateAndBindMdlMaterialFromLibrary",
mdl_name="OmniPBR.mdl",
mtl_name=f"{asset}",
mtl_created_list=mtl_created_list,
)
mtl_prim = stage.GetPrimAtPath(mtl_created_list[0])
omni.usd.create_material_input(
mtl_prim,
"diffuse_texture",
"my-computer://" + texture_list, # my-computer seems necessary
Sdf.ValueTypeNames.Asset,
)
obj_shade = UsdShade.Material(mtl_prim)
for child in stage.GetPrimAtPath(prim_path).GetAllChildren():
if child.GetTypeName().lower() == "xform":
for subchild in child.GetAllChildren():
if subchild.GetTypeName().lower() == "mesh":
UsdShade.MaterialBindingAPI(subchild).Bind(obj_shade, UsdShade.Tokens.strongerThanDescendants)
set_scale(stage.GetPrimAtPath(prim_path), scale)
return str(prim_path), asset
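# Expected on-disk layout for a Google Scanned Objects asset, inferred from
# the paths used above (the exported_usd folder is created on first use):
#
#   <google_obj_folder>/<asset>/meshes/model.obj
#   <google_obj_folder>/<asset>/materials/textures/<texture>.png
#   <google_obj_folder>/exported_usd/<asset>/<asset>.usd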
async def convert_google_obj(in_path, out_path):
"""
It converts a Google 3D model to a format that can be used in Omni
:param in_path: The path to the file you want to convert
:param out_path: The path to the output file
:return: A boolean value.
"""
import omni.kit.asset_converter as assetimport
context = omni.kit.asset_converter.AssetConverterContext()
converter_manager = omni.kit.asset_converter.get_instance()
context.embed_textures = False
task = converter_manager.create_converter_task(in_path, out_path, None, context)
success = await task.wait_until_finished()
return success
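# Standalone invocation sketch, mirroring the call in load_google_obj above;
# the asset converter extension must be loaded in the running Kit instance
# and the paths are placeholders:
#
#   ok = asyncio.new_event_loop().run_until_complete(
#       convert_google_obj("/data/gso/mug/meshes/model.obj",
#                          "/data/gso/exported_usd/mug/mug.usd"))
#   assert ok, "obj -> usd conversion failed"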
def load_objects(config, environment, rng, dynamic_prims, scale):
"""
Load objects in the environment
Config should contain `config["obstacles"]` with the various considered keys.
In our case those are shapenet and google(scanned_objects)
In config we define the # of objects for each class.
If the import fails the system tries to load it from another class.
For now we do not generate positions that are collision free, so the objects will go through obstacles/humans/camera.
config: the config dictionary
environment: the environment object
rng: the global rng
dynamic_prims: the list of dynamic prims that will be used in the main thread
"""
stage = omni.usd.get_context().get_stage()
shapenet_obs = config["obstacles"]["shapenet"].get()
google_obs = config["obstacles"]["google"].get()
num_obstacles = shapenet_obs + google_obs
loc = ''
google_obs_used = []
shapenet_obs_used = []
meters_per_unit = environment.meters_per_unit
if (num_obstacles > 0):
print("Loading obstacles..")
for n in range(num_obstacles):
print("Loading obstacle {}".format(n))
# set random valid location, use "camera"
x, y, z, yaw = position_object(environment, type=2)
if google_obs > 0:
ob_type = "google"
google_obs -= 1
else:
ob_type = "shapenet"
if loc == '':
loc = shapenet.globals.get_local_shape_loc()
print("Location is {}".format(loc))
csv_location = loc + "/v1_csv/"
database = setup_shapenet(config["shapenet_username"].get(), config["shapenet_password"].get(), csv_location)
if database is None:
print("Error loading database, resort to google")
ob_type = "google"
shapenet_obs -= 1
try:
my_shape, shape_infos = load_object(rng, ob_type, config, scale)
except:
print("Error loading object, try with the other type")
try:
my_shape, shape_infos = load_object(rng, "google" if ob_type == "shapenet" else "shapenet", config, scale)
except:
print("Error loading object, giving up")
continue
google_obs_used.append(shape_infos) if ob_type == "google" else shapenet_obs_used.append(shape_infos)
print(f"{my_shape} loaded.. pose and adding animation")
clear_properties(my_shape)
add_translate_anim(my_shape, Gf.Vec3d(x[0] / meters_per_unit, y[0] / meters_per_unit, z[0] / meters_per_unit))
add_rotation_anim(my_shape,
Gf.Vec3d(rng.uniform(0, 2 * np.pi), rng.uniform(0, 2 * np.pi), rng.uniform(0, 2 * np.pi)))
dynamic_prims.append(stage.GetPrimAtPath(my_shape))
num_keys = rng.choice(range(1, config["experiment_length"].get()), rng.integers(1, 10)).astype(float)
num_keys.sort()
for key in num_keys:
key *= 1
x, y, z, yaw = position_object(environment, type=2)
add_translate_anim(my_shape, Gf.Vec3d(x[0] / meters_per_unit, y[0] / meters_per_unit, z[0] / meters_per_unit),
key)
add_rotation_anim(my_shape, Gf.Vec3d(rng.uniform(0, 360), rng.uniform(0, 360), rng.uniform(0, 360)),
key)
if ob_type == "google":
add_colliders(my_shape)
add_semantics(stage.GetPrimAtPath(my_shape), ob_type)
print("Loading obstacle complete")
return google_obs_used, shapenet_obs_used
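# End-to-end sketch; `environment`, `dynamic_prims` and the "seed" config key
# come from the main simulation script and are assumptions here:
#
#   rng = np.random.default_rng(config["seed"].get())  # hypothetical key
#   google_used, shapenet_used = load_objects(config, environment, rng,
#                                             dynamic_prims, scale=1)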
|
eliabntt/GRADE-RR/irotate_specific/irotate.rviz | Panels:
- Class: rviz/Displays
Help Height: 78
Name: Displays
Property Tree Widget:
Expanded:
- /Global Options1
- /Marker1/Namespaces1
- /TF1/Frames1
- /Odometry6
Splitter Ratio: 0.4965035021305084
Tree Height: 395
- Class: rviz/Selection
Name: Selection
- Class: rviz/Tool Properties
Expanded:
- /2D Pose Estimate1
- /2D Nav Goal1
- /Publish Point1
Name: Tool Properties
Splitter Ratio: 0.5886790156364441
- Class: rviz/Views
Expanded:
- /Current View1
- /Current View1/Focal Point1
Name: Views
Splitter Ratio: 0.5
- Class: rviz/Time
Experimental: false
Name: Time
SyncMode: 0
SyncSource: ""
Preferences:
PromptSaveOnExit: true
Toolbars:
toolButtonStyle: 2
Visualization Manager:
Class: ""
Displays:
- Alpha: 0.5
Cell Size: 1
Class: rviz/Grid
Color: 160; 160; 164
Enabled: false
Line Style:
Line Width: 0.029999999329447746
Value: Lines
Name: Grid
Normal Cell Count: 0
Offset:
X: 0
Y: 0
Z: 0
Plane: XY
Plane Cell Count: 10
Reference Frame: map
Value: false
- Alpha: 1
Class: rviz/RobotModel
Collision Enabled: false
Enabled: true
Links:
All Links Enabled: true
Expand Joint Details: false
Expand Link Details: false
Expand Tree: false
Link Tree Style: Links in Alphabetic Order
Name: RobotModel
Robot Description: robot_description
TF Prefix: ""
Update Interval: 0
Value: true
Visual Enabled: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: RGB8
Decay Time: 0
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Min Color: 0; 0; 0
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.009999999776482582
Style: Flat Squares
Topic: /voxel_cloud
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
- Alpha: 0.699999988079071
Class: rviz/Map
Color Scheme: costmap
Draw Behind: false
Enabled: true
Name: OctomapGrid
Topic: /rtabmap/grid_prob_map
Unreliable: false
Use Timestamp: false
Value: true
- Alpha: 1
Class: rviz/GridCells
Color: 239; 41; 41
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /occ_points_grid_cell
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 252; 175; 62
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /submap_bounding_box_grid_cell
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 252; 233; 79
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /cell
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 114; 159; 207
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /unk_points_grid_cell
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 138; 226; 52
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /free_points_grid_cell
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 206; 92; 0
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /raytracing_target
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 138; 226; 52
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /raytracing_free
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 239; 41; 41
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /raytracing_occu
Unreliable: false
Value: false
- Alpha: 1
Class: rviz/GridCells
Color: 114; 159; 207
Enabled: false
Name: GridCells
Queue Size: 10
Topic: /raytracing_unk
Unreliable: false
Value: false
- Class: rviz/Marker
Enabled: true
Marker Topic: /visualization_marker
Name: Marker
Namespaces:
{}
Queue Size: 100
Value: true
- Alpha: 1
Class: rviz/GridCells
Color: 25; 255; 0
Enabled: true
Name: GridCells
Queue Size: 10
Topic: /frontier_points_grid_cell
Unreliable: false
Value: true
- Alpha: 1
Class: rviz/GridCells
Color: 233; 185; 110
Enabled: true
Name: GridCells
Queue Size: 10
Topic: /opt_heading_cells
Unreliable: false
Value: true
- Class: octomap_rviz_plugin/ColorOccupancyGrid
Enabled: false
Max. Height Display: 3.4028234663852886e+38
Max. Octree Depth: 16
Min. Height Display: -3.4028234663852886e+38
Name: ColorOccupancyGrid
Octomap Topic: /rtabmap/octomap_full
Queue Size: 1
Value: false
Voxel Alpha: 1
Voxel Coloring: Cell Probability
Voxel Rendering: Occupied Voxels
- Class: rviz/TF
Enabled: true
Frame Timeout: 15
Frames:
All Enabled: false
fake/base_link:
Value: false
world:
Value: false
Marker Alpha: 1
Marker Scale: 1
Name: TF
Show Arrows: true
Show Axes: true
Show Names: true
Tree:
{}
Update Interval: 0
Value: true
- Alpha: 1
Buffer Length: 1
Class: rviz/Path
Color: 25; 255; 0
Enabled: true
Head Diameter: 0.10000000149011612
Head Length: 0.10000000149011612
Length: 0.30000001192092896
Line Style: Lines
Line Width: 0.029999999329447746
Name: Path
Offset:
X: 0
Y: 0
Z: 0
Pose Color: 0; 255; 0
Pose Style: Arrows
Queue Size: 10
Radius: 0.029999999329447746
Shaft Diameter: 0.05000000074505806
Shaft Length: 0.10000000149011612
Topic: /move_base/GlobalPlanner/plan
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: true
Keep: 1
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 100
Shape:
Alpha: 1
Axes Length: 1
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Arrow
Topic: /odometry/filtered
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: true
Keep: 1
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 100
Shape:
Alpha: 1
Axes Length: 0.5
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Axes
Topic: /my_robot_0/odom
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: true
Keep: 1
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 100
Shape:
Alpha: 1
Axes Length: 1
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Arrow
Topic: /camera/odometry/filtered
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: true
Keep: 1
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 100
Shape:
Alpha: 1
Axes Length: 0.5
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Axes
Topic: /my_robot_0/camera_odom
Unreliable: false
Value: true
- Alpha: 1
Arrow Length: 0.30000001192092896
Axes Length: 0.30000001192092896
Axes Radius: 0.009999999776482582
Class: rviz/PoseArray
Color: 255; 25; 0
Enabled: true
Head Length: 0.07000000029802322
Head Radius: 0.029999999329447746
Name: PoseArray
Queue Size: 10
Shaft Length: 0.23000000417232513
Shaft Radius: 0.009999999776482582
Shape: Arrow (Flat)
Topic: /traj
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: false
Enabled: false
Keep: 100
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 10
Shape:
Alpha: 1
Axes Length: 1
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Arrow
Topic: /waypoint
Unreliable: false
Value: false
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: true
Enabled: false
Keep: 1
Name: Odometry
Position Tolerance: 0.10000000149011612
Queue Size: 10
Shape:
Alpha: 1
Axes Length: 1
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Arrow
Topic: /rtabmap/odom
Unreliable: false
Value: false
Enabled: true
Global Options:
Background Color: 48; 48; 48
Default Light: true
Fixed Frame: map
Frame Rate: 30
Name: root
Tools:
- Class: rviz/Interact
Hide Inactive Objects: true
- Class: rviz/MoveCamera
- Class: rviz/Select
- Class: rviz/FocusCamera
- Class: rviz/Measure
- Class: rviz/SetInitialPose
Theta std deviation: 0.2617993950843811
Topic: /initialpose
X std deviation: 0.5
Y std deviation: 0.5
- Class: rviz/SetGoal
Topic: /move_base_simple/goal
- Class: rviz/PublishPoint
Single click: true
Topic: /clicked_point
Value: true
Views:
Current:
Class: rviz/Orbit
Distance: 33.44965362548828
Enable Stereo Rendering:
Stereo Eye Separation: 0.05999999865889549
Stereo Focal Distance: 1
Swap Stereo Eyes: false
Value: false
Field of View: 0.7853981852531433
Focal Point:
X: 0.7256796360015869
Y: -0.2849154472351074
Z: 1.5138822793960571
Focal Shape Fixed Size: true
Focal Shape Size: 0.05000000074505806
Invert Z Axis: false
Name: Current View
Near Clip Distance: 0.009999999776482582
Pitch: 1.5697963237762451
Target Frame: <Fixed Frame>
Yaw: 3.1437020301818848
Saved: ~
Window Geometry:
Displays:
collapsed: false
Height: 692
Hide Left Dock: false
Hide Right Dock: true
QMainWindow State: 000000ff00000000fd00000004000000000000023e00000216fc020000000bfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000216000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d0065007200610000000212000000b00000000000000000fb0000000a0049006d0061006700650000000437000001890000000000000000fb0000000a0049006d00610067006501000006ef000000e90000000000000000000000010000010f000004c7fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003f000004c7000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000006b50000003efc0100000002fb0000000800540069006d00650100000000000006b5000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000004710000021600000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Time:
collapsed: false
Tool Properties:
collapsed: false
Views:
collapsed: true
Width: 1717
X: 2673
Y: 291
|
eliabntt/GRADE-RR/irotate_specific/republish_tf.py | #!/usr/bin/env python
import rospy
from tf2_msgs.msg import TFMessage
def callback(data, pub):
# republish the incoming TF message, dropping the robot-internal frames
# (gimbal/odometry links) that should not appear in the final /tf tree
filtered = ("x_link", "y_link", "yaw_link", "base_link", "cameraholder_link")
data_to_pub = TFMessage()
data_to_pub.transforms = [d for d in data.transforms if not any(f in d.child_frame_id for f in filtered)]
pub.publish(data_to_pub)
def listener():
rospy.init_node('tf_republisher')
pub = rospy.Publisher("tf", TFMessage, queue_size=1)
rospy.Subscriber("/tf2", TFMessage, callback, callback_args=(pub))
rospy.spin()
if __name__ == '__main__':
listener()
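# Usage sketch: run this node alongside the simulator with the simulator's raw
# TF tree remapped onto /tf2; the node then republishes a filtered /tf.
# The package name below is a placeholder:
#
#   rosrun grade_irotate republish_tf.py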
|
eliabntt/GRADE-RR/isaac_internals/setup_python_env.sh | #!/bin/bash
# source ~/catkin_ws/devel/setup.bash
#source ~/catkin_ws/devel/setup.bash --extend
SCRIPT_DIR="$(dirname "${BASH_SOURCE}")"
export PYTHONPATH=$PYTHONPATH:$SCRIPT_DIR/../../..:$SCRIPT_DIR/exts/omni.isaac.kit:$SCRIPT_DIR/exts/omni.isaac.gym:$SCRIPT_DIR/kit/kernel/py:$SCRIPT_DIR/kit/plugins/bindings-python:$SCRIPT_DIR/kit/extscore/omni.kit.pip_archive/pip_prebundle:$SCRIPT_DIR/exts/omni.isaac.core_archive/pip_prebundle:$SCRIPT_DIR/exts/omni.isaac.ml_archive/pip_prebundle:$SCRIPT_DIR/extscache/omni.pip.torch-1_13_1-0.1.4+104.2.lx64/torch-1-13-1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$SCRIPT_DIR/../../..:$SCRIPT_DIR/.:$SCRIPT_DIR/exts/omni.usd.schema.isaac/bin:$SCRIPT_DIR/kit:$SCRIPT_DIR/kit/kernel/plugins:$SCRIPT_DIR/kit/libs/iray:$SCRIPT_DIR/kit/plugins:$SCRIPT_DIR/kit/plugins/bindings-python:$SCRIPT_DIR/kit/plugins/carb_gfx:$SCRIPT_DIR/kit/plugins/rtx:$SCRIPT_DIR/kit/plugins/gpu.foundation:$SCRIPT_DIR/kit/extensions/extensions-bundled/bin # $SCRIPT_DIR/exts/omni.isaac.motion_planning/bin
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/PACKAGE-LICENSES/omni.isaac.synthetic_utils-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/PACKAGE-LICENSES/dependencies/PIP-packages-LICENSES.txt | Jinja2-2.11.3
-------------
Copyright 2007 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MarkupSafe-2.0.1
----------------
Copyright 2010 Pallets
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Pint-0.17
---------
Copyright (c) 2012 by Hernan E. Grecco and contributors. See AUTHORS
for more details.
Some rights reserved.
Redistribution and use in source and binary forms of the software as well
as documentation, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* The names of the contributors may not be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
PyYAML-5.4.1
------------
Copyright (c) 2017-2021 Ingy döt Net
Copyright (c) 2006-2016 Kirill Simonov
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
bezier-2020.5.19
----------------
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
boto3-1.17.1
------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
charset-normalizer-2.0.4
------------------------
MIT License
Copyright (c) 2019 TAHRI Ahmed R.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
construct-2.10.61
-----------------
Copyright (C) 2006-2020
Arkadiusz Bulski ([email protected])
Tomer Filiba ([email protected])
Corbin Simpson ([email protected])
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
cycler-0.10.0
-------------
Copyright (c) 2015, matplotlib project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the matplotlib project nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
gunicorn-20.1.0
---------------
2009-2018 (c) Benoît Chesneau <[email protected]>
2009-2015 (c) Paul J. Davis <[email protected]>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
kiwisolver-1.3.2
----------------
=========================
The Kiwi licensing terms
=========================
Kiwi is licensed under the terms of the Modified BSD License (also known as
New or Revised BSD), as follows:
Copyright (c) 2013, Nucleic Development Team
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of the Nucleic Development Team nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
About Kiwi
----------
Chris Colbert began the Kiwi project in December 2013 in an effort to
create a blisteringly fast UI constraint solver. Chris is still the
project lead.
The Nucleic Development Team is the set of all contributors to the Nucleic
project and its subprojects.
The core team that coordinates development on GitHub can be found here:
http://github.com/nucleic. The current team consists of:
* Chris Colbert
Our Copyright Policy
--------------------
Nucleic uses a shared copyright model. Each contributor maintains copyright
over their contributions to Nucleic. But, it is important to note that these
contributions are typically only changes to the repositories. Thus, the Nucleic
source code, in its entirety is not the copyright of any single person or
institution. Instead, it is the collective copyright of the entire Nucleic
Development Team. If individual contributors want to maintain a record of what
changes/contributions they have specific copyright on, they should indicate
their copyright in the commit message of the change, when they commit the
change to one of the Nucleic repositories.
With this in mind, the following banner should be used in any source code file
to indicate the copyright and license terms:
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
#------------------------------------------------------------------------------
llvmlite-0.37.0
---------------
Copyright (c) 2014-, Continuum Analytics, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
matplotlib-3.4.3
----------------
License agreement for matplotlib versions 1.3.0 and later
=========================================================
1. This LICENSE AGREEMENT is between the Matplotlib Development Team
("MDT"), and the Individual or Organization ("Licensee") accessing and
otherwise using matplotlib software in source or binary form and its
associated documentation.
2. Subject to the terms and conditions of this License Agreement, MDT
hereby grants Licensee a nonexclusive, royalty-free, world-wide license
to reproduce, analyze, test, perform and/or display publicly, prepare
derivative works, distribute, and otherwise use matplotlib
alone or in any derivative version, provided, however, that MDT's
License Agreement and MDT's notice of copyright, i.e., "Copyright (c)
2012- Matplotlib Development Team; All Rights Reserved" are retained in
matplotlib alone or in any derivative version prepared by
Licensee.
3. In the event Licensee prepares a derivative work that is based on or
incorporates matplotlib or any part thereof, and wants to
make the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to matplotlib.
4. MDT is making matplotlib available to Licensee on an "AS
IS" basis. MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB
WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR
LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING
MATPLOTLIB, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF
THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between MDT and
Licensee. This License Agreement does not grant permission to use MDT
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using matplotlib,
Licensee agrees to be bound by the terms and conditions of this License
Agreement.
License agreement for matplotlib versions prior to 1.3.0
========================================================
1. This LICENSE AGREEMENT is between John D. Hunter ("JDH"), and the
Individual or Organization ("Licensee") accessing and otherwise using
matplotlib software in source or binary form and its associated
documentation.
2. Subject to the terms and conditions of this License Agreement, JDH
hereby grants Licensee a nonexclusive, royalty-free, world-wide license
to reproduce, analyze, test, perform and/or display publicly, prepare
derivative works, distribute, and otherwise use matplotlib
alone or in any derivative version, provided, however, that JDH's
License Agreement and JDH's notice of copyright, i.e., "Copyright (c)
2002-2011 John D. Hunter; All Rights Reserved" are retained in
matplotlib alone or in any derivative version prepared by
Licensee.
3. In the event Licensee prepares a derivative work that is based on or
incorporates matplotlib or any part thereof, and wants to
make the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to matplotlib.
4. JDH is making matplotlib available to Licensee on an "AS
IS" basis. JDH MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, JDH MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB
WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
5. JDH SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR
LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING
MATPLOTLIB, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF
THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between JDH and
Licensee. This License Agreement does not grant permission to use JDH
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using matplotlib,
Licensee agrees to be bound by the terms and conditions of this License
Agreement.
nest_asyncio-1.5.1
------------------
BSD 2-Clause License
Copyright (c) 2018-2020, Ewald de Wit
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
numba-0.51.2
------------
Copyright (c) 2012, Anaconda, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
numpy-quaternion-2020.9.5.14.42.2
---------------------------------
The MIT License (MIT)
Copyright (c) 2018 Michael Boyle
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
nvsmi-0.4.2
-----------
MIT License
Copyright (c) 2019 Panagiotis Mavrogiorgos
Copyright (c) 2017 anderskm
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
oauthlib-3.1.1
--------------
Copyright (c) 2019 The OAuthlib Community
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of this project nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
osqp-0.6.2.post5
----------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
packaging-21.0
--------------
This software is made available under the terms of *either* of the licenses
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software are made
under the terms of *both* these licenses.
pyparsing-2.4.7
---------------
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
qdldl-0.1.5.post0
-----------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2020 Paul Goulart, Bartolomeo Stellato, Goran Banjac
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
requests-2.26.0
---------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
requests-oauthlib-1.3.0
-----------------------
ISC License
Copyright (c) 2014 Kenneth Reitz.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
s3transfer-0.3.7
----------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
scipy-1.7.1
-----------
Copyright (c) 2001-2002 Enthought, Inc. 2003-2019, SciPy Developers.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----
This binary distribution of Scipy also bundles the following software:
Name: OpenBLAS
Files: .libs/libopenb*.so
Description: bundled as a dynamically linked library
Availability: https://github.com/xianyi/OpenBLAS/
License: 3-clause BSD
Copyright (c) 2011-2014, The OpenBLAS Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. Neither the name of the OpenBLAS project nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Name: LAPACK
Files: .libs/libopenb*.so
Description: bundled in OpenBLAS
Availability: https://github.com/xianyi/OpenBLAS/
License: 3-clause BSD
Copyright (c) 1992-2013 The University of Tennessee and The University
of Tennessee Research Foundation. All rights
reserved.
Copyright (c) 2000-2013 The University of California Berkeley. All
rights reserved.
Copyright (c) 2006-2013 The University of Colorado Denver. All rights
reserved.
$COPYRIGHT$
Additional copyrights may follow
$HEADER$
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer listed
in this license in the documentation and/or other materials
provided with the distribution.
- Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
The copyright holders provide no reassurances that the source code
provided does not infringe any patent, copyright, or any other
intellectual property rights of third parties. The copyright holders
disclaim any liability to any recipient for claims brought against
recipient by any third party for infringement of that parties
intellectual property rights.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Name: GCC runtime library
Files: .libs/libgfortran*.so
Description: dynamically linked to files compiled with gcc
Availability: https://gcc.gnu.org/viewcvs/gcc/
License: GPLv3 + runtime exception
Copyright (C) 2002-2017 Free Software Foundation, Inc.
Libgfortran is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.
Libgfortran is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.
You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
<http://www.gnu.org/licenses/>.
----
Full text of license texts referred to above follows (that they are
listed below does not necessarily imply the conditions apply to the
present binary release):
----
GCC RUNTIME LIBRARY EXCEPTION
Version 3.1, 31 March 2009
Copyright (C) 2009 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
This GCC Runtime Library Exception ("Exception") is an additional
permission under section 7 of the GNU General Public License, version
3 ("GPLv3"). It applies to a given file (the "Runtime Library") that
bears a notice placed by the copyright holder of the file stating that
the file is governed by GPLv3 along with this Exception.
When you use GCC to compile a program, GCC may combine portions of
certain GCC header files and runtime libraries with the compiled
program. The purpose of this Exception is to allow compilation of
non-GPL (including proprietary) programs to use, in this way, the
header files and runtime libraries covered by this Exception.
0. Definitions.
A file is an "Independent Module" if it either requires the Runtime
Library for execution after a Compilation Process, or makes use of an
interface provided by the Runtime Library, but is not otherwise based
on the Runtime Library.
"GCC" means a version of the GNU Compiler Collection, with or without
modifications, governed by version 3 (or a specified later version) of
the GNU General Public License (GPL) with the option of using any
subsequent versions published by the FSF.
"GPL-compatible Software" is software whose conditions of propagation,
modification and use would permit combination with GCC in accord with
the license of GCC.
"Target Code" refers to output from any compiler for a real or virtual
target processor architecture, in executable form or suitable for
input to an assembler, loader, linker and/or execution
phase. Notwithstanding that, Target Code does not include data in any
format that is used as a compiler intermediate representation, or used
for producing a compiler intermediate representation.
The "Compilation Process" transforms code entirely represented in
non-intermediate languages designed for human-written code, and/or in
Java Virtual Machine byte code, into Target Code. Thus, for example,
use of source code generators and preprocessors need not be considered
part of the Compilation Process, since the Compilation Process can be
understood as starting with the output of the generators or
preprocessors.
A Compilation Process is "Eligible" if it is done using GCC, alone or
with other GPL-compatible software, or if it is done without using any
work based on GCC. For example, using non-GPL-compatible Software to
optimize any GCC intermediate representations would not qualify as an
Eligible Compilation Process.
1. Grant of Additional Permission.
You have permission to propagate a work of Target Code formed by
combining the Runtime Library with Independent Modules, even if such
propagation would otherwise violate the terms of GPLv3, provided that
all Target Code was generated by Eligible Compilation Processes. You
may then convey such a combination under terms of your choice,
consistent with the licensing of the Independent Modules.
2. No Weakening of GCC Copyleft.
The availability of this Exception does not imply any general
presumption that third-party software is unaffected by the copyleft
requirements of the license of GCC.
----
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
selenium-3.141.0
----------------
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2018 Software Freedom Conservancy (SFC)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
urllib3-1.26.6
--------------
MIT License
Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
watchgod-0.7
------------
The MIT License (MIT)
Copyright (c) 2017, 2018, 2019, 2020 Samuel Colvin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
webbot-0.34
-----------
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
webbot, a library that makes web automation simple and damn easy.
Copyright (C) 2018 nateshmbhat
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
webbot Copyright (C) 2018 nateshmbhat
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
pycapnp-1.0.0
-------------
Copyright (c) 2013, Jason Paryani <[email protected]>
Copyright (c) 2019-2020, Jacob Alexander <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
gym-0.26.2
----------
The MIT License
Copyright (c) 2016 OpenAI (https://openai.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
# Mujoco models
This work is derived from [MuJoCo models](http://www.mujoco.org/forum/index.php?resources/) used under the following license:
```
This file is part of MuJoCo.
Copyright 2009-2015 Roboti LLC.
Mujoco :: Advanced physics simulation engine
Source : www.roboti.us
Version : 1.31
Released : 23Apr16
Author :: Vikash Kumar
Contacts : [email protected]
```
torchvision-0.14.0+cu117
------------------------
BSD 3-Clause License
Copyright (c) Soumith Chintala 2016,
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
nvidia_lula_no_cuda-0.8.2
-------------------------
Copyright (c) 2019-2022 NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited.
**********************************************************************
THIRD-PARTY LICENSES
====================
ur_description 1.2.7
--------------------
The URDF and mesh files in content/external/universal_robots/ are derived from
those found in the "universal_robot" package of the ROS-Industrial project.
Site: http://wiki.ros.org/universal_robot
Copyright 2013-2019 Felix Messmer, Kelsey Hawkins, Shaun Edwards,
Stuart Glaser, Wim Meeussen, and Contributors
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
ros-industrial/fanuc 0.5.1
--------------------------
The URDF and mesh files in content/external/fanuc/ are derived from those found
in the "fanuc" package of the ROS-Industrial project.
Site: http://wiki.ros.org/fanuc
License: https://github.com/ros-industrial/fanuc/blob/0.5.1/LICENSE
Copyright (c) 2012-2015, TU Delft Robotics Institute
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the TU Delft Robotics Institute nor the names
of its contributors may be used to endorse or promote products
derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
franka_ros 0.7.0
----------------
The Franka mesh files in content/external/franka/meshes/ were converted
(with minor modifications) from those in the "franka_description" component
of the franka_ros package.
Site: https://github.com/frankaemika/franka_ros/
License: https://github.com/frankaemika/franka_ros/blob/0.7.0/NOTICE
https://github.com/frankaemika/franka_ros/blob/0.7.0/LICENSE
Copyright 2017 Franka Emika GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
Pygments-2.14.0
---------------
Copyright (c) 2006-2022 by the respective authors (see AUTHORS file).
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
jedi-0.18.2
-----------
All contributions towards Jedi are MIT licensed.
-------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) <2013> <David Halter and others, see AUTHORS.txt>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
parso-0.8.3
-----------
All contributions towards parso are MIT licensed.
Some Python files have been taken from the standard library and are therefore
PSF licensed. Modifications on these files are dual licensed (both MIT and
PSF). These files are:
- parso/pgen2/*
- parso/tokenize.py
- parso/token.py
- test/test_pgen2.py
Also some test files under test/normalizer_issue_files have been copied from
https://github.com/PyCQA/pycodestyle (Expat License == MIT License).
-------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) <2013-2017> <David Halter and others, see AUTHORS.txt>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------------------
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
are retained in Python alone or in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
prompt-toolkit-3.0.20
---------------------
Copyright (c) 2014, Jonathan Slenders
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
ptpython-3.0.18
---------------
Copyright (c) 2015, Jonathan Slenders
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
* Neither the name of the {organization} nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
wcwidth-0.2.6
-------------
The MIT License (MIT)
Copyright (c) 2014 Jeff Quast <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Markus Kuhn -- 2007-05-26 (Unicode 5.0)
Permission to use, copy, modify, and distribute this software
for any purpose and without fee is hereby granted. The author
disclaims all warranties with regard to this software.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/PACKAGE-LICENSES/dependencies/isaac-sim-LICENSES.txt | libogg-LICENSE.txt
pip-vendor-requests-LICENSE.txt
pip-LICENSE.txt
rapidjson-LICENSE.md
omni.sensors.nv.wpm-LICENSE.md
imgui-LICENSE.txt
pycapnp-LICENSE.md
franka-LICENSE.md
fmt-LICENSE.txt
ocs2-LICENSE.txt
physxsdk-LICENSE.md
pip-vendor-tenacity-LICENSE.txt
doxygen-LICENSE.txt
open_ai_assets-LICENSE.txt
pip-vendor-packaging-LICENSE.txt
usd_audio_schema-LICENSE.txt
valgrind-header-LICENSE.txt
pip-vendor-charset_normalizer-LICENSE.txt
pip-vendor-distlib-LICENSE.txt
dependencies
eigen-LICENSE.md
omni.sensors.nv.lidar-LICENSE.md
cpptoml-LICENSE.md
zlib-LICENSE.md
pip-vendor-msgpack-LICENSE.txt
openssl-LICENSE.md
ptex-LICENSE.md
breakpad-LICENSE.md
omni.sensors.nv.material_tools-LICENSE.md
sqlite-LICENSE.md
Tracy-LICENSE.txt
libcurl-LICENSE.txt
ingenuity-acknowledgement.txt
openexr-LICENSE.md
pip-vendor-distro-LICENSE.txt
lula-LICENSE.md
tracy_client_source-LICENSE.txt
omni.sensors.nv.materials-LICENSE.md
urdfdom-LICENSE.md
libbacktrace-LICENSE.txt
python38-LICENSE.txt
cxxopts-LICENSE.md
fmt-LICENSE.md
gtest-LICENSE.md
ant-LICENSE.txt
pip-vendor-certifi-LICENSE.txt
usd-LICENSE.md
yaml-cpp-LICENSE.md
opencascade-LICENSE.md
tracy-LICENSE.txt
omni.sensors.nv.lidar_tools-LICENSE.md
glew-LICENSE.md
libflac-LICENSE.txt
franka-LICENSE.txt
doctest-LICENSE.txt
libvorbis-LICENSE.txt
pybind11-LICENSE.md
opensubdiv-LICENSE.md
python-LICENSE.txt
omni-config-cpp-LICENSE.txt
pip-vendor-progress-LICENSE.txt
omni.sensors.nv.ultrasonic-LICENSE.md
libopus-LICENSE.txt
pip-vendor-resolvelib-LICENSE.txt
isaac-sim-LICENSE.txt
ycb-LICENSE.txt
concurrentqueue-mpmc_sema-LICENSE.md
gflag-LICENSE.md
omni.sensors.nv.radar-LICENSE.md
pip-vendor-six-LICENSE.txt
boost-LICENSE.md
zstd-LICENSE.txt
bzip2-LICENSE.md
bzip2-LICENSE.txt
carbonite-LICENSE.txt
pip-vendor-idna-LICENSE.md
usd-ext-physics-LICENSE.txt
concurrentqueue-LICENSE.md
kukaiiwa-LICENSE.txt
pip-vendor-colorama-LICENSE.txt
glfw-LICENSE.md
xz-LICENSE.txt
pip-vendor-pyparsing-LICENSE.txt
pip-vendor-appdirs-LICENSE.txt
python37-LICENSE.txt
minimp3-LICENSE.txt
nvtx-LICENSE.txt
pip-vendor-urllib3-LICENSE.txt
trifinger-LICENSE.txt
vulkansdk-LICENSE.md
tinytoml-LICENSE.md
step_reader-LICENSE.md
humanoid-LICENSE.txt
console-bridge-LICENSE.md
nv_usd-LICENSE.md
sqlite-LICENSE.txt
nv_usd-LICENSES.txt
amp_motions-acknowledgement.txt
anymal_c-LICENSE.txt
cuda-LICENSE.txt
gsl-LICENSE.md
pip-vendor-cachecontrol-LICENSE.txt
robinhoodhashing-LICENSE.md
valijson-LICENSE.txt
pip-vendor-webencodings-LICENSE.txt
forgeaudio-LICENSE.md
openssl-LICENSE.txt
vulkansdk-root-LICENSE.md
CUDA-license.txt
omni.sensors.nv.common-LICENSE.md
omni.sensors.nv.beams-LICENSE.md
octomap-LICENSE.txt
abseil-LICENSE.txt
pip-vendor-html5lib-LICENSE.txt
isaac_sdk-LICENSE.md
assimp-LICENSE.md
onshape_client-LICENSE.md
SDLGameControllerDB-LICENSE.md
libffi-LICENSE.txt
pip-vendor-pep517-LICENSE.txt
ocs2-LICENSE.md
omniverse usd resolver-LICENSES.txt
cartpole-LICENSE.txt
usd_schema_semantics-LICENSE.md
tray-LICENSE.txt
python-LICENSE.md
zlib-LICENSE.txt
omni.sensors.nv.ids-LICENSE.md
capnproto-LICENSE.md
usd-build-LICENSE.md
ryu-LICENSE.txt
libunwind-LICENSE.md
tinyxml2-LICENSE.txt
tinyxml-LICENSE.md
python39-LICENSE.txt
IlmBase-LICENSE.md
pip-vendor-tomli-LICENSE.txt
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/config/extension.toml | [core]
reloadable = true
order = 0
[package]
version = "0.4.3"
category = "Simulation"
title = "Isaac Sim Synthetic Data Utilities"
description = "Utility functions for generating synthetic data"
authors = ["NVIDIA"]
repository = ""
keywords = ["isaac", "synthetic", "utils"]
changelog = "docs/CHANGELOG.md"
readme = "docs/README.md"
icon = "data/icon.png"
writeTarget.kit = true
[dependencies]
"omni.kit.uiapp" = {}
"omni.syntheticdata" = {}
"omni.kit.pip_archive" = {} # pulls in pillow
"omni.isaac.core" = {}
"omni.isaac.core_archive" = {}
[[python.module]]
name = "omni.isaac.synthetic_utils"
[[python.module]]
name = "omni.isaac.synthetic_utils.tests"
[[test]]
dependencies = [
"omni.hydra.rtx", # "omni.hydra.pxr", Can we run and pass with Storm ?
"omni.kit.viewport.utility",
"omni.kit.viewport.window",
"omni.physx",
"omni.kit.primitive.mesh",
"omni.kit.material.library",
] |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/syntheticdata.py | # Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Helper class for obtaining groundtruth data from OmniKit.
Support provided for RGB, Depth, Bounding Box (2D Tight, 2D Loose, 3D),
segmentation (instance and semantic), and camera parameters.
Typical usage example:
kit = OmniKitHelper() # Start omniverse kit
sd_helper = SyntheticDataHelper()
gt = sd_helper.get_groundtruth(['rgb', 'depth', 'boundingBox2DTight'], viewport)
"""
import math
import time
import typing
import asyncio
import carb
import omni
import numpy as np
import builtins
from pxr import Usd
class SyntheticDataHelper:
def __init__(self):
self.app = omni.kit.app.get_app_interface()
ext_manager = self.app.get_extension_manager()
ext_manager.set_extension_enabled("omni.syntheticdata", True)
from omni.syntheticdata import sensors, helpers
import omni.syntheticdata._syntheticdata as sd # Must be imported after getting app interface
self.sd = sd
self.sd_interface = self.sd.acquire_syntheticdata_interface()
self.carb_settings = carb.settings.acquire_settings_interface()
self.sensor_helper_lib = sensors
self.generic_helper_lib = helpers
self.sensor_helpers = {
"rgb": sensors.get_rgb,
"depth": sensors.get_distance_to_image_plane,
"depthLinear": sensors.get_distance_to_camera,
"instanceSegmentation": sensors.get_instance_segmentation,
"semanticSegmentation": sensors.get_semantic_segmentation,
"boundingBox2DTight": sensors.get_bounding_box_2d_tight,
"boundingBox2DLoose": sensors.get_bounding_box_2d_loose,
"boundingBox3D": sensors.get_bounding_box_3d,
"motion-vector": sensors.get_motion_vector,
"normals": sensors.get_normals,
"camera": self.get_camera_params,
"pose": self.get_pose,
"occlusion": sensors.get_occlusion,
}
self.sensor_types = {
"rgb": self.sd.SensorType.Rgb,
"depth": self.sd.SensorType.DistanceToImagePlane,
"depthLinear": self.sd.SensorType.DistanceToCamera,
"instanceSegmentation": self.sd.SensorType.InstanceSegmentation,
"semanticSegmentation": self.sd.SensorType.SemanticSegmentation,
"boundingBox2DTight": self.sd.SensorType.BoundingBox2DTight,
"boundingBox2DLoose": self.sd.SensorType.BoundingBox2DLoose,
"boundingBox3D": self.sd.SensorType.BoundingBox3D,
"occlusion": self.sd.SensorType.Occlusion,
"motion-vector": self.sd.SensorType.MotionVector,
"normals": self.sd.SensorType.Normal,
}
self.sensor_state = {s: False for s in list(self.sensor_helpers.keys())}
def get_camera_params(self, viewport):
"""Get active camera intrinsic and extrinsic parameters.
Returns:
A dict of the active camera's parameters.
pose (numpy.ndarray): camera position in world coordinates,
fov (float): horizontal field of view in radians
focal_length (float)
horizontal_aperture (float)
view_projection_matrix (numpy.ndarray(dtype=float64, shape=(4, 4)))
resolution (dict): resolution as a dict with 'width' and 'height'.
clipping_range (tuple(float, float)): Near and Far clipping values.
"""
stage = omni.usd.get_context().get_stage()
prim = stage.GetPrimAtPath(viewport.get_active_camera())
prim_tf = omni.usd.get_world_transform_matrix(prim)
current_time = omni.timeline.get_timeline_interface().get_current_time()
view_params = self.generic_helper_lib.get_view_params(viewport)
hfov = 2 * math.atan(view_params["horizontal_aperture"] / (2 * view_params["focal_length"]))
vfov = prim.GetAttribute('verticalAperture').Get()
view_proj_mat = self.generic_helper_lib.get_view_proj_mat(view_params)
return {
"pose": np.array(prim_tf),
"hfov": hfov,
"vfov": vfov,
"ctime": current_time,
"focal_length": view_params["focal_length"],
"horizontal_aperture": view_params["horizontal_aperture"],
"view_projection_matrix": view_proj_mat,
"resolution": {"width": view_params["width"], "height": view_params["height"]},
"clipping_range": view_params["clipping_range"],
}
def get_pose(self):
"""Get pose of all objects with a semantic label.
"""
stage = omni.usd.get_context().get_stage()
mappings = self.generic_helper_lib.get_instance_mappings()
pose = []
timeline = omni.timeline.get_timeline_interface()
time = timeline.get_current_time() * timeline.get_time_codes_per_seconds()
time = Usd.TimeCode(time)
for m in mappings:
prim_path = m[1]
prim = stage.GetPrimAtPath(prim_path)
prim_tf = omni.usd.get_world_transform_matrix(prim, time)
pose.append((str(prim_path), m[2], str(m[3]), np.array(prim_tf)))
return pose
def initialize(self, sensor_names, viewport_api):
"""Initialize sensors in the list provided.
Args:
sensor_names (list of str): List of sensor names (keys of sensor_helpers) to initialize.
viewport_api (Any): Viewport from which to retrieve/create the sensors.
"""
for sensor_name in sensor_names:
if sensor_name != "camera" and sensor_name != "pose":
self.sensor_helper_lib.enable_sensors(viewport_api, [self.sensor_types[sensor_name]])
if builtins.ISAAC_LAUNCHED_FROM_JUPYTER:
data = []
while data == []:
self.app.update()
data = self.sensor_helpers[sensor_name](viewport_api)
else:
future = asyncio.ensure_future(self.sensor_helper_lib.next_sensor_data_async(viewport_api))
while not future.done():
self.app.update()
self.app.update()
async def initialize_async(self, sensor_names, viewport_api):
"""Initialize sensors in the list provided. Async version
Args:
sensor_names (list of str): List of sensor names (keys of sensor_helpers) to initialize.
viewport_api (Any): Viewport from which to retrieve/create the sensors.
"""
for sensor_name in sensor_names:
if sensor_name != "camera" and sensor_name != "pose":
await self.sensor_helper_lib.initialize_async(viewport_api, [self.sensor_types[sensor_name]])
await self.sensor_helper_lib.next_sensor_data_async(viewport_api)
pass
def get_groundtruth(self, sensor_names, viewport_api, verify_sensor_init=True, wait_for_sensor_data=0.1):
"""Get groundtruth from specified gt_sensors.
Args:
sensor_names (list): List of sensor name strings. Valid names: rgb, depth, depthLinear,
instanceSegmentation, semanticSegmentation, boundingBox2DTight, boundingBox2DLoose,
boundingBox3D, motion-vector, normals, occlusion, camera, pose
viewport_api (Any): Viewport from which to retrieve/create sensor.
verify_sensor_init (bool): Additional check to verify creation and initialization of sensors.
wait_for_sensor_data (float): Additional time to sleep before returning ground truth so that sensor buffers are correctly filled. Defaults to 0.1 seconds.
Returns:
Dict of sensor outputs
"""
if wait_for_sensor_data > 0:
time.sleep(wait_for_sensor_data)
# Create and initialize sensors
if verify_sensor_init:
loop = asyncio.get_event_loop()
if loop and loop.is_running():
carb.log_warn("Set verify_sensor_init to false if running with asyncio")
pass
else:
self.initialize(sensor_names, viewport_api)
gt = {}
sensor_state = {}
# Process non-RT-only sensors
for sensor in sensor_names:
if sensor not in ["camera", "pose"]:
if sensor == "instanceSegmentation":
gt[sensor] = self.sensor_helpers[sensor](viewport_api, parsed=True, return_mapping=True)
elif sensor == "boundingBox3D":
gt[sensor] = self.sensor_helpers[sensor](viewport_api, parsed=True, return_corners=True)
else:
gt[sensor] = self.sensor_helpers[sensor](viewport_api)
self.sensor_helper_lib.create_or_retrieve_sensor(viewport_api, self.sensor_types[sensor])
# sensors are always initialized after they are created
sensor_state[sensor] = True
elif sensor == "pose":
sensor_state[sensor] = True
gt[sensor] = self.sensor_helpers[sensor]()
else:
sensor_state[sensor] = True
gt[sensor] = self.sensor_helpers[sensor](viewport_api)
gt["state"] = sensor_state
return gt
def get_semantic_ids(self, semantic_data: list = [[]]) -> typing.List[int]:
"""Returns unique id's for a semantic image
Args:
semantic_data (list, optional): Semantic Image. Defaults to [[]].
Returns:
typing.List[int]: List of unique semantic IDs in image
"""
return list(np.unique(semantic_data))
def get_semantic_id_map(self, semantic_labels: list = []) -> dict:
"""
Get map of semantic ID from label
"""
output = {}
if len(semantic_labels) > 0:
for label in semantic_labels:
idx = self.sd_interface.get_semantic_segmentation_id_from_data("class", label)
output[label] = idx
return output
def get_semantic_label_map(self, semantic_ids: list = []) -> dict:
"""
Get map of semantic label from ID
"""
output = {}
if len(semantic_ids) > 0:
for idx in semantic_ids:
label = self.sd_interface.get_semantic_segmentation_data_from_id(idx)
output[idx] = label
return output
def get_mapped_semantic_data(
self, semantic_data: list = [[]], user_semantic_label_map: dict = {}, remap_using_base_class=False
) -> list:
"""Map semantic segmentation data to IDs specified by user
Usage:
gt = get_groundtruth()
user_semantic_label_map ={"cone":4, "cylinder":5, "cube":6}
mapped_data = get_mapped_semantic_data(gt["semanticSegmentation"], user_semantic_label_map)
Args:
semantic_data (list, optional): Raw semantic image. Defaults to [[]].
user_semantic_label_map (dict, optional): Dictionary of label to id pairs. Defaults to {}.
remap_using_base_class (bool, optional): If a nested (colon-separated) class label is found, use its last component as the base class. Defaults to False.
Returns:
list: Semantic image with ids remapped according to user_semantic_label_map.
"""
semantic_data_np = np.array(semantic_data)
unique_semantic_ids = list(np.unique(semantic_data_np))
unique_semantic_labels_map = self.get_semantic_label_map(unique_semantic_ids)
for unique_id, unique_label in unique_semantic_labels_map.items():
label = unique_label
if remap_using_base_class:
label = unique_label.split(":")[-1]
if label in user_semantic_label_map:
semantic_data_np[np.where(semantic_data == unique_id)] = user_semantic_label_map[label]
return semantic_data_np.tolist()
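# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original class): get_camera_params() uses a
# focal-length/aperture model, so a 3x3 pinhole intrinsics matrix can be
# assembled from its output. This helper is illustrative only; it assumes
# square pixels, so fy is derived from fx rather than from a vertical aperture.
def example_intrinsics_from_camera_params(params):
    """Build a 3x3 pinhole intrinsics matrix from a get_camera_params() dict."""
    width = params["resolution"]["width"]
    height = params["resolution"]["height"]
    # fx in pixels from the focal length and horizontal aperture (same units)
    fx = width * params["focal_length"] / params["horizontal_aperture"]
    fy = fx  # square-pixel assumption
    cx, cy = width / 2.0, height / 2.0
    return np.array([[fx, 0.0, cx], [0.0, fy, cy], [0.0, 0.0, 1.0]])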
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from . import syntheticdata as syntheticdata
from . import visualization as visualization
from .writers import *
from .syntheticdata import SyntheticDataHelper
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/visualization.py | # Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
import struct
import random
import colorsys
import numpy as np
from PIL import Image, ImageDraw
def random_colours(N, enable_random=True, num_channels=3):
"""
Generate random colors.
Generate visually distinct colours by linearly spacing the hue
channel in HSV space and then convert to RGB space.
"""
start = 0
if enable_random:
random.seed(10)
start = random.random()
hues = [(start + i / N) % 1.0 for i in range(N)]
colours = [list(colorsys.hsv_to_rgb(h, 0.9, 1.0)) for h in hues]
if num_channels == 4:
for color in colours:
color.append(1.0)
if enable_random:
random.shuffle(colours)
return colours
def plot_boxes(ax, bboxes, labels=None, colours=None, label_size=10):
import matplotlib.pyplot as plt
if colours is None:
colours = random_colours(len(bboxes))
if labels is None:
labels = [""] * len(bboxes)
for bb, label, colour in zip(bboxes, labels, colours):
maxint = 2 ** (struct.Struct("i").size * 8 - 1) - 1
# if a bbox is not visible, do not draw
if bb[0] != maxint and bb[1] != maxint:
x = bb[0]
y = bb[1]
w = bb[2] - x
h = bb[3] - y
box = plt.Rectangle((x, y), w, h, fill=False, edgecolor=colour)
ax.add_patch(box)
if label:
font = {"family": "sans-serif", "color": colour, "size": label_size}
ax.text(bb[0], bb[1], label, fontdict=font)
def colorize_depth(depth_image, width, height, num_channels=3):
""" Colorizes depth data for visualization.
Args:
depth_image (numpy.ndarray): Depth data from the sensor.
width (int): Width of the viewport.
height (int): Height of the viewport.
num_channels (int): Specify number of channels i.e. 3 or 4.
"""
colorized_image = np.zeros((height, width, num_channels))
# avoid exact-zero depth values before normalizing
depth_image[depth_image == 0.0] = 1e-5
depth_image = np.clip(depth_image, 0, 255)
# normalize to [0, 1] before writing the grayscale channels
depth_image -= np.min(depth_image)
depth_image /= np.max(depth_image) - np.min(depth_image)
colorized_image[:, :, 0] = depth_image
colorized_image[:, :, 1] = depth_image
colorized_image[:, :, 2] = depth_image
if num_channels == 4:
colorized_image[:, :, 3] = 1
colorized_image = (colorized_image * 255).astype(int)
return colorized_image
def colorize_segmentation(segmentation_image, width, height, num_channels=3, num_colors=None):
""" Colorizes segmentation data for visualization.
Args:
segmentation_image (numpy.ndarray): Segmentation data from the sensor.
width (int): Width of the viewport.
height (int): Height of the viewport.
num_channels (int): Specify number of channels i.e. 3 or 4.
num_colors (int): Specify number of colors for consistency across frames.
"""
segmentation_mappings = segmentation_image[:, :, 0]
segmentation_list = np.unique(segmentation_mappings)
if num_colors is None:
num_colors = np.max(segmentation_list) + 1
color_pixels = random_colours(num_colors, True, num_channels)
color_pixels = [[color_pixel[i] * 255 for i in range(num_channels)] for color_pixel in color_pixels]
segmentation_masks = np.zeros((len(segmentation_list), *segmentation_mappings.shape), dtype=bool)  # np.bool is removed in modern NumPy
index_list = []
for index, segmentation_id in enumerate(segmentation_list):
segmentation_masks[index] = segmentation_mappings == segmentation_id
index_list.append(segmentation_id)
color_image = np.zeros((height, width, num_channels), dtype=np.uint8)
for index, mask, colour in zip(index_list, segmentation_masks, color_pixels):
color_image[mask] = color_pixels[index] if index > 0 else 0
return color_image
def colorize_bboxes(bboxes_2d_data, bboxes_2d_rgb, num_channels=3):
""" Colorizes 2D bounding box data for visualization.
Args:
bboxes_2d_data (numpy.ndarray): 2D bounding box data from the sensor.
bboxes_2d_rgb (numpy.ndarray): RGB data from the sensor to embed bounding box.
num_channels (int): Specify number of channels i.e. 3 or 4.
"""
semantic_id_list = []
bbox_2d_list = []
rgb_img = Image.fromarray(bboxes_2d_rgb)
rgb_img_draw = ImageDraw.Draw(rgb_img)
for bbox_2d in bboxes_2d_data:
if bbox_2d[5] > 0:
semantic_id_list.append(bbox_2d[1])
bbox_2d_list.append(bbox_2d)
semantic_id_list_np = np.unique(np.array(semantic_id_list))
color_list = random_colours(len(semantic_id_list_np.tolist()), True, num_channels)
for bbox_2d in bbox_2d_list:
index = np.where(semantic_id_list_np == bbox_2d[1])[0][0]
bbox_color = color_list[index]
outline = (int(255 * bbox_color[0]), int(255 * bbox_color[1]), int(255 * bbox_color[2]))
if num_channels == 4:
outline = (
int(255 * bbox_color[0]),
int(255 * bbox_color[1]),
int(255 * bbox_color[2]),
int(255 * bbox_color[3]),
)
rgb_img_draw.rectangle([(bbox_2d[6], bbox_2d[7]), (bbox_2d[8], bbox_2d[9])], outline=outline, width=2)
bboxes_2d_rgb = np.array(rgb_img)
return bboxes_2d_rgb
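# ---------------------------------------------------------------------------
# Editor's sketch (assumption, not part of the original module): minimal usage
# of colorize_depth on a depth array saved by the writers; the input path is
# hypothetical.
def example_save_depth_visualization(npy_path="Viewport0/depth/0.npy", out_path="depth_vis.png"):
    depth = np.load(npy_path).astype(np.float32)
    height, width = depth.shape
    # colorize_depth mutates its input, so hand it a copy
    colorized = colorize_depth(depth.copy(), width, height).astype(np.uint8)
    Image.fromarray(colorized, "RGB").save(out_path)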
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/tests/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .test_synthetic_utils import *
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/tests/test_synthetic_utils.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
# NOTE:
# omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
# For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
from omni.isaac.core.utils.viewports import set_camera_view
import omni.kit.test
import omni.kit.commands
import carb
import carb.tokens
import copy
import os
import asyncio
import numpy as np
from pxr import Gf, UsdGeom, UsdPhysics
import random
# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
from omni.isaac.synthetic_utils import SyntheticDataHelper
from omni.isaac.synthetic_utils.writers import NumpyWriter
from omni.isaac.synthetic_utils.writers import KittiWriter
from omni.syntheticdata.tests.utils import add_semantics
from omni.isaac.core.utils.physics import simulate_async
from omni.isaac.core.utils.nucleus import get_assets_root_path
from omni.isaac.core.utils.semantics import add_update_semantics
from omni.isaac.core.utils.extensions import get_extension_path_from_name
from omni.isaac.core.utils.stage import set_stage_up_axis
from omni.isaac.core import PhysicsContext
from omni.physx.scripts.physicsUtils import add_ground_plane
from omni.kit.viewport.utility import get_active_viewport
# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of the module will make it auto-discoverable by omni.kit.test
class TestSyntheticUtils(omni.kit.test.AsyncTestCase):
# Before running each test
async def setUp(self):
await omni.usd.get_context().new_stage_async()
await omni.kit.app.get_app().next_update_async()
self._physics_rate = 60
set_stage_up_axis("z")
PhysicsContext(physics_dt=1.0 / self._physics_rate)
self._time_step = 1.0 / self._physics_rate
carb.settings.get_settings().set_int("/app/runLoops/main/rateLimitFrequency", int(self._physics_rate))
carb.settings.get_settings().set_bool("/app/runLoops/main/rateLimitEnabled", True)
carb.settings.get_settings().set_int("/persistent/simulation/minFrameRate", int(self._physics_rate))
carb.settings.get_settings().set("/app/asyncRendering", False)
carb.settings.get_settings().set("/app/hydraEngine/waitIdle", True)
carb.settings.get_settings().set("/rtx/hydra/enableSemanticSchema", True)
await omni.kit.app.get_app().next_update_async()
# Start Simulation and wait
self._timeline = omni.timeline.get_timeline_interface()
self._viewport_api = get_active_viewport()
self._usd_context = omni.usd.get_context()
self._sd_helper = SyntheticDataHelper()
self._synthetic_utils_path = get_extension_path_from_name("omni.isaac.synthetic_utils")
self._stage = self._usd_context.get_stage()
self._camera_path = "/Camera"
camera = self._stage.DefinePrim(self._camera_path, "Camera")
self._viewport_api.set_active_camera(self._camera_path)
pass
# After running each test
async def tearDown(self):
await omni.kit.app.get_app().next_update_async()
self._timeline.stop()
while omni.usd.get_context().get_stage_loading_status()[2] > 0:
print("tearDown, assets still loading, waiting to finish...")
await omni.kit.app.get_app().next_update_async()
await omni.kit.app.get_app().next_update_async()
pass
async def initialize_sensors(self):
# Initialize syntheticdata sensors
await omni.kit.app.get_app().next_update_async()
await self._sd_helper.initialize_async(
[
"rgb",
"depth",
"instanceSegmentation",
"semanticSegmentation",
"boundingBox2DTight",
"boundingBox2DLoose",
"boundingBox3D",
],
self._viewport_api,
)
await omni.kit.app.get_app().next_update_async()
# Acquire a copy of the ground truth.
def get_groundtruth(self):
gt = self._sd_helper.get_groundtruth(
[
"rgb",
"depthLinear",
"boundingBox2DTight",
"boundingBox2DLoose",
"instanceSegmentation",
"semanticSegmentation",
"boundingBox3D",
"camera",
"pose",
],
self._viewport_api,
verify_sensor_init=False,
)
return copy.deepcopy(gt)
async def load_robot_scene(self):
assets_root_path = get_assets_root_path()
if assets_root_path is None:
carb.log_error("Could not find Isaac Sim assets folder")
return
robot_usd = assets_root_path + "/Isaac/Robots/Carter/carter_v1.usd"
add_ground_plane(self._stage, "/physics/groundPlane", "Z", 1000.0, Gf.Vec3f(0.0, 0, -0.25), Gf.Vec3f(1.0))
# setup high-level robot prim
self.prim = self._stage.DefinePrim("/robot", "Xform")
self.prim.GetReferences().AddReference(robot_usd)
add_semantics(self.prim, "robot")
rot_mat = Gf.Matrix3d(Gf.Rotation((0, 0, 1), 90))
omni.kit.commands.execute(
"TransformPrimCommand",
path=self.prim.GetPath(),
old_transform_matrix=None,
new_transform_matrix=Gf.Matrix4d().SetRotate(rot_mat).SetTranslateOnly(Gf.Vec3d(0, -0.64, 0)),
)
# setup scene camera
set_camera_view([3.00, 3.0, 3.00], [0, -0.64, 0], self._camera_path, self._viewport_api)
await self.initialize_sensors()
# Unit test for sensor groundtruth
async def test_groundtruth(self):
await self.load_robot_scene()
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
await simulate_async(1.0)
await omni.syntheticdata.sensors.next_sensor_data_async(self._viewport_api)
gt = self.get_groundtruth()
# Validate Depth groundtruth
gt_depth = gt["depthLinear"]
self.assertAlmostEqual(np.min(gt_depth), 5.11157, delta=0.1)
self.assertAlmostEqual(np.max(gt_depth), 7.4313293, delta=0.1)
# Validate 2D BBox groundtruth
gt_bbox2d = gt["boundingBox2DTight"]
self.assertEqual(len(gt_bbox2d), 1)
self.assertAlmostEqual(gt_bbox2d[0][6], 432, delta=2)
self.assertAlmostEqual(gt_bbox2d[0][7], 138, delta=2)
self.assertAlmostEqual(gt_bbox2d[0][8], 844, delta=2)
self.assertAlmostEqual(gt_bbox2d[0][9], 542, delta=2)
# Validate semantic segmentation groundtruth - 0 (unlabeled) and 1 (robot)
gt_semantic = gt["semanticSegmentation"]
self.assertEqual(len(np.unique(gt_semantic)), 2)
user_semantic_label_map = {"robot": 4, "cylinder": 5, "cube": 6}
mapped_data = self._sd_helper.get_mapped_semantic_data(gt_semantic, user_semantic_label_map, True)
unique_data = np.unique(mapped_data)
self.assertEqual(unique_data[0], 0)
self.assertEqual(unique_data[1], 4)
# Validate 3D BBox groundtruth
gt_bbox3d = gt["boundingBox3D"]
self.assertEqual(len(gt_bbox3d), 1)
self.assertAlmostEqual(gt_bbox3d[0][6], -0.43041847, delta=0.01)
self.assertAlmostEqual(gt_bbox3d[0][7], -0.31312422, delta=0.01)
self.assertAlmostEqual(gt_bbox3d[0][8], -0.25173292, delta=0.01)
self.assertAlmostEqual(gt_bbox3d[0][9], 0.24220554, delta=0.01)
self.assertAlmostEqual(gt_bbox3d[0][10], 0.3131649, delta=0.01)
self.assertAlmostEqual(gt_bbox3d[0][11], 0.4119104, delta=0.01)
# Validate camera groundtruth - position, fov, focal length, aperture
gt_camera = gt["camera"]
gt_camera_trans = gt_camera["pose"][3, :3]
self.assertAlmostEqual(gt_camera_trans[0], 3.000, delta=0.001)
self.assertAlmostEqual(gt_camera_trans[1], 3.000, delta=0.001)
self.assertAlmostEqual(gt_camera_trans[2], 3.000, delta=0.001)
self.assertEqual(gt_camera["resolution"]["width"], 1280)
self.assertEqual(gt_camera["resolution"]["height"], 720)
self.assertAlmostEqual(gt_camera["fov"], 0.4131223226073451, 1e-5)
self.assertAlmostEqual(gt_camera["focal_length"], 50.0, 1e-5)
self.assertAlmostEqual(gt_camera["horizontal_aperture"], 20.954999923706055, 1e-2)
# Validate pose groundtruth - prim path, semantic label, position
gt_pose = gt["pose"]
self.assertEqual(len(gt_pose), 1)
self.assertEqual(gt_pose[0][0], "/robot")
self.assertEqual(gt_pose[0][2], "robot")
gt_pose_trans = (gt_pose[0])[3][3, :3]
self.assertAlmostEqual(gt_pose_trans[0], 0.0, delta=0.001)
self.assertAlmostEqual(gt_pose_trans[1], -0.640, delta=0.001)
self.assertAlmostEqual(gt_pose_trans[2], 0.0, delta=0.001)
pass
# Unit test for data writer
async def test_writer(self):
await self.load_robot_scene()
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
await simulate_async(1.0)
await omni.kit.app.get_app().next_update_async()
viewport_window = omni.kit.viewport.utility.get_active_viewport_window()
# Setting up config for writer
sensor_settings = {}
sensor_settings_viewport = {"rgb": {"enabled": True}}
viewport_name = viewport_window.title
sensor_settings[viewport_name] = copy.deepcopy(sensor_settings_viewport)
# Initialize data writer
output_folder = os.getcwd() + "/output"
data_writer = NumpyWriter(output_folder, 4, 100, sensor_settings)
data_writer.start_threads()
# Get rgb groundtruth
gt = self._sd_helper.get_groundtruth(["rgb"], self._viewport_api, verify_sensor_init=False)
# Write rgb groundtruth
image_id = 1
groundtruth = {"METADATA": {"image_id": str(image_id), "viewport_name": viewport_name}, "DATA": {}}
groundtruth["DATA"]["RGB"] = gt["rgb"]
data_writer.q.put(groundtruth)
# Validate output file
output_file_path = os.path.join(output_folder, viewport_name, "rgb", str(image_id) + ".png")
data_writer.stop_threads()
await asyncio.sleep(0.1)
self.assertEqual(os.path.isfile(output_file_path), True)
pass
# Unit test for data writer
async def test_kitti_writer(self):
await self.load_robot_scene()
self._timeline.play()
await omni.kit.app.get_app().next_update_async()
await simulate_async(1.0)
await omni.kit.app.get_app().next_update_async()
viewport_window = omni.kit.viewport.utility.get_active_viewport_window()
# Setting up config for writer
sensor_settings = {}
sensor_settings_viewport = {"rgb": {"enabled": True}}
viewport_name = viewport_window.title
sensor_settings[viewport_name] = copy.deepcopy(sensor_settings_viewport)
# Initialize data writer
output_folder_tight = os.getcwd() + "/kitti_tight"
output_folder_loose = os.getcwd() + "/kitti_loose"
data_writer_tight = KittiWriter(
output_folder_tight, 4, 100, train_size=1, classes="robot", bbox_type="BBOX2DTIGHT"
)
data_writer_tight.start_threads()
data_writer_loose = KittiWriter(
output_folder_loose, 4, 100, train_size=1, classes="robot", bbox_type="BBOX2DLOOSE"
)
data_writer_loose.start_threads()
# Get rgb groundtruth
gt = self._sd_helper.get_groundtruth(
["rgb", "boundingBox2DTight", "boundingBox2DLoose"], self._viewport_api, verify_sensor_init=False
)
# Write rgb groundtruth
image_id = 0
groundtruth = {
"METADATA": {
"image_id": str(image_id),
"viewport_name": viewport_name,
"BBOX2DTIGHT": {},
"BBOX2DLOOSE": {},
},
"DATA": {},
}
image = gt["rgb"]
groundtruth["DATA"]["RGB"] = image
groundtruth["DATA"]["BBOX2DTIGHT"] = gt["boundingBox2DTight"]
groundtruth["METADATA"]["BBOX2DTIGHT"]["WIDTH"] = image.shape[1]
groundtruth["METADATA"]["BBOX2DTIGHT"]["HEIGHT"] = image.shape[0]
groundtruth["DATA"]["BBOX2DLOOSE"] = gt["boundingBox2DLoose"]
groundtruth["METADATA"]["BBOX2DLOOSE"]["WIDTH"] = image.shape[1]
groundtruth["METADATA"]["BBOX2DLOOSE"]["HEIGHT"] = image.shape[0]
for f in range(2):
groundtruth["METADATA"]["image_id"] = image_id
data_writer_tight.q.put(copy.deepcopy(groundtruth))
data_writer_loose.q.put(copy.deepcopy(groundtruth))
image_id = image_id + 1
# Validate output file
data_writer_tight.stop_threads()
data_writer_loose.stop_threads()
await asyncio.sleep(0.1)
for output_folder in [output_folder_tight, output_folder_loose]:
self.assertEqual(os.path.isfile(os.path.join(output_folder + "/training/image_2", str(0) + ".png")), True)
self.assertEqual(os.path.isfile(os.path.join(output_folder + "/training/label_2", str(0) + ".txt")), True)
self.assertEqual(os.path.isfile(os.path.join(output_folder + "/testing/image_2", str(1) + ".png")), True)
pass
# create a cube.
async def add_cube(self, path, size, offset):
cubeGeom = UsdGeom.Cube.Define(self._stage, path)
cubePrim = self._stage.GetPrimAtPath(path)
# use add_semantics to set its class to Cube
add_semantics(cubePrim, "cube")
cubeGeom.CreateSizeAttr(size)
cubeGeom.ClearXformOpOrder()
cubeGeom.AddTranslateOp().Set(offset)
await omni.kit.app.get_app().next_update_async()
UsdPhysics.CollisionAPI.Apply(cubePrim)
return cubePrim, cubeGeom
# create a scene with a cube.
async def load_cube_scene(self):
# ensure we are done with all of scene setup.
await omni.kit.app.get_app().next_update_async()
# check units
meters_per_unit = UsdGeom.GetStageMetersPerUnit(self._stage)
add_ground_plane(self._stage, "/physics/groundPlane", "Z", 1000.0, Gf.Vec3f(0.0, 0, -25), Gf.Vec3f(1.0))
# Add a cube at a "close" location
self.cube_location = Gf.Vec3f(-300.0, 0.0, 50.0)
self.cube, self.cube_geom = await self.add_cube("/World/Cube", 100.0, self.cube_location)
# setup scene camera
set_camera_view([1000, 1000, 1000], [0, 0, 0], self._camera_path, self._viewport_api)
await self.initialize_sensors()
# Unit test for sensor groundtruth
async def frame_lag_test(self, move):
# start the scene
# wait for update
move(Gf.Vec3f(random.random() * 100, random.random() * 100, random.random() * 100))
await omni.syntheticdata.sensors.next_sensor_data_async(self._viewport_api)
# grab ground truth
gt1 = self.get_groundtruth()
# move the cube
move(Gf.Vec3f(random.random() * 100, random.random() * 100, random.random() * 100))
# wait for update
await omni.syntheticdata.sensors.next_sensor_data_async(self._viewport_api)
# grab ground truth
gt2 = self.get_groundtruth()
await omni.syntheticdata.sensors.next_sensor_data_async(self._viewport_api)
gt3 = self.get_groundtruth()
# ensure segmentation is identical
gt_seg1 = gt1["semanticSegmentation"]
gt_seg2 = gt2["semanticSegmentation"]
self.assertEqual(len(np.unique(gt_seg1)), len(np.unique(gt_seg2)))
# the cube 3d bboxes should be different after update
gt_box3d1 = gt1["boundingBox3D"]
gt_box3d2 = gt2["boundingBox3D"]
gt_box3d3 = gt3["boundingBox3D"]
# check the list size
self.assertEqual(len(gt_box3d1), len(gt_box3d2))
# check the corners; they must move between frames for the test to pass.
self.assertNotEqual(gt_box3d1["corners"].tolist(), gt_box3d2["corners"].tolist())
# Should be no change between these two frames
self.assertEqual(gt_box3d2["corners"].tolist(), gt_box3d3["corners"].tolist())
await omni.syntheticdata.sensors.next_sensor_data_async(self._viewport_api)
# stop the scene
pass
# Test lag by executing a command
async def test_oneframelag_kitcommand(self):
await self.load_cube_scene()
def set_prim_pose(location):
omni.kit.commands.execute(
"TransformPrimCommand",
path=self.cube.GetPath(),
old_transform_matrix=None,
new_transform_matrix=Gf.Matrix4d()
.SetRotate(Gf.Matrix3d(Gf.Rotation((0, 0, 1), 90)))
.SetTranslateOnly(Gf.Vec3d(location)),
)
for frame in range(50):
await self.frame_lag_test(set_prim_pose)
pass
# Test lag using a USD prim.
async def test_oneframelag_usdprim(self):
await self.load_cube_scene()
def set_prim_pose(location):
properties = self.cube.GetPropertyNames()
if "xformOp:translate" in properties:
translate_attr = self.cube.GetAttribute("xformOp:translate")
translate_attr.Set(location)
for frame in range(50):
await self.frame_lag_test(set_prim_pose)
pass
async def test_remap_semantics(self):
set_camera_view([1000, 1000, 1000], [0, 0, 0], self._camera_path, self._viewport_api)
usd_path = self._synthetic_utils_path + "/data/usd/tests/nested_semantics.usd"
self.prim = self._stage.DefinePrim("/test_nested", "Xform")
self.prim.GetReferences().AddReference(usd_path)
await omni.kit.app.get_app().next_update_async()
await self.initialize_sensors()
gt = self.get_groundtruth()
ids = self._sd_helper.get_semantic_ids(gt["semanticSegmentation"])
labels = self._sd_helper.get_semantic_label_map(ids)
# make sure remapping with remap_using_base_class=True works even when there are no nested classes
mapped_id_a = self._sd_helper.get_semantic_ids(
self._sd_helper.get_mapped_semantic_data(
gt["semanticSegmentation"], {"red": 1, "green": 10, "blue": 100}, remap_using_base_class=True
)
)
mapped_id_b = self._sd_helper.get_semantic_ids(
self._sd_helper.get_mapped_semantic_data(
gt["semanticSegmentation"], {"red": 1, "green": 10, "blue": 100}, remap_using_base_class=False
)
)
# if labels aren't nested, they should remain the same
unique_data_a = np.unique(mapped_id_a).tolist()
unique_data_b = np.unique(mapped_id_b).tolist()
self.assertListEqual(unique_data_a, unique_data_b)
self.assertEqual(unique_data_a[0], 0)
self.assertEqual(unique_data_a[1], 1)
self.assertEqual(unique_data_a[2], 10)
self.assertEqual(unique_data_a[3], 100)
async def test_nested_semantics(self):
set_camera_view([1000, 1000, 1000], [0, 0, 0], self._camera_path, self._viewport_api)
usd_path = self._synthetic_utils_path + "/data/usd/tests/nested_semantics.usd"
self.prim = self._stage.DefinePrim("/test_nested", "Xform")
add_update_semantics(self.prim, "combined")
self.prim.GetReferences().AddReference(usd_path)
await omni.kit.app.get_app().next_update_async()
await self.initialize_sensors()
gt = self.get_groundtruth()
ids = self._sd_helper.get_semantic_ids(gt["semanticSegmentation"])
labels = self._sd_helper.get_semantic_label_map(ids)
mapped_id_a = self._sd_helper.get_semantic_ids(
self._sd_helper.get_mapped_semantic_data(
gt["semanticSegmentation"], {"combined": 99}, remap_using_base_class=True
)
)
mapped_id_b = self._sd_helper.get_semantic_ids(
self._sd_helper.get_mapped_semantic_data(
gt["semanticSegmentation"], {"combined": 99}, remap_using_base_class=False
)
)
unique_data_a = np.unique(mapped_id_a).tolist()
unique_data_b = np.unique(mapped_id_b).tolist()
self.assertEqual(unique_data_a[0], 0)
self.assertEqual(unique_data_a[1], 99)
# remap_using_base_class false should result in the mapping not changing
self.assertEqual(unique_data_b[0], 0)
self.assertEqual(unique_data_b[1], 1)
self.assertEqual(unique_data_b[2], 2)
self.assertEqual(unique_data_b[3], 3)
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/writers/numpy.py | # Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Helper class for writing groundtruth data offline in numpy format.
"""
import copy
import omni
import os
import numpy as np
from PIL import Image
from .base import BaseWriter
from omni.isaac.core.utils.viewports import get_viewport_names
class NumpyWriter(BaseWriter):
def __init__(self, data_dir, num_worker_threads, max_queue_size=500, sensor_settings=None):
BaseWriter.__init__(self, data_dir, num_worker_threads, max_queue_size)
from omni.isaac.synthetic_utils import visualization
self.visualization = visualization
self.create_output_folders(sensor_settings)
def worker(self):
"""Processes task from queue. Each tasks contains groundtruth data and metadata which is used to transform the output and write it to disk."""
while True:
groundtruth = self.q.get()
if groundtruth is None:
break
filename = groundtruth["METADATA"]["image_id"]
viewport_name = groundtruth["METADATA"]["viewport_name"]
for gt_type, data in groundtruth["DATA"].items():
if gt_type == "RGB":
self.save_image(viewport_name, gt_type, data, filename)
elif gt_type == "DEPTH":
if groundtruth["METADATA"]["DEPTH"]["NPY"]:
self.depth_folder = self.data_dir + "/" + str(viewport_name) + "/depth/"
np.save(self.depth_folder + filename + ".npy", data)
if groundtruth["METADATA"]["DEPTH"]["COLORIZE"]:
self.save_image(viewport_name, gt_type, data, filename)
elif gt_type == "DEPTHLINEAR":
if groundtruth["METADATA"]["DEPTHLINEAR"]["NPY"]:
self.depthLinear_folder = self.data_dir + "/" + str(viewport_name) + "/depthLinear/"
np.save(self.depthLinear_folder + filename + ".npy", data)
if groundtruth["METADATA"]["DEPTHLINEAR"]["COLORIZE"]:
self.save_image(viewport_name, gt_type, data, filename)
elif gt_type == "INSTANCE":
self.save_segmentation(
viewport_name,
gt_type,
data,
filename,
groundtruth["METADATA"]["INSTANCE"]["WIDTH"],
groundtruth["METADATA"]["INSTANCE"]["HEIGHT"],
groundtruth["METADATA"]["INSTANCE"]["COLORIZE"],
groundtruth["METADATA"]["INSTANCE"]["MAPPINGS"],
groundtruth["METADATA"]["INSTANCE"]["NPY"],
)
elif gt_type == "SEMANTIC":
self.save_segmentation(
viewport_name,
gt_type,
data,
filename,
groundtruth["METADATA"]["SEMANTIC"]["WIDTH"],
groundtruth["METADATA"]["SEMANTIC"]["HEIGHT"],
groundtruth["METADATA"]["SEMANTIC"]["COLORIZE"],
groundtruth["METADATA"]["SEMANTIC"]["MAPPINGS"],
groundtruth["METADATA"]["SEMANTIC"]["NPY"],
)
elif gt_type in ["BBOX2DTIGHT", "BBOX2DLOOSE"]:
self.save_bbox(
viewport_name,
gt_type,
data,
filename,
groundtruth["METADATA"][gt_type]["COLORIZE"],
groundtruth["DATA"]["RGB"],
groundtruth["METADATA"][gt_type]["NPY"],
)
elif gt_type in ["BBOX3D"]:
self.save_bbox(
viewport_name,
gt_type,
data,
filename,
groundtruth["METADATA"][gt_type]["COLORIZE"],
groundtruth["METADATA"]["BBOX3D_IMAGE"],
groundtruth["METADATA"][gt_type]["NPY"],
)
elif gt_type in ["MOTIONVECTOR"]:
self.save_motion(
viewport_name,
gt_type,
data,
filename,
groundtruth["METADATA"][gt_type]["COLORIZE"],
groundtruth["DATA"]["RGB"],
groundtruth["METADATA"][gt_type]["NPY"],
)
elif gt_type == "CAMERA":
self.camera_folder = self.data_dir + "/" + str(viewport_name) + "/camera/"
np.save(self.camera_folder + filename + ".npy", data)
elif gt_type == "POSES":
self.poses_folder = self.data_dir + "/" + str(viewport_name) + "/poses/"
np.save(self.poses_folder + filename + ".npy", data)
elif gt_type == "NORMALS":
self.normals_folder = self.data_dir + "/" + str(viewport_name) + "/normals/"
np.save(self.normals_folder + filename + ".npy", data)
else:
raise NotImplementedError
self.q.task_done()
def save_motion(
self, viewport_name, data_type, data, filename, width=1280, height=720, display_rgb=True, save_npy=True
):
self.motion_folder = self.data_dir + "/" + str(viewport_name) + "/motion-vector/"
if save_npy:
np.save(self.motion_folder + filename + ".npy", data)
def save_segmentation(
self, viewport_name, data_type, data, filename, width=1280, height=720, display_rgb=True, mappings=True,
save_npy=True):
self.instance_folder = self.data_dir + "/" + str(viewport_name) + "/instance/"
self.semantic_folder = self.data_dir + "/" + str(viewport_name) + "/semantic/"
# Save ground truth data locally as npy
if not mappings:
data = data[0]
if data_type == "INSTANCE" and save_npy:
np.save(self.instance_folder + filename + ".npy", data)
if data_type == "SEMANTIC" and save_npy:
np.save(self.semantic_folder + filename + ".npy", data)
if mappings:
data = data[0]
if display_rgb:
image_data = np.frombuffer(data, dtype=np.uint8).reshape(*data.shape, -1)
num_colors = 50 if data_type == "SEMANTIC" else None
color_image = self.visualization.colorize_segmentation(image_data, width, height, 3, num_colors)
# color_image = visualize.colorize_instance(image_data)
color_image_rgb = Image.fromarray(color_image, "RGB")
if data_type == "INSTANCE":
color_image_rgb.save(f"{self.instance_folder}/{filename}.png")
if data_type == "SEMANTIC":
color_image_rgb.save(f"{self.semantic_folder}/{filename}.png")
def save_image(self, viewport_name, img_type, image_data, filename):
self.rgb_folder = self.data_dir + "/" + str(viewport_name) + "/rgb/"
self.depth_folder = self.data_dir + "/" + str(viewport_name) + "/depth/"
self.depthLinear_folder = self.data_dir + "/" + str(viewport_name) + "/depthLinear/"
if img_type == "RGB":
# Save ground truth data locally as png
rgb_img = Image.fromarray(image_data, "RGBA")
rgb_img.save(f"{self.rgb_folder}/{filename}.png")
elif img_type == "DEPTH" or img_type == "DEPTHLINEAR":
# Scale depth values before clipping/normalizing for a better-contrast visualization
image_data = image_data * 100
# Save ground truth data locally as png
image_data[image_data == 0.0] = 1e-5
image_data = np.clip(image_data, 0, 255)
image_data -= np.min(image_data)
if np.max(image_data) > 0:
image_data /= np.max(image_data)
depth_img = Image.fromarray((image_data * 255.0).astype(np.uint8))
if img_type == "DEPTH":
depth_img.save(f"{self.depth_folder}/{filename}.png")
if img_type == "DEPTHLINEAR":
depth_img.save(f"{self.depthLinear_folder}/{filename}.png")
def save_bbox(self, viewport_name, data_type, data, filename, display_rgb=True, rgb_data=None, save_npy=True):
self.bbox_2d_tight_folder = self.data_dir + "/" + str(viewport_name) + "/bbox_2d_tight/"
self.bbox_2d_loose_folder = self.data_dir + "/" + str(viewport_name) + "/bbox_2d_loose/"
self.bbox_3d_folder = self.data_dir + "/" + str(viewport_name) + "/bbox_3d/"
# Save ground truth data locally as npy
if data_type == "BBOX2DTIGHT" and save_npy:
np.save(self.bbox_2d_tight_folder + filename + ".npy", data)
if data_type == "BBOX2DLOOSE" and save_npy:
np.save(self.bbox_2d_loose_folder + filename + ".npy", data)
if data_type == "BBOX3D" and save_npy:
np.save(self.bbox_3d_folder + filename + ".npy", data)
if display_rgb and rgb_data is not None:
if "2D" in data_type:
color_image = self.visualization.colorize_bboxes(data, rgb_data)
color_image_rgb = Image.fromarray(color_image, "RGBA")
if data_type == "BBOX2DTIGHT":
color_image_rgb.save(f"{self.bbox_2d_tight_folder}/{filename}.png")
if data_type == "BBOX2DLOOSE":
color_image_rgb.save(f"{self.bbox_2d_loose_folder}/{filename}.png")
if "3D" in data_type:
rgb_img = Image.fromarray(rgb_data, "RGBA")
rgb_img.save(f"{self.bbox_3d_folder}/{filename}.png")
def create_output_folders(self, sensor_settings=None):
"""Checks if the sensor output folder corresponding to each viewport is created. If not, it creates them."""
if not os.path.exists(self.data_dir):
os.mkdir(self.data_dir)
if sensor_settings is None:
sensor_settings = dict()
viewport_names = get_viewport_names()
sensor_settings_viewport = {
"rgb": {"enabled": True},
"depth": {"enabled": True, "colorize": True, "npy": True},
"depthLinear": {"enabled": True, "colorize": True, "npy": True},
"instance": {"enabled": True, "colorize": True, "npy": True},
"semantic": {"enabled": True, "colorize": True, "npy": True},
"bbox_2d_tight": {"enabled": True, "colorize": True, "npy": True},
"bbox_2d_loose": {"enabled": True, "colorize": True, "npy": True},
"camera": {"enabled": True, "npy": True},
"poses": {"enabled": True, "npy": True},
"motion-vector": {"enabled": True, "npy": True, "colorize": True},
"bbox_3d": {"enabled": True, "npy": True, "colorize": True},
"normals": {"enabled": True, "npy": True, "colorize": True},
}
for name in viewport_names:
sensor_settings[name] = copy.deepcopy(sensor_settings_viewport)
for viewport_name in sensor_settings:
viewport_folder = self.data_dir + "/" + str(viewport_name)
if not os.path.exists(viewport_folder):
os.mkdir(viewport_folder)
for sensor_name in sensor_settings[viewport_name]:
if sensor_settings[viewport_name][sensor_name]["enabled"]:
sensor_folder = self.data_dir + "/" + str(viewport_name) + "/" + str(sensor_name)
if not os.path.exists(sensor_folder):
os.mkdir(sensor_folder)
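# ---------------------------------------------------------------------------
# Editor's sketch (mirrors the extension's unit tests; names are illustrative):
# a minimal round trip through NumpyWriter. The queued dict follows the layout
# consumed by worker() above.
def example_numpy_writer_usage(rgb_array, viewport_name="Viewport", out_dir="./output"):
    sensor_settings = {viewport_name: {"rgb": {"enabled": True}}}
    writer = NumpyWriter(out_dir, num_worker_threads=2, max_queue_size=100, sensor_settings=sensor_settings)
    writer.start_threads()
    writer.q.put({"METADATA": {"image_id": "0", "viewport_name": viewport_name}, "DATA": {"RGB": rgb_array}})
    writer.stop_threads()  # blocks until the queue is drained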
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/writers/base.py | # Copyright (c) 2020-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
"""Base class for writing groundtruth data offline.
"""
import atexit
import queue
import threading
class BaseWriter:
def __init__(self, data_dir, num_worker_threads, max_queue_size=500):
atexit.register(self.stop_threads)
# Threading for multiple scenes
self.num_worker_threads = num_worker_threads
# Initialize queue with a specified size
self.q = queue.Queue(max_queue_size)
self.data_dir = data_dir
self.threads = []
def start_threads(self):
"""Start worker threads."""
for _ in range(self.num_worker_threads):
t = threading.Thread(target=self.worker, daemon=True)
t.start()
self.threads.append(t)
def stop_threads(self):
"""Waits for all tasks to be completed before stopping worker threads."""
print("Finish writing data...")
# Block until all tasks are done
self.q.join()
print("Done.")
def worker(self):
"""Processes task from queue. Each tasks contains groundtruth data and metadata which is used to transform the output and write it to disk."""
pass
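# ---------------------------------------------------------------------------
# Editor's sketch (assumption, not part of the original API): BaseWriter is a
# producer/consumer helper. Subclasses override worker() to drain self.q; the
# concrete writers in this package treat a None item as a stop sentinel.
class ExamplePrintWriter(BaseWriter):
    def worker(self):
        while True:
            item = self.q.get()
            if item is None:
                break
            print("writing image", item["METADATA"]["image_id"])
            self.q.task_done()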
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/writers/__init__.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
from .base import BaseWriter
from .numpy import NumpyWriter
from .kitti import KittiWriter
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/omni/isaac/synthetic_utils/writers/kitti.py | # Copyright (c) 2018-2021, NVIDIA CORPORATION. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
#
"""Helper class for writing groundtruth data offline in kitti format.
"""
import csv
import os
from PIL import Image
from .base import BaseWriter
import carb
class KittiWriter(BaseWriter):
    def __init__(
        self,
        data_dir="kitti_data",
        num_worker_threads=4,
        max_queue_size=500,
        train_size=10,
        classes=[],
        bbox_type="BBOX2DLOOSE",
    ):
        BaseWriter.__init__(self, data_dir, num_worker_threads, max_queue_size)
        self.create_output_folders()
        self.train_size = train_size
        self.classes = classes
        self.bbox_type = bbox_type
        if self.bbox_type not in ("BBOX2DLOOSE", "BBOX2DTIGHT"):
            carb.log_error(
                f"bbox_type must be BBOX2DLOOSE or BBOX2DTIGHT, it is currently set to {self.bbox_type} which is not supported, defaulting to BBOX2DLOOSE"
            )
            self.bbox_type = "BBOX2DLOOSE"
    def worker(self):
        """Processes tasks from the queue. Each task contains groundtruth data and metadata which is used to transform the output and write it to disk."""
        while True:
            data = self.q.get()
            if data is None:
                break
            else:
                self.save_image(data)
                if int(data["METADATA"]["image_id"]) < self.train_size:
                    self.save_label(data)
            self.q.task_done()
    def save_label(self, data):
        """Saves the labels for the 2D bounding boxes in KITTI format."""
        label_set = []
        viewport_width = data["METADATA"][self.bbox_type]["WIDTH"]
        viewport_height = data["METADATA"][self.bbox_type]["HEIGHT"]
        for box in data["DATA"][self.bbox_type]:
            label = []
            # 2D bounding box points
            x_min, y_min, x_max, y_max = int(box[6]), int(box[7]), int(box[8]), int(box[9])
            # Check if bounding boxes are in the viewport
            if (
                x_min < 0
                or y_min < 0
                or x_max > viewport_width
                or y_max > viewport_height
                or x_min > viewport_width
                or y_min > viewport_height
                or y_max < 0
                or x_max < 0
            ):
                continue
            semantic_label = str(box[2])
            # Skip label if not in class list
            if self.classes != [] and semantic_label not in self.classes:
                continue
            # Adding KITTI data. NOTE: only the class and the 2D bbox coordinates are filled in
            label.append(semantic_label)
            label.append(f"{0.00:.2f}")
            label.append(3)
            label.append(f"{0.00:.2f}")
            label.append(x_min)
            label.append(y_min)
            label.append(x_max)
            label.append(y_max)
            for _ in range(7):
                label.append(f"{0.00:.2f}")
            label_set.append(label)
        with open(os.path.join(self.train_label_dir, f"{data['METADATA']['image_id']}.txt"), "w") as annotation_file:
            writer = csv.writer(annotation_file, delimiter=" ")
            writer.writerows(label_set)
    def save_image(self, data):
        """Saves the RGB image in the correct directory for KITTI."""
        if int(data["METADATA"]["image_id"]) < self.train_size:
            rgb_img = Image.fromarray(data["DATA"]["RGB"], "RGBA").convert("RGB")
            rgb_img.save(f"{self.train_folder}/image_2/{data['METADATA']['image_id']}.png")
        else:
            rgb_img = Image.fromarray(data["DATA"]["RGB"], "RGBA").convert("RGB")
            rgb_img.save(f"{self.test_folder}/image_2/{data['METADATA']['image_id']}.png")
    def create_output_folders(self):
        """Checks if the output folders are created. If not, it creates them."""
        if not os.path.exists(self.data_dir):
            os.mkdir(self.data_dir)
        self.train_folder = os.path.join(self.data_dir, "training")
        self.test_folder = os.path.join(self.data_dir, "testing")
        if not os.path.exists(self.train_folder):
            os.mkdir(self.train_folder)
        if not os.path.exists(self.test_folder):
            os.mkdir(self.test_folder)
        self.train_img_dir = os.path.join(self.train_folder, "image_2")
        if not os.path.exists(self.train_img_dir):
            os.mkdir(self.train_img_dir)
        self.train_label_dir = os.path.join(self.train_folder, "label_2")
        if not os.path.exists(self.train_label_dir):
            os.mkdir(self.train_label_dir)
        if not os.path.exists(os.path.join(self.test_folder, "image_2")):
            os.mkdir(os.path.join(self.test_folder, "image_2"))
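
# Usage sketch (an illustration inferred from the dictionary accesses above,
# not part of the original file). The queued `data` dict is expected to look
# roughly like:
#
#   writer = KittiWriter(data_dir="kitti_data", num_worker_threads=4, train_size=10)
#   writer.start_threads()
#   writer.q.put({
#       "METADATA": {
#           "image_id": "0",  # ids below train_size also get label files
#           "BBOX2DLOOSE": {"WIDTH": 1280, "HEIGHT": 720},  # viewport size
#       },
#       "DATA": {
#           "RGB": rgb_array,       # HxWx4 uint8 RGBA array
#           "BBOX2DLOOSE": bboxes,  # rows with class at [2], bbox at [6:10]
#       },
#   })
#   writer.stop_threads()  # blocks until the queue is drained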
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/docs/CHANGELOG.md | # Changelog
## [0.4.3] - 2023-01-19
### Fixed
- test errors due to missing button
## [0.4.2] - 2022-10-17
### Fixed
- pass viewport api to next_sensor_data_async
## [0.4.1] - 2022-09-07
### Fixed
- Fixes for kit 103.5
## [0.4.0] - 2022-09-01
### Changed
- remove non synthetic data related legacy viewport calls
## [0.3.5] - 2022-08-12
### Removed
- removed isaac replicator style DOPE Writer
## [0.3.4] - 2022-08-11
### Removed
- removed isaac replicator style YCB Video writer
- YCB Video writer using OV Replicator style added to omni.replicator.isaac
## [0.3.3] - 2022-08-08
### Changed
- Raise exception in DOPE writer when s3 bucket name is invalid
## [0.3.2] - 2022-08-04
### Added
- Change output folder structure for DOPE writer
## [0.3.1] - 2022-07-29
### Added
- Write to s3 bucket for DOPE Writer
## [0.3.0] - 2022-07-11
### Added
- DOPE Writer
- Occlusion sensor in SyntheticDataHelper
- initialize_async
### Fixed
- get_groundtruth works in an async function
## [0.2.1] - 2022-05-05
### Changed
- Modify the initialize() function to wait until sensor data is available
## [0.2.0] - 2022-04-05
### Added
- YCB Video writer
## [0.1.7] - 2022-03-16
### Changed
- Replaced find_nucleus_server() with get_assets_root_path()
## [0.1.6] - 2022-01-24
### Changed
- updated code to match API changes in omni.syntheticdata
## [0.1.5] - 2021-11-01
### Added
- get_mapped_semantic_data
- get_semantic_label_map
- get_semantic_id_map
## [0.1.4] - 2021-10-18
### Added
- kitti writer supports both loose and tight 2d bounding boxes for labels
## [0.1.3] - 2021-10-09
### Changed
- Restructure files in extension
## [0.1.2] - 2021-08-13
### Removed
- Removed domain randomization helper file. Use commands directly.
- Moved shapenet utility file to omni.isaac.shapenet.
## [0.1.1] - 2021-08-02
### Added
- Unit tests
- Updated API
## [0.1.0] - 2021-07-08
### Added
- Initial version of Isaac Sim Synthetic Utils Extension
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/docs/README.md | # Usage
To enable this extension, go to the Extension Manager menu and enable the omni.isaac.synthetic_utils extension.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.synthetic_utils/docs/index.rst | Tools for Generating Synthetic Data [omni.isaac.synthetic_utils]
################################################################
.. autoclass:: omni.isaac.synthetic_utils.SyntheticDataHelper
:members:
:undoc-members:
:no-show-inheritance:
.. autoclass:: omni.isaac.synthetic_utils.writers.NumpyWriter
:members:
:undoc-members:
:exclude-members: colorize_bboxes, colorize_segmentation, random_colours, save_bbox, save_image, save_segmentation
:no-show-inheritance:
.. autoclass:: omni.isaac.synthetic_utils.writers.KittiWriter
:members:
:undoc-members:
:exclude-members: save_label, save_image
:no-show-inheritance:
.. automodule:: omni.isaac.synthetic_utils.visualization
:members:
:undoc-members:
:exclude-members: plot_boxes
:no-show-inheritance: |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/omni.isaac.shapenet-LICENSE.md | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited. |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libgcc-LICENSE.txt | GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
GCC RUNTIME LIBRARY EXCEPTION
Version 3.1, 31 March 2009
Copyright (C) 2009 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
This GCC Runtime Library Exception ("Exception") is an additional
permission under section 7 of the GNU General Public License, version
3 ("GPLv3"). It applies to a given file (the "Runtime Library") that
bears a notice placed by the copyright holder of the file stating that
the file is governed by GPLv3 along with this Exception.
When you use GCC to compile a program, GCC may combine portions of
certain GCC header files and runtime libraries with the compiled
program. The purpose of this Exception is to allow compilation of
non-GPL (including proprietary) programs to use, in this way, the
header files and runtime libraries covered by this Exception.
0. Definitions.
A file is an "Independent Module" if it either requires the Runtime
Library for execution after a Compilation Process, or makes use of an
interface provided by the Runtime Library, but is not otherwise based
on the Runtime Library.
"GCC" means a version of the GNU Compiler Collection, with or without
modifications, governed by version 3 (or a specified later version) of
the GNU General Public License (GPL) with the option of using any
subsequent versions published by the FSF.
"GPL-compatible Software" is software whose conditions of propagation,
modification and use would permit combination with GCC in accord with
the license of GCC.
"Target Code" refers to output from any compiler for a real or virtual
target processor architecture, in executable form or suitable for
input to an assembler, loader, linker and/or execution
phase. Notwithstanding that, Target Code does not include data in any
format that is used as a compiler intermediate representation, or used
for producing a compiler intermediate representation.
The "Compilation Process" transforms code entirely represented in
non-intermediate languages designed for human-written code, and/or in
Java Virtual Machine byte code, into Target Code. Thus, for example,
use of source code generators and preprocessors need not be considered
part of the Compilation Process, since the Compilation Process can be
understood as starting with the output of the generators or
preprocessors.
A Compilation Process is "Eligible" if it is done using GCC, alone or
with other GPL-compatible software, or if it is done without using any
work based on GCC. For example, using non-GPL-compatible Software to
optimize any GCC intermediate representations would not qualify as an
Eligible Compilation Process.
1. Grant of Additional Permission.
You have permission to propagate a work of Target Code formed by
combining the Runtime Library with Independent Modules, even if such
propagation would otherwise violate the terms of GPLv3, provided that
all Target Code was generated by Eligible Compilation Processes. You
may then convey such a combination under terms of your choice,
consistent with the licensing of the Independent Modules.
2. No Weakening of GCC Copyleft.
The availability of this Exception does not imply any general
presumption that third-party software is unaffected by the copyleft
requirements of the license of GCC.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libflac-LICENSE.txt | Copyright (C) 2000-2009 Josh Coalson
Copyright (C) 2011-2016 Xiph.Org Foundation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libunwind-LICENSE.txt | Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/zlib-LICENSE.txt | zlib.h -- interface of the 'zlib' general purpose compression library
version 1.2.11, January 15th, 2017
Copyright (C) 1995-2017 Jean-loup Gailly and Mark Adler
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
Jean-loup Gailly Mark Adler
[email protected] [email protected] |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/omniverse.discovery.client.c.linux-x86_64-LICENSE.txt | Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/nv_usd-LICENSE.txt | Universal Scene Description (USD) components are licensed under the following terms:
Modified Apache 2.0 License
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor
and its affiliates, except as required to comply with Section 4(c) of
the License and to reproduce the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
============================================================
RapidJSON
============================================================
Tencent is pleased to support the open source community by making RapidJSON available.
Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
If you have downloaded a copy of the RapidJSON binary from Tencent, please note that the RapidJSON binary is licensed under the MIT License.
If you have downloaded a copy of the RapidJSON source code from Tencent, please note that RapidJSON source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of RapidJSON into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within RapidJSON. To avoid the problematic JSON license in your own projects, it's sufficient to exclude the bin/jsonchecker/ directory, as it's the only code under the JSON license.
A copy of the MIT License is included in this file.
Other dependencies and licenses:
Open Source Software Licensed Under the BSD License:
--------------------------------------------------------------------
The msinttypes r29
Copyright (c) 2006-2013 Alexander Chemeris
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Open Source Software Licensed Under the JSON License:
--------------------------------------------------------------------
json.org
Copyright (c) 2002 JSON.org
All Rights Reserved.
JSON_checker
Copyright (c) 2002 JSON.org
All Rights Reserved.
Terms of the JSON License:
---------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Terms of the MIT License:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
============================================================
pygilstate_check
============================================================
The MIT License (MIT)
Copyright (c) 2014, Pankaj Pandey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================
double-conversion
============================================================
Copyright 2006-2011, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
============================================================
OpenEXR/IlmBase/Half
============================================================
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
============================================================
Apple Technical Q&A QA1361 - Detecting the Debugger
https://developer.apple.com/library/content/qa/qa1361/_index.html
============================================================
Sample code project: Detecting the Debugger
Version: 1.0
Abstract: Shows how to determine if code is being run under the debugger.
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
============================================================
LZ4
============================================================
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2017, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://www.lz4.org
- LZ4 source repository : https://github.com/lz4/lz4
============================================================
stb
============================================================
stb_image - v2.19 - public domain image loader - http://nothings.org/stb
no warranty implied; use at your own risk
stb_image_resize - v0.95 - public domain image resizing
by Jorge L Rodriguez (@VinoBS) - 2014
http://github.com/nothings/stb
stb_image_write - v1.09 - public domain - http://nothings.org/stb/stb_image_write.h
writes out PNG/BMP/TGA/JPEG/HDR images to C stdio - Sean Barrett 2010-2015
no warranty implied; use at your own risk
ALTERNATIVE B - Public Domain (www.unlicense.org)
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
software, either in source code form or as a compiled binary, for any purpose,
commercial or non-commercial, and by any means.
In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and to
the detriment of our heirs and successors. We intend this dedication to be an
overt act of relinquishment in perpetuity of all present and future rights to
this software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/forgeaudio-LICENSE.txt | # Forge™ technology components are Copyright © 2000-2017 NVIDIA Corporation.
Forge includes The Better String Library (bstring) Copyright © 2002-2006 Paul Hsieh
Forge include UThash components Copyright © 2003-2011, Troy D. Hanson
iniParser Portions Copyright © 2000 by Nicolas Devillard, used under the MIT License below.
Forge is distributed under the terms of the Forge Technology License listed below.
# NVIDIA Corporation Forge Technology License
Note that this license covers only some portions of the software, and does *NOT* apply to any other components you
may have obtained at the same time. Please see above for more details.
LICENSE AGREEMENT AND DISCLAIMER OF WARRANTY FOR NVIDIA COMPONENTS PLEASE READ THIS LICENSE CAREFULLY BEFORE USING THE SOFTWARE. THIS DOCUMENT IS AN AGREEMENT BETWEEN YOU AND NVIDIA CORPORATION, (THE "COMPANY"). NVIDIA IS WILLING TO LICENSE THE ENCLOSED SOFTWARE TO YOU ONLY ON THE CONDITION THAT YOU ACCEPT ALL THE TERMS CONTAINED IN THIS AGREEMENT. BY USING OR INSTALLING THE SOFTWARE YOU ACKNOWLEDGE THAT YOU HAVE READ THIS AGREEMENT, UNDERSTAND IT AND AGREE TO BE BOUND BY ALL OF ITS TERMS AND CONDITIONS.
1. License. This is a license agreement and NOT an agreement for sale. The Forge software (the "Software") is the property of NVIDIA Corporation (“NVIDIA”) and/or its Licensors. NVIDIA and/or its Licensors retain title to the Software and related documentation. Your rights to use the Software are only as explicitly specified in this Agreement.
2. Permitted Uses. You are granted the following right to the Software:
(a) Right to Install and Use. You may install and use the Software on a single computer. If you wish to use the Software on more than one computer, please contact NVIDIA for information concerning an upgraded license allowing use of the Software with additional computers.
3. Prohibited Uses. The following uses of the Software are prohibited. You may NOT:
(a) Make or distribute copies of the Software or documentation, or any portion thereof, except as expressly provided in this Agreement.
(b) Use any backup or archival copy of the Software (or allow someone else to use such copy) for any purpose other than to replace the original copy in the event it is destroyed or becomes defective;
(c) Alter, decompile, modify reverse engineer or disassemble the Software, create derivative works based upon the Software, or make any attempt to bypass, unlock or disable any protective or initialization system on the Software;
(d) Rent, lease, sub-license, time-share, or transfer the Software or documentation, or your rights under this Agreement.
(e) Remove or obscure any copyright or trademark notice(s) on the Software or documentation;
(f) Upload or transmit the Software, or any portion thereof, to any electronic bulletin board, network, or other type of multi-use computer system regardless of purpose;
(g) Include the Software in any commercial products intended for manufacture, distribution, or sale; or
(h) Include the Software in any product containing immoral, scandalous, controversial, derogatory, obscene, or offensive works.
4. Consent to Collection and Use of Data
You agree that NVIDIA and its affiliates may collect, use, store and transmit technical and related information that identifies your computer, operating system, peripheral hardware, and game play and software usage statistics, without further notice to you. NVIDIA and its affiliates may also use this information in the aggregate, in a form which does not personally identify you, to improve our products and services, and we may share anonymous aggregate data with our third party service providers. Data that personally identifies you is collected, used, stored and transmitted in accordance with NVIDIA’s Privacy Policy located at http://www.nvidia.com/object/privacy_policy.html.
5. Termination. This license is effective upon the first use, installation, loading or copying of the Software. You may terminate this Agreement at any time by destruction and disposal of the Software and all related documentation. This license will terminate automatically without notice from NVIDIA if you fail to comply with any provisions of this license. Upon termination, you shall destroy all copies of the Software and any accompanying documentation. All provisions of this Agreement as to warranties, limitation of liability, remedies or damages shall survive termination.
6. Ownership of the Software and Intellectual Property Rights. All rights, title and interest to all copies of the Software remain with NVIDIA, and / or its subsidiaries, licensors, or suppliers. The Software is copyrighted and protected by the laws of the United States and other countries, and international treaty provisions. You may not remove any copyright notices from the Software. NVIDIA may make changes to the Software, or to items referenced therein, at any time and without notice, but is not obligated to support or update the Software. Except as otherwise expressly provided, NVIDIA grants no express or implied right under any NVIDIA patents, copyrights, trademarks, or other intellectual property rights.
7. Applicable Laws. Claims arising under this Agreement shall be governed by the laws of Delaware, excluding its principles of conflict of laws and the United Nations Convention on Contracts for the Sale of Goods. The state and/or federal courts residing in Santa Clara County, California shall have exclusive jurisdiction over any dispute or claim arising out of this Agreement. You may not export the Software in violation of applicable export laws and regulations.
8. Disclaimer of Warranty. NVIDIA does not warrant that the Software or its operations or functions will meet your requirements, nor that the use thereof will be without interruption or error.
THE SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY EXPRESS OR IMPLIED WARRANTY OF ANY KIND INCLUDING WARRANTIES OF MERCHANTABILITY, TITLE, NONINFRINGEMENT OF INTELLECTUAL PROPERTY, OR FITNESS FOR ANY PARTICULAR PURPOSE. IN NO EVENT SHALL NVIDIA OR ITS SUBSIDIARIES, LICENSORS, OR SUPPLIERS BE LIABLE FOR ANY DAMAGES WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF PROFITS, BUSINESS INTERRUPTION, LOSS OF INFORMATION) ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE, EVEN IF NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. BECAUSE SOME JURISDICTIONS PROHIBIT THE EXCLUSION OR LIMITATION OF LIABILITY FOR CONSEQUENTIAL OR INCIDENTAL DAMAGES, THE ABOVE LIMITATION MAY NOT APPLY TO YOU. NVIDIA does not warrant the accuracy or completeness of the information, text, graphics, links or other items contained within or accompanying the Software.
9. Limitation of Liability. IN NO EVENT SHALL NVIDIA OR ITS SUBSIDIARIES, LICENSORS, OR SUPPLIERS BE LIABLE FOR ANY INCIDENTAL, INDIRECT, SPECIAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF OR IN CONNECTION WITH THE LICENSE GRANTED UNDER THIS AGREEMENT INCLUDING AND WITH-OUT LIMITATION, LOSS OF USE, LOSS OF DATE, LOSS OF INCOME OR PROFIT, OR OTHER LOSS SUSTAINED AS A RESULT OF INJURY TO ANY PERSON, OR LOSS OF OR DAMAGE TO PROPERTY, OR CLAIMS OF THIRD PARTIES, EVEN IF NVIDIA HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. NOTWITHSTANDING THE FOREGOING, NVIDIA’S AGGREGATE LIABILITY ARISING OUT OF THIS AGREEMENT SHALL NOT EXCEED THE AMOUNT YOU PAID FOR THE SOFTWARE.
10. Entire Agreement. This Agreement constitutes the entire agreement between the parties with respect to the subject matter contemplated herein, and merges all prior and contemporaneous communications. This Agreement may be executed in counterparts, each of which shall be deemed an original, and all of which together shall constitute one instrument.
11. Those provisions in this Agreement, which by their nature need to survive the termination or expiration of this Agreement, shall survive termination or expiration of the Agreement.
# The Better String Library (bstring) License
Note that this license covers only some portions of the software, and does *NOT* apply to any other components you may have obtained at the same time. Please see above for more details.
copyright © 2002-2006 Paul Hsieh All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of bstrlib nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# iniParser components License
Note that this license covers only some portions of the software, and does *NOT* apply to any other components you may have obtained at the same time. Please see above for more details.
iniParser is Copyright © 2000 by Nicolas Devillard.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# UTHash License
Note that this license covers only some portions of the software, and does *NOT* apply to any other components you may have obtained at the same time. Please see above for more details.
Copyright (c) 2003-2012, Troy D. Hanson http://uthash.sourceforge.net
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-packaging-LICENSE.txt | Copyright (c) Donald Stufft and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-colorama-LICENSE.txt | Copyright (c) 2010 Jonathan Hartley
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holders, nor those of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-progress-LICENSE.txt | # Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/valijson-LICENSE.txt | Copyright (c) 2016, Tristan Penman
Copyright (c) 2016, Akamai Technologies, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/ilmbase-LICENSE.txt | Copyright (c) 2006, Industrial Light & Magic, a division of Lucasfilm
Entertainment Company Ltd. Portions contributed and copyright held by
others as indicated. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the following
disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with
the distribution.
* Neither the name of Industrial Light & Magic nor the names of
any other contributors to this software may be used to endorse or
promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libcurl-LICENSE.txt | COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 1996 - 2021, Daniel Stenberg, [email protected], and many contributors, see the THANKS file.
All rights reserved.
Permission to use, copy, modify, and distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization of the copyright holder.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libogg-LICENSE.txt | Copyright (c) 2002, Xiph.org Foundation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pybind11-LICENSE.txt | Copyright (c) 2016 Wenzel Jakob <[email protected]>, All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Please also refer to the file CONTRIBUTING.md, which clarifies licensing of
external contributions to this project including patches, pull requests, etc.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/doctest-1-LICENSE.txt | The MIT License (MIT)
Copyright (c) 2016-2019 Viktor Kirilov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/octomap-LICENSE.txt |
OctoMap - An Efficient Probabilistic 3D Mapping Framework Based on Octrees
License for the "octomap" library: New BSD License.
Copyright (c) 2009-2013, K.M. Wurm and A. Hornung, University of Freiburg
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the University of Freiburg nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/carbonite-LICENSE.txt |
Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited.
===========================================================================
Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/omni_isaac_sim-LICENSE.txt | Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
NVIDIA CORPORATION and its licensors retain all intellectual property
and proprietary rights in and to this software, related documentation
and any modifications thereto. Any use, reproduction, disclosure or
distribution of this software and related documentation without an express
license agreement from NVIDIA CORPORATION is strictly prohibited.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/breakpad-LICENSE.txt | Copyright (c) 2006, Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------
Copyright 2001-2004 Unicode, Inc.
Disclaimer
This source code is provided as is by Unicode, Inc. No claims are
made as to fitness for any particular purpose. No warranties of any
kind are expressed or implied. The recipient agrees to determine
applicability of information provided. If this file has been
purchased on magnetic or optical media from Unicode, Inc., the
sole remedy for any claim will be exchange of defective media
within 90 days of receipt.
Limitations on Rights to Redistribute This Code
Unicode, Inc. hereby grants the right to freely use the information
supplied in this file in the creation of products supporting the
Unicode Standard, and to make copies of this file in any form
for internal or external distribution as long as this notice
remains attached.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/lua-LICENSE.txt | Copyright © 1994–2019 Lua.org, PUC-Rio.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/glm-LICENSE.txt | ================================================================================
OpenGL Mathematics (GLM)
--------------------------------------------------------------------------------
GLM is licensed under The Happy Bunny License and MIT License
================================================================================
The Happy Bunny License (Modified MIT License)
--------------------------------------------------------------------------------
Copyright (c) 2005 - 2014 G-Truc Creation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
Restrictions:
By making use of the Software for military purposes, you choose to make a
Bunny unhappy.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
================================================================================
The MIT License
--------------------------------------------------------------------------------
Copyright (c) 2005 - 2014 G-Truc Creation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/tinyxml2-LICENSE.txt | This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any
damages arising from the use of this software.
Permission is granted to anyone to use this software for any
purpose, including commercial applications, and to alter it and
redistribute it freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must
not claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product documentation
would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and
must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/bzip2-LICENSE.txt |
--------------------------------------------------------------------------
This program, "bzip2", the associated library "libbzip2", and all
documentation, are copyright (C) 1996-2019 Julian R Seward. All
rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. The origin of this software must not be misrepresented; you must
not claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product
documentation would be appreciated but is not required.
3. Altered source versions must be plainly marked as such, and must
not be misrepresented as being the original software.
4. The name of the author may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Julian Seward, [email protected]
bzip2/libbzip2 version 1.0.8 of 13 July 2019
--------------------------------------------------------------------------
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-distlib-LICENSE.txt | A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com). In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property. Zope Corporation is a sponsoring member of
the PSF.
All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release           Derived      Year        Owner        GPL-
                  from                                  compatible? (1)

0.9.0 thru 1.2                 1991-1995   CWI          yes
1.3 thru 1.5.2    1.2          1995-1999   CNRI         yes
1.6               1.5.2        2000        CNRI         no
2.0               1.6          2000        BeOpen.com   no
1.6.1             1.6          2001        CNRI         yes (2)
2.1               2.0+1.6.1    2001        PSF          no
2.0.1             2.0+1.6.1    2001        PSF          yes
2.1.1             2.1+2.0.1    2001        PSF          yes
2.2               2.1.1        2001        PSF          yes
2.1.2             2.1.1        2002        PSF          yes
2.1.3             2.1.2        2002        PSF          yes
2.2.1             2.2          2002        PSF          yes
2.2.2             2.2.1        2002        PSF          yes
2.2.3             2.2.2        2003        PSF          yes
2.3               2.2.2        2002-2003   PSF          yes
2.3.1             2.3          2002-2003   PSF          yes
2.3.2             2.3.1        2002-2003   PSF          yes
2.3.3             2.3.2        2002-2003   PSF          yes
2.3.4             2.3.3        2004        PSF          yes
2.3.5             2.3.4        2005        PSF          yes
2.4               2.3          2004        PSF          yes
2.4.1             2.4          2005        PSF          yes
2.4.2             2.4.1        2005        PSF          yes
2.4.3             2.4.2        2006        PSF          yes
2.4.4             2.4.3        2006        PSF          yes
2.5               2.4          2006        PSF          yes
2.5.1             2.5          2007        PSF          yes
2.5.2             2.5.1        2008        PSF          yes
2.5.3             2.5.2        2008        PSF          yes
2.6               2.5          2008        PSF          yes
2.6.1             2.6          2008        PSF          yes
2.6.2             2.6.1        2009        PSF          yes
2.6.3             2.6.2        2009        PSF          yes
2.6.4             2.6.3        2009        PSF          yes
2.6.5             2.6.4        2010        PSF          yes
3.0               2.6          2008        PSF          yes
3.0.1             3.0          2009        PSF          yes
3.1               3.0.1        2009        PSF          yes
3.1.1             3.1          2009        PSF          yes
3.1.2             3.1          2010        PSF          yes
3.2               3.1          2010        PSF          yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Python Software Foundation; All Rights Reserved" are retained in Python alone or
in any derivative version prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/tracy-LICENSE.txt | Tracy Profiler (https://github.com/wolfpld/tracy) is licensed under the
3-clause BSD license.
Copyright (c) 2017-2021, Bartosz Taudul <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
===========================================================================
Tracy Profiler (https://github.com/wolfpld/tracy) is licensed under the
3-clause BSD license.
Copyright (c) 2017-2020, Bartosz Taudul <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/ptex-LICENSE.txt | PTEX components are licensed under the following terms:
PTEX SOFTWARE
Copyright 2014 Disney Enterprises, Inc. All rights reserved
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* The names "Disney", "Walt Disney Pictures", "Walt Disney Animation
Studios" or the names of its contributors may NOT be used to
endorse or promote products derived from this software without
specific prior written permission from Walt Disney Pictures.
Disclaimer: THIS SOFTWARE IS PROVIDED BY WALT DISNEY PICTURES AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE, NONINFRINGEMENT AND TITLE ARE DISCLAIMED.
IN NO EVENT SHALL WALT DISNEY PICTURES, THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND BASED ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-urllib3-LICENSE.txt | MIT License
Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/concurrentqueue-mpmc_sema-LICENSE.txt | Code in the mpmc_sema namespace below is an adaptation of Jeff Preshing's
portable + lightweight semaphore implementations, originally from
https://github.com/preshing/cpp11-on-multicore/blob/master/common/sema.h
LICENSE:
Copyright (c) 2015 Jeff Preshing
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgement in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution. |
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/xz-LICENSE.txt |
XZ Utils Licensing
==================
Different licenses apply to different files in this package. Here
is a rough summary of which licenses apply to which parts of this
package (but check the individual files to be sure!):
- liblzma is in the public domain.
- xz, xzdec, and lzmadec command line tools are in the public
domain unless GNU getopt_long had to be compiled and linked
in from the lib directory. The getopt_long code is under
GNU LGPLv2.1+.
- The scripts to grep, diff, and view compressed files have been
adapted from gzip. These scripts and their documentation are
under GNU GPLv2+.
- All the documentation in the doc directory and most of the
XZ Utils specific documentation files in other directories
are in the public domain.
- Translated messages are in the public domain.
- The build system contains public domain files, and files that
are under GNU GPLv2+ or GNU GPLv3+. None of these files end up
in the binaries being built.
- Test files and test code in the tests directory, and debugging
utilities in the debug directory are in the public domain.
- The extra directory may contain public domain files, and files
that are under various free software licenses.
You can do whatever you want with the files that have been put into
the public domain. If you find public domain legally problematic,
take the previous sentence as a license grant. If you still find
the lack of copyright legally problematic, you have too many
lawyers.
As usual, this software is provided "as is", without any warranty.
If you copy significant amounts of public domain code from XZ Utils
into your project, acknowledging this somewhere in your software is
polite (especially if it is proprietary, non-free software), but
naturally it is not legally required. Here is an example of a good
notice to put into "about box" or into documentation:
This software includes code from XZ Utils <https://tukaani.org/xz/>.
The following license texts are included in the following files:
- COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1
- COPYING.GPLv2: GNU General Public License version 2
- COPYING.GPLv3: GNU General Public License version 3
Note that the toolchain (compiler, linker etc.) may add some code
pieces that are copyrighted. Thus, it is possible that e.g. liblzma
binary wouldn't actually be in the public domain in its entirety
even though it contains no copyrighted code from the XZ Utils source
package.
If you have questions, don't hesitate to ask the author(s) for more
information.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/minimp3-LICENSE.txt | thanks to lieff for minimp3 https://github.com/lieff/minimp3
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/pip-vendor-msgpack-LICENSE.txt | Copyright (C) 2008-2011 INADA Naoki <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/usd-LICENSE.txt | Universal Scene Description (USD) components are licensed under the following terms:
Modified Apache 2.0 License
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor
and its affiliates, except as required to comply with Section 4(c) of
the License and to reproduce the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
============================================================
RapidJSON
============================================================
Tencent is pleased to support the open source community by making RapidJSON available.
Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. All rights reserved.
If you have downloaded a copy of the RapidJSON binary from Tencent, please note that the RapidJSON binary is licensed under the MIT License.
If you have downloaded a copy of the RapidJSON source code from Tencent, please note that RapidJSON source code is licensed under the MIT License, except for the third-party components listed below which are subject to different license terms. Your integration of RapidJSON into your own projects may require compliance with the MIT License, as well as the other licenses applicable to the third-party components included within RapidJSON. To avoid the problematic JSON license in your own projects, it's sufficient to exclude the bin/jsonchecker/ directory, as it's the only code under the JSON license.
A copy of the MIT License is included in this file.
Other dependencies and licenses:
Open Source Software Licensed Under the BSD License:
--------------------------------------------------------------------
The msinttypes r29
Copyright (c) 2006-2013 Alexander Chemeris
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Open Source Software Licensed Under the JSON License:
--------------------------------------------------------------------
json.org
Copyright (c) 2002 JSON.org
All Rights Reserved.
JSON_checker
Copyright (c) 2002 JSON.org
All Rights Reserved.
Terms of the JSON License:
---------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Terms of the MIT License:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
============================================================
pygilstate_check
============================================================
The MIT License (MIT)
Copyright (c) 2014, Pankaj Pandey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================
double-conversion
============================================================
Copyright 2006-2011, the V8 project authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
============================================================
OpenEXR/IlmBase/Half
============================================================
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2002, Industrial Light & Magic, a division of Lucas
// Digital Ltd. LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
///////////////////////////////////////////////////////////////////////////
============================================================
Apple Technical Q&A QA1361 - Detecting the Debugger
https://developer.apple.com/library/content/qa/qa1361/_index.html
============================================================
Sample code project: Detecting the Debugger
Version: 1.0
Abstract: Shows how to determine if code is being run under the debugger.
IMPORTANT: This Apple software is supplied to you by Apple
Inc. ("Apple") in consideration of your agreement to the following
terms, and your use, installation, modification or redistribution of
this Apple software constitutes acceptance of these terms. If you do
not agree with these terms, please do not use, install, modify or
redistribute this Apple software.
In consideration of your agreement to abide by the following terms, and
subject to these terms, Apple grants you a personal, non-exclusive
license, under Apple's copyrights in this original Apple software (the
"Apple Software"), to use, reproduce, modify and redistribute the Apple
Software, with or without modifications, in source and/or binary forms;
provided that if you redistribute the Apple Software in its entirety and
without modifications, you must retain this notice and the following
text and disclaimers in all such redistributions of the Apple Software.
Neither the name, trademarks, service marks or logos of Apple Inc. may
be used to endorse or promote products derived from the Apple Software
without specific prior written permission from Apple. Except as
expressly stated in this notice, no other rights or licenses, express or
implied, are granted by Apple herein, including but not limited to any
patent rights that may be infringed by your derivative works or by other
works in which the Apple Software may be incorporated.
The Apple Software is provided by Apple on an "AS IS" basis. APPLE
MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
============================================================
LZ4
============================================================
LZ4 - Fast LZ compression algorithm
Copyright (C) 2011-2017, Yann Collet.
BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
You can contact the author at :
- LZ4 homepage : http://www.lz4.org
- LZ4 source repository : https://github.com/lz4/lz4
============================================================
stb
============================================================
stb_image - v2.19 - public domain image loader - http://nothings.org/stb
no warranty implied; use at your own risk
stb_image_resize - v0.95 - public domain image resizing
by Jorge L Rodriguez (@VinoBS) - 2014
http://github.com/nothings/stb
stb_image_write - v1.09 - public domain - http://nothings.org/stb/stb_image_write.h
writes out PNG/BMP/TGA/JPEG/HDR images to C stdio - Sean Barrett 2010-2015
no warranty implied; use at your own risk
ALTERNATIVE B - Public Domain (www.unlicense.org)
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
software, either in source code form or as a compiled binary, for any purpose,
commercial or non-commercial, and by any means.
In jurisdictions that recognize copyright laws, the author or authors of this
software dedicate any and all copyright interest in the software to the public
domain. We make this dedication for the benefit of the public at large and to
the detriment of our heirs and successors. We intend this dedication to be an
overt act of relinquishment in perpetuity of all present and future rights to
this software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/libvorbis-LICENSE.txt | Copyright (c) 2002-2020 Xiph.org Foundation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the Xiph.org Foundation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/nlohmann-json-LICENSE.txt | MIT License
Copyright (c) 2013-2019 Niels Lohmann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
eliabntt/GRADE-RR/isaac_internals/exts/omni.isaac.shapenet/PACKAGE-LICENSES/dependencies/packages_list.txt | Jinja2
MarkupSafe
Pint
PyYAML
charset-normalizer
construct
six
cycler
gunicorn
kiwisolver
llvmlite
matplotlib
nest-asyncio
numba
numpy-quaternion
oauthlib
packaging
pyparsing
requests-oauthlib
requests
scipy
selenium
torch
torchvision
urllib3
watchgod
webbot
certifi
pycapnp
|