commands2

vre_dronescapes/commands.md (+69 -1)
@@ -77,7 +77,26 @@ tmux new -s sheerness
```bash
VRE_DEVICE=cuda CUDA_VISIBLE_DEVICES=0 vre ../raw_data/videos/new_videos/sheerness_youtube_1_540p.mp4 -o sheerness_youtube_1_540p/ --config_path cfg.yaml --output_dir_exists_mode skip_computed --exception_mode skip_representation --n_threads_data_storer 4 -I ../scripts/semantic_mapper/semantic_mapper.py:get_new_semantic_mapped_tasks
```
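If the same `vre` call needs to be repeated for each new video, a minimal batching sketch (an addition, not part of the original file; it assumes every new clip lives in `../raw_data/videos/new_videos/` and shares the same `cfg.yaml`) could look like this:

```bash
# sketch: run VRE with the same flags over every new 540p clip (one GPU assumed)
for vid in ../raw_data/videos/new_videos/*_540p.mp4; do
  name=$(basename "$vid" .mp4)
  VRE_DEVICE=cuda CUDA_VISIBLE_DEVICES=0 vre "$vid" -o "$name"/ --config_path cfg.yaml \
    --output_dir_exists_mode skip_computed --exception_mode skip_representation \
    --n_threads_data_storer 4 -I ../scripts/semantic_mapper/semantic_mapper.py:get_new_semantic_mapped_tasks
done
```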

### Add the output tasks derived from existing ones and global statistics

```bash
dirs=(
  norway_DJI_0708_540p
  politehnica_DJI_0741_a2_540p
  ovaselu_DJI_0372_540p
  raciu_DJI_0418_540p
  sanfrancisco_youtube_1_540p
  paris_youtube_1_540p
  riodejaneiro_youtube_1_540p
  sheerness_youtube_1_540p
)
for line in ${dirs[@]}; do
  echo $line
  cp -r $line/depth_marigold $line/depth_output
  ../scripts/update_stastistics.py $line/depth_output/npz --statistics_file ../data/train_set/.task_statistics.npz --source_task depth_marigold --n_workers 32
  cp -r $line/'normals_svd(depth_marigold)' $line/camera_normals_output
done
```
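A quick post-hoc check (an added sketch, not in the original; it assumes each representation stores its frames under `<task>/npz/`, as `rgb` and `depth_output` do above): the copied output tasks should mirror their source representations frame for frame.

```bash
# sketch: frame counts should match between source tasks and derived output tasks (reuses the dirs array above)
for line in ${dirs[@]}; do
  echo "$line:" \
    "depth_marigold=$(ls $line/depth_marigold/npz | wc -l)" \
    "depth_output=$(ls $line/depth_output/npz | wc -l)" \
    "camera_normals_output=$(ls $line/camera_normals_output/npz | wc -l)"
done
```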

### Include the new videos in the txt files for training

@@ -85,7 +104,24 @@ This will make a combined txt file of all new scenes + old data in vre_dronescapes_dir

```bash
mkdir ../scripts/txt_files/experts # in root dir
dirs=(
  norway_DJI_0708_540p
  politehnica_DJI_0741_a2_540p
  ovaselu_DJI_0372_540p
  raciu_DJI_0418_540p
  sanfrancisco_youtube_1_540p
  paris_youtube_1_540p
  riodejaneiro_youtube_1_540p
  sheerness_youtube_1_540p
)
for line in ${dirs[@]}; do
  rm -f ../scripts/txt_files/experts/"$line".txt;
  ls -v $line/rgb/npz | while read line2; do
    stem=${line2:0:-4};
    echo $line/$stem >> ../scripts/txt_files/experts/"$line".txt;
  done;
done

cd ../scripts/txt_files/experts # move to txt files dir
cat ../annotated_and_segprop/train_files_11664.txt > train_set.txt
cat ../annotated_and_segprop/semisup_files_11299.txt >> train_set.txt
```
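An optional sanity check (added sketch, not part of the original commands): every per-scene txt file should contain one entry per `rgb` npz frame, and `train_set.txt` should grow as the per-scene lists are appended later on.

```bash
# sketch: line counts for the per-scene lists and the combined train set (run from the experts dir)
wc -l *.txt
```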

@@ -144,3 +180,35 @@ for line in ${dirs[@]}; do
```bash
  cp -r $line/'normals_svd(depth_marigold)' $line/camera_normals_output
done
```

### Include the new videos in the txt files for training

This will make a combined txt file of all new scenes + old data in vre_dronescapes_dir.

```bash
mkdir ../scripts/txt_files/experts2 # in root dir
dirs=(
  castelulcorvinilor_DJI_20230915122046_0040_D_540p
  comana_DJI_20240721134505_0029_D_540p
  herculane_DJI_20240807121925_0049_D_540p
  horezeu_DJI_20240924171535_0059_D_540p
  olanesti_DJI_20240924125710_0005_D_540p
  pietrosa_DJI_20240603132329_0021_D_540p
  slanic_DJI_20240604124727_0023_D_540p
  voronet_DJI_20230918122921_0102_D_540p
)
for line in ${dirs[@]}; do
  rm -f ../scripts/txt_files/experts2/"$line".txt;
  ls -v $line/rgb/npz | while read line2; do
    stem=${line2:0:-4};
    echo $line/$stem >> ../scripts/txt_files/experts2/"$line".txt;
  done;
done

cd ../scripts/txt_files/experts2 # move to txt files dir
cat ../experts/train_set.txt > train_set.txt
ls | grep -v train_set | while read line; do cat $line >> train_set.txt; done
echo "The new train set has $(cat train_set.txt | wc -l) data points across all experts."
cd ../../../ # root dir
python scripts/symlinks_from_txt_list.py vre_dronescapes/ --txt_file scripts/txt_files/experts2/train_set.txt -o data/train_set_experts2
```
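A final check (added sketch, not in the original; the output layout of `symlinks_from_txt_list.py` is assumed, not documented here):

```bash
# sketch: confirm the symlinked train set was created and peek at its contents
ls data/train_set_experts2 | head
find data/train_set_experts2 -type l | wc -l   # total number of symlinked files
```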