#!/bin/bash
# Usage function
usage() {
    echo "Usage: $0 -s SOURCE_DIR -t TARGET_DIR [-c CHUNK_TYPE] [-m MAX_FILES] [-h]"
    echo "Uncompress chunked DeepFurniture dataset"
    echo ""
    echo "Required arguments:"
    echo "  -s SOURCE_DIR  Source directory containing the chunked dataset"
    echo "  -t TARGET_DIR  Target directory for the uncompressed dataset"
    echo ""
    echo "Optional arguments:"
    echo "  -c CHUNK_TYPE  Specific chunk type to process (scenes, furnitures, queries)"
    echo "                 If not specified, all chunk types will be processed"
    echo "  -m MAX_FILES   Maximum number of files to process per type (default: process all)"
    echo "  -h             Show this help message"
    exit 1
}
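# Example invocations (script name and paths are illustrative):
#   ./uncompress_dataset.sh -s /data/deepfurniture_chunks -t /data/deepfurniture
#   ./uncompress_dataset.sh -s /data/deepfurniture_chunks -t /data/deepfurniture -c scenes -m 2000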
# Process command line arguments
while getopts "s:t:c:m:h" opt; do
    case $opt in
        s) SOURCE_DIR="$OPTARG";;
        t) TARGET_DIR="$OPTARG";;
        c) CHUNK_TYPE="$OPTARG";;
        m) MAX_FILES="$OPTARG";;
        h) usage;;
        \?) usage;;
    esac
done
# Check required arguments
if [ -z "$SOURCE_DIR" ] || [ -z "$TARGET_DIR" ]; then
    echo "Error: Source and target directories are required"
    usage
fi
# Validate source directory
if [ ! -d "$SOURCE_DIR" ]; then
    echo "Error: Source directory does not exist: $SOURCE_DIR"
    exit 1
fi
# Validate MAX_FILES if provided
if [ -n "$MAX_FILES" ]; then
    if ! [[ "$MAX_FILES" =~ ^[0-9]+$ ]]; then
        echo "Error: MAX_FILES must be a positive integer"
        exit 1
    fi
    echo "Will process maximum $MAX_FILES files per type"
fi
# Create target directory structure
mkdir -p "$TARGET_DIR"/{metadata,scenes,furnitures,queries}
# Copy metadata files (excluding index files)
echo "Copying metadata files..."
for file in "$SOURCE_DIR"/metadata/*.json*; do
    # Skip if the glob matched nothing, then skip chunk index files
    [ -e "$file" ] || continue
    if [[ ! $file =~ _index\.json$ ]]; then
        cp "$file" "$TARGET_DIR/metadata/"
    fi
done
# Function to process chunks of a specific type
process_chunks() {
    local type=$1
    local src_dir="$SOURCE_DIR/$type"
    local target_dir="$TARGET_DIR/$type"
    echo "Processing $type chunks..."
    # Check if source directory exists
    if [ ! -d "$src_dir" ]; then
        echo "Warning: Directory not found: $src_dir"
        return
    fi
    # Get the list of chunks in natural (version-number) sort order;
    # mapfile keeps chunk names with spaces intact
    local chunks=()
    mapfile -t chunks < <(ls -v "$src_dir"/*.tar.gz 2>/dev/null)
    local total_chunks=${#chunks[@]}
    if [ "$total_chunks" -eq 0 ]; then
        echo "No chunks found in $src_dir"
        return
    fi
    # Determine how many chunks to process based on MAX_FILES
    local files_per_chunk=1000 # Default files per chunk based on dataset structure
    if [ -n "$MAX_FILES" ]; then
        local chunks_needed=$(( (MAX_FILES + files_per_chunk - 1) / files_per_chunk ))
        if [ "$chunks_needed" -lt "$total_chunks" ]; then
            total_chunks=$chunks_needed
            echo "Limiting to $total_chunks chunks ($MAX_FILES files) for $type"
        fi
    fi
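    # Example of the ceiling division above (numbers are illustrative):
    # MAX_FILES=2500 with files_per_chunk=1000 gives (2500 + 999) / 1000 = 3
    # chunks, so up to 500 extra files may be extracted and trimmed below.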
    # Process chunks
    for ((i = 0; i < total_chunks; i++)); do
        chunk="${chunks[$i]}"
        chunk_name=$(basename "$chunk")
        printf "Extracting %s (%d/%d)..." "$chunk_name" "$((i + 1))" "$total_chunks"
        if tar -xzf "$chunk" -C "$target_dir" 2>/dev/null; then
            echo " done"
        else
            echo " failed"
            echo "Warning: Failed to extract $chunk_name"
        fi
        # If this is the last chunk and we have MAX_FILES set,
        # we might need to remove excess files
        if [ -n "$MAX_FILES" ] && [ "$i" -eq "$((total_chunks - 1))" ]; then
            # Calculate how many entries we should have
            local expected_total=$MAX_FILES
            local current_total
            current_total=$(ls "$target_dir" | wc -l)
            if [ "$current_total" -gt "$expected_total" ]; then
                echo "Trimming excess files to meet MAX_FILES limit..."
                # Remove excess entries, keeping the first MAX_FILES in sorted
                # order (assumes extracted names contain no newlines)
                ls "$target_dir" | sort | tail -n "+$((expected_total + 1))" |
                    while IFS= read -r entry; do
                        rm -rf "$target_dir/$entry"
                    done
            fi
        fi
    done
}
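# Note: the layout assumed above is $SOURCE_DIR/<type>/*.tar.gz for each of
# scenes, furnitures, and queries, with roughly 1000 files per chunk.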
# Process chunks based on input
if [ -n "$CHUNK_TYPE" ]; then
    case "$CHUNK_TYPE" in
        scenes|furnitures|queries)
            process_chunks "$CHUNK_TYPE"
            ;;
        *)
            echo "Error: Invalid chunk type: $CHUNK_TYPE"
            echo "Valid types are: scenes, furnitures, queries"
            exit 1
            ;;
    esac
else
    # Process all chunk types
    for type in scenes furnitures queries; do
        process_chunks "$type"
    done
fi
# Basic validation
echo -e "\nValidating extracted files..."
# Check scenes
if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "scenes" ]; then
    missing_files=0
    total_scenes=0
    for scene_dir in "$TARGET_DIR"/scenes/*; do
        if [ -d "$scene_dir" ]; then
            total_scenes=$((total_scenes + 1))
            for required in "image.jpg" "annotation.json"; do
                if [ ! -f "$scene_dir/$required" ]; then
                    echo "Warning: Missing $required in $(basename "$scene_dir")"
                    missing_files=$((missing_files + 1))
                fi
            done
        fi
    done
    echo "Scene validation complete. Processed $total_scenes scenes. Missing files: $missing_files"
fi
# Print final statistics
echo -e "\nExtraction Summary:"
for type in scenes furnitures queries; do
    if [ -z "$CHUNK_TYPE" ] || [ "$CHUNK_TYPE" = "$type" ]; then
        file_count=$(find "$TARGET_DIR/$type" -type f | wc -l)
        echo "$type: $file_count files"
    fi
done
echo "Dataset uncompression completed!"
echo "Output directory: $TARGET_DIR" |