geyik2 committed · verified
Commit 3836717 · 1 Parent(s): f0f7f3b

Upload requirements.txt

Files changed (1)
  1. requirements.txt +34 -0
requirements.txt ADDED
@@ -0,0 +1,34 @@
+ --extra-index-url https://download.pytorch.org/whl/cu121
+ --find-links https://download.pytorch.org/whl/torch_stable.html
+
+ torch==2.4.0+cu121
+ torchvision==0.19.0+cu121
+ numpy>=1.24.0
+ pillow==10.4.0
+ imageio==2.36.1
+ imageio-ffmpeg==0.5.1
+ tqdm==4.67.1
+ easydict==1.13
+ opencv-python-headless==4.10.0.84
+ scipy==1.14.1
+ rembg==2.0.60
+ onnxruntime==1.20.1
+ trimesh==4.5.3
+ xatlas==0.0.9
+ pyvista==0.44.2
+ pymeshfix==0.17.0
+ igraph==0.11.8
+ git+https://github.com/EasternJournalist/utils3d.git@9a4eb15e4021b67b12c460c7057d642626897ec8
+ xformers==0.0.27.post2
+ spconv-cu121==2.3.6
+ transformers==4.46.3
+ gradio_litmodel3d==0.0.1
+ diff-gaussian-rasterization @ https://huggingface.co/spaces/JeffreyXiang/TRELLIS/resolve/main/wheels/diff_gaussian_rasterization-0.0.0-cp310-cp310-linux_x86_64.whl
+ nvdiffrast @ https://huggingface.co/spaces/JeffreyXiang/TRELLIS/resolve/main/wheels/nvdiffrast-0.3.3-cp310-cp310-linux_x86_64.whl
+ flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.7.0.post2/flash_attn-2.7.0.post2+cu121torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
+ accelerate
+ diffusers
+ peft
+ sentencepiece
+ bitsandbytes
+ gguf
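
The pinned wheels above (cp310, cu121, torch2.4 tags on flash-attn, xformers, spconv-cu121 and the prebuilt diff-gaussian-rasterization/nvdiffrast wheels) only resolve on Python 3.10 with a CUDA 12.1-compatible setup. A minimal post-install sanity check is sketched below; the script name and its checks are illustrative, not part of this repository, and assume the file was installed with pip install -r requirements.txt.

# sanity_check.py -- hypothetical helper, not part of the upload.
# Verifies that the interpreter and torch build match the pins above.
import sys

import torch


def main() -> None:
    # The prebuilt wheels are cp310 builds; any other interpreter will not match them.
    assert sys.version_info[:2] == (3, 10), f"expected Python 3.10, got {sys.version}"

    # torch==2.4.0+cu121 is pinned; xformers and flash-attn are built against it.
    print("torch:", torch.__version__)               # expect 2.4.0+cu121
    print("CUDA available:", torch.cuda.is_available())
    if torch.cuda.is_available():
        print("CUDA runtime:", torch.version.cuda)   # expect 12.1
        print("device:", torch.cuda.get_device_name(0))


if __name__ == "__main__":
    main()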