mknolan committed on
Commit 5b040dc · verified · 1 Parent(s): c93688f

Upload InternVL2 implementation

Files changed (2):
  1. Dockerfile        +8 -0
  2. app_internvl2.py  +9 -2
Dockerfile CHANGED
@@ -27,6 +27,11 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 # Create a working directory
 WORKDIR /app
 
+# Create and set permissions for Gradio cache directory
+RUN mkdir -p /app/gradio_cached_examples && \
+    chmod 777 /app/gradio_cached_examples && \
+    chmod 777 /app
+
 # Copy requirements file
 COPY requirements.txt .
 
@@ -53,6 +58,9 @@ RUN pip3 install --no-cache-dir --upgrade pip && \
 # Copy the application files
 COPY . .
 
+# Set permissions for the entire app directory
+RUN chmod -R 777 /app
+
 # Make port 7860 available for the app
 EXPOSE 7860
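These Dockerfile additions pre-create /app/gradio_cached_examples and open up its permissions so that Gradio can write cached example outputs even when the container runs as a non-root user (as on Hugging Face Spaces). A minimal startup-check sketch of the same idea follows; the helper name ensure_writable_cache and the hard-coded default path are assumptions for illustration, not part of the committed code.

import os
import stat

def ensure_writable_cache(path: str = "/app/gradio_cached_examples") -> str:
    # Hypothetical helper: create the cache directory if the image did not
    # already provide it, then verify the current user can write to it.
    os.makedirs(path, exist_ok=True)
    if not os.access(path, os.W_OK):
        # Mirror the Dockerfile's `chmod 777`: world-writable, so an arbitrary
        # runtime UID can still cache example outputs.
        os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
    return path

chmod 777 is deliberately blunt; granting write access only to the user the container actually runs as would be tighter, but the permissive mode matches what this commit does at build time.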
 
app_internvl2.py CHANGED
@@ -9,6 +9,12 @@ import warnings
 # Set environment variables
 os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "max_split_size_mb:128"
 
+# Ensure cache directories exist with proper permissions
+cache_dir = os.path.join(os.getcwd(), "gradio_cached_examples")
+os.makedirs(cache_dir, exist_ok=True)
+if os.path.exists(cache_dir):
+    os.chmod(cache_dir, 0o777)
+
 # Suppress specific warnings that might be caused by package version mismatches
 warnings.filterwarnings("ignore", message=".*The 'nopython' keyword.*")
 warnings.filterwarnings("ignore", message=".*Torch is not compiled with CUDA enabled.*")
@@ -179,6 +185,7 @@ def create_interface():
         outputs=output_text,
         fn=process_image,
         cache_examples=True,
+        examples_dir=cache_dir
     )
 
     return demo
@@ -188,5 +195,5 @@ if __name__ == "__main__":
     # Create the Gradio interface
     demo = create_interface()
 
-    # Launch the interface
-    demo.launch(share=False)
+    # Launch the interface with explicit cache directory
+    demo.launch(share=False, cache_dir=cache_dir)
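On the application side, the commit builds cache_dir up front, relaxes its permissions, and then hands the path to the examples component and to launch(). Whether examples_dir and launch(cache_dir=...) are accepted keywords depends on the Gradio release pinned in requirements.txt; the sketch below reaches the same goal with only widely documented options (cache_examples on gr.Interface plus the GRADIO_TEMP_DIR environment variable), and the example file name and stub process_image are placeholders rather than the repository's real code.

import os
import gradio as gr

# Same directory name the commit uses, created before the interface is built.
cache_dir = os.path.join(os.getcwd(), "gradio_cached_examples")
os.makedirs(cache_dir, exist_ok=True)
# Point Gradio's temporary/cache files at the prepared directory (assumes a
# release that honours GRADIO_TEMP_DIR).
os.environ.setdefault("GRADIO_TEMP_DIR", cache_dir)

def process_image(image):
    # Stand-in for the InternVL2 inference function defined in app_internvl2.py.
    return "description of the image"

demo = gr.Interface(
    fn=process_image,
    inputs=gr.Image(type="pil"),
    outputs=gr.Textbox(label="Output"),
    examples=[["example.jpg"]],  # hypothetical example file
    cache_examples=True,         # pre-compute and cache the example outputs
)

if __name__ == "__main__":
    demo.launch(share=False)

If the pinned Gradio version rejects the extra keyword arguments shown in the diff above, the environment-variable route still keeps cached examples inside a directory the container has already made writable.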