AnkitS1997 committed on
Commit 20ed9e7 · 1 Parent(s): e7ac41b

updated cors

.ipynb_checkpoints/app-checkpoint.py CHANGED
@@ -1,4 +1,5 @@
 from fastapi import FastAPI, File, UploadFile
+from fastapi.middleware.cors import CORSMiddleware
 from PIL import Image
 from transformers import AutoProcessor, Blip2ForConditionalGeneration
 import torch
@@ -6,6 +7,14 @@ import io
 
 app = FastAPI()
 
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],  # Adjust this as needed for security
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 # Load the model and processor
 model = Blip2ForConditionalGeneration.from_pretrained("ybelkada/blip2-opt-2.7b-fp16-sharded")
 model.load_adapter('blip-cpu-model')
.ipynb_checkpoints/streamlit_app-checkpoint.py CHANGED
@@ -11,7 +11,7 @@ if uploaded_file is not None:
     st.image(image, caption="Uploaded Image", use_column_width=True)
 
     files = {"file": uploaded_file.getvalue()}
-    response = requests.post("http://localhost:8502/generate-caption/", files=files)
+    response = requests.post("http://0.0.0.0:8502/generate-caption/", files=files)
     caption = response.json().get("caption")
 
     st.write("Generated Caption:")
app.py CHANGED
@@ -1,4 +1,5 @@
 from fastapi import FastAPI, File, UploadFile
+from fastapi.middleware.cors import CORSMiddleware
 from PIL import Image
 from transformers import AutoProcessor, Blip2ForConditionalGeneration
 import torch
@@ -6,6 +7,14 @@ import io
 
 app = FastAPI()
 
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],  # Adjust this as needed for security
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 # Load the model and processor
 model = Blip2ForConditionalGeneration.from_pretrained("ybelkada/blip2-opt-2.7b-fp16-sharded")
 model.load_adapter('blip-cpu-model')
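
A note on the new middleware: the CORS spec disallows Access-Control-Allow-Origin: * on credentialed requests, so when the front end's origin is known, an explicit origin list is generally safer than the wildcard that the "Adjust this as needed for security" comment flags. A minimal sketch of a tighter configuration, assuming the Streamlit front end runs on its default port 8501 (a hypothetical value, not taken from this commit):

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

app = FastAPI()

app.add_middleware(
    CORSMiddleware,
    # Hypothetical: list the real front-end origin(s) instead of "*".
    allow_origins=["http://localhost:8501"],
    allow_credentials=True,  # compatible with credentials because origins are explicit
    allow_methods=["POST"],  # the caption endpoint only needs POST
    allow_headers=["*"],
)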
streamlit_app.py CHANGED
@@ -11,7 +11,8 @@ if uploaded_file is not None:
     st.image(image, caption="Uploaded Image", use_column_width=True)
 
     files = {"file": uploaded_file.getvalue()}
-    response = requests.post("http://localhost:8502/generate-caption/", files=files)
+    print("Sending API request")
+    response = requests.post("http://0.0.0.0:8502/generate-caption/", files=files)
     caption = response.json().get("caption")
 
     st.write("Generated Caption:")
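
One caveat with the new URL: 0.0.0.0 is conventionally a server bind address rather than a client destination, and not every platform routes requests to it, so making the backend URL configurable keeps the client portable. A minimal sketch of the request block, assuming a hypothetical CAPTION_API_URL environment variable and keeping this commit's address as the fallback:

import os
import requests

# Hypothetical env var; falls back to the address used in this commit.
API_URL = os.getenv("CAPTION_API_URL", "http://0.0.0.0:8502/generate-caption/")

# uploaded_file comes from st.file_uploader(...) earlier in the script.
files = {"file": uploaded_file.getvalue()}
response = requests.post(API_URL, files=files, timeout=60)
response.raise_for_status()  # surface HTTP errors instead of silently parsing an error body
caption = response.json().get("caption")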