KYAGABA committed
Commit a94ec70 · 1 Parent(s): 747ba73

added_cors_middle

Files changed (1):
  1. app.py (+11 -3)
app.py CHANGED

@@ -14,16 +14,24 @@ from huggingface_hub import hf_hub_download
 import pydicom
 import gc
 from model import CombinedModel, ImageToTextProjector
-
 from fastapi import FastAPI, Request
+from fastapi.middleware.cors import CORSMiddleware
+
 
 app = FastAPI()
 
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 @app.get("/")
 async def root(request: Request):
     return {"message": "Welcome to Phronesis"}
 
-
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
 def dicom_to_png(dicom_data):
@@ -111,7 +119,7 @@ async def predict(files: list[UploadFile]):
     gc.collect()
     if torch.cuda.is_available():
         torch.cuda.empty_cache()
-
+
     return {
         "predicted_class": predicted_class_name,
         "generated_report": generated_report[0] if generated_report else "No report generated."