sounar committed on
Commit
3d49b4f
·
verified ·
1 Parent(s): 2974476

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +16 -11
app.py CHANGED
@@ -11,29 +11,29 @@ api_token = os.getenv("HF_TOKEN").strip()
11
 
12
  # Quantization configuration
13
  bnb_config = BitsAndBytesConfig(
14
- load_in_4bit=True,
15
- bnb_4bit_quant_type="nf4",
16
- bnb_4bit_use_double_quant=True,
17
  bnb_4bit_compute_dtype=torch.float16
18
  )
19
 
20
  # Load model
21
  model = AutoModel.from_pretrained(
22
- "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1",
23
- quantization_config=bnb_config,
24
- device_map="auto",
25
- torch_dtype=torch.float16,
26
  trust_remote_code=True,
27
  token=api_token
28
  )
29
 
30
  tokenizer = AutoTokenizer.from_pretrained(
31
- "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1",
32
  trust_remote_code=True,
33
  token=api_token
34
  )
35
 
36
- def analyze_input(image_data=None, question=""):
37
  try:
38
  # Handle base64 image if provided
39
  if isinstance(image_data, str) and image_data.startswith('data:image'):
@@ -73,7 +73,7 @@ def analyze_input(image_data=None, question=""):
73
  demo = gr.Interface(
74
  fn=analyze_input,
75
  inputs=[
76
- gr.Image(type="numpy", label="Medical Image (Optional)", optional=True),
77
  gr.Textbox(label="Question", placeholder="Enter your medical query...")
78
  ],
79
  outputs=gr.JSON(label="Analysis"),
@@ -83,4 +83,9 @@ demo = gr.Interface(
83
  )
84
 
85
  # Launch with API access enabled
86
- demo.launch(share=True, server_name="0.0.0.0", server_port=7860, enable_queue=True)
 
 
 
 
 
 
11
 
12
  # Quantization configuration
13
  bnb_config = BitsAndBytesConfig(
14
+ load_in_4bit=True,
15
+ bnb_4bit_quant_type="nf4",
16
+ bnb_4bit_use_double_quant=True,
17
  bnb_4bit_compute_dtype=torch.float16
18
  )
19
 
20
  # Load model
21
  model = AutoModel.from_pretrained(
22
+ "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1",
23
+ quantization_config=bnb_config,
24
+ device_map="auto",
25
+ torch_dtype=torch.float16,
26
  trust_remote_code=True,
27
  token=api_token
28
  )
29
 
30
  tokenizer = AutoTokenizer.from_pretrained(
31
+ "ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1",
32
  trust_remote_code=True,
33
  token=api_token
34
  )
35
 
36
+ def analyze_input(image_data, question):
37
  try:
38
  # Handle base64 image if provided
39
  if isinstance(image_data, str) and image_data.startswith('data:image'):
 
73
  demo = gr.Interface(
74
  fn=analyze_input,
75
  inputs=[
76
+ gr.Image(type="numpy", label="Medical Image"), # Removed optional parameter
77
  gr.Textbox(label="Question", placeholder="Enter your medical query...")
78
  ],
79
  outputs=gr.JSON(label="Analysis"),
 
83
  )
84
 
85
  # Launch with API access enabled
86
+ demo.launch(
87
+ share=True,
88
+ server_name="0.0.0.0",
89
+ server_port=7860,
90
+ enable_queue=True
91
+ )