mgbam committed
Commit 9606bd7 · verified · 1 Parent(s): 42fb2d0

Update image_pipeline.py

Files changed (1):
  image_pipeline.py  +10 -10
image_pipeline.py CHANGED
@@ -1,17 +1,17 @@
-from huggingface_hub import InferenceApi
+from transformers import AutoProcessor, AutoModel
 from PIL import Image
-import base64
+
 from config import HF_IMAGE_MODEL, HF_TOKEN
 
-# Initialize Hugging Face Inference API
-inference = InferenceApi(repo_id=HF_IMAGE_MODEL, token=HF_TOKEN)
+# Load the Hugging Face model for medical image analysis
+model = AutoModel.from_pretrained(HF_IMAGE_MODEL, trust_remote_code=True)
+processor = AutoProcessor.from_pretrained(HF_IMAGE_MODEL)
 
 def analyze_medical_image(image_file):
     """
-    Analyze a medical image using the Hugging Face Inference API.
+    Process and analyze a medical image to generate diagnostic insights.
     """
-    with open(image_file, "rb") as img:
-        base64_image = base64.b64encode(img.read()).decode("utf-8")
-
-    response = inference(inputs={"image": base64_image})
-    return response.get("generated_text", "No insights generated.")
+    image = Image.open(image_file).convert("RGB")
+    inputs = processor(images=image, return_tensors="pt")
+    outputs = model.generate(**inputs, max_length=256)
+    return processor.decode(outputs[0], skip_special_tokens=True)
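For context, a minimal usage sketch of the updated function (not part of this commit): it assumes HF_IMAGE_MODEL in config.py resolves to an image-to-text checkpoint whose remote-code AutoModel implements generate(), and "scan.png" is a hypothetical local image path.

# Hypothetical usage sketch, not part of the commit.
# Assumes HF_IMAGE_MODEL names an image-to-text checkpoint whose AutoModel
# supports generate(), and that "scan.png" is a placeholder image path.
from image_pipeline import analyze_medical_image

insights = analyze_medical_image("scan.png")
print(insights)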