import logging

import requests
import gradio as gr

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Configuration for the model
API_URL = "https://api-inference.huggingface.co/models/aai540-group3/diabetes-readmission"

# Define constants for the Gradio interface
AGE_RANGE = (0, 100)
TIME_IN_HOSPITAL_RANGE = (1, 14)
NUM_PROCEDURES_RANGE = (0, 10)
NUM_MEDICATIONS_RANGE = (0, 20)
NUMBER_DIAGNOSES_RANGE = (1, 10)
READMITTED_CHOICES = ["<30", ">30", "NO"]


# Define the inference function
def predict(
    age, time_in_hospital, num_procedures, num_medications, number_diagnoses,
    metformin, repaglinide, nateglinide, chlorpropamide, glimepiride,
    glipizide, glyburide, pioglitazone, rosiglitazone, acarbose, insulin,
    readmitted,
):
    # Create a dictionary from the input features
    input_data = {
        "age": age,
        "time_in_hospital": time_in_hospital,
        "num_procedures": num_procedures,
        "num_medications": num_medications,
        "number_diagnoses": number_diagnoses,
        "metformin": metformin,
        "repaglinide": repaglinide,
        "nateglinide": nateglinide,
        "chlorpropamide": chlorpropamide,
        "glimepiride": glimepiride,
        "glipizide": glipizide,
        "glyburide": glyburide,
        "pioglitazone": pioglitazone,
        "rosiglitazone": rosiglitazone,
        "acarbose": acarbose,
        "insulin": insulin,
        "readmitted": readmitted,
    }

    try:
        # Make a request to the Hugging Face inference API
        response = requests.post(API_URL, json={"inputs": input_data})
        response.raise_for_status()  # Raise an error for bad responses
        prediction = response.json()
        logger.info(f"Prediction received: {prediction}")
        # NOTE: the original return statement was truncated here; the message
        # formatting below is an assumed minimal completion.
        return f"Prediction: {prediction}"
    except requests.exceptions.RequestException as e:
        # Assumed completion: log and surface request failures instead of
        # letting the interface crash.
        logger.error(f"Request failed: {e}")
        return f"Error: {e}"
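

# --- Gradio interface (sketch) ---
# The `gradio` import and the *_RANGE / READMITTED_CHOICES constants above imply
# that an interface wiring predict() to UI components followed, but it is missing
# from this excerpt. The layout below is an assumed sketch, not the original code:
# component types, labels, and the binary treatment of the medication features are
# illustrative choices.
demo = gr.Interface(
    fn=predict,
    inputs=[
        gr.Slider(*AGE_RANGE, step=1, label="Age"),
        gr.Slider(*TIME_IN_HOSPITAL_RANGE, step=1, label="Time in hospital (days)"),
        gr.Slider(*NUM_PROCEDURES_RANGE, step=1, label="Number of procedures"),
        gr.Slider(*NUM_MEDICATIONS_RANGE, step=1, label="Number of medications"),
        gr.Slider(*NUMBER_DIAGNOSES_RANGE, step=1, label="Number of diagnoses"),
        # Medication features: assumed here to be simple on/off flags.
        *[
            gr.Checkbox(label=med)
            for med in [
                "metformin", "repaglinide", "nateglinide", "chlorpropamide",
                "glimepiride", "glipizide", "glyburide", "pioglitazone",
                "rosiglitazone", "acarbose", "insulin",
            ]
        ],
        gr.Radio(choices=READMITTED_CHOICES, label="readmitted"),
    ],
    outputs=gr.Textbox(label="Prediction"),
    title="Diabetes Readmission Prediction",
)

if __name__ == "__main__":
    demo.launch()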