Runtime error
Update app.py
app.py CHANGED
@@ -108,6 +108,15 @@ def chat_response(message, history):
     except:
         return "I apologize, but I couldn't generate a proper response. Please try again."
 
+# Define a Gradio Interface for the API
+api_interface = gr.Interface(
+    fn=chat_response,
+    inputs=gr.inputs.Textbox(lines=2, placeholder="Enter your message here..."),
+    outputs=gr.outputs.Textbox(label="Response"),
+    title="Admissions Agent API",
+    description="API endpoint for interacting with the AI-powered admissions coordinator."
+)
+
 # Load model and tokenizer
 print("Loading model...")
 model, tokenizer = load_model()
@@ -127,4 +136,6 @@ demo = gr.ChatInterface(
 )
 
 if __name__ == "__main__":
-
+    # Launch both the chat interface and the API interface
+    demo.launch()
+    api_interface.launch(share=True)  # This will expose the API endpoint
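Note on this change: gr.inputs and gr.outputs are the Gradio 2.x-era namespaces and no longer exist in current Gradio releases, and the second launch() call won't be reached while the first one is serving (launch() blocks by default, and a Space only exposes a single app), either of which can produce the kind of "Runtime error" status shown above. Below is a minimal sketch of the same idea against current Gradio (4.x); the stub load_model / chat_response bodies and the gr.TabbedInterface wrapper are assumptions for illustration, not the Space's actual code.

import gradio as gr

# Placeholder stubs so the sketch is self-contained; in the real app.py these are
# the load_model() and chat_response() defined earlier in the file.
def load_model():
    return None, None  # stand-in for the actual model/tokenizer loading

def chat_response(message, history):
    try:
        return f"You said: {message}"  # stand-in for the actual model call
    except Exception:
        return "I apologize, but I couldn't generate a proper response. Please try again."

print("Loading model...")
model, tokenizer = load_model()

# gr.Interface passes one value per input component, so wrap chat_response to
# supply the chat history it expects but the API tab doesn't have.
api_interface = gr.Interface(
    fn=lambda message: chat_response(message, []),
    inputs=gr.Textbox(lines=2, placeholder="Enter your message here..."),
    outputs=gr.Textbox(label="Response"),
    title="Admissions Agent API",
    description="API endpoint for interacting with the AI-powered admissions coordinator.",
)

chat_ui = gr.ChatInterface(chat_response, title="Admissions Agent")

# One app, one launch(): a second launch() after a blocking one never runs,
# and a Space only serves a single server.
app = gr.TabbedInterface([chat_ui, api_interface], ["Chat", "API"])

if __name__ == "__main__":
    app.launch()

Gradio also auto-generates a programmatic API for every event (the "Use via API" footer and gradio_client), so a separate Interface may not even be needed just to get an HTTP endpoint.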