Quantum-API / app / app.py
import streamlit as st
import requests

# URLs of the FastAPI backend endpoints
API_URL_OLAMA = "http://localhost:7860/ollama/response"
API_URL_CODELAMA = "http://localhost:7860/codelama/run"
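
# Assumed response shapes for the two endpoints, inferred only from the keys
# this UI reads out of response.json() below (not verified against the
# FastAPI routes themselves):
#   POST /ollama/response  with {"question": "..."}  ->  {"response": "<answer text>"}
#   GET  /codelama/run                                ->  {"result": "<run output>"}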

def main():
    st.title("Quantum-API Chat Interface with Ollama and CodeLlama")

    user_input = st.text_input("Ask a question:")

    if user_input:
        if st.button("Chat with Ollama"):
            # POST the question to the FastAPI server's Ollama endpoint
            response = requests.post(API_URL_OLAMA, json={"question": user_input})
            if response.status_code == 200:
                # Display the response from Ollama
                st.write(f"Ollama says: {response.json()['response']}")
            else:
                st.error(f"Error contacting Ollama API: {response.status_code}")

        if st.button("Run Code with CodeLlama"):
            # GET the run result from the FastAPI server's CodeLlama endpoint
            response = requests.get(API_URL_CODELAMA)
            if response.status_code == 200:
                # Display the result from CodeLlama
                st.write(f"CodeLlama result: {response.json()['result']}")
            else:
                st.error(f"Error contacting CodeLlama API: {response.status_code}")

if __name__ == "__main__":
    main()
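
# To try this locally (assuming Streamlit is installed and the FastAPI backend
# from this repo is already serving on port 7860):
#     streamlit run app/app.py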