import requests

import chainlit as cl
import langroid as lr
import langroid.language_models as lm
from chainlit.auth import create_jwt
from chainlit.server import app
from fastapi.responses import JSONResponse

def connect_to_ollama():
    """Smoke-test the remote Ollama endpoint before starting the app."""
    try:
        response = requests.get('https://bentebbutt-langroid-custom-front-end.hf.space:11434')
        if response.status_code == 200:
            print('Connected to ollama server')
        else:
            print('Failed to connect to ollama server')
    except requests.exceptions.RequestException as e:
        print(f'Error: could not connect to ollama server - {str(e)}')


connect_to_ollama()

# Register the endpoint on Chainlit's underlying FastAPI app so it is actually
# reachable; "/custom-auth" follows the path used in Chainlit's custom-auth docs.
@app.get("/custom-auth")
async def custom_auth():
    # Verify the user's identity with custom logic.
    token = create_jwt(cl.User(identifier="Test User"))
    return JSONResponse({"token": token})
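
# For illustration only (hypothetical client-side sketch, URL assumed): a front
# end could fetch the JWT from this endpoint and present it as a Bearer token:
#
#     resp = requests.get("http://localhost:8000/custom-auth")
#     headers = {"Authorization": f"Bearer {resp.json()['token']}"}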

@cl.on_chat_start
async def on_chat_start():
    lm_config = lm.OpenAIGPTConfig(
        chat_model="ollama/phi3",
        chat_context_length=4000,  # set this based on the model
        max_output_tokens=4096,
        temperature=0.2,
        stream=True,
        timeout=45,
        # Langroid's OpenAIGPTConfig takes the endpoint as `api_base`;
        # an unrecognized `base_url` kwarg would be silently ignored.
        api_base="https://bentebbutt-langroid-custom-front-end.hf.space",
    )
    agent = lr.ChatAgent(lr.ChatAgentConfig(llm=lm_config))
    task = lr.Task(agent, interactive=True)
    msg = "Help me with some questions"
    # Wire Langroid's task events into the Chainlit UI, then start the loop.
    lr.ChainlitTaskCallbacks(task)
    await task.run_async(msg)
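
# To try this locally (assuming this file is saved as app.py):
#     chainlit run app.py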