import os
from openai import AsyncOpenAI
from fastapi.responses import JSONResponse
from chainlit.auth import create_jwt
from chainlit.server import app
import chainlit as cl
import requests
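
# Startup check: ping the Ollama endpoint once and log whether it is reachable.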
def connect_to_ollama():
    try:
        response = requests.get('https://bentebbutt-langroid-custom-front-end.hf.space:11434')
        if response.status_code == 200:
            print('Connected to ollama server')
        else:
            print('Failed to connect to ollama server')
    except requests.exceptions.RequestException as e:
        print(f'Error: could not connect to ollama server - {str(e)}')

connect_to_ollama()
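
# Custom auth endpoint: issues a Chainlit JWT so the front end can sign in as a test user.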
@app.get("/custom-auth")
async def custom_auth():
    # Verify the user's identity with custom logic.
    token = create_jwt(cl.User(identifier="Test User"))
    return JSONResponse({"token": token})

import langroid as lr
import langroid.language_models as lm
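
# On each new chat session, build a Langroid ChatAgent backed by the Ollama-served
# phi3 model at the base_url below and run it as an interactive Chainlit task.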
@cl.on_chat_start
async def on_chat_start():
    lm_config = lm.OpenAIGPTConfig(
        chat_model='ollama/phi3',
        chat_context_length=4000,  # set this based on model
        max_output_tokens=4096,
        temperature=0.2,
        stream=True,
        timeout=45,
        base_url="https://bentebbutt-langroid-custom-front-end.hf.space",
    )
    agent = lr.ChatAgent(lr.ChatAgentConfig(llm=lm_config))
    task = lr.Task(agent, interactive=True)
    msg = "Help me with some questions"
    lr.ChainlitTaskCallbacks(task)
    await task.run_async(msg)