bentebbutt committed on
Commit
d60875c
·
verified ·
1 Parent(s): c78ffc9

Update backend/app.py

Browse files
Files changed (1) hide show
  1. backend/app.py +3 -15
backend/app.py CHANGED
@@ -5,9 +5,6 @@ from fastapi.responses import JSONResponse
5
 
6
  from chainlit.auth import create_jwt
7
  from chainlit.server import app
8
- import chainlit as cl
9
- import requests
10
-
11
 
12
  @app.get("/custom-auth")
13
  async def custom_auth():
@@ -16,6 +13,7 @@ async def custom_auth():
16
  return JSONResponse({"token": token})
17
 
18
 
 
19
  import langroid as lr
20
  import langroid.language_models as lm
21
  import chainlit as cl
@@ -23,20 +21,10 @@ import chainlit as cl
23
 
24
  @cl.on_chat_start
25
  async def on_chat_start():
26
- lm_config = lm.OpenAIGPTConfig(
27
- chat_model='ollama/phi3',
28
- chat_context_length=4000, # set this based on model
29
- max_output_tokens=4096,
30
- temperature=0.2,
31
- stream=True,
32
- timeout=45,
33
-
34
-
35
- )
36
-
37
  agent = lr.ChatAgent(lr.ChatAgentConfig(llm=lm_config))
38
  task = lr.Task(agent, interactive=True)
39
-
40
  msg = "Help me with some questions"
41
  lr.ChainlitTaskCallbacks(task)
42
  await task.run_async(msg)
 
5
 
6
  from chainlit.auth import create_jwt
7
  from chainlit.server import app
 
 
 
8
 
9
  @app.get("/custom-auth")
10
  async def custom_auth():
 
13
  return JSONResponse({"token": token})
14
 
15
 
16
+
17
  import langroid as lr
18
  import langroid.language_models as lm
19
  import chainlit as cl
 
21
 
22
@cl.on_chat_start
async def on_chat_start():
    """Chainlit session entry point: wire a Langroid chat agent into the UI.

    Runs once when a new chat session opens. Builds an LLM config pointed
    at a locally served Ollama model, wraps it in a Langroid agent/task,
    attaches the Chainlit callbacks so task output streams to the browser,
    and kicks off the conversation with a seed message.
    """
    # LLM backend: Ollama-served Mistral via Langroid's OpenAI-compatible config.
    llm_settings = lm.OpenAIGPTConfig(chat_model="ollama/mistral")
    chat_agent = lr.ChatAgent(lr.ChatAgentConfig(llm=llm_settings))
    # interactive=True keeps the task waiting for user turns between responses.
    session_task = lr.Task(chat_agent, interactive=True)
    # Route Langroid task events (streamed tokens, agent output) into Chainlit.
    lr.ChainlitTaskCallbacks(session_task)
    opening_message = "Help me with some questions"
    await session_task.run_async(opening_message)