affine committed on
Commit ccaef53 • 1 Parent(s): d949859

Upload 4 files

Files changed (4)
  1. .gitignore +3 -0
  2. ask.py +64 -0
  3. chainlit.md +14 -0
  4. requirements.txt +4 -0
.gitignore ADDED
@@ -0,0 +1,3 @@
+ # created by virtualenv automatically
+ *
+ ./venv
ask.py ADDED
@@ -0,0 +1,64 @@
+ from langchain_groq import ChatGroq
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain.schema import StrOutputParser
+ from langchain.schema.runnable import Runnable
+ from langchain.schema.runnable.config import RunnableConfig
+ from chainlit.input_widget import Select
+ import chainlit as cl
+ from typing import Optional
+
+
+ @cl.author_rename
+ def rename(orig_author: str):
+     rename_dict = {"LLMMathChain": "Albert Einstein", "Chatbot": "Assistant"}
+     return rename_dict.get(orig_author, orig_author)
+
+ @cl.on_chat_start
+ async def on_chat_start():
+
+     # Sending an image with the local file path
+     # elements = [
+     #     cl.Image(name="image1", display="inline", path="groq.jpeg")
+     # ]
+     settings = await cl.ChatSettings(
+         [
+             Select(
+                 id="Model",
+                 label="Groq - Model",
+                 values=["mixtral-8x7b-32768", "llama2-70b-4096"],
+                 initial_index=0,
+             )
+         ]
+     ).send()
+
+     value = settings["Model"]
+
+     await cl.Message(content="Hello there, I am Groq. How can I help you?").send()
+
+     model = ChatGroq(temperature=0, model_name=value, api_key="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")
+     prompt = ChatPromptTemplate.from_messages(
+         [
+             (
+                 "system",
+                 "You're a helpful assistant",
+             ),
+             ("human", "{question}"),
+         ]
+     )
+     runnable = prompt | model | StrOutputParser()
+     cl.user_session.set("runnable", runnable)
+
+
+ @cl.on_message
+ async def on_message(message: cl.Message):
+     runnable = cl.user_session.get("runnable")  # type: Runnable
+
+     msg = cl.Message(content="")
+
+     async for chunk in runnable.astream(
+         {"question": message.content},
+         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
+     ):
+         await msg.stream_token(chunk)
+
+     await msg.send()
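
The committed ask.py embeds the Groq API key directly in the source. A safer pattern, sketched below, is to read the key from the environment at startup; the sketch assumes a `GROQ_API_KEY` environment variable is set before the app is launched, and the model name is hardcoded here purely for illustration.

```python
# Minimal sketch: read the Groq API key from the environment instead of
# hardcoding it in ask.py. Assumes GROQ_API_KEY is exported before launch.
import os

from langchain_groq import ChatGroq

api_key = os.environ.get("GROQ_API_KEY")
if not api_key:
    raise RuntimeError("GROQ_API_KEY is not set; export it before starting the app")

# Same constructor arguments as in ask.py, with the key injected from the environment.
model = ChatGroq(temperature=0, model_name="mixtral-8x7b-32768", api_key=api_key)
```

With the variable exported, the app is started with `chainlit run ask.py -w`; the `-w` flag reloads the server whenever the file changes.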
chainlit.md ADDED
@@ -0,0 +1,14 @@
+ # Welcome to Chainlit! 🚀🤖
+
+ Hi there, Developer! 👋 We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.
+
+ ## Useful Links 🔗
+
+ - **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) 📚
+ - **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/k73SQ3FyUh) to ask questions, share your projects, and connect with other developers! 💬
+
+ We can't wait to see what you create with Chainlit! Happy coding! 💻😊
+
+ ## Welcome screen
+
+ To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ langchain-groq
+ langchain
+ chainlit
+ openai
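
To reproduce the environment, something along the lines of `pip install -r requirements.txt` inside a fresh virtualenv should suffice, followed by `chainlit run ask.py -w` to start the chat UI. Note that ask.py only imports from langchain-groq, langchain, and chainlit, so the `openai` entry does not appear to be used by this commit.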