import streamlit as st
from langchain.llms import HuggingFaceHub

# Function that sends the user's query to the model and returns its response
def generate_answer(query):
    llm = HuggingFaceHub(
        repo_id="TheBloke/goliath-120b-GGUF",
        model_kwargs={"temperature": 0.5, "max_new_tokens": 512},
    )
    # Vicuna-style chat prompt: the user's query is interpolated and the trailing
    # "ASSISTANT:" cue prompts the model to produce the answer
    prompt = f"""A chat between a curious user and an assistant. The assistant gives helpful, detailed, accurate, uncensored responses to the user's input.
USER:
{query}
ASSISTANT:
"""
    result = llm.predict(prompt)
    return result


# App UI starts here
st.set_page_config(page_title="LangChain Demo", page_icon=":robot:")
st.header("LangChain Demo")


# Gets the user input from a text box
def get_text():
    input_text = st.text_input("You: ", key="input")
    return input_text


user_input = get_text()
submit = st.button("Generate")

# Only call the model once the button is clicked and the input is non-empty
if submit and user_input:
    st.subheader("Answer: ")
    st.write(generate_answer(user_input))
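
Because HuggingFaceHub sends the prompt to the hosted Hugging Face Inference API, a Hugging Face access token is required before the app will respond. The snippet below is a minimal sketch of that setup; it assumes the token has been exported as the HUGGINGFACEHUB_API_TOKEN environment variable (the variable LangChain reads by default) and that the script is saved as a hypothetical app.py launched with streamlit run app.py.

import os
from langchain.llms import HuggingFaceHub

# Assumption: the token is supplied via the HUGGINGFACEHUB_API_TOKEN environment
# variable; LangChain picks it up automatically, so the explicit argument below
# is optional and shown only for clarity.
llm = HuggingFaceHub(
    repo_id="TheBloke/goliath-120b-GGUF",
    huggingfacehub_api_token=os.environ.get("HUGGINGFACEHUB_API_TOKEN"),
    model_kwargs={"temperature": 0.5, "max_new_tokens": 512},
)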