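"""Gradio demo: a simple chatbot UI that sends each question to the
tiiuae/falcon-7b-instruct model through LangChain's HuggingFaceHub wrapper,
using a Hugging Face API token supplied by the user in the interface."""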
import gradio as gr
# Legacy (pre-0.1) LangChain import layout; newer releases expose these
# classes under langchain_community / langchain_core instead.
from langchain import HuggingFaceHub, PromptTemplate, LLMChain
from langchain.memory import ConversationBufferMemory

# Instruction-tuned model served through the Hugging Face Inference API.
repo_id = "tiiuae/falcon-7b-instruct"

template = """You are a chatbot having a conversation with a human.

{chat_history}
Human: {human_input}
Chatbot:"""

# The prompt expects the running chat history plus the latest human message.
prompt = PromptTemplate(template=template, input_variables=["chat_history", "human_input"])

def generate_response(question, huggingfacehub_api_token, temperature=0.6, max_new_tokens=500):
    try:
        # Memory is recreated on every call, so history does not persist
        # between questions; chat_history is passed explicitly below.
        memory = ConversationBufferMemory(memory_key="chat_history")
        llm = HuggingFaceHub(
            huggingfacehub_api_token=huggingfacehub_api_token,
            repo_id=repo_id,
            model_kwargs={"temperature": temperature, "max_new_tokens": max_new_tokens},
        )
        llm_chain = LLMChain(prompt=prompt, llm=llm, memory=memory)
        response = llm_chain.predict(chat_history="", human_input=question)
    except Exception as e:
        # Catch any failure (invalid token, network error, model error) so the
        # UI shows a friendly message instead of crashing.
        response = "An error occurred while processing your request. Please try again later."
        print(f"Error: {str(e)}")
    return response


inputs = [
    gr.Textbox(label="Question"),
    gr.Textbox(label="HuggingFace API Token", type="password"),
    gr.Slider(minimum=0.1, maximum=2.0, value=0.6, label="Temperature"),
    gr.Slider(minimum=100, maximum=1000, value=500, label="Max New Tokens"),
]

outputs = gr.HTML(label="Response")

title = "Chatbot Interface [BETA]"
description = "Ask a question and get helpful answers from the AI assistant. Create a Hugging Face account and get your API token from https://huggingface.co/settings/token."
theme = gr.themes.Base(primary_hue="blue", secondary_hue="blue")

# Each example row provides a value for every input component (API token left blank).
examples = [
    ["Write a poem on Iron Man", "", 0.6, 500],
    ["What are the benefits of using Python?", "", 0.6, 500],
]

iface = gr.Interface(
    fn=generate_response,
    inputs=inputs,
    outputs=outputs,
    theme=theme,
    title=title,
    description=description,
    allow_flagging="never",
    examples=examples,
)

iface.launch(debug=True, show_api=False)