# NOTE: the original export header here was noise ("Spaces:" / "Runtime error")
# left over from the hosting page; it carried no content.
# -*- coding: utf-8 -*-
"""custom_chatbot.ipynb

Automatically generated by Colaboratory.

Original file is located at
https://colab.research.google.com/drive/1xT4n5rN6yNyzf-CO8Pifz0yWCEI4VmjV

# Install the dependencies

Run the code below to install the dependencies we need for our functions.
"""
# Commented out IPython magic to ensure Python compatibility.
# %pip install llama-index
# %pip install langchain
# %pip install gradio
# %pip install openai
"""### **How to Train with your data. ** | |
You can use your github repository link. Make sure repository name should be same as given repo. | |
""" | |
# Clone the sample training-data repository (skipped if already present, so
# re-running the script does not fail on an existing directory).
# The original notebook used the IPython shell magic `!git clone ...`, which is
# not valid plain Python; subprocess.run with an argument list is the portable
# (and shell-injection-safe) equivalent.
import subprocess
from pathlib import Path

if not Path("context_data").exists():
    subprocess.run(
        ["git", "clone", "https://github.com/talib-raath/context_data.git"],
        check=True,
    )
"""# Define the functions | |
The following code defines the functions we need to construct the index and query it | |
""" | |
# Notebook-wide imports: llama-index supplies the vector index machinery,
# langchain's OpenAI wrapper supplies the LLM, and IPython helpers render output.
import os
import sys

from IPython.display import Markdown, display
from langchain import OpenAI
from llama_index import (
    GPTListIndex,
    GPTSimpleVectorIndex,
    LLMPredictor,
    PromptHelper,
    SimpleDirectoryReader,
    readers,
)
def construct_index(directory_path, index_path="index.json"):
    """Build a GPT vector index from every file under *directory_path*.

    Each file is split into chunks, embedded with OpenAI's embeddings API,
    persisted to *index_path*, and the in-memory index is returned.

    Parameters
    ----------
    directory_path : str
        Folder whose files are read and indexed.
    index_path : str, optional
        Where the serialized index is saved. Parameterized (instead of the
        previously hard-coded 'index.json') so several indexes can coexist;
        the default preserves the original behaviour.

    Returns
    -------
    GPTSimpleVectorIndex
        The freshly built index.
    """
    # Prompt-shaping constants, all in tokens.
    max_input_size = 4096      # model context window
    num_outputs = 2000         # tokens reserved for the completion
    max_chunk_overlap = 20     # overlap between adjacent document chunks
    chunk_size_limit = 600     # maximum tokens per chunk

    # gpt-3.5-turbo at moderate temperature; max_tokens caps each response.
    llm_predictor = LLMPredictor(
        llm=OpenAI(temperature=0.5, model_name="gpt-3.5-turbo", max_tokens=num_outputs)
    )
    prompt_helper = PromptHelper(
        max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit
    )

    documents = SimpleDirectoryReader(directory_path).load_data()

    # NOTE(review): llm_predictor and prompt_helper are constructed but never
    # passed to from_documents(), so the index is built with library defaults —
    # identical to the original code; wiring them in would need a version-specific
    # service-context API, so it is only flagged here.
    index = GPTSimpleVectorIndex.from_documents(documents)
    index.save_to_disk(index_path)
    return index
# Interactive REPL-style Q&A loop. NOTE: a second, single-shot ask_ai(question)
# is defined later in this file for the Gradio UI and shadows this one.
def ask_ai():
    """Prompt the user in a loop and render each indexed answer as Markdown."""
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    while True:
        question = input("What do you want to ask? ")
        answer = index.query(question, response_mode="compact")
        display(Markdown(f"Response: <b>{answer.response}</b>"))
"""# Set OpenAI API Key | |
You can use this key also but it may expire if it does not work you can get his own api key | |
**Use this Key** | |
"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" | |
You need an OPENAI API key to be able to run this code. | |
If you don't have one yet, get it by [signing up](https://platform.openai.com/overview). Then click your account icon on the top right of the screen and select "View API Keys". Create an API key. | |
Then run the code below and paste your API key into the text input. | |
""" | |
os.environ["OPENAI_API_KEY"] = input("Paste your OpenAI key here and hit enter:") | |
"""#Construct an index | |
Now we are ready to construct the index. This will take every file in the folder 'data', split it into chunks, and embed it with OpenAI's embeddings API. | |
**Notice:** running this code will cost you credits on your OpenAPI account ($0.02 for every 1,000 tokens). If you've just set up your account, the free credits that you have should be more than enough for this experiment. | |
""" | |
construct_index("context_data") | |
# Re-imports for the UI section — redundant with the imports near the top of
# the file, but kept so this notebook cell remains runnable on its own.
import tkinter as tk

from IPython.display import Markdown, display
from langchain import OpenAI
from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper
# Single-shot variant of ask_ai used by the Gradio interface below
# (replaces the interactive loop version defined earlier).
def ask_ai(question):
    """Return the saved index's compact-mode answer text for *question*."""
    idx = GPTSimpleVectorIndex.load_from_disk('index.json')
    result = idx.query(question, response_mode="compact")
    return result.response
# Define the GUI
class ChatBotGUI:
    """Minimal Tkinter chat window.

    Lays out a prompt label, a single-line question entry, a submit button,
    and a text box that accumulates the conversation transcript.
    """

    def __init__(self, master):
        self.master = master
        master.title("Chat Bot")

        # Prompt label plus the single-line input field for the question.
        self.label = tk.Label(master, text="Ask me anything:")
        self.label.pack()
        self.entry = tk.Entry(master)
        self.entry.pack()

        # Button that forwards the current entry contents to the bot.
        self.button = tk.Button(master, text="Submit", command=self.submit_question)
        self.button.pack()

        # Transcript box showing the conversation so far.
        self.textbox = tk.Text(master)
        self.textbox.pack()

    def submit_question(self):
        """Query the index with the entry text, append both sides to the
        transcript, and clear the entry for the next question."""
        asked = self.entry.get()
        answered = ask_ai(asked)
        self.textbox.insert(tk.END, "You: " + asked + "\n")
        self.textbox.insert(tk.END, "Bot: " + answered + "\n\n")
        self.entry.delete(0, tk.END)
# The Tkinter front-end is kept below for reference only (it is a no-op string
# literal); the Gradio web UI is what actually runs.
'''
root = tk.Tk()
chatbot_gui = ChatBotGUI(root)
root.mainloop()
'''
import gradio as gr

# Expose ask_ai through a simple text-in / text-out web interface and launch
# it with a publicly shareable link.
iface = gr.Interface(fn=ask_ai, inputs="text", outputs="text", title="Chatbot")
iface.launch(share=True)