import gradio as gr
from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
from langchain import OpenAI
import os
from IPython.display import Markdown, display


def construct_index(directory_path):
    """Build a vector index from the documents in directory_path and save it to disk."""
    # set maximum input size for the LLM context window
    max_input_size = 4096
    # set number of output tokens
    num_outputs = 2000
    # set maximum chunk overlap
    max_chunk_overlap = 20
    # set chunk size limit
    chunk_size_limit = 600

    # define the LLM and prompt helper (llama_index ~0.5.x / langchain APIs)
    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    # wrap them in a ServiceContext so the settings above are actually applied when building the index
    service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)

    # load the documents and build the index
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)

    # persist the index so ask_ai() can reload it later
    index.save_to_disk('index.json')

    return index




'''
def ask_ai():
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    while True: 
        query = input("What do you want to ask? ")
        response = index.query(query, response_mode="compact")
        display(Markdown(f"Response: <b>{response.response}</b>"))
'''
# Define the ask_ai() function used by the Gradio interface
def ask_ai(question, api):
    # fall back to the hard-coded default key if the user leaves the API key field empty
    if api == "":
        api = "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"
    os.environ["OPENAI_API_KEY"] = api
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    response = index.query(question, response_mode="compact")
    return response.response


construct_index("context_data")

# Create Gradio interface to prompt for API key
api_key = gr.inputs.Textbox(label="Enter your OpenAI API key:")

# Define the interface
iface = gr.Interface(fn=ask_ai, inputs=["text", api_key], outputs="text" ,title="Jim's Chatbot")

# Start the interface
iface.launch()
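
# Illustrative only (not part of the original app): once index.json has been built,
# the same question-answering path can be exercised without the web UI by calling
# ask_ai() directly. The question string below is just an example value, and the
# empty second argument triggers the hard-coded fallback key above.
#
#     answer = ask_ai("What topics are covered in the context documents?", "")
#     print(answer)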