import os

import gradio as gr
from langchain import OpenAI
from llama_index import SimpleDirectoryReader, GPTSimpleVectorIndex, LLMPredictor, PromptHelper

def construct_index(directory_path):
    """Build a vector index from the documents in directory_path and save it to index.json."""
    # Prompt/context sizing passed to the LLM and the prompt helper
    max_input_size = 4096
    num_outputs = 2000
    max_chunk_overlap = 20
    chunk_size_limit = 600

    llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.5, model_name="gpt-3.5-turbo", max_tokens=num_outputs))
    prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)

    # Read the source documents, build the index, and persist it so ask_ai can load it later.
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
    index.save_to_disk('index.json')
    return index

construct_index("data")

def ask_ai(question, api):
    """Answer a question against the saved index, optionally using a key entered in the UI."""
    # Prefer a key supplied through the UI; otherwise keep the key already in the environment.
    if api != "":
        os.environ["OPENAI_API_KEY"] = api
    index = GPTSimpleVectorIndex.load_from_disk('index.json')
    response = index.query(question, response_mode="compact")
    return response.response

api_key = gr.inputs.Textbox(label="OpenAI API Key")
iface = gr.Interface(fn=ask_ai, inputs=["text", api_key], outputs="text", title="Chatbot")
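# A minimal usage sketch for querying without the web UI (assumes index.json was
# written by construct_index above and OPENAI_API_KEY is set; the question string
# is illustrative only):
#
#     print(ask_ai("What topics do the indexed documents cover?", ""))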


iface.launch()