import os

import gradio as gr
from langchain import OpenAI
from llama_index import (
    GPTSimpleVectorIndex,
    LLMPredictor,
    PromptHelper,
    SimpleDirectoryReader,
)

# Set your OpenAI API key before the index is built.
# (Placeholder value -- never commit a real key to source control.)
os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"


def construct_index(directory_path):
    # Prompt/response size settings for the index.
    max_input_size = 4096
    num_outputs = 2000
    max_chunk_overlap = 20
    chunk_size_limit = 600

    llm_predictor = LLMPredictor(
        llm=OpenAI(temperature=0.5, model_name="gpt-3.5-turbo", max_tokens=num_outputs)
    )
    prompt_helper = PromptHelper(
        max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit
    )

    # Load the documents, build the vector index, and persist it to disk
    # so ask_ai() can load it later (older llama_index API, matching the imports above).
    documents = SimpleDirectoryReader(directory_path).load_data()
    index = GPTSimpleVectorIndex(
        documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper
    )
    index.save_to_disk("index.json")
    return index


construct_index("data")


def ask_ai(question, api):
    # Use the key supplied in the UI if one was provided; otherwise keep the
    # key already set in the environment.
    if api:
        os.environ["OPENAI_API_KEY"] = api

    index = GPTSimpleVectorIndex.load_from_disk("index.json")
    response = index.query(question, response_mode="compact")
    return response.response


api_key = gr.inputs.Textbox(label="OpenAI API Key")
iface = gr.Interface(fn=ask_ai, inputs=["text", api_key], outputs="text", title="Chatbot")
iface.launch()