daveckw committed on
Commit e2a955f · 2 Parent(s): d9814cc ab0a935

Merge branch 'main' of https://huggingface.co/spaces/daveckw/custom-chatgpt

Files changed (2)
  1. app.py +2 -74
  2. requirements.txt +4 -0
app.py CHANGED
@@ -10,71 +10,6 @@ from llama_index import (
 from llama_index.node_parser import SimpleNodeParser
 from langchain import OpenAI
 import gradio as gr
-import sys
-import os
-import os.path
-import shutil
-
-sys.path.append("/my_functions")
-
-os.environ["OPENAI_API_KEY"] = "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"
-
-# Defining the parameters for the index
-max_input_size = 4096
-num_outputs = 1024
-max_chunk_overlap = 20
-
-prompt_helper = PromptHelper(
-    max_input_size,
-    num_outputs,
-    max_chunk_overlap,
-)
-
-llm_predictor = LLMPredictor(
-    llm=OpenAI(temperature=0.7, model_name="text-davinci-003", max_tokens=num_outputs)
-)
-
-service_context = ServiceContext.from_defaults(
-    llm_predictor=llm_predictor, prompt_helper=prompt_helper
-)
-
-
-def construct_index(directory_path):
-    if os.path.isfile("index.json"):
-        # Index file exists, so we'll load it and add new documents to it
-        index = GPTSimpleVectorIndex.load_from_disk(
-            "index.json", service_context=service_context
-        )
-        documents = SimpleDirectoryReader(directory_path).load_data()
-        for doc in documents:
-            index.insert(doc, service_context=service_context)
-        index.save_to_disk("index.json")
-    else:
-        # Index file doesn't exist, so we'll create a new index from scratch
-        documents = SimpleDirectoryReader(directory_path).load_data()
-        index = GPTSimpleVectorIndex.from_documents(
-            documents, service_context=service_context
-        )
-        index.save_to_disk("index.json")
-
-    # Define the paths to the source and destination folders
-    absolute_path = os.path.dirname(__file__)
-    src_folder = os.path.join(absolute_path, "docs/")
-    dest_folder = os.path.join(absolute_path, "indexed_documents/")
-
-    # Get a list of all the files in the source folder
-    files = os.listdir(src_folder)
-
-
-    # Move each file from the source folder to the destination folder,
-    # except for the "do_not_delete.txt" file
-    for file in files:
-        if file != "do_not_delete.txt":
-            src_path = os.path.join(src_folder, file)
-            dest_path = os.path.join(dest_folder, file)
-            shutil.move(src_path, dest_path)
-
-    return index


 def chatbot(input_text):
@@ -89,12 +24,5 @@ def chatbot(input_text):
     return response.response, response.get_formatted_sources()


-iface = gr.Interface(
-    fn=chatbot,
-    inputs=gr.inputs.Textbox(lines=7, label="Enter your text"),
-    outputs=[gr.Textbox(lines=30, label="Output"), gr.Textbox(lines=4, label="Source")],
-    title="Custom-trained AI Chatbot",
-)
-
-index = construct_index("docs")
-iface.launch(share=True)
+iface = gr.Interface(fn=greet, inputs="text", outputs="text")
+iface.launch()
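Note: in the post-merge app.py the interface is wired to fn=greet, a handler that does not appear in the hunks shown here, while chatbot() (which returns the answer together with its formatted sources) is still defined a few lines above. Below is a minimal sketch, not part of this commit, of how the two-output Textbox interface from the removed block could be wired to chatbot instead; the handler body is a stub, and index construction/loading is assumed to happen elsewhere in the file.

# Sketch only: wiring the Gradio interface to the existing chatbot() handler
# instead of greet(). Assumes gradio is installed; the handler body is a stub
# standing in for the real llama_index query (not part of this commit).
import gradio as gr


def chatbot(input_text):
    # Stub: the real handler queries the vector index and returns
    # (response text, formatted sources).
    return f"Echo: {input_text}", "sources go here"


iface = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(lines=7, label="Enter your text"),
    outputs=[gr.Textbox(lines=30, label="Output"), gr.Textbox(lines=4, label="Source")],
    title="Custom-trained AI Chatbot",
)

if __name__ == "__main__":
    iface.launch()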
requirements.txt ADDED
@@ -0,0 +1,4 @@
+llama_index
+langchain
+gradio
+