Commit a5930e3
1 parent: c722e8c
refactor: get function
app.py CHANGED
@@ -28,17 +28,39 @@ def get_pdf_text(pdf_docs):
 # Assignment
 # Write the text extraction functions below
 
-def get_text_file(
-
-
-
-
-
+def get_text_file(text_docs):
+    temp_dir = tempfile.TemporaryDirectory()
+    temp_filepath = os.path.join(temp_dir.name, text_docs.name)
+    with open(temp_filepath, "wb") as f:
+        f.write(text_docs.getvalue())
+    text_loader = TextLoader(temp_filepath)
+    text_doc = text_loader.load()
+    return text_doc
+
+
+def get_csv_file(csv_docs):
+    temp_dir = tempfile.TemporaryDirectory()
+    temp_filepath = os.path.join(temp_dir.name, csv_docs.name)
+    with open(temp_filepath, "wb") as f:
+        f.write(csv_docs.getvalue())
+    csv_loader = CSVLoader(temp_filepath)
+    csv_doc = csv_loader.load()
+    return csv_doc
+
+def get_json_file(json_docs):
+    temp_dir = tempfile.TemporaryDirectory()
+    temp_filepath = os.path.join(temp_dir.name, json_docs.name)
+    with open(temp_filepath, "wb") as f:
+        f.write(json_docs.getvalue())
+    json_loader = JSONLoader(
+        file_path=temp_filepath,
+        jq_schema='.messages[].content',
+        text_content=False
+    )
+    json_doc = json_loader.load()
+    return json_doc
 
-def get_json_file(docs):
-    pass
 
-
 # A function that processes documents and splits them into text chunks.
 def get_text_chunks(documents):
     text_splitter = RecursiveCharacterTextSplitter(
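A note on the JSONLoader call: it parses the uploaded file with the jq package, and with jq_schema='.messages[].content' plus text_content=False each element of the top-level messages array contributes its content field as one Document, with non-string values serialized rather than rejected. This assumes the uploaded JSON is shaped like a chat export, e.g. {"messages": [{"content": "..."}, ...]}; files with a different shape would need a different jq_schema.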
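All three new loaders follow the same pattern: the uploaded file object (anything exposing .name and .getvalue(), such as a Streamlit UploadedFile) is written to a path inside a tempfile.TemporaryDirectory(), and that path is handed to the matching LangChain loader; the directory object stays referenced until the function returns, so load() reads the file before cleanup. Below is a minimal dispatch sketch assuming Streamlit-style uploads; the process_uploaded_files helper is hypothetical and not part of this commit.

import os

# Hypothetical helper (not part of this commit): route each uploaded file to the
# loader that matches its extension and collect the returned Documents.
def process_uploaded_files(uploaded_files):
    documents = []
    for uploaded in uploaded_files:
        ext = os.path.splitext(uploaded.name)[1].lower()
        if ext == ".txt":
            documents.extend(get_text_file(uploaded))
        elif ext == ".csv":
            documents.extend(get_csv_file(uploaded))
        elif ext == ".json":
            documents.extend(get_json_file(uploaded))
    return documents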
@@ -64,7 +86,7 @@ def get_vectorstore(text_chunks):
 def get_conversation_chain(vectorstore):
     gpt_model_name = 'gpt-3.5-turbo'
     llm = ChatOpenAI(model_name = gpt_model_name) # load the gpt-3.5 model
-
+
     # Create memory to store the conversation history.
     memory = ConversationBufferMemory(
         memory_key='chat_history', return_messages=True)
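The rest of get_conversation_chain is outside this hunk. A minimal sketch of how the memory created above is typically combined with the vectorstore, assuming the unchanged tail of the function follows the common LangChain pattern (this is an assumption, not code from the commit):

    # Assumed continuation of get_conversation_chain (not shown in this diff):
    # combine the model, the vectorstore retriever and the buffer memory
    # into one conversational retrieval chain.
    # Requires: from langchain.chains import ConversationalRetrievalChain
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory
    )
    return conversation_chain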