File size: 3,154 Bytes
d14246a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import openai
import streamlit as st
import os
import pickle
import logging

from llama_index import  SimpleDirectoryReader
from llama_index.chat_engine import CondenseQuestionChatEngine;
from llama_index.response_synthesizers import get_response_synthesizer
from llama_index import Prompt, SimpleDirectoryReader

from logging import getLogger, StreamHandler, Formatter

# Where the llama_index vector store is persisted between sessions.
index_name = "./storage"
# Pickle file remembering which documents the user has already imported.
pkl_name = "stored_documents.pkl"

# Condense-question prompt (Japanese): turns the chat history plus the new
# user message into a standalone search query for the query engine.
custom_prompt = Prompt("""\
  以下はこれまでの会話履歴と、ドキュメントを検索して回答する必要がある、ユーザーからの会話文です。
  会話と新しい会話文に基づいて、検索クエリを作成します。回答は日本語で行います。
  新しい会話文が挨拶の場合、挨拶を返してください。
  新しい会話文が質問の場合、検索した結果の回答を返してください。
  答えがわからない場合は正直にわからないと回答してください。
  会話履歴:
  {chat_history}
  新しい会話文:
  {question}
  Search query:
""")

# Chat history handed to the chat engine; starts empty on each run.
chat_history = []

logging.basicConfig(level=logging.INFO)
# Bug fix: the original passed the literal string "__name__", which creates a
# logger named "__name__" instead of one named after this module.
logger = logging.getLogger(__name__)
logger.debug("調査用ログ")

# Seed the counter used as the uploader widget's key; bumping it after a
# successful import gives the widget a fresh key, clearing the selection.
st.session_state.setdefault("file_uploader_key", 0)

st.title("📝 ImportFile")

# File picker for the documents to index; only text-like formats are accepted.
uploaded_file = st.file_uploader(
    "Upload an article",
    type=("txt", "md", "pdf"),
    key=st.session_state["file_uploader_key"],
)
if st.button("import", use_container_width=True):
    # Guard: clicking "import" with no file selected used to raise an
    # AttributeError on uploaded_file.name that was silently swallowed below.
    if uploaded_file is None:
        st.warning("Please choose a file before importing.")
    else:
        filepath = None
        imported = False
        try:
            # Make sure the target directory exists before writing into it.
            os.makedirs("documents", exist_ok=True)
            filepath = os.path.join('documents', os.path.basename(uploaded_file.name))
            logger.info(filepath)
            # `with` closes the file on exit; the original's extra f.close()
            # inside the with-block was redundant and has been dropped.
            with open(filepath, 'wb') as f:
                f.write(uploaded_file.getvalue())
            document = SimpleDirectoryReader(input_files=[filepath]).load_data()[0]
            logger.info(document)
            # stored_docs may not exist yet on a fresh session (the listing
            # code below guards for its absence, so creation happens here).
            st.session_state.setdefault("stored_docs", [])
            st.session_state.stored_docs.append(uploaded_file.name)
            logger.info(st.session_state.stored_docs)
            # Add the new document to the persistent index and rebuild the
            # query/chat engines so they see it immediately.
            st.session_state.index.insert(document=document)
            st.session_state.index.storage_context.persist(persist_dir=index_name)
            response_synthesizer = get_response_synthesizer(response_mode='refine')
            st.session_state.query_engine = st.session_state.index.as_query_engine(response_synthesizer=response_synthesizer)
            st.session_state.chat_engine = CondenseQuestionChatEngine.from_defaults(
                query_engine=st.session_state.query_engine,
                condense_question_prompt=custom_prompt,
                chat_history=chat_history,
                verbose=True,
            )
            # Remember the imported document names across sessions.
            with open(pkl_name, "wb") as f:
                pickle.dump(st.session_state.stored_docs, f)
            imported = True
        except Exception:
            # logger.exception records the traceback, not just str(e).
            logger.exception("failed to import uploaded file")
            # Clean up the partially written file on failure.
            if filepath is not None and os.path.exists(filepath):
                os.remove(filepath)
        if imported:
            # Bug fix: st.experimental_rerun() raises RerunException, an
            # Exception subclass — inside the try it was caught by the broad
            # handler, which logged it as an error AND deleted the file that
            # had just been imported. Rerun must happen outside the try.
            st.session_state["file_uploader_key"] += 1
            st.experimental_rerun()

st.subheader("Import File List")
# stored_docs is created on the first successful import, so it can be
# absent in a fresh session — only render the list when it exists.
if "stored_docs" in st.session_state:
    imported_names = st.session_state.stored_docs
    logger.info(imported_names)
    for imported_name in imported_names:
        st.write(imported_name)