"""Streamlit app that generates research reports over local documents using GPT Researcher."""

import asyncio
import os

import streamlit as st
from gpt_researcher import GPTResearcher

# Load API keys from Streamlit secrets and export them as environment variables.
# GPT Researcher reads its configuration from the environment, so merely reading
# the secrets (as the original code did) never delivered the keys to the library.
os.environ["OPENAI_API_KEY"] = st.secrets["OPENAI_API_KEY"]
os.environ["TAVILY_API_KEY"] = st.secrets["TAVILY_API_KEY"]

# Set the document path environment variable
os.environ["DOC_PATH"] = "./local"  # Path to the folder with documents

# Constants
REPORT_TYPE = "research_report"


def fetch_report(query: str, report_type: str) -> str:
    """Fetch a research report based on the provided query and report type.

    Research is conducted on local documents (``DOC_PATH``).

    Args:
        query: The research question to investigate.
        report_type: GPT Researcher report type (e.g. ``"research_report"``).

    Returns:
        The generated report text, or a string starting with ``"Error"`` on failure.
    """
    try:
        researcher = GPTResearcher(
            query=query,
            report_type=report_type,
            report_source="local",
        )

        # conduct_research() and write_report() are coroutines. The original code
        # called them synchronously, returning an un-awaited coroutine object
        # instead of the report text; run them on a fresh event loop instead.
        async def _research() -> str:
            await researcher.conduct_research()
            return await researcher.write_report()

        return asyncio.run(_research())
    except Exception as e:
        # Surface the failure as a string so the UI can display it; callers
        # detect errors via the "Error" prefix.
        return f"Error during research: {str(e)}"


# Cache the report generation function to avoid redundant computations.
# st.cache (with suppress_st_warning) was deprecated and removed from Streamlit;
# st.cache_data is the modern replacement for caching serializable return values.
@st.cache_data(show_spinner=False)
def cached_fetch_report(query: str, report_type: str) -> str:
    """Cached wrapper around :func:`fetch_report` keyed on (query, report_type)."""
    return fetch_report(query, report_type)


# Streamlit interface
st.title("Google Leak Reporting Tool")

# User input for the query using a text area
query = st.text_area(
    "Enter your research query:",
    "Extract all the information about how the ranking for internal links works.",
    height=150,  # Adjustable height
)

# Start the report generation process
if st.button("Generate Report"):
    if not query:
        st.warning("Please enter a query to generate a report.")
    else:
        with st.spinner("Generating report..."):
            report = cached_fetch_report(query, REPORT_TYPE)

        # Display the report or error message
        if report and not report.startswith("Error"):
            st.success("Report generated successfully!")
            st.write(report)  # Display the report in the app
            # Create a download button for the report
            st.download_button(
                label="Download Report as Text File",
                data=report,
                file_name="research_report.txt",
                mime="text/plain",
            )
        else:
            st.error(report)  # Show the error message if any