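"""Streamlit app ("Google Leak Reporting Tool") that runs GPT Researcher
against the local documents in DOC_PATH and displays the generated report."""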
import streamlit as st
from gpt_researcher import GPTResearcher
import asyncio
import nest_asyncio
import os
# Access secrets
openai_api_key = st.secrets["OPENAI_API_KEY"]
tavily_api_key = st.secrets["TAVILY_API_KEY"]
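# GPT Researcher and its OpenAI/Tavily clients read these keys from environment
# variables; exporting the secrets makes that explicit (Hugging Face Spaces also
# exposes secrets as env vars, in which case this is a no-op).
os.environ.setdefault("OPENAI_API_KEY", openai_api_key)
os.environ.setdefault("TAVILY_API_KEY", tavily_api_key)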
# Patch asyncio so run_until_complete can be called even when an event loop is already running (as under Streamlit)
nest_asyncio.apply()
# Set the document path environment variable
os.environ['DOC_PATH'] = './' # Path to the folder with documents
# Constants
REPORT_TYPE = "research_report"
DOCUMENT_FILE = 'removed_code.txt'  # Document placed in DOC_PATH; GPT Researcher scans the whole folder
# Function to handle asynchronous calls
def run_async(coroutine):
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(coroutine)
# Define the asynchronous function to fetch the report
async def fetch_report(query, report_type):
    """
    Fetch a research report based on the provided query and report type.
    Research is conducted on the local documents found under DOC_PATH
    (report_source='local').
    """
    researcher = GPTResearcher(query=query, report_type=report_type, report_source='local')
    await researcher.conduct_research()
    report = await researcher.write_report()
    return report
# Streamlit interface
st.title("Google Leak Reporting Tool")
# User input for the query
query = st.text_input(
    "Enter your research query:",
    "Extract all the information about how the ranking for internal links works."
)
# Button to generate report
if st.button("Generate Report"):
    if not query:
        st.warning("Please enter a query to generate a report.")
    else:
        with st.spinner("Generating report..."):
            # Fetch the report asynchronously using the local document
            fetch_report_coroutine = fetch_report(query, REPORT_TYPE)
            report = run_async(fetch_report_coroutine)
        st.success("Report generated successfully!")
        st.write(report)
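# To run locally: `streamlit run app.py`, with OPENAI_API_KEY and TAVILY_API_KEY
# set in .streamlit/secrets.toml and the source document placed in DOC_PATH.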