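# Streamlit app: a password-gated playground that sends a user prompt
# (optionally combined with an uploaded .txt or .pdf document) to GPT-4o
# or Claude 3.5 Sonnet and streams the model's response back to the page.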
import streamlit as st
import openai
import anthropic
import fitz

# Set up API clients
openai_api_key = st.secrets["OPENAI_API_KEY"]
openai_client = openai.OpenAI(api_key=openai_api_key)
anthropic_api_key = st.secrets["ANTHROPIC_API_KEY"]
anthropic_client = anthropic.Anthropic(api_key=anthropic_api_key)
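
# The app reads three Streamlit secrets (e.g. from .streamlit/secrets.toml);
# the values below are placeholders, not real keys:
#   OPENAI_API_KEY = "sk-..."
#   ANTHROPIC_API_KEY = "sk-ant-..."
#   PASSWORD = "<shared app password>"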

# Password protection
def check_password():
    if "password_correct" not in st.session_state:
        st.session_state["password_correct"] = False
    if not st.session_state["password_correct"]:
        password = st.text_input("Enter the password", type="password")
        if st.button("Submit"):
            if password == st.secrets["PASSWORD"]:
                st.session_state["password_correct"] = True
                st.rerun()
            else:
                st.error("Password incorrect")
        return False
    else:
        return True
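
# Extract plain text from an uploaded PDF using PyMuPDF (imported as fitz),
# concatenating the text of every page; returns None if parsing fails.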
def extract_text_from_pdf(file):
    try:
        with fitz.open(stream=file.read(), filetype="pdf") as doc:
            text = ""
            for page in doc:
                text += page.get_text()
            return text
    except Exception as e:
        st.error(f"Error parsing PDF: {str(e)}")
        return None
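
# Main UI: password gate, model picker, prompt box, optional document upload,
# and a Run button that streams the selected model's answer.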
def main():
    st.title("Harvey Legal Research Take-Home")

    if not check_password():
        return

    # Model selector
    model = st.selectbox("Select Model", ["GPT-4o", "Claude 3.5 Sonnet"])

    # Prompt input
    prompt = st.text_area("Enter your prompt:", height=200)

    # Document upload
    uploaded_file = st.file_uploader("Upload a document", type=["txt", "pdf"])
    document_text = None
    if uploaded_file is not None:
        if uploaded_file.type == "text/plain":
            document_text = uploaded_file.getvalue().decode()
        elif uploaded_file.type == "application/pdf":
            document_text = extract_text_from_pdf(uploaded_file)
        else:
            st.error("Unsupported file type. Please upload a text or PDF file.")

    # Run button
    if st.button("Run"):
        if not prompt:
            st.warning("Please enter a prompt.")
            return
        if document_text is None:
            full_prompt = prompt
        else:
            full_prompt = f"Document content:\n\n{document_text}\n\nPrompt:\n\n{prompt}"
        if model == "GPT-4o":
            with st.spinner("Processing with GPT-4o..."):
                stream = openai_client.chat.completions.create(
                    model="gpt-4o",
                    messages=[{"role": "user", "content": full_prompt}],
                    stream=True,
                )
                full_response = ""
                response_area = st.empty()
                for chunk in stream:
                    if chunk.choices[0].delta.content is not None:
                        full_response += chunk.choices[0].delta.content
                        response_area.markdown(f"**Response:**\n\n{full_response}")
        elif model == "Claude 3.5 Sonnet":
            with st.spinner("Processing with Claude 3.5 Sonnet..."):
                with anthropic_client.messages.stream(
                    model="claude-3-5-sonnet-20240620",
                    max_tokens=8192,
                    messages=[{"role": "user", "content": full_prompt}],
                ) as stream:
                    full_response = ""
                    response_area = st.empty()
                    for text in stream.text_stream:
                        full_response += text
                        response_area.markdown(f"**Response:**\n\n{full_response}")

if __name__ == "__main__":
    main()