Update app.py
app.py CHANGED
@@ -422,6 +422,24 @@ def perform_ai_lookup(q, vocal_summary=True, extended_refs=False,
     - Verify maintains the requested style throughout
     """
 
+
+    # Claude then Arxiv..
+
+    # Claude:
+    response = client.messages.create(
+        model="claude-3-sonnet-20240229",
+        max_tokens=1000,
+        messages=[
+            {"role": "user", "content": user_input}
+        ])
+    st.write("Claude's reply 🧠:")
+    st.write(response.content[0].text)
+    filename = generate_filename(user_input, "md")
+    create_file(filename, user_input, response.content[0].text)
+
+
+    # Arxiv:
+
     client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
     refs = client.predict(q, 20, "Semantic Search",
                           "mistralai/Mixtral-8x7B-Instruct-v0.1",
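
For orientation, below is a minimal, self-contained sketch of the "Claude then Arxiv" flow these added lines implement. It is an illustration under stated assumptions, not the app's exact code: the Anthropic client is assumed to come from an ANTHROPIC_API_KEY environment variable, generate_filename and create_file are simplified stand-ins for the app's own helpers of the same names, and the Arxiv call passes only the four arguments visible in this hunk, since the rest of that call is truncated here.

# Minimal sketch under the assumptions noted above.
import os

import anthropic
import streamlit as st
from gradio_client import Client


def generate_filename(prompt: str, ext: str) -> str:
    # Simplified stand-in for the app's helper: derive a filesystem-safe name.
    safe = "".join(c if c.isalnum() else "_" for c in prompt)[:50]
    return f"{safe}.{ext}"


def create_file(filename: str, prompt: str, reply: str) -> None:
    # Simplified stand-in for the app's helper: persist the prompt and reply.
    with open(filename, "w", encoding="utf-8") as f:
        f.write(f"# Prompt\n\n{prompt}\n\n# Response\n\n{reply}\n")


def claude_then_arxiv(user_input: str):
    # Claude: ask claude-3-sonnet, show the reply, and save it as Markdown.
    claude = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
    response = claude.messages.create(
        model="claude-3-sonnet-20240229",
        max_tokens=1000,
        messages=[{"role": "user", "content": user_input}],
    )
    reply = response.content[0].text
    st.write("Claude's reply 🧠:")
    st.write(reply)
    create_file(generate_filename(user_input, "md"), user_input, reply)

    # Arxiv: query the RAG Space for related references. Only the four
    # arguments visible in the diff are passed; the full call in app.py
    # continues with arguments that are truncated in this hunk.
    arxiv = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
    refs = arxiv.predict(
        user_input, 20, "Semantic Search",
        "mistralai/Mixtral-8x7B-Instruct-v0.1",
    )
    return reply, refs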