import gradio as gr
import json
import markdown
import requests
import time
from telegraph import Telegraph
from gradio_client import Client
# Set up the Telegraph client
telegraph = Telegraph()
telegraph.create_account(short_name='BookMindAI')

with open('detail_queries.json', 'r') as file:
    detail_queries = json.load(file)

with open('lang.json', 'r') as file:
    languages = [str(x) for x in json.load(file).keys()]
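# Note: neither JSON file is shown in this view. From how they are used below,
# detail_queries.json is assumed to map each detail label to a prompt template
# containing {book_name} and {author} placeholders, and lang.json is assumed to
# be keyed by flag-prefixed language names, roughly:
#   detail_queries.json -> {"Main Characters": "Describe the main characters of '{book_name}' by {author}", ...}
#   lang.json           -> {"🇬🇧 english": ..., "🇺🇦 ukrainian": ..., ...}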

def markdown_to_html(md_content):
    return markdown.markdown(md_content)

def predict(prompt, images=[]):
    # Send the prompt (and any images) to the hosted Gemini demo Space and
    # return the model's reply from the chat history it sends back.
    client = Client("https://roboflow-gemini.hf.space/--replicas/bkd57/")
    result = client.predict(
        None,
        images,
        0.4,
        2048,
        "",
        32,
        1,
        [[prompt, None]],
        api_name="/bot"
    )
    return result[0][1]

def fetch_summary(book_name, author, language):
    question = f"Provide a short summary of the book '{book_name}' by {author} in {language} language."
    answer = predict(question)
    return answer

def post_to_telegraph(title, content):
    # Convert the Markdown summary to HTML and publish it as a Telegraph page.
    html_content = markdown_to_html(content)
    response = telegraph.create_page(
        title=title,
        html_content=html_content
    )
    return 'https://telegra.ph/{}'.format(response['path'])

def generate_predictions(book_name, author, language_choice, detail_options=[]):
    details = ""
    for option in detail_options:
        # Build the prompt for this detail section; language_choice looks like
        # "🇬🇧 english", so [3:] strips the leading flag emoji and space.
        query_template = detail_queries.get(option).format(book_name=book_name, author=author) + '. Answer in ' + language_choice[3:]
        try:
            response = predict(query_template)
            details += f"\n\n**{option}**:\n{response}"
        except Exception:
            # Retry once after a short pause, then skip this section on failure.
            time.sleep(2)
            try:
                response = predict(query_template)
                details += f"\n\n**{option}**:\n{response}"
            except Exception:
                pass
    summary = fetch_summary(book_name, author, language_choice[3:])
    combined_summary = summary + details
    try:
        telegraph_url = post_to_telegraph(f"Summary of {book_name} by {author}", combined_summary)
    except requests.exceptions.ConnectionError:
        telegraph_url = "Error connecting to Telegraph API"
    return combined_summary, telegraph_url

with gr.Blocks(title="📖 BookMindAI", theme=gr.themes.Base()).queue() as demo:
    gr.DuplicateButton()
    with gr.Tab("Summarize book🎯"):
        with gr.Row():
            with gr.Column():
                book_name_input = gr.Textbox(placeholder="Enter Book Name", label="Book Name")
                author_name_input = gr.Textbox(placeholder="Enter Author Name", label="Author Name")
                language_input = gr.Dropdown(choices=languages, label="Language")
                detail_options_input = gr.CheckboxGroup(choices=list(detail_queries.keys()), label="Details to Include", visible=True)
                run_button_summarize = gr.Button("Run", visible=True)
            with gr.Column():
                telegraph_link_output = gr.Markdown(label="View on Telegraph", visible=True)
        with gr.Row():
            summary_output = gr.Markdown(label="Parsed Content", visible=True)
        run_button_summarize.click(fn=generate_predictions,
                                   inputs=[book_name_input, author_name_input, language_input, detail_options_input],
                                   outputs=[summary_output, telegraph_link_output],
                                   show_progress=True, queue=True)
        examples_summarize = [
            ["Harry Potter and the Philosopher's Stone", "J.K. Rowling", "🇬🇧 english"],
            ["Pride and Prejudice", "Jane Austen", "🇺🇦 ukrainian"],
            ["The Great Gatsby", "F. Scott Fitzgerald", "🇫🇷 french"]
        ]
        gr.Examples(examples=examples_summarize, inputs=[book_name_input, author_name_input, language_input, detail_options_input])
with gr.Tab("Talk about bookπ"):
chat_examples = [
"How do the underlying themes of a book reflect the societal values and beliefs of its time?",
"In what ways do the characters' personal journeys mirror the broader human experience?"
]
def chat_response(message, history):
for i in range(len(message)):
response = predict(message)
yield response
chat_interface = gr.ChatInterface(chat_response, examples=chat_examples, title='Talk with Gemini PRO about any book.')
demo.launch() |
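
# A minimal sketch of exercising the pipeline without the UI; it assumes the
# detail option "Main Characters" exists in detail_queries.json and that the
# remote Gemini Space is reachable. Kept commented out because demo.launch()
# above blocks and this file runs as the Space entry point:
#
# summary, url = generate_predictions(
#     "The Great Gatsby", "F. Scott Fitzgerald", "🇫🇷 french", ["Main Characters"]
# )
# print(url)
# print(summary)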