import gradio as gr
from llama_cpp import Llama
from huggingface_hub import hf_hub_download
import random
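

# Download the AstroSage-8B GGUF weights from the Hugging Face Hub (cached locally after the first run).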
model_path = hf_hub_download(
    repo_id="AstroMLab/AstroSage-8B-GGUF",
    filename="AstroSage-8B-Q8_0.gguf"
)
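
# Load the quantized model with llama-cpp-python. n_ctx sets the context window and
# chat_format="llama-3" applies the Llama 3 chat template in create_chat_completion().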
llm = Llama(
    model_path=model_path,
    n_ctx=2048,
    n_threads=4,
    chat_format="llama-3",
    seed=42,
    f16_kv=True,
    logits_all=False,
    use_mmap=True,
    n_gpu_layers=-1  # offload all layers to the GPU if available (Llama has no use_gpu argument)
)
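

# Randomized opening lines, shown when the chat loads or the user sends an empty message.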
GREETING_MESSAGES = [
    "Greetings! I am AstroSage, your guide to the cosmos. What would you like to explore today?",
    "Welcome to our cosmic journey! I am AstroSage. How may I assist you in understanding the universe?",
    "AstroSage here. Ready to explore the mysteries of space and time. How may I be of assistance?",
    "The universe awaits! I'm AstroSage. What astronomical wonders shall we discuss?",
]
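

# Convert Gradio's (user, assistant) history tuples into the role/content
# message format expected by llm.create_chat_completion().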
def format_chat_history(history):
    """Format the chat history for the model."""
    formatted_messages = []
    for human, assistant in history:
        formatted_messages.append({"role": "user", "content": human})
        if assistant:
            formatted_messages.append({"role": "assistant", "content": assistant})
    return formatted_messages
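

# Build the prompt (system persona + prior turns + new message) and stream the reply,
# yielding ("", updated_history) so the textbox clears while the chat fills in token by token.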
def generate_response(message, history):
    """Generate a streaming response using the LLM and update the chat history."""
    if not message:
        # Empty input: respond with a random greeting instead of calling the model.
        yield "", history + [(None, random.choice(GREETING_MESSAGES))]
        return

    formatted_history = format_chat_history(history)

    response = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": "You are AstroSage, an intelligent AI assistant specializing in astronomy, astrophysics, and space science. You provide accurate, scientific information while making complex concepts accessible. You're enthusiastic about space exploration and maintain a sense of wonder about the cosmos."},
            *formatted_history,
            {"role": "user", "content": message}
        ],
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
        stream=True
    )

    # Append the new turn, then fill in the assistant reply as chunks arrive.
    history = history + [(message, "")]
    partial_message = ""
    for chunk in response:
        delta = chunk["choices"][0]["delta"]
        if "content" in delta:
            partial_message += delta["content"]
            history[-1] = (message, partial_message)
            yield "", history
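

# Dark, space-themed styling applied to the Blocks layout.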
custom_css = """
#component-0 {
    background-color: #1a1a2e;
    border-radius: 15px;
    padding: 20px;
}
.dark {
    background-color: #0f0f1a;
}
.contain {
    max-width: 1200px !important;
}
"""
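

# Assemble the Gradio UI: header, chat window, input row, example prompts, and event wiring.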
with gr.Blocks(css=custom_css, theme=gr.themes.Soft(primary_hue="indigo", neutral_hue="slate")) as demo:
    gr.Markdown(
        """
        # AstroSage: Your Cosmic AI Companion

        Welcome to AstroSage, an advanced AI assistant specializing in astronomy, astrophysics, and space science.
        Powered by the AstroSage-8B model, I'm here to help you explore the wonders of the universe!

        ### What Can I Help You With?
        - Explanations of astronomical phenomena
        - Space exploration and missions
        - Stars, galaxies, and cosmic objects
        - Planetary science and exoplanets
        - Astrophysics concepts and theories
        - Astronomical instruments and observations

        Just type your question below and let's embark on a cosmic journey together!
        """
    )

    chatbot = gr.Chatbot(
        label="Chat with AstroSage",
        bubble_full_width=False,
        show_label=True,
        height=450
    )

    with gr.Row():
        msg = gr.Textbox(
            label="Type your message here",
            placeholder="Ask me anything about space and astronomy...",
            scale=9
        )
        clear = gr.Button("Clear Chat", scale=1)

    gr.Examples(
        examples=[
            "What is a black hole and how does it form?",
            "Can you explain the life cycle of a star?",
            "What are exoplanets and how do we detect them?",
            "Tell me about the James Webb Space Telescope.",
            "What is dark matter and why is it important?"
        ],
        inputs=msg,
        label="Example Questions"
    )
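
    # Submitting a message streams the reply: generate_response yields ("", updated_history)
    # pairs, clearing the textbox and updating the chat as tokens arrive.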
    msg.submit(
        generate_response,
        [msg, chatbot],
        [msg, chatbot],
        queue=True
    )

    clear.click(
        lambda: (None, None),
        None,
        [msg, chatbot],
        queue=False
    )
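
    # Show a random greeting in the chat window when the app first loads.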
    demo.load(
        lambda: ("", [(None, random.choice(GREETING_MESSAGES))]),
        None,
        [msg, chatbot],
        queue=False
    )


if __name__ == "__main__":
    demo.launch()