# AI Code Generator — Streamlit app (Hugging Face Space).
# Generates code from a user prompt with a local CodeQwen model and
# falls back to a Groq-hosted model when the local model fails.
# Standard library
import os

# Third-party
import groq
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# Constants
PRIMARY_MODEL = "Qwen/CodeQwen1.5-7B-Chat"  # local HF model used first
BACKUP_MODEL = "llama-3.3-70b-versatile"    # Groq-hosted fallback model
# Prefer GPU when one is available; everything else runs on CPU.
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
# Load Primary Model & Tokenizer
@st.cache_resource
def load_model():
    """Load and cache the primary model and its tokenizer.

    Returns:
        tuple: (model, tokenizer) for PRIMARY_MODEL.

    Cached with st.cache_resource so Streamlit reruns (which re-execute
    the whole script on every interaction) do not reload the multi-GB
    model each time. device_map="auto" lets transformers place weights
    on the available device(s).
    """
    tokenizer = AutoTokenizer.from_pretrained(PRIMARY_MODEL)
    model = AutoModelForCausalLM.from_pretrained(PRIMARY_MODEL, device_map="auto")
    return model, tokenizer

model, tokenizer = load_model()
def generate_code(prompt):
    """Generate code from *prompt* using the primary local model.

    Args:
        prompt: The user's natural-language request.

    Returns:
        str: The decoded model output; falls back to the Groq backup
        model if the local model raises for any reason.
    """
    try:
        inputs = tokenizer(prompt, return_tensors="pt").to(DEVICE)
        # max_new_tokens bounds only the *generated* text; the original
        # max_length=512 also counted prompt tokens, so long prompts
        # could leave little or no room for the answer.
        outputs = model.generate(**inputs, max_new_tokens=512)
        return tokenizer.decode(outputs[0], skip_special_tokens=True)
    except Exception as e:
        # Surface the actual failure instead of silently dropping it.
        st.error(f"Primary model failed ({e}). Switching to backup...")
        return generate_code_backup(prompt)
def generate_code_backup(prompt):
    """Use the Groq API for backup model code generation.

    Args:
        prompt: The user's natural-language request.

    Returns:
        str: The backup model's reply, or "" when no API key is set.

    The key is read from the GROQ_API_KEY environment variable rather
    than being hard-coded in source (the original shipped a placeholder
    secret, which is both broken and a bad security pattern).
    """
    api_key = os.environ.get("GROQ_API_KEY")
    if not api_key:
        st.error("GROQ_API_KEY is not set; cannot reach the backup model.")
        return ""
    client = groq.Client(api_key=api_key)
    response = client.chat.completions.create(
        model=BACKUP_MODEL,
        messages=[
            {"role": "system", "content": "You are a helpful AI."},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content
def explain_code(code):
    """Ask the generator for a detailed explanation of *code*.

    Delegates to generate_code with an explanation prompt, so the same
    primary/backup fallback logic applies.
    """
    return generate_code(f"Explain this code in detail:\n{code}")
# Streamlit UI
st.set_page_config(page_title="AI Code Generator", layout="wide")
# NOTE(review): emoji below are reconstructed from mojibake in the
# original source ("π", "β‘") — confirm the intended glyphs.
st.title("🚀 AI Code Generator App")
st.markdown("### Generate and edit code with AI!")

# Sidebar for Alternative Solutions
st.sidebar.header("⚡ Alternative Solutions")

user_query = st.text_area("Enter your app idea:", height=150)

if st.button("Generate Code"):
    with st.spinner("Generating code..."):
        generated_code = generate_code(user_query)
        st.code(generated_code, language="python")
        # Persist across reruns so the "Explain Code" button below can
        # still see the result after Streamlit re-executes the script.
        st.session_state["generated_code"] = generated_code

if "generated_code" in st.session_state and st.button("📖 Explain Code"):
    explanation = explain_code(st.session_state["generated_code"])
    st.text_area("Explanation:", explanation, height=250)

# st.experimental_rerun was deprecated and removed; st.rerun is the
# current API. Passing it directly avoids a pointless lambda wrapper.
st.sidebar.button("🔄 New Code", on_click=st.rerun)