# NOTE(review): removed non-Python page-scrape residue (status banners,
# commit hashes, and a line-number gutter) that broke parsing of this file.
import os

import groq
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
# --- Configuration ---------------------------------------------------------
# Hugging Face model used for local, in-process generation.
PRIMARY_MODEL = "Qwen/CodeQwen1.5-7B-Chat"
# Groq-hosted model used when the primary model fails.
BACKUP_MODEL = "llama-3.3-70b-versatile"
# Prefer the GPU when one is present; otherwise fall back to the CPU.
if torch.cuda.is_available():
    DEVICE = "cuda"
else:
    DEVICE = "cpu"
# Load Primary Model & Tokenizer
@st.cache_resource
def load_model():
    """Load and cache the primary model and its tokenizer.

    The ``@st.cache_resource`` decorator makes Streamlit load the weights
    once per process instead of on every script rerun — without it the
    multi-GB model would be re-downloaded/re-instantiated on each widget
    interaction.

    Returns:
        tuple: ``(model, tokenizer)`` for ``PRIMARY_MODEL``.
    """
    tokenizer = AutoTokenizer.from_pretrained(PRIMARY_MODEL)
    # device_map="auto" lets accelerate place layers on GPU/CPU as available.
    model = AutoModelForCausalLM.from_pretrained(PRIMARY_MODEL, device_map="auto")
    return model, tokenizer

# Loaded once at import; cached across Streamlit reruns by the decorator.
model, tokenizer = load_model()
def generate_code(prompt):
    """Generate code for *prompt* using the primary local model.

    Falls back to the Groq-hosted backup model if the primary model
    raises for any reason.

    Args:
        prompt: Natural-language description or code to feed the model.

    Returns:
        str: The decoded model output (includes the echoed prompt).
    """
    try:
        # device_map="auto" may shard the model across devices; model.device
        # points at the first parameters, which is where inputs must live —
        # the old global DEVICE could disagree with the actual placement.
        inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
        # max_new_tokens bounds only the *generated* text; the original
        # max_length=512 counted the prompt too, so long prompts could
        # produce no output at all.
        outputs = model.generate(**inputs, max_new_tokens=512)
        return tokenizer.decode(outputs[0], skip_special_tokens=True)
    except Exception as e:
        # Surface the real failure instead of hiding it, then fall back.
        st.error(f"Primary model failed ({e}). Switching to backup...")
        return generate_code_backup(prompt)
def generate_code_backup(prompt):
    """Generate code via the Groq API using the backup model.

    Args:
        prompt: User prompt forwarded verbatim as the chat user message.

    Returns:
        str: The assistant message content from the first choice.

    Raises:
        RuntimeError: If the ``GROQ_API_KEY`` environment variable is unset.
    """
    # SECURITY: never hard-code API keys in source (the original shipped a
    # placeholder literal). Read the key from the environment instead.
    api_key = os.environ.get("GROQ_API_KEY")
    if not api_key:
        raise RuntimeError("GROQ_API_KEY environment variable is not set.")
    client = groq.Client(api_key=api_key)
    response = client.chat.completions.create(
        model=BACKUP_MODEL,
        messages=[
            {"role": "system", "content": "You are a helpful AI."},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content
def explain_code(code):
    """Ask the model for a detailed explanation of *code*.

    Builds an instruction prompt around the code and routes it through the
    usual generation path (primary model with backup fallback).
    """
    explanation_request = "Explain this code in detail:\n" + code
    return generate_code(explanation_request)
# Streamlit UI
# This top-level script re-runs from top to bottom on every user interaction,
# so any state that must survive a rerun lives in st.session_state.
st.set_page_config(page_title="AI Code Generator", layout="wide")
# NOTE(review): the "π" / "β‘" glyphs below look like mojibake'd emoji
# (UTF-8 bytes decoded with the wrong codec) — confirm the intended characters.
st.title("π AI Code Generator App")
st.markdown("### Generate and edit code with AI!")
# Sidebar for Alternative Solutions
st.sidebar.header("β‘ Alternative Solutions")
user_query = st.text_area("Enter your app idea:", height=150)
if st.button("Generate Code"):
    with st.spinner("Generating code..."):
        generated_code = generate_code(user_query)
        st.code(generated_code, language="python")
        # Persist the result so the "Explain Code" button below can use it
        # on a later rerun (the button branch above won't be taken then).
        st.session_state["generated_code"] = generated_code
# Only offer an explanation once some code exists in this session.
if "generated_code" in st.session_state and st.button("π Explain Code"):
    explanation = explain_code(st.session_state["generated_code"])
    st.text_area("Explanation:", explanation, height=250)
# NOTE(review): st.experimental_rerun is deprecated in recent Streamlit
# releases in favor of st.rerun — verify against the pinned version.
st.sidebar.button("π New Code", on_click=lambda: st.experimental_rerun())