import streamlit as st
import google.generativeai as genai
import requests
import subprocess
import os
import tempfile
import pylint.epylint as lint
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
# Configure the Gemini API
genai.configure(api_key=st.secrets["GOOGLE_API_KEY"])
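# The key above is read from Streamlit secrets; when running locally that means a
# .streamlit/secrets.toml entry such as:
#   GOOGLE_API_KEY = "your-key-here"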
# Create the model with optimized parameters and enhanced system instructions
generation_config = {
    "temperature": 0.6,         # Lower temperature for more deterministic responses
    "top_p": 0.8,               # Adjusted for better diversity
    "top_k": 30,                # Increased for more diverse tokens
    "max_output_tokens": 8192,  # Upper bound supported by gemini-1.5-pro for long responses
}
model = genai.GenerativeModel(
    model_name="gemini-1.5-pro",
    generation_config=generation_config,
    system_instruction="""
    You are Ath, a highly knowledgeable code assistant. You speak in a friendly, casual tone, using 'bro' slang, and you never mention you're an AI.
    Your responses should contain only optimized, secure, high-quality code, with no explanations. You are designed to provide accurate, efficient, and cutting-edge code solutions.
    """
)
chat_session = model.start_chat(history=[])
def generate_response(user_input):
    try:
        response = chat_session.send_message(user_input)
        return response.text
    except Exception as e:
        return f"Error: {e}"
def optimize_code(code):
    # Placeholder for advanced code optimization logic; for now, run pylint as a basic
    # static-analysis pass. py_run expects a file path (plus CLI options), not raw
    # source, so the generated code is written to a temporary file first.
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
        tmp.write(code)
        tmp_path = tmp.name
    (pylint_stdout, pylint_stderr) = lint.py_run(tmp_path, return_std=True)
    os.remove(tmp_path)
    return code
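# Note: pylint.epylint was removed in pylint 3.x, so optimize_code above assumes an
# older pylint release. A minimal alternative sketch, assuming the pylint CLI is on
# PATH (the helper name is illustrative and nothing in this app calls it):
def lint_with_subprocess(path):
    result = subprocess.run(["pylint", path], capture_output=True, text=True)
    return result.stdout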
def fetch_from_github(query):
    # Placeholder for fetching code snippets from GitHub
    # This could involve using the GitHub API to search for relevant code
    return ""
def interact_with_api(api_url):
    # Placeholder for interacting with external APIs
    response = requests.get(api_url, timeout=10)
    response.raise_for_status()
    return response.json()
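# Illustrative usage only (not wired into the UI); the URL is just a public,
# unauthenticated example endpoint, not something this app depends on:
#   rate_info = interact_with_api("https://api.github.com/rate_limit")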
def train_ml_model(code_data):
    # Placeholder for training a machine learning model to predict code improvements
    df = pd.DataFrame(code_data)
    X = df.drop('target', axis=1)
    y = df['target']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
    model = RandomForestClassifier()
    model.fit(X_train, y_train)
    return model
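# Illustrative usage of the placeholder trainer above with a tiny synthetic dataset;
# the feature columns and 'target' label are assumptions, since no real code-metrics
# data ships with this app:
#   toy_data = {
#       "lines_of_code": [10, 250, 40, 500, 80, 120],
#       "num_functions": [1, 12, 3, 20, 4, 6],
#       "target": [0, 1, 0, 1, 0, 1],
#   }
#   clf = train_ml_model(toy_data)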
# Streamlit UI setup
st.set_page_config(page_title="Sleek AI Code Assistant", page_icon="💻", layout="wide")
st.markdown("""
<style>
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;600;700&display=swap');
body {
    font-family: 'Inter', sans-serif;
    background-color: #f0f4f8;
    color: #1a202c;
}
.stApp {
    max-width: 1000px;
    margin: 0 auto;
    padding: 2rem;
}
.main-container {
    background: #ffffff;
    border-radius: 16px;
    padding: 2rem;
    box-shadow: 0 4px 6px rgba(0, 0, 0, 0.05);
}
h1 {
    font-size: 2.5rem;
    font-weight: 700;
    color: #2d3748;
    text-align: center;
    margin-bottom: 1rem;
}
.subtitle {
    font-size: 1.1rem;
    text-align: center;
    color: #4a5568;
    margin-bottom: 2rem;
}
.stTextArea textarea {
    border: 2px solid #e2e8f0;
    border-radius: 8px;
    font-size: 1rem;
    padding: 0.75rem;
    transition: all 0.3s ease;
}
.stTextArea textarea:focus {
    border-color: #4299e1;
    box-shadow: 0 0 0 3px rgba(66, 153, 225, 0.5);
}
.stButton button {
    background-color: #4299e1;
    color: white;
    border: none;
    border-radius: 8px;
    font-size: 1.1rem;
    font-weight: 600;
    padding: 0.75rem 2rem;
    transition: all 0.3s ease;
    width: 100%;
}
.stButton button:hover {
    background-color: #3182ce;
}
.output-container {
    background: #f7fafc;
    border-radius: 8px;
    padding: 1rem;
    margin-top: 2rem;
}
.code-block {
    background-color: #2d3748;
    color: #e2e8f0;
    font-family: 'Fira Code', monospace;
    font-size: 0.9rem;
    border-radius: 8px;
    padding: 1rem;
    margin-top: 1rem;
    overflow-x: auto;
}
.stAlert {
    background-color: #ebf8ff;
    color: #2b6cb0;
    border-radius: 8px;
    border: none;
    padding: 0.75rem 1rem;
}
.stSpinner {
    color: #4299e1;
}
</style>
""", unsafe_allow_html=True)
st.markdown('<div class="main-container">', unsafe_allow_html=True)
st.title("💻 Sleek AI Code Assistant")
st.markdown('<p class="subtitle">Powered by Google Gemini</p>', unsafe_allow_html=True)
prompt = st.text_area("What code can I help you with today?", height=120)
if st.button("Generate Code"):
if prompt.strip() == "":
st.error("Please enter a valid prompt.")
else:
with st.spinner("Generating code..."):
completed_text = generate_response(prompt)
if "Error" in completed_text:
st.error(completed_text)
else:
optimized_code = optimize_code(completed_text)
st.success("Code generated and optimized successfully!")
st.markdown('<div class="output-container">', unsafe_allow_html=True)
st.markdown('<div class="code-block">', unsafe_allow_html=True)
st.code(optimized_code)
st.markdown('</div>', unsafe_allow_html=True)
st.markdown('</div>', unsafe_allow_html=True)
st.markdown("""
<div style='text-align: center; margin-top: 2rem; color: #4a5568;'>
Created with ❤️ by Your Sleek AI Code Assistant
</div>
""", unsafe_allow_html=True)
st.markdown('</div>', unsafe_allow_html=True)