tommy / app.py
ToonTownTommy's picture
Update app.py
6121e18 verified
raw
history blame
583 Bytes
import os

import gradio as gr
import google.generativeai as genai

# Configure the Gemini SDK from the environment.  `os` must be imported for
# this lookup; a missing GEMINI_API_KEY fails fast with a KeyError at startup.
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

# Sampling parameters for every generation request.
generation_config = {
    "temperature": 0.7,
    "top_p": 0.95,
    "top_k": 64,
    "max_output_tokens": 128,
    "response_mime_type": "text/plain",
}

model = genai.GenerativeModel(
    model_name="gemini-1.5-flash",
    generation_config=generation_config,
)


def generate(prompt: str) -> str:
    """Return Gemini's text reply for *prompt*.

    `generate_content` must be *called* with the prompt (the original code
    only referenced the bound method, so `.text` raised AttributeError).
    """
    response = model.generate_content(prompt)
    return response.text


# The Gradio app object that `demo.launch()` serves; it was never defined
# in the original script, which crashed with NameError at startup.
demo = gr.Interface(
    fn=generate,
    inputs=gr.Textbox(label="Prompt"),
    outputs=gr.Textbox(label="Response"),
    title="Gemini 1.5 Flash",
)

if __name__ == "__main__":
    demo.launch()