# AI chatbot app for Hugging Face Spaces (Gradio UI + OpenAI backend).
import os
import openai
import logging
import gradio as gr
import asyncio
from typing import Dict, Any
from cryptography.fernet import Fernet
# Configure root logging once at import time; INFO so startup/errors are visible
# in the Spaces container logs.
logging.basicConfig(level=logging.INFO)
class EnvironmentManager:
    """Validates and loads the environment variables the app depends on."""

    @staticmethod
    def load_env_variables() -> Dict[str, str]:
        """Return the required environment variables as a dict.

        Returns:
            Mapping of variable name -> value for every required variable.

        Raises:
            ValueError: if any required variable is unset or empty, listing
                all missing names so the operator can fix them in one pass.
        """
        # "HF_KEY" is read by AICore.__init__; validating it here makes the
        # app fail fast with a clear message instead of a KeyError later.
        required_vars = ["Key", "HF_KEY"]
        env_vars = {var: os.getenv(var) for var in required_vars}
        missing_vars = [var for var, value in env_vars.items() if not value]
        if missing_vars:
            raise ValueError(f"Missing environment variables: {', '.join(missing_vars)}")
        return env_vars
class EncryptionManager:
    """Symmetric encryption/decryption of sensitive strings using Fernet."""

    def __init__(self, key: str):
        """Build the cipher from a URL-safe base64-encoded Fernet key string."""
        self.cipher = Fernet(key.encode())

    def encrypt(self, data: str) -> str:
        """Encrypt *data* and return the Fernet token as a str."""
        token = self.cipher.encrypt(data.encode())
        return token.decode()

    def decrypt(self, encrypted_data: str) -> str:
        """Decrypt a token produced by :meth:`encrypt` back to plaintext."""
        plaintext = self.cipher.decrypt(encrypted_data.encode())
        return plaintext.decode()
class AICore:
    """AI core: encrypts incoming queries and answers them via OpenAI's chat API."""

    def __init__(self, env_vars: Dict[str, str]):
        """Initialize the core from validated environment variables.

        Args:
            env_vars: Mapping containing "Key" (Fernet key). The OpenAI key is
                read from "HF_KEY" if present, else from the process environment.

        Raises:
            ValueError: if no OpenAI API key can be found.
        """
        self.env_vars = env_vars
        self.encryption_manager = EncryptionManager(env_vars["Key"])
        # Bug fix: env_vars may not contain "HF_KEY" (the original validator only
        # required "Key"), so a direct env_vars["HF_KEY"] raised KeyError at
        # startup. Fall back to the process environment and fail with a clear error.
        api_key = env_vars.get("HF_KEY") or os.getenv("HF_KEY")
        if not api_key:
            raise ValueError("Missing environment variables: HF_KEY")
        self.openai_api_key = api_key

    async def generate_response(self, query: str) -> Dict[str, Any]:
        """Encrypt the query and fetch a chat completion.

        Returns a dict with "encrypted_query" and "model_response" on success,
        or {"error": ...} on any failure (errors are logged, never raised).
        """
        try:
            encrypted_query = self.encryption_manager.encrypt(query)
            # NOTE(review): openai.ChatCompletion.acreate is the legacy
            # (openai < 1.0) interface — confirm the pinned openai version.
            response = await openai.ChatCompletion.acreate(
                model="ft:gpt-4o-2024-08-06:raiffs-bits:codettev7",  # Ensure this model is supported
                messages=[
                    {"role": "system", "content": "You are a helpful AI assistant."},
                    {"role": "user", "content": query}
                ],
                api_key=self.openai_api_key
            )
            model_response = response["choices"][0]["message"]["content"]
            return {
                "encrypted_query": encrypted_query,
                "model_response": model_response
            }
        except Exception as e:
            # Boundary handler: callers get a uniform error payload; details go
            # to the log rather than to the end user.
            logging.error(f"Error generating response: {e}")
            return {"error": "Failed to generate response"}
# Hugging Face Gradio App
def main():
    """Entry point: validate env vars, build the AI core, launch the Gradio UI."""
    try:
        env_vars = EnvironmentManager.load_env_variables()
        ai_core = AICore(env_vars)

        async def _answer(message: str) -> str:
            result = await ai_core.generate_response(message)
            return result.get("model_response", "Error: Response not available")

        def respond(message: str) -> str:
            # Gradio callbacks are synchronous; bridge into the async core here.
            return asyncio.run(_answer(message))

        demo = gr.Interface(
            fn=respond,
            inputs="text",
            outputs="text",
            title="AI Chatbot - Hugging Face Space",
        )
        # Use `server_name="0.0.0.0"` to work properly on Hugging Face Spaces
        demo.launch(server_name="0.0.0.0", server_port=7860)
    except Exception as e:
        logging.error(f"Application failed to start: {e}")


if __name__ == "__main__":
    main()