"""Smart home assistant demo: Gradio UI, RAG-based answers, MQTT device control,
gTTS speech output, SQLite logging, and simple anomaly detection."""

import os
import sqlite3

import gradio as gr
import paho.mqtt.client as mqtt
from gtts import gTTS
from sklearn.ensemble import IsolationForest
from transformers import RagRetriever, RagSequenceForGeneration, RagTokenizer, pipeline

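# SQLite store for user preferences and interaction history.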
# check_same_thread=False lets Gradio's worker thread reuse this connection.
conn = sqlite3.connect('preferences.db', check_same_thread=False)
cursor = conn.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS preferences (id INTEGER PRIMARY KEY, setting TEXT, value TEXT)''')
cursor.execute('''CREATE TABLE IF NOT EXISTS history (id INTEGER PRIMARY KEY, command TEXT, response TEXT)''')
conn.commit()

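# Isolation Forest flags commands whose length is unusual compared to recent history.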
anomaly_model = IsolationForest(contamination=0.1)
data = []

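# Load the RAG components once at start-up.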
# The dummy "exact" index keeps the retriever download small; point it at a full
# wiki_dpr index for real retrieval quality.
retriever = RagRetriever.from_pretrained("facebook/rag-sequence-base", index_name="exact", use_dummy_dataset=True)
tokenizer = RagTokenizer.from_pretrained("facebook/rag-sequence-base")
# Attaching the retriever lets model.generate() fetch context passages itself.
model = RagSequenceForGeneration.from_pretrained("facebook/rag-sequence-base", retriever=retriever)
nlp = pipeline("conversational")  # small-talk pipeline; unused in this snippet


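# Map recognized light commands to MQTT messages on the home/light topic.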
def control_device(command):
    client = mqtt.Client()
    client.connect("broker.hivemq.com", 1883, 60)
    client.loop_start()  # run the network loop so the publish actually goes out
    if "light" in command and "on" in command:
        client.publish("home/light", "ON")
        response = "Light turned on."
    elif "light" in command and "off" in command:
        client.publish("home/light", "OFF")
        response = "Light turned off."
    else:
        response = "Command not recognized."
    client.loop_stop()
    client.disconnect()
    return response


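# Route device commands to MQTT; answer everything else with the RAG model.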
def process_command(command):
    if "light" in command:
        return control_device(command)
    inputs = tokenizer(command, return_tensors="pt")
    # The attached retriever supplies context documents during generation.
    outputs = model.generate(input_ids=inputs["input_ids"])
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]


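# Persist each exchange so preferences and habits can be learned later.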
def log_history(command, response):
    cursor.execute("INSERT INTO history (command, response) VALUES (?, ?)", (command, response))
    conn.commit()


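# Track command lengths and flag outliers once enough samples have accumulated.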
def detect_anomalies(command):
    global data
    data.append(len(command))
    if len(data) > 10:
        anomaly_model.fit([[x] for x in data])
        if anomaly_model.predict([[len(command)]])[0] == -1:
            return True
    return False


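# Main entry point wired to the Gradio interface: text in, text plus speech out.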
def assistant(command):
    if detect_anomalies(command):
        return "Warning: Anomalous behavior detected!", None  # None skips the audio output
    response = process_command(command)
    log_history(command, response)
    tts = gTTS(text=response, lang='en')
    tts.save("response.mp3")
    return response, "response.mp3"


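# Gradio UI: one text box in, a text reply and the synthesized speech file out.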
demo = gr.Interface(fn=assistant, inputs="text", outputs=["text", "audio"])
demo.launch()