# cyberbot/app.py
from fastapi import FastAPI
from pydantic import BaseModel
from transformers import AutoModelForCausalLM, AutoTokenizer
from datasets import load_dataset
import torch

# Load model and tokenizer
MODEL_NAME = "meta-llama/Llama-3.2-1B" # Replace with your fine-tuned model
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, torch_dtype=torch.float16)
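
# Device handling, a minimal sketch: float16 inference generally needs a GPU,
# so fall back to float32 on CPU-only hosts.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device) if device == "cuda" else model.float()
model.eval()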

# Load AWS-Bot dataset
DATASET_NAME = "Faizal2805/cyberbot" # Replace with your dataset
dataset = load_dataset(DATASET_NAME, split="train")
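
# Assumed record layout (Llama-2 style instruction formatting); the concrete
# question and answer here are illustrative, not taken from the real dataset:
#   {"text": "<s>[INST] What is EC2? [/INST] EC2 is AWS's compute service. </s>"}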

def get_dataset_response(prompt: str):
    """
    Check whether the user's input matches a dataset entry and return its
    predefined response. Return None when no match is found.
    """
    # Linear scan; acceptable for small datasets, slow for large ones.
    for example in dataset:
        if example["text"].startswith(f"<s>[INST] {prompt} [/INST]"):
            # Take the answer after the [/INST] tag and drop the trailing </s>.
            return example["text"].split("[/INST]", 1)[-1].replace("</s>", "").strip()
    return None

def generate_response(prompt: str):
    """
    Return a response from the dataset if available; otherwise, generate one
    with the model.
    """
    dataset_response = get_dataset_response(prompt)
    if dataset_response:
        return dataset_response  # Predefined dataset response

    # Fall back to model-based generation.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=200)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

app = FastAPI()


class Query(BaseModel):
    text: str

@app.post("/chat")
def chat(query: Query):
    response = generate_response(query.text)
    return {"response": response}

@app.get("/")
def root():
    return {"message": "AWS-Bot is running!"}