from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
import transformers
import torch
import os

from huggingface_hub import login

# Authenticate with the Hugging Face Hub using a token stored in the DS4
# environment variable. The token is a secret, so only confirm that it was
# found rather than printing its value.
access_token_read = os.getenv("DS4")
print("Hugging Face token found:", access_token_read is not None)
login(token=access_token_read)

app = FastAPI()

# Allow cross-origin requests from any origin; tighten allow_origins before
# deploying to production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)

# Load Llama 3.1 8B Instruct and wrap it in a text-generation pipeline.
model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = transformers.AutoTokenizer.from_pretrained(model_id)
model = transformers.AutoModelForCausalLM.from_pretrained(
    model_id, device_map="auto", torch_dtype=torch.bfloat16
)
# The model is already placed on devices by device_map="auto" above, so the
# pipeline is not given a device_map of its own.
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=150,
    temperature=0.7,
)

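# Optional sanity check (illustrative only): call the pipeline once to confirm
# that generation works before wiring it into the API, e.g.:
#   out = pipeline("Write one short sentence about FastAPI.", return_full_text=False)
#   print(out[0]["generated_text"])
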

# Request schema for the email payload sent to the summarization endpoint.
class EmailRequest(BaseModel):
    subject: str
    sender: str
    recipients: str
    body: str

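
# The endpoint below calls create_email_prompt(), which the original code does
# not define anywhere. This is a minimal sketch of such a prompt builder; the
# wording and layout of the prompt are assumptions, not the original helper.
def create_email_prompt(subject: str, sender: str, recipients: str, body: str) -> str:
    return (
        "Summarize the following email in two or three sentences.\n\n"
        f"Subject: {subject}\n"
        f"From: {sender}\n"
        f"To: {recipients}\n\n"
        f"{body}\n\n"
        "Summary:"
    )
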

@app.post("/summarize-email/")
async def summarize_email(email: EmailRequest):
    prompt = create_email_prompt(email.subject, email.sender, email.recipients, email.body)

    # return_full_text=False keeps the prompt out of the pipeline output, so
    # only the model's generated summary is returned to the client.
    summary = pipeline(prompt, return_full_text=False)[0]["generated_text"]

    return {"summary": summary}
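

# A minimal way to run the app locally, assuming uvicorn is installed
# (e.g. pip install uvicorn); the host and port values are illustrative.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)

# Example request once the server is running (values are illustrative):
#   curl -X POST http://localhost:8000/summarize-email/ \
#     -H "Content-Type: application/json" \
#     -d '{"subject": "Hi", "sender": "a@example.com", "recipients": "b@example.com", "body": "Hello"}'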