import os

import gradio as gr
import openai
import requests

# Read the OpenAI API key from the environment rather than hard-coding it.
openai.api_key = os.environ.get('GPT_3_Token')


def openai_query(
        recipient: str = "Employer",
        length: int = 400,
        recipient_name: str = "John Doe",
        context: str = "",
        draft: str = "",
        random_state: float = 0.85
) -> str:
    """Generate an email with OpenAI's Completion endpoint and return the stripped text."""
    return openai.Completion.create(
        engine='text-davinci-002',
        prompt=f"Write a professional email to my {recipient.lower()} starting with "
               f"Hello {recipient_name}, about the subject {context} and the email "
               f"should be based on this draft: {draft}",
        temperature=random_state,
        max_tokens=length,
        frequency_penalty=0.25,
        presence_penalty=0.75,
        best_of=1
    ).get("choices")[0]['text'].strip()
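# Example of calling openai_query directly; the argument values here are
# hypothetical, not part of the app:
#   openai_query("professor", 250, "Dr. Smith", "Requesting a meeting",
#                "Could we meet next week to discuss my project?")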
def query(payload, API_URL):
    """POST a JSON payload to the Hugging Face Inference API and return the decoded response."""
    response = requests.request("POST", API_URL, json=payload)
    return response.json()
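# Optional sketch, not part of the original app: the same Inference API endpoint
# also accepts a Hugging Face access token, which raises the anonymous rate limit.
# The "HF_TOKEN" environment variable name below is an assumption.
#
# def authed_query(payload, API_URL):
#     headers = {"Authorization": "Bearer " + os.environ.get("HF_TOKEN", "")}
#     response = requests.request("POST", API_URL, headers=headers, json=payload)
#     return response.json()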
def pre_query(sender, recipient, recipient_name, context, dates, draft, model_id):
    # Arguments arrive positionally from the Gradio input_list below, i.e. in the
    # order (sender, recipient, recipient_name, subject, dates, email, model_id).
    # `sender` and `dates` are collected by the UI but not yet used in the prompts.
    API_URL = "https://api-inference.huggingface.co/models/" + model_id

    prompt = (f"Write a professional email to my {recipient.lower()} starting with "
              f"Hello {recipient_name}, about the subject {context} and the email "
              f"should be based on this draft: {draft}")

    if model_id == "EleutherAI/gpt-neo-1.3B":
        data = query({
            "inputs": prompt,
            # wait_for_model is an Inference API option, not a generation parameter.
            "options": {"wait_for_model": True}
        }, API_URL)
        return data[0]['generated_text']

    if model_id == "bigscience/bloom":
        # Seed the continuation with the greeting so BLOOM starts the email body.
        input_string = prompt + f": Hello {recipient_name},\n\n"
        data = query({
            "inputs": input_string,
            "parameters": {"max_new_tokens": 96,
                           "return_full_text": False},
            "options": {"wait_for_model": True}
        }, API_URL)
        # Strip any echo of the prompt and re-attach the greeting.
        return f"Hello {recipient_name},\n\n" + data[0]['generated_text'].replace(input_string, '')

    if model_id == "GPT-3":
        return openai_query(recipient, 250, recipient_name, context, draft)

    return ""
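# Example of calling pre_query outside the UI; the values are hypothetical:
#   pre_query("student", "professor", "Dr. Smith", "Requesting a meeting",
#             "01/15/2024", "Could we meet to discuss my thesis draft?",
#             "bigscience/bloom")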
demo = gr.Blocks()

with demo:
    gr.Markdown(
        """
        # <center>Email Assistant</center>
        Please fill out the fields below!
        """)

    with gr.Row():
        with gr.Column():
            with gr.Group():
                with gr.Row():
                    sender = gr.Dropdown(["student", "professor", "employee", "employer", "coworker", "applicant", "recruiter"], label="I am a...")
                    recipient = gr.Dropdown(["student", "professor", "employee", "employer", "coworker", "applicant", "recruiter"], label="I am writing to my...")
                    recipient_name = gr.Textbox(label="Recipient Name", placeholder="Their name is...")

            subject = gr.Dropdown(["Requesting a meeting", "Reporting conflict with scheduled meeting time", "Requesting clarification", "Requesting to leave early", "Requesting a leave of absence", "Requesting a letter of recommendation", "Requesting a referral for a job application"], label="In this email, I am...")
            dates = gr.Textbox(label="Relevant Dates", placeholder="MM/DD/YYYY")
            email = gr.Textbox(label="Input", lines=10, placeholder="Enter your message here!")
            model_id = gr.Dropdown(["GPT-3", "EleutherAI/gpt-neo-1.3B", "bigscience/bloom"], label="Choose a model")

            submit_button = gr.Button("Generate my email!")
            text_output = gr.Textbox(lines=10, label="Email", placeholder="Your generated email!", interactive=True)

    # These components are passed to pre_query positionally, in this order.
    input_list = [sender, recipient, recipient_name, subject, dates, email, model_id]

    submit_button.click(pre_query, inputs=input_list, outputs=text_output)

demo.launch(debug=True)