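# Email Assistant: a Gradio app that drafts professional emails with
# OpenAI GPT-3 (text-davinci-002) or Hugging Face hosted models
# (EleutherAI/gpt-neo-1.3B, bigscience/bloom).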
import json
import requests
import gradio as gr
import pandas as pd
import os
import openai
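# The OpenAI key is read from the GPT_3_Token environment variable.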
openai.api_key = os.environ.get('GPT_3_Token')
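# Sends the prompt to the OpenAI Completions API and returns the generated
# email text, stripped of surrounding whitespace.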
def openai_query(
        recipient: str = "Employer",
        max_len: int = 400,          # renamed from `len` to avoid shadowing the builtin
        recipient_name: str = "John Doe",
        context: str = "",
        draft: str = "",             # renamed from `input` to avoid shadowing the builtin
        random_state: float = 0.85
) -> str:
    prompt = (
        "Write a professional email to my " + recipient.lower()
        + " starting with Hello " + recipient_name
        + ", about the subject " + context
        + " and the email should be based on this draft: " + draft
    )
    return openai.Completion.create(
        engine='text-davinci-002',
        prompt=prompt,
        temperature=random_state,
        max_tokens=max_len,
        frequency_penalty=0.25,
        presence_penalty=0.75,
        best_of=1
    ).get("choices")[0]['text'].strip()
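# Helper that POSTs a JSON payload to the Hugging Face Inference API and
# returns the decoded JSON response.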
def query(payload, API_URL):
    response = requests.request("POST", API_URL, json=payload)
    return response.json()
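# Builds the prompt for the selected model and dispatches the request either
# to the Hugging Face Inference API (gpt-neo / bloom) or to the OpenAI API (GPT-3).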
def pre_query(sender, recipient, recipient_name, context, dates, draft, model_id):
    # Parameter order matches `input_list` below; `sender` and `dates` are
    # collected from the UI but not yet used in the prompt.
    API_URL = "https://api-inference.huggingface.co/models/" + model_id
    prompt = (
        "Write a professional email to my " + recipient.lower()
        + " starting with Hello " + recipient_name
        + ", about the subject " + context
        + " and the email should be based on this draft: " + draft
    )
    if model_id == "EleutherAI/gpt-neo-1.3B":
        data = query({
            "inputs": prompt,
            "options": {"wait_for_model": True}
        }, API_URL)
        # The Inference API returns a dict (with an "error" key) instead of a
        # list while the model is loading or when the request fails.
        if isinstance(data, dict):
            return data.get('error')
        return data[0]['generated_text']
    if model_id == "bigscience/bloom":
        input_string = prompt + ": Hello " + recipient_name + ",\n\n"
        data = query({
            "inputs": input_string,
            "parameters": {"max_new_tokens": 96,
                           "return_full_text": False},
            "options": {"wait_for_model": True}
        }, API_URL)
        if isinstance(data, dict):
            return data.get('error')
        return "Hello " + recipient_name + ",\n\n" + data[0]['generated_text'].replace(input_string, '')
    if model_id == "GPT-3":
        return openai_query(recipient, 250, recipient_name, context, draft)
    return
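# Gradio Blocks UI: form fields for the email details, a model selector, and a
# submit button that wires the inputs to pre_query.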
demo = gr.Blocks()
with demo:
    gr.Markdown(
        """
        # <center> Email Assistant
        Please fill out the fields below!
        """)
    with gr.Row():
        with gr.Column():
            with gr.Group():
                with gr.Row():
                    sender = gr.Dropdown(["student", "professor", "employee", "employer", "coworker", "applicant", "recruiter"], label="I am a...")
                    recipient = gr.Dropdown(["student", "professor", "employee", "employer", "coworker", "applicant", "recruiter"], label="I am writing to my...")
                recipient_name = gr.Textbox(label="Recipient Name", placeholder="Their name is...")
                subject = gr.Dropdown(["Requesting a meeting", "Reporting conflict with scheduled meeting time", "Requesting clarification", "Requesting to leave early", "Requesting a leave of absence", "Requesting a letter of recommendation", "Requesting a referral for a job application"], label="In this email, I am...")
                dates = gr.Textbox(label="Relevant Dates", placeholder="MM/DD/YYYY")
                email = gr.Textbox(label="Input", lines=10, placeholder="Enter your Message Here!")
                model_id = gr.Dropdown(["GPT-3", "EleutherAI/gpt-neo-1.3B", "bigscience/bloom"], label="Choose a model")
                submit_button = gr.Button("Generate my email!")
        text_output = gr.Textbox(lines=10, label="Email", placeholder="Your generated email!", interactive=True)
    input_list = [sender, recipient, recipient_name, subject, dates, email, model_id]
    submit_button.click(pre_query, inputs=input_list, outputs=text_output)
demo.launch(debug=True)