hyechanjun commited on
Commit
39eaf65
·
1 Parent(s): f0bcacc

Switch to streamlit

Browse files
Files changed (1) hide show
  1. app.py +55 -0
app.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Streamlit demo for the hyechanjun interview-question BART models.
import streamlit as st
import torch  # NOTE(review): not referenced directly; transformers uses it at runtime
from transformers import BartForConditionalGeneration, BartTokenizer

# Model and tokenizer are (re)loaded inside genQuestion() per selection;
# module-level slots start unset.
model = None
tok = None

# Example (model name, context) pairs, one per model.
# NOTE(review): the 1st and 3rd context strings were cut off in the extracted
# source ("...not very fri", "...musicians out the") — restore the full
# sentences from the original repository before shipping.
examples = [
    ["interview-question-remake", "I have a cat named dolche and he's not very fri"],
    ["interview-length-tagged", "Today's weather was really nice."],
    ["reverse-interview-question", "There are so many incredible musicians out the"],
]
def genQuestion(model_choice, context):
    """Generate four candidate interview questions for *context*.

    Args:
        model_choice: one of "interview-question-remake",
            "interview-length-tagged", "reverse-interview-question"
            (the hyechanjun/* checkpoints on the Hugging Face Hub).
        context: free-form text to condition the generation on.

    Returns:
        str: four newline-terminated candidate questions.

    Raises:
        ValueError: if *model_choice* is not one of the known names.
    """
    if model_choice == "interview-question-remake":
        model = BartForConditionalGeneration.from_pretrained("hyechanjun/interview-question-remake")
        tok = BartTokenizer.from_pretrained("hyechanjun/interview-question-remake")
    elif model_choice == "interview-length-tagged":
        model = BartForConditionalGeneration.from_pretrained("hyechanjun/interview-length-tagged")
        tok = BartTokenizer.from_pretrained("hyechanjun/interview-length-tagged")
    elif model_choice == "reverse-interview-question":
        model = BartForConditionalGeneration.from_pretrained("hyechanjun/reverse-interview-question")
        tok = BartTokenizer.from_pretrained("hyechanjun/reverse-interview-question")
    else:
        # The original fell through to an UnboundLocalError on `tok` here;
        # fail with an explicit, actionable message instead.
        raise ValueError(f"Unknown model choice: {model_choice!r}")

    inputs = tok(context, return_tensors="pt")
    # Diverse beam search: 4 beams split into 4 groups (1 beam each) so the
    # diversity_penalty can apply; return all 4 sequences.
    output = model.generate(
        inputs["input_ids"],
        num_beams=4,
        max_length=64,
        min_length=9,
        num_return_sequences=4,
        diversity_penalty=1.0,
        num_beam_groups=4,
    )
    # One decoded candidate per line.
    candidates = [
        tok.decode(beam, skip_special_tokens=True, clean_up_tokenization_spaces=False)
        for beam in output
    ]
    final_output = ""
    for candidate in candidates[:4]:
        final_output += candidate + "\n"
    return final_output


# Title
st.title("Interview AI Test Website")

# Input field. Renamed from `input`, which shadowed the builtin.
user_input = st.text_input('Context')

option = st.selectbox(
    'Please select a model.',
    ('interview-question-remake', 'interview-length-tagged', 'reverse-interview-question'))

# BUG FIX: the original called genQuestion() *above* its definition — a
# Streamlit script runs top-to-bottom, so the first click raised NameError —
# and discarded the return value. Define first, then display the result.
if st.button('Submit'):
    st.write(genQuestion(option, user_input))