Update app.py
app.py CHANGED
@@ -20,10 +20,10 @@ def make_inference(resume):
     batch = tokenizer(f"Below is an a resume, please write an excecutive summary this resume.\n\n### Resume:\n{resume}", return_tensors='pt')
 
     with torch.cuda.amp.autocast():
-        output_tokens = model.generate(**batch, max_new_tokens=
-
-    display(Markdown((tokenizer.decode(output_tokens[0], skip_special_tokens=True))))
+        output_tokens = model.generate(**batch, max_new_tokens=50)
+
+    return tokenizer.decode(output_tokens[0], skip_special_tokens=True)
 
 if __name__ == "__main__":
     import gradio as gr
 
@@ -34,7 +34,7 @@ if __name__ == "__main__":
         ],
         gr.outputs.Textbox(label="Executive Summary"),
         title="Exective Summary Generator",
-        description="
+        description="This generates an Exectutive Summary from a Resume",
     ).launch()
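For context, here is a minimal sketch of what app.py plausibly looks like after this commit: the generation step now returns the decoded string so Gradio can display it, instead of calling display(Markdown(...)), which only works in a notebook. The model checkpoint, its loading code, and the input widget are assumptions (the diff only shows the closing `],` of the inputs list), and the prompt/description strings are paraphrased with typos corrected.

```python
# Hypothetical reconstruction of app.py after this commit; model setup is assumed, not part of the diff.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "gpt2"  # placeholder; the Space's actual checkpoint is not shown in the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)


def make_inference(resume):
    # Build the prompt and tokenize it into PyTorch tensors.
    batch = tokenizer(
        f"Below is a resume, please write an executive summary of this resume.\n\n### Resume:\n{resume}",
        return_tensors="pt",
    )
    # Mixed-precision autocast, as in the original code (only has an effect on CUDA).
    with torch.cuda.amp.autocast():
        output_tokens = model.generate(**batch, max_new_tokens=50)
    # Key change in this commit: return the decoded text so the Gradio output Textbox can render it.
    return tokenizer.decode(output_tokens[0], skip_special_tokens=True)


if __name__ == "__main__":
    import gradio as gr

    # Uses the older gr.inputs/gr.outputs API, matching the diff.
    gr.Interface(
        make_inference,
        [
            gr.inputs.Textbox(lines=10, label="Resume"),  # assumed input field
        ],
        gr.outputs.Textbox(label="Executive Summary"),
        title="Executive Summary Generator",
        description="This generates an Executive Summary from a Resume",
    ).launch()
```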