Spaces: Running on Zero
import spaces
import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# GPT-2 finetuned to multiply by internalizing its chain of thought step by step.
model_name = 'yuntian-deng/gpt2-implicit-cot-multiplication'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
def preprocess(num):
    num = str(num).strip().replace(' ', '')
    # gr.Number passes values as floats (e.g. 123456.0), so drop any fractional part.
    num = str(int(float(num)))
    # The model expects digits least-significant first, separated by spaces.
    reversed_num = ' '.join(num[::-1])
    return reversed_num

def postprocess(raw_output):
    # Undo the preprocessing: strip spaces and restore most-significant-first digit order.
    prediction = raw_output.replace(' ', '')[::-1]
    return prediction
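# A quick sanity check of the digit-level transforms above (values worked out by hand,
# not model output; the numbers are arbitrary examples):
#   preprocess(123)      -> '3 2 1'   (digits reversed and space-separated)
#   postprocess('6 5 4') -> '456'     (spaces stripped, digit order restored)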
@spaces.GPU  # Request a GPU from ZeroGPU for the duration of this call.
def predict_product(num1, num2):
    # Build the prompt the model was trained on: reversed, space-separated digits.
    input_text = f'{preprocess(num1)} * {preprocess(num2)} ='
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    inputs = tokenizer(input_text, return_tensors='pt').to(device)
    model.to(device)
    outputs = model.generate(**inputs, max_new_tokens=40)
    # Keep only the newly generated tokens, dropping the echoed prompt.
    output = outputs[0][inputs['input_ids'].shape[-1]:]
    raw_output = tokenizer.decode(output, skip_special_tokens=True)
    prediction = postprocess(raw_output)
    return input_text, raw_output, prediction
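# Minimal usage sketch (assumed example inputs; requires the model weights loaded above):
#   text, raw, pred = predict_product(123456, 654321)
#   text is '6 5 4 3 2 1 * 1 2 3 4 5 6 =' and pred holds the predicted product,
#   most significant digit first.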
demo = gr.Interface(
    fn=predict_product,
    inputs=[
        gr.Number(label='First Number (up to 9 digits)'),
        gr.Number(label='Second Number (up to 9 digits)')
    ],
    outputs=[
        gr.Textbox(label='Raw Input to GPT-2'),
        gr.Textbox(label='Raw Output from GPT-2'),
        gr.Textbox(label='Predicted Product')
    ],
    title='GPT-2 Multiplication Predictor',
    description='Enter two numbers up to 9 digits each and get the predicted product.',
    article="""
### Additional Resources
- [Paper: From Explicit CoT to Implicit CoT: Learning to Internalize CoT Step by Step](https://arxiv.org/pdf/2405.14838)
- [Code Repository](https://github.com/da03/Internalize_CoT_Step_by_Step)
- [Tweet Announcement](https://twitter.com/yuntiandeng/status/1795854740879774036)
"""
)

demo.launch()