import spaces
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = 'yuntian-deng/gpt2-implicit-cot-multiplication'
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)


def preprocess(num):
    """Reverse the digits and space-separate them so each digit is its own token."""
    num = str(num).strip().replace(' ', '')
    return ' '.join(num[::-1])


@spaces.GPU
def predict_product(num1, num2):
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    input_text = f'{preprocess(num1)} * {preprocess(num2)} ='
    inputs = tokenizer(input_text, return_tensors='pt').to(device)
    model.to(device)

    outputs = model.generate(**inputs, max_new_tokens=40)
    raw_output = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Keep only the newly generated tokens (the part after the prompt), then strip
    # spaces and undo the digit reversal to recover the predicted product.
    generated = tokenizer.decode(outputs[0][inputs['input_ids'].shape[1]:], skip_special_tokens=True)
    prediction = generated.strip().replace(' ', '')[::-1]

    return input_text, raw_output, prediction


demo = gr.Interface(
    fn=predict_product,
    inputs=[
        # precision=0 makes Gradio pass integers rather than floats.
        gr.Number(label='First Number (up to 9 digits)', precision=0),
        gr.Number(label='Second Number (up to 9 digits)', precision=0),
    ],
    outputs=[
        gr.Textbox(label='Raw Input to GPT-2'),
        gr.Textbox(label='Raw Output from GPT-2'),
        gr.Textbox(label='Predicted Product'),
    ],
    title='GPT-2 Multiplication Predictor',
    description='Enter two numbers up to 9 digits each and get the predicted product.',
)

demo.launch()