import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load GraphCodeBERT model and tokenizer
# (GraphCodeBERT is an encoder-only, RoBERTa-style model, so transformers will warn
#  when it is loaded as a causal LM and the generated text will be rough.)
tokenizer = AutoTokenizer.from_pretrained("microsoft/graphcodebert-base")
model = AutoModelForCausalLM.from_pretrained("microsoft/graphcodebert-base")

# Define input and output components
input_box = gr.Textbox(lines=5, label="Input")
output_box = gr.Textbox(label="Output")

# Define function to use GraphCodeBERT
def use_graphcodebert(query):
    # Encode the natural language query
    input_ids = tokenizer.encode(query, return_tensors="pt")
    # Generate output token ids
    output_ids = model.generate(input_ids, max_length=50)
    # Decode and return the generated text
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

# Create and launch Gradio interface
# (gr.Interface has no from_pretrained constructor and takes no model/tokenizer
#  arguments; the function, inputs, and outputs are passed directly.)
iface = gr.Interface(
    fn=use_graphcodebert,
    inputs=input_box,
    outputs=output_box,
    title="GraphCodeBERT Code Synthesis",  # Title for the web app
    description="Enter a natural language query and get a code snippet generated by GraphCodeBERT.",  # Description shown under the title
    examples=[
        ["create a function that returns the sum of two numbers"],
        ["sort a list of numbers in ascending order"],
    ],  # Example inputs
    theme="huggingface",  # Theme string accepted by older Gradio releases; newer versions may expect a Theme object
)
iface.launch()
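# Optional sanity check: once the model has loaded, the helper can be called
# directly (for example from a Python REPL) to confirm that generation runs
# end to end before relying on the web UI; the prompt is one of the examples above.
#
#     use_graphcodebert("create a function that returns the sum of two numbers")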