"""Generate a natural-language summary of a C# snippet with MobiLlama.

Loads the MobiLlama 0.5B causal language model from the Hugging Face Hub
(downloads weights on first run), feeds it a small C# function as the
prompt, and prints the decoded generation.

NOTE(review): the base MobiLlama checkpoint is a plain causal LM, not an
instruction-tuned summarizer — the "summary" is just a continuation of
the prompt. Confirm against the model card whether a prompt template or
an instruct variant is intended here.
"""
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the MobiLlama model and tokenizer.
model_name = "MBZUAI/MobiLlama-05B"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Example C# code snippet to summarize.
code_snippet_csharp = """
public int Add(int a, int b)
{
    return a + b;
}
"""

# Tokenize the prompt and generate a continuation.
inputs = tokenizer(code_snippet_csharp, return_tensors="pt")
# Use max_new_tokens (not max_length): max_length caps prompt + output
# combined, so a long prompt would leave no room for generation.
# Pass attention_mask explicitly so generate() does not have to guess
# padding, which transformers warns is unreliable.
outputs = model.generate(
    inputs.input_ids,
    attention_mask=inputs.attention_mask,
    max_new_tokens=50,
)
summary = tokenizer.decode(outputs[0], skip_special_tokens=True)
print("C# Code Summary:", summary)