import os

import torch
from huggingface_hub import login
from transformers import pipeline

# Authenticate with the Hugging Face Hub. Read the access token from the
# environment instead of hardcoding a secret in the script.
login(token=os.environ["HF_TOKEN"])

# Use a pipeline as a high-level helper
pipe = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf")

print("Providing input to the pipeline.....")
response = pipe("Hello, how are you?")
print(response)
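
# A possible extension, not part of the original script: the text-generation
# pipeline forwards generation keyword arguments (e.g. max_new_tokens,
# do_sample, temperature) to model.generate(), so output length and sampling
# can be controlled per call. The values below are illustrative assumptions.
response = pipe(
    "Hello, how are you?",
    max_new_tokens=64,   # cap the length of the generated continuation
    do_sample=True,      # sample instead of greedy decoding
    temperature=0.7,     # soften the sampling distribution
)
# The pipeline returns a list of dicts; the text lives under "generated_text".
print(response[0]["generated_text"])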