import os

import torch  # backend used by transformers for this model
from huggingface_hub import login
# Use a pipeline as a high-level helper
from transformers import pipeline

# Read the Hugging Face access token from the environment instead of
# hardcoding it in the script, then authenticate with the Hub.
login(token=os.environ["HF_TOKEN"])

# Load the Llama 2 chat model behind a text-generation pipeline.
pipe = pipeline("text-generation", model="meta-llama/Llama-2-7b-chat-hf")

print("Providing input to the pipeline...")

# The pipeline returns a list of dicts, one per generated sequence.
response = pipe("Hello, how are you?")
print(response)
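
# A minimal sketch of post-processing the output: for a single string prompt,
# the text-generation pipeline returns a list of dicts with a "generated_text"
# key, and it forwards standard generation arguments such as max_new_tokens
# and do_sample. The argument values below are illustrative, not taken from
# the original script.
response = pipe("Hello, how are you?", max_new_tokens=64, do_sample=True)
print(response[0]["generated_text"])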