import os

from huggingface_hub import InferenceClient

# Read the Hugging Face API token from the environment and create an inference client.
api_key = os.getenv("HF_TOKEN")
client = InferenceClient(api_key=api_key)

messages = [
    {"role": "system", "content": "You are an expert prompt engineer for diffusion image generation models."},
    {"role": "user", "content": "Visually describe a random character in extreme detail."},
]

# Stream a chat completion from the Zephyr model; tokens are returned incrementally.
stream = client.chat.completions.create(
    model="HuggingFaceH4/zephyr-7b-beta",
    messages=messages,
    temperature=0.5,
    max_tokens=2048,
    top_p=0.7,
    stream=True,
)

# Print the streamed tokens as they arrive and collect them into one prompt string.
prompt_text = ""
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:  # skip chunks that carry no content (e.g. the final one)
        prompt_text += delta
        print(delta, end="", flush=True)
print()
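
# Optional follow-up sketch: pass the generated description to a text-to-image model.
# The model id below is an assumption (any diffusion model available through the
# Inference API could be substituted); InferenceClient.text_to_image returns a
# PIL.Image, so Pillow must be installed.
image = client.text_to_image(
    prompt_text,
    model="stabilityai/stable-diffusion-xl-base-1.0",  # assumed model id, swap as needed
)
image.save("character.png")  # save the generated character image to disk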