alyliann committed on
Commit
4217254
1 Parent(s): fcff1ba

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -23
app.py CHANGED
@@ -1,25 +1,20 @@
1
"""Streamlit app: suggests 5-letter words for a Wordle-style game.

Runs a local text-generation pipeline (SmolLM2-1.7B-Instruct) and feeds it
the user's last guess results; the model's reply is shown as a suggestion.
"""
import os
import torch
import pandas as pd
import streamlit as st
from transformers import pipeline
from huggingface_hub import InferenceClient

MODEL_NAME = 'HuggingFaceTB/SmolLM2-1.7B-Instruct'

# Priming turn: constrains every reply to 5-letter suggestion words.
chat = [
    {"role": "user",
     "content": "You are a helpful chatbot that suggests 5-letter words to guess in a word-guessing game. No need for real-time information, just helpful suggestion words that are always 5 letters."},
]

# Loading the pipeline already downloads/initializes the model; the old
# warm-up call `pipe(chat, ...)` ran a full generation whose output was
# discarded, so it has been removed. The unused InferenceClient (which
# raised KeyError at startup when HF_KEY was unset) is removed as well.
pipe = pipeline("text-generation", model=MODEL_NAME)

prompt = st.text_area('Enter your last Wordle guess results:')
if prompt:
    # Append the user's latest results as a new conversation turn.
    chat.append(
        {
            'role': 'user',
            'content': prompt
        }
    )
    out = pipe(chat, max_new_tokens=128)
    st.subheader('Suggestion:')
    # Chat-format pipelines return the whole conversation; the last
    # message is the model's reply.
    st.write(out[0]['generated_text'][-1]['content'])
 
 
 
 
 
 
1
"""Stream a chat completion from the Hugging Face Inference API.

Sends one user message to SmolLM2-1.7B-Instruct and prints the streamed
reply token-by-token to stdout.
"""
import os

from huggingface_hub import InferenceClient

# Read the token from the environment — never hard-code a secret
# (the previous placeholder "hf_xxx..." would also fail auth at runtime).
client = InferenceClient(api_key=os.environ["HF_KEY"])

messages = [
    {
        "role": "user",
        "content": "What is the capital of France?"
    }
]

stream = client.chat.completions.create(
    model="HuggingFaceTB/SmolLM2-1.7B-Instruct",
    messages=messages,
    max_tokens=500,
    stream=True
)

for chunk in stream:
    # delta.content may be None on role/finish chunks; guard so we don't
    # print the literal string "None".
    content = chunk.choices[0].delta.content
    if content:
        print(content, end="")