from typing import Iterator
import gradio as gr
from huggingface_hub import InferenceClient

# Serverless Hugging Face Inference API client for the chosen chat model
model_id = "microsoft/Phi-3-mini-4k-instruct"
client = InferenceClient(model_id)

# Prompt template: ask the model for 10 plausible-sounding (fictional) dataset names with tags
GENERATE_DATASET_NAMES_FOR_QUERY = (
    "A Machine Learning Practitioner is looking for a dataset that matches '{query}'. "
    "Generate a list of 10 names of quality datasets that don't exist but sound plausible and would "
    "be helpful. Feel free to reuse words from the query '{query}' to name the datasets. "
    "Give each dataset descriptive tags/keywords and use the following format:\n1. DatasetName (tag1, tag2, tag3)"
)

def stream_response(msg: str) -> Iterator[str]:
    """Stream a chat completion from the Inference API token by token."""
    for message in client.chat_completion(
        messages=[{"role": "user", "content": msg}],
        max_tokens=500,
        stream=True,
    ):
        token = message.choices[0].delta.content
        if token:  # the final streamed chunk may carry no content
            yield token

def gen_datasets(query: str) -> Iterator[str]:
    """Yield the growing response so Gradio can display it as it streams in."""
    output = ""
    for token in stream_response(GENERATE_DATASET_NAMES_FOR_QUERY.format(query=query)):
        output += token
        yield output

# Simple text-in/text-out UI; the generator output streams into the textbox
demo = gr.Interface(fn=gen_datasets, inputs="text", outputs="text")
demo.launch()
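
# Illustrative sketch (not part of the original app): gen_datasets is a plain
# generator, so it can be exercised without the Gradio UI for a quick local check.
# The example query string below is made up; any short description of the desired
# data works:
#
#     last = ""
#     for partial in gen_datasets("street-level imagery for autonomous driving"):
#         last = partial
#     print(last)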