Spaces:
Sleeping
Sleeping
initial commit
Browse files
app.py
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Iterator
|
2 |
+
|
3 |
+
import gradio as gr
|
4 |
+
from huggingface_hub import InferenceClient
|
5 |
+
|
6 |
+
|
7 |
+
# Hosted model used to invent dataset names; called through the
# Hugging Face serverless Inference API (no local weights needed).
model_id = "microsoft/Phi-3-mini-4k-instruct"
client = InferenceClient(model_id)
|
9 |
+
|
10 |
+
# Prompt template sent to the model; '{query}' is substituted (twice) by
# gen_datasets() via str.format. The requested output format
# ("1. DatasetName (tag1, tag2, tag3)") is what the UI displays verbatim.
# NOTE(review): "Practioner" is a typo, but it is part of the runtime
# prompt string, so it is deliberately left unchanged here.
GENERATE_DATASET_NAMES_FOR_QUERY = (
    "A Machine Learning Practioner is looking for a dataset that matches '{query}'. "
    "Generate a list of 10 names of quality dataset that don't exist but sound plausible and would "
    "be helpful. Feel free to reuse words from the query '{query}' to name the datasets. "
    "Give each dataset descriptive tags/keywords and use the following format:\n1. DatasetName (tag1, tag2, tag3)"
)
|
16 |
+
|
17 |
+
|
18 |
+
def stream_reponse(msg: str) -> Iterator[str]:
    """Stream the model's reply to *msg* one text chunk at a time.

    Sends a single-turn chat completion request through the module-level
    ``client`` and yields each delta's text as it arrives.

    Parameters:
        msg: The user message to send to the model.

    Yields:
        Text fragments of the reply, in order. Chunks whose delta carries
        no text yield ``""`` instead of ``None``, so callers can safely
        concatenate every yielded value.
    """
    for message in client.chat_completion(
        messages=[{"role": "user", "content": msg}],
        max_tokens=500,
        stream=True,
    ):
        # delta.content is None on non-text chunks (e.g. the final stop
        # chunk of a stream); substitute "" so concatenation never breaks.
        yield message.choices[0].delta.content or ""
|
25 |
+
|
26 |
+
|
27 |
+
def gen_datasets(query: str) -> Iterator[str]:
    """Yield progressively longer snapshots of the generated dataset list.

    Formats *query* into GENERATE_DATASET_NAMES_FOR_QUERY and streams the
    model's answer, yielding the accumulated text after each chunk so a
    Gradio text output can update live while generation is in progress.

    Parameters:
        query: Free-text description of the dataset the user wants.

    Yields:
        The full response text accumulated so far, once per chunk.
    """
    output = ""
    for token in stream_reponse(GENERATE_DATASET_NAMES_FOR_QUERY.format(query=query)):
        # Guard against None chunks from the stream so ``+=`` cannot
        # raise TypeError; this keeps the fix self-contained here.
        output += token or ""
        yield output
|
32 |
+
|
33 |
+
|
34 |
+
# Minimal text-in/text-out UI. Because gen_datasets is a generator,
# Gradio streams each yielded snapshot into the output textbox.
demo = gr.Interface(fn=gen_datasets, inputs="text", outputs="text")

# Launch only when executed as a script (how Spaces runs app.py), so
# importing this module elsewhere does not start a server.
if __name__ == "__main__":
    demo.launch()
|