Commit 70abf20
Parent(s): 68d064d
chore add vllm to dependencies
1 changed file: pyproject.toml (+1 -1)
pyproject.toml CHANGED

@@ -18,7 +18,7 @@ readme = "README.md"
 license = {text = "Apache 2"}
 
 dependencies = [
-    "distilabel[argilla,hf-inference-endpoints,hf-transformers,instructor,llama-cpp,ollama,openai,outlines] @ git+https://github.com/argilla-io/distilabel.git@feat/add-magpie-support-llama-cpp-ollama",
+    "distilabel[argilla,hf-inference-endpoints,hf-transformers,instructor,llama-cpp,ollama,openai,outlines,vllm] @ git+https://github.com/argilla-io/distilabel.git@feat/add-magpie-support-llama-cpp-ollama",
     "gradio[oauth]>=5.4.0,<6.0.0",
     "transformers>=4.44.2,<5.0.0",
     "sentence-transformers>=3.2.0,<4.0.0",
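The only change is appending `vllm` to distilabel's extras, which pulls in its vLLM backend. A minimal sketch of what that enables, assuming distilabel 1.x exposes a `vLLM` class under `distilabel.llms` and a CUDA GPU is available (the model name below is only an illustrative example):

```python
# Minimal sketch of the vLLM integration made installable by the new `vllm` extra.
# Assumes distilabel 1.x (`distilabel.llms.vLLM`) and a local GPU; the model name
# is only an example.
from distilabel.llms import vLLM

llm = vLLM(model="meta-llama/Meta-Llama-3.1-8B-Instruct")
llm.load()  # starts the vLLM engine and loads the weights

outputs = llm.generate(
    inputs=[[{"role": "user", "content": "Say hello in one short sentence."}]],
    max_new_tokens=32,
)
print(outputs)
```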