mosaicml[gcs,libcloud,mlflow,oci,wandb]<0.17,>=0.16.3
accelerate>=0.20
transformers>=4.33
mosaicml-streaming<0.7,>=0.6
torch>=1.13.1
datasets>=2.14.5
fsspec==2023.6.0
sentencepiece==0.1.97
einops==0.5.0
omegaconf<3,>=2.2.3
slack-sdk<4
mosaicml-cli<1,>=0.3
onnx==1.14.0
onnxruntime==1.15.1
boto3<2,>=1.21.45
huggingface-hub<1.0,>=0.17.0

[all]
tiktoken==0.4.0
scipy<=1.11.0,>=1.10.0
xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v1.0.9#subdirectory=csrc/xentropy
pytest<8,>=7.2.1
mosaicml[tensorboard]<0.17,>=0.16.1
peft==0.4.0
pre-commit<3,>=2.18.1
pytest_codeblocks<0.17,>=0.16.1
bitsandbytes==0.39.1
hf_transfer==0.1.3
toml<0.11,>=0.10.2
pyright==1.1.256
packaging<23,>=21
mosaicml-turbo==0.0.4
loralib==0.1.1
flash-attn==1.0.9
pytest-cov<5,>=4
openai==0.27.8

[all-cpu]
tiktoken==0.4.0
scipy<=1.11.0,>=1.10.0
pytest<8,>=7.2.1
mosaicml[tensorboard]<0.17,>=0.16.1
peft==0.4.0
pre-commit<3,>=2.18.1
pytest_codeblocks<0.17,>=0.16.1
bitsandbytes==0.39.1
hf_transfer==0.1.3
toml<0.11,>=0.10.2
pyright==1.1.256
packaging<23,>=21
loralib==0.1.1
pytest-cov<5,>=4
openai==0.27.8

[all-flash2]
scipy<=1.11.0,>=1.10.0
peft==0.4.0
pytest_codeblocks<0.17,>=0.16.1
xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v2.3.2#subdirectory=csrc/xentropy
pyright==1.1.256
tiktoken==0.4.0
hf_transfer==0.1.3
mosaicml-turbo==0.0.4
openai==0.27.8
xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v1.0.9#subdirectory=csrc/xentropy
mosaicml[tensorboard]<0.17,>=0.16.1
bitsandbytes==0.39.1
toml<0.11,>=0.10.2
loralib==0.1.1
pytest<8,>=7.2.1
pre-commit<3,>=2.18.1
flash-attn==2.3.2
packaging<23,>=21
flash-attn==1.0.9
pytest-cov<5,>=4

[dev]
pre-commit<3,>=2.18.1
pytest<8,>=7.2.1
pytest_codeblocks<0.17,>=0.16.1
pytest-cov<5,>=4
pyright==1.1.256
toml<0.11,>=0.10.2
packaging<23,>=21
hf_transfer==0.1.3

[gpu]
flash-attn==1.0.9
mosaicml-turbo==0.0.4
xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v1.0.9#subdirectory=csrc/xentropy

[gpu-flash2]
flash-attn==2.3.2
mosaicml-turbo==0.0.4
xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v2.3.2#subdirectory=csrc/xentropy

[openai]
openai==0.27.8
tiktoken==0.4.0

[peft]
loralib==0.1.1
bitsandbytes==0.39.1
scipy<=1.11.0,>=1.10.0
peft==0.4.0

[tensorboard]
mosaicml[tensorboard]<0.17,>=0.16.1
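
The bracketed group names above are setuptools "extras". As a minimal illustrative sketch only (not the project's actual setup.py; the distribution name and the trimmed base list below are placeholders, while the pins for the [gpu] and [openai] groups are copied verbatim from the listing), such groups would be declared roughly like this:

# Illustrative sketch: how the [gpu] and [openai] groups map onto
# setuptools' extras_require. The distribution name is a placeholder.
from setuptools import setup, find_packages

extras = {
    'gpu': [
        'flash-attn==1.0.9',
        'mosaicml-turbo==0.0.4',
        'xentropy-cuda-lib@ git+https://github.com/HazyResearch/flash-attention.git@v1.0.9#subdirectory=csrc/xentropy',
    ],
    'openai': [
        'openai==0.27.8',
        'tiktoken==0.4.0',
    ],
}

setup(
    name='example-llm-foundry',  # placeholder, not taken from the listing
    packages=find_packages(),
    install_requires=[
        'torch>=1.13.1',
        'transformers>=4.33',
        # ...remaining base requirements as listed above
    ],
    extras_require=extras,
)

Installing with an extra, for example pip install 'example-llm-foundry[gpu-flash2]' (or pip install -e '.[gpu-flash2]' from a source checkout), pulls in the pins under [gpu-flash2] in addition to the base requirements.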