# flashsloth/train/train_pretrain.py
# Adapted from https://github.com/lm-sys/FastChat. Below is the original copyright:
# Adapted from tatsu-lab@stanford_alpaca. Below is the original copyright:
# Make training more memory efficient by monkey patching the LLaMA model with FlashAttn.
import sys

# Make the repository root importable when this file is run as a script.
sys.path.append('./')

# The FlashAttention monkey patch (borrowed from LLaVA) must be applied before
# importing transformers; it is left disabled in this script:
# from llava.train.llama_flash_attn_monkey_patch import replace_llama_attn_with_flash_attn
# replace_llama_attn_with_flash_attn()
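# For reference, a minimal sketch of how such a FlashAttention monkey patch is
# typically wired up (illustrative only; `my_patches` and `flash_attn_forward`
# are hypothetical names, not part of this repo):
#
#   import transformers
#   from my_patches import flash_attn_forward  # hypothetical FlashAttn-based forward
#   # Reassign the attention forward on the HF LLaMA class before any model is built,
#   # so every LlamaAttention instance created afterwards uses the patched implementation.
#   transformers.models.llama.modeling_llama.LlamaAttention.forward = flash_attn_forward
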
from flashsloth.train.pretrain import train

if __name__ == "__main__":
    train()