Florence-VL-8B / llava/train/train_mem.py
jiuhai's picture
llama
c4a668c
raw
history blame contribute delete
115 Bytes
from llava.train.train import train
def main() -> None:
    """Launch LLaVA training with FlashAttention-2 as the attention backend."""
    train(attn_implementation="flash_attention_2")


if __name__ == "__main__":
    main()