"""Entry point for LLaVA-LLaMA-3 training.

Launches the project's `train` routine with FlashAttention-2 as the
attention backend (requires the `flash-attn` package and a compatible GPU
— TODO confirm in the deployment environment).
"""

from llava_llama3.train.train import train

if __name__ == "__main__":
    # Guarded so importing this module (e.g. by multiprocessing workers)
    # does not trigger a training run.
    train(attn_implementation="flash_attention_2")