llama3.2_3b_lora_sft_241010 / trainer_log.jsonl
{"current_steps": 10, "total_steps": 50, "loss": 2.7421, "learning_rate": 4.849231551964771e-05, "epoch": 1.7391304347826086, "percentage": 20.0, "elapsed_time": "0:01:32", "remaining_time": "0:06:08", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 20, "total_steps": 50, "loss": 2.3269, "learning_rate": 3.7500000000000003e-05, "epoch": 3.4782608695652173, "percentage": 40.0, "elapsed_time": "0:03:05", "remaining_time": "0:04:38", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 30, "total_steps": 50, "loss": 2.0529, "learning_rate": 2.0658795558326743e-05, "epoch": 5.217391304347826, "percentage": 60.0, "elapsed_time": "0:04:33", "remaining_time": "0:03:02", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 40, "total_steps": 50, "loss": 1.9645, "learning_rate": 5.848888922025553e-06, "epoch": 6.956521739130435, "percentage": 80.0, "elapsed_time": "0:06:09", "remaining_time": "0:01:32", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 50, "total_steps": 50, "loss": 1.9208, "learning_rate": 0.0, "epoch": 8.695652173913043, "percentage": 100.0, "elapsed_time": "0:07:45", "remaining_time": "0:00:00", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 50, "total_steps": 50, "eval_loss": 1.8110594749450684, "epoch": 8.695652173913043, "percentage": 100.0, "elapsed_time": "0:07:51", "remaining_time": "0:00:00", "throughput": "0.00", "total_tokens": 0}
{"current_steps": 50, "total_steps": 50, "epoch": 8.695652173913043, "percentage": 100.0, "elapsed_time": "0:07:52", "remaining_time": "0:00:00", "throughput": "0.00", "total_tokens": 0}