{ "repo_id": "habanoz/wiki-text-gpt-v0.11", "trainer_config": { "seq_length": 512, "gradient_accumulation_steps": 1, "batch_size": 32, "data_dir": "eco-news-tr", "warmup_iters": 100, "learning_rate": 0.001, "lr_decay_iters": 5000, "max_iters": 5000, "min_lr": 0.0001, "weight_decay": 0.1, "beta1": 0.9, "beta2": 0.99, "compile": false, "decay_lr": true, "seed": 147, "log_interval": 10, "eval_interval": 250, "eval_iters": 200, "out_dir": "wiki-text-gpt-v0.11", "wandb_log": true, "wandb_project": "NB-Haber-GPT-Training", "wandb_run_name": "haber-gpt-v1.021(wikitr-tokenizer-seed-147)", "wandb_run_id": "1721342972", "grad_norm_clip": 1.0, "dtype": "float16" } }