Commit fd184d9 · 1 Parent(s): b493b18
Josh Cole committed

two epochs only

Files changed (3)
  1. Generate.ipynb +11 -11
  2. pytorch_model.bin +1 -1
  3. training_args.bin +1 -1
Generate.ipynb CHANGED
@@ -253,7 +253,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 50,
+    "execution_count": 54,
     "id": "71351cf4-6d00-40ae-89cc-cedb87073625",
     "metadata": {},
     "outputs": [
@@ -363,7 +363,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 51,
+    "execution_count": 55,
     "id": "208eac7d-9fdd-4c82-b46f-25c1a1f246ee",
     "metadata": {},
     "outputs": [
@@ -410,7 +410,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 52,
+    "execution_count": 56,
     "id": "d58f6b8c-441c-4fa9-a308-e687948875e1",
     "metadata": {},
     "outputs": [
@@ -421,11 +421,11 @@
      "The following columns in the training set don't have a corresponding argument in `Wav2Vec2ForCTC.forward` and have been ignored: input_length.\n",
      "***** Running training *****\n",
      " Num examples = 1\n",
-     " Num Epochs = 3\n",
+     " Num Epochs = 30\n",
      " Instantaneous batch size per device = 8\n",
      " Total train batch size (w. parallel, distributed & accumulation) = 8\n",
      " Gradient Accumulation steps = 1\n",
-     " Total optimization steps = 3\n"
+     " Total optimization steps = 30\n"
     ]
    },
    {
@@ -434,8 +434,8 @@
      "\n",
      " <div>\n",
      " \n",
-     " <progress value='3' max='3' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
-     " [3/3 00:02, Epoch 3/3]\n",
+     " <progress value='30' max='30' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
+     " [30/30 00:28, Epoch 30/30]\n",
      " </div>\n",
      " <table border=\"1\" class=\"dataframe\">\n",
      " <thead>\n",
@@ -470,10 +470,10 @@
    {
     "data": {
      "text/plain": [
-      "TrainOutput(global_step=3, training_loss=15.702210744222006, metrics={'train_runtime': 3.157, 'train_samples_per_second': 0.95, 'train_steps_per_second': 0.95, 'total_flos': 94374986431680.0, 'train_loss': 15.702210744222006, 'epoch': 3.0})"
+      "TrainOutput(global_step=30, training_loss=16.291970825195314, metrics={'train_runtime': 29.1768, 'train_samples_per_second': 1.028, 'train_steps_per_second': 1.028, 'total_flos': 943749864316800.0, 'train_loss': 16.291970825195314, 'epoch': 30.0})"
      ]
     },
-    "execution_count": 52,
+    "execution_count": 56,
     "metadata": {},
     "output_type": "execute_result"
    }
@@ -484,7 +484,7 @@
    },
    {
     "cell_type": "code",
-    "execution_count": 47,
+    "execution_count": 57,
     "id": "333d43cf-add3-4d78-bbca-b44c638519fe",
     "metadata": {},
     "outputs": [
@@ -505,7 +505,7 @@
     "traceback": [
      "---------------------------------------------------------------------------",
      "AttributeError                            Traceback (most recent call last)",
-     "Input In [47], in <cell line: 1>()\n----> 1 trainer.push_to_hub(hub_model_id=\"sharpcoder/wav2vec2_bjorn\")\n",
+     "Input In [57], in <cell line: 1>()\n----> 1 trainer.push_to_hub(hub_model_id=\"sharpcoder/wav2vec2_bjorn\")\n",
      "File ~/.local/lib/python3.10/site-packages/transformers/trainer.py:2677, in Trainer.push_to_hub(self, commit_message, blocking, **kwargs)\n   2674 if not self.is_world_process_zero():\n   2675     return\n-> 2677 git_head_commit_url = self.repo.push_to_hub(commit_message=commit_message, blocking=blocking)\n   2678 # push separately the model card to be independant from the rest of the model\n   2679 if self.args.should_save:\n",
      "AttributeError: 'Trainer' object has no attribute 'repo'"
     ]
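
Note on the traceback above: the usual cause of this AttributeError is that TrainingArguments was created with push_to_hub left as False, so the Trainer never set up the self.repo clone that trainer.py:2677 tries to push from. Below is a minimal sketch of a setup that matches the logged run (batch size 8, no gradient accumulation, 30 epochs) and avoids the error; the base checkpoint, output_dir, train_dataset, and data_collator are assumptions, not taken from this commit.

from transformers import Trainer, TrainingArguments, Wav2Vec2ForCTC

# Assumed base checkpoint; the notebook's actual starting model is not visible in this diff.
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base")

training_args = TrainingArguments(
    output_dir="wav2vec2_bjorn",               # assumed local output directory
    per_device_train_batch_size=8,             # matches "Instantaneous batch size per device = 8"
    gradient_accumulation_steps=1,             # matches "Gradient Accumulation steps = 1"
    num_train_epochs=30,                       # matches "Num Epochs = 30"
    push_to_hub=True,                          # lets Trainer initialize its Hub repo (self.repo)
    hub_model_id="sharpcoder/wav2vec2_bjorn",  # target repo taken from the traceback
)

# train_dataset and data_collator are assumed to be defined earlier in Generate.ipynb.
trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=train_dataset,
    data_collator=data_collator,
)

trainer.train()
trainer.push_to_hub()  # no hub_model_id kwarg needed; it is read from TrainingArguments
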
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4d3f3abcf77f71881019078ae17cf773e46b424e4176401072a817530aabafac
+ oid sha256:6e55a1042cf8cc902bd68a3e8798f90a9cbb95b313e4a3c79082b4dc9d0fc05f
  size 377667031
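
The pointer records only the new checkpoint's SHA-256 and size. A quick way to confirm that a locally checked-out pytorch_model.bin matches this commit is to hash it and compare against the oid above; a minimal sketch, assuming git-lfs has materialized the real file rather than leaving the pointer in place:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so the ~378 MB checkpoint is never held in memory at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid from the new pointer in this commit
expected = "6e55a1042cf8cc902bd68a3e8798f90a9cbb95b313e4a3c79082b4dc9d0fc05f"
print(sha256_of("pytorch_model.bin") == expected)
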
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ed19b832c7db582771504df1e4a7dc89ac95ce233c3914ed7c2c37ff4ea55f88
+ oid sha256:eb6aaa145951105af08a3ef5d6fd296d211fe596176abfe9aee5116147e093b5
  size 2735
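
training_args.bin is the TrainingArguments object that Trainer saves alongside the weights, so the settings behind this run can be inspected without re-opening the notebook; a small sketch, assuming the file has been fetched through git-lfs:

import torch

# The file is a pickled TrainingArguments, not a tensor state dict; very recent
# torch releases may require torch.load(..., weights_only=False) to unpickle it.
args = torch.load("training_args.bin")
print(args.num_train_epochs, args.per_device_train_batch_size, args.push_to_hub)
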