Roberto committed on
Commit 018bf44 · 1 Parent(s): 9ce5be9

hg first push

Files changed (33)
  1. SnowballTarget.onnx +1 -1
  2. SnowballTarget/{SnowballTarget-1599976.onnx → SnowballTarget-149984.onnx} +1 -1
  3. SnowballTarget/{SnowballTarget-1599976.pt → SnowballTarget-149984.pt} +2 -2
  4. SnowballTarget/SnowballTarget-1699936.pt +0 -3
  5. SnowballTarget/SnowballTarget-1749960.pt +0 -3
  6. SnowballTarget/SnowballTarget-1799984.onnx +0 -3
  7. SnowballTarget/SnowballTarget-1799984.pt +0 -3
  8. SnowballTarget/SnowballTarget-1849984.onnx +0 -3
  9. SnowballTarget/SnowballTarget-1849984.pt +0 -3
  10. SnowballTarget/SnowballTarget-1899944.onnx +0 -3
  11. SnowballTarget/SnowballTarget-1899944.pt +0 -3
  12. SnowballTarget/SnowballTarget-1949968.onnx +0 -3
  13. SnowballTarget/SnowballTarget-1949968.pt +0 -3
  14. SnowballTarget/{SnowballTarget-1699936.onnx → SnowballTarget-199984.onnx} +1 -1
  15. SnowballTarget/{SnowballTarget-1649992.pt → SnowballTarget-199984.pt} +2 -2
  16. SnowballTarget/SnowballTarget-1999992.onnx +0 -3
  17. SnowballTarget/SnowballTarget-1999992.pt +0 -3
  18. SnowballTarget/SnowballTarget-2000376.onnx +0 -3
  19. SnowballTarget/SnowballTarget-2000376.pt +0 -3
  20. SnowballTarget/{SnowballTarget-1649992.onnx → SnowballTarget-200112.onnx} +1 -1
  21. SnowballTarget/SnowballTarget-200112.pt +3 -0
  22. SnowballTarget/{SnowballTarget-1749960.onnx → SnowballTarget-49936.onnx} +1 -1
  23. SnowballTarget/SnowballTarget-49936.pt +3 -0
  24. SnowballTarget/SnowballTarget-99960.onnx +3 -0
  25. SnowballTarget/SnowballTarget-99960.pt +3 -0
  26. SnowballTarget/checkpoint.pt +2 -2
  27. SnowballTarget/events.out.tfevents.1673823722.lamarmite.489332.0 +0 -3
  28. SnowballTarget/events.out.tfevents.1673863873.b5f5a205233b.1180.0 +3 -0
  29. config.json +1 -1
  30. configuration.yaml +3 -3
  31. run_logs/Player-0.log +52 -39
  32. run_logs/timers.json +166 -166
  33. run_logs/training_status.json +30 -75
SnowballTarget.onnx CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:acabf98140a6dc8ab34f9f2c68cdb740bedc5635763d13106971e2699b5c6f21
3
  size 645119
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bb217b6792f02c6c737080e305d390e06713cc7b16ba1ced89488b05124c5647
3
  size 645119
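
Every binary artifact in this commit (the .onnx policies, the .pt checkpoints, the tfevents file) is stored through Git LFS, so the diffs above and below only show three-line pointer files: a spec version, the object's sha256 oid, and its size in bytes. As a minimal sketch (not part of the commit), the snippet below parses such a pointer with the standard library only; it assumes the LFS objects have not been pulled, so the file on disk still contains the pointer text rather than the real ONNX bytes.

```python
# Minimal sketch: parse a Git LFS pointer file (version / oid / size) into a dict.
# Assumes the checkout was made without `git lfs pull`, so the file is pointer text.
from pathlib import Path

def read_lfs_pointer(path):
    """Return {'version': ..., 'oid': ..., 'size': ...} for a Git LFS pointer file."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        if not line.strip():
            continue
        key, _, value = line.partition(" ")  # e.g. "oid sha256:bb21..." -> ("oid", "sha256:bb21...")
        fields[key] = value
    fields["size"] = int(fields.get("size", 0))
    return fields

if __name__ == "__main__":
    pointer = read_lfs_pointer("SnowballTarget.onnx")
    print(pointer["oid"], pointer["size"])
```

After `git lfs pull` the pointers are replaced by the actual binaries and this sketch no longer applies.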
SnowballTarget/{SnowballTarget-1599976.onnx → SnowballTarget-149984.onnx} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7933853f98178301d95060023013db82b527b77fa06f17738fe2943855c490c1
3
  size 645119
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:189b2aad6961f7a45b4be94611267cf4451b87d3b22db634d7371d46bd098e7b
3
  size 645119
SnowballTarget/{SnowballTarget-1599976.pt → SnowballTarget-149984.pt} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:10c86fe78bc1226ed4779d8d08689f768ab09d883c46085972e0619e1d0a243a
3
- size 3844788
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3b2232857644e2948c3dcecc7e89a8e480b1987cb06a29072fd4889225474053
3
+ size 3845312
SnowballTarget/SnowballTarget-1699936.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:76d6660f41179480ea3da7519f9c44b388054b7f359c178e6cfb1b54489e7cc7
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-1749960.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:d6d3577c9354eedb5be3da4f4770a961432492bd6f51693aa950b9f7b86d8fd4
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-1799984.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:7580bb27200b65ae3a8c26bab14ed7023d66a6a9a54d39dd837592a6f1fe3ec3
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-1799984.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:c0455f06960f461a7a0de8f59bbf9d7aa8cdbec03f1ec9f5034521cc5eed6183
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-1849984.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:1adda266c50c8490c4da7c4489f4cc2097318addbd67cc79f038e56e293b4cb9
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-1849984.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:a3cca88014eaac491218345ba618b91efd116982db883512a435c1b8f2bdcfb8
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-1899944.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:34b724efc9c127cf9ad586af51783d46f1535e8f9623a933b621aef8d6094136
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-1899944.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:4927af5f83c62881111b940b56fe7e94c7db70802cd1416dc1764e0b7f935043
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-1949968.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:e716baaef7e7b98c4ed8b0f303a1bdff26fb071a720f1a999f818503272a2e26
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-1949968.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:4ea4e3dab9ced3cce4002f2433e8660ff1158776cf9b3d2ed0de58c37fda5c22
3
- size 3844788
 
 
 
 
SnowballTarget/{SnowballTarget-1699936.onnx → SnowballTarget-199984.onnx} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3721232ff4cad21c66d35474dcac52002cef30b7a78bf7038b6459d14eedfca7
3
  size 645119
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bb217b6792f02c6c737080e305d390e06713cc7b16ba1ced89488b05124c5647
3
  size 645119
SnowballTarget/{SnowballTarget-1649992.pt → SnowballTarget-199984.pt} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:08829efcb81b159036fd83ea2035db6738f9fef870f0dc7fd7c3259cab8e241e
3
- size 3844788
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46d758ff0bb3aadfbfaa6bd3d5bcdf36fff725792f4316b52c31de7fe7156f21
3
+ size 3845312
SnowballTarget/SnowballTarget-1999992.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:acabf98140a6dc8ab34f9f2c68cdb740bedc5635763d13106971e2699b5c6f21
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-1999992.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:379665e081fde322f42d5c7c5d8fc3d5660500fbb2a7aa213eb917e92772cf43
3
- size 3844788
 
 
 
 
SnowballTarget/SnowballTarget-2000376.onnx DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:acabf98140a6dc8ab34f9f2c68cdb740bedc5635763d13106971e2699b5c6f21
3
- size 645119
 
 
 
 
SnowballTarget/SnowballTarget-2000376.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:f042d4d81496589845355f39b74993dc72222d318356823bda4427ae06cd6416
3
- size 3844788
 
 
 
 
SnowballTarget/{SnowballTarget-1649992.onnx → SnowballTarget-200112.onnx} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3ab4c6b348a63561d55a1695bb8c1be8c990005436e6af9b125d8a8c74151d5f
3
  size 645119
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bb217b6792f02c6c737080e305d390e06713cc7b16ba1ced89488b05124c5647
3
  size 645119
SnowballTarget/SnowballTarget-200112.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:539c948f6760ad6c27f1e6f290055888f6cc6509eea9d297c305f3e81d0e6c00
3
+ size 3845312
SnowballTarget/{SnowballTarget-1749960.onnx → SnowballTarget-49936.onnx} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:45f87a792b86aa7fc5625320232453d0d71459353fde8fc47e6d3cc3adbee6b8
3
  size 645119
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0db3efa9503804cf00edd95e716b0fe1fcff9b80d74f9bcde621e7dd9fecfa79
3
  size 645119
SnowballTarget/SnowballTarget-49936.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d30780fb72b15ec2d812dc89b6d61661cfd2790a805ed9e71822ab7a7aa80b21
3
+ size 3845312
SnowballTarget/SnowballTarget-99960.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6d7cbba532f6e04e0391280df44dc478ec07836a32d1c7ec9488a6b8853ee2aa
3
+ size 645119
SnowballTarget/SnowballTarget-99960.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:01e1e296a3db73069539798e2d9964ca0b33930590af14853171c75a321f8c08
3
+ size 3845312
SnowballTarget/checkpoint.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f042d4d81496589845355f39b74993dc72222d318356823bda4427ae06cd6416
3
- size 3844788
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:539c948f6760ad6c27f1e6f290055888f6cc6509eea9d297c305f3e81d0e6c00
3
+ size 3845312
SnowballTarget/events.out.tfevents.1673823722.lamarmite.489332.0 DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:1b846d79ba1385ed590cd2eddd2fa66e17fe3aa100a4ba4ca33632a59370a843
3
- size 170111
 
 
 
 
SnowballTarget/events.out.tfevents.1673863873.b5f5a205233b.1180.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:734c5503f5aa3146c8776c4f08edd9c479ab04ab180128dfc340975be85fbea9
3
+ size 28090
config.json CHANGED
@@ -1 +1 @@
1
- {"default_settings": null, "behaviors": {"SnowballTarget": {"trainer_type": "ppo", "hyperparameters": {"batch_size": 128, "buffer_size": 2048, "learning_rate": 0.0002, "beta": 0.005, "epsilon": 0.2, "lambd": 0.95, "num_epoch": 3, "learning_rate_schedule": "linear", "beta_schedule": "linear", "epsilon_schedule": "linear"}, "network_settings": {"normalize": false, "hidden_units": 256, "num_layers": 2, "vis_encode_type": "simple", "memory": null, "goal_conditioning_type": "hyper", "deterministic": false}, "reward_signals": {"extrinsic": {"gamma": 0.99, "strength": 1.0, "network_settings": {"normalize": false, "hidden_units": 128, "num_layers": 2, "vis_encode_type": "simple", "memory": null, "goal_conditioning_type": "hyper", "deterministic": false}}}, "init_path": null, "keep_checkpoints": 10, "checkpoint_interval": 50000, "max_steps": 2000000, "time_horizon": 64, "summary_freq": 10000, "threaded": true, "self_play": null, "behavioral_cloning": null}}, "env_settings": {"env_path": "./training-envs-executables/linux/SnowballTarget/SnowballTarget", "env_args": null, "base_port": 5005, "num_envs": 1, "num_areas": 1, "seed": -1, "max_lifetime_restarts": 10, "restarts_rate_limit_n": 1, "restarts_rate_limit_period_s": 60}, "engine_settings": {"width": 84, "height": 84, "quality_level": 5, "time_scale": 20, "target_frame_rate": -1, "capture_frame_rate": 60, "no_graphics": true}, "environment_parameters": null, "checkpoint_settings": {"run_id": "SnowballTarget5", "initialize_from": null, "load_model": false, "resume": false, "force": false, "train_model": false, "inference": false, "results_dir": "results"}, "torch_settings": {"device": null}, "debug": false}
 
1
+ {"default_settings": null, "behaviors": {"SnowballTarget": {"trainer_type": "ppo", "hyperparameters": {"batch_size": 128, "buffer_size": 2048, "learning_rate": 0.0003, "beta": 0.005, "epsilon": 0.2, "lambd": 0.95, "num_epoch": 3, "learning_rate_schedule": "linear", "beta_schedule": "linear", "epsilon_schedule": "linear"}, "network_settings": {"normalize": false, "hidden_units": 256, "num_layers": 2, "vis_encode_type": "simple", "memory": null, "goal_conditioning_type": "hyper", "deterministic": false}, "reward_signals": {"extrinsic": {"gamma": 0.99, "strength": 1.0, "network_settings": {"normalize": false, "hidden_units": 128, "num_layers": 2, "vis_encode_type": "simple", "memory": null, "goal_conditioning_type": "hyper", "deterministic": false}}}, "init_path": null, "keep_checkpoints": 10, "checkpoint_interval": 50000, "max_steps": 200000, "time_horizon": 64, "summary_freq": 10000, "threaded": true, "self_play": null, "behavioral_cloning": null}}, "env_settings": {"env_path": "./training-envs-executables/linux/SnowballTarget/SnowballTarget", "env_args": null, "base_port": 5005, "num_envs": 1, "num_areas": 1, "seed": -1, "max_lifetime_restarts": 10, "restarts_rate_limit_n": 1, "restarts_rate_limit_period_s": 60}, "engine_settings": {"width": 84, "height": 84, "quality_level": 5, "time_scale": 20, "target_frame_rate": -1, "capture_frame_rate": 60, "no_graphics": true}, "environment_parameters": null, "checkpoint_settings": {"run_id": "SnowballTarget1", "initialize_from": null, "load_model": false, "resume": false, "force": false, "train_model": false, "inference": false, "results_dir": "results"}, "torch_settings": {"device": null}, "debug": false}
configuration.yaml CHANGED
@@ -5,7 +5,7 @@ behaviors:
5
  hyperparameters:
6
  batch_size: 128
7
  buffer_size: 2048
8
- learning_rate: 0.0002
9
  beta: 0.005
10
  epsilon: 0.2
11
  lambd: 0.95
@@ -36,7 +36,7 @@ behaviors:
36
  init_path: null
37
  keep_checkpoints: 10
38
  checkpoint_interval: 50000
39
- max_steps: 2000000
40
  time_horizon: 64
41
  summary_freq: 10000
42
  threaded: true
@@ -62,7 +62,7 @@ engine_settings:
62
  no_graphics: true
63
  environment_parameters: null
64
  checkpoint_settings:
65
- run_id: SnowballTarget5
66
  initialize_from: null
67
  load_model: false
68
  resume: false
 
5
  hyperparameters:
6
  batch_size: 128
7
  buffer_size: 2048
8
+ learning_rate: 0.0003
9
  beta: 0.005
10
  epsilon: 0.2
11
  lambd: 0.95
 
36
  init_path: null
37
  keep_checkpoints: 10
38
  checkpoint_interval: 50000
39
+ max_steps: 200000
40
  time_horizon: 64
41
  summary_freq: 10000
42
  threaded: true
 
62
  no_graphics: true
63
  environment_parameters: null
64
  checkpoint_settings:
65
+ run_id: SnowballTarget1
66
  initialize_from: null
67
  load_model: false
68
  resume: false
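
configuration.yaml carries the same three changes in YAML form. A minimal sketch (not part of the commit) to confirm them, assuming PyYAML is available and that the keys nest the same way as in config.json above:

```python
# Minimal sketch: confirm the three settings this commit changes in configuration.yaml.
# Assumes PyYAML is installed (pip install pyyaml).
import yaml

with open("configuration.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["behaviors"]["SnowballTarget"]["hyperparameters"]["learning_rate"])  # 0.0003
print(cfg["behaviors"]["SnowballTarget"]["max_steps"])                         # 200000
print(cfg["checkpoint_settings"]["run_id"])                                    # SnowballTarget1
```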
run_logs/Player-0.log CHANGED
@@ -1,17 +1,40 @@
1
- Mono path[0] = '/home/emmanuel/Documents/code/rl/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/Managed'
2
- Mono config path = '/home/emmanuel/Documents/code/rl/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/MonoBleedingEdge/etc'
3
  Preloaded 'lib_burst_generated.so'
4
  Preloaded 'libgrpc_csharp_ext.x64.so'
 
 
 
5
  Initialize engine version: 2021.3.14f1 (eee1884e7226)
6
- [Subsystems] Discovering subsystems at path /home/emmanuel/Documents/code/rl/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/UnitySubsystems
7
  Forcing GfxDevice: Null
8
  GfxDevice: creating device client; threaded=0; jobified=0
9
  NullGfxDevice:
10
  Version: NULL 1.0 [1.0]
11
  Renderer: Null Device
12
  Vendor: Unity Technologies
13
  Begin MonoManager ReloadAssembly
14
- - Completed reload, in 0.870 seconds
15
  ERROR: Shader Sprites/Default shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
16
  ERROR: Shader Sprites/Mask shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
17
  ERROR: Shader Legacy Shaders/VertexLit shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
@@ -22,15 +45,15 @@ ERROR: Shader Standard shader is not supported on this GPU (none of subshaders/f
22
  WARNING: Shader Unsupported: 'Standard' - All subshaders removed
23
  WARNING: Shader Did you use #pragma only_renderers and omit this platform?
24
  WARNING: Shader If subshaders removal was intentional, you may have forgotten turning Fallback off?
25
- UnloadTime: 0.896450 ms
26
  ERROR: Shader UI/Default shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
27
  requesting resize 84 x 84
28
- Setting up 2 worker threads for Enlighten.
29
  Memory Statistics:
30
  [ALLOC_TEMP_TLS] TLS Allocator
31
  StackAllocators :
32
  [ALLOC_TEMP_MAIN]
33
- Peak usage frame count: [8.0 KB-16.0 KB]: 53653 frames, [16.0 KB-32.0 KB]: 909 frames, [2.0 MB-4.0 MB]: 1 frames
34
  Initial Block Size 4.0 MB
35
  Current Block Size 4.0 MB
36
  Peak Allocated Bytes 2.0 MB
@@ -38,7 +61,7 @@ Memory Statistics:
38
  [ALLOC_TEMP_Loading.AsyncRead]
39
  Initial Block Size 64.0 KB
40
  Current Block Size 64.0 KB
41
- Peak Allocated Bytes 246 B
42
  Overflow Count 0
43
  [ALLOC_TEMP_Loading.PreloadManager]
44
  Initial Block Size 256.0 KB
@@ -80,7 +103,7 @@ Memory Statistics:
80
  Current Block Size 32.0 KB
81
  Peak Allocated Bytes 0 B
82
  Overflow Count 0
83
- [ALLOC_TEMP_EnlightenWorker] x 2
84
  Initial Block Size 64.0 KB
85
  Current Block Size 64.0 KB
86
  Peak Allocated Bytes 0 B
@@ -105,7 +128,7 @@ Memory Statistics:
105
  Current Block Size 32.0 KB
106
  Peak Allocated Bytes 0 B
107
  Overflow Count 0
108
- [ALLOC_TEMP_AssetGarbageCollectorHelper] x 3
109
  Initial Block Size 64.0 KB
110
  Current Block Size 64.0 KB
111
  Peak Allocated Bytes 0 B
@@ -120,22 +143,12 @@ Memory Statistics:
120
  Current Block Size 32.0 KB
121
  Peak Allocated Bytes 0 B
122
  Overflow Count 0
123
- [ALLOC_TEMP_Job.Worker 1]
124
- Initial Block Size 256.0 KB
125
- Current Block Size 256.0 KB
126
- Peak Allocated Bytes 3.6 KB
127
- Overflow Count 0
128
- [ALLOC_TEMP_Job.Worker 2]
129
- Initial Block Size 256.0 KB
130
- Current Block Size 256.0 KB
131
- Peak Allocated Bytes 3.6 KB
132
- Overflow Count 0
133
- [ALLOC_TEMP_Background Job.Worker 3]
134
  Initial Block Size 32.0 KB
135
  Current Block Size 32.0 KB
136
  Peak Allocated Bytes 0 B
137
  Overflow Count 0
138
- [ALLOC_TEMP_Background Job.Worker 11]
139
  Initial Block Size 32.0 KB
140
  Current Block Size 32.0 KB
141
  Peak Allocated Bytes 0 B
@@ -156,22 +169,22 @@ Memory Statistics:
156
  Peak Allocated Bytes 0 B
157
  Overflow Count 0
158
  [ALLOC_DEFAULT] Dual Thread Allocator
159
- Peak main deferred allocation count 41
160
  [ALLOC_BUCKET]
161
  Large Block size 4.0 MB
162
  Used Block count 1
163
- Peak Allocated bytes 1.0 MB
164
  [ALLOC_DEFAULT_MAIN]
165
- Peak usage frame count: [4.0 MB-8.0 MB]: 11046 frames, [8.0 MB-16.0 MB]: 27196 frames, [16.0 MB-32.0 MB]: 16321 frames
166
  Requested Block Size 16.0 MB
167
- Peak Block count 2
168
- Peak Allocated memory 24.4 MB
169
  Peak Large allocation bytes 0 B
170
  [ALLOC_DEFAULT_THREAD]
171
- Peak usage frame count: [16.0 MB-32.0 MB]: 54563 frames
172
  Requested Block Size 16.0 MB
173
  Peak Block count 1
174
- Peak Allocated memory 17.6 MB
175
  Peak Large allocation bytes 16.0 MB
176
  [ALLOC_TEMP_JOB_1_FRAME]
177
  Initial Block Size 2.0 MB
@@ -198,15 +211,15 @@ Memory Statistics:
198
  [ALLOC_BUCKET]
199
  Large Block size 4.0 MB
200
  Used Block count 1
201
- Peak Allocated bytes 1.0 MB
202
  [ALLOC_GFX_MAIN]
203
- Peak usage frame count: [32.0 KB-64.0 KB]: 52645 frames, [64.0 KB-128.0 KB]: 1918 frames
204
  Requested Block Size 16.0 MB
205
  Peak Block count 1
206
- Peak Allocated memory 67.7 KB
207
  Peak Large allocation bytes 0 B
208
  [ALLOC_GFX_THREAD]
209
- Peak usage frame count: [32.0 KB-64.0 KB]: 54563 frames
210
  Requested Block Size 16.0 MB
211
  Peak Block count 1
212
  Peak Allocated memory 39.6 KB
@@ -216,15 +229,15 @@ Memory Statistics:
216
  [ALLOC_BUCKET]
217
  Large Block size 4.0 MB
218
  Used Block count 1
219
- Peak Allocated bytes 1.0 MB
220
  [ALLOC_CACHEOBJECTS_MAIN]
221
- Peak usage frame count: [0.5 MB-1.0 MB]: 54563 frames
222
  Requested Block Size 4.0 MB
223
  Peak Block count 1
224
  Peak Allocated memory 0.6 MB
225
  Peak Large allocation bytes 0 B
226
  [ALLOC_CACHEOBJECTS_THREAD]
227
- Peak usage frame count: [0.5 MB-1.0 MB]: 54562 frames, [2.0 MB-4.0 MB]: 1 frames
228
  Requested Block Size 4.0 MB
229
  Peak Block count 1
230
  Peak Allocated memory 2.2 MB
@@ -234,15 +247,15 @@ Memory Statistics:
234
  [ALLOC_BUCKET]
235
  Large Block size 4.0 MB
236
  Used Block count 1
237
- Peak Allocated bytes 1.0 MB
238
  [ALLOC_TYPETREE_MAIN]
239
- Peak usage frame count: [0-1.0 KB]: 54563 frames
240
  Requested Block Size 2.0 MB
241
  Peak Block count 1
242
  Peak Allocated memory 1.0 KB
243
  Peak Large allocation bytes 0 B
244
  [ALLOC_TYPETREE_THREAD]
245
- Peak usage frame count: [1.0 KB-2.0 KB]: 54563 frames
246
  Requested Block Size 2.0 MB
247
  Peak Block count 1
248
  Peak Allocated memory 1.7 KB
 
1
+ Mono path[0] = '/content/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/Managed'
2
+ Mono config path = '/content/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/MonoBleedingEdge/etc'
3
  Preloaded 'lib_burst_generated.so'
4
  Preloaded 'libgrpc_csharp_ext.x64.so'
5
+ PlayerPrefs - Creating folder: /root/.config/unity3d/Hugging Face
6
+ PlayerPrefs - Creating folder: /root/.config/unity3d/Hugging Face/SnowballTarget
7
+ Unable to load player prefs
8
  Initialize engine version: 2021.3.14f1 (eee1884e7226)
9
+ [Subsystems] Discovering subsystems at path /content/ml-agents/training-envs-executables/linux/SnowballTarget/SnowballTarget_Data/UnitySubsystems
10
  Forcing GfxDevice: Null
11
  GfxDevice: creating device client; threaded=0; jobified=0
12
  NullGfxDevice:
13
  Version: NULL 1.0 [1.0]
14
  Renderer: Null Device
15
  Vendor: Unity Technologies
16
+ ALSA lib confmisc.c:767:(parse_card) cannot find card '0'
17
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_card_driver returned error: No such file or directory
18
+ ALSA lib confmisc.c:392:(snd_func_concat) error evaluating strings
19
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_concat returned error: No such file or directory
20
+ ALSA lib confmisc.c:1246:(snd_func_refer) error evaluating name
21
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_refer returned error: No such file or directory
22
+ ALSA lib conf.c:5007:(snd_config_expand) Evaluate error: No such file or directory
23
+ ALSA lib pcm.c:2495:(snd_pcm_open_noupdate) Unknown PCM default
24
+ FMOD failed to initialize the output device.: "Error initializing output device. " (60)
25
+ Forced to initialize FMOD to to the device driver's system output rate 48000, this may impact performance and/or give inconsistent experiences compared to selected sample rate 48000
26
+ ALSA lib confmisc.c:767:(parse_card) cannot find card '0'
27
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_card_driver returned error: No such file or directory
28
+ ALSA lib confmisc.c:392:(snd_func_concat) error evaluating strings
29
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_concat returned error: No such file or directory
30
+ ALSA lib confmisc.c:1246:(snd_func_refer) error evaluating name
31
+ ALSA lib conf.c:4528:(_snd_config_evaluate) function snd_func_refer returned error: No such file or directory
32
+ ALSA lib conf.c:5007:(snd_config_expand) Evaluate error: No such file or directory
33
+ ALSA lib pcm.c:2495:(snd_pcm_open_noupdate) Unknown PCM default
34
+ FMOD failed to initialize the output device.: "Error initializing output device. " (60)
35
+ FMOD initialized on nosound output
36
  Begin MonoManager ReloadAssembly
37
+ - Completed reload, in 0.084 seconds
38
  ERROR: Shader Sprites/Default shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
39
  ERROR: Shader Sprites/Mask shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
40
  ERROR: Shader Legacy Shaders/VertexLit shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
 
45
  WARNING: Shader Unsupported: 'Standard' - All subshaders removed
46
  WARNING: Shader Did you use #pragma only_renderers and omit this platform?
47
  WARNING: Shader If subshaders removal was intentional, you may have forgotten turning Fallback off?
48
+ UnloadTime: 0.715321 ms
49
  ERROR: Shader UI/Default shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
50
  requesting resize 84 x 84
51
+ Setting up 1 worker threads for Enlighten.
52
  Memory Statistics:
53
  [ALLOC_TEMP_TLS] TLS Allocator
54
  StackAllocators :
55
  [ALLOC_TEMP_MAIN]
56
+ Peak usage frame count: [8.0 KB-16.0 KB]: 5369 frames, [16.0 KB-32.0 KB]: 91 frames, [2.0 MB-4.0 MB]: 1 frames
57
  Initial Block Size 4.0 MB
58
  Current Block Size 4.0 MB
59
  Peak Allocated Bytes 2.0 MB
 
61
  [ALLOC_TEMP_Loading.AsyncRead]
62
  Initial Block Size 64.0 KB
63
  Current Block Size 64.0 KB
64
+ Peak Allocated Bytes 198 B
65
  Overflow Count 0
66
  [ALLOC_TEMP_Loading.PreloadManager]
67
  Initial Block Size 256.0 KB
 
103
  Current Block Size 32.0 KB
104
  Peak Allocated Bytes 0 B
105
  Overflow Count 0
106
+ [ALLOC_TEMP_EnlightenWorker]
107
  Initial Block Size 64.0 KB
108
  Current Block Size 64.0 KB
109
  Peak Allocated Bytes 0 B
 
128
  Current Block Size 32.0 KB
129
  Peak Allocated Bytes 0 B
130
  Overflow Count 0
131
+ [ALLOC_TEMP_AssetGarbageCollectorHelper]
132
  Initial Block Size 64.0 KB
133
  Current Block Size 64.0 KB
134
  Peak Allocated Bytes 0 B
 
143
  Current Block Size 32.0 KB
144
  Peak Allocated Bytes 0 B
145
  Overflow Count 0
146
+ [ALLOC_TEMP_Background Job.Worker 11]
147
  Initial Block Size 32.0 KB
148
  Current Block Size 32.0 KB
149
  Peak Allocated Bytes 0 B
150
  Overflow Count 0
151
+ [ALLOC_TEMP_Background Job.Worker 3]
152
  Initial Block Size 32.0 KB
153
  Current Block Size 32.0 KB
154
  Peak Allocated Bytes 0 B
 
169
  Peak Allocated Bytes 0 B
170
  Overflow Count 0
171
  [ALLOC_DEFAULT] Dual Thread Allocator
172
+ Peak main deferred allocation count 40
173
  [ALLOC_BUCKET]
174
  Large Block size 4.0 MB
175
  Used Block count 1
176
+ Peak Allocated bytes 0.9 MB
177
  [ALLOC_DEFAULT_MAIN]
178
+ Peak usage frame count: [4.0 MB-8.0 MB]: 5461 frames
179
  Requested Block Size 16.0 MB
180
+ Peak Block count 1
181
+ Peak Allocated memory 6.5 MB
182
  Peak Large allocation bytes 0 B
183
  [ALLOC_DEFAULT_THREAD]
184
+ Peak usage frame count: [16.0 MB-32.0 MB]: 5461 frames
185
  Requested Block Size 16.0 MB
186
  Peak Block count 1
187
+ Peak Allocated memory 17.8 MB
188
  Peak Large allocation bytes 16.0 MB
189
  [ALLOC_TEMP_JOB_1_FRAME]
190
  Initial Block Size 2.0 MB
 
211
  [ALLOC_BUCKET]
212
  Large Block size 4.0 MB
213
  Used Block count 1
214
+ Peak Allocated bytes 0.9 MB
215
  [ALLOC_GFX_MAIN]
216
+ Peak usage frame count: [32.0 KB-64.0 KB]: 4687 frames, [64.0 KB-128.0 KB]: 774 frames
217
  Requested Block Size 16.0 MB
218
  Peak Block count 1
219
+ Peak Allocated memory 67.3 KB
220
  Peak Large allocation bytes 0 B
221
  [ALLOC_GFX_THREAD]
222
+ Peak usage frame count: [32.0 KB-64.0 KB]: 5461 frames
223
  Requested Block Size 16.0 MB
224
  Peak Block count 1
225
  Peak Allocated memory 39.6 KB
 
229
  [ALLOC_BUCKET]
230
  Large Block size 4.0 MB
231
  Used Block count 1
232
+ Peak Allocated bytes 0.9 MB
233
  [ALLOC_CACHEOBJECTS_MAIN]
234
+ Peak usage frame count: [0.5 MB-1.0 MB]: 5461 frames
235
  Requested Block Size 4.0 MB
236
  Peak Block count 1
237
  Peak Allocated memory 0.6 MB
238
  Peak Large allocation bytes 0 B
239
  [ALLOC_CACHEOBJECTS_THREAD]
240
+ Peak usage frame count: [0.5 MB-1.0 MB]: 5460 frames, [2.0 MB-4.0 MB]: 1 frames
241
  Requested Block Size 4.0 MB
242
  Peak Block count 1
243
  Peak Allocated memory 2.2 MB
 
247
  [ALLOC_BUCKET]
248
  Large Block size 4.0 MB
249
  Used Block count 1
250
+ Peak Allocated bytes 0.9 MB
251
  [ALLOC_TYPETREE_MAIN]
252
+ Peak usage frame count: [0-1.0 KB]: 5461 frames
253
  Requested Block Size 2.0 MB
254
  Peak Block count 1
255
  Peak Allocated memory 1.0 KB
256
  Peak Large allocation bytes 0 B
257
  [ALLOC_TYPETREE_THREAD]
258
+ Peak usage frame count: [1.0 KB-2.0 KB]: 5461 frames
259
  Requested Block Size 2.0 MB
260
  Peak Block count 1
261
  Peak Allocated memory 1.7 KB
run_logs/timers.json CHANGED
@@ -2,220 +2,220 @@
2
  "name": "root",
3
  "gauges": {
4
  "SnowballTarget.Policy.Entropy.mean": {
5
- "value": 0.5959854125976562,
6
- "min": 0.5709379315376282,
7
- "max": 2.8557894229888916,
8
- "count": 200
9
  },
10
  "SnowballTarget.Policy.Entropy.sum": {
11
- "value": 6129.7099609375,
12
- "min": 5529.92138671875,
13
- "max": 29246.140625,
14
- "count": 200
15
  },
16
  "SnowballTarget.Step.mean": {
17
- "value": 1999992.0,
18
  "min": 9952.0,
19
- "max": 1999992.0,
20
- "count": 200
21
  },
22
  "SnowballTarget.Step.sum": {
23
- "value": 1999992.0,
24
  "min": 9952.0,
25
- "max": 1999992.0,
26
- "count": 200
27
  },
28
  "SnowballTarget.Policy.ExtrinsicValueEstimate.mean": {
29
- "value": 14.251872062683105,
30
- "min": 0.47750917077064514,
31
- "max": 14.377264022827148,
32
- "count": 200
33
  },
34
  "SnowballTarget.Policy.ExtrinsicValueEstimate.sum": {
35
- "value": 2921.6337890625,
36
- "min": 92.63677978515625,
37
- "max": 2939.79443359375,
38
- "count": 200
39
  },
40
  "SnowballTarget.Environment.EpisodeLength.mean": {
41
  "value": 199.0,
42
  "min": 199.0,
43
  "max": 199.0,
44
- "count": 200
45
  },
46
  "SnowballTarget.Environment.EpisodeLength.sum": {
47
- "value": 10945.0,
48
  "min": 8756.0,
49
  "max": 10945.0,
50
- "count": 200
51
  },
52
  "SnowballTarget.Losses.PolicyLoss.mean": {
53
- "value": 0.07214724341619706,
54
- "min": 0.06031834367476062,
55
- "max": 0.07803036969657043,
56
- "count": 200
57
  },
58
  "SnowballTarget.Losses.PolicyLoss.sum": {
59
- "value": 0.3607362170809853,
60
- "min": 0.24127337469904248,
61
- "max": 0.37994980020623326,
62
- "count": 200
63
  },
64
  "SnowballTarget.Losses.ValueLoss.mean": {
65
- "value": 0.14957752292062723,
66
- "min": 0.11958930644608451,
67
- "max": 0.272374791023778,
68
- "count": 200
69
  },
70
  "SnowballTarget.Losses.ValueLoss.sum": {
71
- "value": 0.7478876146031361,
72
- "min": 0.47835722578433804,
73
- "max": 1.3618739551188899,
74
- "count": 200
75
  },
76
  "SnowballTarget.Policy.LearningRate.mean": {
77
- "value": 4.688997656000026e-07,
78
- "min": 4.688997656000026e-07,
79
- "max": 0.0001994588002706,
80
- "count": 200
81
  },
82
  "SnowballTarget.Policy.LearningRate.sum": {
83
- "value": 2.344498828000013e-06,
84
- "min": 2.344498828000013e-06,
85
- "max": 0.000992344003828,
86
- "count": 200
87
  },
88
  "SnowballTarget.Policy.Epsilon.mean": {
89
- "value": 0.10023440000000002,
90
- "min": 0.10023440000000002,
91
- "max": 0.1997294,
92
- "count": 200
93
  },
94
  "SnowballTarget.Policy.Epsilon.sum": {
95
- "value": 0.5011720000000001,
96
- "min": 0.4029176,
97
- "max": 0.996172,
98
- "count": 200
99
  },
100
  "SnowballTarget.Policy.Beta.mean": {
101
- "value": 2.1696560000000067e-05,
102
- "min": 2.1696560000000067e-05,
103
- "max": 0.004986497059999999,
104
- "count": 200
105
  },
106
  "SnowballTarget.Policy.Beta.sum": {
107
- "value": 0.00010848280000000034,
108
- "min": 0.00010848280000000034,
109
- "max": 0.024808982800000004,
110
- "count": 200
111
  },
112
  "SnowballTarget.Environment.CumulativeReward.mean": {
113
- "value": 27.98181818181818,
114
- "min": 3.4318181818181817,
115
- "max": 28.509090909090908,
116
- "count": 200
117
  },
118
  "SnowballTarget.Environment.CumulativeReward.sum": {
119
- "value": 1539.0,
120
- "min": 151.0,
121
- "max": 1568.0,
122
- "count": 200
123
  },
124
  "SnowballTarget.Policy.ExtrinsicReward.mean": {
125
- "value": 27.98181818181818,
126
- "min": 3.4318181818181817,
127
- "max": 28.509090909090908,
128
- "count": 200
129
  },
130
  "SnowballTarget.Policy.ExtrinsicReward.sum": {
131
- "value": 1539.0,
132
- "min": 151.0,
133
- "max": 1568.0,
134
- "count": 200
135
  },
136
  "SnowballTarget.IsTraining.mean": {
137
  "value": 1.0,
138
  "min": 1.0,
139
  "max": 1.0,
140
- "count": 200
141
  },
142
  "SnowballTarget.IsTraining.sum": {
143
  "value": 1.0,
144
  "min": 1.0,
145
  "max": 1.0,
146
- "count": 200
147
  }
148
  },
149
  "metadata": {
150
  "timer_format_version": "0.1.0",
151
- "start_time_seconds": "1673823719",
152
- "python_version": "3.8.16 (default, Dec 7 2022, 01:12:06) \n[GCC 11.3.0]",
153
- "command_line_arguments": "/home/emmanuel/Documents/code/rl/ml-agents/venv/bin/mlagents-learn ./config/ppo/SnowballTarget.yaml --env=./training-envs-executables/linux/SnowballTarget/SnowballTarget --run-id=SnowballTarget5 --no-graphics",
154
  "mlagents_version": "0.29.0.dev0",
155
  "mlagents_envs_version": "0.29.0.dev0",
156
  "communication_protocol_version": "1.5.0",
157
  "pytorch_version": "1.8.1+cu102",
158
- "numpy_version": "1.20.0",
159
- "end_time_seconds": "1673827248"
160
  },
161
- "total": 3528.254173344001,
162
  "count": 1,
163
- "self": 0.32245695596793666,
164
  "children": {
165
  "run_training.setup": {
166
- "total": 0.12691729102516547,
167
  "count": 1,
168
- "self": 0.12691729102516547
169
  },
170
  "TrainerController.start_learning": {
171
- "total": 3527.804799097008,
172
  "count": 1,
173
- "self": 5.41858486039564,
174
  "children": {
175
  "TrainerController._reset_env": {
176
- "total": 2.0124125689908396,
177
  "count": 1,
178
- "self": 2.0124125689908396
179
  },
180
  "TrainerController.advance": {
181
- "total": 3520.259165747644,
182
- "count": 181874,
183
- "self": 2.6112302511755843,
184
  "children": {
185
  "env_step": {
186
- "total": 3517.6479354964686,
187
- "count": 181874,
188
- "self": 2569.623420906806,
189
  "children": {
190
  "SubprocessEnvManager._take_step": {
191
- "total": 945.3249118052481,
192
- "count": 181874,
193
- "self": 13.851257300615543,
194
  "children": {
195
  "TorchPolicy.evaluate": {
196
- "total": 931.4736545046326,
197
- "count": 181874,
198
- "self": 127.77290135616204,
199
  "children": {
200
  "TorchPolicy.sample_actions": {
201
- "total": 803.7007531484705,
202
- "count": 181874,
203
- "self": 803.7007531484705
204
  }
205
  }
206
  }
207
  }
208
  },
209
  "workers": {
210
- "total": 2.69960278441431,
211
- "count": 181874,
212
  "self": 0.0,
213
  "children": {
214
  "worker_root": {
215
- "total": 3521.14344334777,
216
- "count": 181874,
217
  "is_parallel": true,
218
- "self": 1719.3351771153102,
219
  "children": {
220
  "run_training.setup": {
221
  "total": 0.0,
@@ -224,48 +224,48 @@
224
  "self": 0.0,
225
  "children": {
226
  "steps_from_proto": {
227
- "total": 0.0019330479844938964,
228
  "count": 1,
229
  "is_parallel": true,
230
- "self": 0.0005646030185744166,
231
  "children": {
232
  "_process_rank_one_or_two_observation": {
233
- "total": 0.0013684449659194797,
234
  "count": 10,
235
  "is_parallel": true,
236
- "self": 0.0013684449659194797
237
  }
238
  }
239
  },
240
  "UnityEnvironment.step": {
241
- "total": 0.048041015019407496,
242
  "count": 1,
243
  "is_parallel": true,
244
- "self": 0.00035946001298725605,
245
  "children": {
246
  "UnityEnvironment._generate_step_input": {
247
- "total": 0.0002662810147739947,
248
  "count": 1,
249
  "is_parallel": true,
250
- "self": 0.0002662810147739947
251
  },
252
  "communicator.exchange": {
253
- "total": 0.046154933981597424,
254
  "count": 1,
255
  "is_parallel": true,
256
- "self": 0.046154933981597424
257
  },
258
  "steps_from_proto": {
259
- "total": 0.0012603400100488216,
260
  "count": 1,
261
  "is_parallel": true,
262
- "self": 0.00030130898812785745,
263
  "children": {
264
  "_process_rank_one_or_two_observation": {
265
- "total": 0.0009590310219209641,
266
  "count": 10,
267
  "is_parallel": true,
268
- "self": 0.0009590310219209641
269
  }
270
  }
271
  }
@@ -274,34 +274,34 @@
274
  }
275
  },
276
  "UnityEnvironment.step": {
277
- "total": 1801.8082662324596,
278
- "count": 181873,
279
  "is_parallel": true,
280
- "self": 66.5949679738842,
281
  "children": {
282
  "UnityEnvironment._generate_step_input": {
283
- "total": 41.4052872666507,
284
- "count": 181873,
285
  "is_parallel": true,
286
- "self": 41.4052872666507
287
  },
288
  "communicator.exchange": {
289
- "total": 1474.7900642491877,
290
- "count": 181873,
291
  "is_parallel": true,
292
- "self": 1474.7900642491877
293
  },
294
  "steps_from_proto": {
295
- "total": 219.01794674273697,
296
- "count": 181873,
297
  "is_parallel": true,
298
- "self": 50.45674358413089,
299
  "children": {
300
  "_process_rank_one_or_two_observation": {
301
- "total": 168.56120315860608,
302
- "count": 1818730,
303
  "is_parallel": true,
304
- "self": 168.56120315860608
305
  }
306
  }
307
  }
@@ -316,9 +316,9 @@
316
  }
317
  },
318
  "trainer_threads": {
319
- "total": 0.0003306930011603981,
320
  "count": 1,
321
- "self": 0.0003306930011603981,
322
  "children": {
323
  "thread_root": {
324
  "total": 0.0,
@@ -327,36 +327,36 @@
327
  "self": 0.0,
328
  "children": {
329
  "trainer_advance": {
330
- "total": 3502.2347593795857,
331
- "count": 3965452,
332
  "is_parallel": true,
333
- "self": 56.153643384197494,
334
  "children": {
335
  "process_trajectory": {
336
- "total": 1823.7294392998156,
337
- "count": 3965452,
338
  "is_parallel": true,
339
- "self": 1812.3949490989326,
340
  "children": {
341
  "RLTrainer._checkpoint": {
342
- "total": 11.33449020088301,
343
- "count": 40,
344
  "is_parallel": true,
345
- "self": 11.33449020088301
346
  }
347
  }
348
  },
349
  "_update_policy": {
350
- "total": 1622.3516766955727,
351
- "count": 909,
352
  "is_parallel": true,
353
- "self": 472.6568366049323,
354
  "children": {
355
  "TorchPPOOptimizer.update": {
356
- "total": 1149.6948400906404,
357
- "count": 46257,
358
  "is_parallel": true,
359
- "self": 1149.6948400906404
360
  }
361
  }
362
  }
@@ -367,14 +367,14 @@
367
  }
368
  },
369
  "TrainerController._save_models": {
370
- "total": 0.11430522697628476,
371
  "count": 1,
372
- "self": 0.000969816988799721,
373
  "children": {
374
  "RLTrainer._checkpoint": {
375
- "total": 0.11333540998748504,
376
  "count": 1,
377
- "self": 0.11333540998748504
378
  }
379
  }
380
  }
 
2
  "name": "root",
3
  "gauges": {
4
  "SnowballTarget.Policy.Entropy.mean": {
5
+ "value": 0.895298957824707,
6
+ "min": 0.895298957824707,
7
+ "max": 2.861755847930908,
8
+ "count": 20
9
  },
10
  "SnowballTarget.Policy.Entropy.sum": {
11
+ "value": 8548.314453125,
12
+ "min": 8548.314453125,
13
+ "max": 29307.2421875,
14
+ "count": 20
15
  },
16
  "SnowballTarget.Step.mean": {
17
+ "value": 199984.0,
18
  "min": 9952.0,
19
+ "max": 199984.0,
20
+ "count": 20
21
  },
22
  "SnowballTarget.Step.sum": {
23
+ "value": 199984.0,
24
  "min": 9952.0,
25
+ "max": 199984.0,
26
+ "count": 20
27
  },
28
  "SnowballTarget.Policy.ExtrinsicValueEstimate.mean": {
29
+ "value": 12.95840835571289,
30
+ "min": 0.3743203580379486,
31
+ "max": 12.95840835571289,
32
+ "count": 20
33
  },
34
  "SnowballTarget.Policy.ExtrinsicValueEstimate.sum": {
35
+ "value": 2526.8896484375,
36
+ "min": 72.61814880371094,
37
+ "max": 2633.7275390625,
38
+ "count": 20
39
  },
40
  "SnowballTarget.Environment.EpisodeLength.mean": {
41
  "value": 199.0,
42
  "min": 199.0,
43
  "max": 199.0,
44
+ "count": 20
45
  },
46
  "SnowballTarget.Environment.EpisodeLength.sum": {
47
+ "value": 8756.0,
48
  "min": 8756.0,
49
  "max": 10945.0,
50
+ "count": 20
51
  },
52
  "SnowballTarget.Losses.PolicyLoss.mean": {
53
+ "value": 0.06924321818537553,
54
+ "min": 0.06071020828139987,
55
+ "max": 0.07382922422843252,
56
+ "count": 20
57
  },
58
  "SnowballTarget.Losses.PolicyLoss.sum": {
59
+ "value": 0.2769728727415021,
60
+ "min": 0.2536787194834438,
61
+ "max": 0.3619471734518633,
62
+ "count": 20
63
  },
64
  "SnowballTarget.Losses.ValueLoss.mean": {
65
+ "value": 0.20040757851857763,
66
+ "min": 0.10921537366412654,
67
+ "max": 0.2797591955784489,
68
+ "count": 20
69
  },
70
  "SnowballTarget.Losses.ValueLoss.sum": {
71
+ "value": 0.8016303140743105,
72
+ "min": 0.43686149465650614,
73
+ "max": 1.3076020321425268,
74
+ "count": 20
75
  },
76
  "SnowballTarget.Policy.LearningRate.mean": {
77
+ "value": 8.082097306000005e-06,
78
+ "min": 8.082097306000005e-06,
79
+ "max": 0.000291882002706,
80
+ "count": 20
81
  },
82
  "SnowballTarget.Policy.LearningRate.sum": {
83
+ "value": 3.232838922400002e-05,
84
+ "min": 3.232838922400002e-05,
85
+ "max": 0.00138516003828,
86
+ "count": 20
87
  },
88
  "SnowballTarget.Policy.Epsilon.mean": {
89
+ "value": 0.10269400000000001,
90
+ "min": 0.10269400000000001,
91
+ "max": 0.19729400000000002,
92
+ "count": 20
93
  },
94
  "SnowballTarget.Policy.Epsilon.sum": {
95
+ "value": 0.41077600000000003,
96
+ "min": 0.41077600000000003,
97
+ "max": 0.96172,
98
+ "count": 20
99
  },
100
  "SnowballTarget.Policy.Beta.mean": {
101
+ "value": 0.0001444306000000001,
102
+ "min": 0.0001444306000000001,
103
+ "max": 0.0048649706,
104
+ "count": 20
105
  },
106
  "SnowballTarget.Policy.Beta.sum": {
107
+ "value": 0.0005777224000000004,
108
+ "min": 0.0005777224000000004,
109
+ "max": 0.023089828,
110
+ "count": 20
111
  },
112
  "SnowballTarget.Environment.CumulativeReward.mean": {
113
+ "value": 25.931818181818183,
114
+ "min": 3.159090909090909,
115
+ "max": 25.931818181818183,
116
+ "count": 20
117
  },
118
  "SnowballTarget.Environment.CumulativeReward.sum": {
119
+ "value": 1141.0,
120
+ "min": 139.0,
121
+ "max": 1401.0,
122
+ "count": 20
123
  },
124
  "SnowballTarget.Policy.ExtrinsicReward.mean": {
125
+ "value": 25.931818181818183,
126
+ "min": 3.159090909090909,
127
+ "max": 25.931818181818183,
128
+ "count": 20
129
  },
130
  "SnowballTarget.Policy.ExtrinsicReward.sum": {
131
+ "value": 1141.0,
132
+ "min": 139.0,
133
+ "max": 1401.0,
134
+ "count": 20
135
  },
136
  "SnowballTarget.IsTraining.mean": {
137
  "value": 1.0,
138
  "min": 1.0,
139
  "max": 1.0,
140
+ "count": 20
141
  },
142
  "SnowballTarget.IsTraining.sum": {
143
  "value": 1.0,
144
  "min": 1.0,
145
  "max": 1.0,
146
+ "count": 20
147
  }
148
  },
149
  "metadata": {
150
  "timer_format_version": "0.1.0",
151
+ "start_time_seconds": "1673863867",
152
+ "python_version": "3.8.16 (default, Dec 7 2022, 01:12:13) \n[GCC 7.5.0]",
153
+ "command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/SnowballTarget.yaml --env=./training-envs-executables/linux/SnowballTarget/SnowballTarget --run-id=SnowballTarget1 --no-graphics",
154
  "mlagents_version": "0.29.0.dev0",
155
  "mlagents_envs_version": "0.29.0.dev0",
156
  "communication_protocol_version": "1.5.0",
157
  "pytorch_version": "1.8.1+cu102",
158
+ "numpy_version": "1.21.6",
159
+ "end_time_seconds": "1673864354"
160
  },
161
+ "total": 486.907974191,
162
  "count": 1,
163
+ "self": 0.38870987900008913,
164
  "children": {
165
  "run_training.setup": {
166
+ "total": 0.10959962699999437,
167
  "count": 1,
168
+ "self": 0.10959962699999437
169
  },
170
  "TrainerController.start_learning": {
171
+ "total": 486.40966468499994,
172
  "count": 1,
173
+ "self": 0.6225702460067168,
174
  "children": {
175
  "TrainerController._reset_env": {
176
+ "total": 9.280716196000014,
177
  "count": 1,
178
+ "self": 9.280716196000014
179
  },
180
  "TrainerController.advance": {
181
+ "total": 476.3659393689932,
182
+ "count": 18203,
183
+ "self": 0.3478397549930037,
184
  "children": {
185
  "env_step": {
186
+ "total": 476.0180996140002,
187
+ "count": 18203,
188
+ "self": 314.11936406199607,
189
  "children": {
190
  "SubprocessEnvManager._take_step": {
191
+ "total": 161.54845037800106,
192
+ "count": 18203,
193
+ "self": 1.7809457140016889,
194
  "children": {
195
  "TorchPolicy.evaluate": {
196
+ "total": 159.76750466399938,
197
+ "count": 18203,
198
+ "self": 36.14241361299844,
199
  "children": {
200
  "TorchPolicy.sample_actions": {
201
+ "total": 123.62509105100094,
202
+ "count": 18203,
203
+ "self": 123.62509105100094
204
  }
205
  }
206
  }
207
  }
208
  },
209
  "workers": {
210
+ "total": 0.35028517400309056,
211
+ "count": 18203,
212
  "self": 0.0,
213
  "children": {
214
  "worker_root": {
215
+ "total": 484.91362469400053,
216
+ "count": 18203,
217
  "is_parallel": true,
218
+ "self": 231.5020337780084,
219
  "children": {
220
  "run_training.setup": {
221
  "total": 0.0,
 
224
  "self": 0.0,
225
  "children": {
226
  "steps_from_proto": {
227
+ "total": 0.00492641399998206,
228
  "count": 1,
229
  "is_parallel": true,
230
+ "self": 0.002707334000007222,
231
  "children": {
232
  "_process_rank_one_or_two_observation": {
233
+ "total": 0.002219079999974838,
234
  "count": 10,
235
  "is_parallel": true,
236
+ "self": 0.002219079999974838
237
  }
238
  }
239
  },
240
  "UnityEnvironment.step": {
241
+ "total": 0.036247952999985955,
242
  "count": 1,
243
  "is_parallel": true,
244
+ "self": 0.0005773249999947438,
245
  "children": {
246
  "UnityEnvironment._generate_step_input": {
247
+ "total": 0.000317691000020659,
248
  "count": 1,
249
  "is_parallel": true,
250
+ "self": 0.000317691000020659
251
  },
252
  "communicator.exchange": {
253
+ "total": 0.033358028999998623,
254
  "count": 1,
255
  "is_parallel": true,
256
+ "self": 0.033358028999998623
257
  },
258
  "steps_from_proto": {
259
+ "total": 0.001994907999971929,
260
  "count": 1,
261
  "is_parallel": true,
262
+ "self": 0.00044596400010732395,
263
  "children": {
264
  "_process_rank_one_or_two_observation": {
265
+ "total": 0.0015489439998646048,
266
  "count": 10,
267
  "is_parallel": true,
268
+ "self": 0.0015489439998646048
269
  }
270
  }
271
  }
 
274
  }
275
  },
276
  "UnityEnvironment.step": {
277
+ "total": 253.41159091599212,
278
+ "count": 18202,
279
  "is_parallel": true,
280
+ "self": 9.52658634299246,
281
  "children": {
282
  "UnityEnvironment._generate_step_input": {
283
+ "total": 5.742697782995265,
284
+ "count": 18202,
285
  "is_parallel": true,
286
+ "self": 5.742697782995265
287
  },
288
  "communicator.exchange": {
289
+ "total": 202.5598034690018,
290
+ "count": 18202,
291
  "is_parallel": true,
292
+ "self": 202.5598034690018
293
  },
294
  "steps_from_proto": {
295
+ "total": 35.582503321002605,
296
+ "count": 18202,
297
  "is_parallel": true,
298
+ "self": 8.055542941989927,
299
  "children": {
300
  "_process_rank_one_or_two_observation": {
301
+ "total": 27.526960379012678,
302
+ "count": 182020,
303
  "is_parallel": true,
304
+ "self": 27.526960379012678
305
  }
306
  }
307
  }
 
316
  }
317
  },
318
  "trainer_threads": {
319
+ "total": 5.062799993993394e-05,
320
  "count": 1,
321
+ "self": 5.062799993993394e-05,
322
  "children": {
323
  "thread_root": {
324
  "total": 0.0,
 
327
  "self": 0.0,
328
  "children": {
329
  "trainer_advance": {
330
+ "total": 472.76664193404326,
331
+ "count": 393759,
332
  "is_parallel": true,
333
+ "self": 10.79233854306824,
334
  "children": {
335
  "process_trajectory": {
336
+ "total": 271.9592353249749,
337
+ "count": 393759,
338
  "is_parallel": true,
339
+ "self": 271.07731584897493,
340
  "children": {
341
  "RLTrainer._checkpoint": {
342
+ "total": 0.8819194759999505,
343
+ "count": 4,
344
  "is_parallel": true,
345
+ "self": 0.8819194759999505
346
  }
347
  }
348
  },
349
  "_update_policy": {
350
+ "total": 190.01506806600014,
351
+ "count": 90,
352
  "is_parallel": true,
353
+ "self": 45.93433721800204,
354
  "children": {
355
  "TorchPPOOptimizer.update": {
356
+ "total": 144.0807308479981,
357
+ "count": 4587,
358
  "is_parallel": true,
359
+ "self": 144.0807308479981
360
  }
361
  }
362
  }
 
367
  }
368
  },
369
  "TrainerController._save_models": {
370
+ "total": 0.14038824600004318,
371
  "count": 1,
372
+ "self": 0.0009810320000269712,
373
  "children": {
374
  "RLTrainer._checkpoint": {
375
+ "total": 0.1394072140000162,
376
  "count": 1,
377
+ "self": 0.1394072140000162
378
  }
379
  }
380
  }
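
run_logs/timers.json stores each metric under "gauges" as a value/min/max/count record; the new run reports a final mean cumulative reward of about 25.93 over 20 summary periods. A minimal sketch (not part of the commit) for pulling those numbers out with the standard library:

```python
# Minimal sketch: read the headline training metric from run_logs/timers.json,
# whose "gauges" section is shown in the diff above.
import json

with open("run_logs/timers.json") as f:
    timers = json.load(f)

reward = timers["gauges"]["SnowballTarget.Environment.CumulativeReward.mean"]
print(f"final mean reward: {reward['value']:.2f} "
      f"(min {reward['min']:.2f}, max {reward['max']:.2f}, over {reward['count']} summaries)")
```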
run_logs/training_status.json CHANGED
@@ -2,103 +2,58 @@
2
  "SnowballTarget": {
3
  "checkpoints": [
4
  {
5
- "steps": 1599976,
6
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1599976.onnx",
7
- "reward": 27.727272727272727,
8
- "creation_time": 1673826592.562898,
9
  "auxillary_file_paths": [
10
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1599976.pt"
11
  ]
12
  },
13
  {
14
- "steps": 1649992,
15
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1649992.onnx",
16
- "reward": 27.6,
17
- "creation_time": 1673826675.4486237,
18
  "auxillary_file_paths": [
19
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1649992.pt"
20
  ]
21
  },
22
  {
23
- "steps": 1699936,
24
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1699936.onnx",
25
- "reward": 27.545454545454547,
26
- "creation_time": 1673826756.14448,
27
  "auxillary_file_paths": [
28
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1699936.pt"
29
  ]
30
  },
31
  {
32
- "steps": 1749960,
33
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1749960.onnx",
34
- "reward": 28.181818181818183,
35
- "creation_time": 1673826837.3941545,
36
  "auxillary_file_paths": [
37
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1749960.pt"
38
  ]
39
  },
40
  {
41
- "steps": 1799984,
42
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1799984.onnx",
43
- "reward": 28.0,
44
- "creation_time": 1673826920.0776684,
45
  "auxillary_file_paths": [
46
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1799984.pt"
47
- ]
48
- },
49
- {
50
- "steps": 1849984,
51
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1849984.onnx",
52
- "reward": 28.181818181818183,
53
- "creation_time": 1673827000.915594,
54
- "auxillary_file_paths": [
55
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1849984.pt"
56
- ]
57
- },
58
- {
59
- "steps": 1899944,
60
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1899944.onnx",
61
- "reward": 28.272727272727273,
62
- "creation_time": 1673827082.3755538,
63
- "auxillary_file_paths": [
64
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1899944.pt"
65
- ]
66
- },
67
- {
68
- "steps": 1949968,
69
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1949968.onnx",
70
- "reward": 27.454545454545453,
71
- "creation_time": 1673827165.5384333,
72
- "auxillary_file_paths": [
73
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1949968.pt"
74
- ]
75
- },
76
- {
77
- "steps": 1999992,
78
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-1999992.onnx",
79
- "reward": 28.454545454545453,
80
- "creation_time": 1673827247.6539536,
81
- "auxillary_file_paths": [
82
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-1999992.pt"
83
- ]
84
- },
85
- {
86
- "steps": 2000376,
87
- "file_path": "results/SnowballTarget5/SnowballTarget/SnowballTarget-2000376.onnx",
88
- "reward": 28.454545454545453,
89
- "creation_time": 1673827247.8208814,
90
- "auxillary_file_paths": [
91
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-2000376.pt"
92
  ]
93
  }
94
  ],
95
  "final_checkpoint": {
96
- "steps": 2000376,
97
- "file_path": "results/SnowballTarget5/SnowballTarget.onnx",
98
- "reward": 28.454545454545453,
99
- "creation_time": 1673827247.8208814,
100
  "auxillary_file_paths": [
101
- "results/SnowballTarget5/SnowballTarget/SnowballTarget-2000376.pt"
102
  ]
103
  }
104
  },
 
2
  "SnowballTarget": {
3
  "checkpoints": [
4
  {
5
+ "steps": 49936,
6
+ "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-49936.onnx",
7
+ "reward": 13.090909090909092,
8
+ "creation_time": 1673863996.6326642,
9
  "auxillary_file_paths": [
10
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-49936.pt"
11
  ]
12
  },
13
  {
14
+ "steps": 99960,
15
+ "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-99960.onnx",
16
+ "reward": 21.818181818181817,
17
+ "creation_time": 1673864116.8484483,
18
  "auxillary_file_paths": [
19
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-99960.pt"
20
  ]
21
  },
22
  {
23
+ "steps": 149984,
24
+ "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-149984.onnx",
25
+ "reward": 24.727272727272727,
26
+ "creation_time": 1673864236.36558,
27
  "auxillary_file_paths": [
28
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-149984.pt"
29
  ]
30
  },
31
  {
32
+ "steps": 199984,
33
+ "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-199984.onnx",
34
+ "reward": 26.272727272727273,
35
+ "creation_time": 1673864354.1693516,
36
  "auxillary_file_paths": [
37
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-199984.pt"
38
  ]
39
  },
40
  {
41
+ "steps": 200112,
42
+ "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-200112.onnx",
43
+ "reward": 26.272727272727273,
44
+ "creation_time": 1673864354.3456814,
45
  "auxillary_file_paths": [
46
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-200112.pt"
47
  ]
48
  }
49
  ],
50
  "final_checkpoint": {
51
+ "steps": 200112,
52
+ "file_path": "results/SnowballTarget1/SnowballTarget.onnx",
53
+ "reward": 26.272727272727273,
54
+ "creation_time": 1673864354.3456814,
55
  "auxillary_file_paths": [
56
+ "results/SnowballTarget1/SnowballTarget/SnowballTarget-200112.pt"
57
  ]
58
  }
59
  },