Upload model weights without loading

Files changed:

- global_step250/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt (+3 -0)
- global_step250/mp_rank_00_model_states.pt (+3 -0)
- latest (+1 -1)
- model-00001-of-00004.safetensors (+1 -1)
- model-00002-of-00004.safetensors (+1 -1)
- model-00003-of-00004.safetensors (+1 -1)
- model-00004-of-00004.safetensors (+1 -1)
- rng_state_0.pth (+1 -1)
- rng_state_1.pth (+1 -1)
- rng_state_2.pth (+1 -1)
- rng_state_3.pth (+1 -1)
- rng_state_4.pth (+1 -1)
- rng_state_5.pth (+1 -1)
- rng_state_6.pth (+1 -1)
- scheduler.pt (+1 -1)
- trainer_state.json (+2 -2312)
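The commit title suggests the checkpoint folder was pushed with the Hub's folder-upload path, which streams files to LFS storage without instantiating the model in memory. A minimal sketch with `huggingface_hub`; the `repo_id` and local path are placeholders, not taken from this commit:

```python
from huggingface_hub import HfApi

# Hypothetical repo and local checkpoint directory; substitute your own.
api = HfApi()
api.upload_folder(
    repo_id="your-org/your-model",          # placeholder
    folder_path="outputs/checkpoint-250",   # placeholder
    repo_type="model",
    commit_message="Upload model weights without loading",
)
```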
global_step250/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86f0e6bc951a93ac39db4a44f299de83ade29062afced9bb379f1349903e3a15
+size 6561681042
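This and the following added files are Git LFS pointers: the repository stores only a three-line stub (spec version, SHA-256 of the blob, byte size) while the payload lives in LFS storage. A small sketch that parses such a pointer file and verifies a downloaded blob against it; file paths are illustrative:

```python
import hashlib

def parse_lfs_pointer(path):
    """Read a Git LFS pointer file into a dict with version/oid/size."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    fields["size"] = int(fields["size"])
    return fields

def verify_blob(pointer, blob_path):
    """Check a downloaded file against the pointer's sha256 oid and size."""
    h = hashlib.sha256()
    n = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            n += len(chunk)
    expected = pointer["oid"].removeprefix("sha256:")
    return h.hexdigest() == expected and n == pointer["size"]
```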
global_step250/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:337d51d18b17196fd8c32379b262f19468f1e1f43919074a266a60af8bd61369
+size 6561681938
global_step250/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3223c884602b6fafc572f6c77cd59f31cf17fba5180e053578a0fd52eda83923
+size 6561682258
global_step250/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3abdb950669dd654e6b4e97951a9d0ed173307fbbb72114f38125d3c29259987
+size 6561682322
global_step250/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:afa9b1588788c2c211b94d114b59b8d30e9475308b81d19118a303c7ec7f63a5
+size 6561682066
global_step250/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e829c67495623963abc2a5d314cfd3657d65ee56347e2329f6a90d4b4b3b9f62
+size 6561682194
global_step250/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1af45740b93c3df5fd638805d7bfaaf213cf45fea86b1a98ba60d692df592850
+size 6561680914
global_step250/mp_rank_00_model_states.pt (ADDED)

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e0c85d761a34dd5008299bd13daf706810467809fa880cc31d5e42d3728d9a6
+size 15231325496
latest (CHANGED)

@@ -1 +1 @@
-
+global_step250
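The `latest` file is DeepSpeed's checkpoint tag: it names the subdirectory (`global_step250`) holding the ZeRO-sharded optimizer and model states added above. To turn those shards into a single fp32 state dict, DeepSpeed ships a consolidation helper; a sketch, assuming a standard DeepSpeed checkpoint layout and a placeholder path:

```python
from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

# Reads the tag from `latest` (here: global_step250) and merges the
# per-rank ZeRO shards into one fp32 state dict on CPU.
state_dict = get_fp32_state_dict_from_zero_checkpoint("outputs/checkpoint-250")  # placeholder path
print(sum(t.numel() for t in state_dict.values()))  # total parameter count
```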
model-00001-of-00004.safetensors (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c78d0bce9791b543a7af89c557c229e654f50989e81d982cb76181c4056ac3fb
 size 4877660776
model-00002-of-00004.safetensors (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:191285cfb3da4f3826c48b7ebdcb1163fe004e4c76ce52392d406e4a60e3a3ef
 size 4932751008
model-00003-of-00004.safetensors (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:15463bf3a253b51f57b77c76c08b2c38328058bd786e22cfa6fe5c4955603b5f
 size 4330865200
model-00004-of-00004.safetensors (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7847bd0dfe43baf65264360ad8bc2ef6593520642d9d3b9849cf5ce06b33c8e7
 size 1089994880
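The four safetensors shards above hold the model weights; the format supports per-tensor, on-demand access, so a shard can be inspected or partially loaded without materializing the whole model. A sketch using the `safetensors` library, with the shard name taken from this commit:

```python
from safetensors import safe_open

# Open one shard lazily; tensors are read on demand rather than all at once.
with safe_open("model-00001-of-00004.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        t = f.get_tensor(name)              # loads just this tensor
        print(name, tuple(t.shape), t.dtype)
        break
```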
rng_state_0.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:53a4a6e4ee2c1c3e54aeba8fde3b12aed386b25672713081c2ddb430c79e0675
 size 15728
rng_state_1.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c1f7d859088c96953afc5ac400719d89ff0f766fb4408002312f248920718b71
 size 15728
rng_state_2.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d1f2871c55d9d48a29d339e9af3014584d847660f80a288c6cedb04faf7b78ea
 size 15728
rng_state_3.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cb9b78ed51210787260e8696ca01a949db4d11e28ad8963528231cb9dd41ef70
 size 15792
rng_state_4.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5777c7050cc3294af29788a1145da1f300d67442aa5a78d661a09525b61b1cff
 size 15728
rng_state_5.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:84b2d5cc25fe46d8f3916099450e0b7e0b820f3236fc46032db853e5ff809c7c
 size 15728
rng_state_6.pth (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3bb02a663a81d6ab856ac3e25e8c97e2ece72b0143440a7c95a92c92d39c9dfc
 size 15728
scheduler.pt (CHANGED)

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cdfd40b6bea2f9491916899d888ae142373ca96e34d6115e4bebaa51cd22ea04
 size 1064
trainer_state.json (CHANGED)

@@ -2,9 +2,9 @@
   "best_global_step": null,
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
   "eval_steps": 500,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2323,2316 +2323,6 @@
       "learning_rate": 1e-06,
       "loss": 0.0002,
       "step": 250
-    },
-    {
-      "clip_ratio": 0.00014695563593358245,
-      "epoch": 1.6416666666666666,
-      "grad_norm": 0.08961236476898193,
-      "learning_rate": 1e-06,
-      "loss": -0.0001,
-      "step": 251
-    },
-    {
-      "clip_ratio": 0.0002890962827049883,
-      "epoch": 1.6481481481481481,
-      "grad_norm": 0.08664807677268982,
-      "learning_rate": 1e-06,
-      "loss": -0.0006,
-      "step": 252
-    },
-    {
-      "clip_ratio": 0.0,
-      "completion_length": 372.3832252139137,
-      "epoch": 1.6546296296296297,
-      "grad_norm": 0.0890711322426796,
-      "learning_rate": 1e-06,
-      "loss": 0.0023,
-      "num_tokens": 234104038.0,
-      "reward": 1.6360544533956618,
-      "reward_std": 0.20212856946246965,
-      "rewards/acc_reward_func": 1.6360544250124978,
-      "step": 253
-    },
[... the remaining deleted log-history entries, steps 254 through 417 in the same format, continue here; the captured diff cuts off mid-entry after step 417 ...]
|
| 3875 |
-
"epoch": 2.738888888888889,
|
| 3876 |
-
"grad_norm": 0.07409494370222092,
|
| 3877 |
-
"learning_rate": 1e-06,
|
| 3878 |
-
"loss": 0.0001,
|
| 3879 |
-
"step": 418
|
| 3880 |
-
},
|
| 3881 |
-
{
|
| 3882 |
-
"clip_ratio": 0.00010308226739566418,
|
| 3883 |
-
"epoch": 2.7453703703703702,
|
| 3884 |
-
"grad_norm": 0.07239258289337158,
|
| 3885 |
-
"learning_rate": 1e-06,
|
| 3886 |
-
"loss": -0.0002,
|
| 3887 |
-
"step": 419
|
| 3888 |
-
},
|
| 3889 |
-
{
|
| 3890 |
-
"clip_ratio": 0.00022064264131976024,
|
| 3891 |
-
"epoch": 2.751851851851852,
|
| 3892 |
-
"grad_norm": 0.07175586372613907,
|
| 3893 |
-
"learning_rate": 1e-06,
|
| 3894 |
-
"loss": -0.0007,
|
| 3895 |
-
"step": 420
|
| 3896 |
-
},
|
| 3897 |
-
{
|
| 3898 |
-
"clip_ratio": 0.0,
|
| 3899 |
-
"completion_length": 403.83787318638394,
|
| 3900 |
-
"epoch": 2.7583333333333333,
|
| 3901 |
-
"grad_norm": 0.0722418949007988,
|
| 3902 |
-
"learning_rate": 1e-06,
|
| 3903 |
-
"loss": 0.0029,
|
| 3904 |
-
"num_tokens": 389321531.0,
|
| 3905 |
-
"reward": 1.5861678350539434,
|
| 3906 |
-
"reward_std": 0.1339329518377781,
|
| 3907 |
-
"rewards/acc_reward_func": 1.5861677896408808,
|
| 3908 |
-
"step": 421
|
| 3909 |
-
},
|
| 3910 |
-
{
|
| 3911 |
-
"clip_ratio": 3.0351422104840387e-05,
|
| 3912 |
-
"epoch": 2.764814814814815,
|
| 3913 |
-
"grad_norm": 0.07102184742689133,
|
| 3914 |
-
"learning_rate": 1e-06,
|
| 3915 |
-
"loss": 0.0028,
|
| 3916 |
-
"step": 422
|
| 3917 |
-
},
|
| 3918 |
-
{
|
| 3919 |
-
"clip_ratio": 6.684829956308629e-05,
|
| 3920 |
-
"epoch": 2.7712962962962964,
|
| 3921 |
-
"grad_norm": 0.06921263784170151,
|
| 3922 |
-
"learning_rate": 1e-06,
|
| 3923 |
-
"loss": 0.0025,
|
| 3924 |
-
"step": 423
|
| 3925 |
-
},
|
| 3926 |
-
{
|
| 3927 |
-
"clip_ratio": 0.0001603819608655093,
|
| 3928 |
-
"epoch": 2.7777777777777777,
|
| 3929 |
-
"grad_norm": 0.06847840547561646,
|
| 3930 |
-
"learning_rate": 1e-06,
|
| 3931 |
-
"loss": 0.002,
|
| 3932 |
-
"step": 424
|
| 3933 |
-
},
|
| 3934 |
-
{
|
| 3935 |
-
"clip_ratio": 0.0,
|
| 3936 |
-
"completion_length": 395.74603852771577,
|
| 3937 |
-
"epoch": 2.784259259259259,
|
| 3938 |
-
"grad_norm": 0.07620932906866074,
|
| 3939 |
-
"learning_rate": 1e-06,
|
| 3940 |
-
"loss": -0.0,
|
| 3941 |
-
"num_tokens": 392811117.0,
|
| 3942 |
-
"reward": 1.675736977940514,
|
| 3943 |
-
"reward_std": 0.15024714987902416,
|
| 3944 |
-
"rewards/acc_reward_func": 1.6757369552339827,
|
| 3945 |
-
"step": 425
|
| 3946 |
-
},
|
| 3947 |
-
{
|
| 3948 |
-
"clip_ratio": 5.349587324114206e-05,
|
| 3949 |
-
"epoch": 2.7907407407407407,
|
| 3950 |
-
"grad_norm": 0.07477039843797684,
|
| 3951 |
-
"learning_rate": 1e-06,
|
| 3952 |
-
"loss": -0.0002,
|
| 3953 |
-
"step": 426
|
| 3954 |
-
},
|
| 3955 |
-
{
|
| 3956 |
-
"clip_ratio": 0.0001953024965630556,
|
| 3957 |
-
"epoch": 2.7972222222222225,
|
| 3958 |
-
"grad_norm": 0.07259545475244522,
|
| 3959 |
-
"learning_rate": 1e-06,
|
| 3960 |
-
"loss": -0.0006,
|
| 3961 |
-
"step": 427
|
| 3962 |
-
},
|
| 3963 |
-
{
|
| 3964 |
-
"clip_ratio": 0.0004224494755146138,
|
| 3965 |
-
"epoch": 2.803703703703704,
|
| 3966 |
-
"grad_norm": 0.0708785280585289,
|
| 3967 |
-
"learning_rate": 1e-06,
|
| 3968 |
-
"loss": -0.001,
|
| 3969 |
-
"step": 428
|
| 3970 |
-
},
|
| 3971 |
-
{
|
| 3972 |
-
"clip_ratio": 0.0,
|
| 3973 |
-
"completion_length": 366.82993861607144,
|
| 3974 |
-
"epoch": 2.810185185185185,
|
| 3975 |
-
"grad_norm": 0.07910261303186417,
|
| 3976 |
-
"learning_rate": 1e-06,
|
| 3977 |
-
"loss": 0.0041,
|
| 3978 |
-
"num_tokens": 396527025.0,
|
| 3979 |
-
"reward": 1.7210884661901564,
|
| 3980 |
-
"reward_std": 0.16589947470596858,
|
| 3981 |
-
"rewards/acc_reward_func": 1.7210884264537267,
|
| 3982 |
-
"step": 429
|
| 3983 |
-
},
|
| 3984 |
-
{
|
| 3985 |
-
"clip_ratio": 7.650961353127579e-05,
|
| 3986 |
-
"epoch": 2.8166666666666664,
|
| 3987 |
-
"grad_norm": 0.07643198221921921,
|
| 3988 |
-
"learning_rate": 1e-06,
|
| 3989 |
-
"loss": 0.0038,
|
| 3990 |
-
"step": 430
|
| 3991 |
-
},
|
| 3992 |
-
{
|
| 3993 |
-
"clip_ratio": 0.00023775612498866394,
|
| 3994 |
-
"epoch": 2.823148148148148,
|
| 3995 |
-
"grad_norm": 0.07540789246559143,
|
| 3996 |
-
"learning_rate": 1e-06,
|
| 3997 |
-
"loss": 0.0034,
|
| 3998 |
-
"step": 431
|
| 3999 |
-
},
|
| 4000 |
-
{
|
| 4001 |
-
"clip_ratio": 0.0005787453077833302,
|
| 4002 |
-
"epoch": 2.8296296296296295,
|
| 4003 |
-
"grad_norm": 0.07343152165412903,
|
| 4004 |
-
"learning_rate": 1e-06,
|
| 4005 |
-
"loss": 0.0029,
|
| 4006 |
-
"step": 432
|
| 4007 |
-
},
|
| 4008 |
-
{
|
| 4009 |
-
"clip_ratio": 0.0,
|
| 4010 |
-
"completion_length": 357.15986996605284,
|
| 4011 |
-
"epoch": 2.8361111111111112,
|
| 4012 |
-
"grad_norm": 0.08127926290035248,
|
| 4013 |
-
"learning_rate": 1e-06,
|
| 4014 |
-
"loss": 0.0055,
|
| 4015 |
-
"num_tokens": 400366410.0,
|
| 4016 |
-
"reward": 1.6848072721844627,
|
| 4017 |
-
"reward_std": 0.11902960372113046,
|
| 4018 |
-
"rewards/acc_reward_func": 1.684807260831197,
|
| 4019 |
-
"step": 433
|
| 4020 |
-
},
|
| 4021 |
-
{
|
| 4022 |
-
"clip_ratio": 5.624502747585731e-05,
|
| 4023 |
-
"epoch": 2.8425925925925926,
|
| 4024 |
-
"grad_norm": 0.08154301345348358,
|
| 4025 |
-
"learning_rate": 1e-06,
|
| 4026 |
-
"loss": 0.0052,
|
| 4027 |
-
"step": 434
|
| 4028 |
-
},
|
| 4029 |
-
{
|
| 4030 |
-
"clip_ratio": 0.00014876067438135144,
|
| 4031 |
-
"epoch": 2.849074074074074,
|
| 4032 |
-
"grad_norm": 0.07982967048883438,
|
| 4033 |
-
"learning_rate": 1e-06,
|
| 4034 |
-
"loss": 0.0048,
|
| 4035 |
-
"step": 435
|
| 4036 |
-
},
|
| 4037 |
-
{
|
| 4038 |
-
"clip_ratio": 0.00047466065838567114,
|
| 4039 |
-
"epoch": 2.8555555555555556,
|
| 4040 |
-
"grad_norm": 0.0750332623720169,
|
| 4041 |
-
"learning_rate": 1e-06,
|
| 4042 |
-
"loss": 0.0043,
|
| 4043 |
-
"step": 436
|
| 4044 |
-
},
|
| 4045 |
-
{
|
| 4046 |
-
"clip_ratio": 0.0,
|
| 4047 |
-
"completion_length": 338.58050246465774,
|
| 4048 |
-
"epoch": 2.862037037037037,
|
| 4049 |
-
"grad_norm": 0.06907039880752563,
|
| 4050 |
-
"learning_rate": 1e-06,
|
| 4051 |
-
"loss": 0.0029,
|
| 4052 |
-
"num_tokens": 404099120.0,
|
| 4053 |
-
"reward": 1.6791383482161022,
|
| 4054 |
-
"reward_std": 0.09301007627731278,
|
| 4055 |
-
"rewards/acc_reward_func": 1.6791383255095709,
|
| 4056 |
-
"step": 437
|
| 4057 |
-
},
|
| 4058 |
-
{
|
| 4059 |
-
"clip_ratio": 5.430434404323543e-05,
|
| 4060 |
-
"epoch": 2.8685185185185187,
|
| 4061 |
-
"grad_norm": 0.06709130853414536,
|
| 4062 |
-
"learning_rate": 1e-06,
|
| 4063 |
-
"loss": 0.0027,
|
| 4064 |
-
"step": 438
|
| 4065 |
-
},
|
| 4066 |
-
{
|
| 4067 |
-
"clip_ratio": 0.00010574558421337445,
|
| 4068 |
-
"epoch": 2.875,
|
| 4069 |
-
"grad_norm": 0.06637100130319595,
|
| 4070 |
-
"learning_rate": 1e-06,
|
| 4071 |
-
"loss": 0.0024,
|
| 4072 |
-
"step": 439
|
| 4073 |
-
},
|
| 4074 |
-
{
|
| 4075 |
-
"clip_ratio": 0.0004048011510998809,
|
| 4076 |
-
"epoch": 2.8814814814814813,
|
| 4077 |
-
"grad_norm": 0.06594450771808624,
|
| 4078 |
-
"learning_rate": 1e-06,
|
| 4079 |
-
"loss": 0.002,
|
| 4080 |
-
"step": 440
|
| 4081 |
-
},
|
| 4082 |
-
{
|
| 4083 |
-
"clip_ratio": 0.0,
|
| 4084 |
-
"completion_length": 328.1303899855841,
|
| 4085 |
-
"epoch": 2.887962962962963,
|
| 4086 |
-
"grad_norm": 0.09415791928768158,
|
| 4087 |
-
"learning_rate": 1e-06,
|
| 4088 |
-
"loss": -0.0036,
|
| 4089 |
-
"num_tokens": 407581593.0,
|
| 4090 |
-
"reward": 1.5895691911379497,
|
| 4091 |
-
"reward_std": 0.15091915730209576,
|
| 4092 |
-
"rewards/acc_reward_func": 1.5895691627547854,
|
| 4093 |
-
"step": 441
|
| 4094 |
-
},
|
| 4095 |
-
{
|
| 4096 |
-
"clip_ratio": 7.479514776302192e-05,
|
| 4097 |
-
"epoch": 2.8944444444444444,
|
| 4098 |
-
"grad_norm": 0.09067609906196594,
|
| 4099 |
-
"learning_rate": 1e-06,
|
| 4100 |
-
"loss": -0.004,
|
| 4101 |
-
"step": 442
|
| 4102 |
-
},
|
| 4103 |
-
{
|
| 4104 |
-
"clip_ratio": 0.00017182660078452456,
|
| 4105 |
-
"epoch": 2.900925925925926,
|
| 4106 |
-
"grad_norm": 0.08883418887853622,
|
| 4107 |
-
"learning_rate": 1e-06,
|
| 4108 |
-
"loss": -0.0045,
|
| 4109 |
-
"step": 443
|
| 4110 |
-
},
|
| 4111 |
-
{
|
| 4112 |
-
"clip_ratio": 0.00044846041141898327,
|
| 4113 |
-
"epoch": 2.9074074074074074,
|
| 4114 |
-
"grad_norm": 0.08536746352910995,
|
| 4115 |
-
"learning_rate": 1e-06,
|
| 4116 |
-
"loss": -0.0052,
|
| 4117 |
-
"step": 444
|
| 4118 |
-
},
|
| 4119 |
-
{
|
| 4120 |
-
"clip_ratio": 0.0,
|
| 4121 |
-
"completion_length": 314.00907825288317,
|
| 4122 |
-
"epoch": 2.9138888888888888,
|
| 4123 |
-
"grad_norm": 0.08820128440856934,
|
| 4124 |
-
"learning_rate": 1e-06,
|
| 4125 |
-
"loss": 0.0061,
|
| 4126 |
-
"num_tokens": 411218279.0,
|
| 4127 |
-
"reward": 1.749433125768389,
|
| 4128 |
-
"reward_std": 0.12802751929987044,
|
| 4129 |
-
"rewards/acc_reward_func": 1.7494331030618577,
|
| 4130 |
-
"step": 445
|
| 4131 |
-
},
|
| 4132 |
-
{
|
| 4133 |
-
"clip_ratio": 8.860694513367933e-05,
|
| 4134 |
-
"epoch": 2.9203703703703705,
|
| 4135 |
-
"grad_norm": 0.08626239001750946,
|
| 4136 |
-
"learning_rate": 1e-06,
|
| 4137 |
-
"loss": 0.0058,
|
| 4138 |
-
"step": 446
|
| 4139 |
-
},
|
| 4140 |
-
{
|
| 4141 |
-
"clip_ratio": 0.0003607673178997911,
|
| 4142 |
-
"epoch": 2.926851851851852,
|
| 4143 |
-
"grad_norm": 0.08441189676523209,
|
| 4144 |
-
"learning_rate": 1e-06,
|
| 4145 |
-
"loss": 0.0053,
|
| 4146 |
-
"step": 447
|
| 4147 |
-
},
|
| 4148 |
-
{
|
| 4149 |
-
"clip_ratio": 0.0008602460790522551,
|
| 4150 |
-
"epoch": 2.9333333333333336,
|
| 4151 |
-
"grad_norm": 0.08371831476688385,
|
| 4152 |
-
"learning_rate": 1e-06,
|
| 4153 |
-
"loss": 0.0046,
|
| 4154 |
-
"step": 448
|
| 4155 |
-
},
|
| 4156 |
-
{
|
| 4157 |
-
"clip_ratio": 0.0,
|
| 4158 |
-
"completion_length": 329.5929768880208,
|
| 4159 |
-
"epoch": 2.939814814814815,
|
| 4160 |
-
"grad_norm": 0.08556215465068817,
|
| 4161 |
-
"learning_rate": 1e-06,
|
| 4162 |
-
"loss": 0.0046,
|
| 4163 |
-
"num_tokens": 415063728.0,
|
| 4164 |
-
"reward": 1.6507936772846041,
|
| 4165 |
-
"reward_std": 0.12087976418080784,
|
| 4166 |
-
"rewards/acc_reward_func": 1.6507936602547055,
|
| 4167 |
-
"step": 449
|
| 4168 |
-
},
|
| 4169 |
-
{
|
| 4170 |
-
"clip_ratio": 6.594504590057546e-05,
|
| 4171 |
-
"epoch": 2.946296296296296,
|
| 4172 |
-
"grad_norm": 0.08255585283041,
|
| 4173 |
-
"learning_rate": 1e-06,
|
| 4174 |
-
"loss": 0.0043,
|
| 4175 |
-
"step": 450
|
| 4176 |
-
},
|
| 4177 |
-
{
|
| 4178 |
-
"clip_ratio": 0.00024212310728173527,
|
| 4179 |
-
"epoch": 2.9527777777777775,
|
| 4180 |
-
"grad_norm": 0.0834159404039383,
|
| 4181 |
-
"learning_rate": 1e-06,
|
| 4182 |
-
"loss": 0.0038,
|
| 4183 |
-
"step": 451
|
| 4184 |
-
},
|
| 4185 |
-
{
|
| 4186 |
-
"clip_ratio": 0.0005293784523105604,
|
| 4187 |
-
"epoch": 2.9592592592592593,
|
| 4188 |
-
"grad_norm": 0.07938043773174286,
|
| 4189 |
-
"learning_rate": 1e-06,
|
| 4190 |
-
"loss": 0.0031,
|
| 4191 |
-
"step": 452
|
| 4192 |
-
},
|
| 4193 |
-
{
|
| 4194 |
-
"clip_ratio": 0.0,
|
| 4195 |
-
"completion_length": 310.26644606817337,
|
| 4196 |
-
"epoch": 2.965740740740741,
|
| 4197 |
-
"grad_norm": 0.0936601534485817,
|
| 4198 |
-
"learning_rate": 1e-06,
|
| 4199 |
-
"loss": 0.0064,
|
| 4200 |
-
"num_tokens": 418688725.0,
|
| 4201 |
-
"reward": 1.658730183328901,
|
| 4202 |
-
"reward_std": 0.14163324290088244,
|
| 4203 |
-
"rewards/acc_reward_func": 1.6587301662990026,
|
| 4204 |
-
"step": 453
|
| 4205 |
-
},
|
| 4206 |
-
{
|
| 4207 |
-
"clip_ratio": 7.96998169140092e-05,
|
| 4208 |
-
"epoch": 2.9722222222222223,
|
| 4209 |
-
"grad_norm": 0.08971893042325974,
|
| 4210 |
-
"learning_rate": 1e-06,
|
| 4211 |
-
"loss": 0.006,
|
| 4212 |
-
"step": 454
|
| 4213 |
-
},
|
| 4214 |
-
{
|
| 4215 |
-
"clip_ratio": 0.00018841165833042135,
|
| 4216 |
-
"epoch": 2.9787037037037036,
|
| 4217 |
-
"grad_norm": 0.08801492303609848,
|
| 4218 |
-
"learning_rate": 1e-06,
|
| 4219 |
-
"loss": 0.0054,
|
| 4220 |
-
"step": 455
|
| 4221 |
-
},
|
| 4222 |
-
{
|
| 4223 |
-
"clip_ratio": 0.00046365962216874496,
|
| 4224 |
-
"epoch": 2.985185185185185,
|
| 4225 |
-
"grad_norm": 0.0843043327331543,
|
| 4226 |
-
"learning_rate": 1e-06,
|
| 4227 |
-
"loss": 0.0047,
|
| 4228 |
-
"step": 456
|
| 4229 |
-
},
|
| 4230 |
-
{
|
| 4231 |
-
"clip_ratio": 0.0,
|
| 4232 |
-
"completion_length": 306.1598714192708,
|
| 4233 |
-
"epoch": 3.0064814814814813,
|
| 4234 |
-
"grad_norm": 0.12096734344959259,
|
| 4235 |
-
"learning_rate": 1e-06,
|
| 4236 |
-
"loss": 0.0056,
|
| 4237 |
-
"num_tokens": 422140912.0,
|
| 4238 |
-
"reward": 1.7018140838259743,
|
| 4239 |
-
"reward_std": 0.16363864338823728,
|
| 4240 |
-
"rewards/acc_reward_func": 1.7018140667960757,
|
| 4241 |
-
"step": 457
|
| 4242 |
-
},
|
| 4243 |
-
{
|
| 4244 |
-
"clip_ratio": 7.911362107344238e-05,
|
| 4245 |
-
"epoch": 3.012962962962963,
|
| 4246 |
-
"grad_norm": 0.1199815645813942,
|
| 4247 |
-
"learning_rate": 1e-06,
|
| 4248 |
-
"loss": 0.005,
|
| 4249 |
-
"step": 458
|
| 4250 |
-
},
|
| 4251 |
-
{
|
| 4252 |
-
"clip_ratio": 0.0003393752437356549,
|
| 4253 |
-
"epoch": 3.0194444444444444,
|
| 4254 |
-
"grad_norm": 0.11338788270950317,
|
| 4255 |
-
"learning_rate": 1e-06,
|
| 4256 |
-
"loss": 0.004,
|
| 4257 |
-
"step": 459
|
| 4258 |
-
},
|
| 4259 |
-
{
|
| 4260 |
-
"clip_ratio": 0.0012846073105243878,
|
| 4261 |
-
"epoch": 3.025925925925926,
|
| 4262 |
-
"grad_norm": 0.11216680705547333,
|
| 4263 |
-
"learning_rate": 1e-06,
|
| 4264 |
-
"loss": 0.0029,
|
| 4265 |
-
"step": 460
|
| 4266 |
-
},
|
| 4267 |
-
{
|
| 4268 |
-
"clip_ratio": 0.0,
|
| 4269 |
-
"completion_length": 326.7562415713356,
|
| 4270 |
-
"epoch": 3.0324074074074074,
|
| 4271 |
-
"grad_norm": 0.14053334295749664,
|
| 4272 |
-
"learning_rate": 1e-06,
|
| 4273 |
-
"loss": 0.0426,
|
| 4274 |
-
"num_tokens": 425862443.0,
|
| 4275 |
-
"reward": 1.6666666950498308,
|
| 4276 |
-
"reward_std": 0.2066345683165959,
|
| 4277 |
-
"rewards/acc_reward_func": 1.6666666723432995,
|
| 4278 |
-
"step": 461
|
| 4279 |
-
},
|
| 4280 |
-
{
|
| 4281 |
-
"clip_ratio": 0.0001409024182413261,
|
| 4282 |
-
"epoch": 3.0388888888888888,
|
| 4283 |
-
"grad_norm": 0.13825556635856628,
|
| 4284 |
-
"learning_rate": 1e-06,
|
| 4285 |
-
"loss": 0.0418,
|
| 4286 |
-
"step": 462
|
| 4287 |
-
},
|
| 4288 |
-
{
|
| 4289 |
-
"clip_ratio": 0.00045791927654395944,
|
| 4290 |
-
"epoch": 3.0453703703703705,
|
| 4291 |
-
"grad_norm": 0.1277666687965393,
|
| 4292 |
-
"learning_rate": 1e-06,
|
| 4293 |
-
"loss": 0.0407,
|
| 4294 |
-
"step": 463
|
| 4295 |
-
},
|
| 4296 |
-
{
|
| 4297 |
-
"clip_ratio": 0.0016882882032188632,
|
| 4298 |
-
"epoch": 3.051851851851852,
|
| 4299 |
-
"grad_norm": 0.11887232214212418,
|
| 4300 |
-
"learning_rate": 1e-06,
|
| 4301 |
-
"loss": 0.0394,
|
| 4302 |
-
"step": 464
|
| 4303 |
-
},
|
| 4304 |
-
{
|
| 4305 |
-
"clip_ratio": 0.0,
|
| 4306 |
-
"completion_length": 289.12245686848956,
|
| 4307 |
-
"epoch": 3.058333333333333,
|
| 4308 |
-
"grad_norm": 0.1422978937625885,
|
| 4309 |
-
"learning_rate": 1e-06,
|
| 4310 |
-
"loss": 0.0074,
|
| 4311 |
-
"num_tokens": 429768887.0,
|
| 4312 |
-
"reward": 1.7029478720256261,
|
| 4313 |
-
"reward_std": 0.16384412295051984,
|
| 4314 |
-
"rewards/acc_reward_func": 1.7029478549957275,
|
| 4315 |
-
"step": 465
|
| 4316 |
-
},
|
| 4317 |
-
{
|
| 4318 |
-
"clip_ratio": 0.00022631913868411045,
|
| 4319 |
-
"epoch": 3.064814814814815,
|
| 4320 |
-
"grad_norm": 0.12926463782787323,
|
| 4321 |
-
"learning_rate": 1e-06,
|
| 4322 |
-
"loss": 0.0067,
|
| 4323 |
-
"step": 466
|
| 4324 |
-
},
|
| 4325 |
-
{
|
| 4326 |
-
"clip_ratio": 0.0016295394466613374,
|
| 4327 |
-
"epoch": 3.071296296296296,
|
| 4328 |
-
"grad_norm": 0.12168161571025848,
|
| 4329 |
-
"learning_rate": 1e-06,
|
| 4330 |
-
"loss": 0.0057,
|
| 4331 |
-
"step": 467
|
| 4332 |
-
},
|
| 4333 |
-
{
|
| 4334 |
-
"clip_ratio": 0.004102049317831795,
|
| 4335 |
-
"epoch": 3.077777777777778,
|
| 4336 |
-
"grad_norm": 0.13436855375766754,
|
| 4337 |
-
"learning_rate": 1e-06,
|
| 4338 |
-
"loss": 0.0047,
|
| 4339 |
-
"step": 468
|
| 4340 |
-
},
|
| 4341 |
-
{
|
| 4342 |
-
"clip_ratio": 0.0,
|
| 4343 |
-
"completion_length": 279.0952410016741,
|
| 4344 |
-
"epoch": 3.0842592592592593,
|
| 4345 |
-
"grad_norm": 0.10855058580636978,
|
| 4346 |
-
"learning_rate": 1e-06,
|
| 4347 |
-
"loss": 0.0037,
|
| 4348 |
-
"num_tokens": 433203107.0,
|
| 4349 |
-
"reward": 1.6882086594899495,
|
| 4350 |
-
"reward_std": 0.13231020988453002,
|
| 4351 |
-
"rewards/acc_reward_func": 1.688208608400254,
|
| 4352 |
-
"step": 469
|
| 4353 |
-
},
|
| 4354 |
-
{
|
| 4355 |
-
"clip_ratio": 0.00115988185557182,
|
| 4356 |
-
"epoch": 3.0907407407407406,
|
| 4357 |
-
"grad_norm": 0.11043041199445724,
|
| 4358 |
-
"learning_rate": 1e-06,
|
| 4359 |
-
"loss": 0.0032,
|
| 4360 |
-
"step": 470
|
| 4361 |
-
},
|
| 4362 |
-
{
|
| 4363 |
-
"clip_ratio": 0.002692721124428014,
|
| 4364 |
-
"epoch": 3.0972222222222223,
|
| 4365 |
-
"grad_norm": 0.12333739548921585,
|
| 4366 |
-
"learning_rate": 1e-06,
|
| 4367 |
-
"loss": 0.0025,
|
| 4368 |
-
"step": 471
|
| 4369 |
-
},
|
| 4370 |
-
{
|
| 4371 |
-
"clip_ratio": 0.0018077372972454344,
|
| 4372 |
-
"epoch": 3.1037037037037036,
|
| 4373 |
-
"grad_norm": 0.0895155668258667,
|
| 4374 |
-
"learning_rate": 1e-06,
|
| 4375 |
-
"loss": 0.0016,
|
| 4376 |
-
"step": 472
|
| 4377 |
-
},
|
| 4378 |
-
{
|
| 4379 |
-
"clip_ratio": 0.0,
|
| 4380 |
-
"completion_length": 266.54309227353053,
|
| 4381 |
-
"epoch": 3.1101851851851854,
|
| 4382 |
-
"grad_norm": 0.1423775851726532,
|
| 4383 |
-
"learning_rate": 1e-06,
|
| 4384 |
-
"loss": 0.0015,
|
| 4385 |
-
"num_tokens": 436395760.0,
|
| 4386 |
-
"reward": 1.6870748485837663,
|
| 4387 |
-
"reward_std": 0.14476618241696132,
|
| 4388 |
-
"rewards/acc_reward_func": 1.687074825877235,
|
| 4389 |
-
"step": 473
|
| 4390 |
-
},
|
| 4391 |
-
{
|
| 4392 |
-
"clip_ratio": 0.0009848821119660335,
|
| 4393 |
-
"epoch": 3.1166666666666667,
|
| 4394 |
-
"grad_norm": 0.11566051095724106,
|
| 4395 |
-
"learning_rate": 1e-06,
|
| 4396 |
-
"loss": 0.0008,
|
| 4397 |
-
"step": 474
|
| 4398 |
-
},
|
| 4399 |
-
{
|
| 4400 |
-
"clip_ratio": 0.004755329806357622,
|
| 4401 |
-
"epoch": 3.123148148148148,
|
| 4402 |
-
"grad_norm": 0.13648808002471924,
|
| 4403 |
-
"learning_rate": 1e-06,
|
| 4404 |
-
"loss": 0.0001,
|
| 4405 |
-
"step": 475
|
| 4406 |
-
},
|
| 4407 |
-
{
|
| 4408 |
-
"clip_ratio": 0.0044265871623619685,
|
| 4409 |
-
"epoch": 3.1296296296296298,
|
| 4410 |
-
"grad_norm": 0.146810844540596,
|
| 4411 |
-
"learning_rate": 1e-06,
|
| 4412 |
-
"loss": -0.0009,
|
| 4413 |
-
"step": 476
|
| 4414 |
-
},
|
| 4415 |
-
{
|
| 4416 |
-
"clip_ratio": 0.0,
|
| 4417 |
-
"completion_length": 269.2312970842634,
|
| 4418 |
-
"epoch": 3.136111111111111,
|
| 4419 |
-
"grad_norm": 0.12026971578598022,
|
| 4420 |
-
"learning_rate": 1e-06,
|
| 4421 |
-
"loss": 0.0082,
|
| 4422 |
-
"num_tokens": 439744150.0,
|
| 4423 |
-
"reward": 1.633786882672991,
|
| 4424 |
-
"reward_std": 0.11844597526249431,
|
| 4425 |
-
"rewards/acc_reward_func": 1.6337868429365612,
|
| 4426 |
-
"step": 477
|
| 4427 |
-
},
|
| 4428 |
-
{
|
| 4429 |
-
"clip_ratio": 0.0009484389579267285,
|
| 4430 |
-
"epoch": 3.1425925925925924,
|
| 4431 |
-
"grad_norm": 0.1215616911649704,
|
| 4432 |
-
"learning_rate": 1e-06,
|
| 4433 |
-
"loss": 0.0077,
|
| 4434 |
-
"step": 478
|
| 4435 |
-
},
|
| 4436 |
-
{
|
| 4437 |
-
"clip_ratio": 0.003697521280541661,
|
| 4438 |
-
"epoch": 3.149074074074074,
|
| 4439 |
-
"grad_norm": 0.14095111191272736,
|
| 4440 |
-
"learning_rate": 1e-06,
|
| 4441 |
-
"loss": 0.007,
|
| 4442 |
-
"step": 479
|
| 4443 |
-
},
|
| 4444 |
-
{
|
| 4445 |
-
"clip_ratio": 0.0034387974633968304,
|
| 4446 |
-
"epoch": 3.1555555555555554,
|
| 4447 |
-
"grad_norm": 0.12663334608078003,
|
| 4448 |
-
"learning_rate": 1e-06,
|
| 4449 |
-
"loss": 0.0059,
|
| 4450 |
-
"step": 480
|
| 4451 |
-
},
|
| 4452 |
-
{
|
| 4453 |
-
"clip_ratio": 0.0,
|
| 4454 |
-
"completion_length": 265.1598692394438,
|
| 4455 |
-
"epoch": 3.162037037037037,
|
| 4456 |
-
"grad_norm": 0.1346891224384308,
|
| 4457 |
-
"learning_rate": 1e-06,
|
| 4458 |
-
"loss": 0.0047,
|
| 4459 |
-
"num_tokens": 443271385.0,
|
| 4460 |
-
"reward": 1.7256236189887637,
|
| 4461 |
-
"reward_std": 0.14089716740307354,
|
| 4462 |
-
"rewards/acc_reward_func": 1.7256235792523338,
|
| 4463 |
-
"step": 481
|
| 4464 |
-
},
|
| 4465 |
-
{
|
| 4466 |
-
"clip_ratio": 0.0004448807906425957,
|
| 4467 |
-
"epoch": 3.1685185185185185,
|
| 4468 |
-
"grad_norm": 0.12079239636659622,
|
| 4469 |
-
"learning_rate": 1e-06,
|
| 4470 |
-
"loss": 0.0041,
|
| 4471 |
-
"step": 482
|
| 4472 |
-
},
|
| 4473 |
-
{
|
| 4474 |
-
"clip_ratio": 0.0020043016965722756,
|
| 4475 |
-
"epoch": 3.175,
|
| 4476 |
-
"grad_norm": 0.14017271995544434,
|
| 4477 |
-
"learning_rate": 1e-06,
|
| 4478 |
-
"loss": 0.0033,
|
| 4479 |
-
"step": 483
|
| 4480 |
-
},
|
| 4481 |
-
{
|
| 4482 |
-
"clip_ratio": 0.0017200966179925239,
|
| 4483 |
-
"epoch": 3.1814814814814816,
|
| 4484 |
-
"grad_norm": 0.15152569115161896,
|
| 4485 |
-
"learning_rate": 1e-06,
|
| 4486 |
-
"loss": 0.0023,
|
| 4487 |
-
"step": 484
|
| 4488 |
-
},
|
| 4489 |
-
{
|
| 4490 |
-
"clip_ratio": 0.0,
|
| 4491 |
-
"completion_length": 256.5975094749814,
|
| 4492 |
-
"epoch": 3.187962962962963,
|
| 4493 |
-
"grad_norm": 0.10771705955266953,
|
| 4494 |
-
"learning_rate": 1e-06,
|
| 4495 |
-
"loss": 0.0069,
|
| 4496 |
-
"num_tokens": 446677080.0,
|
| 4497 |
-
"reward": 1.7256236189887637,
|
| 4498 |
-
"reward_std": 0.12167022980394818,
|
| 4499 |
-
"rewards/acc_reward_func": 1.7256235792523338,
|
| 4500 |
-
"step": 485
|
| 4501 |
-
},
|
| 4502 |
-
{
|
| 4503 |
-
"clip_ratio": 0.0006665461551165208,
|
| 4504 |
-
"epoch": 3.1944444444444446,
|
| 4505 |
-
"grad_norm": 0.10599280893802643,
|
| 4506 |
-
"learning_rate": 1e-06,
|
| 4507 |
-
"loss": 0.0064,
|
| 4508 |
-
"step": 486
|
| 4509 |
-
},
|
| 4510 |
-
{
|
| 4511 |
-
"clip_ratio": 0.0033372725759233746,
|
| 4512 |
-
"epoch": 3.200925925925926,
|
| 4513 |
-
"grad_norm": 0.13055044412612915,
|
| 4514 |
-
"learning_rate": 1e-06,
|
| 4515 |
-
"loss": 0.0058,
|
| 4516 |
-
"step": 487
|
| 4517 |
-
},
|
| 4518 |
-
{
|
| 4519 |
-
"clip_ratio": 0.0035411600755261524,
|
| 4520 |
-
"epoch": 3.2074074074074073,
|
| 4521 |
-
"grad_norm": 0.12772558629512787,
|
| 4522 |
-
"learning_rate": 1e-06,
|
| 4523 |
-
"loss": 0.0049,
|
| 4524 |
-
"step": 488
|
| 4525 |
-
},
|
| 4526 |
-
{
|
| 4527 |
-
"clip_ratio": 0.0,
|
| 4528 |
-
"completion_length": 232.3888920375279,
|
| 4529 |
-
"epoch": 3.213888888888889,
|
| 4530 |
-
"grad_norm": 0.16783253848552704,
|
| 4531 |
-
"learning_rate": 1e-06,
|
| 4532 |
-
"loss": 0.0085,
|
| 4533 |
-
"num_tokens": 450388591.0,
|
| 4534 |
-
"reward": 1.6995465131033034,
|
| 4535 |
-
"reward_std": 0.13824334208454406,
|
| 4536 |
-
"rewards/acc_reward_func": 1.6995464733668737,
|
| 4537 |
-
"step": 489
|
| 4538 |
-
},
|
| 4539 |
-
{
|
| 4540 |
-
"clip_ratio": 0.0005684612243342036,
|
| 4541 |
-
"epoch": 3.2203703703703703,
|
| 4542 |
-
"grad_norm": 0.14137648046016693,
|
| 4543 |
-
"learning_rate": 1e-06,
|
| 4544 |
-
"loss": 0.0077,
|
| 4545 |
-
"step": 490
|
| 4546 |
-
},
|
| 4547 |
-
{
|
| 4548 |
-
"clip_ratio": 0.004452694151994018,
|
| 4549 |
-
"epoch": 3.226851851851852,
|
| 4550 |
-
"grad_norm": 0.14627555012702942,
|
| 4551 |
-
"learning_rate": 1e-06,
|
| 4552 |
-
"loss": 0.0068,
|
| 4553 |
-
"step": 491
|
| 4554 |
-
},
|
| 4555 |
-
{
|
| 4556 |
-
"clip_ratio": 0.006235157244927471,
|
| 4557 |
-
"epoch": 3.2333333333333334,
|
| 4558 |
-
"grad_norm": 0.17356812953948975,
|
| 4559 |
-
"learning_rate": 1e-06,
|
| 4560 |
-
"loss": 0.0059,
|
| 4561 |
-
"step": 492
|
| 4562 |
-
},
|
| 4563 |
-
{
|
| 4564 |
-
"clip_ratio": 0.0,
|
| 4565 |
-
"completion_length": 230.3911626906622,
|
| 4566 |
-
"epoch": 3.2398148148148147,
|
| 4567 |
-
"grad_norm": 0.1436953991651535,
|
| 4568 |
-
"learning_rate": 1e-06,
|
| 4569 |
-
"loss": -0.001,
|
| 4570 |
-
"num_tokens": 454082620.0,
|
| 4571 |
-
"reward": 1.640589606194269,
|
| 4572 |
-
"reward_std": 0.13000182736487614,
|
| 4573 |
-
"rewards/acc_reward_func": 1.6405895664578392,
|
| 4574 |
-
"step": 493
|
| 4575 |
-
},
|
| 4576 |
-
{
|
| 4577 |
-
"clip_ratio": 0.00045952137346224237,
|
| 4578 |
-
"epoch": 3.2462962962962965,
|
| 4579 |
-
"grad_norm": 0.14312081038951874,
|
| 4580 |
-
"learning_rate": 1e-06,
|
| 4581 |
-
"loss": -0.0018,
|
| 4582 |
-
"step": 494
|
| 4583 |
-
},
|
| 4584 |
-
{
|
| 4585 |
-
"clip_ratio": 0.0025044624372163697,
|
| 4586 |
-
"epoch": 3.2527777777777778,
|
| 4587 |
-
"grad_norm": 0.15083995461463928,
|
| 4588 |
-
"learning_rate": 1e-06,
|
| 4589 |
-
"loss": -0.003,
|
| 4590 |
-
"step": 495
|
| 4591 |
-
},
|
| 4592 |
-
{
|
| 4593 |
-
"clip_ratio": 0.002988206178304695,
|
| 4594 |
-
"epoch": 3.259259259259259,
|
| 4595 |
-
"grad_norm": 0.15178053081035614,
|
| 4596 |
-
"learning_rate": 1e-06,
|
| 4597 |
-
"loss": -0.0044,
|
| 4598 |
-
"step": 496
|
| 4599 |
-
},
|
| 4600 |
-
{
|
| 4601 |
-
"clip_ratio": 0.0,
|
| 4602 |
-
"completion_length": 239.91270228794642,
|
| 4603 |
-
"epoch": 3.265740740740741,
|
| 4604 |
-
"grad_norm": 0.11761778593063354,
|
| 4605 |
-
"learning_rate": 1e-06,
|
| 4606 |
-
"loss": -0.0011,
|
| 4607 |
-
"num_tokens": 457521509.0,
|
| 4608 |
-
"reward": 1.7120181776228405,
|
| 4609 |
-
"reward_std": 0.10198826484736942,
|
| 4610 |
-
"rewards/acc_reward_func": 1.7120181322097778,
|
| 4611 |
-
"step": 497
|
| 4612 |
-
},
|
| 4613 |
-
{
|
| 4614 |
-
"clip_ratio": 0.00034070668923613125,
|
| 4615 |
-
"epoch": 3.272222222222222,
|
| 4616 |
-
"grad_norm": 0.10410414636135101,
|
| 4617 |
-
"learning_rate": 1e-06,
|
| 4618 |
-
"loss": -0.0014,
|
| 4619 |
-
"step": 498
|
| 4620 |
-
},
|
| 4621 |
-
{
|
| 4622 |
-
"clip_ratio": 0.0006974699981150306,
|
| 4623 |
-
"epoch": 3.278703703703704,
|
| 4624 |
-
"grad_norm": 0.10498173534870148,
|
| 4625 |
-
"learning_rate": 1e-06,
|
| 4626 |
-
"loss": -0.0021,
|
| 4627 |
-
"step": 499
|
| 4628 |
-
},
|
| 4629 |
-
{
|
| 4630 |
-
"clip_ratio": 0.0008304606145386407,
|
| 4631 |
-
"epoch": 3.285185185185185,
|
| 4632 |
-
"grad_norm": 0.10428803414106369,
|
| 4633 |
-
"learning_rate": 1e-06,
|
| 4634 |
-
"loss": -0.0029,
|
| 4635 |
-
"step": 500
|
| 4636 |
}
|
| 4637 |
],
|
| 4638 |
"logging_steps": 1,
|
|
|
|
| 2 |   "best_global_step": null,
| 3 |   "best_metric": null,
| 4 |   "best_model_checkpoint": null,
| 5 | + "epoch": 1.6351851851851853,
| 6 |   "eval_steps": 500,
| 7 | + "global_step": 250,
| 8 |   "is_hyper_param_search": false,
| 9 |   "is_local_process_zero": true,
| 10 |  "is_world_process_zero": true,

| 2323 |     "learning_rate": 1e-06,
| 2324 |     "loss": 0.0002,
| 2325 |     "step": 250
| 2326 |   }
| 2327 | ],
| 2328 | "logging_steps": 1,
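
A minimal sketch of how the truncated state file shown in this diff could be inspected after checkout. This is not part of the commit; it assumes the standard Hugging Face Trainer layout for trainer_state.json (a top-level "global_step" plus a "log_history" list whose entries carry the "step", "loss", and "reward" fields visible above), and the filename is taken from that convention.

    # inspect_state.py -- sketch, assuming the standard trainer_state.json layout
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    # After this commit the state should stop at checkpoint step 250.
    print("global_step:", state["global_step"])   # expected: 250
    print("epoch:", state["epoch"])               # expected: 1.6351851851851853

    # Not every log entry has a reward (some steps log only loss/grad_norm),
    # so filter before reading it.
    rewards = [(e["step"], e["reward"]) for e in state["log_history"] if "reward" in e]
    print("last logged reward:", rewards[-1])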