update files

- Modelfile +3 -1
- README.md +0 -116
- adapter/README.md +6 -6
- adapter/adapter_config.json +4 -4
- adapter/adapter_model.safetensors +1 -1
- adapter/all_results.json +5 -5
- adapter/checkpoint-204/README.md +202 -0
- adapter/checkpoint-204/adapter_config.json +34 -0
- adapter/checkpoint-204/adapter_model.safetensors +3 -0
- adapter/checkpoint-204/optimizer.pt +3 -0
- adapter/checkpoint-204/rng_state.pth +3 -0
- adapter/checkpoint-204/scheduler.pt +3 -0
- adapter/checkpoint-204/special_tokens_map.json +32 -0
- adapter/checkpoint-204/tokenizer.json +3 -0
- adapter/checkpoint-204/tokenizer_config.json +200 -0
- adapter/checkpoint-204/trainer_state.json +1461 -0
- adapter/checkpoint-204/training_args.bin +3 -0
- adapter/train_results.json +5 -5
- adapter/trainer_log.jsonl +205 -279
- adapter/trainer_state.json +824 -1300
- adapter/training_args.bin +1 -1
- model-00001-of-00004.safetensors +1 -1
- model-00002-of-00004.safetensors +1 -1
- model-00003-of-00004.safetensors +1 -1
- model-00004-of-00004.safetensors +1 -1
Modelfile
CHANGED

@@ -2,7 +2,9 @@
 
 FROM .
 
-TEMPLATE """{{ if .System }}{{ .System }}{{ end }}{{ range .Messages }}{{ if eq .Role "user" }}<|User|>{{ .Content }}{{ else if eq .Role "assistant" }}
+TEMPLATE """{{ if .System }}<|begin▁of▁sentence|>{{ .System }}{{ end }}{{ range .Messages }}{{ if eq .Role "user" }}<|User|>{{ .Content }}<|Assistant|>{{ else if eq .Role "assistant" }}{{ .Content }}<|end▁of▁sentence|>{{ end }}{{ end }}"""
 
+SYSTEM """Please reason step by step inside the tags <think> and </think>, and put your final answer within \boxed{}."""
+
 PARAMETER stop "<|end▁of▁sentence|>"
 PARAMETER num_ctx 4096
README.md
DELETED

@@ -1,116 +0,0 @@
---
license: mit
datasets:
- GAIR/LIMO
language:
- en
base_model:
- deepseek-ai/DeepSeek-R1-Distill-Qwen-7B
tags:
- R1
- DeepSeek
- Distill
- Qwen
- 7B
- LIMO
---
# LIMO-R1-Distill-Qwen-7B
Using [deepseek-ai/DeepSeek-R1-Distill-Qwen-7B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B) as base model.

Fine-tuned on [GAIR/LIMO](https://huggingface.co/GAIR/LIMO).

Trained using LLaMA-Factory with the config:
```
max_seq_length = 6*1024

lora_rank = 32
lora_alpha = lora_rank * 2
lora_target = ["q_proj", "k_proj", "v_proj", "o_proj",
               "gate_proj", "up_proj", "down_proj"]

args = dict(
    stage="sft",
    do_train=True,
    model_name_or_path="unsloth/DeepSeek-R1-Distill-Qwen-7B-bnb-4bit",
    dataset="limo_restructured",
    template="custom_template",
    finetuning_type="lora",
    lora_target=lora_target,
    output_dir="qwen_distill_7b_lora",
    per_device_train_batch_size=1,
    gradient_accumulation_steps=3,
    lr_scheduler_type="cosine",
    logging_steps=1,
    warmup_ratio=0.1,
    save_steps=100,
    learning_rate=1e-4,
    num_train_epochs=1.0,
    max_grad_norm=1.0,
    loraplus_lr_ratio=16.0,
    fp16=True,
    report_to="none",
    preprocessing_num_workers=16,
    cutoff_len=max_seq_length,
)
```

System prompt used:
```
'You are a helpful assistant. Please reason step by step inside the tags <think> and </think>. Conclude with **Answer** and put your final answer within \\boxed{}.'
```

Custom template used in training:
```
register_template(
    name="custom_template",
    format_user=StringFormatter(
        slots=["<|User|>{{content}}"]
    ),
    format_assistant=StringFormatter(
        slots=["<|Assistant|>{{content}}<|end▁of▁sentence|>"]
    ),
    format_system=StringFormatter(
        slots=["{{content}}"]
    ),
    format_function=FunctionFormatter(
        slots=[
            "<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>{{type}}<|tool▁sep|>{{name}}\n```json\n{{arguments}}\n```<|tool▁call▁end|><|tool▁calls▁end|><|end▁of▁sentence|>"
        ],
        tool_format="qwen"
    ),
    format_observation=StringFormatter(
        slots=[
            "<|tool▁outputs▁begin|><|tool▁output▁begin|>{{content}}<|tool▁output▁end|><|tool▁outputs▁end|>"
        ]
    ),
    format_tools=ToolFormatter(tool_format="qwen"),
    default_system="",
    stop_words=["<|end▁of▁sentence|>"]
)
```

In the dataset, for variation, I randomly replaced the start of the string "Okay," with one of the following:
```
starts = [
    "Alright,",
    "Well,",
    "So,",
    "Hmm,",
    "Okay then,",
    "Right,",
    "Let's see,",
    "Now,",
    "Alrighty,",
    "Thinking about it,",
    "You know,",
    "Well then,",
    "Come to think of it,",
    "Actually,",
    "Now that I think about it,",
    "Good question,",
    "Let me think,",
    "Let's see now,",
    "Interesting,",
    "Now then,"
]
```
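The preprocessing script itself is not part of the card; the following is a minimal sketch of the opener replacement described above (the function name and uniform sampling are assumptions):
```
import random

# Hypothetical helper: swap a leading "Okay," for a random alternative opener.
def vary_opener(text, starts):
    if text.startswith("Okay,"):
        return random.choice(starts) + text[len("Okay,"):]
    return text

print(vary_opener("Okay, let's factor the polynomial.", ["Alright,", "Hmm,", "Let's see,"]))
```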
adapter/README.md
CHANGED

@@ -7,14 +7,14 @@ tags:
 - lora
 - generated_from_trainer
 model-index:
-- name:
+- name: qwen7b_limo_lora_2
   results: []
 ---
 
 <!-- This model card has been generated automatically according to the information the Trainer had access to. You
 should probably proofread and complete it, then remove this comment. -->
 
-#
+# qwen7b_limo_lora_2
 
 This model is a fine-tuned version of [unsloth/DeepSeek-R1-Distill-Qwen-7B-bnb-4bit](https://huggingface.co/unsloth/DeepSeek-R1-Distill-Qwen-7B-bnb-4bit) on the limo_restructured dataset.
 
@@ -39,11 +39,11 @@ The following hyperparameters were used during training:
 - train_batch_size: 1
 - eval_batch_size: 8
 - seed: 42
-- gradient_accumulation_steps:
-- total_train_batch_size:
-- optimizer: Use
+- gradient_accumulation_steps: 4
+- total_train_batch_size: 4
+- optimizer: Use paged_adamw_8bit with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
 - lr_scheduler_type: cosine
-- lr_scheduler_warmup_ratio: 0.
+- lr_scheduler_warmup_ratio: 0.05
 - num_epochs: 1.0
 - mixed_precision_training: Native AMP
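The filled-in values are internally consistent: a per-device batch size of 1 with 4 accumulation steps gives the reported total train batch size of 4 on one device. (The deleted top-level README listed gradient_accumulation_steps=3 and warmup_ratio=0.1, so the two cards disagree on those values.) A quick check, with num_devices as an assumption since the card does not state it:
```
# Sanity check of the reported totals; num_devices = 1 is an assumption.
per_device_train_batch_size = 1
gradient_accumulation_steps = 4
num_devices = 1
total = per_device_train_batch_size * gradient_accumulation_steps * num_devices
assert total == 4  # matches "total_train_batch_size: 4" above
```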
adapter/adapter_config.json
CHANGED

@@ -20,12 +20,12 @@
     "rank_pattern": {},
     "revision": null,
     "target_modules": [
-        "k_proj",
-        "q_proj",
-        "v_proj",
-        "o_proj",
         "up_proj",
+        "o_proj",
+        "v_proj",
         "down_proj",
+        "k_proj",
+        "q_proj",
         "gate_proj"
     ],
     "task_type": "CAUSAL_LM",
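Only the serialization order of target_modules changed here; PEFT matches module names as an unordered collection, so both configs attach LoRA adapters to the same seven projection layers. A one-line check of that claim:
```
# The two lists differ only in order, not membership.
old = ["k_proj", "q_proj", "v_proj", "o_proj", "up_proj", "down_proj", "gate_proj"]
new = ["up_proj", "o_proj", "v_proj", "down_proj", "k_proj", "q_proj", "gate_proj"]
assert set(old) == set(new)
```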
adapter/adapter_model.safetensors
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d4efd5a47811401eb3c4597e839a7128162a61f31bb44b93d877e5b663410f72
 size 80792096
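The .safetensors, .pt, and .pth entries in this commit are Git LFS pointer files, not the binaries themselves: three lines giving the spec version, a sha256 of the content, and the byte size. Only the hash line changes when the adapter is retrained (the size happens to stay 80792096 bytes). A small sketch that parses a pointer like the one above:
```
# Parse a Git LFS pointer file into its fields.
def parse_lfs_pointer(text):
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    algo, digest = fields["oid"].split(":", 1)
    return {"version": fields["version"], "algo": algo, "digest": digest, "size": int(fields["size"])}

ptr = """version https://git-lfs.github.com/spec/v1
oid sha256:d4efd5a47811401eb3c4597e839a7128162a61f31bb44b93d877e5b663410f72
size 80792096"""
print(parse_lfs_pointer(ptr))  # algo 'sha256', size 80792096
```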
adapter/all_results.json
CHANGED

@@ -1,8 +1,8 @@
 {
     "epoch": 0.9987760097919217,
-    "total_flos": 1.
-    "train_loss": 0.
-    "train_runtime":
-    "train_samples_per_second": 0.
-    "train_steps_per_second": 0.
+    "total_flos": 1.7231028658783027e+17,
+    "train_loss": 0.4889225305295458,
+    "train_runtime": 5417.273,
+    "train_samples_per_second": 0.151,
+    "train_steps_per_second": 0.038
 }
adapter/checkpoint-204/README.md
ADDED

@@ -0,0 +1,202 @@
---
base_model: unsloth/DeepSeek-R1-Distill-Qwen-7B-bnb-4bit
library_name: peft
---

# Model Card for Model ID

<!-- Provide a quick summary of what the model is/does. -->

## Model Details

### Model Description

<!-- Provide a longer summary of what this model is. -->

- **Developed by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Model type:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
- **Finetuned from model [optional]:** [More Information Needed]

### Model Sources [optional]

<!-- Provide the basic links for the model. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->

### Direct Use

<!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->

[More Information Needed]

### Downstream Use [optional]

<!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.

## How to Get Started with the Model

Use the code below to get started with the model.

[More Information Needed]

## Training Details

### Training Data

<!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->

[More Information Needed]

### Training Procedure

<!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->

#### Preprocessing [optional]

[More Information Needed]

#### Training Hyperparameters

- **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->

#### Speeds, Sizes, Times [optional]

<!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->

[More Information Needed]

## Evaluation

<!-- This section describes the evaluation protocols and provides the results. -->

### Testing Data, Factors & Metrics

#### Testing Data

<!-- This should link to a Dataset Card if possible. -->

[More Information Needed]

#### Factors

<!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->

[More Information Needed]

#### Metrics

<!-- These are the evaluation metrics being used, ideally with a description of why. -->

[More Information Needed]

### Results

[More Information Needed]

#### Summary

## Model Examination [optional]

<!-- Relevant interpretability work for the model goes here -->

[More Information Needed]

## Environmental Impact

<!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->

Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).

- **Hardware Type:** [More Information Needed]
- **Hours used:** [More Information Needed]
- **Cloud Provider:** [More Information Needed]
- **Compute Region:** [More Information Needed]
- **Carbon Emitted:** [More Information Needed]

## Technical Specifications [optional]

### Model Architecture and Objective

[More Information Needed]

### Compute Infrastructure

[More Information Needed]

#### Hardware

[More Information Needed]

#### Software

[More Information Needed]

## Citation [optional]

<!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Model Card Authors [optional]

[More Information Needed]

## Model Card Contact

[More Information Needed]

### Framework versions

- PEFT 0.12.0
adapter/checkpoint-204/adapter_config.json
ADDED

@@ -0,0 +1,34 @@
{
    "alpha_pattern": {},
    "auto_mapping": null,
    "base_model_name_or_path": "unsloth/DeepSeek-R1-Distill-Qwen-7B-bnb-4bit",
    "bias": "none",
    "fan_in_fan_out": false,
    "inference_mode": true,
    "init_lora_weights": true,
    "layer_replication": null,
    "layers_pattern": null,
    "layers_to_transform": null,
    "loftq_config": {},
    "lora_alpha": 16,
    "lora_dropout": 0.0,
    "megatron_config": null,
    "megatron_core": "megatron.core",
    "modules_to_save": null,
    "peft_type": "LORA",
    "r": 8,
    "rank_pattern": {},
    "revision": null,
    "target_modules": [
        "up_proj",
        "o_proj",
        "v_proj",
        "down_proj",
        "k_proj",
        "q_proj",
        "gate_proj"
    ],
    "task_type": "CAUSAL_LM",
    "use_dora": false,
    "use_rslora": false
}
adapter/checkpoint-204/adapter_model.safetensors
ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d4efd5a47811401eb3c4597e839a7128162a61f31bb44b93d877e5b663410f72
size 80792096

adapter/checkpoint-204/optimizer.pt
ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:10f8385c6f048703abf86b66dc1f0603b5d15920e67a1beb1c6ba515ba88dec8
size 41462068

adapter/checkpoint-204/rng_state.pth
ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:386fcc8cc1089aade9450d86fb239ea3483f455fd2d78d8378645feecfec9d69
size 14244

adapter/checkpoint-204/scheduler.pt
ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0eb8c9c4d57dfe4861fde320fdf1ee79e5bdbcd601cfaf0223e3f33f9016b27c
size 1064
adapter/checkpoint-204/special_tokens_map.json
ADDED

@@ -0,0 +1,32 @@
{
    "additional_special_tokens": [
        {
            "content": "<|end▁of▁sentence|>",
            "lstrip": false,
            "normalized": false,
            "rstrip": false,
            "single_word": false
        }
    ],
    "bos_token": {
        "content": "<|begin▁of▁sentence|>",
        "lstrip": false,
        "normalized": false,
        "rstrip": false,
        "single_word": false
    },
    "eos_token": {
        "content": "<|end▁of▁sentence|>",
        "lstrip": false,
        "normalized": false,
        "rstrip": false,
        "single_word": false
    },
    "pad_token": {
        "content": "<|vision_pad|>",
        "lstrip": false,
        "normalized": false,
        "rstrip": false,
        "single_word": false
    }
}
adapter/checkpoint-204/tokenizer.json
ADDED

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e20ddafc659ba90242154b55275402edeca0715e5dbb30f56815a4ce081f4893
size 11422778
adapter/checkpoint-204/tokenizer_config.json
ADDED

@@ -0,0 +1,200 @@
{
    "add_bos_token": true,
    "add_eos_token": false,
    "add_prefix_space": null,
    "added_tokens_decoder": {
        "151643": {"content": "<|end▁of▁sentence|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151644": {"content": "<|User|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151645": {"content": "<|Assistant|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151646": {"content": "<|begin▁of▁sentence|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151647": {"content": "<|EOT|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151648": {"content": "<think>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151649": {"content": "</think>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151650": {"content": "<|quad_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151651": {"content": "<|quad_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151652": {"content": "<|vision_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151653": {"content": "<|vision_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151654": {"content": "<|vision_pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151655": {"content": "<|image_pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151656": {"content": "<|video_pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true},
        "151657": {"content": "<tool_call>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151658": {"content": "</tool_call>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151659": {"content": "<|fim_prefix|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151660": {"content": "<|fim_middle|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151661": {"content": "<|fim_suffix|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151662": {"content": "<|fim_pad|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151663": {"content": "<|repo_name|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false},
        "151664": {"content": "<|file_sep|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": false}
    },
    "additional_special_tokens": [
        "<|end▁of▁sentence|>"
    ],
    "bos_token": "<|begin▁of▁sentence|>",
    "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '</think>' in content %}{% set content = content.split('</think>')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|><think>\\n'}}{% endif %}",
    "clean_up_tokenization_spaces": false,
    "eos_token": "<|end▁of▁sentence|>",
    "extra_special_tokens": {},
    "legacy": true,
    "model_max_length": 6144,
    "pad_token": "<|vision_pad|>",
    "padding_side": "right",
    "sp_model_kwargs": {},
    "split_special_tokens": false,
    "tokenizer_class": "LlamaTokenizer",
    "unk_token": null,
    "use_default_system_prompt": false
}
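The chat_template field above is what transformers executes when apply_chat_template is called: it emits bos plus the system prompt once, wraps user turns in <|User|>, strips any <think>...</think> prefix from stored assistant turns, and with add_generation_prompt=True ends the prompt with <|Assistant|><think>\n. A usage sketch, assuming this checkpoint directory is available locally:
```
from transformers import AutoTokenizer

# Load the tokenizer shipped with this checkpoint (local path assumed).
tok = AutoTokenizer.from_pretrained("adapter/checkpoint-204")
messages = [
    {"role": "system", "content": "Please reason step by step inside the tags <think> and </think>."},
    {"role": "user", "content": "What is 7 * 6?"},
]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape: <|begin▁of▁sentence|>{system}<|User|>What is 7 * 6?<|Assistant|><think>\n
```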
adapter/checkpoint-204/trainer_state.json
ADDED

@@ -0,0 +1,1461 @@
{
    "best_metric": null,
    "best_model_checkpoint": null,
    "epoch": 0.9987760097919217,
    "eval_steps": 500,
    "global_step": 204,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
        {"epoch": 0.004895960832313341, "grad_norm": 0.03658522292971611, "learning_rate": 9.090909090909091e-06, "loss": 0.4365, "step": 1},
        {"epoch": 0.009791921664626682, "grad_norm": 0.035473328083753586, "learning_rate": 1.8181818181818182e-05, "loss": 0.6677, "step": 2},
        {"epoch": 0.014687882496940025, "grad_norm": 0.030256139114499092, "learning_rate": 2.7272727272727273e-05, "loss": 0.4084, "step": 3},
        {"epoch": 0.019583843329253364, "grad_norm": 0.04286854714155197, "learning_rate": 3.6363636363636364e-05, "loss": 0.3422, "step": 4},
        {"epoch": 0.02447980416156671, "grad_norm": 0.06271344423294067, "learning_rate": 4.545454545454546e-05, "loss": 0.5332, "step": 5},
        {"epoch": 0.02937576499388005, "grad_norm": 0.09929833561182022, "learning_rate": 5.4545454545454546e-05, "loss": 0.7114, "step": 6},
        {"epoch": 0.03427172582619339, "grad_norm": 0.08789193630218506, "learning_rate": 6.363636363636364e-05, "loss": 0.5457, "step": 7},
        {"epoch": 0.03916768665850673, "grad_norm": 0.0518026277422905, "learning_rate": 7.272727272727273e-05, "loss": 0.3846, "step": 8},
        {"epoch": 0.044063647490820076, "grad_norm": 0.042549822479486465, "learning_rate": 8.181818181818183e-05, "loss": 0.5076, "step": 9},
        {"epoch": 0.04895960832313342, "grad_norm": 0.051444098353385925, "learning_rate": 9.090909090909092e-05, "loss": 0.4675, "step": 10},
        {"epoch": 0.05385556915544676, "grad_norm": 0.08935613930225372, "learning_rate": 0.0001, "loss": 0.3412, "step": 11},
        {"epoch": 0.0587515299877601, "grad_norm": 0.10168597102165222, "learning_rate": 9.99933760728612e-05, "loss": 0.4016, "step": 12},
        {"epoch": 0.06364749082007344, "grad_norm": 0.1793641448020935, "learning_rate": 9.997350604650123e-05, "loss": 0.3738, "step": 13},
        {"epoch": 0.06854345165238677, "grad_norm": 0.24378074705600739, "learning_rate": 9.994039518562432e-05, "loss": 0.5781, "step": 14},
        {"epoch": 0.07343941248470012, "grad_norm": 0.08958296477794647, "learning_rate": 9.989405226318772e-05, "loss": 0.4275, "step": 15},
        {"epoch": 0.07833537331701346, "grad_norm": 0.16461962461471558, "learning_rate": 9.983448955807708e-05, "loss": 0.4416, "step": 16},
        {"epoch": 0.0832313341493268, "grad_norm": 0.2743546664714813, "learning_rate": 9.976172285185314e-05, "loss": 0.5598, "step": 17},
        {"epoch": 0.08812729498164015, "grad_norm": 0.05548033118247986, "learning_rate": 9.967577142457032e-05, "loss": 0.3509, "step": 18},
        {"epoch": 0.09302325581395349, "grad_norm": 0.10299059748649597, "learning_rate": 9.957665804966829e-05, "loss": 0.7287, "step": 19},
        {"epoch": 0.09791921664626684, "grad_norm": 0.1017536073923111, "learning_rate": 9.946440898793801e-05, "loss": 0.638, "step": 20},
        {"epoch": 0.10281517747858017, "grad_norm": 0.09810858964920044, "learning_rate": 9.933905398056372e-05, "loss": 0.4959, "step": 21},
        {"epoch": 0.10771113831089352, "grad_norm": 0.07071840763092041, "learning_rate": 9.920062624124282e-05, "loss": 0.5835, "step": 22},
        {"epoch": 0.11260709914320685, "grad_norm": 0.23103618621826172, "learning_rate": 9.904916244738571e-05, "loss": 0.7785, "step": 23},
        {"epoch": 0.1175030599755202, "grad_norm": 0.11760212481021881, "learning_rate": 9.888470273039775e-05, "loss": 0.3422, "step": 24},
        {"epoch": 0.12239902080783353, "grad_norm": 0.07370053976774216, "learning_rate": 9.870729066504629e-05, "loss": 0.3107, "step": 25},
        {"epoch": 0.12729498164014688, "grad_norm": 0.07285131514072418, "learning_rate": 9.851697325791505e-05, "loss": 0.4473, "step": 26},
        {"epoch": 0.13219094247246022, "grad_norm": 0.05355285108089447, "learning_rate": 9.831380093494957e-05, "loss": 0.3368, "step": 27},
        {"epoch": 0.13708690330477355, "grad_norm": 0.07507691532373428, "learning_rate": 9.809782752809644e-05, "loss": 0.3709, "step": 28},
        {"epoch": 0.1419828641370869, "grad_norm": 0.055260878056287766, "learning_rate": 9.786911026104007e-05, "loss": 0.4986, "step": 29},
        {"epoch": 0.14687882496940025, "grad_norm": 0.16646364331245422, "learning_rate": 9.762770973404094e-05, "loss": 0.6252, "step": 30},
        {"epoch": 0.15177478580171358, "grad_norm": 0.22445577383041382, "learning_rate": 9.737368990787916e-05, "loss": 0.4772, "step": 31},
        {"epoch": 0.15667074663402691, "grad_norm": 0.22849321365356445, "learning_rate": 9.710711808690754e-05, "loss": 0.381, "step": 32},
        {"epoch": 0.16156670746634028, "grad_norm": 0.05855480954051018, "learning_rate": 9.682806490121885e-05, "loss": 0.3429, "step": 33},
        {"epoch": 0.1664626682986536, "grad_norm": 0.06263675540685654, "learning_rate": 9.653660428793188e-05, "loss": 0.3927, "step": 34},
        {"epoch": 0.17135862913096694, "grad_norm": 0.277313232421875, "learning_rate": 9.623281347160127e-05, "loss": 0.9417, "step": 35},
        {"epoch": 0.1762545899632803, "grad_norm": 0.07143048942089081, "learning_rate": 9.591677294375636e-05, "loss": 0.4428, "step": 36},
        {"epoch": 0.18115055079559364, "grad_norm": 0.07706379145383835, "learning_rate": 9.558856644157432e-05, "loss": 0.3614, "step": 37},
        {"epoch": 0.18604651162790697, "grad_norm": 0.25005754828453064, "learning_rate": 9.52482809256934e-05, "loss": 0.6474, "step": 38},
        {"epoch": 0.1909424724602203, "grad_norm": 0.06337836384773254, "learning_rate": 9.489600655717217e-05, "loss": 0.4504, "step": 39},
        {"epoch": 0.19583843329253367, "grad_norm": 0.21187523007392883, "learning_rate": 9.453183667360062e-05, "loss": 0.6336, "step": 40},
        {"epoch": 0.200734394124847, "grad_norm": 0.1760697066783905, "learning_rate": 9.415586776436973e-05, "loss": 0.5665, "step": 41},
        {"epoch": 0.20563035495716034, "grad_norm": 0.10999207943677902, "learning_rate": 9.376819944510598e-05, "loss": 0.3929, "step": 42},
        {"epoch": 0.21052631578947367, "grad_norm": 0.09944123774766922, "learning_rate": 9.336893443127738e-05, "loss": 0.4195, "step": 43},
        {"epoch": 0.21542227662178703, "grad_norm": 0.1161671131849289, "learning_rate": 9.295817851097837e-05, "loss": 0.4643, "step": 44},
        {"epoch": 0.22031823745410037, "grad_norm": 0.11634702235460281, "learning_rate": 9.253604051690046e-05, "loss": 0.5375, "step": 45},
        {"epoch": 0.2252141982864137, "grad_norm": 0.07312487810850143, "learning_rate": 9.210263229749626e-05, "loss": 0.3291, "step": 46},
        {"epoch": 0.23011015911872704, "grad_norm": 0.08545304834842682, "learning_rate": 9.165806868734444e-05, "loss": 0.5543, "step": 47},
        {"epoch": 0.2350061199510404, "grad_norm": 0.15719719231128693, "learning_rate": 9.120246747672347e-05, "loss": 0.7045, "step": 48},
        {"epoch": 0.23990208078335373, "grad_norm": 0.11078042536973953, "learning_rate": 9.073594938040231e-05, "loss": 0.572, "step": 49},
        {"epoch": 0.24479804161566707, "grad_norm": 0.1860225647687912, "learning_rate": 9.025863800565613e-05, "loss": 0.7102, "step": 50},
        {"epoch": 0.24969400244798043, "grad_norm": 0.11442095041275024, "learning_rate": 8.977065981951566e-05, "loss": 0.5135, "step": 51},
        {"epoch": 0.25458996328029376, "grad_norm": 0.17788943648338318, "learning_rate": 8.927214411525895e-05, "loss": 0.4911, "step": 52},
        {"epoch": 0.2594859241126071, "grad_norm": 0.24241389334201813, "learning_rate": 8.876322297815405e-05, "loss": 0.6657, "step": 53},
        {"epoch": 0.26438188494492043, "grad_norm": 0.07881529629230499, "learning_rate": 8.824403125046225e-05, "loss": 0.4495, "step": 54},
        {"epoch": 0.2692778457772338, "grad_norm": 0.12150023877620697, "learning_rate": 8.771470649571056e-05, "loss": 0.3651, "step": 55},
        {"epoch": 0.2741738066095471, "grad_norm": 0.07412393391132355, "learning_rate": 8.717538896224332e-05, "loss": 0.3277, "step": 56},
        {"epoch": 0.27906976744186046, "grad_norm": 0.06195759400725365, "learning_rate": 8.662622154606237e-05, "loss": 0.361, "step": 57},
        {"epoch": 0.2839657282741738, "grad_norm": 0.06260855495929718, "learning_rate": 8.606734975296578e-05, "loss": 0.4541, "step": 58},
        {"epoch": 0.28886168910648713, "grad_norm": 0.22213032841682434, "learning_rate": 8.549892165999505e-05, "loss": 0.4133, "step": 59},
        {"epoch": 0.2937576499388005, "grad_norm": 0.08584744483232498, "learning_rate": 8.492108787620105e-05, "loss": 0.3445, "step": 60},
        {"epoch": 0.29865361077111385, "grad_norm": 0.07675541192293167, "learning_rate": 8.433400150273906e-05, "loss": 0.4518, "step": 61},
        {"epoch": 0.30354957160342716, "grad_norm": 0.07371577620506287, "learning_rate": 8.373781809230355e-05, "loss": 0.308, "step": 62},
        {"epoch": 0.3084455324357405, "grad_norm": 0.10943123698234558, "learning_rate": 8.313269560791342e-05, "loss": 0.7011, "step": 63},
        {"epoch": 0.31334149326805383, "grad_norm": 0.07427150011062622, "learning_rate": 8.251879438105854e-05, "loss": 0.3193, "step": 64},
        {"epoch": 0.3182374541003672, "grad_norm": 0.06212422996759415, "learning_rate": 8.189627706921877e-05, "loss": 0.3383, "step": 65},
        {"epoch": 0.32313341493268055, "grad_norm": 0.1618916094303131, "learning_rate": 8.126530861276677e-05, "loss": 0.5653, "step": 66},
        {"epoch": 0.32802937576499386, "grad_norm": 0.06333769857883453, "learning_rate": 8.062605619126584e-05, "loss": 0.3395, "step": 67},
        {"epoch": 0.3329253365973072, "grad_norm": 0.16169676184654236, "learning_rate": 7.997868917917453e-05, "loss": 0.7753, "step": 68},
        {"epoch": 0.3378212974296206, "grad_norm": 0.06635843217372894, "learning_rate": 7.932337910096961e-05, "loss": 0.3176, "step": 69},
        {"epoch": 0.3427172582619339, "grad_norm": 0.16557282209396362, "learning_rate": 7.866029958569956e-05, "loss": 0.4603, "step": 70},
        {"epoch": 0.34761321909424725, "grad_norm": 0.08522031456232071, "learning_rate": 7.798962632098024e-05, "loss": 0.5636, "step": 71},
        {"epoch": 0.3525091799265606, "grad_norm": 0.05583783611655235, "learning_rate": 7.73115370064452e-05, "loss": 0.2692, "step": 72},
        {"epoch": 0.3574051407588739, "grad_norm": 0.08181650191545486, "learning_rate": 7.6626211306663e-05, "loss": 0.6908, "step": 73},
        {"epoch": 0.3623011015911873, "grad_norm": 0.07728707045316696, "learning_rate": 7.59338308035337e-05, "loss": 0.4046, "step": 74},
        {"epoch": 0.3671970624235006, "grad_norm": 0.09214276075363159, "learning_rate": 7.523457894817745e-05, "loss": 0.816, "step": 75},
        {"epoch": 0.37209302325581395, "grad_norm": 0.07536876946687698, "learning_rate": 7.452864101232798e-05, "loss": 0.334, "step": 76},
        {"epoch": 0.3769889840881273, "grad_norm": 0.09890823811292648, "learning_rate": 7.381620403924333e-05, "loss": 0.4769, "step": 77},
        {"epoch": 0.3818849449204406, "grad_norm": 0.09237520396709442, "learning_rate": 7.30974567941475e-05, "loss": 0.4149, "step": 78},
        {"epoch": 0.386780905752754, "grad_norm": 0.14629222452640533, "learning_rate": 7.237258971421587e-05, "loss": 0.9113, "step": 79},
        {"epoch": 0.39167686658506734, "grad_norm": 0.0960475504398346, "learning_rate": 7.164179485811727e-05, "loss": 0.4643, "step": 80},
        {"epoch": 0.39657282741738065, "grad_norm": 0.07956342399120331, "learning_rate": 7.090526585512696e-05, "loss": 0.3902, "step": 81},
        {"epoch": 0.401468788249694, "grad_norm": 0.08580750226974487, "learning_rate": 7.016319785382296e-05, "loss": 0.5235, "step": 82},
        {"epoch": 0.40636474908200737, "grad_norm": 0.12661615014076233, "learning_rate": 6.941578747038023e-05, "loss": 0.4481, "step": 83},
        {"epoch": 0.4112607099143207, "grad_norm": 0.0649070218205452, "learning_rate": 6.866323273647563e-05, "loss": 0.4189, "step": 84},
        {"epoch": 0.41615667074663404, "grad_norm": 0.1020541861653328, "learning_rate": 6.79057330468182e-05, "loss": 0.4533, "step": 85},
        {"epoch": 0.42105263157894735, "grad_norm": 0.10127013921737671, "learning_rate": 6.7143489106318e-05, "loss": 0.522, "step": 86},
        {"epoch": 0.4259485924112607, "grad_norm": 0.14281871914863586, "learning_rate": 6.637670287690799e-05, "loss": 0.4772, "step": 87},
        {"epoch": 0.43084455324357407, "grad_norm": 0.065117247402668, "learning_rate": 6.560557752403277e-05, "loss": 0.5043, "step": 88},
        {"epoch": 0.4357405140758874, "grad_norm": 0.12292537093162537, "learning_rate": 6.483031736281843e-05, "loss": 0.4375, "step": 89},
        {"epoch": 0.44063647490820074, "grad_norm": 0.06215154007077217, "learning_rate": 6.40511278039378
"learning_rate": 6.40511278039378e-05,
|
| 638 |
+
"loss": 0.3519,
|
| 639 |
+
"step": 90
|
| 640 |
+
},
|
| 641 |
+
{
|
| 642 |
+
"epoch": 0.4455324357405141,
|
| 643 |
+
"grad_norm": 0.070677250623703,
|
| 644 |
+
"learning_rate": 6.326821529918553e-05,
|
| 645 |
+
"loss": 0.3407,
|
| 646 |
+
"step": 91
|
| 647 |
+
},
|
| 648 |
+
{
|
| 649 |
+
"epoch": 0.4504283965728274,
|
| 650 |
+
"grad_norm": 0.06239693611860275,
|
| 651 |
+
"learning_rate": 6.248178728677711e-05,
|
| 652 |
+
"loss": 0.3799,
|
| 653 |
+
"step": 92
|
| 654 |
+
},
|
| 655 |
+
{
|
| 656 |
+
"epoch": 0.45532435740514077,
|
| 657 |
+
"grad_norm": 0.07950470596551895,
|
| 658 |
+
"learning_rate": 6.16920521363867e-05,
|
| 659 |
+
"loss": 0.3063,
|
| 660 |
+
"step": 93
|
| 661 |
+
},
|
| 662 |
+
{
|
| 663 |
+
"epoch": 0.4602203182374541,
|
| 664 |
+
"grad_norm": 0.06621692329645157,
|
| 665 |
+
"learning_rate": 6.089921909393812e-05,
|
| 666 |
+
"loss": 0.4386,
|
| 667 |
+
"step": 94
|
| 668 |
+
},
|
| 669 |
+
{
|
| 670 |
+
"epoch": 0.46511627906976744,
|
| 671 |
+
"grad_norm": 0.3681151270866394,
|
| 672 |
+
"learning_rate": 6.0103498226163603e-05,
|
| 673 |
+
"loss": 0.5919,
|
| 674 |
+
"step": 95
|
| 675 |
+
},
|
| 676 |
+
{
|
| 677 |
+
"epoch": 0.4700122399020808,
|
| 678 |
+
"grad_norm": 0.20958742499351501,
|
| 679 |
+
"learning_rate": 5.93051003649452e-05,
|
| 680 |
+
"loss": 0.4636,
|
| 681 |
+
"step": 96
|
| 682 |
+
},
|
| 683 |
+
{
|
| 684 |
+
"epoch": 0.4749082007343941,
|
| 685 |
+
"grad_norm": 0.09456545114517212,
|
| 686 |
+
"learning_rate": 5.850423705145334e-05,
|
| 687 |
+
"loss": 0.4564,
|
| 688 |
+
"step": 97
|
| 689 |
+
},
|
| 690 |
+
{
|
| 691 |
+
"epoch": 0.47980416156670747,
|
| 692 |
+
"grad_norm": 0.06202203407883644,
|
| 693 |
+
"learning_rate": 5.770112048009747e-05,
|
| 694 |
+
"loss": 0.3652,
|
| 695 |
+
"step": 98
|
| 696 |
+
},
|
| 697 |
+
{
|
| 698 |
+
"epoch": 0.4847001223990208,
|
| 699 |
+
"grad_norm": 0.11682210117578506,
|
| 700 |
+
"learning_rate": 5.68959634423037e-05,
|
| 701 |
+
"loss": 0.5179,
|
| 702 |
+
"step": 99
|
| 703 |
+
},
|
| 704 |
+
{
|
| 705 |
+
"epoch": 0.48959608323133413,
|
| 706 |
+
"grad_norm": 0.11552639305591583,
|
| 707 |
+
"learning_rate": 5.60889792701342e-05,
|
| 708 |
+
"loss": 0.467,
|
| 709 |
+
"step": 100
|
| 710 |
+
},
|
| 711 |
+
{
|
| 712 |
+
"epoch": 0.4944920440636475,
|
| 713 |
+
"grad_norm": 0.053526621311903,
|
| 714 |
+
"learning_rate": 5.52803817797633e-05,
|
| 715 |
+
"loss": 0.2341,
|
| 716 |
+
"step": 101
|
| 717 |
+
},
|
| 718 |
+
{
|
| 719 |
+
"epoch": 0.49938800489596086,
|
| 720 |
+
"grad_norm": 0.11124971508979797,
|
| 721 |
+
"learning_rate": 5.4470385214825416e-05,
|
| 722 |
+
"loss": 0.5641,
|
| 723 |
+
"step": 102
|
| 724 |
+
},
|
| 725 |
+
{
|
| 726 |
+
"epoch": 0.5042839657282742,
|
| 727 |
+
"grad_norm": 0.1113283783197403,
|
| 728 |
+
"learning_rate": 5.365920418964973e-05,
|
| 729 |
+
"loss": 0.438,
|
| 730 |
+
"step": 103
|
| 731 |
+
},
|
| 732 |
+
{
|
| 733 |
+
"epoch": 0.5091799265605875,
|
| 734 |
+
"grad_norm": 0.0814870297908783,
|
| 735 |
+
"learning_rate": 5.28470536323965e-05,
|
| 736 |
+
"loss": 0.4562,
|
| 737 |
+
"step": 104
|
| 738 |
+
},
|
| 739 |
+
{
|
| 740 |
+
"epoch": 0.5140758873929009,
|
| 741 |
+
"grad_norm": 0.09781132638454437,
|
| 742 |
+
"learning_rate": 5.2034148728110424e-05,
|
| 743 |
+
"loss": 0.5587,
|
| 744 |
+
"step": 105
|
| 745 |
+
},
|
| 746 |
+
{
|
| 747 |
+
"epoch": 0.5189718482252142,
|
| 748 |
+
"grad_norm": 0.11908842623233795,
|
| 749 |
+
"learning_rate": 5.1220704861705774e-05,
|
| 750 |
+
"loss": 0.7554,
|
| 751 |
+
"step": 106
|
| 752 |
+
},
|
| 753 |
+
{
|
| 754 |
+
"epoch": 0.5238678090575275,
|
| 755 |
+
"grad_norm": 0.06143520399928093,
|
| 756 |
+
"learning_rate": 5.0406937560898646e-05,
|
| 757 |
+
"loss": 0.2544,
|
| 758 |
+
"step": 107
|
| 759 |
+
},
|
| 760 |
+
{
|
| 761 |
+
"epoch": 0.5287637698898409,
|
| 762 |
+
"grad_norm": 0.08653257042169571,
|
| 763 |
+
"learning_rate": 4.9593062439101365e-05,
|
| 764 |
+
"loss": 0.5423,
|
| 765 |
+
"step": 108
|
| 766 |
+
},
|
| 767 |
+
{
|
| 768 |
+
"epoch": 0.5336597307221542,
|
| 769 |
+
"grad_norm": 0.07373999804258347,
|
| 770 |
+
"learning_rate": 4.877929513829424e-05,
|
| 771 |
+
"loss": 0.3577,
|
| 772 |
+
"step": 109
|
| 773 |
+
},
|
| 774 |
+
{
|
| 775 |
+
"epoch": 0.5385556915544676,
|
| 776 |
+
"grad_norm": 0.2530810236930847,
|
| 777 |
+
"learning_rate": 4.796585127188958e-05,
|
| 778 |
+
"loss": 0.454,
|
| 779 |
+
"step": 110
|
| 780 |
+
},
|
| 781 |
+
{
|
| 782 |
+
"epoch": 0.543451652386781,
|
| 783 |
+
"grad_norm": 0.0999133363366127,
|
| 784 |
+
"learning_rate": 4.715294636760352e-05,
|
| 785 |
+
"loss": 0.4601,
|
| 786 |
+
"step": 111
|
| 787 |
+
},
|
| 788 |
+
{
|
| 789 |
+
"epoch": 0.5483476132190942,
|
| 790 |
+
"grad_norm": 0.07089534401893616,
|
| 791 |
+
"learning_rate": 4.634079581035029e-05,
|
| 792 |
+
"loss": 0.2958,
|
| 793 |
+
"step": 112
|
| 794 |
+
},
|
| 795 |
+
{
|
| 796 |
+
"epoch": 0.5532435740514076,
|
| 797 |
+
"grad_norm": 0.15452376008033752,
|
| 798 |
+
"learning_rate": 4.55296147851746e-05,
|
| 799 |
+
"loss": 0.5048,
|
| 800 |
+
"step": 113
|
| 801 |
+
},
|
| 802 |
+
{
|
| 803 |
+
"epoch": 0.5581395348837209,
|
| 804 |
+
"grad_norm": 0.12181607633829117,
|
| 805 |
+
"learning_rate": 4.471961822023671e-05,
|
| 806 |
+
"loss": 0.5176,
|
| 807 |
+
"step": 114
|
| 808 |
+
},
|
| 809 |
+
{
|
| 810 |
+
"epoch": 0.5630354957160343,
|
| 811 |
+
"grad_norm": 0.07680846750736237,
|
| 812 |
+
"learning_rate": 4.391102072986581e-05,
|
| 813 |
+
"loss": 0.2968,
|
| 814 |
+
"step": 115
|
| 815 |
+
},
|
| 816 |
+
{
|
| 817 |
+
"epoch": 0.5679314565483476,
|
| 818 |
+
"grad_norm": 0.071733757853508,
|
| 819 |
+
"learning_rate": 4.3104036557696295e-05,
|
| 820 |
+
"loss": 0.6076,
|
| 821 |
+
"step": 116
|
| 822 |
+
},
|
| 823 |
+
{
|
| 824 |
+
"epoch": 0.572827417380661,
|
| 825 |
+
"grad_norm": 0.07903078943490982,
|
| 826 |
+
"learning_rate": 4.229887951990255e-05,
|
| 827 |
+
"loss": 0.5459,
|
| 828 |
+
"step": 117
|
| 829 |
+
},
|
| 830 |
+
{
|
| 831 |
+
"epoch": 0.5777233782129743,
|
| 832 |
+
"grad_norm": 0.09677067399024963,
|
| 833 |
+
"learning_rate": 4.149576294854668e-05,
|
| 834 |
+
"loss": 0.44,
|
| 835 |
+
"step": 118
|
| 836 |
+
},
|
| 837 |
+
{
|
| 838 |
+
"epoch": 0.5826193390452876,
|
| 839 |
+
"grad_norm": 0.09369610249996185,
|
| 840 |
+
"learning_rate": 4.069489963505482e-05,
|
| 841 |
+
"loss": 0.6845,
|
| 842 |
+
"step": 119
|
| 843 |
+
},
|
| 844 |
+
{
|
| 845 |
+
"epoch": 0.587515299877601,
|
| 846 |
+
"grad_norm": 0.06507845222949982,
|
| 847 |
+
"learning_rate": 3.98965017738364e-05,
|
| 848 |
+
"loss": 0.3054,
|
| 849 |
+
"step": 120
|
| 850 |
+
},
|
| 851 |
+
{
|
| 852 |
+
"epoch": 0.5924112607099143,
|
| 853 |
+
"grad_norm": 0.1210411936044693,
|
| 854 |
+
"learning_rate": 3.9100780906061896e-05,
|
| 855 |
+
"loss": 0.4176,
|
| 856 |
+
"step": 121
|
| 857 |
+
},
|
| 858 |
+
{
|
| 859 |
+
"epoch": 0.5973072215422277,
|
| 860 |
+
"grad_norm": 0.07214700430631638,
|
| 861 |
+
"learning_rate": 3.83079478636133e-05,
|
| 862 |
+
"loss": 0.4256,
|
| 863 |
+
"step": 122
|
| 864 |
+
},
|
| 865 |
+
{
|
| 866 |
+
"epoch": 0.602203182374541,
|
| 867 |
+
"grad_norm": 0.07553218305110931,
|
| 868 |
+
"learning_rate": 3.7518212713222906e-05,
|
| 869 |
+
"loss": 0.5853,
|
| 870 |
+
"step": 123
|
| 871 |
+
},
|
| 872 |
+
{
|
| 873 |
+
"epoch": 0.6070991432068543,
|
| 874 |
+
"grad_norm": 0.08783773332834244,
|
| 875 |
+
"learning_rate": 3.673178470081448e-05,
|
| 876 |
+
"loss": 0.5636,
|
| 877 |
+
"step": 124
|
| 878 |
+
},
|
| 879 |
+
{
|
| 880 |
+
"epoch": 0.6119951040391677,
|
| 881 |
+
"grad_norm": 0.07783231139183044,
|
| 882 |
+
"learning_rate": 3.594887219606221e-05,
|
| 883 |
+
"loss": 0.6704,
|
| 884 |
+
"step": 125
|
| 885 |
+
},
|
| 886 |
+
{
|
| 887 |
+
"epoch": 0.616891064871481,
|
| 888 |
+
"grad_norm": 0.14530447125434875,
|
| 889 |
+
"learning_rate": 3.516968263718159e-05,
|
| 890 |
+
"loss": 0.4881,
|
| 891 |
+
"step": 126
|
| 892 |
+
},
|
| 893 |
+
{
|
| 894 |
+
"epoch": 0.6217870257037944,
|
| 895 |
+
"grad_norm": 0.23401491343975067,
|
| 896 |
+
"learning_rate": 3.439442247596724e-05,
|
| 897 |
+
"loss": 0.4918,
|
| 898 |
+
"step": 127
|
| 899 |
+
},
|
| 900 |
+
{
|
| 901 |
+
"epoch": 0.6266829865361077,
|
| 902 |
+
"grad_norm": 0.07801464200019836,
|
| 903 |
+
"learning_rate": 3.3623297123092006e-05,
|
| 904 |
+
"loss": 0.5203,
|
| 905 |
+
"step": 128
|
| 906 |
+
},
|
| 907 |
+
{
|
| 908 |
+
"epoch": 0.631578947368421,
|
| 909 |
+
"grad_norm": 0.1613989621400833,
|
| 910 |
+
"learning_rate": 3.285651089368202e-05,
|
| 911 |
+
"loss": 0.7026,
|
| 912 |
+
"step": 129
|
| 913 |
+
},
|
| 914 |
+
{
|
| 915 |
+
"epoch": 0.6364749082007344,
|
| 916 |
+
"grad_norm": 0.12846659123897552,
|
| 917 |
+
"learning_rate": 3.209426695318182e-05,
|
| 918 |
+
"loss": 0.4857,
|
| 919 |
+
"step": 130
|
| 920 |
+
},
|
| 921 |
+
{
|
| 922 |
+
"epoch": 0.6413708690330477,
|
| 923 |
+
"grad_norm": 0.11735550314188004,
|
| 924 |
+
"learning_rate": 3.133676726352438e-05,
|
| 925 |
+
"loss": 0.542,
|
| 926 |
+
"step": 131
|
| 927 |
+
},
|
| 928 |
+
{
|
| 929 |
+
"epoch": 0.6462668298653611,
|
| 930 |
+
"grad_norm": 0.09229713678359985,
|
| 931 |
+
"learning_rate": 3.0584212529619775e-05,
|
| 932 |
+
"loss": 0.4807,
|
| 933 |
+
"step": 132
|
| 934 |
+
},
|
| 935 |
+
{
|
| 936 |
+
"epoch": 0.6511627906976745,
|
| 937 |
+
"grad_norm": 0.1247001513838768,
|
| 938 |
+
"learning_rate": 2.9836802146177034e-05,
|
| 939 |
+
"loss": 0.5123,
|
| 940 |
+
"step": 133
|
| 941 |
+
},
|
| 942 |
+
{
|
| 943 |
+
"epoch": 0.6560587515299877,
|
| 944 |
+
"grad_norm": 0.12723222374916077,
|
| 945 |
+
"learning_rate": 2.9094734144873036e-05,
|
| 946 |
+
"loss": 0.7619,
|
| 947 |
+
"step": 134
|
| 948 |
+
},
|
| 949 |
+
{
|
| 950 |
+
"epoch": 0.6609547123623011,
|
| 951 |
+
"grad_norm": 0.06785371154546738,
|
| 952 |
+
"learning_rate": 2.835820514188273e-05,
|
| 953 |
+
"loss": 0.488,
|
| 954 |
+
"step": 135
|
| 955 |
+
},
|
| 956 |
+
{
|
| 957 |
+
"epoch": 0.6658506731946144,
|
| 958 |
+
"grad_norm": 0.07913102209568024,
|
| 959 |
+
"learning_rate": 2.7627410285784163e-05,
|
| 960 |
+
"loss": 0.5913,
|
| 961 |
+
"step": 136
|
| 962 |
+
},
|
| 963 |
+
{
|
| 964 |
+
"epoch": 0.6707466340269278,
|
| 965 |
+
"grad_norm": 0.10038694739341736,
|
| 966 |
+
"learning_rate": 2.6902543205852492e-05,
|
| 967 |
+
"loss": 0.6774,
|
| 968 |
+
"step": 137
|
| 969 |
+
},
|
| 970 |
+
{
|
| 971 |
+
"epoch": 0.6756425948592412,
|
| 972 |
+
"grad_norm": 0.06108963489532471,
|
| 973 |
+
"learning_rate": 2.618379596075668e-05,
|
| 974 |
+
"loss": 0.304,
|
| 975 |
+
"step": 138
|
| 976 |
+
},
|
| 977 |
+
{
|
| 978 |
+
"epoch": 0.6805385556915544,
|
| 979 |
+
"grad_norm": 0.06344286352396011,
|
| 980 |
+
"learning_rate": 2.5471358987672017e-05,
|
| 981 |
+
"loss": 0.394,
|
| 982 |
+
"step": 139
|
| 983 |
+
},
|
| 984 |
+
{
|
| 985 |
+
"epoch": 0.6854345165238678,
|
| 986 |
+
"grad_norm": 0.1912202686071396,
|
| 987 |
+
"learning_rate": 2.476542105182254e-05,
|
| 988 |
+
"loss": 0.4608,
|
| 989 |
+
"step": 140
|
| 990 |
+
},
|
| 991 |
+
{
|
| 992 |
+
"epoch": 0.6903304773561811,
|
| 993 |
+
"grad_norm": 0.0918666198849678,
|
| 994 |
+
"learning_rate": 2.4066169196466326e-05,
|
| 995 |
+
"loss": 0.7695,
|
| 996 |
+
"step": 141
|
| 997 |
+
},
|
| 998 |
+
{
|
| 999 |
+
"epoch": 0.6952264381884945,
|
| 1000 |
+
"grad_norm": 0.09997723251581192,
|
| 1001 |
+
"learning_rate": 2.3373788693337024e-05,
|
| 1002 |
+
"loss": 0.5197,
|
| 1003 |
+
"step": 142
|
| 1004 |
+
},
|
| 1005 |
+
{
|
| 1006 |
+
"epoch": 0.7001223990208079,
|
| 1007 |
+
"grad_norm": 0.08384369313716888,
|
| 1008 |
+
"learning_rate": 2.268846299355481e-05,
|
| 1009 |
+
"loss": 0.4842,
|
| 1010 |
+
"step": 143
|
| 1011 |
+
},
|
| 1012 |
+
{
|
| 1013 |
+
"epoch": 0.7050183598531212,
|
| 1014 |
+
"grad_norm": 0.14654509723186493,
|
| 1015 |
+
"learning_rate": 2.2010373679019776e-05,
|
| 1016 |
+
"loss": 0.4083,
|
| 1017 |
+
"step": 144
|
| 1018 |
+
},
|
| 1019 |
+
{
|
| 1020 |
+
"epoch": 0.7099143206854345,
|
| 1021 |
+
"grad_norm": 0.23128315806388855,
|
| 1022 |
+
"learning_rate": 2.133970041430044e-05,
|
| 1023 |
+
"loss": 0.6992,
|
| 1024 |
+
"step": 145
|
| 1025 |
+
},
|
| 1026 |
+
{
|
| 1027 |
+
"epoch": 0.7148102815177478,
|
| 1028 |
+
"grad_norm": 0.09588706493377686,
|
| 1029 |
+
"learning_rate": 2.067662089903039e-05,
|
| 1030 |
+
"loss": 0.5621,
|
| 1031 |
+
"step": 146
|
| 1032 |
+
},
|
| 1033 |
+
{
|
| 1034 |
+
"epoch": 0.7197062423500612,
|
| 1035 |
+
"grad_norm": 0.10101126879453659,
|
| 1036 |
+
"learning_rate": 2.002131082082549e-05,
|
| 1037 |
+
"loss": 0.4135,
|
| 1038 |
+
"step": 147
|
| 1039 |
+
},
|
| 1040 |
+
{
|
| 1041 |
+
"epoch": 0.7246022031823746,
|
| 1042 |
+
"grad_norm": 0.15346215665340424,
|
| 1043 |
+
"learning_rate": 1.937394380873418e-05,
|
| 1044 |
+
"loss": 0.5933,
|
| 1045 |
+
"step": 148
|
| 1046 |
+
},
|
| 1047 |
+
{
|
| 1048 |
+
"epoch": 0.7294981640146879,
|
| 1049 |
+
"grad_norm": 0.07918453961610794,
|
| 1050 |
+
"learning_rate": 1.873469138723325e-05,
|
| 1051 |
+
"loss": 0.3081,
|
| 1052 |
+
"step": 149
|
| 1053 |
+
},
|
| 1054 |
+
{
|
| 1055 |
+
"epoch": 0.7343941248470012,
|
| 1056 |
+
"grad_norm": 0.0649547427892685,
|
| 1057 |
+
"learning_rate": 1.8103722930781247e-05,
|
| 1058 |
+
"loss": 0.407,
|
| 1059 |
+
"step": 150
|
| 1060 |
+
},
|
| 1061 |
+
{
|
| 1062 |
+
"epoch": 0.7392900856793145,
|
| 1063 |
+
"grad_norm": 0.07419539242982864,
|
| 1064 |
+
"learning_rate": 1.748120561894147e-05,
|
| 1065 |
+
"loss": 0.7313,
|
| 1066 |
+
"step": 151
|
| 1067 |
+
},
|
| 1068 |
+
{
|
| 1069 |
+
"epoch": 0.7441860465116279,
|
| 1070 |
+
"grad_norm": 0.09094121307134628,
|
| 1071 |
+
"learning_rate": 1.6867304392086575e-05,
|
| 1072 |
+
"loss": 0.3871,
|
| 1073 |
+
"step": 152
|
| 1074 |
+
},
|
| 1075 |
+
{
|
| 1076 |
+
"epoch": 0.7490820073439413,
|
| 1077 |
+
"grad_norm": 0.10387779027223587,
|
| 1078 |
+
"learning_rate": 1.6262181907696454e-05,
|
| 1079 |
+
"loss": 0.5026,
|
| 1080 |
+
"step": 153
|
| 1081 |
+
},
|
| 1082 |
+
{
|
| 1083 |
+
"epoch": 0.7539779681762546,
|
| 1084 |
+
"grad_norm": 0.22015878558158875,
|
| 1085 |
+
"learning_rate": 1.5665998497260958e-05,
|
| 1086 |
+
"loss": 0.4817,
|
| 1087 |
+
"step": 154
|
| 1088 |
+
},
|
| 1089 |
+
{
|
| 1090 |
+
"epoch": 0.758873929008568,
|
| 1091 |
+
"grad_norm": 0.06719034910202026,
|
| 1092 |
+
"learning_rate": 1.5078912123798961e-05,
|
| 1093 |
+
"loss": 0.5186,
|
| 1094 |
+
"step": 155
|
| 1095 |
+
},
|
| 1096 |
+
{
|
| 1097 |
+
"epoch": 0.7637698898408812,
|
| 1098 |
+
"grad_norm": 0.07599823921918869,
|
| 1099 |
+
"learning_rate": 1.4501078340004953e-05,
|
| 1100 |
+
"loss": 0.4275,
|
| 1101 |
+
"step": 156
|
| 1102 |
+
},
|
| 1103 |
+
{
|
| 1104 |
+
"epoch": 0.7686658506731946,
|
| 1105 |
+
"grad_norm": 0.1412460058927536,
|
| 1106 |
+
"learning_rate": 1.3932650247034218e-05,
|
| 1107 |
+
"loss": 0.626,
|
| 1108 |
+
"step": 157
|
| 1109 |
+
},
|
| 1110 |
+
{
|
| 1111 |
+
"epoch": 0.773561811505508,
|
| 1112 |
+
"grad_norm": 0.1420402228832245,
|
| 1113 |
+
"learning_rate": 1.337377845393763e-05,
|
| 1114 |
+
"loss": 0.5193,
|
| 1115 |
+
"step": 158
|
| 1116 |
+
},
|
| 1117 |
+
{
|
| 1118 |
+
"epoch": 0.7784577723378213,
|
| 1119 |
+
"grad_norm": 0.09665469825267792,
|
| 1120 |
+
"learning_rate": 1.2824611037756684e-05,
|
| 1121 |
+
"loss": 0.5848,
|
| 1122 |
+
"step": 159
|
| 1123 |
+
},
|
| 1124 |
+
{
|
| 1125 |
+
"epoch": 0.7833537331701347,
|
| 1126 |
+
"grad_norm": 0.06045060604810715,
|
| 1127 |
+
"learning_rate": 1.2285293504289447e-05,
|
| 1128 |
+
"loss": 0.2627,
|
| 1129 |
+
"step": 160
|
| 1130 |
+
},
|
| 1131 |
+
{
|
| 1132 |
+
"epoch": 0.7882496940024479,
|
| 1133 |
+
"grad_norm": 0.08824898302555084,
|
| 1134 |
+
"learning_rate": 1.1755968749537754e-05,
|
| 1135 |
+
"loss": 0.5949,
|
| 1136 |
+
"step": 161
|
| 1137 |
+
},
|
| 1138 |
+
{
|
| 1139 |
+
"epoch": 0.7931456548347613,
|
| 1140 |
+
"grad_norm": 0.13809214532375336,
|
| 1141 |
+
"learning_rate": 1.1236777021845956e-05,
|
| 1142 |
+
"loss": 0.7397,
|
| 1143 |
+
"step": 162
|
| 1144 |
+
},
|
| 1145 |
+
{
|
| 1146 |
+
"epoch": 0.7980416156670747,
|
| 1147 |
+
"grad_norm": 0.12224618345499039,
|
| 1148 |
+
"learning_rate": 1.0727855884741056e-05,
|
| 1149 |
+
"loss": 0.553,
|
| 1150 |
+
"step": 163
|
| 1151 |
+
},
|
| 1152 |
+
{
|
| 1153 |
+
"epoch": 0.802937576499388,
|
| 1154 |
+
"grad_norm": 0.07628399133682251,
|
| 1155 |
+
"learning_rate": 1.022934018048432e-05,
|
| 1156 |
+
"loss": 0.3447,
|
| 1157 |
+
"step": 164
|
| 1158 |
+
},
|
| 1159 |
+
{
|
| 1160 |
+
"epoch": 0.8078335373317014,
|
| 1161 |
+
"grad_norm": 0.06734273582696915,
|
| 1162 |
+
"learning_rate": 9.741361994343867e-06,
|
| 1163 |
+
"loss": 0.7473,
|
| 1164 |
+
"step": 165
|
| 1165 |
+
},
|
| 1166 |
+
{
|
| 1167 |
+
"epoch": 0.8127294981640147,
|
| 1168 |
+
"grad_norm": 0.0891406238079071,
|
| 1169 |
+
"learning_rate": 9.264050619597697e-06,
|
| 1170 |
+
"loss": 0.5008,
|
| 1171 |
+
"step": 166
|
| 1172 |
+
},
|
| 1173 |
+
{
|
| 1174 |
+
"epoch": 0.817625458996328,
|
| 1175 |
+
"grad_norm": 0.08535400032997131,
|
| 1176 |
+
"learning_rate": 8.797532523276542e-06,
|
| 1177 |
+
"loss": 0.4171,
|
| 1178 |
+
"step": 167
|
| 1179 |
+
},
|
| 1180 |
+
{
|
| 1181 |
+
"epoch": 0.8225214198286414,
|
| 1182 |
+
"grad_norm": 0.3191221058368683,
|
| 1183 |
+
"learning_rate": 8.341931312655582e-06,
|
| 1184 |
+
"loss": 0.7633,
|
| 1185 |
+
"step": 168
|
| 1186 |
+
},
|
| 1187 |
+
{
|
| 1188 |
+
"epoch": 0.8274173806609547,
|
| 1189 |
+
"grad_norm": 0.11288396269083023,
|
| 1190 |
+
"learning_rate": 7.897367702503756e-06,
|
| 1191 |
+
"loss": 0.4054,
|
| 1192 |
+
"step": 169
|
| 1193 |
+
},
|
| 1194 |
+
{
|
| 1195 |
+
"epoch": 0.8323133414932681,
|
| 1196 |
+
"grad_norm": 0.09110561013221741,
|
| 1197 |
+
"learning_rate": 7.463959483099547e-06,
|
| 1198 |
+
"loss": 0.4642,
|
| 1199 |
+
"step": 170
|
| 1200 |
+
},
|
| 1201 |
+
{
|
| 1202 |
+
"epoch": 0.8372093023255814,
|
| 1203 |
+
"grad_norm": 0.08345432579517365,
|
| 1204 |
+
"learning_rate": 7.041821489021639e-06,
|
| 1205 |
+
"loss": 0.3769,
|
| 1206 |
+
"step": 171
|
| 1207 |
+
},
|
| 1208 |
+
{
|
| 1209 |
+
"epoch": 0.8421052631578947,
|
| 1210 |
+
"grad_norm": 0.19082055985927582,
|
| 1211 |
+
"learning_rate": 6.631065568722633e-06,
|
| 1212 |
+
"loss": 0.4487,
|
| 1213 |
+
"step": 172
|
| 1214 |
+
},
|
| 1215 |
+
{
|
| 1216 |
+
"epoch": 0.847001223990208,
|
| 1217 |
+
"grad_norm": 0.06951060146093369,
|
| 1218 |
+
"learning_rate": 6.231800554894029e-06,
|
| 1219 |
+
"loss": 0.2915,
|
| 1220 |
+
"step": 173
|
| 1221 |
+
},
|
| 1222 |
+
{
|
| 1223 |
+
"epoch": 0.8518971848225214,
|
| 1224 |
+
"grad_norm": 0.08803869783878326,
|
| 1225 |
+
"learning_rate": 5.844132235630273e-06,
|
| 1226 |
+
"loss": 0.5065,
|
| 1227 |
+
"step": 174
|
| 1228 |
+
},
|
| 1229 |
+
{
|
| 1230 |
+
"epoch": 0.8567931456548348,
|
| 1231 |
+
"grad_norm": 0.06576603651046753,
|
| 1232 |
+
"learning_rate": 5.468163326399389e-06,
|
| 1233 |
+
"loss": 0.4307,
|
| 1234 |
+
"step": 175
|
| 1235 |
+
},
|
| 1236 |
+
{
|
| 1237 |
+
"epoch": 0.8616891064871481,
|
| 1238 |
+
"grad_norm": 0.11544258892536163,
|
| 1239 |
+
"learning_rate": 5.103993442827831e-06,
|
| 1240 |
+
"loss": 0.6356,
|
| 1241 |
+
"step": 176
|
| 1242 |
+
},
|
| 1243 |
+
{
|
| 1244 |
+
"epoch": 0.8665850673194615,
|
| 1245 |
+
"grad_norm": 0.08324428647756577,
|
| 1246 |
+
"learning_rate": 4.751719074306604e-06,
|
| 1247 |
+
"loss": 0.5426,
|
| 1248 |
+
"step": 177
|
| 1249 |
+
},
|
| 1250 |
+
{
|
| 1251 |
+
"epoch": 0.8714810281517748,
|
| 1252 |
+
"grad_norm": 0.0815030187368393,
|
| 1253 |
+
"learning_rate": 4.411433558425698e-06,
|
| 1254 |
+
"loss": 0.4278,
|
| 1255 |
+
"step": 178
|
| 1256 |
+
},
|
| 1257 |
+
{
|
| 1258 |
+
"epoch": 0.8763769889840881,
|
| 1259 |
+
"grad_norm": 0.06282901763916016,
|
| 1260 |
+
"learning_rate": 4.083227056243644e-06,
|
| 1261 |
+
"loss": 0.2847,
|
| 1262 |
+
"step": 179
|
| 1263 |
+
},
|
| 1264 |
+
{
|
| 1265 |
+
"epoch": 0.8812729498164015,
|
| 1266 |
+
"grad_norm": 0.07095268368721008,
|
| 1267 |
+
"learning_rate": 3.767186528398725e-06,
|
| 1268 |
+
"loss": 0.3923,
|
| 1269 |
+
"step": 180
|
| 1270 |
+
},
|
| 1271 |
+
{
|
| 1272 |
+
"epoch": 0.8861689106487148,
|
| 1273 |
+
"grad_norm": 0.09169545769691467,
|
| 1274 |
+
"learning_rate": 3.4633957120681293e-06,
|
| 1275 |
+
"loss": 0.5155,
|
| 1276 |
+
"step": 181
|
| 1277 |
+
},
|
| 1278 |
+
{
|
| 1279 |
+
"epoch": 0.8910648714810282,
|
| 1280 |
+
"grad_norm": 0.24409419298171997,
|
| 1281 |
+
"learning_rate": 3.1719350987811534e-06,
|
| 1282 |
+
"loss": 0.5456,
|
| 1283 |
+
"step": 182
|
| 1284 |
+
},
|
| 1285 |
+
{
|
| 1286 |
+
"epoch": 0.8959608323133414,
|
| 1287 |
+
"grad_norm": 0.08426636457443237,
|
| 1288 |
+
"learning_rate": 2.8928819130924657e-06,
|
| 1289 |
+
"loss": 0.5679,
|
| 1290 |
+
"step": 183
|
| 1291 |
+
},
|
| 1292 |
+
{
|
| 1293 |
+
"epoch": 0.9008567931456548,
|
| 1294 |
+
"grad_norm": 0.23968654870986938,
|
| 1295 |
+
"learning_rate": 2.6263100921208482e-06,
|
| 1296 |
+
"loss": 0.4841,
|
| 1297 |
+
"step": 184
|
| 1298 |
+
},
|
| 1299 |
+
{
|
| 1300 |
+
"epoch": 0.9057527539779682,
|
| 1301 |
+
"grad_norm": 0.10230088233947754,
|
| 1302 |
+
"learning_rate": 2.372290265959065e-06,
|
| 1303 |
+
"loss": 0.6307,
|
| 1304 |
+
"step": 185
|
| 1305 |
+
},
|
| 1306 |
+
{
|
| 1307 |
+
"epoch": 0.9106487148102815,
|
| 1308 |
+
"grad_norm": 0.07012014836072922,
|
| 1309 |
+
"learning_rate": 2.130889738959946e-06,
|
| 1310 |
+
"loss": 0.489,
|
| 1311 |
+
"step": 186
|
| 1312 |
+
},
|
| 1313 |
+
{
|
| 1314 |
+
"epoch": 0.9155446756425949,
|
| 1315 |
+
"grad_norm": 0.0840025320649147,
|
| 1316 |
+
"learning_rate": 1.9021724719035628e-06,
|
| 1317 |
+
"loss": 0.3811,
|
| 1318 |
+
"step": 187
|
| 1319 |
+
},
|
| 1320 |
+
{
|
| 1321 |
+
"epoch": 0.9204406364749081,
|
| 1322 |
+
"grad_norm": 0.06925945729017258,
|
| 1323 |
+
"learning_rate": 1.6861990650504255e-06,
|
| 1324 |
+
"loss": 0.3263,
|
| 1325 |
+
"step": 188
|
| 1326 |
+
},
|
| 1327 |
+
{
|
| 1328 |
+
"epoch": 0.9253365973072215,
|
| 1329 |
+
"grad_norm": 0.07820533215999603,
|
| 1330 |
+
"learning_rate": 1.4830267420849585e-06,
|
| 1331 |
+
"loss": 0.5176,
|
| 1332 |
+
"step": 189
|
| 1333 |
+
},
|
| 1334 |
+
{
|
| 1335 |
+
"epoch": 0.9302325581395349,
|
| 1336 |
+
"grad_norm": 0.10654879361391068,
|
| 1337 |
+
"learning_rate": 1.292709334953729e-06,
|
| 1338 |
+
"loss": 0.5375,
|
| 1339 |
+
"step": 190
|
| 1340 |
+
},
|
| 1341 |
+
{
|
| 1342 |
+
"epoch": 0.9351285189718482,
|
| 1343 |
+
"grad_norm": 0.06305018812417984,
|
| 1344 |
+
"learning_rate": 1.1152972696022445e-06,
|
| 1345 |
+
"loss": 0.4505,
|
| 1346 |
+
"step": 191
|
| 1347 |
+
},
|
| 1348 |
+
{
|
| 1349 |
+
"epoch": 0.9400244798041616,
|
| 1350 |
+
"grad_norm": 0.08257196098566055,
|
| 1351 |
+
"learning_rate": 9.508375526142976e-07,
|
| 1352 |
+
"loss": 0.4773,
|
| 1353 |
+
"step": 192
|
| 1354 |
+
},
|
| 1355 |
+
{
|
| 1356 |
+
"epoch": 0.944920440636475,
|
| 1357 |
+
"grad_norm": 0.0973181426525116,
|
| 1358 |
+
"learning_rate": 7.993737587571826e-07,
|
| 1359 |
+
"loss": 0.4163,
|
| 1360 |
+
"step": 193
|
| 1361 |
+
},
|
| 1362 |
+
{
|
| 1363 |
+
"epoch": 0.9498164014687882,
|
| 1364 |
+
"grad_norm": 0.1023239865899086,
|
| 1365 |
+
"learning_rate": 6.609460194362927e-07,
|
| 1366 |
+
"loss": 0.4597,
|
| 1367 |
+
"step": 194
|
| 1368 |
+
},
|
| 1369 |
+
{
|
| 1370 |
+
"epoch": 0.9547123623011016,
|
| 1371 |
+
"grad_norm": 0.09319756925106049,
|
| 1372 |
+
"learning_rate": 5.355910120620034e-07,
|
| 1373 |
+
"loss": 0.5937,
|
| 1374 |
+
"step": 195
|
| 1375 |
+
},
|
| 1376 |
+
{
|
| 1377 |
+
"epoch": 0.9596083231334149,
|
| 1378 |
+
"grad_norm": 0.07661579549312592,
|
| 1379 |
+
"learning_rate": 4.233419503317182e-07,
|
| 1380 |
+
"loss": 0.6014,
|
| 1381 |
+
"step": 196
|
| 1382 |
+
},
|
| 1383 |
+
{
|
| 1384 |
+
"epoch": 0.9645042839657283,
|
| 1385 |
+
"grad_norm": 0.07042757421731949,
|
| 1386 |
+
"learning_rate": 3.242285754296859e-07,
|
| 1387 |
+
"loss": 0.4437,
|
| 1388 |
+
"step": 197
|
| 1389 |
+
},
|
| 1390 |
+
{
|
| 1391 |
+
"epoch": 0.9694002447980417,
|
| 1392 |
+
"grad_norm": 0.07441776990890503,
|
| 1393 |
+
"learning_rate": 2.3827714814686486e-07,
|
| 1394 |
+
"loss": 0.2822,
|
| 1395 |
+
"step": 198
|
| 1396 |
+
},
|
| 1397 |
+
{
|
| 1398 |
+
"epoch": 0.9742962056303549,
|
| 1399 |
+
"grad_norm": 0.06588555127382278,
|
| 1400 |
+
"learning_rate": 1.655104419229281e-07,
|
| 1401 |
+
"loss": 0.3,
|
| 1402 |
+
"step": 199
|
| 1403 |
+
},
|
| 1404 |
+
{
|
| 1405 |
+
"epoch": 0.9791921664626683,
|
| 1406 |
+
"grad_norm": 0.06164183467626572,
|
| 1407 |
+
"learning_rate": 1.059477368122841e-07,
|
| 1408 |
+
"loss": 0.3109,
|
| 1409 |
+
"step": 200
|
| 1410 |
+
},
|
| 1411 |
+
{
|
| 1412 |
+
"epoch": 0.9840881272949816,
|
| 1413 |
+
"grad_norm": 0.13465183973312378,
|
| 1414 |
+
"learning_rate": 5.960481437568555e-08,
|
| 1415 |
+
"loss": 0.8621,
|
| 1416 |
+
"step": 201
|
| 1417 |
+
},
|
| 1418 |
+
{
|
| 1419 |
+
"epoch": 0.988984088127295,
|
| 1420 |
+
"grad_norm": 0.06679002195596695,
|
| 1421 |
+
"learning_rate": 2.649395349879069e-08,
|
| 1422 |
+
"loss": 0.5233,
|
| 1423 |
+
"step": 202
|
| 1424 |
+
},
|
| 1425 |
+
{
|
| 1426 |
+
"epoch": 0.9938800489596084,
|
| 1427 |
+
"grad_norm": 0.07267452776432037,
|
| 1428 |
+
"learning_rate": 6.623927138804664e-09,
|
| 1429 |
+
"loss": 0.59,
|
| 1430 |
+
"step": 203
|
| 1431 |
+
},
|
| 1432 |
+
{
|
| 1433 |
+
"epoch": 0.9987760097919217,
|
| 1434 |
+
"grad_norm": 0.07579752057790756,
|
| 1435 |
+
"learning_rate": 0.0,
|
| 1436 |
+
"loss": 0.4031,
|
| 1437 |
+
"step": 204
|
| 1438 |
+
}
|
| 1439 |
+
],
|
| 1440 |
+
"logging_steps": 1,
|
| 1441 |
+
"max_steps": 204,
|
| 1442 |
+
"num_input_tokens_seen": 0,
|
| 1443 |
+
"num_train_epochs": 1,
|
| 1444 |
+
"save_steps": 500,
|
| 1445 |
+
"stateful_callbacks": {
|
| 1446 |
+
"TrainerControl": {
|
| 1447 |
+
"args": {
|
| 1448 |
+
"should_epoch_stop": false,
|
| 1449 |
+
"should_evaluate": false,
|
| 1450 |
+
"should_log": false,
|
| 1451 |
+
"should_save": true,
|
| 1452 |
+
"should_training_stop": true
|
| 1453 |
+
},
|
| 1454 |
+
"attributes": {}
|
| 1455 |
+
}
|
| 1456 |
+
},
|
| 1457 |
+
"total_flos": 1.7231028658783027e+17,
|
| 1458 |
+
"train_batch_size": 1,
|
| 1459 |
+
"trial_name": null,
|
| 1460 |
+
"trial_params": null
|
| 1461 |
+
}
|
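The `log_history` array above is plain JSON, so the checkpoint's loss curve can be inspected without any training code. A minimal sketch, assuming the checkpoint directory has been downloaded locally (the path below is illustrative):

```
import json

# Load the trainer state saved alongside the checkpoint.
with open("adapter/checkpoint-204/trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry records epoch, grad_norm, learning_rate, loss, step.
losses = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
print(f"{len(losses)} logged steps; final loss {losses[-1][1]}")
```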
adapter/checkpoint-204/training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7df378d0939130465fa7f5d61728d6ae9bbd818e223e5a12ec124d00fe8993d3
+size 5624
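The three lines above are a Git LFS pointer, not the binary itself; the actual `training_args.bin` (5624 bytes) is fetched by LFS. Once downloaded, it is a pickled `TrainingArguments` object saved by `transformers.Trainer`, so a quick way to inspect the run's hyperparameters is sketched below (assumes `torch` and `transformers` are installed locally):

```
import torch

# training_args.bin is a pickle, not a tensor archive, so weights_only=False
# is needed on recent torch versions where the safe default rejects pickles.
args = torch.load("adapter/checkpoint-204/training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.lr_scheduler_type)
```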
adapter/train_results.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "epoch": 0.9987760097919217,
-  "total_flos": 1.
-  "train_loss": 0.
-  "train_runtime":
-  "train_samples_per_second": 0.
-  "train_steps_per_second": 0.
+  "total_flos": 1.7231028658783027e+17,
+  "train_loss": 0.4889225305295458,
+  "train_runtime": 5417.273,
+  "train_samples_per_second": 0.151,
+  "train_steps_per_second": 0.038
 }
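The new figures are mutually consistent: 204 steps over 5417.273 s gives 204 / 5417.273 ≈ 0.0377 steps/s (reported as 0.038), and 0.151 samples/s × 5417.273 s ≈ 818 samples, i.e. roughly 4 samples per optimizer step. With `train_batch_size: 1` that points to a gradient-accumulation factor of about 4; this is an inference from the numbers, not something stated in this commit. A quick check:

```
# Reproduce the reported throughput figures from train_results.json.
runtime, steps, samples_per_s = 5417.273, 204, 0.151

print(round(steps / runtime, 3))               # ~0.038 steps per second
print(round(samples_per_s * runtime / steps))  # ~4 samples per optimizer step
```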
adapter/trainer_log.jsonl
CHANGED
@@ -1,279 +1,205 @@
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps": 5, "total_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps":
-{"current_steps": 200, "total_steps": 272, "loss": 0.4444, "lr": 1.9989285972581595e-05, "epoch": 0.7343941248470012, "percentage": 73.53, "elapsed_time": "1:14:37", "remaining_time": "0:26:51"}
-{"current_steps": 201, "total_steps": 272, "loss": 0.8168, "lr": 1.947687598396154e-05, "epoch": 0.7380660954712362, "percentage": 73.9, "elapsed_time": "1:15:00", "remaining_time": "0:26:29"}
-{"current_steps": 202, "total_steps": 272, "loss": 0.3696, "lr": 1.896952590862886e-05, "epoch": 0.7417380660954712, "percentage": 74.26, "elapsed_time": "1:15:19", "remaining_time": "0:26:06"}
-{"current_steps": 203, "total_steps": 272, "loss": 0.3903, "lr": 1.8467319851584954e-05, "epoch": 0.7454100367197063, "percentage": 74.63, "elapsed_time": "1:15:39", "remaining_time": "0:25:42"}
-{"current_steps": 204, "total_steps": 272, "loss": 0.5486, "lr": 1.7970341065091245e-05, "epoch": 0.7490820073439413, "percentage": 75.0, "elapsed_time": "1:16:01", "remaining_time": "0:25:20"}
-{"current_steps": 205, "total_steps": 272, "loss": 0.4969, "lr": 1.7478671934868302e-05, "epoch": 0.7527539779681762, "percentage": 75.37, "elapsed_time": "1:16:22", "remaining_time": "0:24:57"}
-{"current_steps": 206, "total_steps": 272, "loss": 0.6211, "lr": 1.6992393966438407e-05, "epoch": 0.7564259485924113, "percentage": 75.74, "elapsed_time": "1:16:47", "remaining_time": "0:24:36"}
-{"current_steps": 207, "total_steps": 272, "loss": 0.3785, "lr": 1.6511587771614205e-05, "epoch": 0.7600979192166463, "percentage": 76.1, "elapsed_time": "1:17:11", "remaining_time": "0:24:14"}
-{"current_steps": 208, "total_steps": 272, "loss": 0.4173, "lr": 1.6036333055135344e-05, "epoch": 0.7637698898408812, "percentage": 76.47, "elapsed_time": "1:17:36", "remaining_time": "0:23:52"}
-{"current_steps": 209, "total_steps": 272, "loss": 0.7029, "lr": 1.556670860145567e-05, "epoch": 0.7674418604651163, "percentage": 76.84, "elapsed_time": "1:17:54", "remaining_time": "0:23:29"}
-{"current_steps": 210, "total_steps": 272, "loss": 0.4614, "lr": 1.5102792261682813e-05, "epoch": 0.7711138310893513, "percentage": 77.21, "elapsed_time": "1:18:09", "remaining_time": "0:23:04"}
-{"current_steps": 211, "total_steps": 272, "loss": 0.8333, "lr": 1.4644660940672627e-05, "epoch": 0.7747858017135862, "percentage": 77.57, "elapsed_time": "1:18:34", "remaining_time": "0:22:42"}
-{"current_steps": 212, "total_steps": 272, "loss": 0.3017, "lr": 1.4192390584280346e-05, "epoch": 0.7784577723378213, "percentage": 77.94, "elapsed_time": "1:18:55", "remaining_time": "0:22:20"}
-{"current_steps": 213, "total_steps": 272, "loss": 0.304, "lr": 1.374605616677087e-05, "epoch": 0.7821297429620563, "percentage": 78.31, "elapsed_time": "1:19:15", "remaining_time": "0:21:57"}
-{"current_steps": 214, "total_steps": 272, "loss": 0.4155, "lr": 1.3305731678390048e-05, "epoch": 0.7858017135862914, "percentage": 78.68, "elapsed_time": "1:19:37", "remaining_time": "0:21:34"}
-{"current_steps": 215, "total_steps": 272, "loss": 0.7763, "lr": 1.2871490113099066e-05, "epoch": 0.7894736842105263, "percentage": 79.04, "elapsed_time": "1:20:05", "remaining_time": "0:21:13"}
-{"current_steps": 216, "total_steps": 272, "loss": 0.5421, "lr": 1.2443403456474017e-05, "epoch": 0.7931456548347613, "percentage": 79.41, "elapsed_time": "1:20:20", "remaining_time": "0:20:49"}
-{"current_steps": 217, "total_steps": 272, "loss": 0.3638, "lr": 1.2021542673772585e-05, "epoch": 0.7968176254589964, "percentage": 79.78, "elapsed_time": "1:20:50", "remaining_time": "0:20:29"}
-{"current_steps": 218, "total_steps": 272, "loss": 0.5641, "lr": 1.1605977698170001e-05, "epoch": 0.8004895960832313, "percentage": 80.15, "elapsed_time": "1:21:14", "remaining_time": "0:20:07"}
-{"current_steps": 219, "total_steps": 272, "loss": 0.407, "lr": 1.1196777419165927e-05, "epoch": 0.8041615667074663, "percentage": 80.51, "elapsed_time": "1:21:40", "remaining_time": "0:19:45"}
-{"current_steps": 220, "total_steps": 272, "loss": 0.8802, "lr": 1.0794009671164484e-05, "epoch": 0.8078335373317014, "percentage": 80.88, "elapsed_time": "1:22:05", "remaining_time": "0:19:24"}
-{"current_steps": 221, "total_steps": 272, "loss": 0.5714, "lr": 1.0397741222229057e-05, "epoch": 0.8115055079559363, "percentage": 81.25, "elapsed_time": "1:22:25", "remaining_time": "0:19:01"}
-{"current_steps": 222, "total_steps": 272, "loss": 0.4146, "lr": 1.0008037763014032e-05, "epoch": 0.8151774785801713, "percentage": 81.62, "elapsed_time": "1:22:47", "remaining_time": "0:18:38"}
-{"current_steps": 223, "total_steps": 272, "loss": 0.4979, "lr": 9.624963895874994e-06, "epoch": 0.8188494492044064, "percentage": 81.99, "elapsed_time": "1:23:07", "remaining_time": "0:18:16"}
-{"current_steps": 224, "total_steps": 272, "loss": 0.7919, "lr": 9.248583124159438e-06, "epoch": 0.8225214198286414, "percentage": 82.35, "elapsed_time": "1:23:34", "remaining_time": "0:17:54"}
-{"current_steps": 225, "total_steps": 272, "loss": 0.4572, "lr": 8.878957841679541e-06, "epoch": 0.8261933904528764, "percentage": 82.72, "elapsed_time": "1:23:52", "remaining_time": "0:17:31"}
-{"current_steps": 226, "total_steps": 272, "loss": 0.3024, "lr": 8.516149322369054e-06, "epoch": 0.8298653610771114, "percentage": 83.09, "elapsed_time": "1:24:10", "remaining_time": "0:17:07"}
-{"current_steps": 227, "total_steps": 272, "loss": 0.5087, "lr": 8.160217710125662e-06, "epoch": 0.8335373317013464, "percentage": 83.46, "elapsed_time": "1:24:33", "remaining_time": "0:16:45"}
-{"current_steps": 228, "total_steps": 272, "loss": 0.4048, "lr": 7.81122200884072e-06, "epoch": 0.8372093023255814, "percentage": 83.82, "elapsed_time": "1:24:48", "remaining_time": "0:16:22"}
-{"current_steps": 229, "total_steps": 272, "loss": 0.4902, "lr": 7.469220072618094e-06, "epoch": 0.8408812729498164, "percentage": 84.19, "elapsed_time": "1:25:07", "remaining_time": "0:15:59"}
-{"current_steps": 230, "total_steps": 272, "loss": 0.2719, "lr": 7.13426859618338e-06, "epoch": 0.8445532435740514, "percentage": 84.56, "elapsed_time": "1:25:27", "remaining_time": "0:15:36"}
-{"current_steps": 231, "total_steps": 272, "loss": 0.3147, "lr": 6.806423105485577e-06, "epoch": 0.8482252141982864, "percentage": 84.93, "elapsed_time": "1:25:47", "remaining_time": "0:15:13"}
-{"current_steps": 232, "total_steps": 272, "loss": 0.5688, "lr": 6.4857379484922375e-06, "epoch": 0.8518971848225214, "percentage": 85.29, "elapsed_time": "1:26:11", "remaining_time": "0:14:51"}
-{"current_steps": 233, "total_steps": 272, "loss": 0.3724, "lr": 6.1722662861801614e-06, "epoch": 0.8555691554467564, "percentage": 85.66, "elapsed_time": "1:26:39", "remaining_time": "0:14:30"}
-{"current_steps": 234, "total_steps": 272, "loss": 0.6448, "lr": 5.866060083722624e-06, "epoch": 0.8592411260709915, "percentage": 86.03, "elapsed_time": "1:27:05", "remaining_time": "0:14:08"}
-{"current_steps": 235, "total_steps": 272, "loss": 0.7383, "lr": 5.5671701018750745e-06, "epoch": 0.8629130966952264, "percentage": 86.4, "elapsed_time": "1:27:29", "remaining_time": "0:13:46"}
-{"current_steps": 236, "total_steps": 272, "loss": 0.3452, "lr": 5.275645888560232e-06, "epoch": 0.8665850673194615, "percentage": 86.76, "elapsed_time": "1:27:45", "remaining_time": "0:13:23"}
-{"current_steps": 237, "total_steps": 272, "loss": 0.3893, "lr": 4.9915357706544485e-06, "epoch": 0.8702570379436965, "percentage": 87.13, "elapsed_time": "1:28:10", "remaining_time": "0:13:01"}
-{"current_steps": 238, "total_steps": 272, "loss": 0.2986, "lr": 4.714886845976429e-06, "epoch": 0.8739290085679314, "percentage": 87.5, "elapsed_time": "1:28:35", "remaining_time": "0:12:39"}
-{"current_steps": 239, "total_steps": 272, "loss": 0.4395, "lr": 4.445744975479626e-06, "epoch": 0.8776009791921665, "percentage": 87.87, "elapsed_time": "1:28:59", "remaining_time": "0:12:17"}
-{"current_steps": 240, "total_steps": 272, "loss": 0.2981, "lr": 4.184154775649768e-06, "epoch": 0.8812729498164015, "percentage": 88.24, "elapsed_time": "1:29:22", "remaining_time": "0:11:55"}
-{"current_steps": 241, "total_steps": 272, "loss": 0.5546, "lr": 3.9301596111086025e-06, "epoch": 0.8849449204406364, "percentage": 88.6, "elapsed_time": "1:29:45", "remaining_time": "0:11:32"}
-{"current_steps": 242, "total_steps": 272, "loss": 0.4488, "lr": 3.683801587425251e-06, "epoch": 0.8886168910648715, "percentage": 88.97, "elapsed_time": "1:30:06", "remaining_time": "0:11:10"}
-{"current_steps": 243, "total_steps": 272, "loss": 0.5273, "lr": 3.4451215441362263e-06, "epoch": 0.8922888616891065, "percentage": 89.34, "elapsed_time": "1:30:32", "remaining_time": "0:10:48"}
-{"current_steps": 244, "total_steps": 272, "loss": 0.6317, "lr": 3.2141590479753236e-06, "epoch": 0.8959608323133414, "percentage": 89.71, "elapsed_time": "1:30:58", "remaining_time": "0:10:26"}
-{"current_steps": 245, "total_steps": 272, "loss": 0.5092, "lr": 2.990952386314505e-06, "epoch": 0.8996328029375765, "percentage": 90.07, "elapsed_time": "1:31:23", "remaining_time": "0:10:04"}
-{"current_steps": 246, "total_steps": 272, "loss": 0.5734, "lr": 2.775538560816937e-06, "epoch": 0.9033047735618115, "percentage": 90.44, "elapsed_time": "1:31:41", "remaining_time": "0:09:41"}
-{"current_steps": 247, "total_steps": 272, "loss": 0.535, "lr": 2.5679532813030596e-06, "epoch": 0.9069767441860465, "percentage": 90.81, "elapsed_time": "1:32:06", "remaining_time": "0:09:19"}
-{"current_steps": 248, "total_steps": 272, "loss": 0.5474, "lr": 2.3682309598308747e-06, "epoch": 0.9106487148102815, "percentage": 91.18, "elapsed_time": "1:32:38", "remaining_time": "0:08:57"}
-{"current_steps": 249, "total_steps": 272, "loss": 0.3862, "lr": 2.1764047049913527e-06, "epoch": 0.9143206854345165, "percentage": 91.54, "elapsed_time": "1:33:02", "remaining_time": "0:08:35"}
-{"current_steps": 250, "total_steps": 272, "loss": 0.3631, "lr": 1.992506316419912e-06, "epoch": 0.9179926560587516, "percentage": 91.91, "elapsed_time": "1:33:21", "remaining_time": "0:08:12"}
-{"current_steps": 251, "total_steps": 272, "loss": 0.2861, "lr": 1.8165662795249172e-06, "epoch": 0.9216646266829865, "percentage": 92.28, "elapsed_time": "1:33:42", "remaining_time": "0:07:50"}
-{"current_steps": 252, "total_steps": 272, "loss": 0.6115, "lr": 1.6486137604339813e-06, "epoch": 0.9253365973072215, "percentage": 92.65, "elapsed_time": "1:34:03", "remaining_time": "0:07:27"}
-{"current_steps": 253, "total_steps": 272, "loss": 0.6009, "lr": 1.4886766011590448e-06, "epoch": 0.9290085679314566, "percentage": 93.01, "elapsed_time": "1:34:24", "remaining_time": "0:07:05"}
-{"current_steps": 254, "total_steps": 272, "loss": 0.5147, "lr": 1.3367813149808729e-06, "epoch": 0.9326805385556916, "percentage": 93.38, "elapsed_time": "1:34:46", "remaining_time": "0:06:42"}
-{"current_steps": 255, "total_steps": 272, "loss": 0.3699, "lr": 1.1929530820539269e-06, "epoch": 0.9363525091799265, "percentage": 93.75, "elapsed_time": "1:35:11", "remaining_time": "0:06:20"}
-{"current_steps": 256, "total_steps": 272, "loss": 0.4612, "lr": 1.0572157452321097e-06, "epoch": 0.9400244798041616, "percentage": 94.12, "elapsed_time": "1:35:37", "remaining_time": "0:05:58"}
-{"current_steps": 257, "total_steps": 272, "loss": 0.3989, "lr": 9.295918061163033e-07, "epoch": 0.9436964504283966, "percentage": 94.49, "elapsed_time": "1:36:09", "remaining_time": "0:05:36"}
-{"current_steps": 258, "total_steps": 272, "loss": 0.7361, "lr": 8.101024213241826e-07, "epoch": 0.9473684210526315, "percentage": 94.85, "elapsed_time": "1:36:28", "remaining_time": "0:05:14"}
-{"current_steps": 259, "total_steps": 272, "loss": 0.356, "lr": 6.987673989830523e-07, "epoch": 0.9510403916768666, "percentage": 95.22, "elapsed_time": "1:36:53", "remaining_time": "0:04:51"}
-{"current_steps": 260, "total_steps": 272, "loss": 0.5988, "lr": 5.956051954461472e-07, "epoch": 0.9547123623011016, "percentage": 95.59, "elapsed_time": "1:37:10", "remaining_time": "0:04:29"}
-{"current_steps": 261, "total_steps": 272, "loss": 0.6091, "lr": 5.006329122330899e-07, "epoch": 0.9583843329253366, "percentage": 95.96, "elapsed_time": "1:37:34", "remaining_time": "0:04:06"}
-{"current_steps": 262, "total_steps": 272, "loss": 0.5726, "lr": 4.1386629319492556e-07, "epoch": 0.9620563035495716, "percentage": 96.32, "elapsed_time": "1:37:57", "remaining_time": "0:03:44"}
-{"current_steps": 263, "total_steps": 272, "loss": 0.2667, "lr": 3.3531972190419815e-07, "epoch": 0.9657282741738066, "percentage": 96.69, "elapsed_time": "1:38:11", "remaining_time": "0:03:21"}
-{"current_steps": 264, "total_steps": 272, "loss": 0.2876, "lr": 2.6500621927054715e-07, "epoch": 0.9694002447980417, "percentage": 97.06, "elapsed_time": "1:38:31", "remaining_time": "0:02:59"}
-{"current_steps": 265, "total_steps": 272, "loss": 0.3169, "lr": 2.0293744138219495e-07, "epoch": 0.9730722154222766, "percentage": 97.43, "elapsed_time": "1:38:47", "remaining_time": "0:02:36"}
-{"current_steps": 266, "total_steps": 272, "loss": 0.3369, "lr": 1.4912367757366487e-07, "epoch": 0.9767441860465116, "percentage": 97.79, "elapsed_time": "1:39:08", "remaining_time": "0:02:14"}
-{"current_steps": 267, "total_steps": 272, "loss": 0.2544, "lr": 1.0357384872011766e-07, "epoch": 0.9804161566707467, "percentage": 98.16, "elapsed_time": "1:39:28", "remaining_time": "0:01:51"}
-{"current_steps": 268, "total_steps": 272, "loss": 0.9444, "lr": 6.629550575847354e-08, "epoch": 0.9840881272949816, "percentage": 98.53, "elapsed_time": "1:39:58", "remaining_time": "0:01:29"}
-{"current_steps": 269, "total_steps": 272, "loss": 0.5641, "lr": 3.729482843569665e-08, "epoch": 0.9877600979192166, "percentage": 98.9, "elapsed_time": "1:40:24", "remaining_time": "0:01:07"}
-{"current_steps": 270, "total_steps": 272, "loss": 0.3192, "lr": 1.6576624284347918e-08, "epoch": 0.9914320685434517, "percentage": 99.26, "elapsed_time": "1:40:51", "remaining_time": "0:00:44"}
-{"current_steps": 271, "total_steps": 272, "loss": 0.6975, "lr": 4.1443278256170226e-09, "epoch": 0.9951040391676866, "percentage": 99.63, "elapsed_time": "1:41:14", "remaining_time": "0:00:22"}
-{"current_steps": 272, "total_steps": 272, "loss": 0.4445, "lr": 0.0, "epoch": 0.9987760097919217, "percentage": 100.0, "elapsed_time": "1:41:37", "remaining_time": "0:00:00"}
-{"current_steps": 272, "total_steps": 272, "epoch": 0.9987760097919217, "percentage": 100.0, "elapsed_time": "1:41:37", "remaining_time": "0:00:00"}
+{"current_steps": 1, "total_steps": 204, "loss": 0.4365, "lr": 9.090909090909091e-06, "epoch": 0.004895960832313341, "percentage": 0.49, "elapsed_time": "0:00:30", "remaining_time": "1:44:35"}
+{"current_steps": 2, "total_steps": 204, "loss": 0.6677, "lr": 1.8181818181818182e-05, "epoch": 0.009791921664626682, "percentage": 0.98, "elapsed_time": "0:01:05", "remaining_time": "1:49:43"}
+{"current_steps": 3, "total_steps": 204, "loss": 0.4084, "lr": 2.7272727272727273e-05, "epoch": 0.014687882496940025, "percentage": 1.47, "elapsed_time": "0:01:34", "remaining_time": "1:45:29"}
+{"current_steps": 4, "total_steps": 204, "loss": 0.3422, "lr": 3.6363636363636364e-05, "epoch": 0.019583843329253364, "percentage": 1.96, "elapsed_time": "0:01:57", "remaining_time": "1:38:05"}
+{"current_steps": 5, "total_steps": 204, "loss": 0.5332, "lr": 4.545454545454546e-05, "epoch": 0.02447980416156671, "percentage": 2.45, "elapsed_time": "0:02:27", "remaining_time": "1:37:52"}
+{"current_steps": 6, "total_steps": 204, "loss": 0.7114, "lr": 5.4545454545454546e-05, "epoch": 0.02937576499388005, "percentage": 2.94, "elapsed_time": "0:02:51", "remaining_time": "1:34:12"}
+{"current_steps": 7, "total_steps": 204, "loss": 0.5457, "lr": 6.363636363636364e-05, "epoch": 0.03427172582619339, "percentage": 3.43, "elapsed_time": "0:03:19", "remaining_time": "1:33:24"}
+{"current_steps": 8, "total_steps": 204, "loss": 0.3846, "lr": 7.272727272727273e-05, "epoch": 0.03916768665850673, "percentage": 3.92, "elapsed_time": "0:03:42", "remaining_time": "1:31:03"}
+{"current_steps": 9, "total_steps": 204, "loss": 0.5076, "lr": 8.181818181818183e-05, "epoch": 0.044063647490820076, "percentage": 4.41, "elapsed_time": "0:04:12", "remaining_time": "1:31:00"}
+{"current_steps": 10, "total_steps": 204, "loss": 0.4675, "lr": 9.090909090909092e-05, "epoch": 0.04895960832313342, "percentage": 4.9, "elapsed_time": "0:04:34", "remaining_time": "1:28:44"}
+{"current_steps": 11, "total_steps": 204, "loss": 0.3412, "lr": 0.0001, "epoch": 0.05385556915544676, "percentage": 5.39, "elapsed_time": "0:05:05", "remaining_time": "1:29:16"}
+{"current_steps": 12, "total_steps": 204, "loss": 0.4016, "lr": 9.99933760728612e-05, "epoch": 0.0587515299877601, "percentage": 5.88, "elapsed_time": "0:05:38", "remaining_time": "1:30:21"}
+{"current_steps": 13, "total_steps": 204, "loss": 0.3738, "lr": 9.997350604650123e-05, "epoch": 0.06364749082007344, "percentage": 6.37, "elapsed_time": "0:06:06", "remaining_time": "1:29:41"}
+{"current_steps": 14, "total_steps": 204, "loss": 0.5781, "lr": 9.994039518562432e-05, "epoch": 0.06854345165238677, "percentage": 6.86, "elapsed_time": "0:06:37", "remaining_time": "1:29:53"}
+{"current_steps": 15, "total_steps": 204, "loss": 0.4275, "lr": 9.989405226318772e-05, "epoch": 0.07343941248470012, "percentage": 7.35, "elapsed_time": "0:07:00", "remaining_time": "1:28:23"}
+{"current_steps": 16, "total_steps": 204, "loss": 0.4416, "lr": 9.983448955807708e-05, "epoch": 0.07833537331701346, "percentage": 7.84, "elapsed_time": "0:07:31", "remaining_time": "1:28:21"}
+{"current_steps": 17, "total_steps": 204, "loss": 0.5598, "lr": 9.976172285185314e-05, "epoch": 0.0832313341493268, "percentage": 8.33, "elapsed_time": "0:08:00", "remaining_time": "1:28:01"}
+{"current_steps": 18, "total_steps": 204, "loss": 0.3509, "lr": 9.967577142457032e-05, "epoch": 0.08812729498164015, "percentage": 8.82, "elapsed_time": "0:08:28", "remaining_time": "1:27:33"}
+{"current_steps": 19, "total_steps": 204, "loss": 0.7287, "lr": 9.957665804966829e-05, "epoch": 0.09302325581395349, "percentage": 9.31, "elapsed_time": "0:08:56", "remaining_time": "1:27:00"}
+{"current_steps": 20, "total_steps": 204, "loss": 0.638, "lr": 9.946440898793801e-05, "epoch": 0.09791921664626684, "percentage": 9.8, "elapsed_time": "0:09:27", "remaining_time": "1:26:57"}
+{"current_steps": 21, "total_steps": 204, "loss": 0.4959, "lr": 9.933905398056372e-05, "epoch": 0.10281517747858017, "percentage": 10.29, "elapsed_time": "0:09:52", "remaining_time": "1:26:03"}
+{"current_steps": 22, "total_steps": 204, "loss": 0.5835, "lr": 9.920062624124282e-05, "epoch": 0.10771113831089352, "percentage": 10.78, "elapsed_time": "0:10:17", "remaining_time": "1:25:05"}
+{"current_steps": 23, "total_steps": 204, "loss": 0.7785, "lr": 9.904916244738571e-05, "epoch": 0.11260709914320685, "percentage": 11.27, "elapsed_time": "0:10:46", "remaining_time": "1:24:49"}
+{"current_steps": 24, "total_steps": 204, "loss": 0.3422, "lr": 9.888470273039775e-05, "epoch": 0.1175030599755202, "percentage": 11.76, "elapsed_time": "0:11:11", "remaining_time": "1:23:53"}
+{"current_steps": 25, "total_steps": 204, "loss": 0.3107, "lr": 9.870729066504629e-05, "epoch": 0.12239902080783353, "percentage": 12.25, "elapsed_time": "0:11:38", "remaining_time": "1:23:22"}
+{"current_steps": 26, "total_steps": 204, "loss": 0.4473, "lr": 9.851697325791505e-05, "epoch": 0.12729498164014688, "percentage": 12.75, "elapsed_time": "0:12:02", "remaining_time": "1:22:25"}
+{"current_steps": 27, "total_steps": 204, "loss": 0.3368, "lr": 9.831380093494957e-05, "epoch": 0.13219094247246022, "percentage": 13.24, "elapsed_time": "0:12:30", "remaining_time": "1:21:58"}
+{"current_steps": 28, "total_steps": 204, "loss": 0.3709, "lr": 9.809782752809644e-05, "epoch": 0.13708690330477355, "percentage": 13.73, "elapsed_time": "0:12:50", "remaining_time": "1:20:42"}
+{"current_steps": 29, "total_steps": 204, "loss": 0.4986, "lr": 9.786911026104007e-05, "epoch": 0.1419828641370869, "percentage": 14.22, "elapsed_time": "0:13:13", "remaining_time": "1:19:50"}
+{"current_steps": 30, "total_steps": 204, "loss": 0.6252, "lr": 9.762770973404094e-05, "epoch": 0.14687882496940025, "percentage": 14.71, "elapsed_time": "0:13:40", "remaining_time": "1:19:16"}
+{"current_steps": 31, "total_steps": 204, "loss": 0.4772, "lr": 9.737368990787916e-05, "epoch": 0.15177478580171358, "percentage": 15.2, "elapsed_time": "0:14:06", "remaining_time": "1:18:42"}
+{"current_steps": 32, "total_steps": 204, "loss": 0.381, "lr": 9.710711808690754e-05, "epoch": 0.15667074663402691, "percentage": 15.69, "elapsed_time": "0:14:32", "remaining_time": "1:18:10"}
+{"current_steps": 33, "total_steps": 204, "loss": 0.3429, "lr": 9.682806490121885e-05, "epoch": 0.16156670746634028, "percentage": 16.18, "elapsed_time": "0:15:01", "remaining_time": "1:17:53"}
+{"current_steps": 34, "total_steps": 204, "loss": 0.3927, "lr": 9.653660428793188e-05, "epoch": 0.1664626682986536, "percentage": 16.67, "elapsed_time": "0:15:30", "remaining_time": "1:17:34"}
+{"current_steps": 35, "total_steps": 204, "loss": 0.9417, "lr": 9.623281347160127e-05, "epoch": 0.17135862913096694, "percentage": 17.16, "elapsed_time": "0:15:54", "remaining_time": "1:16:47"}
+{"current_steps": 36, "total_steps": 204, "loss": 0.4428, "lr": 9.591677294375636e-05, "epoch": 0.1762545899632803, "percentage": 17.65, "elapsed_time": "0:16:25", "remaining_time": "1:16:40"}
+{"current_steps": 37, "total_steps": 204, "loss": 0.3614, "lr": 9.558856644157432e-05, "epoch": 0.18115055079559364, "percentage": 18.14, "elapsed_time": "0:16:47", "remaining_time": "1:15:46"}
+{"current_steps": 38, "total_steps": 204, "loss": 0.6474, "lr": 9.52482809256934e-05, "epoch": 0.18604651162790697, "percentage": 18.63, "elapsed_time": "0:17:08", "remaining_time": "1:14:51"}
+{"current_steps": 39, "total_steps": 204, "loss": 0.4504, "lr": 9.489600655717217e-05, "epoch": 0.1909424724602203, "percentage": 19.12, "elapsed_time": "0:17:31", "remaining_time": "1:14:06"}
+{"current_steps": 40, "total_steps": 204, "loss": 0.6336, "lr": 9.453183667360062e-05, "epoch": 0.19583843329253367, "percentage": 19.61, "elapsed_time": "0:17:50", "remaining_time": "1:13:10"}
+{"current_steps": 41, "total_steps": 204, "loss": 0.5665, "lr": 9.415586776436973e-05, "epoch": 0.200734394124847, "percentage": 20.1, "elapsed_time": "0:18:15", "remaining_time": "1:12:36"}
+{"current_steps": 42, "total_steps": 204, "loss": 0.3929, "lr": 9.376819944510598e-05, "epoch": 0.20563035495716034, "percentage": 20.59, "elapsed_time": "0:18:40", "remaining_time": "1:12:03"}
+{"current_steps": 43, "total_steps": 204, "loss": 0.4195, "lr": 9.336893443127738e-05, "epoch": 0.21052631578947367, "percentage": 21.08, "elapsed_time": "0:19:01", "remaining_time": "1:11:12"}
|
| 44 |
+
{"current_steps": 44, "total_steps": 204, "loss": 0.4643, "lr": 9.295817851097837e-05, "epoch": 0.21542227662178703, "percentage": 21.57, "elapsed_time": "0:19:21", "remaining_time": "1:10:24"}
|
| 45 |
+
{"current_steps": 45, "total_steps": 204, "loss": 0.5375, "lr": 9.253604051690046e-05, "epoch": 0.22031823745410037, "percentage": 22.06, "elapsed_time": "0:19:46", "remaining_time": "1:09:53"}
|
| 46 |
+
{"current_steps": 46, "total_steps": 204, "loss": 0.3291, "lr": 9.210263229749626e-05, "epoch": 0.2252141982864137, "percentage": 22.55, "elapsed_time": "0:20:09", "remaining_time": "1:09:13"}
|
| 47 |
+
{"current_steps": 47, "total_steps": 204, "loss": 0.5543, "lr": 9.165806868734444e-05, "epoch": 0.23011015911872704, "percentage": 23.04, "elapsed_time": "0:20:41", "remaining_time": "1:09:05"}
|
| 48 |
+
{"current_steps": 48, "total_steps": 204, "loss": 0.7045, "lr": 9.120246747672347e-05, "epoch": 0.2350061199510404, "percentage": 23.53, "elapsed_time": "0:21:02", "remaining_time": "1:08:22"}
|
| 49 |
+
{"current_steps": 49, "total_steps": 204, "loss": 0.572, "lr": 9.073594938040231e-05, "epoch": 0.23990208078335373, "percentage": 24.02, "elapsed_time": "0:21:29", "remaining_time": "1:07:57"}
|
| 50 |
+
{"current_steps": 50, "total_steps": 204, "loss": 0.7102, "lr": 9.025863800565613e-05, "epoch": 0.24479804161566707, "percentage": 24.51, "elapsed_time": "0:21:56", "remaining_time": "1:07:34"}
|
| 51 |
+
{"current_steps": 51, "total_steps": 204, "loss": 0.5135, "lr": 8.977065981951566e-05, "epoch": 0.24969400244798043, "percentage": 25.0, "elapsed_time": "0:22:27", "remaining_time": "1:07:21"}
|
| 52 |
+
{"current_steps": 52, "total_steps": 204, "loss": 0.4911, "lr": 8.927214411525895e-05, "epoch": 0.25458996328029376, "percentage": 25.49, "elapsed_time": "0:22:51", "remaining_time": "1:06:47"}
|
| 53 |
+
{"current_steps": 53, "total_steps": 204, "loss": 0.6657, "lr": 8.876322297815405e-05, "epoch": 0.2594859241126071, "percentage": 25.98, "elapsed_time": "0:23:19", "remaining_time": "1:06:28"}
|
| 54 |
+
{"current_steps": 54, "total_steps": 204, "loss": 0.4495, "lr": 8.824403125046225e-05, "epoch": 0.26438188494492043, "percentage": 26.47, "elapsed_time": "0:23:52", "remaining_time": "1:06:18"}
|
| 55 |
+
{"current_steps": 55, "total_steps": 204, "loss": 0.3651, "lr": 8.771470649571056e-05, "epoch": 0.2692778457772338, "percentage": 26.96, "elapsed_time": "0:24:14", "remaining_time": "1:05:39"}
|
| 56 |
+
{"current_steps": 56, "total_steps": 204, "loss": 0.3277, "lr": 8.717538896224332e-05, "epoch": 0.2741738066095471, "percentage": 27.45, "elapsed_time": "0:24:42", "remaining_time": "1:05:19"}
|
| 57 |
+
{"current_steps": 57, "total_steps": 204, "loss": 0.361, "lr": 8.662622154606237e-05, "epoch": 0.27906976744186046, "percentage": 27.94, "elapsed_time": "0:25:07", "remaining_time": "1:04:49"}
|
| 58 |
+
{"current_steps": 58, "total_steps": 204, "loss": 0.4541, "lr": 8.606734975296578e-05, "epoch": 0.2839657282741738, "percentage": 28.43, "elapsed_time": "0:25:39", "remaining_time": "1:04:36"}
|
| 59 |
+
{"current_steps": 59, "total_steps": 204, "loss": 0.4133, "lr": 8.549892165999505e-05, "epoch": 0.28886168910648713, "percentage": 28.92, "elapsed_time": "0:26:09", "remaining_time": "1:04:16"}
|
| 60 |
+
{"current_steps": 60, "total_steps": 204, "loss": 0.3445, "lr": 8.492108787620105e-05, "epoch": 0.2937576499388005, "percentage": 29.41, "elapsed_time": "0:26:34", "remaining_time": "1:03:45"}
|
| 61 |
+
{"current_steps": 61, "total_steps": 204, "loss": 0.4518, "lr": 8.433400150273906e-05, "epoch": 0.29865361077111385, "percentage": 29.9, "elapsed_time": "0:27:00", "remaining_time": "1:03:19"}
|
| 62 |
+
{"current_steps": 62, "total_steps": 204, "loss": 0.308, "lr": 8.373781809230355e-05, "epoch": 0.30354957160342716, "percentage": 30.39, "elapsed_time": "0:27:26", "remaining_time": "1:02:51"}
|
| 63 |
+
{"current_steps": 63, "total_steps": 204, "loss": 0.7011, "lr": 8.313269560791342e-05, "epoch": 0.3084455324357405, "percentage": 30.88, "elapsed_time": "0:27:47", "remaining_time": "1:02:12"}
|
| 64 |
+
{"current_steps": 64, "total_steps": 204, "loss": 0.3193, "lr": 8.251879438105854e-05, "epoch": 0.31334149326805383, "percentage": 31.37, "elapsed_time": "0:28:14", "remaining_time": "1:01:47"}
|
| 65 |
+
{"current_steps": 65, "total_steps": 204, "loss": 0.3383, "lr": 8.189627706921877e-05, "epoch": 0.3182374541003672, "percentage": 31.86, "elapsed_time": "0:28:43", "remaining_time": "1:01:26"}
|
| 66 |
+
{"current_steps": 66, "total_steps": 204, "loss": 0.5653, "lr": 8.126530861276677e-05, "epoch": 0.32313341493268055, "percentage": 32.35, "elapsed_time": "0:29:12", "remaining_time": "1:01:05"}
|
| 67 |
+
{"current_steps": 67, "total_steps": 204, "loss": 0.3395, "lr": 8.062605619126584e-05, "epoch": 0.32802937576499386, "percentage": 32.84, "elapsed_time": "0:29:35", "remaining_time": "1:00:31"}
|
| 68 |
+
{"current_steps": 68, "total_steps": 204, "loss": 0.7753, "lr": 7.997868917917453e-05, "epoch": 0.3329253365973072, "percentage": 33.33, "elapsed_time": "0:30:07", "remaining_time": "1:00:14"}
|
| 69 |
+
{"current_steps": 69, "total_steps": 204, "loss": 0.3176, "lr": 7.932337910096961e-05, "epoch": 0.3378212974296206, "percentage": 33.82, "elapsed_time": "0:30:31", "remaining_time": "0:59:43"}
|
| 70 |
+
{"current_steps": 70, "total_steps": 204, "loss": 0.4603, "lr": 7.866029958569956e-05, "epoch": 0.3427172582619339, "percentage": 34.31, "elapsed_time": "0:30:54", "remaining_time": "0:59:10"}
|
| 71 |
+
{"current_steps": 71, "total_steps": 204, "loss": 0.5636, "lr": 7.798962632098024e-05, "epoch": 0.34761321909424725, "percentage": 34.8, "elapsed_time": "0:31:17", "remaining_time": "0:58:37"}
|
| 72 |
+
{"current_steps": 72, "total_steps": 204, "loss": 0.2692, "lr": 7.73115370064452e-05, "epoch": 0.3525091799265606, "percentage": 35.29, "elapsed_time": "0:31:43", "remaining_time": "0:58:09"}
|
| 73 |
+
{"current_steps": 73, "total_steps": 204, "loss": 0.6908, "lr": 7.6626211306663e-05, "epoch": 0.3574051407588739, "percentage": 35.78, "elapsed_time": "0:32:12", "remaining_time": "0:57:48"}
|
| 74 |
+
{"current_steps": 74, "total_steps": 204, "loss": 0.4046, "lr": 7.59338308035337e-05, "epoch": 0.3623011015911873, "percentage": 36.27, "elapsed_time": "0:32:34", "remaining_time": "0:57:12"}
|
| 75 |
+
{"current_steps": 75, "total_steps": 204, "loss": 0.816, "lr": 7.523457894817745e-05, "epoch": 0.3671970624235006, "percentage": 36.76, "elapsed_time": "0:33:03", "remaining_time": "0:56:50"}
|
| 76 |
+
{"current_steps": 76, "total_steps": 204, "loss": 0.334, "lr": 7.452864101232798e-05, "epoch": 0.37209302325581395, "percentage": 37.25, "elapsed_time": "0:33:29", "remaining_time": "0:56:24"}
|
| 77 |
+
{"current_steps": 77, "total_steps": 204, "loss": 0.4769, "lr": 7.381620403924333e-05, "epoch": 0.3769889840881273, "percentage": 37.75, "elapsed_time": "0:33:58", "remaining_time": "0:56:02"}
|
| 78 |
+
{"current_steps": 78, "total_steps": 204, "loss": 0.4149, "lr": 7.30974567941475e-05, "epoch": 0.3818849449204406, "percentage": 38.24, "elapsed_time": "0:34:22", "remaining_time": "0:55:31"}
|
| 79 |
+
{"current_steps": 79, "total_steps": 204, "loss": 0.9113, "lr": 7.237258971421587e-05, "epoch": 0.386780905752754, "percentage": 38.73, "elapsed_time": "0:34:54", "remaining_time": "0:55:14"}
|
| 80 |
+
{"current_steps": 80, "total_steps": 204, "loss": 0.4643, "lr": 7.164179485811727e-05, "epoch": 0.39167686658506734, "percentage": 39.22, "elapsed_time": "0:35:16", "remaining_time": "0:54:39"}
|
| 81 |
+
{"current_steps": 81, "total_steps": 204, "loss": 0.3902, "lr": 7.090526585512696e-05, "epoch": 0.39657282741738065, "percentage": 39.71, "elapsed_time": "0:35:38", "remaining_time": "0:54:07"}
|
| 82 |
+
{"current_steps": 82, "total_steps": 204, "loss": 0.5235, "lr": 7.016319785382296e-05, "epoch": 0.401468788249694, "percentage": 40.2, "elapsed_time": "0:36:07", "remaining_time": "0:53:44"}
|
| 83 |
+
{"current_steps": 83, "total_steps": 204, "loss": 0.4481, "lr": 6.941578747038023e-05, "epoch": 0.40636474908200737, "percentage": 40.69, "elapsed_time": "0:36:34", "remaining_time": "0:53:19"}
|
| 84 |
+
{"current_steps": 84, "total_steps": 204, "loss": 0.4189, "lr": 6.866323273647563e-05, "epoch": 0.4112607099143207, "percentage": 41.18, "elapsed_time": "0:36:59", "remaining_time": "0:52:51"}
|
| 85 |
+
{"current_steps": 85, "total_steps": 204, "loss": 0.4533, "lr": 6.79057330468182e-05, "epoch": 0.41615667074663404, "percentage": 41.67, "elapsed_time": "0:37:24", "remaining_time": "0:52:22"}
|
| 86 |
+
{"current_steps": 86, "total_steps": 204, "loss": 0.522, "lr": 6.7143489106318e-05, "epoch": 0.42105263157894735, "percentage": 42.16, "elapsed_time": "0:37:55", "remaining_time": "0:52:01"}
|
| 87 |
+
{"current_steps": 87, "total_steps": 204, "loss": 0.4772, "lr": 6.637670287690799e-05, "epoch": 0.4259485924112607, "percentage": 42.65, "elapsed_time": "0:38:22", "remaining_time": "0:51:36"}
|
| 88 |
+
{"current_steps": 88, "total_steps": 204, "loss": 0.5043, "lr": 6.560557752403277e-05, "epoch": 0.43084455324357407, "percentage": 43.14, "elapsed_time": "0:38:57", "remaining_time": "0:51:21"}
|
| 89 |
+
{"current_steps": 89, "total_steps": 204, "loss": 0.4375, "lr": 6.483031736281843e-05, "epoch": 0.4357405140758874, "percentage": 43.63, "elapsed_time": "0:39:25", "remaining_time": "0:50:57"}
|
| 90 |
+
{"current_steps": 90, "total_steps": 204, "loss": 0.3519, "lr": 6.40511278039378e-05, "epoch": 0.44063647490820074, "percentage": 44.12, "elapsed_time": "0:39:55", "remaining_time": "0:50:34"}
|
| 91 |
+
{"current_steps": 91, "total_steps": 204, "loss": 0.3407, "lr": 6.326821529918553e-05, "epoch": 0.4455324357405141, "percentage": 44.61, "elapsed_time": "0:40:21", "remaining_time": "0:50:06"}
|
| 92 |
+
{"current_steps": 92, "total_steps": 204, "loss": 0.3799, "lr": 6.248178728677711e-05, "epoch": 0.4504283965728274, "percentage": 45.1, "elapsed_time": "0:40:50", "remaining_time": "0:49:42"}
|
| 93 |
+
{"current_steps": 93, "total_steps": 204, "loss": 0.3063, "lr": 6.16920521363867e-05, "epoch": 0.45532435740514077, "percentage": 45.59, "elapsed_time": "0:41:07", "remaining_time": "0:49:04"}
|
| 94 |
+
{"current_steps": 94, "total_steps": 204, "loss": 0.4386, "lr": 6.089921909393812e-05, "epoch": 0.4602203182374541, "percentage": 46.08, "elapsed_time": "0:41:37", "remaining_time": "0:48:42"}
|
| 95 |
+
{"current_steps": 95, "total_steps": 204, "loss": 0.5919, "lr": 6.0103498226163603e-05, "epoch": 0.46511627906976744, "percentage": 46.57, "elapsed_time": "0:41:53", "remaining_time": "0:48:03"}
|
| 96 |
+
{"current_steps": 96, "total_steps": 204, "loss": 0.4636, "lr": 5.93051003649452e-05, "epoch": 0.4700122399020808, "percentage": 47.06, "elapsed_time": "0:42:31", "remaining_time": "0:47:50"}
|
| 97 |
+
{"current_steps": 97, "total_steps": 204, "loss": 0.4564, "lr": 5.850423705145334e-05, "epoch": 0.4749082007343941, "percentage": 47.55, "elapsed_time": "0:42:51", "remaining_time": "0:47:17"}
|
| 98 |
+
{"current_steps": 98, "total_steps": 204, "loss": 0.3652, "lr": 5.770112048009747e-05, "epoch": 0.47980416156670747, "percentage": 48.04, "elapsed_time": "0:43:17", "remaining_time": "0:46:49"}
|
| 99 |
+
{"current_steps": 99, "total_steps": 204, "loss": 0.5179, "lr": 5.68959634423037e-05, "epoch": 0.4847001223990208, "percentage": 48.53, "elapsed_time": "0:43:40", "remaining_time": "0:46:19"}
|
| 100 |
+
{"current_steps": 100, "total_steps": 204, "loss": 0.467, "lr": 5.60889792701342e-05, "epoch": 0.48959608323133413, "percentage": 49.02, "elapsed_time": "0:44:08", "remaining_time": "0:45:54"}
|
| 101 |
+
{"current_steps": 101, "total_steps": 204, "loss": 0.2341, "lr": 5.52803817797633e-05, "epoch": 0.4944920440636475, "percentage": 49.51, "elapsed_time": "0:44:38", "remaining_time": "0:45:31"}
|
| 102 |
+
{"current_steps": 102, "total_steps": 204, "loss": 0.5641, "lr": 5.4470385214825416e-05, "epoch": 0.49938800489596086, "percentage": 50.0, "elapsed_time": "0:45:10", "remaining_time": "0:45:10"}
|
| 103 |
+
{"current_steps": 103, "total_steps": 204, "loss": 0.438, "lr": 5.365920418964973e-05, "epoch": 0.5042839657282742, "percentage": 50.49, "elapsed_time": "0:45:38", "remaining_time": "0:44:44"}
|
| 104 |
+
{"current_steps": 104, "total_steps": 204, "loss": 0.4562, "lr": 5.28470536323965e-05, "epoch": 0.5091799265605875, "percentage": 50.98, "elapsed_time": "0:46:06", "remaining_time": "0:44:20"}
|
| 105 |
+
{"current_steps": 105, "total_steps": 204, "loss": 0.5587, "lr": 5.2034148728110424e-05, "epoch": 0.5140758873929009, "percentage": 51.47, "elapsed_time": "0:46:32", "remaining_time": "0:43:53"}
|
| 106 |
+
{"current_steps": 106, "total_steps": 204, "loss": 0.7554, "lr": 5.1220704861705774e-05, "epoch": 0.5189718482252142, "percentage": 51.96, "elapsed_time": "0:47:01", "remaining_time": "0:43:28"}
|
| 107 |
+
{"current_steps": 107, "total_steps": 204, "loss": 0.2544, "lr": 5.0406937560898646e-05, "epoch": 0.5238678090575275, "percentage": 52.45, "elapsed_time": "0:47:24", "remaining_time": "0:42:58"}
|
| 108 |
+
{"current_steps": 108, "total_steps": 204, "loss": 0.5423, "lr": 4.9593062439101365e-05, "epoch": 0.5287637698898409, "percentage": 52.94, "elapsed_time": "0:47:49", "remaining_time": "0:42:30"}
|
| 109 |
+
{"current_steps": 109, "total_steps": 204, "loss": 0.3577, "lr": 4.877929513829424e-05, "epoch": 0.5336597307221542, "percentage": 53.43, "elapsed_time": "0:48:12", "remaining_time": "0:42:00"}
|
| 110 |
+
{"current_steps": 110, "total_steps": 204, "loss": 0.454, "lr": 4.796585127188958e-05, "epoch": 0.5385556915544676, "percentage": 53.92, "elapsed_time": "0:48:40", "remaining_time": "0:41:36"}
|
| 111 |
+
{"current_steps": 111, "total_steps": 204, "loss": 0.4601, "lr": 4.715294636760352e-05, "epoch": 0.543451652386781, "percentage": 54.41, "elapsed_time": "0:49:05", "remaining_time": "0:41:08"}
|
| 112 |
+
{"current_steps": 112, "total_steps": 204, "loss": 0.2958, "lr": 4.634079581035029e-05, "epoch": 0.5483476132190942, "percentage": 54.9, "elapsed_time": "0:49:31", "remaining_time": "0:40:40"}
|
| 113 |
+
{"current_steps": 113, "total_steps": 204, "loss": 0.5048, "lr": 4.55296147851746e-05, "epoch": 0.5532435740514076, "percentage": 55.39, "elapsed_time": "0:50:01", "remaining_time": "0:40:17"}
|
| 114 |
+
{"current_steps": 114, "total_steps": 204, "loss": 0.5176, "lr": 4.471961822023671e-05, "epoch": 0.5581395348837209, "percentage": 55.88, "elapsed_time": "0:50:29", "remaining_time": "0:39:51"}
|
| 115 |
+
{"current_steps": 115, "total_steps": 204, "loss": 0.2968, "lr": 4.391102072986581e-05, "epoch": 0.5630354957160343, "percentage": 56.37, "elapsed_time": "0:50:48", "remaining_time": "0:39:18"}
|
| 116 |
+
{"current_steps": 116, "total_steps": 204, "loss": 0.6076, "lr": 4.3104036557696295e-05, "epoch": 0.5679314565483476, "percentage": 56.86, "elapsed_time": "0:51:20", "remaining_time": "0:38:56"}
|
| 117 |
+
{"current_steps": 117, "total_steps": 204, "loss": 0.5459, "lr": 4.229887951990255e-05, "epoch": 0.572827417380661, "percentage": 57.35, "elapsed_time": "0:51:49", "remaining_time": "0:38:32"}
|
| 118 |
+
{"current_steps": 118, "total_steps": 204, "loss": 0.44, "lr": 4.149576294854668e-05, "epoch": 0.5777233782129743, "percentage": 57.84, "elapsed_time": "0:52:21", "remaining_time": "0:38:09"}
|
| 119 |
+
{"current_steps": 119, "total_steps": 204, "loss": 0.6845, "lr": 4.069489963505482e-05, "epoch": 0.5826193390452876, "percentage": 58.33, "elapsed_time": "0:52:49", "remaining_time": "0:37:43"}
|
| 120 |
+
{"current_steps": 120, "total_steps": 204, "loss": 0.3054, "lr": 3.98965017738364e-05, "epoch": 0.587515299877601, "percentage": 58.82, "elapsed_time": "0:53:11", "remaining_time": "0:37:13"}
|
| 121 |
+
{"current_steps": 121, "total_steps": 204, "loss": 0.4176, "lr": 3.9100780906061896e-05, "epoch": 0.5924112607099143, "percentage": 59.31, "elapsed_time": "0:53:36", "remaining_time": "0:36:46"}
|
| 122 |
+
{"current_steps": 122, "total_steps": 204, "loss": 0.4256, "lr": 3.83079478636133e-05, "epoch": 0.5973072215422277, "percentage": 59.8, "elapsed_time": "0:54:01", "remaining_time": "0:36:18"}
|
| 123 |
+
{"current_steps": 123, "total_steps": 204, "loss": 0.5853, "lr": 3.7518212713222906e-05, "epoch": 0.602203182374541, "percentage": 60.29, "elapsed_time": "0:54:29", "remaining_time": "0:35:53"}
|
| 124 |
+
{"current_steps": 124, "total_steps": 204, "loss": 0.5636, "lr": 3.673178470081448e-05, "epoch": 0.6070991432068543, "percentage": 60.78, "elapsed_time": "0:54:54", "remaining_time": "0:35:25"}
|
| 125 |
+
{"current_steps": 125, "total_steps": 204, "loss": 0.6704, "lr": 3.594887219606221e-05, "epoch": 0.6119951040391677, "percentage": 61.27, "elapsed_time": "0:55:20", "remaining_time": "0:34:58"}
|
| 126 |
+
{"current_steps": 126, "total_steps": 204, "loss": 0.4881, "lr": 3.516968263718159e-05, "epoch": 0.616891064871481, "percentage": 61.76, "elapsed_time": "0:55:53", "remaining_time": "0:34:35"}
|
| 127 |
+
{"current_steps": 127, "total_steps": 204, "loss": 0.4918, "lr": 3.439442247596724e-05, "epoch": 0.6217870257037944, "percentage": 62.25, "elapsed_time": "0:56:21", "remaining_time": "0:34:10"}
|
| 128 |
+
{"current_steps": 128, "total_steps": 204, "loss": 0.5203, "lr": 3.3623297123092006e-05, "epoch": 0.6266829865361077, "percentage": 62.75, "elapsed_time": "0:56:46", "remaining_time": "0:33:42"}
|
| 129 |
+
{"current_steps": 129, "total_steps": 204, "loss": 0.7026, "lr": 3.285651089368202e-05, "epoch": 0.631578947368421, "percentage": 63.24, "elapsed_time": "0:57:15", "remaining_time": "0:33:17"}
|
| 130 |
+
{"current_steps": 130, "total_steps": 204, "loss": 0.4857, "lr": 3.209426695318182e-05, "epoch": 0.6364749082007344, "percentage": 63.73, "elapsed_time": "0:57:38", "remaining_time": "0:32:48"}
|
| 131 |
+
{"current_steps": 131, "total_steps": 204, "loss": 0.542, "lr": 3.133676726352438e-05, "epoch": 0.6413708690330477, "percentage": 64.22, "elapsed_time": "0:58:08", "remaining_time": "0:32:23"}
|
| 132 |
+
{"current_steps": 132, "total_steps": 204, "loss": 0.4807, "lr": 3.0584212529619775e-05, "epoch": 0.6462668298653611, "percentage": 64.71, "elapsed_time": "0:58:31", "remaining_time": "0:31:55"}
|
| 133 |
+
{"current_steps": 133, "total_steps": 204, "loss": 0.5123, "lr": 2.9836802146177034e-05, "epoch": 0.6511627906976745, "percentage": 65.2, "elapsed_time": "0:58:55", "remaining_time": "0:31:27"}
|
| 134 |
+
{"current_steps": 134, "total_steps": 204, "loss": 0.7619, "lr": 2.9094734144873036e-05, "epoch": 0.6560587515299877, "percentage": 65.69, "elapsed_time": "0:59:21", "remaining_time": "0:31:00"}
|
| 135 |
+
{"current_steps": 135, "total_steps": 204, "loss": 0.488, "lr": 2.835820514188273e-05, "epoch": 0.6609547123623011, "percentage": 66.18, "elapsed_time": "0:59:50", "remaining_time": "0:30:35"}
|
| 136 |
+
{"current_steps": 136, "total_steps": 204, "loss": 0.5913, "lr": 2.7627410285784163e-05, "epoch": 0.6658506731946144, "percentage": 66.67, "elapsed_time": "1:00:18", "remaining_time": "0:30:09"}
|
| 137 |
+
{"current_steps": 137, "total_steps": 204, "loss": 0.6774, "lr": 2.6902543205852492e-05, "epoch": 0.6707466340269278, "percentage": 67.16, "elapsed_time": "1:00:43", "remaining_time": "0:29:41"}
|
| 138 |
+
{"current_steps": 138, "total_steps": 204, "loss": 0.304, "lr": 2.618379596075668e-05, "epoch": 0.6756425948592412, "percentage": 67.65, "elapsed_time": "1:01:08", "remaining_time": "0:29:14"}
|
| 139 |
+
{"current_steps": 139, "total_steps": 204, "loss": 0.394, "lr": 2.5471358987672017e-05, "epoch": 0.6805385556915544, "percentage": 68.14, "elapsed_time": "1:01:35", "remaining_time": "0:28:48"}
|
| 140 |
+
{"current_steps": 140, "total_steps": 204, "loss": 0.4608, "lr": 2.476542105182254e-05, "epoch": 0.6854345165238678, "percentage": 68.63, "elapsed_time": "1:02:01", "remaining_time": "0:28:21"}
|
| 141 |
+
{"current_steps": 141, "total_steps": 204, "loss": 0.7695, "lr": 2.4066169196466326e-05, "epoch": 0.6903304773561811, "percentage": 69.12, "elapsed_time": "1:02:34", "remaining_time": "0:27:57"}
|
| 142 |
+
{"current_steps": 142, "total_steps": 204, "loss": 0.5197, "lr": 2.3373788693337024e-05, "epoch": 0.6952264381884945, "percentage": 69.61, "elapsed_time": "1:03:05", "remaining_time": "0:27:32"}
|
| 143 |
+
{"current_steps": 143, "total_steps": 204, "loss": 0.4842, "lr": 2.268846299355481e-05, "epoch": 0.7001223990208079, "percentage": 70.1, "elapsed_time": "1:03:28", "remaining_time": "0:27:04"}
|
| 144 |
+
{"current_steps": 144, "total_steps": 204, "loss": 0.4083, "lr": 2.2010373679019776e-05, "epoch": 0.7050183598531212, "percentage": 70.59, "elapsed_time": "1:04:00", "remaining_time": "0:26:40"}
|
| 145 |
+
{"current_steps": 145, "total_steps": 204, "loss": 0.6992, "lr": 2.133970041430044e-05, "epoch": 0.7099143206854345, "percentage": 71.08, "elapsed_time": "1:04:29", "remaining_time": "0:26:14"}
|
| 146 |
+
{"current_steps": 146, "total_steps": 204, "loss": 0.5621, "lr": 2.067662089903039e-05, "epoch": 0.7148102815177478, "percentage": 71.57, "elapsed_time": "1:04:56", "remaining_time": "0:25:47"}
|
| 147 |
+
{"current_steps": 147, "total_steps": 204, "loss": 0.4135, "lr": 2.002131082082549e-05, "epoch": 0.7197062423500612, "percentage": 72.06, "elapsed_time": "1:05:15", "remaining_time": "0:25:18"}
|
| 148 |
+
{"current_steps": 148, "total_steps": 204, "loss": 0.5933, "lr": 1.937394380873418e-05, "epoch": 0.7246022031823746, "percentage": 72.55, "elapsed_time": "1:05:37", "remaining_time": "0:24:49"}
|
| 149 |
+
{"current_steps": 149, "total_steps": 204, "loss": 0.3081, "lr": 1.873469138723325e-05, "epoch": 0.7294981640146879, "percentage": 73.04, "elapsed_time": "1:05:58", "remaining_time": "0:24:21"}
|
| 150 |
+
{"current_steps": 150, "total_steps": 204, "loss": 0.407, "lr": 1.8103722930781247e-05, "epoch": 0.7343941248470012, "percentage": 73.53, "elapsed_time": "1:06:27", "remaining_time": "0:23:55"}
|
| 151 |
+
{"current_steps": 151, "total_steps": 204, "loss": 0.7313, "lr": 1.748120561894147e-05, "epoch": 0.7392900856793145, "percentage": 74.02, "elapsed_time": "1:06:51", "remaining_time": "0:23:27"}
|
| 152 |
+
{"current_steps": 152, "total_steps": 204, "loss": 0.3871, "lr": 1.6867304392086575e-05, "epoch": 0.7441860465116279, "percentage": 74.51, "elapsed_time": "1:07:17", "remaining_time": "0:23:01"}
|
| 153 |
+
{"current_steps": 153, "total_steps": 204, "loss": 0.5026, "lr": 1.6262181907696454e-05, "epoch": 0.7490820073439413, "percentage": 75.0, "elapsed_time": "1:07:46", "remaining_time": "0:22:35"}
|
| 154 |
+
{"current_steps": 154, "total_steps": 204, "loss": 0.4817, "lr": 1.5665998497260958e-05, "epoch": 0.7539779681762546, "percentage": 75.49, "elapsed_time": "1:08:12", "remaining_time": "0:22:08"}
|
| 155 |
+
{"current_steps": 155, "total_steps": 204, "loss": 0.5186, "lr": 1.5078912123798961e-05, "epoch": 0.758873929008568, "percentage": 75.98, "elapsed_time": "1:08:41", "remaining_time": "0:21:42"}
|
| 156 |
+
{"current_steps": 156, "total_steps": 204, "loss": 0.4275, "lr": 1.4501078340004953e-05, "epoch": 0.7637698898408812, "percentage": 76.47, "elapsed_time": "1:09:10", "remaining_time": "0:21:17"}
|
| 157 |
+
{"current_steps": 157, "total_steps": 204, "loss": 0.626, "lr": 1.3932650247034218e-05, "epoch": 0.7686658506731946, "percentage": 76.96, "elapsed_time": "1:09:29", "remaining_time": "0:20:48"}
|
| 158 |
+
{"current_steps": 158, "total_steps": 204, "loss": 0.5193, "lr": 1.337377845393763e-05, "epoch": 0.773561811505508, "percentage": 77.45, "elapsed_time": "1:09:54", "remaining_time": "0:20:21"}
|
| 159 |
+
{"current_steps": 159, "total_steps": 204, "loss": 0.5848, "lr": 1.2824611037756684e-05, "epoch": 0.7784577723378213, "percentage": 77.94, "elapsed_time": "1:10:22", "remaining_time": "0:19:54"}
|
| 160 |
+
{"current_steps": 160, "total_steps": 204, "loss": 0.2627, "lr": 1.2285293504289447e-05, "epoch": 0.7833537331701347, "percentage": 78.43, "elapsed_time": "1:10:48", "remaining_time": "0:19:28"}
|
| 161 |
+
{"current_steps": 161, "total_steps": 204, "loss": 0.5949, "lr": 1.1755968749537754e-05, "epoch": 0.7882496940024479, "percentage": 78.92, "elapsed_time": "1:11:13", "remaining_time": "0:19:01"}
|
| 162 |
+
{"current_steps": 162, "total_steps": 204, "loss": 0.7397, "lr": 1.1236777021845956e-05, "epoch": 0.7931456548347613, "percentage": 79.41, "elapsed_time": "1:11:37", "remaining_time": "0:18:34"}
|
| 163 |
+
{"current_steps": 163, "total_steps": 204, "loss": 0.553, "lr": 1.0727855884741056e-05, "epoch": 0.7980416156670747, "percentage": 79.9, "elapsed_time": "1:12:08", "remaining_time": "0:18:08"}
|
| 164 |
+
{"current_steps": 164, "total_steps": 204, "loss": 0.3447, "lr": 1.022934018048432e-05, "epoch": 0.802937576499388, "percentage": 80.39, "elapsed_time": "1:12:38", "remaining_time": "0:17:43"}
|
| 165 |
+
{"current_steps": 165, "total_steps": 204, "loss": 0.7473, "lr": 9.741361994343867e-06, "epoch": 0.8078335373317014, "percentage": 80.88, "elapsed_time": "1:13:07", "remaining_time": "0:17:17"}
|
| 166 |
+
{"current_steps": 166, "total_steps": 204, "loss": 0.5008, "lr": 9.264050619597697e-06, "epoch": 0.8127294981640147, "percentage": 81.37, "elapsed_time": "1:13:32", "remaining_time": "0:16:50"}
|
| 167 |
+
{"current_steps": 167, "total_steps": 204, "loss": 0.4171, "lr": 8.797532523276542e-06, "epoch": 0.817625458996328, "percentage": 81.86, "elapsed_time": "1:13:57", "remaining_time": "0:16:23"}
|
| 168 |
+
{"current_steps": 168, "total_steps": 204, "loss": 0.7633, "lr": 8.341931312655582e-06, "epoch": 0.8225214198286414, "percentage": 82.35, "elapsed_time": "1:14:27", "remaining_time": "0:15:57"}
|
| 169 |
+
{"current_steps": 169, "total_steps": 204, "loss": 0.4054, "lr": 7.897367702503756e-06, "epoch": 0.8274173806609547, "percentage": 82.84, "elapsed_time": "1:14:49", "remaining_time": "0:15:29"}
|
| 170 |
+
{"current_steps": 170, "total_steps": 204, "loss": 0.4642, "lr": 7.463959483099547e-06, "epoch": 0.8323133414932681, "percentage": 83.33, "elapsed_time": "1:15:15", "remaining_time": "0:15:03"}
|
| 171 |
+
{"current_steps": 171, "total_steps": 204, "loss": 0.3769, "lr": 7.041821489021639e-06, "epoch": 0.8372093023255814, "percentage": 83.82, "elapsed_time": "1:15:34", "remaining_time": "0:14:35"}
|
| 172 |
+
{"current_steps": 172, "total_steps": 204, "loss": 0.4487, "lr": 6.631065568722633e-06, "epoch": 0.8421052631578947, "percentage": 84.31, "elapsed_time": "1:15:56", "remaining_time": "0:14:07"}
|
| 173 |
+
{"current_steps": 173, "total_steps": 204, "loss": 0.2915, "lr": 6.231800554894029e-06, "epoch": 0.847001223990208, "percentage": 84.8, "elapsed_time": "1:16:20", "remaining_time": "0:13:40"}
|
| 174 |
+
{"current_steps": 174, "total_steps": 204, "loss": 0.5065, "lr": 5.844132235630273e-06, "epoch": 0.8518971848225214, "percentage": 85.29, "elapsed_time": "1:16:49", "remaining_time": "0:13:14"}
|
| 175 |
+
{"current_steps": 175, "total_steps": 204, "loss": 0.4307, "lr": 5.468163326399389e-06, "epoch": 0.8567931456548348, "percentage": 85.78, "elapsed_time": "1:17:15", "remaining_time": "0:12:48"}
|
| 176 |
+
{"current_steps": 176, "total_steps": 204, "loss": 0.6356, "lr": 5.103993442827831e-06, "epoch": 0.8616891064871481, "percentage": 86.27, "elapsed_time": "1:17:43", "remaining_time": "0:12:21"}
|
| 177 |
+
{"current_steps": 177, "total_steps": 204, "loss": 0.5426, "lr": 4.751719074306604e-06, "epoch": 0.8665850673194615, "percentage": 86.76, "elapsed_time": "1:18:05", "remaining_time": "0:11:54"}
|
| 178 |
+
{"current_steps": 178, "total_steps": 204, "loss": 0.4278, "lr": 4.411433558425698e-06, "epoch": 0.8714810281517748, "percentage": 87.25, "elapsed_time": "1:18:32", "remaining_time": "0:11:28"}
|
| 179 |
+
{"current_steps": 179, "total_steps": 204, "loss": 0.2847, "lr": 4.083227056243644e-06, "epoch": 0.8763769889840881, "percentage": 87.75, "elapsed_time": "1:19:04", "remaining_time": "0:11:02"}
|
| 180 |
+
{"current_steps": 180, "total_steps": 204, "loss": 0.3923, "lr": 3.767186528398725e-06, "epoch": 0.8812729498164015, "percentage": 88.24, "elapsed_time": "1:19:31", "remaining_time": "0:10:36"}
|
| 181 |
+
{"current_steps": 181, "total_steps": 204, "loss": 0.5155, "lr": 3.4633957120681293e-06, "epoch": 0.8861689106487148, "percentage": 88.73, "elapsed_time": "1:19:54", "remaining_time": "0:10:09"}
|
| 182 |
+
{"current_steps": 182, "total_steps": 204, "loss": 0.5456, "lr": 3.1719350987811534e-06, "epoch": 0.8910648714810282, "percentage": 89.22, "elapsed_time": "1:20:26", "remaining_time": "0:09:43"}
|
| 183 |
+
{"current_steps": 183, "total_steps": 204, "loss": 0.5679, "lr": 2.8928819130924657e-06, "epoch": 0.8959608323133414, "percentage": 89.71, "elapsed_time": "1:20:54", "remaining_time": "0:09:17"}
|
| 184 |
+
{"current_steps": 184, "total_steps": 204, "loss": 0.4841, "lr": 2.6263100921208482e-06, "epoch": 0.9008567931456548, "percentage": 90.2, "elapsed_time": "1:21:19", "remaining_time": "0:08:50"}
|
| 185 |
+
{"current_steps": 185, "total_steps": 204, "loss": 0.6307, "lr": 2.372290265959065e-06, "epoch": 0.9057527539779682, "percentage": 90.69, "elapsed_time": "1:21:49", "remaining_time": "0:08:24"}
|
| 186 |
+
{"current_steps": 186, "total_steps": 204, "loss": 0.489, "lr": 2.130889738959946e-06, "epoch": 0.9106487148102815, "percentage": 91.18, "elapsed_time": "1:22:23", "remaining_time": "0:07:58"}
|
| 187 |
+
{"current_steps": 187, "total_steps": 204, "loss": 0.3811, "lr": 1.9021724719035628e-06, "epoch": 0.9155446756425949, "percentage": 91.67, "elapsed_time": "1:22:50", "remaining_time": "0:07:31"}
|
| 188 |
+
{"current_steps": 188, "total_steps": 204, "loss": 0.3263, "lr": 1.6861990650504255e-06, "epoch": 0.9204406364749081, "percentage": 92.16, "elapsed_time": "1:23:13", "remaining_time": "0:07:04"}
|
| 189 |
+
{"current_steps": 189, "total_steps": 204, "loss": 0.5176, "lr": 1.4830267420849585e-06, "epoch": 0.9253365973072215, "percentage": 92.65, "elapsed_time": "1:23:39", "remaining_time": "0:06:38"}
|
| 190 |
+
{"current_steps": 190, "total_steps": 204, "loss": 0.5375, "lr": 1.292709334953729e-06, "epoch": 0.9302325581395349, "percentage": 93.14, "elapsed_time": "1:24:03", "remaining_time": "0:06:11"}
|
| 191 |
+
{"current_steps": 191, "total_steps": 204, "loss": 0.4505, "lr": 1.1152972696022445e-06, "epoch": 0.9351285189718482, "percentage": 93.63, "elapsed_time": "1:24:34", "remaining_time": "0:05:45"}
|
| 192 |
+
{"current_steps": 192, "total_steps": 204, "loss": 0.4773, "lr": 9.508375526142976e-07, "epoch": 0.9400244798041616, "percentage": 94.12, "elapsed_time": "1:25:03", "remaining_time": "0:05:18"}
|
| 193 |
+
{"current_steps": 193, "total_steps": 204, "loss": 0.4163, "lr": 7.993737587571826e-07, "epoch": 0.944920440636475, "percentage": 94.61, "elapsed_time": "1:25:35", "remaining_time": "0:04:52"}
|
| 194 |
+
{"current_steps": 194, "total_steps": 204, "loss": 0.4597, "lr": 6.609460194362927e-07, "epoch": 0.9498164014687882, "percentage": 95.1, "elapsed_time": "1:26:03", "remaining_time": "0:04:26"}
|
| 195 |
+
{"current_steps": 195, "total_steps": 204, "loss": 0.5937, "lr": 5.355910120620034e-07, "epoch": 0.9547123623011016, "percentage": 95.59, "elapsed_time": "1:26:26", "remaining_time": "0:03:59"}
|
| 196 |
+
{"current_steps": 196, "total_steps": 204, "loss": 0.6014, "lr": 4.233419503317182e-07, "epoch": 0.9596083231334149, "percentage": 96.08, "elapsed_time": "1:26:51", "remaining_time": "0:03:32"}
|
| 197 |
+
{"current_steps": 197, "total_steps": 204, "loss": 0.4437, "lr": 3.242285754296859e-07, "epoch": 0.9645042839657283, "percentage": 96.57, "elapsed_time": "1:27:13", "remaining_time": "0:03:05"}
|
| 198 |
+
{"current_steps": 198, "total_steps": 204, "loss": 0.2822, "lr": 2.3827714814686486e-07, "epoch": 0.9694002447980417, "percentage": 97.06, "elapsed_time": "1:27:34", "remaining_time": "0:02:39"}
|
| 199 |
+
{"current_steps": 199, "total_steps": 204, "loss": 0.3, "lr": 1.655104419229281e-07, "epoch": 0.9742962056303549, "percentage": 97.55, "elapsed_time": "1:27:55", "remaining_time": "0:02:12"}
|
| 200 |
+
{"current_steps": 200, "total_steps": 204, "loss": 0.3109, "lr": 1.059477368122841e-07, "epoch": 0.9791921664626683, "percentage": 98.04, "elapsed_time": "1:28:21", "remaining_time": "0:01:46"}
|
| 201 |
+
{"current_steps": 201, "total_steps": 204, "loss": 0.8621, "lr": 5.960481437568555e-08, "epoch": 0.9840881272949816, "percentage": 98.53, "elapsed_time": "1:28:51", "remaining_time": "0:01:19"}
|
| 202 |
+
{"current_steps": 202, "total_steps": 204, "loss": 0.5233, "lr": 2.649395349879069e-08, "epoch": 0.988984088127295, "percentage": 99.02, "elapsed_time": "1:29:16", "remaining_time": "0:00:53"}
|
| 203 |
+
{"current_steps": 203, "total_steps": 204, "loss": 0.59, "lr": 6.623927138804664e-09, "epoch": 0.9938800489596084, "percentage": 99.51, "elapsed_time": "1:29:50", "remaining_time": "0:00:26"}
|
| 204 |
+
{"current_steps": 204, "total_steps": 204, "loss": 0.4031, "lr": 0.0, "epoch": 0.9987760097919217, "percentage": 100.0, "elapsed_time": "1:30:15", "remaining_time": "0:00:00"}
|
| 205 |
+
{"current_steps": 204, "total_steps": 204, "epoch": 0.9987760097919217, "percentage": 100.0, "elapsed_time": "1:30:16", "remaining_time": "0:00:00"}
|
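The `lr` column in `adapter/trainer_log.jsonl` above ramps linearly to 1e-4 at step 11 and then decays to 0 at step 204, which is consistent with linear warmup followed by single-cycle cosine decay. A minimal sketch that reproduces the column under that assumption (the constants are read off the log itself, not taken from the trainer's code):

```python
# Reproduce the lr column of adapter/trainer_log.jsonl, assuming linear
# warmup for the first 11 steps followed by single-cycle cosine decay.
# PEAK_LR and WARMUP_STEPS are read off the log, not the trainer code.
import json
import math

PEAK_LR = 1e-4      # lr logged at step 11
WARMUP_STEPS = 11
TOTAL_STEPS = 204

def lr_at(step: int) -> float:
    if step <= WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (TOTAL_STEPS - WARMUP_STEPS)
    return 0.5 * PEAK_LR * (1.0 + math.cos(math.pi * progress))

with open("adapter/trainer_log.jsonl") as f:
    for line in f:
        entry = json.loads(line)
        if "lr" in entry:  # the final summary line carries no lr
            assert math.isclose(entry["lr"], lr_at(entry["current_steps"]),
                                rel_tol=1e-6, abs_tol=1e-12)
```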
adapter/trainer_state.json
CHANGED
|
@@ -3,1930 +3,1454 @@
"best_model_checkpoint": null,
"epoch": 0.9987760097919217,
"eval_steps": 500,
"global_step": [value truncated in this diff view]
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
    {"epoch": …, "grad_norm": …, "learning_rate": …, "loss": …, "step": 1},
    … one entry per logged step (steps 1 through 204 in the new file); the changed
    "epoch", "grad_norm", "learning_rate", and "loss" values are truncated in this
    diff view; the updated per-step loss and lr values are recorded in
    adapter/trainer_log.jsonl above …
]
|
| 1071 |
-
"learning_rate":
|
| 1072 |
-
"loss": 0.
|
| 1073 |
"step": 152
|
| 1074 |
},
|
| 1075 |
{
|
| 1076 |
-
"epoch": 0.
|
| 1077 |
-
"grad_norm": 0.
|
| 1078 |
-
"learning_rate":
|
| 1079 |
-
"loss": 0.
|
| 1080 |
"step": 153
|
| 1081 |
},
|
| 1082 |
{
|
| 1083 |
-
"epoch": 0.
|
| 1084 |
-
"grad_norm": 0.
|
| 1085 |
-
"learning_rate":
|
| 1086 |
-
"loss": 0.
|
| 1087 |
"step": 154
|
| 1088 |
},
|
| 1089 |
{
|
| 1090 |
-
"epoch": 0.
|
| 1091 |
-
"grad_norm": 0.
|
| 1092 |
-
"learning_rate":
|
| 1093 |
-
"loss": 0.
|
| 1094 |
"step": 155
|
| 1095 |
},
|
| 1096 |
{
|
| 1097 |
-
"epoch": 0.
|
| 1098 |
-
"grad_norm": 0.
|
| 1099 |
-
"learning_rate":
|
| 1100 |
-
"loss": 0.
|
| 1101 |
"step": 156
|
| 1102 |
},
|
| 1103 |
{
|
| 1104 |
-
"epoch": 0.
|
| 1105 |
-
"grad_norm": 0.
|
| 1106 |
-
"learning_rate":
|
| 1107 |
-
"loss": 0.
|
| 1108 |
"step": 157
|
| 1109 |
},
|
| 1110 |
{
|
| 1111 |
-
"epoch": 0.
|
| 1112 |
-
"grad_norm": 0.
|
| 1113 |
-
"learning_rate":
|
| 1114 |
-
"loss": 0.
|
| 1115 |
"step": 158
|
| 1116 |
},
|
| 1117 |
{
|
| 1118 |
-
"epoch": 0.
|
| 1119 |
-
"grad_norm": 0.
|
| 1120 |
-
"learning_rate":
|
| 1121 |
-
"loss": 0.
|
| 1122 |
"step": 159
|
| 1123 |
},
|
| 1124 |
{
|
| 1125 |
-
"epoch": 0.
|
| 1126 |
-
"grad_norm": 0.
|
| 1127 |
-
"learning_rate":
|
| 1128 |
-
"loss": 0.
|
| 1129 |
"step": 160
|
| 1130 |
},
|
| 1131 |
{
|
| 1132 |
-
"epoch": 0.
|
| 1133 |
-
"grad_norm": 0.
|
| 1134 |
-
"learning_rate":
|
| 1135 |
-
"loss": 0.
|
| 1136 |
"step": 161
|
| 1137 |
},
|
| 1138 |
{
|
| 1139 |
-
"epoch": 0.
|
| 1140 |
-
"grad_norm": 0.
|
| 1141 |
-
"learning_rate":
|
| 1142 |
-
"loss": 0.
|
| 1143 |
"step": 162
|
| 1144 |
},
|
| 1145 |
{
|
| 1146 |
-
"epoch": 0.
|
| 1147 |
-
"grad_norm": 0.
|
| 1148 |
-
"learning_rate":
|
| 1149 |
-
"loss": 0.
|
| 1150 |
"step": 163
|
| 1151 |
},
|
| 1152 |
{
|
| 1153 |
-
"epoch": 0.
|
| 1154 |
-
"grad_norm": 0.
|
| 1155 |
-
"learning_rate":
|
| 1156 |
-
"loss": 0.
|
| 1157 |
"step": 164
|
| 1158 |
},
|
| 1159 |
{
|
| 1160 |
-
"epoch": 0.
|
| 1161 |
-
"grad_norm": 0.
|
| 1162 |
-
"learning_rate":
|
| 1163 |
-
"loss": 0.
|
| 1164 |
"step": 165
|
| 1165 |
},
|
| 1166 |
{
|
| 1167 |
-
"epoch": 0.
|
| 1168 |
-
"grad_norm": 0.
|
| 1169 |
-
"learning_rate":
|
| 1170 |
-
"loss": 0.
|
| 1171 |
"step": 166
|
| 1172 |
},
|
| 1173 |
{
|
| 1174 |
-
"epoch": 0.
|
| 1175 |
-
"grad_norm": 0.
|
| 1176 |
-
"learning_rate":
|
| 1177 |
-
"loss": 0.
|
| 1178 |
"step": 167
|
| 1179 |
},
|
| 1180 |
{
|
| 1181 |
-
"epoch": 0.
|
| 1182 |
-
"grad_norm": 0.
|
| 1183 |
-
"learning_rate":
|
| 1184 |
-
"loss": 0.
|
| 1185 |
"step": 168
|
| 1186 |
},
|
| 1187 |
{
|
| 1188 |
-
"epoch": 0.
|
| 1189 |
-
"grad_norm": 0.
|
| 1190 |
-
"learning_rate":
|
| 1191 |
-
"loss": 0.
|
| 1192 |
"step": 169
|
| 1193 |
},
|
| 1194 |
{
|
| 1195 |
-
"epoch": 0.
|
| 1196 |
-
"grad_norm": 0.
|
| 1197 |
-
"learning_rate":
|
| 1198 |
-
"loss": 0.
|
| 1199 |
"step": 170
|
| 1200 |
},
|
| 1201 |
{
|
| 1202 |
-
"epoch": 0.
|
| 1203 |
-
"grad_norm": 0.
|
| 1204 |
-
"learning_rate":
|
| 1205 |
-
"loss": 0.
|
| 1206 |
"step": 171
|
| 1207 |
},
|
| 1208 |
{
|
| 1209 |
-
"epoch": 0.
|
| 1210 |
-
"grad_norm": 0.
|
| 1211 |
-
"learning_rate":
|
| 1212 |
-
"loss": 0.
|
| 1213 |
"step": 172
|
| 1214 |
},
|
| 1215 |
{
|
| 1216 |
-
"epoch": 0.
|
| 1217 |
-
"grad_norm": 0.
|
| 1218 |
-
"learning_rate":
|
| 1219 |
-
"loss": 0.
|
| 1220 |
"step": 173
|
| 1221 |
},
|
| 1222 |
{
|
| 1223 |
-
"epoch": 0.
|
| 1224 |
-
"grad_norm": 0.
|
| 1225 |
-
"learning_rate":
|
| 1226 |
-
"loss": 0.
|
| 1227 |
"step": 174
|
| 1228 |
},
|
| 1229 |
{
|
| 1230 |
-
"epoch": 0.
|
| 1231 |
-
"grad_norm": 0.
|
| 1232 |
-
"learning_rate":
|
| 1233 |
-
"loss": 0.
|
| 1234 |
"step": 175
|
| 1235 |
},
|
| 1236 |
{
|
| 1237 |
-
"epoch": 0.
|
| 1238 |
-
"grad_norm": 0.
|
| 1239 |
-
"learning_rate":
|
| 1240 |
-
"loss": 0.
|
| 1241 |
"step": 176
|
| 1242 |
},
|
| 1243 |
{
|
| 1244 |
-
"epoch": 0.
|
| 1245 |
-
"grad_norm": 0.
|
| 1246 |
-
"learning_rate":
|
| 1247 |
-
"loss": 0.
|
| 1248 |
"step": 177
|
| 1249 |
},
|
| 1250 |
{
|
| 1251 |
-
"epoch": 0.
|
| 1252 |
-
"grad_norm": 0.
|
| 1253 |
-
"learning_rate":
|
| 1254 |
-
"loss": 0.
|
| 1255 |
"step": 178
|
| 1256 |
},
|
| 1257 |
{
|
| 1258 |
-
"epoch": 0.
|
| 1259 |
-
"grad_norm": 0.
|
| 1260 |
-
"learning_rate":
|
| 1261 |
-
"loss": 0.
|
| 1262 |
"step": 179
|
| 1263 |
},
|
| 1264 |
{
|
| 1265 |
-
"epoch": 0.
|
| 1266 |
-
"grad_norm": 0.
|
| 1267 |
-
"learning_rate": 3.
|
| 1268 |
-
"loss": 0.
|
| 1269 |
"step": 180
|
| 1270 |
},
|
| 1271 |
{
|
| 1272 |
-
"epoch": 0.
|
| 1273 |
-
"grad_norm": 0.
|
| 1274 |
-
"learning_rate": 3.
|
| 1275 |
-
"loss": 0.
|
| 1276 |
"step": 181
|
| 1277 |
},
|
| 1278 |
{
|
| 1279 |
-
"epoch": 0.
|
| 1280 |
-
"grad_norm": 0.
|
| 1281 |
-
"learning_rate":
|
| 1282 |
-
"loss": 0.
|
| 1283 |
"step": 182
|
| 1284 |
},
|
| 1285 |
{
|
| 1286 |
-
"epoch": 0.
|
| 1287 |
-
"grad_norm": 0.
|
| 1288 |
-
"learning_rate": 2.
|
| 1289 |
-
"loss": 0.
|
| 1290 |
"step": 183
|
| 1291 |
},
|
| 1292 |
{
|
| 1293 |
-
"epoch": 0.
|
| 1294 |
-
"grad_norm": 0.
|
| 1295 |
-
"learning_rate": 2.
|
| 1296 |
-
"loss": 0.
|
| 1297 |
"step": 184
|
| 1298 |
},
|
| 1299 |
{
|
| 1300 |
-
"epoch": 0.
|
| 1301 |
-
"grad_norm": 0.
|
| 1302 |
-
"learning_rate": 2.
|
| 1303 |
-
"loss": 0.
|
| 1304 |
"step": 185
|
| 1305 |
},
|
| 1306 |
{
|
| 1307 |
-
"epoch": 0.
|
| 1308 |
-
"grad_norm": 0.
|
| 1309 |
-
"learning_rate": 2.
|
| 1310 |
-
"loss": 0.
|
| 1311 |
"step": 186
|
| 1312 |
},
|
| 1313 |
{
|
| 1314 |
-
"epoch": 0.
|
| 1315 |
-
"grad_norm": 0.
|
| 1316 |
-
"learning_rate":
|
| 1317 |
-
"loss": 0.
|
| 1318 |
"step": 187
|
| 1319 |
},
|
| 1320 |
{
|
| 1321 |
-
"epoch": 0.
|
| 1322 |
-
"grad_norm": 0.
|
| 1323 |
-
"learning_rate":
|
| 1324 |
-
"loss": 0.
|
| 1325 |
"step": 188
|
| 1326 |
},
|
| 1327 |
{
|
| 1328 |
-
"epoch": 0.
|
| 1329 |
-
"grad_norm": 0.
|
| 1330 |
-
"learning_rate":
|
| 1331 |
-
"loss": 0.
|
| 1332 |
"step": 189
|
| 1333 |
},
|
| 1334 |
{
|
| 1335 |
-
"epoch": 0.
|
| 1336 |
-
"grad_norm": 0.
|
| 1337 |
-
"learning_rate":
|
| 1338 |
-
"loss": 0.
|
| 1339 |
"step": 190
|
| 1340 |
},
|
| 1341 |
{
|
| 1342 |
-
"epoch": 0.
|
| 1343 |
-
"grad_norm": 0.
|
| 1344 |
-
"learning_rate":
|
| 1345 |
-
"loss": 0.
|
| 1346 |
"step": 191
|
| 1347 |
},
|
| 1348 |
{
|
| 1349 |
-
"epoch": 0.
|
| 1350 |
-
"grad_norm": 0.
|
| 1351 |
-
"learning_rate":
|
| 1352 |
-
"loss": 0.
|
| 1353 |
"step": 192
|
| 1354 |
},
|
| 1355 |
{
|
| 1356 |
-
"epoch": 0.
|
| 1357 |
-
"grad_norm": 0.
|
| 1358 |
-
"learning_rate":
|
| 1359 |
-
"loss": 0.
|
| 1360 |
"step": 193
|
| 1361 |
},
|
| 1362 |
{
|
| 1363 |
-
"epoch": 0.
|
| 1364 |
-
"grad_norm": 0.
|
| 1365 |
-
"learning_rate":
|
| 1366 |
-
"loss": 0.
|
| 1367 |
"step": 194
|
| 1368 |
},
|
| 1369 |
{
|
| 1370 |
-
"epoch": 0.
|
| 1371 |
-
"grad_norm": 0.
|
| 1372 |
-
"learning_rate":
|
| 1373 |
-
"loss": 0.
|
| 1374 |
"step": 195
|
| 1375 |
},
|
| 1376 |
{
|
| 1377 |
-
"epoch": 0.
|
| 1378 |
-
"grad_norm": 0.
|
| 1379 |
-
"learning_rate":
|
| 1380 |
-
"loss": 0.
|
| 1381 |
"step": 196
|
| 1382 |
},
|
| 1383 |
{
|
| 1384 |
-
"epoch": 0.
|
| 1385 |
-
"grad_norm": 0.
|
| 1386 |
-
"learning_rate":
|
| 1387 |
-
"loss": 0.
|
| 1388 |
"step": 197
|
| 1389 |
},
|
| 1390 |
{
|
| 1391 |
-
"epoch": 0.
|
| 1392 |
-
"grad_norm": 0.
|
| 1393 |
-
"learning_rate": 2.
|
| 1394 |
-
"loss": 0.
|
| 1395 |
"step": 198
|
| 1396 |
},
|
| 1397 |
{
|
| 1398 |
-
"epoch": 0.
|
| 1399 |
-
"grad_norm": 0.
|
| 1400 |
-
"learning_rate":
|
| 1401 |
-
"loss": 0.
|
| 1402 |
"step": 199
|
| 1403 |
},
|
| 1404 |
{
|
| 1405 |
-
"epoch": 0.
|
| 1406 |
-
"grad_norm": 0.
|
| 1407 |
-
"learning_rate": 1.
|
| 1408 |
-
"loss": 0.
|
| 1409 |
"step": 200
|
| 1410 |
},
|
| 1411 |
{
|
| 1412 |
-
"epoch": 0.
|
| 1413 |
-
"grad_norm": 0.
|
| 1414 |
-
"learning_rate":
|
| 1415 |
-
"loss": 0.
|
| 1416 |
"step": 201
|
| 1417 |
},
|
| 1418 |
{
|
| 1419 |
-
"epoch": 0.
|
| 1420 |
-
"grad_norm": 0.
|
| 1421 |
-
"learning_rate":
|
| 1422 |
-
"loss": 0.
|
| 1423 |
"step": 202
|
| 1424 |
},
|
| 1425 |
{
|
| 1426 |
-
"epoch": 0.
|
| 1427 |
-
"grad_norm": 0.
|
| 1428 |
-
"learning_rate":
|
| 1429 |
-
"loss": 0.
|
| 1430 |
"step": 203
|
| 1431 |
},
|
| 1432 |
-    { "epoch": 0.7490820073439413, "grad_norm": 0.14774741232395172, "learning_rate": 1.7970341065091245e-05, "loss": 0.5486, "step": 204 },
-    { "epoch": 0.7527539779681762, "grad_norm": 0.20584475994110107, "learning_rate": 1.7478671934868302e-05, "loss": 0.4969, "step": 205 },
-    { "epoch": 0.7564259485924113, "grad_norm": 0.10497330129146576, "learning_rate": 1.6992393966438407e-05, "loss": 0.6211, "step": 206 },
-    { "epoch": 0.7600979192166463, "grad_norm": 0.10213766247034073, "learning_rate": 1.6511587771614205e-05, "loss": 0.3785, "step": 207 },
-    { "epoch": 0.7637698898408812, "grad_norm": 0.08263826370239258, "learning_rate": 1.6036333055135344e-05, "loss": 0.4173, "step": 208 },
-    { "epoch": 0.7674418604651163, "grad_norm": 0.15048938989639282, "learning_rate": 1.556670860145567e-05, "loss": 0.7029, "step": 209 },
-    { "epoch": 0.7711138310893513, "grad_norm": 0.09334007650613785, "learning_rate": 1.5102792261682813e-05, "loss": 0.4614, "step": 210 },
-    { "epoch": 0.7747858017135862, "grad_norm": 0.15974034368991852, "learning_rate": 1.4644660940672627e-05, "loss": 0.8333, "step": 211 },
-    { "epoch": 0.7784577723378213, "grad_norm": 0.08053223788738251, "learning_rate": 1.4192390584280346e-05, "loss": 0.3017, "step": 212 },
-    { "epoch": 0.7821297429620563, "grad_norm": 0.0834859237074852, "learning_rate": 1.374605616677087e-05, "loss": 0.304, "step": 213 },
-    { "epoch": 0.7858017135862914, "grad_norm": 0.0708678588271141, "learning_rate": 1.3305731678390048e-05, "loss": 0.4155, "step": 214 },
-    { "epoch": 0.7894736842105263, "grad_norm": 0.13682302832603455, "learning_rate": 1.2871490113099066e-05, "loss": 0.7763, "step": 215 },
-    { "epoch": 0.7931456548347613, "grad_norm": 0.12170913815498352, "learning_rate": 1.2443403456474017e-05, "loss": 0.5421, "step": 216 },
-    { "epoch": 0.7968176254589964, "grad_norm": 0.07957993447780609, "learning_rate": 1.2021542673772585e-05, "loss": 0.3638, "step": 217 },
-    { "epoch": 0.8004895960832313, "grad_norm": 0.14521080255508423, "learning_rate": 1.1605977698170001e-05, "loss": 0.5641, "step": 218 },
-    { "epoch": 0.8041615667074663, "grad_norm": 0.0813819020986557, "learning_rate": 1.1196777419165927e-05, "loss": 0.407, "step": 219 },
-    { "epoch": 0.8078335373317014, "grad_norm": 0.12135583162307739, "learning_rate": 1.0794009671164484e-05, "loss": 0.8802, "step": 220 },
-    { "epoch": 0.8115055079559363, "grad_norm": 0.1219213679432869, "learning_rate": 1.0397741222229057e-05, "loss": 0.5714, "step": 221 },
-    { "epoch": 0.8151774785801713, "grad_norm": 0.08637195825576782, "learning_rate": 1.0008037763014032e-05, "loss": 0.4146, "step": 222 },
-    { "epoch": 0.8188494492044064, "grad_norm": 0.22487489879131317, "learning_rate": 9.624963895874994e-06, "loss": 0.4979, "step": 223 },
-    { "epoch": 0.8225214198286414, "grad_norm": 0.28123998641967773, "learning_rate": 9.248583124159438e-06, "loss": 0.7919, "step": 224 },
-    { "epoch": 0.8261933904528764, "grad_norm": 0.14954394102096558, "learning_rate": 8.878957841679541e-06, "loss": 0.4572, "step": 225 },
-    { "epoch": 0.8298653610771114, "grad_norm": 0.08152212202548981, "learning_rate": 8.516149322369054e-06, "loss": 0.3024, "step": 226 },
-    { "epoch": 0.8335373317013464, "grad_norm": 0.08769659698009491, "learning_rate": 8.160217710125662e-06, "loss": 0.5087, "step": 227 },
-    { "epoch": 0.8372093023255814, "grad_norm": 0.14432811737060547, "learning_rate": 7.81122200884072e-06, "loss": 0.4048, "step": 228 },
-    { "epoch": 0.8408812729498164, "grad_norm": 0.2427009642124176, "learning_rate": 7.469220072618094e-06, "loss": 0.4902, "step": 229 },
-    { "epoch": 0.8445532435740514, "grad_norm": 0.09131976217031479, "learning_rate": 7.13426859618338e-06, "loss": 0.2719, "step": 230 },
-    { "epoch": 0.8482252141982864, "grad_norm": 0.0897597223520279, "learning_rate": 6.806423105485577e-06, "loss": 0.3147, "step": 231 },
-    { "epoch": 0.8518971848225214, "grad_norm": 0.11949042230844498, "learning_rate": 6.4857379484922375e-06, "loss": 0.5688, "step": 232 },
-    { "epoch": 0.8555691554467564, "grad_norm": 0.08959188312292099, "learning_rate": 6.1722662861801614e-06, "loss": 0.3724, "step": 233 },
-    { "epoch": 0.8592411260709915, "grad_norm": 0.10583740472793579, "learning_rate": 5.866060083722624e-06, "loss": 0.6448, "step": 234 },
-    { "epoch": 0.8629130966952264, "grad_norm": 0.1368107944726944, "learning_rate": 5.5671701018750745e-06, "loss": 0.7383, "step": 235 },
-    { "epoch": 0.8665850673194615, "grad_norm": 0.07287576794624329, "learning_rate": 5.275645888560232e-06, "loss": 0.3452, "step": 236 },
-    { "epoch": 0.8702570379436965, "grad_norm": 0.07863111793994904, "learning_rate": 4.9915357706544485e-06, "loss": 0.3893, "step": 237 },
-    { "epoch": 0.8739290085679314, "grad_norm": 0.08062013983726501, "learning_rate": 4.714886845976429e-06, "loss": 0.2986, "step": 238 },
-    { "epoch": 0.8776009791921665, "grad_norm": 0.07851500064134598, "learning_rate": 4.445744975479626e-06, "loss": 0.4395, "step": 239 },
-    { "epoch": 0.8812729498164015, "grad_norm": 0.07426175475120544, "learning_rate": 4.184154775649768e-06, "loss": 0.2981, "step": 240 },
-    { "epoch": 0.8849449204406364, "grad_norm": 0.10778014361858368, "learning_rate": 3.9301596111086025e-06, "loss": 0.5546, "step": 241 },
-    { "epoch": 0.8886168910648715, "grad_norm": 0.23396329581737518, "learning_rate": 3.683801587425251e-06, "loss": 0.4488, "step": 242 },
-    { "epoch": 0.8922888616891065, "grad_norm": 0.24511796236038208, "learning_rate": 3.4451215441362263e-06, "loss": 0.5273, "step": 243 },
-    { "epoch": 0.8959608323133414, "grad_norm": 0.11425651609897614, "learning_rate": 3.2141590479753236e-06, "loss": 0.6317, "step": 244 },
-    { "epoch": 0.8996328029375765, "grad_norm": 0.2402946650981903, "learning_rate": 2.990952386314505e-06, "loss": 0.5092, "step": 245 },
-    { "epoch": 0.9033047735618115, "grad_norm": 0.12040656059980392, "learning_rate": 2.775538560816937e-06, "loss": 0.5734, "step": 246 },
-    { "epoch": 0.9069767441860465, "grad_norm": 0.08450046181678772, "learning_rate": 2.5679532813030596e-06, "loss": 0.535, "step": 247 },
-    { "epoch": 0.9106487148102815, "grad_norm": 0.08185907453298569, "learning_rate": 2.3682309598308747e-06, "loss": 0.5474, "step": 248 },
-    { "epoch": 0.9143206854345165, "grad_norm": 0.08737199753522873, "learning_rate": 2.1764047049913527e-06, "loss": 0.3862, "step": 249 },
-    { "epoch": 0.9179926560587516, "grad_norm": 0.0846424475312233, "learning_rate": 1.992506316419912e-06, "loss": 0.3631, "step": 250 },
-    { "epoch": 0.9216646266829865, "grad_norm": 0.0750948116183281, "learning_rate": 1.8165662795249172e-06, "loss": 0.2861, "step": 251 },
-    { "epoch": 0.9253365973072215, "grad_norm": 0.09620046615600586, "learning_rate": 1.6486137604339813e-06, "loss": 0.6115, "step": 252 },
-    { "epoch": 0.9290085679314566, "grad_norm": 0.13249874114990234, "learning_rate": 1.4886766011590448e-06, "loss": 0.6009, "step": 253 },
-    { "epoch": 0.9326805385556916, "grad_norm": 0.07768455892801285, "learning_rate": 1.3367813149808729e-06, "loss": 0.5147, "step": 254 },
-    { "epoch": 0.9363525091799265, "grad_norm": 0.09131491929292679, "learning_rate": 1.1929530820539269e-06, "loss": 0.3699, "step": 255 },
-    { "epoch": 0.9400244798041616, "grad_norm": 0.0973149985074997, "learning_rate": 1.0572157452321097e-06, "loss": 0.4612, "step": 256 },
-    { "epoch": 0.9436964504283966, "grad_norm": 0.08338738232851028, "learning_rate": 9.295918061163033e-07, "loss": 0.3989, "step": 257 },
-    { "epoch": 0.9473684210526315, "grad_norm": 0.12276645749807358, "learning_rate": 8.101024213241826e-07, "loss": 0.7361, "step": 258 },
-    { "epoch": 0.9510403916768666, "grad_norm": 0.07636597752571106, "learning_rate": 6.987673989830523e-07, "loss": 0.356, "step": 259 },
-    { "epoch": 0.9547123623011016, "grad_norm": 0.0995926633477211, "learning_rate": 5.956051954461472e-07, "loss": 0.5988, "step": 260 },
-    { "epoch": 0.9583843329253366, "grad_norm": 0.12855686247348785, "learning_rate": 5.006329122330899e-07, "loss": 0.6091, "step": 261 },
-    { "epoch": 0.9620563035495716, "grad_norm": 0.10323388129472733, "learning_rate": 4.1386629319492556e-07, "loss": 0.5726, "step": 262 },
-    { "epoch": 0.9657282741738066, "grad_norm": 0.07876481115818024, "learning_rate": 3.3531972190419815e-07, "loss": 0.2667, "step": 263 },
-    { "epoch": 0.9694002447980417, "grad_norm": 0.08382689952850342, "learning_rate": 2.6500621927054715e-07, "loss": 0.2876, "step": 264 },
-    { "epoch": 0.9730722154222766, "grad_norm": 0.08943753689527512, "learning_rate": 2.0293744138219495e-07, "loss": 0.3169, "step": 265 },
-    { "epoch": 0.9767441860465116, "grad_norm": 0.07517411559820175, "learning_rate": 1.4912367757366487e-07, "loss": 0.3369, "step": 266 },
-    { "epoch": 0.9804161566707467, "grad_norm": 0.08059985190629959, "learning_rate": 1.0357384872011766e-07, "loss": 0.2544, "step": 267 },
-    { "epoch": 0.9840881272949816, "grad_norm": 0.15402479469776154, "learning_rate": 6.629550575847354e-08, "loss": 0.9444, "step": 268 },
-    { "epoch": 0.9877600979192166, "grad_norm": 0.07759124785661697, "learning_rate": 3.729482843569665e-08, "loss": 0.5641, "step": 269 },
-    { "epoch": 0.9914320685434517, "grad_norm": 0.08798715472221375, "learning_rate": 1.6576624284347918e-08, "loss": 0.3192, "step": 270 },
-    { "epoch": 0.9951040391676866, "grad_norm": 0.09718140214681625, "learning_rate": 4.1443278256170226e-09, "loss": 0.6975, "step": 271 },
[ tail of the trainer_state.json diff; the removed "-" values here are truncated in the source view ]
    {
      "epoch": 0.9987760097919217,
-     "grad_norm": 0.
      "learning_rate": 0.0,
-     "loss": 0.
-     "step":
    },
    {
      "epoch": 0.9987760097919217,
-     "step":
-     "total_flos": 1.
-     "train_loss": 0.
-     "train_runtime":
-     "train_samples_per_second": 0.
-     "train_steps_per_second": 0.
    }
  ],
  "logging_steps": 1,
- "max_steps":
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
- "save_steps":
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
@@ -1939,7 +1463,7 @@
        "attributes": {}
      }
    },
-   "total_flos": 1.
    "train_batch_size": 1,
    "trial_name": null,
    "trial_params": null
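For readers inspecting these artifacts: `trainer_state.json` is plain JSON, and the `learning_rate` values in the added entries below are consistent with linear warmup to the 1e-4 peak over the first 11 steps, followed by cosine decay to zero at step 204 (inferred from the logged values; the commit itself does not state the schedule). A minimal sketch, assuming a local checkout with `adapter/trainer_state.json` present, that replays this inferred schedule against the log:

```python
import json
import math

# Schedule parameters inferred from the logged values, not stated in the commit:
# lr ramps linearly to 1e-4 over steps 1-11, then follows one cosine cycle to 0 at step 204.
PEAK_LR, WARMUP, TOTAL = 1e-4, 11, 204

def expected_lr(step: int) -> float:
    """Linear warmup followed by single-cycle cosine decay to zero."""
    if step <= WARMUP:
        return PEAK_LR * step / WARMUP
    progress = (step - WARMUP) / (TOTAL - WARMUP)
    return 0.5 * PEAK_LR * (1.0 + math.cos(math.pi * progress))

with open("adapter/trainer_state.json") as f:  # path assumed from this repo's layout
    state = json.load(f)

for entry in state["log_history"]:
    if "loss" in entry:  # the final summary entry logs train_loss/train_runtime instead
        print(f'step {entry["step"]:4d}  '
              f'lr {entry["learning_rate"]:.3e} (expected {expected_lr(entry["step"]):.3e})  '
              f'loss {entry["loss"]:.4f}')
```

Against the entries below, the expected column tracks the logged learning rates to several significant digits (e.g. step 150: logged 1.81037e-05 vs. expected ≈ 1.81035e-05), which is how the 11-step warmup length was inferred.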
[ opening hunk of the trainer_state.json diff, rendered after the tail in the source view ]
    "best_model_checkpoint": null,
    "epoch": 0.9987760097919217,
    "eval_steps": 500,
+   "global_step": 204,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
+     { "epoch": 0.004895960832313341, "grad_norm": 0.03658522292971611, "learning_rate": 9.090909090909091e-06, "loss": 0.4365, "step": 1 },
+     { "epoch": 0.009791921664626682, "grad_norm": 0.035473328083753586, "learning_rate": 1.8181818181818182e-05, "loss": 0.6677, "step": 2 },
+     { "epoch": 0.014687882496940025, "grad_norm": 0.030256139114499092, "learning_rate": 2.7272727272727273e-05, "loss": 0.4084, "step": 3 },
+     { "epoch": 0.019583843329253364, "grad_norm": 0.04286854714155197, "learning_rate": 3.6363636363636364e-05, "loss": 0.3422, "step": 4 },
+     { "epoch": 0.02447980416156671, "grad_norm": 0.06271344423294067, "learning_rate": 4.545454545454546e-05, "loss": 0.5332, "step": 5 },
+     { "epoch": 0.02937576499388005, "grad_norm": 0.09929833561182022, "learning_rate": 5.4545454545454546e-05, "loss": 0.7114, "step": 6 },
+     { "epoch": 0.03427172582619339, "grad_norm": 0.08789193630218506, "learning_rate": 6.363636363636364e-05, "loss": 0.5457, "step": 7 },
+     { "epoch": 0.03916768665850673, "grad_norm": 0.0518026277422905, "learning_rate": 7.272727272727273e-05, "loss": 0.3846, "step": 8 },
+     { "epoch": 0.044063647490820076, "grad_norm": 0.042549822479486465, "learning_rate": 8.181818181818183e-05, "loss": 0.5076, "step": 9 },
+     { "epoch": 0.04895960832313342, "grad_norm": 0.051444098353385925, "learning_rate": 9.090909090909092e-05, "loss": 0.4675, "step": 10 },
+     { "epoch": 0.05385556915544676, "grad_norm": 0.08935613930225372, "learning_rate": 0.0001, "loss": 0.3412, "step": 11 },
+     { "epoch": 0.0587515299877601, "grad_norm": 0.10168597102165222, "learning_rate": 9.99933760728612e-05, "loss": 0.4016, "step": 12 },
+     { "epoch": 0.06364749082007344, "grad_norm": 0.1793641448020935, "learning_rate": 9.997350604650123e-05, "loss": 0.3738, "step": 13 },
+     { "epoch": 0.06854345165238677, "grad_norm": 0.24378074705600739, "learning_rate": 9.994039518562432e-05, "loss": 0.5781, "step": 14 },
+     { "epoch": 0.07343941248470012, "grad_norm": 0.08958296477794647, "learning_rate": 9.989405226318772e-05, "loss": 0.4275, "step": 15 },
+     { "epoch": 0.07833537331701346, "grad_norm": 0.16461962461471558, "learning_rate": 9.983448955807708e-05, "loss": 0.4416, "step": 16 },
+     { "epoch": 0.0832313341493268, "grad_norm": 0.2743546664714813, "learning_rate": 9.976172285185314e-05, "loss": 0.5598, "step": 17 },
+     { "epoch": 0.08812729498164015, "grad_norm": 0.05548033118247986, "learning_rate": 9.967577142457032e-05, "loss": 0.3509, "step": 18 },
+     { "epoch": 0.09302325581395349, "grad_norm": 0.10299059748649597, "learning_rate": 9.957665804966829e-05, "loss": 0.7287, "step": 19 },
+     { "epoch": 0.09791921664626684, "grad_norm": 0.1017536073923111, "learning_rate": 9.946440898793801e-05, "loss": 0.638, "step": 20 },
+     { "epoch": 0.10281517747858017, "grad_norm": 0.09810858964920044, "learning_rate": 9.933905398056372e-05, "loss": 0.4959, "step": 21 },
+     { "epoch": 0.10771113831089352, "grad_norm": 0.07071840763092041, "learning_rate": 9.920062624124282e-05, "loss": 0.5835, "step": 22 },
+     { "epoch": 0.11260709914320685, "grad_norm": 0.23103618621826172, "learning_rate": 9.904916244738571e-05, "loss": 0.7785, "step": 23 },
+     { "epoch": 0.1175030599755202, "grad_norm": 0.11760212481021881, "learning_rate": 9.888470273039775e-05, "loss": 0.3422, "step": 24 },
+     { "epoch": 0.12239902080783353, "grad_norm": 0.07370053976774216, "learning_rate": 9.870729066504629e-05, "loss": 0.3107, "step": 25 },
+     { "epoch": 0.12729498164014688, "grad_norm": 0.07285131514072418, "learning_rate": 9.851697325791505e-05, "loss": 0.4473, "step": 26 },
+     { "epoch": 0.13219094247246022, "grad_norm": 0.05355285108089447, "learning_rate": 9.831380093494957e-05, "loss": 0.3368, "step": 27 },
+     { "epoch": 0.13708690330477355, "grad_norm": 0.07507691532373428, "learning_rate": 9.809782752809644e-05, "loss": 0.3709, "step": 28 },
+     { "epoch": 0.1419828641370869, "grad_norm": 0.055260878056287766, "learning_rate": 9.786911026104007e-05, "loss": 0.4986, "step": 29 },
+     { "epoch": 0.14687882496940025, "grad_norm": 0.16646364331245422, "learning_rate": 9.762770973404094e-05, "loss": 0.6252, "step": 30 },
+     { "epoch": 0.15177478580171358, "grad_norm": 0.22445577383041382, "learning_rate": 9.737368990787916e-05, "loss": 0.4772, "step": 31 },
+     { "epoch": 0.15667074663402691, "grad_norm": 0.22849321365356445, "learning_rate": 9.710711808690754e-05, "loss": 0.381, "step": 32 },
+     { "epoch": 0.16156670746634028, "grad_norm": 0.05855480954051018, "learning_rate": 9.682806490121885e-05, "loss": 0.3429, "step": 33 },
+     { "epoch": 0.1664626682986536, "grad_norm": 0.06263675540685654, "learning_rate": 9.653660428793188e-05, "loss": 0.3927, "step": 34 },
+     { "epoch": 0.17135862913096694, "grad_norm": 0.277313232421875, "learning_rate": 9.623281347160127e-05, "loss": 0.9417, "step": 35 },
+     { "epoch": 0.1762545899632803, "grad_norm": 0.07143048942089081, "learning_rate": 9.591677294375636e-05, "loss": 0.4428, "step": 36 },
+     { "epoch": 0.18115055079559364, "grad_norm": 0.07706379145383835, "learning_rate": 9.558856644157432e-05, "loss": 0.3614, "step": 37 },
+     { "epoch": 0.18604651162790697, "grad_norm": 0.25005754828453064, "learning_rate": 9.52482809256934e-05, "loss": 0.6474, "step": 38 },
+     { "epoch": 0.1909424724602203, "grad_norm": 0.06337836384773254, "learning_rate": 9.489600655717217e-05, "loss": 0.4504, "step": 39 },
+     { "epoch": 0.19583843329253367, "grad_norm": 0.21187523007392883, "learning_rate": 9.453183667360062e-05, "loss": 0.6336, "step": 40 },
+     { "epoch": 0.200734394124847, "grad_norm": 0.1760697066783905, "learning_rate": 9.415586776436973e-05, "loss": 0.5665, "step": 41 },
+     { "epoch": 0.20563035495716034, "grad_norm": 0.10999207943677902, "learning_rate": 9.376819944510598e-05, "loss": 0.3929, "step": 42 },
+     { "epoch": 0.21052631578947367, "grad_norm": 0.09944123774766922, "learning_rate": 9.336893443127738e-05, "loss": 0.4195, "step": 43 },
+     { "epoch": 0.21542227662178703, "grad_norm": 0.1161671131849289, "learning_rate": 9.295817851097837e-05, "loss": 0.4643, "step": 44 },
+     { "epoch": 0.22031823745410037, "grad_norm": 0.11634702235460281, "learning_rate": 9.253604051690046e-05, "loss": 0.5375, "step": 45 },
+     { "epoch": 0.2252141982864137, "grad_norm": 0.07312487810850143, "learning_rate": 9.210263229749626e-05, "loss": 0.3291, "step": 46 },
+     { "epoch": 0.23011015911872704, "grad_norm": 0.08545304834842682, "learning_rate": 9.165806868734444e-05, "loss": 0.5543, "step": 47 },
+     { "epoch": 0.2350061199510404, "grad_norm": 0.15719719231128693, "learning_rate": 9.120246747672347e-05, "loss": 0.7045, "step": 48 },
+     { "epoch": 0.23990208078335373, "grad_norm": 0.11078042536973953, "learning_rate": 9.073594938040231e-05, "loss": 0.572, "step": 49 },
+     { "epoch": 0.24479804161566707, "grad_norm": 0.1860225647687912, "learning_rate": 9.025863800565613e-05, "loss": 0.7102, "step": 50 },
+     { "epoch": 0.24969400244798043, "grad_norm": 0.11442095041275024, "learning_rate": 8.977065981951566e-05, "loss": 0.5135, "step": 51 },
+     { "epoch": 0.25458996328029376, "grad_norm": 0.17788943648338318, "learning_rate": 8.927214411525895e-05, "loss": 0.4911, "step": 52 },
+     { "epoch": 0.2594859241126071, "grad_norm": 0.24241389334201813, "learning_rate": 8.876322297815405e-05, "loss": 0.6657, "step": 53 },
+     { "epoch": 0.26438188494492043, "grad_norm": 0.07881529629230499, "learning_rate": 8.824403125046225e-05, "loss": 0.4495, "step": 54 },
+     { "epoch": 0.2692778457772338, "grad_norm": 0.12150023877620697, "learning_rate": 8.771470649571056e-05, "loss": 0.3651, "step": 55 },
+     { "epoch": 0.2741738066095471, "grad_norm": 0.07412393391132355, "learning_rate": 8.717538896224332e-05, "loss": 0.3277, "step": 56 },
+     { "epoch": 0.27906976744186046, "grad_norm": 0.06195759400725365, "learning_rate": 8.662622154606237e-05, "loss": 0.361, "step": 57 },
+     { "epoch": 0.2839657282741738, "grad_norm": 0.06260855495929718, "learning_rate": 8.606734975296578e-05, "loss": 0.4541, "step": 58 },
+     { "epoch": 0.28886168910648713, "grad_norm": 0.22213032841682434, "learning_rate": 8.549892165999505e-05, "loss": 0.4133, "step": 59 },
+     { "epoch": 0.2937576499388005, "grad_norm": 0.08584744483232498, "learning_rate": 8.492108787620105e-05, "loss": 0.3445, "step": 60 },
+     { "epoch": 0.29865361077111385, "grad_norm": 0.07675541192293167, "learning_rate": 8.433400150273906e-05, "loss": 0.4518, "step": 61 },
+     { "epoch": 0.30354957160342716, "grad_norm": 0.07371577620506287, "learning_rate": 8.373781809230355e-05, "loss": 0.308, "step": 62 },
+     { "epoch": 0.3084455324357405, "grad_norm": 0.10943123698234558, "learning_rate": 8.313269560791342e-05, "loss": 0.7011, "step": 63 },
+     { "epoch": 0.31334149326805383, "grad_norm": 0.07427150011062622, "learning_rate": 8.251879438105854e-05, "loss": 0.3193, "step": 64 },
+     { "epoch": 0.3182374541003672, "grad_norm": 0.06212422996759415, "learning_rate": 8.189627706921877e-05, "loss": 0.3383, "step": 65 },
+     { "epoch": 0.32313341493268055, "grad_norm": 0.1618916094303131, "learning_rate": 8.126530861276677e-05, "loss": 0.5653, "step": 66 },
+     { "epoch": 0.32802937576499386, "grad_norm": 0.06333769857883453, "learning_rate": 8.062605619126584e-05, "loss": 0.3395, "step": 67 },
+     { "epoch": 0.3329253365973072, "grad_norm": 0.16169676184654236, "learning_rate": 7.997868917917453e-05, "loss": 0.7753, "step": 68 },
+     { "epoch": 0.3378212974296206, "grad_norm": 0.06635843217372894, "learning_rate": 7.932337910096961e-05, "loss": 0.3176, "step": 69 },
+     { "epoch": 0.3427172582619339, "grad_norm": 0.16557282209396362, "learning_rate": 7.866029958569956e-05, "loss": 0.4603, "step": 70 },
+     { "epoch": 0.34761321909424725, "grad_norm": 0.08522031456232071, "learning_rate": 7.798962632098024e-05, "loss": 0.5636, "step": 71 },
+     { "epoch": 0.3525091799265606, "grad_norm": 0.05583783611655235, "learning_rate": 7.73115370064452e-05, "loss": 0.2692, "step": 72 },
+     { "epoch": 0.3574051407588739, "grad_norm": 0.08181650191545486, "learning_rate": 7.6626211306663e-05, "loss": 0.6908, "step": 73 },
+     { "epoch": 0.3623011015911873, "grad_norm": 0.07728707045316696, "learning_rate": 7.59338308035337e-05, "loss": 0.4046, "step": 74 },
+     { "epoch": 0.3671970624235006, "grad_norm": 0.09214276075363159, "learning_rate": 7.523457894817745e-05, "loss": 0.816, "step": 75 },
+     { "epoch": 0.37209302325581395, "grad_norm": 0.07536876946687698, "learning_rate": 7.452864101232798e-05, "loss": 0.334, "step": 76 },
+     { "epoch": 0.3769889840881273, "grad_norm": 0.09890823811292648, "learning_rate": 7.381620403924333e-05, "loss": 0.4769, "step": 77 },
+     { "epoch": 0.3818849449204406, "grad_norm": 0.09237520396709442, "learning_rate": 7.30974567941475e-05, "loss": 0.4149, "step": 78 },
+     { "epoch": 0.386780905752754, "grad_norm": 0.14629222452640533, "learning_rate": 7.237258971421587e-05, "loss": 0.9113, "step": 79 },
+     { "epoch": 0.39167686658506734, "grad_norm": 0.0960475504398346, "learning_rate": 7.164179485811727e-05, "loss": 0.4643, "step": 80 },
+     { "epoch": 0.39657282741738065, "grad_norm": 0.07956342399120331, "learning_rate": 7.090526585512696e-05, "loss": 0.3902, "step": 81 },
+     { "epoch": 0.401468788249694, "grad_norm": 0.08580750226974487, "learning_rate": 7.016319785382296e-05, "loss": 0.5235, "step": 82 },
+     { "epoch": 0.40636474908200737, "grad_norm": 0.12661615014076233, "learning_rate": 6.941578747038023e-05, "loss": 0.4481, "step": 83 },
+     { "epoch": 0.4112607099143207, "grad_norm": 0.0649070218205452, "learning_rate": 6.866323273647563e-05, "loss": 0.4189, "step": 84 },
+     { "epoch": 0.41615667074663404, "grad_norm": 0.1020541861653328, "learning_rate": 6.79057330468182e-05, "loss": 0.4533, "step": 85 },
+     { "epoch": 0.42105263157894735, "grad_norm": 0.10127013921737671, "learning_rate": 6.7143489106318e-05, "loss": 0.522, "step": 86 },
+     { "epoch": 0.4259485924112607, "grad_norm": 0.14281871914863586, "learning_rate": 6.637670287690799e-05, "loss": 0.4772, "step": 87 },
+     { "epoch": 0.43084455324357407, "grad_norm": 0.065117247402668, "learning_rate": 6.560557752403277e-05, "loss": 0.5043, "step": 88 },
+     { "epoch": 0.4357405140758874, "grad_norm": 0.12292537093162537, "learning_rate": 6.483031736281843e-05, "loss": 0.4375, "step": 89 },
+     { "epoch": 0.44063647490820074, "grad_norm": 0.06215154007077217, "learning_rate": 6.40511278039378e-05, "loss": 0.3519, "step": 90 },
+     { "epoch": 0.4455324357405141, "grad_norm": 0.070677250623703, "learning_rate": 6.326821529918553e-05, "loss": 0.3407, "step": 91 },
+     { "epoch": 0.4504283965728274, "grad_norm": 0.06239693611860275, "learning_rate": 6.248178728677711e-05, "loss": 0.3799, "step": 92 },
+     { "epoch": 0.45532435740514077, "grad_norm": 0.07950470596551895, "learning_rate": 6.16920521363867e-05, "loss": 0.3063, "step": 93 },
+     { "epoch": 0.4602203182374541, "grad_norm": 0.06621692329645157, "learning_rate": 6.089921909393812e-05, "loss": 0.4386, "step": 94 },
+     { "epoch": 0.46511627906976744, "grad_norm": 0.3681151270866394, "learning_rate": 6.0103498226163603e-05, "loss": 0.5919, "step": 95 },
+     { "epoch": 0.4700122399020808, "grad_norm": 0.20958742499351501, "learning_rate": 5.93051003649452e-05, "loss": 0.4636, "step": 96 },
+     { "epoch": 0.4749082007343941, "grad_norm": 0.09456545114517212, "learning_rate": 5.850423705145334e-05, "loss": 0.4564, "step": 97 },
+     { "epoch": 0.47980416156670747, "grad_norm": 0.06202203407883644, "learning_rate": 5.770112048009747e-05, "loss": 0.3652, "step": 98 },
+     { "epoch": 0.4847001223990208, "grad_norm": 0.11682210117578506, "learning_rate": 5.68959634423037e-05, "loss": 0.5179, "step": 99 },
+     { "epoch": 0.48959608323133413, "grad_norm": 0.11552639305591583, "learning_rate": 5.60889792701342e-05, "loss": 0.467, "step": 100 },
+     { "epoch": 0.4944920440636475, "grad_norm": 0.053526621311903, "learning_rate": 5.52803817797633e-05, "loss": 0.2341, "step": 101 },
+     { "epoch": 0.49938800489596086, "grad_norm": 0.11124971508979797, "learning_rate": 5.4470385214825416e-05, "loss": 0.5641, "step": 102 },
+     { "epoch": 0.5042839657282742, "grad_norm": 0.1113283783197403, "learning_rate": 5.365920418964973e-05, "loss": 0.438, "step": 103 },
+     { "epoch": 0.5091799265605875, "grad_norm": 0.0814870297908783, "learning_rate": 5.28470536323965e-05, "loss": 0.4562, "step": 104 },
+     { "epoch": 0.5140758873929009, "grad_norm": 0.09781132638454437, "learning_rate": 5.2034148728110424e-05, "loss": 0.5587, "step": 105 },
+     { "epoch": 0.5189718482252142, "grad_norm": 0.11908842623233795, "learning_rate": 5.1220704861705774e-05, "loss": 0.7554, "step": 106 },
+     { "epoch": 0.5238678090575275, "grad_norm": 0.06143520399928093, "learning_rate": 5.0406937560898646e-05, "loss": 0.2544, "step": 107 },
+     { "epoch": 0.5287637698898409, "grad_norm": 0.08653257042169571, "learning_rate": 4.9593062439101365e-05, "loss": 0.5423, "step": 108 },
+     { "epoch": 0.5336597307221542, "grad_norm": 0.07373999804258347, "learning_rate": 4.877929513829424e-05, "loss": 0.3577, "step": 109 },
+     { "epoch": 0.5385556915544676, "grad_norm": 0.2530810236930847, "learning_rate": 4.796585127188958e-05, "loss": 0.454, "step": 110 },
+     { "epoch": 0.543451652386781, "grad_norm": 0.0999133363366127, "learning_rate": 4.715294636760352e-05, "loss": 0.4601, "step": 111 },
+     { "epoch": 0.5483476132190942, "grad_norm": 0.07089534401893616, "learning_rate": 4.634079581035029e-05, "loss": 0.2958, "step": 112 },
+     { "epoch": 0.5532435740514076, "grad_norm": 0.15452376008033752, "learning_rate": 4.55296147851746e-05, "loss": 0.5048, "step": 113 },
+     { "epoch": 0.5581395348837209, "grad_norm": 0.12181607633829117, "learning_rate": 4.471961822023671e-05, "loss": 0.5176, "step": 114 },
+     { "epoch": 0.5630354957160343, "grad_norm": 0.07680846750736237, "learning_rate": 4.391102072986581e-05, "loss": 0.2968, "step": 115 },
+     { "epoch": 0.5679314565483476, "grad_norm": 0.071733757853508, "learning_rate": 4.3104036557696295e-05, "loss": 0.6076, "step": 116 },
+     { "epoch": 0.572827417380661, "grad_norm": 0.07903078943490982, "learning_rate": 4.229887951990255e-05, "loss": 0.5459, "step": 117 },
+     { "epoch": 0.5777233782129743, "grad_norm": 0.09677067399024963, "learning_rate": 4.149576294854668e-05, "loss": 0.44, "step": 118 },
+     { "epoch": 0.5826193390452876, "grad_norm": 0.09369610249996185, "learning_rate": 4.069489963505482e-05, "loss": 0.6845, "step": 119 },
+     { "epoch": 0.587515299877601, "grad_norm": 0.06507845222949982, "learning_rate": 3.98965017738364e-05, "loss": 0.3054, "step": 120 },
+     { "epoch": 0.5924112607099143, "grad_norm": 0.1210411936044693, "learning_rate": 3.9100780906061896e-05, "loss": 0.4176, "step": 121 },
+     { "epoch": 0.5973072215422277, "grad_norm": 0.07214700430631638, "learning_rate": 3.83079478636133e-05, "loss": 0.4256, "step": 122 },
+     { "epoch": 0.602203182374541, "grad_norm": 0.07553218305110931, "learning_rate": 3.7518212713222906e-05, "loss": 0.5853, "step": 123 },
+     { "epoch": 0.6070991432068543, "grad_norm": 0.08783773332834244, "learning_rate": 3.673178470081448e-05, "loss": 0.5636, "step": 124 },
+     { "epoch": 0.6119951040391677, "grad_norm": 0.07783231139183044, "learning_rate": 3.594887219606221e-05, "loss": 0.6704, "step": 125 },
+     { "epoch": 0.616891064871481, "grad_norm": 0.14530447125434875, "learning_rate": 3.516968263718159e-05, "loss": 0.4881, "step": 126 },
+     { "epoch": 0.6217870257037944, "grad_norm": 0.23401491343975067, "learning_rate": 3.439442247596724e-05, "loss": 0.4918, "step": 127 },
+     { "epoch": 0.6266829865361077, "grad_norm": 0.07801464200019836, "learning_rate": 3.3623297123092006e-05, "loss": 0.5203, "step": 128 },
+     { "epoch": 0.631578947368421, "grad_norm": 0.1613989621400833, "learning_rate": 3.285651089368202e-05, "loss": 0.7026, "step": 129 },
+     { "epoch": 0.6364749082007344, "grad_norm": 0.12846659123897552, "learning_rate": 3.209426695318182e-05, "loss": 0.4857, "step": 130 },
+     { "epoch": 0.6413708690330477, "grad_norm": 0.11735550314188004, "learning_rate": 3.133676726352438e-05, "loss": 0.542, "step": 131 },
+     { "epoch": 0.6462668298653611, "grad_norm": 0.09229713678359985, "learning_rate": 3.0584212529619775e-05, "loss": 0.4807, "step": 132 },
+     { "epoch": 0.6511627906976745, "grad_norm": 0.1247001513838768, "learning_rate": 2.9836802146177034e-05, "loss": 0.5123, "step": 133 },
+     { "epoch": 0.6560587515299877, "grad_norm": 0.12723222374916077, "learning_rate": 2.9094734144873036e-05, "loss": 0.7619, "step": 134 },
+     { "epoch": 0.6609547123623011, "grad_norm": 0.06785371154546738, "learning_rate": 2.835820514188273e-05, "loss": 0.488, "step": 135 },
+     { "epoch": 0.6658506731946144, "grad_norm": 0.07913102209568024, "learning_rate": 2.7627410285784163e-05, "loss": 0.5913, "step": 136 },
+     { "epoch": 0.6707466340269278, "grad_norm": 0.10038694739341736, "learning_rate": 2.6902543205852492e-05, "loss": 0.6774, "step": 137 },
+     { "epoch": 0.6756425948592412, "grad_norm": 0.06108963489532471, "learning_rate": 2.618379596075668e-05, "loss": 0.304, "step": 138 },
+     { "epoch": 0.6805385556915544, "grad_norm": 0.06344286352396011, "learning_rate": 2.5471358987672017e-05, "loss": 0.394, "step": 139 },
+     { "epoch": 0.6854345165238678, "grad_norm": 0.1912202686071396, "learning_rate": 2.476542105182254e-05, "loss": 0.4608, "step": 140 },
+     { "epoch": 0.6903304773561811, "grad_norm": 0.0918666198849678, "learning_rate": 2.4066169196466326e-05, "loss": 0.7695, "step": 141 },
+     { "epoch": 0.6952264381884945, "grad_norm": 0.09997723251581192, "learning_rate": 2.3373788693337024e-05, "loss": 0.5197, "step": 142 },
+     { "epoch": 0.7001223990208079, "grad_norm": 0.08384369313716888, "learning_rate": 2.268846299355481e-05, "loss": 0.4842, "step": 143 },
+     { "epoch": 0.7050183598531212, "grad_norm": 0.14654509723186493, "learning_rate": 2.2010373679019776e-05, "loss": 0.4083, "step": 144 },
+     { "epoch": 0.7099143206854345, "grad_norm": 0.23128315806388855, "learning_rate": 2.133970041430044e-05, "loss": 0.6992, "step": 145 },
+     { "epoch": 0.7148102815177478, "grad_norm": 0.09588706493377686, "learning_rate": 2.067662089903039e-05, "loss": 0.5621, "step": 146 },
+     { "epoch": 0.7197062423500612, "grad_norm": 0.10101126879453659, "learning_rate": 2.002131082082549e-05, "loss": 0.4135, "step": 147 },
+     { "epoch": 0.7246022031823746, "grad_norm": 0.15346215665340424, "learning_rate": 1.937394380873418e-05, "loss": 0.5933, "step": 148 },
+     { "epoch": 0.7294981640146879, "grad_norm": 0.07918453961610794, "learning_rate": 1.873469138723325e-05, "loss": 0.3081, "step": 149 },
+     { "epoch": 0.7343941248470012, "grad_norm": 0.0649547427892685, "learning_rate": 1.8103722930781247e-05, "loss": 0.407, "step": 150 },
+     { "epoch": 0.7392900856793145, "grad_norm": 0.07419539242982864,
|
| 1064 |
+
"learning_rate": 1.748120561894147e-05,
|
| 1065 |
+
"loss": 0.7313,
|
| 1066 |
"step": 151
|
| 1067 |
},
|
| 1068 |
{
|
| 1069 |
+
"epoch": 0.7441860465116279,
|
| 1070 |
+
"grad_norm": 0.09094121307134628,
|
| 1071 |
+
"learning_rate": 1.6867304392086575e-05,
|
| 1072 |
+
"loss": 0.3871,
|
| 1073 |
"step": 152
|
| 1074 |
},
|
| 1075 |
{
|
| 1076 |
+
"epoch": 0.7490820073439413,
|
| 1077 |
+
"grad_norm": 0.10387779027223587,
|
| 1078 |
+
"learning_rate": 1.6262181907696454e-05,
|
| 1079 |
+
"loss": 0.5026,
|
| 1080 |
"step": 153
|
| 1081 |
},
|
| 1082 |
{
|
| 1083 |
+
"epoch": 0.7539779681762546,
|
| 1084 |
+
"grad_norm": 0.22015878558158875,
|
| 1085 |
+
"learning_rate": 1.5665998497260958e-05,
|
| 1086 |
+
"loss": 0.4817,
|
| 1087 |
"step": 154
|
| 1088 |
},
|
| 1089 |
{
|
| 1090 |
+
"epoch": 0.758873929008568,
|
| 1091 |
+
"grad_norm": 0.06719034910202026,
|
| 1092 |
+
"learning_rate": 1.5078912123798961e-05,
|
| 1093 |
+
"loss": 0.5186,
|
| 1094 |
"step": 155
|
| 1095 |
},
|
| 1096 |
{
|
| 1097 |
+
"epoch": 0.7637698898408812,
|
| 1098 |
+
"grad_norm": 0.07599823921918869,
|
| 1099 |
+
"learning_rate": 1.4501078340004953e-05,
|
| 1100 |
+
"loss": 0.4275,
|
| 1101 |
"step": 156
|
| 1102 |
},
|
| 1103 |
{
|
| 1104 |
+
"epoch": 0.7686658506731946,
|
| 1105 |
+
"grad_norm": 0.1412460058927536,
|
| 1106 |
+
"learning_rate": 1.3932650247034218e-05,
|
| 1107 |
+
"loss": 0.626,
|
| 1108 |
"step": 157
|
| 1109 |
},
|
| 1110 |
{
|
| 1111 |
+
"epoch": 0.773561811505508,
|
| 1112 |
+
"grad_norm": 0.1420402228832245,
|
| 1113 |
+
"learning_rate": 1.337377845393763e-05,
|
| 1114 |
+
"loss": 0.5193,
|
| 1115 |
"step": 158
|
| 1116 |
},
|
| 1117 |
{
|
| 1118 |
+
"epoch": 0.7784577723378213,
|
| 1119 |
+
"grad_norm": 0.09665469825267792,
|
| 1120 |
+
"learning_rate": 1.2824611037756684e-05,
|
| 1121 |
+
"loss": 0.5848,
|
| 1122 |
"step": 159
|
| 1123 |
},
|
| 1124 |
{
|
| 1125 |
+
"epoch": 0.7833537331701347,
|
| 1126 |
+
"grad_norm": 0.06045060604810715,
|
| 1127 |
+
"learning_rate": 1.2285293504289447e-05,
|
| 1128 |
+
"loss": 0.2627,
|
| 1129 |
"step": 160
|
| 1130 |
},
|
| 1131 |
{
|
| 1132 |
+
"epoch": 0.7882496940024479,
|
| 1133 |
+
"grad_norm": 0.08824898302555084,
|
| 1134 |
+
"learning_rate": 1.1755968749537754e-05,
|
| 1135 |
+
"loss": 0.5949,
|
| 1136 |
"step": 161
|
| 1137 |
},
|
| 1138 |
{
|
| 1139 |
+
"epoch": 0.7931456548347613,
|
| 1140 |
+
"grad_norm": 0.13809214532375336,
|
| 1141 |
+
"learning_rate": 1.1236777021845956e-05,
|
| 1142 |
+
"loss": 0.7397,
|
| 1143 |
"step": 162
|
| 1144 |
},
|
| 1145 |
{
|
| 1146 |
+
"epoch": 0.7980416156670747,
|
| 1147 |
+
"grad_norm": 0.12224618345499039,
|
| 1148 |
+
"learning_rate": 1.0727855884741056e-05,
|
| 1149 |
+
"loss": 0.553,
|
| 1150 |
"step": 163
|
| 1151 |
},
|
| 1152 |
{
|
| 1153 |
+
"epoch": 0.802937576499388,
|
| 1154 |
+
"grad_norm": 0.07628399133682251,
|
| 1155 |
+
"learning_rate": 1.022934018048432e-05,
|
| 1156 |
+
"loss": 0.3447,
|
| 1157 |
"step": 164
|
| 1158 |
},
|
| 1159 |
{
|
| 1160 |
+
"epoch": 0.8078335373317014,
|
| 1161 |
+
"grad_norm": 0.06734273582696915,
|
| 1162 |
+
"learning_rate": 9.741361994343867e-06,
|
| 1163 |
+
"loss": 0.7473,
|
| 1164 |
"step": 165
|
| 1165 |
},
|
| 1166 |
{
|
| 1167 |
+
"epoch": 0.8127294981640147,
|
| 1168 |
+
"grad_norm": 0.0891406238079071,
|
| 1169 |
+
"learning_rate": 9.264050619597697e-06,
|
| 1170 |
+
"loss": 0.5008,
|
| 1171 |
"step": 166
|
| 1172 |
},
|
| 1173 |
{
|
| 1174 |
+
"epoch": 0.817625458996328,
|
| 1175 |
+
"grad_norm": 0.08535400032997131,
|
| 1176 |
+
"learning_rate": 8.797532523276542e-06,
|
| 1177 |
+
"loss": 0.4171,
|
| 1178 |
"step": 167
|
| 1179 |
},
|
| 1180 |
{
|
| 1181 |
+
"epoch": 0.8225214198286414,
|
| 1182 |
+
"grad_norm": 0.3191221058368683,
|
| 1183 |
+
"learning_rate": 8.341931312655582e-06,
|
| 1184 |
+
"loss": 0.7633,
|
| 1185 |
"step": 168
|
| 1186 |
},
|
| 1187 |
{
|
| 1188 |
+
"epoch": 0.8274173806609547,
|
| 1189 |
+
"grad_norm": 0.11288396269083023,
|
| 1190 |
+
"learning_rate": 7.897367702503756e-06,
|
| 1191 |
+
"loss": 0.4054,
|
| 1192 |
"step": 169
|
| 1193 |
},
|
| 1194 |
{
|
| 1195 |
+
"epoch": 0.8323133414932681,
|
| 1196 |
+
"grad_norm": 0.09110561013221741,
|
| 1197 |
+
"learning_rate": 7.463959483099547e-06,
|
| 1198 |
+
"loss": 0.4642,
|
| 1199 |
"step": 170
|
| 1200 |
},
|
| 1201 |
{
|
| 1202 |
+
"epoch": 0.8372093023255814,
|
| 1203 |
+
"grad_norm": 0.08345432579517365,
|
| 1204 |
+
"learning_rate": 7.041821489021639e-06,
|
| 1205 |
+
"loss": 0.3769,
|
| 1206 |
"step": 171
|
| 1207 |
},
|
| 1208 |
{
|
| 1209 |
+
"epoch": 0.8421052631578947,
|
| 1210 |
+
"grad_norm": 0.19082055985927582,
|
| 1211 |
+
"learning_rate": 6.631065568722633e-06,
|
| 1212 |
+
"loss": 0.4487,
|
| 1213 |
"step": 172
|
| 1214 |
},
|
| 1215 |
{
|
| 1216 |
+
"epoch": 0.847001223990208,
|
| 1217 |
+
"grad_norm": 0.06951060146093369,
|
| 1218 |
+
"learning_rate": 6.231800554894029e-06,
|
| 1219 |
+
"loss": 0.2915,
|
| 1220 |
"step": 173
|
| 1221 |
},
|
| 1222 |
{
|
| 1223 |
+
"epoch": 0.8518971848225214,
|
| 1224 |
+
"grad_norm": 0.08803869783878326,
|
| 1225 |
+
"learning_rate": 5.844132235630273e-06,
|
| 1226 |
+
"loss": 0.5065,
|
| 1227 |
"step": 174
|
| 1228 |
},
|
| 1229 |
{
|
| 1230 |
+
"epoch": 0.8567931456548348,
|
| 1231 |
+
"grad_norm": 0.06576603651046753,
|
| 1232 |
+
"learning_rate": 5.468163326399389e-06,
|
| 1233 |
+
"loss": 0.4307,
|
| 1234 |
"step": 175
|
| 1235 |
},
|
| 1236 |
{
|
| 1237 |
+
"epoch": 0.8616891064871481,
|
| 1238 |
+
"grad_norm": 0.11544258892536163,
|
| 1239 |
+
"learning_rate": 5.103993442827831e-06,
|
| 1240 |
+
"loss": 0.6356,
|
| 1241 |
"step": 176
|
| 1242 |
},
|
| 1243 |
{
|
| 1244 |
+
"epoch": 0.8665850673194615,
|
| 1245 |
+
"grad_norm": 0.08324428647756577,
|
| 1246 |
+
"learning_rate": 4.751719074306604e-06,
|
| 1247 |
+
"loss": 0.5426,
|
| 1248 |
"step": 177
|
| 1249 |
},
|
| 1250 |
{
|
| 1251 |
+
"epoch": 0.8714810281517748,
|
| 1252 |
+
"grad_norm": 0.0815030187368393,
|
| 1253 |
+
"learning_rate": 4.411433558425698e-06,
|
| 1254 |
+
"loss": 0.4278,
|
| 1255 |
"step": 178
|
| 1256 |
},
|
| 1257 |
{
|
| 1258 |
+
"epoch": 0.8763769889840881,
|
| 1259 |
+
"grad_norm": 0.06282901763916016,
|
| 1260 |
+
"learning_rate": 4.083227056243644e-06,
|
| 1261 |
+
"loss": 0.2847,
|
| 1262 |
"step": 179
|
| 1263 |
},
|
| 1264 |
{
|
| 1265 |
+
"epoch": 0.8812729498164015,
|
| 1266 |
+
"grad_norm": 0.07095268368721008,
|
| 1267 |
+
"learning_rate": 3.767186528398725e-06,
|
| 1268 |
+
"loss": 0.3923,
|
| 1269 |
"step": 180
|
| 1270 |
},
|
| 1271 |
{
|
| 1272 |
+
"epoch": 0.8861689106487148,
|
| 1273 |
+
"grad_norm": 0.09169545769691467,
|
| 1274 |
+
"learning_rate": 3.4633957120681293e-06,
|
| 1275 |
+
"loss": 0.5155,
|
| 1276 |
"step": 181
|
| 1277 |
},
|
| 1278 |
{
|
| 1279 |
+
"epoch": 0.8910648714810282,
|
| 1280 |
+
"grad_norm": 0.24409419298171997,
|
| 1281 |
+
"learning_rate": 3.1719350987811534e-06,
|
| 1282 |
+
"loss": 0.5456,
|
| 1283 |
"step": 182
|
| 1284 |
},
|
| 1285 |
{
|
| 1286 |
+
"epoch": 0.8959608323133414,
|
| 1287 |
+
"grad_norm": 0.08426636457443237,
|
| 1288 |
+
"learning_rate": 2.8928819130924657e-06,
|
| 1289 |
+
"loss": 0.5679,
|
| 1290 |
"step": 183
|
| 1291 |
},
|
| 1292 |
{
|
| 1293 |
+
"epoch": 0.9008567931456548,
|
| 1294 |
+
"grad_norm": 0.23968654870986938,
|
| 1295 |
+
"learning_rate": 2.6263100921208482e-06,
|
| 1296 |
+
"loss": 0.4841,
|
| 1297 |
"step": 184
|
| 1298 |
},
|
| 1299 |
{
|
| 1300 |
+
"epoch": 0.9057527539779682,
|
| 1301 |
+
"grad_norm": 0.10230088233947754,
|
| 1302 |
+
"learning_rate": 2.372290265959065e-06,
|
| 1303 |
+
"loss": 0.6307,
|
| 1304 |
"step": 185
|
| 1305 |
},
|
| 1306 |
{
|
| 1307 |
+
"epoch": 0.9106487148102815,
|
| 1308 |
+
"grad_norm": 0.07012014836072922,
|
| 1309 |
+
"learning_rate": 2.130889738959946e-06,
|
| 1310 |
+
"loss": 0.489,
|
| 1311 |
"step": 186
|
| 1312 |
},
|
| 1313 |
{
|
| 1314 |
+
"epoch": 0.9155446756425949,
|
| 1315 |
+
"grad_norm": 0.0840025320649147,
|
| 1316 |
+
"learning_rate": 1.9021724719035628e-06,
|
| 1317 |
+
"loss": 0.3811,
|
| 1318 |
"step": 187
|
| 1319 |
},
|
| 1320 |
{
|
| 1321 |
+
"epoch": 0.9204406364749081,
|
| 1322 |
+
"grad_norm": 0.06925945729017258,
|
| 1323 |
+
"learning_rate": 1.6861990650504255e-06,
|
| 1324 |
+
"loss": 0.3263,
|
| 1325 |
"step": 188
|
| 1326 |
},
|
| 1327 |
{
|
| 1328 |
+
"epoch": 0.9253365973072215,
|
| 1329 |
+
"grad_norm": 0.07820533215999603,
|
| 1330 |
+
"learning_rate": 1.4830267420849585e-06,
|
| 1331 |
+
"loss": 0.5176,
|
| 1332 |
"step": 189
|
| 1333 |
},
|
| 1334 |
{
|
| 1335 |
+
"epoch": 0.9302325581395349,
|
| 1336 |
+
"grad_norm": 0.10654879361391068,
|
| 1337 |
+
"learning_rate": 1.292709334953729e-06,
|
| 1338 |
+
"loss": 0.5375,
|
| 1339 |
"step": 190
|
| 1340 |
},
|
| 1341 |
{
|
| 1342 |
+
"epoch": 0.9351285189718482,
|
| 1343 |
+
"grad_norm": 0.06305018812417984,
|
| 1344 |
+
"learning_rate": 1.1152972696022445e-06,
|
| 1345 |
+
"loss": 0.4505,
|
| 1346 |
"step": 191
|
| 1347 |
},
|
| 1348 |
{
|
| 1349 |
+
"epoch": 0.9400244798041616,
|
| 1350 |
+
"grad_norm": 0.08257196098566055,
|
| 1351 |
+
"learning_rate": 9.508375526142976e-07,
|
| 1352 |
+
"loss": 0.4773,
|
| 1353 |
"step": 192
|
| 1354 |
},
|
| 1355 |
{
|
| 1356 |
+
"epoch": 0.944920440636475,
|
| 1357 |
+
"grad_norm": 0.0973181426525116,
|
| 1358 |
+
"learning_rate": 7.993737587571826e-07,
|
| 1359 |
+
"loss": 0.4163,
|
| 1360 |
"step": 193
|
| 1361 |
},
|
| 1362 |
{
|
| 1363 |
+
"epoch": 0.9498164014687882,
|
| 1364 |
+
"grad_norm": 0.1023239865899086,
|
| 1365 |
+
"learning_rate": 6.609460194362927e-07,
|
| 1366 |
+
"loss": 0.4597,
|
| 1367 |
"step": 194
|
| 1368 |
},
|
| 1369 |
{
|
| 1370 |
+
"epoch": 0.9547123623011016,
|
| 1371 |
+
"grad_norm": 0.09319756925106049,
|
| 1372 |
+
"learning_rate": 5.355910120620034e-07,
|
| 1373 |
+
"loss": 0.5937,
|
| 1374 |
"step": 195
|
| 1375 |
},
|
| 1376 |
{
|
| 1377 |
+
"epoch": 0.9596083231334149,
|
| 1378 |
+
"grad_norm": 0.07661579549312592,
|
| 1379 |
+
"learning_rate": 4.233419503317182e-07,
|
| 1380 |
+
"loss": 0.6014,
|
| 1381 |
"step": 196
|
| 1382 |
},
|
| 1383 |
{
|
| 1384 |
+
"epoch": 0.9645042839657283,
|
| 1385 |
+
"grad_norm": 0.07042757421731949,
|
| 1386 |
+
"learning_rate": 3.242285754296859e-07,
|
| 1387 |
+
"loss": 0.4437,
|
| 1388 |
"step": 197
|
| 1389 |
},
|
| 1390 |
{
|
| 1391 |
+
"epoch": 0.9694002447980417,
|
| 1392 |
+
"grad_norm": 0.07441776990890503,
|
| 1393 |
+
"learning_rate": 2.3827714814686486e-07,
|
| 1394 |
+
"loss": 0.2822,
|
| 1395 |
"step": 198
|
| 1396 |
},
|
| 1397 |
{
|
| 1398 |
+
"epoch": 0.9742962056303549,
|
| 1399 |
+
"grad_norm": 0.06588555127382278,
|
| 1400 |
+
"learning_rate": 1.655104419229281e-07,
|
| 1401 |
+
"loss": 0.3,
|
| 1402 |
"step": 199
|
| 1403 |
},
|
| 1404 |
{
|
| 1405 |
+
"epoch": 0.9791921664626683,
|
| 1406 |
+
"grad_norm": 0.06164183467626572,
|
| 1407 |
+
"learning_rate": 1.059477368122841e-07,
|
| 1408 |
+
"loss": 0.3109,
|
| 1409 |
"step": 200
|
| 1410 |
},
|
| 1411 |
{
|
| 1412 |
+
"epoch": 0.9840881272949816,
|
| 1413 |
+
"grad_norm": 0.13465183973312378,
|
| 1414 |
+
"learning_rate": 5.960481437568555e-08,
|
| 1415 |
+
"loss": 0.8621,
|
| 1416 |
"step": 201
|
| 1417 |
},
|
| 1418 |
{
|
| 1419 |
+
"epoch": 0.988984088127295,
|
| 1420 |
+
"grad_norm": 0.06679002195596695,
|
| 1421 |
+
"learning_rate": 2.649395349879069e-08,
|
| 1422 |
+
"loss": 0.5233,
|
| 1423 |
"step": 202
|
| 1424 |
},
|
| 1425 |
{
|
| 1426 |
+
"epoch": 0.9938800489596084,
|
| 1427 |
+
"grad_norm": 0.07267452776432037,
|
| 1428 |
+
"learning_rate": 6.623927138804664e-09,
|
| 1429 |
+
"loss": 0.59,
|
| 1430 |
"step": 203
|
| 1431 |
},
|
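The learning rate in the entries above decays smoothly to zero as the step count approaches the final step 204, while the loss fluctuates around its running mean. Below is a minimal inspection sketch, not part of the commit, assuming only the standard `log_history` / `global_step` layout that the Hugging Face `Trainer` writes into `trainer_state.json`:

```python
import json

# Load the trainer state committed under adapter/ in this repo.
with open("adapter/trainer_state.json") as f:
    state = json.load(f)

# Keep only per-step logging entries (the last record is a run summary).
logs = [e for e in state["log_history"] if "loss" in e and "step" in e]
print(f"{len(logs)} logged entries, global_step = {state['global_step']}")

# Rough convergence check: mean loss over the last 20 logged steps.
tail = [e["loss"] for e in logs[-20:]]
print("mean loss, last 20 steps:", round(sum(tail) / len(tail), 4))

# A cosine schedule should end at (or very near) zero.
print("final learning rates:", [e["learning_rate"] for e in logs[-3:]])
```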
     {
       "epoch": 0.9987760097919217,
+      "grad_norm": 0.07579752057790756,
       "learning_rate": 0.0,
+      "loss": 0.4031,
+      "step": 204
     },
     {
       "epoch": 0.9987760097919217,
+      "step": 204,
+      "total_flos": 1.7231028658783027e+17,
+      "train_loss": 0.4889225305295458,
+      "train_runtime": 5417.273,
+      "train_samples_per_second": 0.151,
+      "train_steps_per_second": 0.038
     }
   ],
   "logging_steps": 1,
+  "max_steps": 204,
   "num_input_tokens_seen": 0,
   "num_train_epochs": 1,
+  "save_steps": 500,
   "stateful_callbacks": {
     "TrainerControl": {
       "args": {
         ...
         "attributes": {}
       }
     },
+  "total_flos": 1.7231028658783027e+17,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
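The closing summary block is internally consistent: at the reported rates, the runtime accounts for all 204 optimizer steps. A quick arithmetic check, using only the figures logged above (the trainer rounds them to three decimals, hence the small slack):

```python
# Values copied from the summary record in trainer_state.json above.
runtime = 5417.273       # "train_runtime" (seconds)
steps_per_s = 0.038      # "train_steps_per_second"
samples_per_s = 0.151    # "train_samples_per_second"

print(steps_per_s * runtime)        # ~205.9 -> ~204 steps, within rounding error
print(samples_per_s * runtime)      # ~818 samples processed in total
print(samples_per_s / steps_per_s)  # ~3.97 -> ~4 samples per optimizer step
```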
adapter/training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7df378d0939130465fa7f5d61728d6ae9bbd818e223e5a12ec124d00fe8993d3
 size 5624

model-00001-of-00004.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c9cd28011a9c856ce2983f3f8da430e2c6441ee1b44e2085753fb150b0740cae
 size 3945426872

model-00002-of-00004.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:18fbc638c8cc908f3463182b492a369987c10a8d66ec79681356462537018896
 size 3864726352

model-00003-of-00004.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3f7bc32989b9524fbda3781ecaeafb527370081280c665e3d8acf6893be8c472
 size 3864726408

model-00004-of-00004.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d683d27ef1391f41ec3d614695620c3b873b62faf48fc42c241cb2e73b1c347d
 size 3556392240
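Each binary above is stored as a Git LFS pointer: a three-line text file carrying the spec version, the `sha256` oid of the real blob, and its byte size. A minimal verification sketch, assuming the shard has already been pulled to a local path (adjust `PATH` as needed), that re-hashes the file and compares it against the oid committed above:

```python
import hashlib

# oid from the LFS pointer for model-00001-of-00004.safetensors above.
EXPECTED = "c9cd28011a9c856ce2983f3f8da430e2c6441ee1b44e2085753fb150b0740cae"
PATH = "model-00001-of-00004.safetensors"  # local download path (assumption)

h = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "shard does not match its committed LFS oid"
print("ok:", PATH, h.hexdigest())
```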