{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.2727272727272725,
  "eval_steps": 500,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.45,
      "grad_norm": 4.369685173034668,
      "learning_rate": 4.242424242424243e-05,
      "loss": 0.4661,
      "step": 500
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.9891207218170166,
      "learning_rate": 3.484848484848485e-05,
      "loss": 0.3327,
      "step": 1000
    },
    {
      "epoch": 1.36,
      "grad_norm": 9.819967269897461,
      "learning_rate": 2.7272727272727273e-05,
      "loss": 0.2258,
      "step": 1500
    },
    {
      "epoch": 1.82,
      "grad_norm": 1.7621924877166748,
      "learning_rate": 1.9696969696969697e-05,
      "loss": 0.1772,
      "step": 2000
    },
    {
      "epoch": 2.27,
      "grad_norm": 21.205034255981445,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.1106,
      "step": 2500
    }
  ],
  "logging_steps": 500,
  "max_steps": 3300,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2019349980886680.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
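
For reference, this is the `trainer_state.json` that the Hugging Face Transformers `Trainer` writes alongside each saved checkpoint. A minimal sketch of reading it back to inspect the logged training curve, assuming the file sits in the working directory (in an actual run it would typically live under the checkpoint folder, e.g. `checkpoint-2500/trainer_state.json`):

```python
import json

# Load the trainer state that the Transformers Trainer saves with each
# checkpoint (the path here is an assumption for the sketch).
with open("trainer_state.json") as f:
    state = json.load(f)

print(f"progress: step {state['global_step']}/{state['max_steps']}, "
      f"epoch {state['epoch']:.2f}/{state['num_train_epochs']}")

# log_history holds one entry per logging event (every `logging_steps` steps).
for entry in state["log_history"]:
    print(f"step {entry['step']:>5}  epoch {entry['epoch']:.2f}  "
          f"loss {entry['loss']:.4f}  lr {entry['learning_rate']:.2e}")
```

One observation, inferred from the data rather than stated in the file: the logged learning rates match 5e-5 × (1 − step/3300) at every step (e.g. 5e-5 × 2800/3300 ≈ 4.2424e-05 at step 500), consistent with the Trainer's default linear schedule decaying from an initial rate of 5e-5 to zero over the 3,300 `max_steps`.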