dougtrajano committed
Commit 31c9a21
1 Parent(s): eef5092

Training in progress, epoch 1

config.json CHANGED
@@ -10,16 +10,16 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "IND",
-    "1": "GRP",
-    "2": "OTH"
+    "0": "INDIVIDUAL",
+    "1": "GROUP",
+    "2": "OTHER"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "GRP": 1,
-    "IND": 0,
-    "OTH": 2
+    "GROUP": 1,
+    "INDIVIDUAL": 0,
+    "OTHER": 2
   },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
@@ -36,7 +36,7 @@
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
   "torch_dtype": "float32",
-  "transformers_version": "4.26.0",
+  "transformers_version": "4.26.1",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 29794
last-checkpoint/config.json ADDED
@@ -0,0 +1,43 @@
+{
+  "_name_or_path": "neuralmind/bert-base-portuguese-cased",
+  "architectures": [
+    "ToxicityTypeForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "directionality": "bidi",
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "id2label": {
+    "0": "INDIVIDUAL",
+    "1": "GROUP",
+    "2": "OTHER"
+  },
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "label2id": {
+    "GROUP": 1,
+    "INDIVIDUAL": 0,
+    "OTHER": 2
+  },
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "output_past": true,
+  "pad_token_id": 0,
+  "pooler_fc_size": 768,
+  "pooler_num_attention_heads": 12,
+  "pooler_num_fc_layers": 3,
+  "pooler_size_per_head": 128,
+  "pooler_type": "first_token_transform",
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "torch_dtype": "float32",
+  "transformers_version": "4.26.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 29794
+}
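
The checkpoint config lists a custom head class (ToxicityTypeForSequenceClassification), but model_type is "bert", so the auto classes can still materialize a standard BERT classifier with num_labels inferred from id2label. A minimal sketch, assuming the last-checkpoint directory has been pulled locally; weights saved under the custom class name may not map one-to-one onto the stock head, in which case the original ToxicityType class is required:

from transformers import AutoModelForSequenceClassification, AutoTokenizer

model = AutoModelForSequenceClassification.from_pretrained("last-checkpoint")
tokenizer = AutoTokenizer.from_pretrained("last-checkpoint")

inputs = tokenizer("um exemplo de texto", return_tensors="pt")
logits = model(**inputs).logits  # shape (1, 3): one logit per class
print(model.config.id2label[int(logits.argmax(-1))])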
last-checkpoint/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ae7426a86cdc93bd5d28ea200606db91e5318e5cc75f4ab4344d6f45bda7d3dd
+size 1426051
last-checkpoint/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0ec383c18a5b94a5d739dec9c5befc670c3fc650f033ee271224cc462f76d529
+size 435772781
last-checkpoint/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6f820ea87c2f62ea99aeae7edde410b710ab5d11993016cf24f9668342726ec
+size 14503
last-checkpoint/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:80e3bce3c0ea15d7b424ec6761f0c5066770082ab56e25f2ea2827bbbc47848b
+size 623
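
The four binary checkpoint files above (optimizer.pt, pytorch_model.bin, rng_state.pth, scheduler.pt) are committed as Git LFS pointers: three lines giving the spec version, a sha256 oid, and the byte size, while the blobs themselves live on the LFS server. A minimal sketch verifying a pulled file against its pointer, using the oid recorded above for pytorch_model.bin:

import hashlib

def sha256_of(path: str) -> str:
    # Stream in 1 MiB chunks so the ~435 MB model file never sits fully in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "0ec383c18a5b94a5d739dec9c5befc670c3fc650f033ee271224cc462f76d529"
assert sha256_of("last-checkpoint/pytorch_model.bin") == expected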
last-checkpoint/special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+{
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
last-checkpoint/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
last-checkpoint/tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+{
+  "cls_token": "[CLS]",
+  "do_basic_tokenize": true,
+  "do_lower_case": false,
+  "mask_token": "[MASK]",
+  "model_max_length": 1000000000000000019884624838656,
+  "name_or_path": "neuralmind/bert-base-portuguese-cased",
+  "never_split": null,
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "special_tokens_map_file": "/root/.cache/huggingface/hub/models--neuralmind--bert-base-portuguese-cased/snapshots/94d69c95f98f7d5b2a8700c420230ae10def0baa/special_tokens_map.json",
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "unk_token": "[UNK]"
+}
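
Two details worth noting in the tokenizer files: the special-token map is the standard BERT set, and model_max_length is transformers' int(1e30) sentinel (1000000000000000019884624838656), meaning no limit was recorded, so callers should truncate to the model's max_position_embeddings (512) themselves. A minimal sketch, assuming the checkpoint directory is local:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("last-checkpoint")

enc = tokenizer(
    "um texto de exemplo",
    truncation=True,
    max_length=512,  # cap explicitly; the config leaves model_max_length unset
    return_tensors="pt",
)
print(enc["input_ids"].shape)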
last-checkpoint/trainer_state.json ADDED
@@ -0,0 +1,28 @@
+{
+  "best_metric": 0.7603304380457284,
+  "best_model_checkpoint": "/opt/ml/checkpoints/toxicity-target-type-identification-2023-02-13-13-22-05-747/model/checkpoint-355",
+  "epoch": 1.0,
+  "global_step": 355,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.0,
+      "eval_accuracy": 0.7505285412262156,
+      "eval_f1": 0.7603304380457284,
+      "eval_loss": 0.700143575668335,
+      "eval_precision": 0.7813181118720665,
+      "eval_recall": 0.7505285412262156,
+      "eval_runtime": 5.4125,
+      "eval_samples_per_second": 174.779,
+      "eval_steps_per_second": 21.986,
+      "step": 355
+    }
+  ],
+  "max_steps": 10650,
+  "num_train_epochs": 30,
+  "total_flos": 138751730273394.0,
+  "trial_name": null,
+  "trial_params": null
+}
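
The step arithmetic in the trainer state is internally consistent: one epoch is 355 optimizer steps, and max_steps equals 355 × 30 for num_train_epochs, so this checkpoint is exactly 1/30 of the planned run. best_metric matches eval_f1, which suggests metric_for_best_model was set to "f1" (an assumption; that setting lives in training_args.bin). A quick check:

steps_per_epoch = 355  # global_step recorded after epoch 1.0
num_train_epochs = 30
assert steps_per_epoch * num_train_epochs == 10650  # max_steps

best_metric = 0.7603304380457284
eval_f1 = 0.7603304380457284
assert best_metric == eval_f1  # best model appears to be tracked on F1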
last-checkpoint/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29ea1e9a63ff6356100cc945f6ca8285154323a795ec0916bd9de37f42dc85eb
+size 3695
last-checkpoint/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f7f53f0c9ea25a55e1fe1aaa4b6aa42961ae41fda69e299001ec56f516aa9d52
+oid sha256:29ea1e9a63ff6356100cc945f6ca8285154323a795ec0916bd9de37f42dc85eb
 size 3695
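
training_args.bin is a pickled TrainingArguments object rather than plain text, which is why only its LFS oid changes here while the size stays at 3695 bytes. A minimal sketch of recovering it, ideally under the same transformers version (4.26.1) since unpickling depends on the class layout:

import torch

# torch.load unpickles the TrainingArguments object saved by the Trainer.
args = torch.load("training_args.bin")
print(type(args).__name__)    # TrainingArguments
print(args.num_train_epochs)  # expect 30, matching trainer_state.json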