c14kevincardenas committed on
Commit 86f45ec
1 Parent(s): 70452a2

Training in progress, epoch 1

config.json ADDED
@@ -0,0 +1,89 @@
+ {
+   "_name_or_path": "c14kevincardenas/beit-large-patch16-384-limb",
+   "add_fpn": false,
+   "architectures": [
+     "LimbXYModel"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "auxiliary_channels": 256,
+   "auxiliary_concat_input": false,
+   "auxiliary_loss_weight": 0.4,
+   "auxiliary_num_convs": 1,
+   "drop_path_rate": 0.1,
+   "finetuning_task": "regression",
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "left_foot",
+     "1": "left_hand",
+     "2": "right_foot",
+     "3": "right_hand"
+   },
+   "image_size": 384,
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "label2id": {
+     "left_foot": "0",
+     "left_hand": "1",
+     "right_foot": "2",
+     "right_hand": "3"
+   },
+   "layer_norm_eps": 1e-12,
+   "layer_scale_init_value": 0.1,
+   "model_type": "beit",
+   "num_attention_heads": 16,
+   "num_channels": 3,
+   "num_hidden_layers": 24,
+   "out_features": [
+     "stage24"
+   ],
+   "out_indices": [
+     24
+   ],
+   "patch_size": 16,
+   "pool_scales": [
+     1,
+     2,
+     3,
+     6
+   ],
+   "reshape_hidden_states": true,
+   "semantic_loss_ignore_index": 255,
+   "stage_names": [
+     "stem",
+     "stage1",
+     "stage2",
+     "stage3",
+     "stage4",
+     "stage5",
+     "stage6",
+     "stage7",
+     "stage8",
+     "stage9",
+     "stage10",
+     "stage11",
+     "stage12",
+     "stage13",
+     "stage14",
+     "stage15",
+     "stage16",
+     "stage17",
+     "stage18",
+     "stage19",
+     "stage20",
+     "stage21",
+     "stage22",
+     "stage23",
+     "stage24"
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.45.2",
+   "use_absolute_position_embeddings": false,
+   "use_auxiliary_head": true,
+   "use_mask_token": false,
+   "use_mean_pooling": true,
+   "use_relative_position_bias": true,
+   "use_shared_relative_position_bias": false,
+   "vocab_size": 8192
+ }
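The config is a standard BEiT-large setup (24 layers, hidden size 1024, 384x384 input) with `finetuning_task` set to `regression` and four limb labels. `LimbXYModel` is a custom architecture that is not part of the transformers library, so the sketch below is only an assumption of how such a head might sit on top of the BEiT backbone, not the author's actual implementation:

```python
# Illustrative sketch only: "LimbXYModel" is custom and not shipped with transformers,
# so this regression head is an assumption based on the config, not the real model code.
import torch.nn as nn
from transformers import BeitConfig, BeitModel

config = BeitConfig.from_pretrained("c14kevincardenas/beit-large-patch16-384-limb")

class LimbXYHead(nn.Module):
    """Hypothetical head: one (x, y) coordinate pair per limb label."""
    def __init__(self, config):
        super().__init__()
        self.backbone = BeitModel(config)            # BEiT-large, 24 layers, hidden_size 1024
        self.regressor = nn.Linear(config.hidden_size, 2 * len(config.id2label))

    def forward(self, pixel_values):
        outputs = self.backbone(pixel_values=pixel_values)
        pooled = outputs.pooler_output               # mean-pooled tokens (use_mean_pooling: true)
        coords = self.regressor(pooled)              # (batch, 8) -> 4 limbs x (x, y)
        return coords.view(-1, len(config.id2label), 2)
```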
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4076431e46a32e5ee4781ee8d86a1e522d4ee61ffac3b04670dd86fb13e04c0d
+ size 1215959696
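The weights are stored via Git LFS, so the repository itself only contains this pointer; the ~1.2 GB safetensors blob is resolved at download time. A downloaded copy can be checked against the pointer with nothing but the standard library (the local path is illustrative):

```python
# Verify a local model.safetensors against the LFS pointer's sha256 and size.
# "model.safetensors" is assumed to be a fully downloaded copy of the resolved file.
import hashlib, os

expected_oid = "4076431e46a32e5ee4781ee8d86a1e522d4ee61ffac3b04670dd86fb13e04c0d"
expected_size = 1215959696

path = "model.safetensors"
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches the LFS pointer")
```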
preprocessor_config.json ADDED
@@ -0,0 +1,28 @@
+ {
+   "crop_size": {
+     "height": 224,
+     "width": 224
+   },
+   "do_center_crop": false,
+   "do_normalize": true,
+   "do_reduce_labels": false,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_mean": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "image_processor_type": "BeitImageProcessor",
+   "image_std": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "resample": 2,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "height": 384,
+     "width": 384
+   }
+ }
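This is a stock `BeitImageProcessor` configuration: resize to 384x384 with bilinear resampling (`resample: 2`), rescale by 1/255, normalize with mean and std 0.5, and no center crop. A minimal sketch of applying it (the input image path is a placeholder):

```python
# Minimal sketch of the preprocessing pipeline above; "example.jpg" is a placeholder image.
from PIL import Image
from transformers import BeitImageProcessor

processor = BeitImageProcessor.from_pretrained("c14kevincardenas/beit-large-patch16-384-limb")

image = Image.open("example.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 384, 384])
```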
runs/Oct24_16-30-28_galactica.ad.cirange.net/events.out.tfevents.1729787455.galactica.ad.cirange.net.107524.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:902f1c1c9fa16621bb717af72dc57303655d431748b619f1bf59eeb1cb80d7d1
+ size 6905
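This event file holds the TensorBoard scalars logged during the epoch. If a copy of the `runs/` directory is downloaded locally, it can be inspected with TensorBoard's event reader (the local path below is illustrative):

```python
# Read logged scalar tags from the event file; the directory path is a placeholder.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Oct24_16-30-28_galactica.ad.cirange.net")
acc.Reload()
print(acc.Tags()["scalars"])  # e.g. loss and learning-rate curves logged by the Trainer
```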
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c14acf0ab4a6d6f8a096c2c4941203197fda793c854f79ba094ad136059d7a30
+ size 5304
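`training_args.bin` is the pickled `TrainingArguments` object that the transformers Trainer saves alongside checkpoints. It can be inspected with `torch.load`; since it is a pickle rather than a tensor file, only load it from repositories you trust:

```python
# Inspect the saved TrainingArguments; weights_only=False is needed because this
# is a pickled Python object, not a tensor file.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```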