End of training
Browse files- README.md +55 -0
- adapter_config.json +30 -0
- adapter_model.safetensors +3 -0
- runs/Jan07_08-30-52_096ae31a5012/events.out.tfevents.1704616274.096ae31a5012.684.0 +3 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer_config.json +42 -0
- training_args.bin +3 -0
- wandb/debug.log +131 -0
- wandb/run-20240107_083215-enryt6zo/files/config.yaml +677 -0
- wandb/run-20240107_083215-enryt6zo/files/output.log +33 -0
- wandb/run-20240107_083215-enryt6zo/files/requirements.txt +497 -0
- wandb/run-20240107_083215-enryt6zo/files/wandb-metadata.json +52 -0
- wandb/run-20240107_083215-enryt6zo/files/wandb-summary.json +1 -0
- wandb/run-20240107_083215-enryt6zo/logs/debug.log +131 -0
- wandb/run-20240107_083215-enryt6zo/run-enryt6zo.wandb +0 -0
README.md
ADDED
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
license: apache-2.0
|
3 |
+
library_name: peft
|
4 |
+
tags:
|
5 |
+
- trl
|
6 |
+
- sft
|
7 |
+
- generated_from_trainer
|
8 |
+
base_model: mistralai/Mistral-7B-v0.1
|
9 |
+
model-index:
|
10 |
+
- name: Mistral-7B-Finetuning-Insurance
|
11 |
+
results: []
|
12 |
+
---
|
13 |
+
|
14 |
+
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
|
15 |
+
should probably proofread and complete it, then remove this comment. -->
|
16 |
+
|
17 |
+
# Mistral-7B-Finetuning-Insurance
|
18 |
+
|
19 |
+
This model is a fine-tuned version of [mistralai/Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) on an unknown dataset.
|
20 |
+
|
21 |
+
## Model description
|
22 |
+
|
23 |
+
More information needed
|
24 |
+
|
25 |
+
## Intended uses & limitations
|
26 |
+
|
27 |
+
More information needed
|
28 |
+
|
29 |
+
## Training and evaluation data
|
30 |
+
|
31 |
+
More information needed
|
32 |
+
|
33 |
+
## Training procedure
|
34 |
+
|
35 |
+
### Training hyperparameters
|
36 |
+
|
37 |
+
The following hyperparameters were used during training:
|
38 |
+
- learning_rate: 0.0002
|
39 |
+
- train_batch_size: 2
|
40 |
+
- eval_batch_size: 8
|
41 |
+
- seed: 42
|
42 |
+
- gradient_accumulation_steps: 2
|
43 |
+
- total_train_batch_size: 4
|
44 |
+
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
|
45 |
+
- lr_scheduler_type: cosine
|
46 |
+
- lr_scheduler_warmup_ratio: 0.03
|
47 |
+
- training_steps: 60
|
48 |
+
|
49 |
+
### Framework versions
|
50 |
+
|
51 |
+
- PEFT 0.7.2.dev0
|
52 |
+
- Transformers 4.36.2
|
53 |
+
- Pytorch 2.1.0+cu121
|
54 |
+
- Datasets 2.16.1
|
55 |
+
- Tokenizers 0.15.0
|
adapter_config.json
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"alpha_pattern": {},
|
3 |
+
"auto_mapping": null,
|
4 |
+
"base_model_name_or_path": "mistralai/Mistral-7B-v0.1",
|
5 |
+
"bias": "none",
|
6 |
+
"fan_in_fan_out": false,
|
7 |
+
"inference_mode": true,
|
8 |
+
"init_lora_weights": true,
|
9 |
+
"layers_pattern": null,
|
10 |
+
"layers_to_transform": null,
|
11 |
+
"loftq_config": {},
|
12 |
+
"lora_alpha": 32,
|
13 |
+
"lora_dropout": 0.05,
|
14 |
+
"megatron_config": null,
|
15 |
+
"megatron_core": "megatron.core",
|
16 |
+
"modules_to_save": null,
|
17 |
+
"peft_type": "LORA",
|
18 |
+
"r": 32,
|
19 |
+
"rank_pattern": {},
|
20 |
+
"revision": null,
|
21 |
+
"target_modules": [
|
22 |
+
"gate_proj",
|
23 |
+
"o_proj",
|
24 |
+
"q_proj",
|
25 |
+
"v_proj",
|
26 |
+
"k_proj"
|
27 |
+
],
|
28 |
+
"task_type": "CAUSAL_LM",
|
29 |
+
"use_rslora": false
|
30 |
+
}
|
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:1b751455bac18c260c793188b2414a3cabb61d6d296185708d65553c0970461d
|
3 |
+
size 184592616
|
runs/Jan07_08-30-52_096ae31a5012/events.out.tfevents.1704616274.096ae31a5012.684.0
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:46c86b905e7297c5c8b7d6e29bd742d38afc6e38b6c5aeaf2d587dcb072e890f
|
3 |
+
size 6155
|
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"bos_token": {
|
3 |
+
"content": "<s>",
|
4 |
+
"lstrip": false,
|
5 |
+
"normalized": false,
|
6 |
+
"rstrip": false,
|
7 |
+
"single_word": false
|
8 |
+
},
|
9 |
+
"eos_token": {
|
10 |
+
"content": "</s>",
|
11 |
+
"lstrip": false,
|
12 |
+
"normalized": false,
|
13 |
+
"rstrip": false,
|
14 |
+
"single_word": false
|
15 |
+
},
|
16 |
+
"pad_token": "</s>",
|
17 |
+
"unk_token": {
|
18 |
+
"content": "<unk>",
|
19 |
+
"lstrip": false,
|
20 |
+
"normalized": false,
|
21 |
+
"rstrip": false,
|
22 |
+
"single_word": false
|
23 |
+
}
|
24 |
+
}
|
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
|
|
tokenizer_config.json
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"add_bos_token": true,
|
3 |
+
"add_eos_token": false,
|
4 |
+
"added_tokens_decoder": {
|
5 |
+
"0": {
|
6 |
+
"content": "<unk>",
|
7 |
+
"lstrip": false,
|
8 |
+
"normalized": false,
|
9 |
+
"rstrip": false,
|
10 |
+
"single_word": false,
|
11 |
+
"special": true
|
12 |
+
},
|
13 |
+
"1": {
|
14 |
+
"content": "<s>",
|
15 |
+
"lstrip": false,
|
16 |
+
"normalized": false,
|
17 |
+
"rstrip": false,
|
18 |
+
"single_word": false,
|
19 |
+
"special": true
|
20 |
+
},
|
21 |
+
"2": {
|
22 |
+
"content": "</s>",
|
23 |
+
"lstrip": false,
|
24 |
+
"normalized": false,
|
25 |
+
"rstrip": false,
|
26 |
+
"single_word": false,
|
27 |
+
"special": true
|
28 |
+
}
|
29 |
+
},
|
30 |
+
"additional_special_tokens": [],
|
31 |
+
"bos_token": "<s>",
|
32 |
+
"clean_up_tokenization_spaces": false,
|
33 |
+
"eos_token": "</s>",
|
34 |
+
"legacy": true,
|
35 |
+
"model_max_length": 1000000000000000019884624838656,
|
36 |
+
"pad_token": "</s>",
|
37 |
+
"sp_model_kwargs": {},
|
38 |
+
"spaces_between_special_tokens": false,
|
39 |
+
"tokenizer_class": "LlamaTokenizer",
|
40 |
+
"unk_token": "<unk>",
|
41 |
+
"use_default_system_prompt": false
|
42 |
+
}
|
training_args.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:1d970283b2db9e26741e20f533939320bbbc53f311abd5aa39921484c70f4190
|
3 |
+
size 4792
|
wandb/debug.log
ADDED
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
2024-01-07 08:32:15,522 INFO MainThread:684 [wandb_setup.py:_flush():76] Current SDK version is 0.16.1
|
2 |
+
2024-01-07 08:32:15,523 INFO MainThread:684 [wandb_setup.py:_flush():76] Configure stats pid to 684
|
3 |
+
2024-01-07 08:32:15,523 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
|
4 |
+
2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/settings
|
5 |
+
2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
|
6 |
+
2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
|
7 |
+
2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
|
8 |
+
2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
|
9 |
+
2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_log_setup():524] Logging user logs to /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/run-20240107_083215-enryt6zo/logs/debug.log
|
10 |
+
2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_log_setup():525] Logging internal logs to /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/run-20240107_083215-enryt6zo/logs/debug-internal.log
|
11 |
+
2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_jupyter_setup():470] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7bfea43750f0>
|
12 |
+
2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():564] calling init triggers
|
13 |
+
2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():571] wandb.init called with sweep_config: {}
|
14 |
+
config: {}
|
15 |
+
2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():614] starting backend
|
16 |
+
2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():618] setting up manager
|
17 |
+
2024-01-07 08:32:15,531 INFO MainThread:684 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
|
18 |
+
2024-01-07 08:32:15,534 INFO MainThread:684 [wandb_init.py:init():624] backend started and connected
|
19 |
+
2024-01-07 08:32:15,570 INFO MainThread:684 [wandb_run.py:_label_probe_notebook():1294] probe notebook
|
20 |
+
2024-01-07 08:32:17,418 INFO MainThread:684 [wandb_init.py:init():716] updated telemetry
|
21 |
+
2024-01-07 08:32:17,453 INFO MainThread:684 [wandb_init.py:init():749] communicating run to backend with 90.0 second timeout
|
22 |
+
2024-01-07 08:32:17,964 INFO MainThread:684 [wandb_run.py:_on_init():2254] communicating current version
|
23 |
+
2024-01-07 08:32:18,119 INFO MainThread:684 [wandb_run.py:_on_init():2263] got version response
|
24 |
+
2024-01-07 08:32:18,120 INFO MainThread:684 [wandb_init.py:init():800] starting run threads in backend
|
25 |
+
2024-01-07 08:32:18,210 INFO MainThread:684 [wandb_run.py:_console_start():2233] atexit reg
|
26 |
+
2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2088] redirect: wrap_raw
|
27 |
+
2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2153] Wrapping output streams.
|
28 |
+
2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2178] Redirects installed.
|
29 |
+
2024-01-07 08:32:18,213 INFO MainThread:684 [wandb_init.py:init():841] run started, returning control to user process
|
30 |
+
2024-01-07 08:32:18,219 INFO MainThread:684 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 32768, 'hidden_size': 4096, 'intermediate_size': 14336, 'num_hidden_layers': 32, 'num_attention_heads': 32, 'sliding_window': 4096, 'num_key_value_heads': 8, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'use_cache': False, 'rope_theta': 10000.0, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['MistralForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'mistralai/Mistral-7B-v0.1', 'transformers_version': '4.36.2', 'model_type': 'mistral', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', 'load_in_8bit': False, 'load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'bfloat16'}, 'output_dir': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 2, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0002, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 3.0, 'max_steps': 60, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/runs/Jan07_08-30-52_096ae31a5012', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 10, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 10, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': 
None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': False, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': True, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': False, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': False, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None}
|
31 |
+
2024-01-07 08:44:03,889 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
32 |
+
2024-01-07 08:44:03,890 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
33 |
+
2024-01-07 08:44:26,570 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
34 |
+
2024-01-07 08:44:34,326 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
35 |
+
2024-01-07 08:44:34,327 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
36 |
+
2024-01-07 08:44:46,475 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
37 |
+
2024-01-07 08:46:05,058 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
38 |
+
2024-01-07 08:46:05,058 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
39 |
+
2024-01-07 08:46:13,038 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
40 |
+
2024-01-07 08:46:18,516 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
41 |
+
2024-01-07 08:46:18,516 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
42 |
+
2024-01-07 08:50:09,111 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
43 |
+
2024-01-07 08:50:13,508 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
44 |
+
2024-01-07 08:50:13,513 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
45 |
+
2024-01-07 08:51:38,094 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
46 |
+
2024-01-07 08:51:38,098 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
47 |
+
2024-01-07 08:51:38,098 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
48 |
+
2024-01-07 08:51:41,383 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
49 |
+
2024-01-07 08:52:12,662 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
50 |
+
2024-01-07 08:52:12,662 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
51 |
+
2024-01-07 08:52:45,454 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
52 |
+
2024-01-07 08:53:09,095 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
53 |
+
2024-01-07 08:53:09,096 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
54 |
+
2024-01-07 08:55:59,367 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
55 |
+
2024-01-07 08:56:00,251 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
56 |
+
2024-01-07 08:56:00,252 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
57 |
+
2024-01-07 08:56:06,562 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
58 |
+
2024-01-07 08:56:10,699 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
59 |
+
2024-01-07 08:56:10,700 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
60 |
+
2024-01-07 08:57:55,151 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
61 |
+
2024-01-07 08:57:58,981 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
62 |
+
2024-01-07 08:57:58,981 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
63 |
+
2024-01-07 09:00:08,883 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
64 |
+
2024-01-07 09:00:18,822 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
65 |
+
2024-01-07 09:00:18,822 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
66 |
+
2024-01-07 09:08:01,727 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
67 |
+
2024-01-07 09:08:06,231 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
68 |
+
2024-01-07 09:08:06,232 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
69 |
+
2024-01-07 09:08:25,862 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
70 |
+
2024-01-07 09:08:25,898 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
71 |
+
2024-01-07 09:08:25,898 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
72 |
+
2024-01-07 09:08:35,845 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
73 |
+
2024-01-07 09:08:37,716 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
74 |
+
2024-01-07 09:08:37,717 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
75 |
+
2024-01-07 09:08:40,487 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
76 |
+
2024-01-07 09:08:40,495 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
77 |
+
2024-01-07 09:08:40,501 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
78 |
+
2024-01-07 09:08:45,788 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
79 |
+
2024-01-07 09:08:45,793 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
80 |
+
2024-01-07 09:08:45,794 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
81 |
+
2024-01-07 09:08:49,111 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
82 |
+
2024-01-07 09:08:49,155 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
83 |
+
2024-01-07 09:08:49,155 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
84 |
+
2024-01-07 09:09:44,376 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
85 |
+
2024-01-07 09:09:44,379 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
86 |
+
2024-01-07 09:09:44,380 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
87 |
+
2024-01-07 09:10:16,380 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
88 |
+
2024-01-07 09:10:16,383 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
89 |
+
2024-01-07 09:10:16,383 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
90 |
+
2024-01-07 09:10:25,980 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
91 |
+
2024-01-07 09:10:26,068 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
92 |
+
2024-01-07 09:10:26,076 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
93 |
+
2024-01-07 09:10:52,944 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
94 |
+
2024-01-07 09:10:52,950 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
95 |
+
2024-01-07 09:10:52,950 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
96 |
+
2024-01-07 09:10:54,782 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
97 |
+
2024-01-07 09:10:54,813 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
98 |
+
2024-01-07 09:10:54,813 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
99 |
+
2024-01-07 09:12:03,682 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
100 |
+
2024-01-07 09:12:03,692 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
101 |
+
2024-01-07 09:12:03,692 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
102 |
+
2024-01-07 09:12:06,232 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
103 |
+
2024-01-07 09:12:06,325 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
104 |
+
2024-01-07 09:12:06,326 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
105 |
+
2024-01-07 09:12:33,934 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
106 |
+
2024-01-07 09:12:34,001 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
107 |
+
2024-01-07 09:12:34,004 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
108 |
+
2024-01-07 09:13:00,605 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
109 |
+
2024-01-07 09:13:00,639 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
110 |
+
2024-01-07 09:13:00,639 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
111 |
+
2024-01-07 09:13:06,384 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
112 |
+
2024-01-07 09:13:06,462 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
113 |
+
2024-01-07 09:13:06,462 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
114 |
+
2024-01-07 09:13:24,618 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
115 |
+
2024-01-07 09:13:24,664 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
116 |
+
2024-01-07 09:13:24,665 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
117 |
+
2024-01-07 09:13:52,451 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
118 |
+
2024-01-07 09:13:52,481 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
119 |
+
2024-01-07 09:13:52,481 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
120 |
+
2024-01-07 09:14:01,980 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
121 |
+
2024-01-07 09:14:02,039 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
122 |
+
2024-01-07 09:14:02,040 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
123 |
+
2024-01-07 09:14:37,393 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
124 |
+
2024-01-07 09:14:42,402 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
125 |
+
2024-01-07 09:14:42,403 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
126 |
+
2024-01-07 09:16:27,093 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
127 |
+
2024-01-07 09:16:27,127 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
128 |
+
2024-01-07 09:16:27,127 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
129 |
+
2024-01-07 09:19:48,148 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
|
130 |
+
2024-01-07 09:19:48,193 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
|
131 |
+
2024-01-07 09:19:48,193 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
|
wandb/run-20240107_083215-enryt6zo/files/config.yaml
ADDED
@@ -0,0 +1,677 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
wandb_version: 1
|
2 |
+
|
3 |
+
_wandb:
|
4 |
+
desc: null
|
5 |
+
value:
|
6 |
+
python_version: 3.10.12
|
7 |
+
cli_version: 0.16.1
|
8 |
+
framework: huggingface
|
9 |
+
huggingface_version: 4.36.2
|
10 |
+
is_jupyter_run: true
|
11 |
+
is_kaggle_kernel: false
|
12 |
+
start_time: 1704616335.534639
|
13 |
+
t:
|
14 |
+
1:
|
15 |
+
- 1
|
16 |
+
- 2
|
17 |
+
- 3
|
18 |
+
- 5
|
19 |
+
- 11
|
20 |
+
- 12
|
21 |
+
- 49
|
22 |
+
- 51
|
23 |
+
- 53
|
24 |
+
- 55
|
25 |
+
- 71
|
26 |
+
- 84
|
27 |
+
- 98
|
28 |
+
2:
|
29 |
+
- 1
|
30 |
+
- 2
|
31 |
+
- 3
|
32 |
+
- 5
|
33 |
+
- 11
|
34 |
+
- 12
|
35 |
+
- 49
|
36 |
+
- 51
|
37 |
+
- 53
|
38 |
+
- 55
|
39 |
+
- 71
|
40 |
+
- 84
|
41 |
+
- 98
|
42 |
+
3:
|
43 |
+
- 7
|
44 |
+
- 23
|
45 |
+
4: 3.10.12
|
46 |
+
5: 0.16.1
|
47 |
+
6: 4.36.2
|
48 |
+
8:
|
49 |
+
- 1
|
50 |
+
- 5
|
51 |
+
- 12
|
52 |
+
9:
|
53 |
+
1: transformers_trainer
|
54 |
+
13: linux-x86_64
|
55 |
+
m:
|
56 |
+
- 1: train/global_step
|
57 |
+
6:
|
58 |
+
- 3
|
59 |
+
- 1: train/loss
|
60 |
+
5: 1
|
61 |
+
6:
|
62 |
+
- 1
|
63 |
+
- 1: train/learning_rate
|
64 |
+
5: 1
|
65 |
+
6:
|
66 |
+
- 1
|
67 |
+
- 1: train/epoch
|
68 |
+
5: 1
|
69 |
+
6:
|
70 |
+
- 1
|
71 |
+
- 1: train/train_runtime
|
72 |
+
5: 1
|
73 |
+
6:
|
74 |
+
- 1
|
75 |
+
- 1: train/train_samples_per_second
|
76 |
+
5: 1
|
77 |
+
6:
|
78 |
+
- 1
|
79 |
+
- 1: train/train_steps_per_second
|
80 |
+
5: 1
|
81 |
+
6:
|
82 |
+
- 1
|
83 |
+
- 1: train/total_flos
|
84 |
+
5: 1
|
85 |
+
6:
|
86 |
+
- 1
|
87 |
+
- 1: train/train_loss
|
88 |
+
5: 1
|
89 |
+
6:
|
90 |
+
- 1
|
91 |
+
vocab_size:
|
92 |
+
desc: null
|
93 |
+
value: 32000
|
94 |
+
max_position_embeddings:
|
95 |
+
desc: null
|
96 |
+
value: 32768
|
97 |
+
hidden_size:
|
98 |
+
desc: null
|
99 |
+
value: 4096
|
100 |
+
intermediate_size:
|
101 |
+
desc: null
|
102 |
+
value: 14336
|
103 |
+
num_hidden_layers:
|
104 |
+
desc: null
|
105 |
+
value: 32
|
106 |
+
num_attention_heads:
|
107 |
+
desc: null
|
108 |
+
value: 32
|
109 |
+
sliding_window:
|
110 |
+
desc: null
|
111 |
+
value: 4096
|
112 |
+
num_key_value_heads:
|
113 |
+
desc: null
|
114 |
+
value: 8
|
115 |
+
hidden_act:
|
116 |
+
desc: null
|
117 |
+
value: silu
|
118 |
+
initializer_range:
|
119 |
+
desc: null
|
120 |
+
value: 0.02
|
121 |
+
rms_norm_eps:
|
122 |
+
desc: null
|
123 |
+
value: 1.0e-05
|
124 |
+
use_cache:
|
125 |
+
desc: null
|
126 |
+
value: false
|
127 |
+
rope_theta:
|
128 |
+
desc: null
|
129 |
+
value: 10000.0
|
130 |
+
attention_dropout:
|
131 |
+
desc: null
|
132 |
+
value: 0.0
|
133 |
+
return_dict:
|
134 |
+
desc: null
|
135 |
+
value: true
|
136 |
+
output_hidden_states:
|
137 |
+
desc: null
|
138 |
+
value: false
|
139 |
+
output_attentions:
|
140 |
+
desc: null
|
141 |
+
value: false
|
142 |
+
torchscript:
|
143 |
+
desc: null
|
144 |
+
value: false
|
145 |
+
torch_dtype:
|
146 |
+
desc: null
|
147 |
+
value: bfloat16
|
148 |
+
use_bfloat16:
|
149 |
+
desc: null
|
150 |
+
value: false
|
151 |
+
tf_legacy_loss:
|
152 |
+
desc: null
|
153 |
+
value: false
|
154 |
+
pruned_heads:
|
155 |
+
desc: null
|
156 |
+
value: {}
|
157 |
+
tie_word_embeddings:
|
158 |
+
desc: null
|
159 |
+
value: false
|
160 |
+
is_encoder_decoder:
|
161 |
+
desc: null
|
162 |
+
value: false
|
163 |
+
is_decoder:
|
164 |
+
desc: null
|
165 |
+
value: false
|
166 |
+
cross_attention_hidden_size:
|
167 |
+
desc: null
|
168 |
+
value: null
|
169 |
+
add_cross_attention:
|
170 |
+
desc: null
|
171 |
+
value: false
|
172 |
+
tie_encoder_decoder:
|
173 |
+
desc: null
|
174 |
+
value: false
|
175 |
+
max_length:
|
176 |
+
desc: null
|
177 |
+
value: 20
|
178 |
+
min_length:
|
179 |
+
desc: null
|
180 |
+
value: 0
|
181 |
+
do_sample:
|
182 |
+
desc: null
|
183 |
+
value: false
|
184 |
+
early_stopping:
|
185 |
+
desc: null
|
186 |
+
value: false
|
187 |
+
num_beams:
|
188 |
+
desc: null
|
189 |
+
value: 1
|
190 |
+
num_beam_groups:
|
191 |
+
desc: null
|
192 |
+
value: 1
|
193 |
+
diversity_penalty:
|
194 |
+
desc: null
|
195 |
+
value: 0.0
|
196 |
+
temperature:
|
197 |
+
desc: null
|
198 |
+
value: 1.0
|
199 |
+
top_k:
|
200 |
+
desc: null
|
201 |
+
value: 50
|
202 |
+
top_p:
|
203 |
+
desc: null
|
204 |
+
value: 1.0
|
205 |
+
typical_p:
|
206 |
+
desc: null
|
207 |
+
value: 1.0
|
208 |
+
repetition_penalty:
|
209 |
+
desc: null
|
210 |
+
value: 1.0
|
211 |
+
length_penalty:
|
212 |
+
desc: null
|
213 |
+
value: 1.0
|
214 |
+
no_repeat_ngram_size:
|
215 |
+
desc: null
|
216 |
+
value: 0
|
217 |
+
encoder_no_repeat_ngram_size:
|
218 |
+
desc: null
|
219 |
+
value: 0
|
220 |
+
bad_words_ids:
|
221 |
+
desc: null
|
222 |
+
value: null
|
223 |
+
num_return_sequences:
|
224 |
+
desc: null
|
225 |
+
value: 1
|
226 |
+
chunk_size_feed_forward:
|
227 |
+
desc: null
|
228 |
+
value: 0
|
229 |
+
output_scores:
|
230 |
+
desc: null
|
231 |
+
value: false
|
232 |
+
return_dict_in_generate:
|
233 |
+
desc: null
|
234 |
+
value: false
|
235 |
+
forced_bos_token_id:
|
236 |
+
desc: null
|
237 |
+
value: null
|
238 |
+
forced_eos_token_id:
|
239 |
+
desc: null
|
240 |
+
value: null
|
241 |
+
remove_invalid_values:
|
242 |
+
desc: null
|
243 |
+
value: false
|
244 |
+
exponential_decay_length_penalty:
|
245 |
+
desc: null
|
246 |
+
value: null
|
247 |
+
suppress_tokens:
|
248 |
+
desc: null
|
249 |
+
value: null
|
250 |
+
begin_suppress_tokens:
|
251 |
+
desc: null
|
252 |
+
value: null
|
253 |
+
architectures:
|
254 |
+
desc: null
|
255 |
+
value:
|
256 |
+
- MistralForCausalLM
|
257 |
+
finetuning_task:
|
258 |
+
desc: null
|
259 |
+
value: null
|
260 |
+
id2label:
|
261 |
+
desc: null
|
262 |
+
value:
|
263 |
+
'0': LABEL_0
|
264 |
+
'1': LABEL_1
|
265 |
+
label2id:
|
266 |
+
desc: null
|
267 |
+
value:
|
268 |
+
LABEL_0: 0
|
269 |
+
LABEL_1: 1
|
270 |
+
tokenizer_class:
|
271 |
+
desc: null
|
272 |
+
value: null
|
273 |
+
prefix:
|
274 |
+
desc: null
|
275 |
+
value: null
|
276 |
+
bos_token_id:
|
277 |
+
desc: null
|
278 |
+
value: 1
|
279 |
+
pad_token_id:
|
280 |
+
desc: null
|
281 |
+
value: null
|
282 |
+
eos_token_id:
|
283 |
+
desc: null
|
284 |
+
value: 2
|
285 |
+
sep_token_id:
|
286 |
+
desc: null
|
287 |
+
value: null
|
288 |
+
decoder_start_token_id:
|
289 |
+
desc: null
|
290 |
+
value: null
|
291 |
+
task_specific_params:
|
292 |
+
desc: null
|
293 |
+
value: null
|
294 |
+
problem_type:
|
295 |
+
desc: null
|
296 |
+
value: null
|
297 |
+
_name_or_path:
|
298 |
+
desc: null
|
299 |
+
value: mistralai/Mistral-7B-v0.1
|
300 |
+
transformers_version:
|
301 |
+
desc: null
|
302 |
+
value: 4.36.2
|
303 |
+
model_type:
|
304 |
+
desc: null
|
305 |
+
value: mistral
|
306 |
+
quantization_config:
|
307 |
+
desc: null
|
308 |
+
value:
|
309 |
+
quant_method: QuantizationMethod.BITS_AND_BYTES
|
310 |
+
load_in_8bit: false
|
311 |
+
load_in_4bit: true
|
312 |
+
llm_int8_threshold: 6.0
|
313 |
+
llm_int8_skip_modules: null
|
314 |
+
llm_int8_enable_fp32_cpu_offload: false
|
315 |
+
llm_int8_has_fp16_weight: false
|
316 |
+
bnb_4bit_quant_type: nf4
|
317 |
+
bnb_4bit_use_double_quant: true
|
318 |
+
bnb_4bit_compute_dtype: bfloat16
|
319 |
+
output_dir:
|
320 |
+
desc: null
|
321 |
+
value: /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance
|
322 |
+
overwrite_output_dir:
|
323 |
+
desc: null
|
324 |
+
value: false
|
325 |
+
do_train:
|
326 |
+
desc: null
|
327 |
+
value: false
|
328 |
+
do_eval:
|
329 |
+
desc: null
|
330 |
+
value: false
|
331 |
+
do_predict:
|
332 |
+
desc: null
|
333 |
+
value: false
|
334 |
+
evaluation_strategy:
|
335 |
+
desc: null
|
336 |
+
value: 'no'
|
337 |
+
prediction_loss_only:
|
338 |
+
desc: null
|
339 |
+
value: false
|
340 |
+
per_device_train_batch_size:
|
341 |
+
desc: null
|
342 |
+
value: 2
|
343 |
+
per_device_eval_batch_size:
|
344 |
+
desc: null
|
345 |
+
value: 8
|
346 |
+
per_gpu_train_batch_size:
|
347 |
+
desc: null
|
348 |
+
value: null
|
349 |
+
per_gpu_eval_batch_size:
|
350 |
+
desc: null
|
351 |
+
value: null
|
352 |
+
gradient_accumulation_steps:
|
353 |
+
desc: null
|
354 |
+
value: 2
|
355 |
+
eval_accumulation_steps:
|
356 |
+
desc: null
|
357 |
+
value: null
|
358 |
+
eval_delay:
|
359 |
+
desc: null
|
360 |
+
value: 0
|
361 |
+
learning_rate:
|
362 |
+
desc: null
|
363 |
+
value: 0.0002
|
364 |
+
weight_decay:
|
365 |
+
desc: null
|
366 |
+
value: 0.0
|
367 |
+
adam_beta1:
|
368 |
+
desc: null
|
369 |
+
value: 0.9
|
370 |
+
adam_beta2:
|
371 |
+
desc: null
|
372 |
+
value: 0.999
|
373 |
+
adam_epsilon:
|
374 |
+
desc: null
|
375 |
+
value: 1.0e-08
|
376 |
+
max_grad_norm:
|
377 |
+
desc: null
|
378 |
+
value: 0.3
|
379 |
+
num_train_epochs:
|
380 |
+
desc: null
|
381 |
+
value: 3.0
|
382 |
+
max_steps:
|
383 |
+
desc: null
|
384 |
+
value: 60
|
385 |
+
lr_scheduler_type:
|
386 |
+
desc: null
|
387 |
+
value: cosine
|
388 |
+
lr_scheduler_kwargs:
|
389 |
+
desc: null
|
390 |
+
value: {}
|
391 |
+
warmup_ratio:
|
392 |
+
desc: null
|
393 |
+
value: 0.03
|
394 |
+
warmup_steps:
|
395 |
+
desc: null
|
396 |
+
value: 0
|
397 |
+
log_level:
|
398 |
+
desc: null
|
399 |
+
value: passive
|
400 |
+
log_level_replica:
|
401 |
+
desc: null
|
402 |
+
value: warning
|
403 |
+
log_on_each_node:
|
404 |
+
desc: null
|
405 |
+
value: true
|
406 |
+
logging_dir:
|
407 |
+
desc: null
|
408 |
+
value: /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/runs/Jan07_08-30-52_096ae31a5012
|
409 |
+
logging_strategy:
|
410 |
+
desc: null
|
411 |
+
value: steps
|
412 |
+
logging_first_step:
|
413 |
+
desc: null
|
414 |
+
value: false
|
415 |
+
logging_steps:
|
416 |
+
desc: null
|
417 |
+
value: 10
|
418 |
+
logging_nan_inf_filter:
|
419 |
+
desc: null
|
420 |
+
value: true
|
421 |
+
save_strategy:
|
422 |
+
desc: null
|
423 |
+
value: steps
|
424 |
+
save_steps:
|
425 |
+
desc: null
|
426 |
+
value: 10
|
427 |
+
save_total_limit:
|
428 |
+
desc: null
|
429 |
+
value: null
|
430 |
+
save_safetensors:
|
431 |
+
desc: null
|
432 |
+
value: true
|
433 |
+
save_on_each_node:
|
434 |
+
desc: null
|
435 |
+
value: false
|
436 |
+
save_only_model:
|
437 |
+
desc: null
|
438 |
+
value: false
|
439 |
+
no_cuda:
|
440 |
+
desc: null
|
441 |
+
value: false
|
442 |
+
use_cpu:
|
443 |
+
desc: null
|
444 |
+
value: false
|
445 |
+
use_mps_device:
|
446 |
+
desc: null
|
447 |
+
value: false
|
448 |
+
seed:
|
449 |
+
desc: null
|
450 |
+
value: 42
|
451 |
+
data_seed:
|
452 |
+
desc: null
|
453 |
+
value: null
|
454 |
+
jit_mode_eval:
|
455 |
+
desc: null
|
456 |
+
value: false
|
457 |
+
use_ipex:
|
458 |
+
desc: null
|
459 |
+
value: false
|
460 |
+
bf16:
|
461 |
+
desc: null
|
462 |
+
value: false
|
463 |
+
fp16:
|
464 |
+
desc: null
|
465 |
+
value: false
|
466 |
+
fp16_opt_level:
|
467 |
+
desc: null
|
468 |
+
value: O1
|
469 |
+
half_precision_backend:
|
470 |
+
desc: null
|
471 |
+
value: auto
|
472 |
+
bf16_full_eval:
|
473 |
+
desc: null
|
474 |
+
value: false
|
475 |
+
fp16_full_eval:
|
476 |
+
desc: null
|
477 |
+
value: false
|
478 |
+
tf32:
|
479 |
+
desc: null
|
480 |
+
value: false
|
481 |
+
local_rank:
|
482 |
+
desc: null
|
483 |
+
value: 0
|
484 |
+
ddp_backend:
|
485 |
+
desc: null
|
486 |
+
value: null
|
487 |
+
tpu_num_cores:
|
488 |
+
desc: null
|
489 |
+
value: null
|
490 |
+
tpu_metrics_debug:
|
491 |
+
desc: null
|
492 |
+
value: false
|
493 |
+
debug:
|
494 |
+
desc: null
|
495 |
+
value: []
|
496 |
+
dataloader_drop_last:
|
497 |
+
desc: null
|
498 |
+
value: false
|
499 |
+
eval_steps:
|
500 |
+
desc: null
|
501 |
+
value: null
|
502 |
+
dataloader_num_workers:
|
503 |
+
desc: null
|
504 |
+
value: 0
|
505 |
+
past_index:
|
506 |
+
desc: null
|
507 |
+
value: -1
|
508 |
+
run_name:
|
509 |
+
desc: null
|
510 |
+
value: /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance
|
511 |
+
disable_tqdm:
|
512 |
+
desc: null
|
513 |
+
value: false
|
514 |
+
remove_unused_columns:
|
515 |
+
desc: null
|
516 |
+
value: true
|
517 |
+
label_names:
|
518 |
+
desc: null
|
519 |
+
value: null
|
520 |
+
load_best_model_at_end:
|
521 |
+
desc: null
|
522 |
+
value: false
|
523 |
+
metric_for_best_model:
|
524 |
+
desc: null
|
525 |
+
value: null
|
526 |
+
greater_is_better:
|
527 |
+
desc: null
|
528 |
+
value: null
|
529 |
+
ignore_data_skip:
|
530 |
+
desc: null
|
531 |
+
value: false
|
532 |
+
fsdp:
|
533 |
+
desc: null
|
534 |
+
value: []
|
535 |
+
fsdp_min_num_params:
|
536 |
+
desc: null
|
537 |
+
value: 0
|
538 |
+
fsdp_config:
|
539 |
+
desc: null
|
540 |
+
value:
|
541 |
+
min_num_params: 0
|
542 |
+
xla: false
|
543 |
+
xla_fsdp_grad_ckpt: false
|
544 |
+
fsdp_transformer_layer_cls_to_wrap:
|
545 |
+
desc: null
|
546 |
+
value: null
|
547 |
+
deepspeed:
|
548 |
+
desc: null
|
549 |
+
value: null
|
550 |
+
label_smoothing_factor:
|
551 |
+
desc: null
|
552 |
+
value: 0.0
|
553 |
+
optim:
|
554 |
+
desc: null
|
555 |
+
value: paged_adamw_32bit
|
556 |
+
optim_args:
|
557 |
+
desc: null
|
558 |
+
value: null
|
559 |
+
adafactor:
|
560 |
+
desc: null
|
561 |
+
value: false
|
562 |
+
group_by_length:
|
563 |
+
desc: null
|
564 |
+
value: true
|
565 |
+
length_column_name:
|
566 |
+
desc: null
|
567 |
+
value: length
|
568 |
+
report_to:
|
569 |
+
desc: null
|
570 |
+
value:
|
571 |
+
- tensorboard
|
572 |
+
- wandb
|
573 |
+
ddp_find_unused_parameters:
|
574 |
+
desc: null
|
575 |
+
value: null
|
576 |
+
ddp_bucket_cap_mb:
|
577 |
+
desc: null
|
578 |
+
value: null
|
579 |
+
ddp_broadcast_buffers:
|
580 |
+
desc: null
|
581 |
+
value: null
|
582 |
+
dataloader_pin_memory:
|
583 |
+
desc: null
|
584 |
+
value: true
|
585 |
+
dataloader_persistent_workers:
|
586 |
+
desc: null
|
587 |
+
value: false
|
588 |
+
skip_memory_metrics:
|
589 |
+
desc: null
|
590 |
+
value: true
|
591 |
+
use_legacy_prediction_loop:
|
592 |
+
desc: null
|
593 |
+
value: false
|
594 |
+
push_to_hub:
|
595 |
+
desc: null
|
596 |
+
value: true
|
597 |
+
resume_from_checkpoint:
|
598 |
+
desc: null
|
599 |
+
value: null
|
600 |
+
hub_model_id:
|
601 |
+
desc: null
|
602 |
+
value: null
|
603 |
+
hub_strategy:
|
604 |
+
desc: null
|
605 |
+
value: every_save
|
606 |
+
hub_token:
|
607 |
+
desc: null
|
608 |
+
value: <HUB_TOKEN>
|
609 |
+
hub_private_repo:
|
610 |
+
desc: null
|
611 |
+
value: false
|
612 |
+
hub_always_push:
|
613 |
+
desc: null
|
614 |
+
value: false
|
615 |
+
gradient_checkpointing:
|
616 |
+
desc: null
|
617 |
+
value: false
|
618 |
+
gradient_checkpointing_kwargs:
|
619 |
+
desc: null
|
620 |
+
value: null
|
621 |
+
include_inputs_for_metrics:
|
622 |
+
desc: null
|
623 |
+
value: false
|
624 |
+
fp16_backend:
|
625 |
+
desc: null
|
626 |
+
value: auto
|
627 |
+
push_to_hub_model_id:
|
628 |
+
desc: null
|
629 |
+
value: null
|
630 |
+
push_to_hub_organization:
|
631 |
+
desc: null
|
632 |
+
value: null
|
633 |
+
push_to_hub_token:
|
634 |
+
desc: null
|
635 |
+
value: <PUSH_TO_HUB_TOKEN>
|
636 |
+
mp_parameters:
|
637 |
+
desc: null
|
638 |
+
value: ''
|
639 |
+
auto_find_batch_size:
|
640 |
+
desc: null
|
641 |
+
value: false
|
642 |
+
full_determinism:
|
643 |
+
desc: null
|
644 |
+
value: false
|
645 |
+
torchdynamo:
|
646 |
+
desc: null
|
647 |
+
value: null
|
648 |
+
ray_scope:
|
649 |
+
desc: null
|
650 |
+
value: last
|
651 |
+
ddp_timeout:
|
652 |
+
desc: null
|
653 |
+
value: 1800
|
654 |
+
torch_compile:
|
655 |
+
desc: null
|
656 |
+
value: false
|
657 |
+
torch_compile_backend:
|
658 |
+
desc: null
|
659 |
+
value: null
|
660 |
+
torch_compile_mode:
|
661 |
+
desc: null
|
662 |
+
value: null
|
663 |
+
dispatch_batches:
|
664 |
+
desc: null
|
665 |
+
value: null
|
666 |
+
split_batches:
|
667 |
+
desc: null
|
668 |
+
value: false
|
669 |
+
include_tokens_per_second:
|
670 |
+
desc: null
|
671 |
+
value: false
|
672 |
+
include_num_input_tokens_seen:
|
673 |
+
desc: null
|
674 |
+
value: false
|
675 |
+
neftune_noise_alpha:
|
676 |
+
desc: null
|
677 |
+
value: null
|
wandb/run-20240107_083215-enryt6zo/files/output.log
ADDED
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
You're using a LlamaTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.
|
3 |
+
-------------------------------------------------
|
4 |
+
Pre-trained Model Answer:
|
5 |
+
Answer the following question truthfully.
|
6 |
+
: 고지의무가 뭐야?
|
7 |
+
: 어디에서 왔니?
|
8 |
+
: 이름은 몇번째인데요?
|
9 |
+
: 나는 언제 태어난거예요?
|
10 |
+
: 내 집주소는 어디입니까?
|
11 |
+
: 우리집 전화번호는 010-2345-6789 입니다.
|
12 |
+
: 저희 학교 위치는 여기에있습니다. (그림)
|
13 |
+
: 저희 학생들은 한국어,영어,수학을 배워요!
|
14 |
+
: 저희 선생님께서 좋아하시는것은 바로 음식과 운동입니다.
|
15 |
+
: 저희 학교를 찾으실때 도와드려요~
|
16 |
+
: 오늘 날짜는 2월 2일 입니다.
|
17 |
+
: 저희 학교는 매년 여러분
|
18 |
+
-------------------------------------------------
|
19 |
+
-------------------------------------------------
|
20 |
+
Pre-trained Model Answer:
|
21 |
+
Answer the following question truthfully.
|
22 |
+
: 운전면허 미보유 보험가입 어떻게 하는지 알려줘?
|
23 |
+
: 10분만에 답변드립니다!
|
24 |
+
## What is a car insurance policy for those who do not have driver's license in Korea?
|
25 |
+
### Car Insurance Policy For Those Who Do Not Have Driver’s License In Korea
|
26 |
+
##### [Car Insurance] How to get an auto insurance without having a driving licence (DL) or international DL in South Korea?
|
27 |
+
In order to drive on Korean roads, you need to obtain a valid driver’s license issued by the Ministry of Land, Transport and Maritime Affairs(MLTM). If your country has signed bilateral agreements with Korea regarding mutual recognition of licenses, then it may be possible that you can use your foreign-issued driver’s license while living here temporarily as long as its expiration date hasn’t passed yet; however this does NOT apply if one wants their own personal vehicle insured under them because they will still require obtaining local certification before doing so which takes time depending upon where exactly someone lives within Seoul metropolitan area etcetera…
|
28 |
+
The process starts off when applying through either KIA Motors Corporation headquartered near Gangnam Station OR Hyundai Motor Company located closeby too – both companies offer similar services
|
29 |
+
-------------------------------------------------
|
30 |
+
Drive already mounted at /content/gdrive; to attempt to forcibly remount, call drive.mount("/content/gdrive", force_remount=True).
|
31 |
+
/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance
|
32 |
+
/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance
|
33 |
+
env: LC_ALL=en_US.UTF-8
|
wandb/run-20240107_083215-enryt6zo/files/requirements.txt
ADDED
@@ -0,0 +1,497 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
absl-py==1.4.0
|
2 |
+
accelerate==0.25.0
|
3 |
+
aiohttp==3.9.1
|
4 |
+
aiosignal==1.3.1
|
5 |
+
alabaster==0.7.13
|
6 |
+
albumentations==1.3.1
|
7 |
+
altair==4.2.2
|
8 |
+
anyio==3.7.1
|
9 |
+
appdirs==1.4.4
|
10 |
+
argon2-cffi-bindings==21.2.0
|
11 |
+
argon2-cffi==23.1.0
|
12 |
+
array-record==0.5.0
|
13 |
+
arviz==0.15.1
|
14 |
+
astropy==5.3.4
|
15 |
+
astunparse==1.6.3
|
16 |
+
async-timeout==4.0.3
|
17 |
+
atpublic==4.0
|
18 |
+
attrs==23.1.0
|
19 |
+
audioread==3.0.1
|
20 |
+
autograd==1.6.2
|
21 |
+
babel==2.14.0
|
22 |
+
backcall==0.2.0
|
23 |
+
beautifulsoup4==4.11.2
|
24 |
+
bidict==0.22.1
|
25 |
+
bigframes==0.17.0
|
26 |
+
bitsandbytes==0.41.3.post2
|
27 |
+
bleach==6.1.0
|
28 |
+
blinker==1.4
|
29 |
+
blis==0.7.11
|
30 |
+
blosc2==2.0.0
|
31 |
+
bokeh==3.3.2
|
32 |
+
bqplot==0.12.42
|
33 |
+
branca==0.7.0
|
34 |
+
build==1.0.3
|
35 |
+
cachecontrol==0.13.1
|
36 |
+
cachetools==5.3.2
|
37 |
+
catalogue==2.0.10
|
38 |
+
certifi==2023.11.17
|
39 |
+
cffi==1.16.0
|
40 |
+
chardet==5.2.0
|
41 |
+
charset-normalizer==3.3.2
|
42 |
+
chex==0.1.7
|
43 |
+
click-plugins==1.1.1
|
44 |
+
click==8.1.7
|
45 |
+
cligj==0.7.2
|
46 |
+
cloudpickle==2.2.1
|
47 |
+
cmake==3.27.9
|
48 |
+
cmdstanpy==1.2.0
|
49 |
+
colorcet==3.0.1
|
50 |
+
colorlover==0.3.0
|
51 |
+
colour==0.1.5
|
52 |
+
community==1.0.0b1
|
53 |
+
confection==0.1.4
|
54 |
+
cons==0.4.6
|
55 |
+
contextlib2==21.6.0
|
56 |
+
contourpy==1.2.0
|
57 |
+
cryptography==41.0.7
|
58 |
+
cufflinks==0.17.3
|
59 |
+
cupy-cuda12x==12.2.0
|
60 |
+
cvxopt==1.3.2
|
61 |
+
cvxpy==1.3.2
|
62 |
+
cycler==0.12.1
|
63 |
+
cymem==2.0.8
|
64 |
+
cython==3.0.7
|
65 |
+
dask==2023.8.1
|
66 |
+
datascience==0.17.6
|
67 |
+
datasets==2.16.1
|
68 |
+
db-dtypes==1.2.0
|
69 |
+
dbus-python==1.2.18
|
70 |
+
debugpy==1.6.6
|
71 |
+
decorator==4.4.2
|
72 |
+
defusedxml==0.7.1
|
73 |
+
dill==0.3.7
|
74 |
+
diskcache==5.6.3
|
75 |
+
distributed==2023.8.1
|
76 |
+
distro==1.7.0
|
77 |
+
dlib==19.24.2
|
78 |
+
dm-tree==0.1.8
|
79 |
+
docker-pycreds==0.4.0
|
80 |
+
docstring-parser==0.15
|
81 |
+
docutils==0.18.1
|
82 |
+
dopamine-rl==4.0.6
|
83 |
+
duckdb==0.9.2
|
84 |
+
earthengine-api==0.1.384
|
85 |
+
easydict==1.11
|
86 |
+
ecos==2.0.12
|
87 |
+
editdistance==0.6.2
|
88 |
+
eerepr==0.0.4
|
89 |
+
einops==0.7.0
|
90 |
+
en-core-web-sm==3.6.0
|
91 |
+
entrypoints==0.4
|
92 |
+
et-xmlfile==1.1.0
|
93 |
+
etils==1.6.0
|
94 |
+
etuples==0.3.9
|
95 |
+
exceptiongroup==1.2.0
|
+ fastai==2.7.13
+ fastcore==1.5.29
+ fastdownload==0.0.7
+ fastjsonschema==2.19.0
+ fastprogress==1.0.3
+ fastrlock==0.8.2
+ filelock==3.13.1
+ fiona==1.9.5
+ firebase-admin==5.3.0
+ flask==2.2.5
+ flatbuffers==23.5.26
+ flax==0.7.5
+ folium==0.14.0
+ fonttools==4.47.0
+ frozendict==2.3.10
+ frozenlist==1.4.1
+ fsspec==2023.6.0
+ future==0.18.3
+ gast==0.5.4
+ gcsfs==2023.6.0
+ gdal==3.4.3
+ gdown==4.6.6
+ geemap==0.29.6
+ gensim==4.3.2
+ geocoder==1.38.1
+ geographiclib==2.0
+ geopandas==0.13.2
+ geopy==2.3.0
+ gin-config==0.5.0
+ gitdb==4.0.11
+ gitpython==3.1.40
+ glob2==0.7
+ google-ai-generativelanguage==0.4.0
+ google-api-core==2.11.1
+ google-api-python-client==2.84.0
+ google-auth-httplib2==0.1.1
+ google-auth-oauthlib==1.2.0
+ google-auth==2.17.3
+ google-cloud-aiplatform==1.38.1
+ google-cloud-bigquery-connection==1.12.1
+ google-cloud-bigquery-storage==2.24.0
+ google-cloud-bigquery==3.12.0
+ google-cloud-core==2.3.3
+ google-cloud-datastore==2.15.2
+ google-cloud-firestore==2.11.1
+ google-cloud-functions==1.13.3
+ google-cloud-iam==2.13.0
+ google-cloud-language==2.9.1
+ google-cloud-resource-manager==1.11.0
+ google-cloud-storage==2.8.0
+ google-cloud-translate==3.11.3
+ google-colab==1.0.0
+ google-crc32c==1.5.0
+ google-generativeai==0.3.2
+ google-pasta==0.2.0
+ google-resumable-media==2.7.0
+ google==2.0.3
+ googleapis-common-protos==1.62.0
+ googledrivedownloader==0.4
+ graphviz==0.20.1
+ greenlet==3.0.2
+ grpc-google-iam-v1==0.13.0
+ grpcio-status==1.48.2
+ grpcio==1.60.0
+ gspread-dataframe==3.3.1
+ gspread==3.4.2
+ gym-notices==0.0.8
+ gym==0.25.2
+ h5netcdf==1.3.0
+ h5py==3.9.0
+ holidays==0.39
+ holoviews==1.17.1
+ html5lib==1.1
+ httpimport==1.3.1
+ httplib2==0.22.0
+ huggingface-hub==0.20.1
+ humanize==4.7.0
+ hyperopt==0.2.7
+ ibis-framework==6.2.0
+ idna==3.6
+ imageio-ffmpeg==0.4.9
+ imageio==2.31.6
+ imagesize==1.4.1
+ imbalanced-learn==0.10.1
+ imgaug==0.4.0
+ importlib-metadata==7.0.0
+ importlib-resources==6.1.1
+ imutils==0.5.4
+ inflect==7.0.0
+ iniconfig==2.0.0
+ install==1.3.5
+ intel-openmp==2023.2.3
+ ipyevents==2.0.2
+ ipyfilechooser==0.6.0
+ ipykernel==5.5.6
+ ipyleaflet==0.18.1
+ ipython-genutils==0.2.0
+ ipython-sql==0.5.0
+ ipython==7.34.0
+ ipytree==0.2.2
+ ipywidgets==7.7.1
+ itsdangerous==2.1.2
+ jax==0.4.23
+ jaxlib==0.4.23+cuda12.cudnn89
+ jeepney==0.7.1
+ jieba==0.42.1
+ jinja2==3.1.2
+ joblib==1.3.2
+ jsonpickle==3.0.2
+ jsonschema-specifications==2023.11.2
+ jsonschema==4.19.2
+ jupyter-client==6.1.12
+ jupyter-console==6.1.0
+ jupyter-core==5.5.1
+ jupyter-server==1.24.0
+ jupyterlab-pygments==0.3.0
+ jupyterlab-widgets==3.0.9
+ kaggle==1.5.16
+ kagglehub==0.1.4
+ keras==2.15.0
+ keyring==23.5.0
+ kiwisolver==1.4.5
+ langcodes==3.3.0
+ launchpadlib==1.10.16
+ lazr.restfulclient==0.14.4
+ lazr.uri==1.0.6
+ lazy-loader==0.3
+ libclang==16.0.6
+ librosa==0.10.1
+ lida==0.0.10
+ lightgbm==4.1.0
+ linkify-it-py==2.0.2
+ llmx==0.0.15a0
+ llvmlite==0.41.1
+ locket==1.0.0
+ logical-unification==0.4.6
+ lxml==4.9.4
+ malloy==2023.1067
+ markdown-it-py==3.0.0
+ markdown==3.5.1
+ markupsafe==2.1.3
+ matplotlib-inline==0.1.6
+ matplotlib-venn==0.11.9
+ matplotlib==3.7.1
+ mdit-py-plugins==0.4.0
+ mdurl==0.1.2
+ minikanren==1.0.3
+ missingno==0.5.2
+ mistune==0.8.4
+ mizani==0.9.3
+ mkl==2023.2.0
+ ml-dtypes==0.2.0
+ mlxtend==0.22.0
+ more-itertools==10.1.0
+ moviepy==1.0.3
+ mpmath==1.3.0
+ msgpack==1.0.7
+ multidict==6.0.4
+ multipledispatch==1.0.0
+ multiprocess==0.70.15
+ multitasking==0.0.11
+ murmurhash==1.0.10
+ music21==9.1.0
+ natsort==8.4.0
+ nbclassic==1.0.0
+ nbclient==0.9.0
+ nbconvert==6.5.4
+ nbformat==5.9.2
+ nest-asyncio==1.5.8
+ networkx==3.2.1
+ nibabel==4.0.2
+ nltk==3.8.1
+ notebook-shim==0.2.3
+ notebook==6.5.5
+ numba==0.58.1
+ numexpr==2.8.8
+ numpy==1.23.5
+ oauth2client==4.1.3
+ oauthlib==3.2.2
+ opencv-contrib-python==4.8.0.76
+ opencv-python-headless==4.8.1.78
+ opencv-python==4.8.0.76
+ openpyxl==3.1.2
+ opt-einsum==3.3.0
+ optax==0.1.7
+ orbax-checkpoint==0.4.4
+ osqp==0.6.2.post8
+ packaging==23.2
+ pandas-datareader==0.10.0
+ pandas-gbq==0.19.2
+ pandas-stubs==1.5.3.230304
+ pandas==1.5.3
+ pandocfilters==1.5.0
+ panel==1.3.6
+ param==2.0.1
+ parso==0.8.3
+ parsy==2.1
+ partd==1.4.1
+ pathlib==1.0.1
+ pathy==0.10.3
+ patsy==0.5.4
+ peewee==3.17.0
+ peft==0.7.2.dev0
+ pexpect==4.9.0
+ pickleshare==0.7.5
+ pillow==9.4.0
+ pip-tools==6.13.0
+ pip==23.1.2
+ platformdirs==4.1.0
+ plotly==5.15.0
+ plotnine==0.12.4
+ pluggy==1.3.0
+ polars==0.17.3
+ pooch==1.8.0
+ portpicker==1.5.2
+ prefetch-generator==1.0.3
+ preshed==3.0.9
+ prettytable==3.9.0
+ proglog==0.1.10
+ progressbar2==4.2.0
+ prometheus-client==0.19.0
+ promise==2.3
+ prompt-toolkit==3.0.43
+ prophet==1.1.5
+ proto-plus==1.23.0
+ protobuf==3.20.3
+ psutil==5.9.5
+ psycopg2==2.9.9
+ ptyprocess==0.7.0
+ py-cpuinfo==9.0.0
+ py4j==0.10.9.7
+ pyarrow-hotfix==0.6
+ pyarrow==10.0.1
+ pyasn1-modules==0.3.0
+ pyasn1==0.5.1
+ pycocotools==2.0.7
+ pycparser==2.21
+ pyct==0.5.0
+ pydantic==1.10.13
+ pydata-google-auth==1.8.2
+ pydot-ng==2.0.0
+ pydot==1.4.2
+ pydotplus==2.0.2
+ pydrive2==1.6.3
+ pydrive==1.3.1
+ pyerfa==2.0.1.1
+ pygame==2.5.2
+ pygments==2.16.1
+ pygobject==3.42.1
+ pyjwt==2.3.0
+ pymc==5.7.2
+ pymystem3==0.2.0
+ pyopengl==3.1.7
+ pyopenssl==23.3.0
+ pyparsing==3.1.1
+ pyperclip==1.8.2
+ pyproj==3.6.1
+ pyproject-hooks==1.0.0
+ pyshp==2.3.1
+ pysocks==1.7.1
+ pytensor==2.14.2
+ pytest==7.4.3
+ python-apt==0.0.0
+ python-box==7.1.1
+ python-dateutil==2.8.2
+ python-louvain==0.16
+ python-slugify==8.0.1
+ python-utils==3.8.1
+ pytz==2023.3.post1
+ pyviz-comms==3.0.0
+ pywavelets==1.5.0
+ pyyaml==6.0.1
+ pyzmq==23.2.1
+ qdldl==0.1.7.post0
+ qudida==0.0.4
+ ratelim==0.1.6
+ referencing==0.32.0
+ regex==2023.6.3
+ requests-oauthlib==1.3.1
+ requests==2.31.0
+ requirements-parser==0.5.0
+ rich==13.7.0
+ rpds-py==0.15.2
+ rpy2==3.4.2
+ rsa==4.9
+ safetensors==0.4.1
+ scikit-image==0.19.3
+ scikit-learn==1.2.2
+ scipy==1.11.4
+ scooby==0.9.2
+ scs==3.2.4.post1
+ seaborn==0.12.2
+ secretstorage==3.3.1
+ send2trash==1.8.2
+ sentry-sdk==1.39.1
+ setproctitle==1.3.3
+ setuptools==67.7.2
+ shapely==2.0.2
+ shtab==1.6.5
+ six==1.16.0
+ sklearn-pandas==2.2.0
+ smart-open==6.4.0
+ smmap==5.0.1
+ sniffio==1.3.0
+ snowballstemmer==2.2.0
+ sortedcontainers==2.4.0
+ soundfile==0.12.1
+ soupsieve==2.5
+ soxr==0.3.7
+ spacy-legacy==3.0.12
+ spacy-loggers==1.0.5
+ spacy==3.6.1
+ sphinx==5.0.2
+ sphinxcontrib-applehelp==1.0.7
+ sphinxcontrib-devhelp==1.0.5
+ sphinxcontrib-htmlhelp==2.0.4
+ sphinxcontrib-jsmath==1.0.1
+ sphinxcontrib-qthelp==1.0.6
+ sphinxcontrib-serializinghtml==1.1.9
+ sqlalchemy==2.0.23
+ sqlglot==17.16.2
+ sqlparse==0.4.4
+ srsly==2.4.8
+ stanio==0.3.0
+ statsmodels==0.14.1
+ sympy==1.12
+ tables==3.8.0
+ tabulate==0.9.0
+ tbb==2021.11.0
+ tblib==3.0.0
+ tenacity==8.2.3
+ tensorboard-data-server==0.7.2
+ tensorboard==2.15.1
+ tensorflow-datasets==4.9.4
+ tensorflow-estimator==2.15.0
+ tensorflow-gcs-config==2.15.0
+ tensorflow-hub==0.15.0
+ tensorflow-io-gcs-filesystem==0.35.0
+ tensorflow-metadata==1.14.0
+ tensorflow-probability==0.22.0
+ tensorflow==2.15.0
+ tensorstore==0.1.45
+ termcolor==2.4.0
+ terminado==0.18.0
+ text-unidecode==1.3
+ textblob==0.17.1
+ tf-slim==1.1.0
+ thinc==8.1.12
+ threadpoolctl==3.2.0
+ tifffile==2023.12.9
+ tinycss2==1.2.1
+ tokenizers==0.15.0
+ toml==0.10.2
+ tomli==2.0.1
+ toolz==0.12.0
+ torch==2.1.0+cu121
+ torchaudio==2.1.0+cu121
+ torchdata==0.7.0
+ torchsummary==1.5.1
+ torchtext==0.16.0
+ torchvision==0.16.0+cu121
+ tornado==6.3.2
+ tqdm==4.66.1
+ traitlets==5.7.1
+ traittypes==0.2.1
+ transformers==4.36.2
+ triton==2.1.0
+ trl==0.7.7
+ tweepy==4.14.0
+ typer==0.9.0
+ types-pytz==2023.3.1.1
+ types-setuptools==69.0.0.0
+ typing-extensions==4.5.0
+ tyro==0.6.3
+ tzlocal==5.2
+ uc-micro-py==1.0.2
+ uritemplate==4.1.1
+ urllib3==2.0.7
+ vega-datasets==0.9.0
+ wadllib==1.3.6
+ wandb==0.16.1
+ wasabi==1.1.2
+ wcwidth==0.2.12
+ webcolors==1.13
+ webencodings==0.5.1
+ websocket-client==1.7.0
+ werkzeug==3.0.1
+ wheel==0.42.0
+ widgetsnbextension==3.6.6
+ wordcloud==1.9.3
+ wrapt==1.14.1
+ xarray-einstats==0.6.0
+ xarray==2023.7.0
+ xgboost==2.0.3
+ xlrd==2.0.1
+ xxhash==3.4.1
+ xyzservices==2023.10.1
+ yarl==1.9.4
+ yellowbrick==1.5
+ yfinance==0.2.33
+ zict==3.0.0
+ zipp==3.17.0
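
The requirements.txt dump above freezes the entire Colab environment; only a handful of those pins actually drive this fine-tune (transformers, peft, trl, torch, tokenizers, wandb). A minimal sketch, not part of this commit, for checking those pins before re-running the notebook:

```python
# Minimal sketch (not part of this commit): verify that the packages that matter
# for reproducing the fine-tune match the versions pinned in requirements.txt.
from importlib.metadata import version, PackageNotFoundError

PINNED = {
    "transformers": "4.36.2",
    "peft": "0.7.2.dev0",
    "trl": "0.7.7",
    "torch": "2.1.0+cu121",
    "tokenizers": "0.15.0",
    "wandb": "0.16.1",
}

for name, expected in PINNED.items():
    try:
        installed = version(name)
    except PackageNotFoundError:
        installed = None
    status = "OK" if installed == expected else "MISMATCH"
    print(f"{name}: pinned={expected} installed={installed} -> {status}")
```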
wandb/run-20240107_083215-enryt6zo/files/wandb-metadata.json
ADDED
@@ -0,0 +1,52 @@
+ {
+     "os": "Linux-6.1.58+-x86_64-with-glibc2.35",
+     "python": "3.10.12",
+     "heartbeatAt": "2024-01-07T08:32:18.151267",
+     "startedAt": "2024-01-07T08:32:15.488264",
+     "docker": null,
+     "cuda": null,
+     "args": [],
+     "state": "running",
+     "program": "Mistral-7B-Finetuning-Insurance.ipynb",
+     "codePathLocal": null,
+     "colab": "https://colab.research.google.com/notebook#fileId=1yggNDPxtSX3bri4ON4ppYc-G-Q2zFtkQ",
+     "host": "096ae31a5012",
+     "username": "root",
+     "executable": "/usr/bin/python3",
+     "cpu_count": 1,
+     "cpu_count_logical": 2,
+     "cpu_freq": {
+         "current": 2199.998,
+         "min": 0.0,
+         "max": 0.0
+     },
+     "cpu_freq_per_core": [
+         {
+             "current": 2199.998,
+             "min": 0.0,
+             "max": 0.0
+         },
+         {
+             "current": 2199.998,
+             "min": 0.0,
+             "max": 0.0
+         }
+     ],
+     "disk": {
+         "/": {
+             "total": 78.1898422241211,
+             "used": 40.23767852783203
+         }
+     },
+     "gpu": "Tesla T4",
+     "gpu_count": 1,
+     "gpu_devices": [
+         {
+             "name": "Tesla T4",
+             "memory_total": 16106127360
+         }
+     ],
+     "memory": {
+         "total": 12.674781799316406
+     }
+ }
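
wandb-metadata.json records the hardware the run used: a single Tesla T4 (16 GB VRAM) and roughly 12.7 GiB of system RAM. A small sketch, not part of the commit, that reproduces the same pre-flight check locally (psutil and torch are both in the pinned environment):

```python
# Sketch of a pre-flight check matching the hardware recorded in wandb-metadata.json
# (Tesla T4 with 16 GB VRAM, ~12.7 GiB system RAM).
import psutil
import torch

if torch.cuda.is_available():
    props = torch.cuda.get_device_properties(0)
    print(f"GPU: {props.name}, {props.total_memory / 1024**3:.1f} GiB VRAM")
else:
    print("No CUDA device visible")

print(f"System RAM: {psutil.virtual_memory().total / 1024**3:.1f} GiB")
```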
wandb/run-20240107_083215-enryt6zo/files/wandb-summary.json
ADDED
@@ -0,0 +1 @@
+ {"train/loss": 1.2937, "train/learning_rate": 0.0, "train/epoch": 0.44, "train/global_step": 60, "_timestamp": 1704617031.0774682, "_runtime": 695.5428292751312, "_step": 6, "train/train_runtime": 756.7394, "train/train_samples_per_second": 0.317, "train/train_steps_per_second": 0.079, "train/total_flos": 1588809031680000.0, "train/train_loss": 1.4518062591552734}
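
wandb-summary.json holds the final metrics of the run: training loss 1.2937 at step 60 (0.44 epoch), an average training loss of 1.4518, and a runtime of roughly 757 seconds. A hedged sketch of pulling the same numbers through the wandb public API; the entity and project below are placeholders, since only the run id (enryt6zo) appears in this commit:

```python
# Placeholder entity/project: they are not recorded in this commit, only the run id is.
import wandb

api = wandb.Api()
run = api.run("your-entity/your-project/enryt6zo")

print(run.summary.get("train/loss"))           # 1.2937 in the snapshot above
print(run.summary.get("train/train_loss"))     # 1.4518 average training loss
print(run.summary.get("train/train_runtime"))  # ~756.7 seconds
```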
wandb/run-20240107_083215-enryt6zo/logs/debug.log
ADDED
@@ -0,0 +1,131 @@
+ 2024-01-07 08:32:15,522 INFO MainThread:684 [wandb_setup.py:_flush():76] Current SDK version is 0.16.1
+ 2024-01-07 08:32:15,523 INFO MainThread:684 [wandb_setup.py:_flush():76] Configure stats pid to 684
+ 2024-01-07 08:32:15,523 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from /root/.config/wandb/settings
+ 2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/settings
+ 2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Loading settings from environment variables: {}
+ 2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Applying setup settings: {'_disable_service': False}
+ 2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Inferring run settings from compute environment: {'program': '<python with no main file>'}
+ 2024-01-07 08:32:15,524 INFO MainThread:684 [wandb_setup.py:_flush():76] Applying login settings: {'api_key': '***REDACTED***'}
+ 2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_log_setup():524] Logging user logs to /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/run-20240107_083215-enryt6zo/logs/debug.log
+ 2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_log_setup():525] Logging internal logs to /content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/wandb/run-20240107_083215-enryt6zo/logs/debug-internal.log
+ 2024-01-07 08:32:15,525 INFO MainThread:684 [wandb_init.py:_jupyter_setup():470] configuring jupyter hooks <wandb.sdk.wandb_init._WandbInit object at 0x7bfea43750f0>
+ 2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():564] calling init triggers
+ 2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():571] wandb.init called with sweep_config: {}
+ config: {}
+ 2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():614] starting backend
+ 2024-01-07 08:32:15,526 INFO MainThread:684 [wandb_init.py:init():618] setting up manager
+ 2024-01-07 08:32:15,531 INFO MainThread:684 [backend.py:_multiprocessing_setup():105] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2024-01-07 08:32:15,534 INFO MainThread:684 [wandb_init.py:init():624] backend started and connected
+ 2024-01-07 08:32:15,570 INFO MainThread:684 [wandb_run.py:_label_probe_notebook():1294] probe notebook
+ 2024-01-07 08:32:17,418 INFO MainThread:684 [wandb_init.py:init():716] updated telemetry
+ 2024-01-07 08:32:17,453 INFO MainThread:684 [wandb_init.py:init():749] communicating run to backend with 90.0 second timeout
+ 2024-01-07 08:32:17,964 INFO MainThread:684 [wandb_run.py:_on_init():2254] communicating current version
+ 2024-01-07 08:32:18,119 INFO MainThread:684 [wandb_run.py:_on_init():2263] got version response
+ 2024-01-07 08:32:18,120 INFO MainThread:684 [wandb_init.py:init():800] starting run threads in backend
+ 2024-01-07 08:32:18,210 INFO MainThread:684 [wandb_run.py:_console_start():2233] atexit reg
+ 2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2088] redirect: wrap_raw
+ 2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2153] Wrapping output streams.
+ 2024-01-07 08:32:18,211 INFO MainThread:684 [wandb_run.py:_redirect():2178] Redirects installed.
+ 2024-01-07 08:32:18,213 INFO MainThread:684 [wandb_init.py:init():841] run started, returning control to user process
+ 2024-01-07 08:32:18,219 INFO MainThread:684 [wandb_run.py:_config_callback():1342] config_cb None None {'vocab_size': 32000, 'max_position_embeddings': 32768, 'hidden_size': 4096, 'intermediate_size': 14336, 'num_hidden_layers': 32, 'num_attention_heads': 32, 'sliding_window': 4096, 'num_key_value_heads': 8, 'hidden_act': 'silu', 'initializer_range': 0.02, 'rms_norm_eps': 1e-05, 'use_cache': False, 'rope_theta': 10000.0, 'attention_dropout': 0.0, 'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'bfloat16', 'use_bfloat16': False, 'tf_legacy_loss': False, 'pruned_heads': {}, 'tie_word_embeddings': False, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'typical_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'exponential_decay_length_penalty': None, 'suppress_tokens': None, 'begin_suppress_tokens': None, 'architectures': ['MistralForCausalLM'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': None, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'mistralai/Mistral-7B-v0.1', 'transformers_version': '4.36.2', 'model_type': 'mistral', 'quantization_config': {'quant_method': 'QuantizationMethod.BITS_AND_BYTES', 'load_in_8bit': False, 'load_in_4bit': True, 'llm_int8_threshold': 6.0, 'llm_int8_skip_modules': None, 'llm_int8_enable_fp32_cpu_offload': False, 'llm_int8_has_fp16_weight': False, 'bnb_4bit_quant_type': 'nf4', 'bnb_4bit_use_double_quant': True, 'bnb_4bit_compute_dtype': 'bfloat16'}, 'output_dir': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance', 'overwrite_output_dir': False, 'do_train': False, 'do_eval': False, 'do_predict': False, 'evaluation_strategy': 'no', 'prediction_loss_only': False, 'per_device_train_batch_size': 2, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': None, 'per_gpu_eval_batch_size': None, 'gradient_accumulation_steps': 2, 'eval_accumulation_steps': None, 'eval_delay': 0, 'learning_rate': 0.0002, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 0.3, 'num_train_epochs': 3.0, 'max_steps': 60, 'lr_scheduler_type': 'cosine', 'lr_scheduler_kwargs': {}, 'warmup_ratio': 0.03, 'warmup_steps': 0, 'log_level': 'passive', 'log_level_replica': 'warning', 'log_on_each_node': True, 'logging_dir': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance/runs/Jan07_08-30-52_096ae31a5012', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 10, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 10, 'save_total_limit': None, 'save_safetensors': True, 'save_on_each_node': False, 'save_only_model': False, 'no_cuda': False, 'use_cpu': False, 'use_mps_device': False, 'seed': 42, 'data_seed': None, 'jit_mode_eval': False, 'use_ipex': False, 'bf16': False, 'fp16': False, 'fp16_opt_level': 'O1', 'half_precision_backend': 'auto', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': False, 'local_rank': 0, 'ddp_backend': None, 'tpu_num_cores': None, 'tpu_metrics_debug': False, 'debug': [], 'dataloader_drop_last': False, 'eval_steps': None, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': '/content/gdrive/MyDrive/LLM/Mistral-7B-Finetuning-Insurance', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': None, 'load_best_model_at_end': False, 'metric_for_best_model': None, 'greater_is_better': None, 'ignore_data_skip': False, 'fsdp': [], 'fsdp_min_num_params': 0, 'fsdp_config': {'min_num_params': 0, 'xla': False, 'xla_fsdp_grad_ckpt': False}, 'fsdp_transformer_layer_cls_to_wrap': None, 'deepspeed': None, 'label_smoothing_factor': 0.0, 'optim': 'paged_adamw_32bit', 'optim_args': None, 'adafactor': False, 'group_by_length': True, 'length_column_name': 'length', 'report_to': ['tensorboard', 'wandb'], 'ddp_find_unused_parameters': None, 'ddp_bucket_cap_mb': None, 'ddp_broadcast_buffers': None, 'dataloader_pin_memory': True, 'dataloader_persistent_workers': False, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': None, 'hub_model_id': None, 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'hub_private_repo': False, 'hub_always_push': False, 'gradient_checkpointing': False, 'gradient_checkpointing_kwargs': None, 'include_inputs_for_metrics': False, 'fp16_backend': 'auto', 'push_to_hub_model_id': None, 'push_to_hub_organization': None, 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', 'mp_parameters': '', 'auto_find_batch_size': False, 'full_determinism': False, 'torchdynamo': None, 'ray_scope': 'last', 'ddp_timeout': 1800, 'torch_compile': False, 'torch_compile_backend': None, 'torch_compile_mode': None, 'dispatch_batches': None, 'split_batches': False, 'include_tokens_per_second': False, 'include_num_input_tokens_seen': False, 'neftune_noise_alpha': None}
+ 2024-01-07 08:44:03,889 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:44:03,890 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:44:26,570 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:44:34,326 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:44:34,327 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:44:46,475 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:46:05,058 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:46:05,058 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:46:13,038 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:46:18,516 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:46:18,516 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:50:09,111 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:50:13,508 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:50:13,513 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:51:38,094 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:51:38,098 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:51:38,098 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:51:41,383 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:52:12,662 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:52:12,662 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:52:45,454 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:53:09,095 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:53:09,096 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:55:59,367 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:56:00,251 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:56:00,252 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:56:06,562 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:56:10,699 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:56:10,700 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 08:57:55,151 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 08:57:58,981 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 08:57:58,981 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:00:08,883 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:00:18,822 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:00:18,822 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:01,727 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:06,231 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:06,232 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:25,862 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:25,898 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:25,898 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:35,845 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:37,716 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:37,717 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:40,487 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:40,495 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:40,501 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:45,788 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:45,793 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:45,794 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:08:49,111 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:08:49,155 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:08:49,155 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:09:44,376 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:09:44,379 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:09:44,380 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:10:16,380 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:10:16,383 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:10:16,383 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:10:25,980 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:10:26,068 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:10:26,076 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:10:52,944 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:10:52,950 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:10:52,950 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:10:54,782 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:10:54,813 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:10:54,813 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:12:03,682 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:12:03,692 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:12:03,692 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:12:06,232 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:12:06,325 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:12:06,326 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:12:33,934 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:12:34,001 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:12:34,004 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:13:00,605 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:13:00,639 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:13:00,639 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:13:06,384 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:13:06,462 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:13:06,462 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:13:24,618 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:13:24,664 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:13:24,665 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:13:52,451 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:13:52,481 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:13:52,481 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:14:01,980 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:14:02,039 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:14:02,040 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:14:37,393 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:14:42,402 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:14:42,403 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:16:27,093 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:16:27,127 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:16:27,127 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
+ 2024-01-07 09:19:48,148 INFO MainThread:684 [wandb_init.py:_resume_backend():440] resuming backend
+ 2024-01-07 09:19:48,193 INFO MainThread:684 [jupyter.py:save_ipynb():373] not saving jupyter notebook
+ 2024-01-07 09:19:48,193 INFO MainThread:684 [wandb_init.py:_pause_backend():435] pausing backend
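
Line 30 of debug.log captures the full configuration that wandb received from the Trainer: Mistral-7B-v0.1 loaded in 4-bit NF4 with double quantization and bfloat16 compute, batch size 2 with gradient accumulation 2, learning rate 2e-4 on a cosine schedule with 3% warmup, paged_adamw_32bit, and 60 training steps. A hedged reconstruction of that setup is sketched below; dataset loading and the LoRA/SFTTrainer wiring are not part of this log and are omitted, and the local output path and device map are placeholders/assumptions:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig, TrainingArguments

# Quantization settings as logged under 'quantization_config'.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

model = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mistral-7B-v0.1",
    quantization_config=bnb_config,
    device_map="auto",  # assumption: the device map is not recorded in the log
)

# Trainer settings as logged by the config_cb entry above.
training_args = TrainingArguments(
    output_dir="Mistral-7B-Finetuning-Insurance",  # placeholder for the Drive path
    per_device_train_batch_size=2,
    gradient_accumulation_steps=2,
    learning_rate=2e-4,
    max_grad_norm=0.3,
    max_steps=60,
    lr_scheduler_type="cosine",
    warmup_ratio=0.03,
    optim="paged_adamw_32bit",
    logging_steps=10,
    save_strategy="steps",
    save_steps=10,
    group_by_length=True,
    report_to=["tensorboard", "wandb"],
    push_to_hub=True,
)
```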
wandb/run-20240107_083215-enryt6zo/run-enryt6zo.wandb
ADDED
Binary file (33.2 kB)