sino committed on
Commit
6c624ee
1 Parent(s): b4d1dc8

Update configuration_maelm.py

Files changed (1)
  1. configuration_maelm.py +2 -2
configuration_maelm.py CHANGED
@@ -111,13 +111,13 @@ class MAELMConfig(PretrainedConfig):
         per_device_train_batch_size=12,
         learning_rate=0.00005,
         lm_lr_ratio=0.1,
-        tokenizer_name='Llama-2-7b-hf',
+        tokenizer_name='meta-llama/Llama-2-7b-hf',
         resume_from_checkpoint=None,
         resume_from_pth='epoch_4-step_8639-allstep_60000.pth',
         backbone={'name': 'MAEViT', 'arch': 'b', 'patch_size': 16, 'mask_ratio': 0.0, 'img_size': [80, 2992], \
                   'ckpt': 'epoch_20.pth'},
         neck={'name': 'LMDecoder', 'patch_size': 16, 'img_size': [80, 2992], 'in_chans': 3, 'embed_dim': 768, \
-               'decoder_embed_dim': 4544, 'freeze_decoder': True, 'decoder_type': 'Llama-2-7b-hf'},
+               'decoder_embed_dim': 4544, 'freeze_decoder': True, 'decoder_type': 'meta-llama/Llama-2-7b-hf'},
         wandb={'proj': 'ATRena_cap', 'expname': 'cap_lynx_apmPT_mccaigc1wFT'},
         **kwargs,
     ):
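
Context for the change: a bare name like 'Llama-2-7b-hf' is only resolved by transformers if a local directory of that name exists, whereas the namespaced repo id 'meta-llama/Llama-2-7b-hf' resolves on the Hugging Face Hub. A minimal sketch of how the updated value is typically consumed (an illustration, not code from this repo; it assumes transformers is installed and access to the gated meta-llama repository has been granted):

    from transformers import AutoTokenizer

    # The bare name 'Llama-2-7b-hf' would only be found as a local directory;
    # the namespaced repo id below resolves on the Hugging Face Hub instead.
    tokenizer_name = 'meta-llama/Llama-2-7b-hf'  # same value as the updated config defaults
    tokenizer = AutoTokenizer.from_pretrained(tokenizer_name)
    print(type(tokenizer).__name__)  # e.g. LlamaTokenizerFast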