NLU-Speech-MASSIVE-finetune / submitted_eval_job_06-10.16-33.sh
Beomseok-LEE's picture
Upload folder using huggingface_hub
8048f3a verified
raw
history blame
1.92 kB
#!/bin/bash
# SLURM batch script: evaluate a fine-tuned mT5-base NLU model (MASSIVE SLU)
# checkpoint on a single GPU node.
#SBATCH --job-name=eval_NLU_NLU.mt5-base.task_type-1.fine_tune.gpu_a100-40g+.node-1x1.bsz-64.epochs-22.metric-ema.metric_lang-all
#SBATCH -n 1
#SBATCH -N 1
#SBATCH -p gpu
#SBATCH --gres=gpu:1
#SBATCH --cpus-per-task=8
# NOTE(review): the job name advertises gpu_a100-40g+, but this constraint
# requests a V100 / 32 GB node — confirm which hardware is actually intended.
#SBATCH --constraint=gpu_v100&gpu_32g
#SBATCH --mem=32G
#SBATCH --mail-type=ALL
#SBATCH [email protected]
# %j expands to the SLURM job id; the log is written next to the evaluated checkpoint.
#SBATCH --output=/beegfs/scratch/user/blee/project_3/models/NLU.mt5-base.task_type-1.fine_tune.gpu_a100-40g+.node-1x1.bsz-64.epochs-22.metric-ema.metric_lang-all/checkpoint-30407/sbatch-%j-06-10.16-33.log
# Fail fast: abort the job if the venv is missing or any setup step fails,
# instead of running the evaluation against a half-configured environment.
set -euo pipefail

# Activate the project virtualenv (Python 3.9 + Hugging Face stack).
source /home/blee/environments/py39-hugging-face/bin/activate

# Corporate HTTP(S) proxy for outbound downloads; internal hosts bypass it.
export http_proxy=http://proxy.int.europe.naverlabs.com:3128
export https_proxy=http://proxy.int.europe.naverlabs.com:3128
export no_proxy=int.europe.naverlabs.com

# Keep Hugging Face model/dataset caches on scratch storage instead of $HOME.
export HF_HOME=/beegfs/scratch/user/blee/hugging-face/models
export HF_DATASETS_DOWNLOADED_DATASETS_PATH=/beegfs/scratch/user/blee/hugging-face/downloaded
export HF_DATASETS_EXTRACTED_DATASETS_PATH=/beegfs/scratch/user/blee/hugging-face/extracted

# Bug fix: the original "PYTHONPATH=:/path" carried a leading empty entry,
# which Python treats as the current working directory silently added to
# sys.path. Append to any pre-existing PYTHONPATH without an empty slot.
export PYTHONPATH=${PYTHONPATH:+${PYTHONPATH}:}/home/blee/code-repo/transformers-slu
# Run test-set prediction with the fine-tuned checkpoint, using beam search
# (2 beams) and generation-based evaluation on the multilingual MASSIVE test split.
readonly code_repo=/home/blee/code-repo/transformers-slu
readonly checkpoint_dir=/beegfs/scratch/user/blee/project_3/models/NLU.mt5-base.task_type-1.fine_tune.gpu_a100-40g+.node-1x1.bsz-64.epochs-22.metric-ema.metric_lang-all/checkpoint-30407

# Build the command as an array so every argument stays a single word
# regardless of spaces, and the long invocation is easy to audit.
eval_cmd=(
  python "${code_repo}/nle/examples/nlu/run_nlu_mT5.py"
  --do_predict
  --predict_with_generate
  --use_fast_tokenizer
  --trust_remote_code
  --test_dataset_name "${code_repo}/nle/dataset/massive_slu"
  --test_dataset_config_name multilingual-test
  --model_name_or_path "${checkpoint_dir}"
  --output_dir "${checkpoint_dir}/eval/NLU"
  --preprocessing_num_workers 1
  --length_column_name input_length
  --per_device_eval_batch_size 32
  --generation_num_beams 2
)

"${eval_cmd[@]}"