run.sub

#!/bin/bash
#SBATCH --exclusive
#SBATCH --mem=0
#SBATCH --overcommit
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eux
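
# Example submissions (illustrative only; the node count is inferred from the
# 48x8 naming used in the model_name below, and the phase 2 values from the
# hard-coded phase 2 model_name; adjust for your cluster):
#   PHASE=1 sbatch -N 48 run.sub
#   PHASE=2 LR=6e-3 STEPS=10000 WARMUP=2000 sbatch -N 48 run.sub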
# Docker image resulting from bash scripts/docker/build.sh
readonly docker_image="gitlab-master.nvidia.com/dl/joc/electra_tf2:keras_mp_20.07_clean_up"
# Location of dataset for phase 1 and phase 2
readonly datadir="/lustre/fsw/joc-luna/sharatht/electra_tf2_data/"
readonly mounts=".:/workspace/electra,${datadir}:/workspace/electra/data"
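
# Pick the cluster name from the DGX system type (DGX-2 systems map to circe,
# DGX A100 systems to selene); it is forwarded to bind.sh via --cluster.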
DGXSYSTEM=DGXA100
cluster="selene"
if [[ "${DGXSYSTEM}" == DGX2* ]]; then
    cluster='circe'
fi
if [[ "${DGXSYSTEM}" == DGXA100* ]]; then
    cluster='selene'
fi
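
# bind.sh wraps the training command: per its flags it binds each rank to an
# exclusive set of CPU cores and a single InfiniBand device for this cluster.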
BIND_CMD="./scripts/bind.sh --cpu=exclusive --ib=single --cluster=$cluster -- "
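
# Training hyperparameters; every value below can be overridden from the
# environment at submission time. The defaults target phase 1.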
BATCHSIZE=${BATCHSIZE:-16}
PHASE=${PHASE:-1}
LR=${LR:-3e-3}
STEPS=${STEPS:-57450}
WARMUP=${WARMUP:-3750}
GRAD_ACCUM_STEPS=${GRAD_ACCUM_STEPS:-1}
b1=${b1:-"0.878"}
b2=${b2:-"0.974"}
decay=${decay:-"0.5"}
end_lr=${end_lr:-"0.0"}
skip_adaptive=${skip_adaptive:-"yes"}
model_count=${model_count:-1}
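
# Pass --skip_adaptive to run_pretraining.py only when requested.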
skip_flag=""
if [ "$skip_adaptive" = "yes" ] ; then
    skip_flag=" --skip_adaptive"
fi
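
# Checkpoint every STEPS/10 steps, i.e. roughly ten checkpoints per run.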
ckpt_STEPS=$(awk -v a=$STEPS 'BEGIN { print a / 10}')
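
# Phase 1 pretrains with sequence length 128; phase 2 restores the phase 1
# checkpoint (selected via the hard-coded phase 1 model_name below) and
# continues with sequence length 512.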
if [ "$PHASE" = "1" ] ; then
    LAUNCH_CMD="$BIND_CMD python run_pretraining.py \
        --model_name='electra_keras_mp_base_lamb_48x8x${BATCHSIZE}x${GRAD_ACCUM_STEPS}_p1_skip_adaptive_${skip_adaptive}_LR_${LR}_WARMUP_${WARMUP}_STEPS_${STEPS}_b1_${b1}_b2_${b2}_decay_${decay}_end_lr_${end_lr}_${model_count}' \
        --pretrain_tfrecords='/workspace/electra/data/tfrecord_lower_case_1_seq_len_128_random_seed_12345/books_wiki_en_corpus/train/pretrain_data*' \
        --num_train_steps=$STEPS \
        --num_warmup_steps=$WARMUP \
        --disc_weight=50.0 \
        --generator_hidden_size=0.3333333 \
        --learning_rate=$LR \
        --train_batch_size=$BATCHSIZE \
        --max_seq_length=128 --log_freq=10 \
        --save_checkpoints_steps=$ckpt_STEPS \
        --optimizer='lamb' $skip_flag --opt_beta_1=$b1 --opt_beta_2=$b2 --lr_decay_power=$decay --end_lr=$end_lr --gradient_accumulation_steps=$GRAD_ACCUM_STEPS --amp --xla "
else
    LAUNCH_CMD="$BIND_CMD python run_pretraining.py \
        --model_name='electra_keras_mp_base_lamb_48x8x176x1_p1_skip_adaptive_yes_LR_6e-3_WARMUP_2000_STEPS_10000_b1_0.878_b2_0.974_decay_0.5_end_lr_0.0_${model_count}' \
        --pretrain_tfrecords='/workspace/electra/data/tfrecord_lower_case_1_seq_len_512_random_seed_12345/books_wiki_en_corpus/train/pretrain_data*' \
        --num_train_steps=$STEPS \
        --num_warmup_steps=$WARMUP \
        --disc_weight=50.0 \
        --generator_hidden_size=0.3333333 \
        --learning_rate=$LR \
        --train_batch_size=$BATCHSIZE \
        --max_seq_length=512 --log_freq=10 \
        --restore_checkpoint --phase2 \
        --save_checkpoints_steps=$ckpt_STEPS \
        --optimizer='lamb' $skip_flag --opt_beta_1=$b1 --opt_beta_2=$b2 --lr_decay_power=$decay --end_lr=$end_lr --gradient_accumulation_steps=$GRAD_ACCUM_STEPS --amp --xla "
fi
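
# Launch the training command inside the container on the allocated nodes;
# the --container-* flags assume Slurm's pyxis/enroot plugin is available.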
srun --mpi=pmi2 -l --container-image="${docker_image}" --container-mounts="${mounts}" bash -c "${LAUNCH_CMD}"