data_download.sh

#!/usr/bin/env bash
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

DATA_DIR=${1:-/workspace/bert/data}
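# Example invocation (sketch; the path is illustrative):
#   bash data_download.sh /path/to/data
# With no argument, DATA_DIR falls back to /workspace/bert/data.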
# Download vocab files from pretrained models
cd "$DATA_DIR"/vocab && python3 download_models.py && rm *.zip && rm ./*/*.ckpt.*
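# Note: download_models.py is expected to fetch pretrained model archives for
# their vocab files; the downloaded zip archives and any checkpoint files
# (*.ckpt.*) are then removed to save disk space.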
# Download SQuAD
cd "$DATA_DIR"/squad && . squad_download.sh
# Download SWAG
git clone https://github.com/rowanz/swagaf.git "$DATA_DIR"/swag
# Download GLUE (MRPC)
cd "$DATA_DIR"/glue && . download_mrpc.sh
# Download Wikipedia corpus
cd "$DATA_DIR"/wikipedia_corpus && . download_wikipedia.sh
# Download BookCorpus
cd "$DATA_DIR"/bookcorpus && . download_bookcorpus.sh
cd "$DATA_DIR"
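# The remaining steps assume create_datasets_from_start.sh and
# merge_datasets_after_creation.sh are present in DATA_DIR; they turn the
# raw corpus text into HDF5 shards for pretraining.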
# Create HDF5 files for WIKI
bash create_datasets_from_start.sh wikipedia_corpus ./wikipedia_corpus/wikipedia_corpus.txt \
  && rm -r ./wikipedia_corpus/final_*

# Create HDF5 files for Bookcorpus
bash create_datasets_from_start.sh bookcorpus ./bookcorpus/bookcorpus.txt \
  && rm -r ./bookcorpus/final_*
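# Assumed meaning of the arguments below: <output name>
# <comma-separated input shard directories> <number of output shards>.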
# Create HDF5 files for inter sequence-pair mixed Wikipedia and Bookcorpus
bash merge_datasets_after_creation.sh merged_wiki+books wikipedia_corpus/hdf5_shards,bookcorpus/hdf5_shards 1024