#!/usr/bin/env bash
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
# This script applies quantization to the models from Table 1 in:
# Bag of Tricks for Efficient Text Classification, arXiv 1607.01759, 2016
#
# Prerequisite: run classification-results.sh first so that the trained
# models and the train/test splits exist under ${DATADIR} / ${RESULTDIR}.

set -e

# Datasets to quantize; order must stay in sync with the LR array below
# (LR[i] is the retraining learning rate for DATASET[i]).
DATASET=(
  ag_news
  sogou_news
  dbpedia
  yelp_review_polarity
  yelp_review_full
  yahoo_answers
  amazon_review_full
  amazon_review_polarity
)
# These learning rates were chosen by validation on a subset of the training set.
LR=( 0.25 0.5 0.5 0.1 0.1 0.1 0.05 0.05 )

readonly RESULTDIR=result
readonly DATADIR=data

# Diagnostics go to stderr so they don't mix with test-result output.
echo 'Warning! Make sure you run the classification-results.sh script before this one' >&2
echo 'Otherwise you can expect the commands in this script to fail' >&2

# Iterate over the array's own indices instead of a hard-coded {0..7},
# so adding/removing a dataset (with its matching LR entry) just works.
for i in "${!DATASET[@]}"; do
  echo "Working on dataset ${DATASET[i]}"
  # Quantize with retraining for 5 epochs; keep only the 100k most
  # frequent features (-cutoff) and normalize quantized vectors (-qnorm).
  # Training chatter is discarded; only the test accuracy below is printed.
  ./fasttext quantize -input "${DATADIR}/${DATASET[i]}.train" \
    -output "${RESULTDIR}/${DATASET[i]}" -lr "${LR[i]}" \
    -thread 4 -qnorm -retrain -epoch 5 -cutoff 100000 > /dev/null
  # Evaluate the quantized (.ftz) model on the held-out test split.
  ./fasttext test "${RESULTDIR}/${DATASET[i]}.ftz" \
    "${DATADIR}/${DATASET[i]}.test"
done
  37. done