# model_glm_base.sh (471 B)
  1. MODEL_TYPE="glm-large-generation"
  2. CHECKPOINT_PATH="/zhangpai21/checkpoints/glm-base-en-blank"
  3. MP_SIZE=1
  4. MODEL_ARGS="--model-parallel-size ${MP_SIZE} \
  5. --vocab 30592 \
  6. --num-layers 12 \
  7. --hidden-size 768 \
  8. --num-attention-heads 12 \
  9. --max-sequence-length 513 \
  10. --tokenizer-type glm_BertWordPieceTokenizer \
  11. --tokenizer-model-type bert-base-uncased \
  12. --load ${CHECKPOINT_PATH}"