model_glm_roberta_large.sh 480 B

12345678910111213
  1. MODEL_TYPE="glm-roberta-large"
  2. CHECKPOINT_PATH="/zhangpai21/checkpoints/glm-large-en-blank"
  3. MP_SIZE=1
  4. MODEL_ARGS="--model-parallel-size ${MP_SIZE} \
  5. --vocab 50304 \
  6. --num-layers 24 \
  7. --hidden-size 1024 \
  8. --num-attention-heads 16 \
  9. --max-sequence-length 513 \
  10. --tokenizer-type glm_GPT2BPETokenizer \
  11. --tokenizer-model-type roberta \
  12. --task-mask \
  13. --load ${CHECKPOINT_PATH}"