# model_glm_10b.sh — GLM-10B model configuration
  1. MODEL_TYPE="glm-10b"
  2. CHECKPOINT_PATH="/zhangpai21/checkpoints/glm-10b-sat"
  3. MP_SIZE=1
  4. MODEL_ARGS="--model-parallel-size ${MP_SIZE} \
  5. --vocab 50304 \
  6. --num-layers 48 \
  7. --hidden-size 4096 \
  8. --num-attention-heads 64 \
  9. --max-sequence-length 1025 \
  10. --tokenizer-type glm_GPT2BPETokenizer \
  11. --tokenizer-model-type gpt2 \
  12. --task-mask \
  13. --load ${CHECKPOINT_PATH}"