model_glm_2b.sh

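# Model-argument definitions for the GLM-2B checkpoint in SwissArmyTransformer
# format (glm-2b-sat); typically sourced by a launcher script that forwards
# ${MODEL_ARGS} to the Python entry point.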
  1. MODEL_TYPE="glm-2b"
  2. CHECKPOINT_PATH="/zhangpai21/checkpoints/glm-2b-sat"
  3. MP_SIZE=1
  4. MODEL_ARGS="--model-parallel-size ${MP_SIZE} \
  5. --vocab 50304 \
  6. --num-layers 36 \
  7. --hidden-size 2048 \
  8. --num-attention-heads 32 \
  9. --max-sequence-length 1025 \
  10. --tokenizer-type glm_GPT2BPETokenizer \
  11. --tokenizer-model-type gpt2 \
  12. --task-mask \
  13. --load ${CHECKPOINT_PATH}"