#!/bin/bash
# Distributed fine-tuning of YuYan-10B on the CLUE WSC task.
# Grid-searches learning rate x micro-batch size x epochs; each run logs
# to a timestamped directory under outputs/WSC/ so runs never collide.
#
# Requires: 8 GPUs on a single node, pretrained checkpoint in ./yuyan-10b,
# CLUE WSC data under clue_data/wsc/, and bert-vocab.txt in the CWD.

# Launcher topology: 8 processes, one node. Kept as a bash array so the
# call site can expand it safely with "${DISTRIBUTED_ARGS[@]}" instead of
# relying on unquoted word-splitting (ShellCheck SC2086).
WORLD_SIZE=8
DISTRIBUTED_ARGS=(
  --nproc_per_node "$WORLD_SIZE"
  --nnodes 1
  --node_rank 0
  --master_addr localhost
  --master_port 6000
)

TASK="WSC"
TRAIN_DATA="clue_data/wsc/train.json"
VALID_DATA="clue_data/wsc/dev.json"
TEST_DATA="clue_data/wsc/test.json"
PRETRAINED_CHECKPOINT="./yuyan-10b"
VOCAB_FILE="bert-vocab.txt"

for lr in 3e-6 5e-6 1e-5; do
  for bs in 8 16 32; do
    for ep in 10 20 30; do
      # Second-resolution timestamp keeps each run's output dir unique.
      ct=$(date +"%m%d%H%M%S")
      OUTPUTS_PATH="outputs/${TASK}/yuyan_bs_${bs}_lr_${lr}_ep_${ep}_${ct}"
      # mkdir -p is idempotent; no need for an existence check.
      mkdir -p "${OUTPUTS_PATH}"
      python -m torch.distributed.launch "${DISTRIBUTED_ARGS[@]}" ./tasks/main.py \
        --task "$TASK" \
        --seed 1238 \
        --pretrained-checkpoint "$PRETRAINED_CHECKPOINT" \
        --train-data "$TRAIN_DATA" \
        --valid-data "$VALID_DATA" \
        --test-data "$TEST_DATA" \
        --tokenizer-type BertWordPieceLowerCase \
        --vocab-file "$VOCAB_FILE" \
        --epochs "$ep" \
        --tensor-model-parallel-size 8 \
        --num-layers 48 \
        --hidden-size 4096 \
        --num-attention-heads 64 \
        --micro-batch-size "$bs" \
        --lr "$lr" \
        --lr-decay-style linear \
        --lr-warmup-fraction 0.1 \
        --seq-length 128 \
        --max-position-embeddings 512 \
        --log-interval 5 \
        --eval-interval 50 \
        --eval-iters 25 \
        --weight-decay 1.0e-1 \
        --res-path "${OUTPUTS_PATH}" \
        --fp16 | tee "${OUTPUTS_PATH}/job.log"
      # --activations-checkpoint-method uniform \
    done
  done
done