Commit 7effe39 (1 parent: 3046232)
llmjp0 committed on May 30, 2024
Showing 3 changed files with 72 additions and 0 deletions.
@@ -0,0 +1,33 @@
#!/bin/bash

# python virtualenv
cd /model/llmjp0/nii-geniac-megatron/Megatron-LM
source .env/bin/activate

# distributed settings
TENSOR_PARALLEL_SIZE=1
PIPELINE_PARALLEL_SIZE=1

# model config
HF_CHECKPOINT_DIR=/model/checkpoints_1.3b_hf/CC_v2_code20K_en40K_ja60K_ver2.2/
MEGATRON_CHECKPOINT_DIR=/model/checkpoints_1.3b_mcore/CC_v2_code20K_en40K_ja60K_ver2.2/

mkdir -p ${MEGATRON_CHECKPOINT_DIR}

# tokenizer config
TOKENIZER_MODEL=/model/llmjp0/llm-jp-tokenizer/models/ver2.2/code20K_en40K_ja60K.ver2.2.model

# convert
python tools/checkpoint/convert.py \
  --model-type GPT \
  --loader llama2_hf \
  --saver mcore \
  --target-tensor-parallel-size ${TENSOR_PARALLEL_SIZE} \
  --target-pipeline-parallel-size ${PIPELINE_PARALLEL_SIZE} \
  --load-dir ${HF_CHECKPOINT_DIR} \
  --save-dir ${MEGATRON_CHECKPOINT_DIR} \
  --tokenizer-model ${TOKENIZER_MODEL} \
  --bf16 \
  --saver-transformer-impl "transformer_engine"
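
Here `tools/checkpoint/convert.py` is Megatron-LM's plugin-based converter: `--loader llama2_hf` reads the Llama-2-style Hugging Face checkpoint and `--saver mcore` writes an mcore-format Megatron checkpoint repartitioned to the target tensor/pipeline sizes. A post-run sanity check might look like the sketch below; the exact output layout (a `latest_checkpointed_iteration.txt` file plus per-rank directories) is Megatron's usual convention and is not something this commit shows.

#!/bin/bash
# Sketch: confirm the HF -> Megatron conversion produced a checkpoint.
# Assumption: Megatron's usual on-disk layout, which this commit does not show.
MEGATRON_CHECKPOINT_DIR=/model/checkpoints_1.3b_mcore/CC_v2_code20K_en40K_ja60K_ver2.2/
cat ${MEGATRON_CHECKPOINT_DIR}/latest_checkpointed_iteration.txt
# with TP=1 and PP=1 there should be a single rank directory (mp_rank_00)
find ${MEGATRON_CHECKPOINT_DIR} -maxdepth 2 -type d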
@@ -0,0 +1,8 @@
#!/bin/bash
set -e
# python virtualenv
source .env/bin/activate

python scripts/abci/megatron_to_hf/check.py \
  --base-hf-model-path /model/checkpoints_1.3b_hf/CC_v2_code20K_en40K_ja60K_ver2.2/ \
  --converted-hf-model-path /model/checkpoints_1.3b_hf/megatron_to_hf/CC_v2_code20K_en40K_ja60K_ver2.2/
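
`scripts/abci/megatron_to_hf/check.py` itself is not included in this commit; judging from its arguments, it presumably compares the base Hugging Face checkpoint against the copy round-tripped through Megatron. In the same spirit, a cheap metadata cross-check might look like the following sketch; it compares configs only, not weights, and assumes both directories contain a standard Hugging Face config.json.

#!/bin/bash
set -e
# Sketch: metadata cross-check between the base and round-tripped HF checkpoints.
# Assumption: both directories hold a standard HF config.json (not shown in this commit).
BASE=/model/checkpoints_1.3b_hf/CC_v2_code20K_en40K_ja60K_ver2.2
CONV=/model/checkpoints_1.3b_hf/megatron_to_hf/CC_v2_code20K_en40K_ja60K_ver2.2
for key in vocab_size hidden_size num_hidden_layers num_attention_heads; do
  base=$(grep -o "\"$key\": *[0-9]*" "$BASE/config.json")
  conv=$(grep -o "\"$key\": *[0-9]*" "$CONV/config.json")
  [ "$base" = "$conv" ] && echo "ok: $base" || echo "MISMATCH: $base vs $conv"
done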
@@ -0,0 +1,31 @@
#!/bin/bash

# python virtualenv
cd /model/llmjp0/nii-geniac-megatron/Megatron-LM
source .env/bin/activate

# distributed settings
TENSOR_PARALLEL_SIZE=1
PIPELINE_PARALLEL_SIZE=1

# model config
MEGATRON_CHECKPOINT_DIR=/model/checkpoints_1.3b_mcore/CC_v2_code20K_en40K_ja60K_ver2.2/
HF_CHECKPOINT_DIR=/model/checkpoints_1.3b_hf/megatron_to_hf/CC_v2_code20K_en40K_ja60K_ver2.2/

mkdir -p ${HF_CHECKPOINT_DIR}

# tokenizer config
TOKENIZER_MODEL_DIR=/model/checkpoints_1.3b_hf/megatron_to_hf/CC_v2_code20K_en40K_ja60K_ver2.2/

# convert
python tools/checkpoint/convert.py \
  --model-type GPT \
  --loader mcore \
  --saver llama2_hf \
  --load-dir ${MEGATRON_CHECKPOINT_DIR} \
  --save-dir ${HF_CHECKPOINT_DIR} \
  --true-vocab-size 96867 \
  --hf-tokenizer-path ${TOKENIZER_MODEL_DIR} \
  --save-dtype bfloat16 \
  --loader-transformer-impl "transformer_engine" \
  --megatron-path /model/llmjp0/nii-geniac-megatron/Megatron-LM
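
The export direction adds `--true-vocab-size 96867`: Megatron pads the embedding table for tensor-parallel divisibility, and this flag tells the saver to trim that padding back to the tokenizer's real vocabulary. A quick consistency check on the result might look like this sketch, assuming the exported directory contains a standard Hugging Face config.json.

#!/bin/bash
# Sketch: confirm the exported HF config carries the unpadded vocab size.
# Assumption: a standard HF config.json in the export directory (not shown in this commit).
HF_CHECKPOINT_DIR=/model/checkpoints_1.3b_hf/megatron_to_hf/CC_v2_code20K_en40K_ja60K_ver2.2/
grep '"vocab_size"' ${HF_CHECKPOINT_DIR}/config.json   # expect: "vocab_size": 96867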