fix config for parity with previous change

5159d00a86#diff-65b4693504c4e8ffac76c7f2c90913faee381f802cf64e7f49c995a2134ed3b3R164
This commit is contained in:
Wing Lian
2023-05-11 08:13:09 -04:00
parent 4cc7ed8898
commit 165da584b3
6 changed files with 8 additions and 9 deletions

View File

@@ -34,7 +34,7 @@ tf32: false
early_stopping_patience:
resume_from_checkpoint:
local_rank:
special_tokens:
tokens:
pad_token: "[PAD]"
bos_token: "<s>"
eos_token: "</s>"

View File

@@ -51,7 +51,7 @@ deepspeed:
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
tokens:
pad_token: "[PAD]"
bos_token: "<s>"
eos_token: "</s>"

View File

@@ -49,7 +49,7 @@ deepspeed:
weight_decay: 0.01
fsdp:
fsdp_config:
#special_tokens:
#tokens:
# pad_token: "[PAD]"
# bos_token: "<s>"
# eos_token: "</s>"

View File

@@ -55,7 +55,7 @@ deepspeed:
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
tokens:
pad_token: "[PAD]"
bos_token: "<s>"
eos_token: "</s>"

View File

@@ -1,7 +1,6 @@
base_model: mosaicml/mpt-7b
base_model_config: mosaicml/mpt-7b
model_type: AutoModelForCausalLM
tokenizer_type: GPTNeoXTokenizer
tokenizer_type: AutoTokenizer
trust_remote_code: true # required for mpt as their model class is not merged into transformers yet
load_in_8bit: false
datasets:
@@ -25,7 +24,7 @@ wandb_watch:
wandb_run_id:
wandb_log_model: checkpoint
output_dir: ./mpt-alpaca-7b
batch_size: 4
batch_size: 1
micro_batch_size: 1
num_epochs: 3
optimizer: adamw_bnb_8bit
@@ -52,7 +51,7 @@ deepspeed:
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
tokens:
pad_token: "<|padding|>"
bos_token: "<|endoftext|>"
eos_token: "<|endoftext|>"

View File

@@ -52,7 +52,7 @@ deepspeed:
weight_decay: 0.0001
fsdp:
fsdp_config:
special_tokens:
tokens:
pad_token: "<|padding|>"
bos_token: "<|endoftext|>"
eos_token: "<|endoftext|>"