new llama-2 default settings (#370)

* new default settings

* fix whitespace

* rm max packed sequence length

---------

Co-authored-by: Mads Henrichsen <mads@BrbartiendeMads.lan>
This commit is contained in:
mhenrichsen
2023-08-14 10:39:09 +02:00
committed by GitHub
parent 919246fbc1
commit fdffef5940
2 changed files with 7 additions and 8 deletions

View File

@@ -15,7 +15,7 @@ val_set_size: 0.01
output_dir: ./lora-out
sequence_len: 4096
-max_packed_sequence_len: 4096
+sample_packing: true
adapter: lora
lora_model_dir:
@@ -49,8 +49,8 @@ early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
-xformers_attention: true
-flash_attention:
+xformers_attention:
+flash_attention: true
warmup_steps: 10
eval_steps: 20
@@ -64,4 +64,3 @@ special_tokens:
bos_token: "<s>"
eos_token: "</s>"
unk_token: "<unk>"
-pad_token: "<pad>"

View File

@@ -18,7 +18,8 @@ adapter: qlora
lora_model_dir:
sequence_len: 4096
-max_packed_sequence_len: 4096
+sample_packing: true
lora_r: 32
lora_alpha: 16
lora_dropout: 0.05
@@ -50,8 +51,8 @@ early_stopping_patience:
resume_from_checkpoint:
local_rank:
logging_steps: 1
-xformers_attention: true
-flash_attention:
+xformers_attention:
+flash_attention: true
warmup_steps: 10
eval_steps: 20
@@ -65,4 +66,3 @@ special_tokens:
bos_token: "<s>"
eos_token: "</s>"
unk_token: "<unk>"
-pad_token: "<pad>"