diff --git a/src/axolotl/prompters.py b/src/axolotl/prompters.py
index 3ae0a0bd4..c79c3afa7 100644
--- a/src/axolotl/prompters.py
+++ b/src/axolotl/prompters.py
@@ -11,6 +11,7 @@ class PromptStyle(Enum):
     instruct = "instruct"
     chat = "chat"
 
+
 class AlpacaPrompter:
     system_prompt = "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n"
     system_no_input_prompt = "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n"
@@ -50,6 +51,10 @@ class AlpacaPrompter:
         return output.split(self.response_split)[1].strip()
 
 
+class UnpromptedPrompter(AlpacaPrompter):
+    system_prompt = ""
+    system_no_input_prompt = ""
+
 class JeopardyPrompter(AlpacaPrompter):
     prompt_input = "Below is a Jeopardy clue paired with input providing the category of the clue. Write a concise response that best answers tbe clue given the category.\n\n### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:\n"
 
diff --git a/src/axolotl/utils/data.py b/src/axolotl/utils/data.py
index 2ceaa4d99..8d9525fa5 100644
--- a/src/axolotl/utils/data.py
+++ b/src/axolotl/utils/data.py
@@ -98,6 +98,11 @@ def load_tokenized_prepared_datasets(tokenizer, cfg, default_dataset_prepared_pa
                 ds = load_dataset("json", data_files=fp, streaming=False, split=None)
             if not ds:
                 raise Exception("unhandled dataset load")
+            # support for using a subset of the data
+            if d.shards:
+                ds = ds.shuffle(seed=42)["train"].shard(
+                    num_shards=d.shards, index=0
+                )
             d_type = d.type
             d_type_split = d_type.split(":")
             d_base_type = d_type_split[0]
diff --git a/src/axolotl/utils/models.py b/src/axolotl/utils/models.py
index 1bcc4b0bc..571f1c6dd 100644
--- a/src/axolotl/utils/models.py
+++ b/src/axolotl/utils/models.py
@@ -134,6 +134,7 @@ def load_model(
             model = LlamaForCausalLM.from_pretrained(
                 base_model,
                 load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None,
+                load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None,
                 torch_dtype=torch_dtype,
                 device_map=cfg.device_map,
                 **model_kwargs,
@@ -168,6 +169,7 @@ def load_model(
             model = getattr(transformers, model_type).from_pretrained(
                 base_model,
                 load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None,
+                load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None,
                 torch_dtype=torch_dtype,
                 device_map=cfg.device_map,
                 trust_remote_code=True if cfg.trust_remote_code is True else False,
@@ -182,6 +184,7 @@
             base_model,
             config=config,
             load_in_8bit=cfg.load_in_8bit and cfg.adapter is not None,
+            load_in_4bit=cfg.load_in_4bit and cfg.adapter is not None,
             torch_dtype=torch_dtype,
             device_map=cfg.device_map,
             trust_remote_code=True if cfg.trust_remote_code is True else False,
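
For context on the data.py hunk, here is a minimal, standalone sketch of the shuffle-then-shard pattern it relies on from the `datasets` library; the toy dataset and the shard count are illustrative and not part of the patch:

```python
# Standalone sketch (not axolotl code): shuffle with a fixed seed, then keep the
# first of N shards to obtain a deterministic 1/N subset of the data.
from datasets import Dataset

ds = Dataset.from_dict(
    {
        "instruction": [f"question {i}" for i in range(100)],
        "output": [f"answer {i}" for i in range(100)],
    }
)

num_shards = 4  # hypothetical value for a dataset's `shards` setting
subset = ds.shuffle(seed=42).shard(num_shards=num_shards, index=0)
print(len(subset))  # 25 -- a reproducible quarter of the shuffled rows
```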
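
Likewise, a hedged sketch of what the new `load_in_4bit` argument forwards to `from_pretrained`; it assumes a transformers build with bitsandbytes 4-bit support installed, and the model name is a placeholder:

```python
# Standalone sketch (not axolotl code): like load_in_8bit, the new flag is only
# passed when an adapter is configured (cfg.load_in_4bit and cfg.adapter is not None).
import torch
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "huggyllama/llama-7b",  # placeholder base model
    load_in_4bit=True,      # requires bitsandbytes; weights are quantized to 4-bit
    torch_dtype=torch.float16,
    device_map="auto",
)
```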