fix(readme): update cuda instructions during preprocess (#2114) [skip ci]

This commit is contained in:
NanoCode012
2024-12-03 20:58:03 +07:00
committed by bursteratom
parent 4078f37076
commit 1969fa3bf0

View File

@@ -147,7 +147,7 @@ pip3 install -e '.[flash-attn,deepspeed]'
### Usage
```bash
# preprocess datasets - optional but recommended
-CUDA_VISIBLE_DEVICES="" python -m axolotl.cli.preprocess examples/openllama-3b/lora.yml
+CUDA_VISIBLE_DEVICES="0" python -m axolotl.cli.preprocess examples/openllama-3b/lora.yml
# finetune lora
accelerate launch -m axolotl.cli.train examples/openllama-3b/lora.yml