Merge pull request #146 from FarisHijazi/main
added docker-compose file
This commit is contained in:
@@ -55,6 +55,12 @@ accelerate launch scripts/finetune.py examples/lora-openllama-3b/config.yml \
- `winglian/axolotl:dev`: dev branch
- `winglian/axolotl-runpod:main`: for runpod

Or run on the current files for development:

```sh
docker compose up -d
```

- Conda/Pip venv
  1. Install python **3.9**
||||
docker-compose.yaml — new file, 20 lines
@@ -0,0 +1,20 @@
# Development compose file for axolotl.
# Builds the image from the local Dockerfile and mounts the repo so code
# changes on the host are visible inside the container immediately.
# Start with: docker compose up -d

# version: '3.8'  # the top-level `version` key is obsolete in Compose v2
services:
  axolotl:
    build:
      context: .
      dockerfile: ./docker/Dockerfile
    volumes:
      # Live-mount the repository for development.
      - .:/workspace/axolotl
      # Share the host's HuggingFace cache so models are not re-downloaded.
      - ~/.cache/huggingface/:/root/.cache/huggingface/
    # set environment variables
    environment:
      - WANDB_API_KEY=${WANDB_API_KEY}
    deploy:
      resources:
        reservations:
          devices:
            # Request NVIDIA GPU access for the container.
            - driver: nvidia
              # count: 1
              capabilities: [gpu]
    # Keep the container running so it can be exec'd into for development.
    command: tail -f /dev/null
Reference in New Issue
Block a user