---
# Docker Compose stack for the full-stack LLM trainer app:
# - backend: FastAPI (SSH/paramiko, pipeline streaming, GPU stats,
#   xterm.js terminal bridge, Ollama model manager)
# - frontend: React + Tailwind (Connection, Documents, Pipeline, QA Pairs,
#   Training, Terminal, Models, Config panels), served behind nginx on :80

# NOTE(review): `version` is obsolete under the Compose Specification —
# the v2 CLI ignores it (with a warning). Kept for legacy tooling only.
version: "3.9"

services:
  backend:
    build: ./backend
    container_name: llm-trainer-backend
    restart: unless-stopped
    ports:
      # Quoted so YAML 1.1 parsers never misread a mapping as a
      # sexagesimal integer.
      - "8080:8080"
    environment:
      # NOTE(review): hard-coded LAN address — consider moving to a .env
      # file / variable substitution before deploying on another host.
      - OLLAMA_URL=http://192.168.2.47:11434
    networks:
      - llm-net

  frontend:
    build: ./frontend
    container_name: llm-trainer-frontend
    restart: unless-stopped
    ports:
      # Host 3000 -> container 80 (nginx serving the built React app).
      - "3000:80"
    depends_on:
      # Start ordering only; does not wait for the backend to be healthy.
      - backend
    networks:
      - llm-net

networks:
  llm-net:
    driver: bridge