Improve GPU logging to break out PyTorch cache and system memory

This commit is contained in:
Aman Karmani
2023-08-13 01:50:32 +00:00
committed by Aman Gupta Karmani
parent e029ab34ea
commit 7b55fe6419
6 changed files with 32 additions and 14 deletions

View File

@@ -18,7 +18,6 @@ from optimum.bettertransformer import BetterTransformer
from transformers import GenerationConfig, TextStreamer
from axolotl.logging_config import configure_logging
from axolotl.utils.bench import log_gpu_memory_usage
from axolotl.utils.config import normalize_config, validate_config
from axolotl.utils.data import load_prepare_datasets, load_pretraining_dataset
from axolotl.utils.dict import DictDefault
@@ -226,8 +225,6 @@ def train(
LOG.info("Finished preparing dataset. Exiting...")
return
log_gpu_memory_usage(LOG, "baseline", cfg.device)
# Load the model and tokenizer
LOG.info("loading model and (optionally) peft_config...")
model, peft_config = load_model(cfg, tokenizer)