make sure the CI fails when pytest script fails (#1669)
* make sure the CI job fails when the pytest script fails
* make sure the defaults come through for tests
* make sure tensorboard is loaded for the test assertion
This commit is contained in:
@@ -1,4 +1,5 @@
|
||||
#!/bin/bash
# Abort on the first failing command so CI reports the test failure
# instead of silently continuing.
set -e

# Run the unit tests, skipping the end-to-end suite.
pytest --ignore=tests/e2e/ /workspace/axolotl/tests/

# Run the patched end-to-end tests as a separate step.
pytest /workspace/axolotl/tests/e2e/patched/
|
||||
|
||||
@@ -80,8 +80,8 @@ class MultipackBatchSampler(BatchSampler):
|
||||
self.lengths = np.array(lengths, dtype=np.int32)
|
||||
self.batch_max_len = batch_max_len
|
||||
self.batch_size = batch_size
|
||||
self.group_size = group_size
|
||||
self.bin_size = bin_size
|
||||
self.group_size = group_size if group_size is not None else 100_000
|
||||
self.bin_size = bin_size if bin_size is not None else 200
|
||||
self.drop_last = drop_last
|
||||
|
||||
self._efficiency = None
|
||||
|
||||
@@ -62,6 +62,7 @@ class TestResumeLlama(unittest.TestCase):
|
||||
"save_steps": 10,
|
||||
"save_total_limit": 5,
|
||||
"max_steps": 40,
|
||||
"use_tensorboard": True,
|
||||
}
|
||||
)
|
||||
if is_torch_bf16_gpu_available():
|
||||
|
||||
Reference in New Issue
Block a user