feat: add lfm2 family and latest moe model (#3208)
* feat: add lfm2 family and latest moe model
* fix: use ml-cross-entropy for lfm2 examples
This commit is contained in:
@@ -29,5 +29,5 @@ UV_PREFIX = "uv " if USE_UV else ""

 print(
     UNINSTALL_PREFIX
-    f'{UV_PREFIX}pip install "cut-cross-entropy[transformers] @ git+https://github.com/axolotl-ai-cloud/ml-cross-entropy.git@147ea28"'
+    f'{UV_PREFIX}pip install "cut-cross-entropy[transformers] @ git+https://github.com/axolotl-ai-cloud/ml-cross-entropy.git@49f3308"'
 )
Reference in New Issue
Block a user