Update src/axolotl/monkeypatch/llama_attn_hijack_xformers.py
Co-authored-by: NanoCode012 <kevinvong@rocketmail.com>
This commit is contained in:
@@ -18,7 +18,6 @@ except ImportError:
|
||||
|
||||
def hijack_llama_attention():
    """Monkey-patch LLaMA attention to use the xformers implementation.

    Replaces ``LlamaAttention.forward`` in-place on the transformers
    module so every subsequently constructed LLaMA model uses
    ``xformers_forward`` (defined elsewhere in this file).
    """
    # Bind the module once for readability; the assignment below mutates
    # transformers globally, affecting all LlamaAttention instances.
    llama_modeling = transformers.models.llama.modeling_llama
    llama_modeling.LlamaAttention.forward = xformers_forward
    logging.info("Replaced attention with xformers_attention")
|
||||
|
||||
|
||||
def hijack_llama_sdp_attention():
|
||||
|
||||
Reference in New Issue
Block a user