From 969b3e79c2ec795f4f4b8866cce9c84a8d8b7fd5 Mon Sep 17 00:00:00 2001
From: Daniel King
Date: Mon, 9 Oct 2023 13:23:49 -0700
Subject: [PATCH] precommit

---
 llmfoundry/models/layers/attention.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/llmfoundry/models/layers/attention.py b/llmfoundry/models/layers/attention.py
index 020bd03bb7..ee1e009af4 100644
--- a/llmfoundry/models/layers/attention.py
+++ b/llmfoundry/models/layers/attention.py
@@ -16,6 +16,7 @@
 from llmfoundry.models.layers.fc import FC_CLASS_REGISTRY
 from llmfoundry.models.layers.norm import NORM_CLASS_REGISTRY
 
+
 def raise_if_flash_attn_v2():
     flash_attn_version = None
     # This only needs to be in a try except so that huggingface does not try to import it
@@ -28,6 +29,7 @@ def raise_if_flash_attn_v2():
     except:
         pass
 
+
 def _reset_is_causal(num_query_tokens: int, num_key_tokens: int,
                      original_is_causal: bool) -> bool:
     # disable causal when it is not needed