oweller2 committed on
Commit
f7dca47
1 Parent(s): bd97ebe

attention fix

Browse files
Files changed (1) hide show
  1. attention.py +1 -3
attention.py CHANGED
@@ -20,12 +20,10 @@ from typing import Optional
20
  import importlib.metadata
21
  import logging
22
  import math
23
-
24
- import .bert_padding
25
  from .configuration_bert import FlexBertConfig, maybe_add_padding
26
  from .normalization import get_norm_layer
27
  from .initialization import ModuleType, init_weights
28
- import src.utils # noqa: F401
29
 
30
  IMPL_USE_FLASH3 = False
31
  IMPL_USE_FLASH2 = False
 
20
  import importlib.metadata
21
  import logging
22
  import math
23
+ from .bert_padding import pad_input, unpad_input_only, index_first_axis
 
24
  from .configuration_bert import FlexBertConfig, maybe_add_padding
25
  from .normalization import get_norm_layer
26
  from .initialization import ModuleType, init_weights
 
27
 
28
  IMPL_USE_FLASH3 = False
29
  IMPL_USE_FLASH2 = False