bjoernp committed
Commit 6d91817
1 Parent(s): 5d8825a

Update configuration_bitllama.py

Files changed (1):
  configuration_bitllama.py +1 -1
configuration_bitllama.py CHANGED
@@ -52,7 +52,7 @@ from transformers.utils import (
 )
 from transformers.utils.import_utils import is_torch_fx_available
 
-from .configuration_llama import LlamaConfig
+from .configuration_bitllama import LlamaConfig
 
 
 if is_flash_attn_2_available():
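
The change points the local LlamaConfig import at the repo's own configuration_bitllama.py instead of a configuration_llama.py that may not ship with the checkpoint. Below is a minimal sketch of how a remote-code checkpoint like this is typically loaded, assuming the files live in a Hugging Face Hub model repo; the repo id is a hypothetical placeholder, and only the import change itself comes from this commit:

```python
# Minimal sketch, not confirmed usage from this repo: load a Hub checkpoint
# that ships its own configuration/modeling files as "remote code".
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "your-org/bitllama-model"  # hypothetical placeholder, not from the commit

# trust_remote_code=True lets transformers download and import
# configuration_bitllama.py (and its relative imports) from the repo itself,
# which is why the import target must be a file that exists in the repo.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)
```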