Skip to content

Commit f0dee79

Browse files
committed
style
1 parent c2b684d commit f0dee79

File tree

2 files changed: +2 insertions, −2 deletions

src/transformers/models/diffllama/modeling_diffllama.py

+1
Original file line numberDiff line numberDiff line change
@@ -595,6 +595,7 @@ class DiffLlamaPreTrainedModel(PreTrainedModel):
595595
_skip_keys_device_placement = ["past_key_values"]
596596
_supports_flash_attn_2 = True
597597
_supports_sdpa = True
598+
_supports_flex_attn = False
598599
_supports_cache_class = True
599600
_supports_quantized_cache = True
600601
_supports_static_cache = True

src/transformers/models/diffllama/modular_diffllama.py

+1 −2
Original file line numberDiff line numberDiff line change
@@ -431,8 +431,7 @@ def __init__(self, config: DiffLlamaConfig, layer_idx: int):
431431

432432

433433
class DiffLlamaPreTrainedModel(LlamaPreTrainedModel):
434-
pass
435-
434+
_supports_flex_attn = False
436435

437436
class DiffLlamaModel(LlamaModel):
438437
pass

0 commit comments

Comments
 (0)