Update configuration_mpt.py
Browse files- configuration_mpt.py +2 -2
configuration_mpt.py
CHANGED
@@ -116,8 +116,8 @@ class MPTConfig(PretrainedConfig):
|
|
116 |
raise ValueError(f"Unknown attn_impl={self.attn_config['attn_impl']}")
|
117 |
if self.attn_config['prefix_lm'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
118 |
raise NotImplementedError('prefix_lm only implemented with torch and triton attention.')
|
119 |
-
if self.attn_config['alibi'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
120 |
-
raise NotImplementedError('alibi only implemented with torch and triton attention.')
|
121 |
if self.attn_config['attn_uses_sequence_id'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
122 |
raise NotImplementedError('attn_uses_sequence_id only implemented with torch and triton attention.')
|
123 |
if self.embedding_fraction > 1 or self.embedding_fraction <= 0:
|
|
|
116 |
raise ValueError(f"Unknown attn_impl={self.attn_config['attn_impl']}")
|
117 |
if self.attn_config['prefix_lm'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
118 |
raise NotImplementedError('prefix_lm only implemented with torch and triton attention.')
|
119 |
+
#if self.attn_config['alibi'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
120 |
+
# raise NotImplementedError('alibi only implemented with torch and triton attention.')
|
121 |
if self.attn_config['attn_uses_sequence_id'] and self.attn_config['attn_impl'] not in ['torch', 'triton']:
|
122 |
raise NotImplementedError('attn_uses_sequence_id only implemented with torch and triton attention.')
|
123 |
if self.embedding_fraction > 1 or self.embedding_fraction <= 0:
|