Unverified Commit 0c2121f9 authored by Mohamed Abu El-Nasr, committed by GitHub

Fix building alibi tensor when num_heads is not a power of 2 (#28380)

* Fix building alibi tensor when num_heads is not a power of 2

* Remove print function
parent 53cffeb3
@@ -70,10 +70,10 @@ def build_mpt_alibi_tensor(num_heads, sequence_length, alibi_bias_max=8, device=
     base = base * (alibi_bias_max / num_heads_power_of_2)
     slopes = 1.0 / torch.pow(2, base)
-    slopes = slopes.view(1, num_heads, 1, 1)
+    slopes = slopes.view(1, num_heads_power_of_2, 1, 1)
     if num_heads_power_of_2 != num_heads:
-        slopes = torch.concat([slopes[1::2], slopes[::2]])[:num_heads]
+        slopes = torch.concat([slopes[:, 1::2, ...], slopes[:, ::2, ...]], dim=1)[:, :num_heads, ...]
     alibi = alibi * slopes
     return alibi.squeeze(0)
...
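A minimal, standalone sketch of the fixed slope construction for a non-power-of-two head count (num_heads = 6). Variable names mirror the diff, but the setup around the changed lines is paraphrased from the hunk, not copied verbatim from the repository:

import math
import torch

num_heads = 6                 # not a power of two
alibi_bias_max = 8
num_heads_power_of_2 = 2 ** math.ceil(math.log2(num_heads))  # -> 8

# Geometric slope schedule computed over the padded head count.
base = torch.arange(1, num_heads_power_of_2 + 1, dtype=torch.float32)
base = base * (alibi_bias_max / num_heads_power_of_2)
slopes = 1.0 / torch.pow(2, base)

# The old code called slopes.view(1, num_heads, 1, 1): 8 slope values cannot
# be reshaped to (1, 6, 1, 1), so building the tensor failed for num_heads=6.
slopes = slopes.view(1, num_heads_power_of_2, 1, 1)

# Fixed path: interleave odd/even slopes along the head dimension (dim 1)
# and truncate to the real head count. The old slicing slopes[1::2] indexed
# dim 0 (size 1) and never touched the head dimension.
slopes = torch.concat([slopes[:, 1::2, ...], slopes[:, ::2, ...]], dim=1)[:, :num_heads, ...]
print(slopes.shape)  # torch.Size([1, 6, 1, 1])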
...
@@ -53,7 +53,7 @@ class MptModelTester:
         use_labels=True,
         use_mc_token_ids=True,
         vocab_size=99,
-        hidden_size=32,
+        hidden_size=48,
         num_hidden_layers=2,
         num_attention_heads=4,
         intermediate_size=37,
...
@@ -385,6 +385,12 @@ class MptModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin,
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_mpt_model(*config_and_inputs)
+
+    def test_mpt_model_alibi_tensor(self):
+        # test creation of alibi tensor when num heads is not a power of two
+        config_and_inputs = self.model_tester.prepare_config_and_inputs()
+        config_and_inputs[0].n_heads = 6
+        self.model_tester.create_and_check_mpt_model(*config_and_inputs)
 
     def test_mpt_model_past(self):
         config_and_inputs = self.model_tester.prepare_config_and_inputs()
         self.model_tester.create_and_check_mpt_model_past(*config_and_inputs)
...
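A side note on the test changes: hidden_size moves from 32 to 48 presumably because the tester must now split hidden_size evenly across both the default 4 heads and the new 6-head case, and 48 is divisible by both while 32 is not by 6. A quick check under that assumption:

# hidden_size must split evenly into per-head dimensions for every head
# count the tester exercises; 32 % 6 != 0, while 48 works for 4 and 6.
for n_heads in (4, 6):
    assert 48 % n_heads == 0, f"hidden_size=48 not divisible by {n_heads}"
    head_dim = 48 // n_heads  # 12 for 4 heads, 8 for 6 heads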