Unverified Commit e5ec9764 authored by Xuchun Shang, committed by GitHub

[Bug fix][PP] fix deadlock with tie_word_embeddings (#12362)


Signed-off-by: Xuchun Shang <xuchun.shang@gmail.com>
parent 621dfb88
@@ -462,7 +462,7 @@ class Qwen2ForCausalLM(nn.Module):
             self.pp_group.send(
                 self.model.embed_tokens.weight, dst=self.pp_group.last_rank
             )
-        else:
+        elif self.pp_group.is_last_rank:
             emb_token_weight = self.pp_group.recv(
                 size=(config.vocab_size, config.hidden_size),
                 dtype=next(self.model.parameters()).dtype,
...
@@ -361,7 +361,7 @@ class Qwen3ForCausalLM(nn.Module):
             self.pp_group.send(
                 self.model.embed_tokens.weight, dst=self.pp_group.last_rank
             )
-        else:
+        elif self.pp_group.is_last_rank:
             emb_token_weight = self.pp_group.recv(
                 size=(config.vocab_size, config.hidden_size),
                 dtype=next(self.model.parameters()).dtype,
...
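Context for the change: with tie_word_embeddings and pipeline parallelism, the first PP rank sends its embed_tokens weight so the last rank can reuse it for the tied LM head. The old bare `else:` made every non-first rank post a recv, so with more than two PP ranks the middle ranks waited on a message that no rank would ever send, hanging the whole pipeline. The sketch below illustrates the intended hand-off; it is a simplified reconstruction, not the exact repository code. The `pp_group` interface (`is_first_rank`, `is_last_rank`, `last_rank`, `send`, `recv`) mirrors the diff above, while the wrapper function `share_tied_embedding` and the final copy into `lm_head` are assumptions added for illustration (the code after the recv is truncated in the diff).

# Minimal sketch of the tied-embedding hand-off under pipeline parallelism,
# assuming a pp_group object with the methods shown in the diff above.
def share_tied_embedding(pp_group, model, config):
    """Ship embed_tokens.weight from the first PP rank to the last PP rank
    so the LM head can reuse it when tie_word_embeddings=True."""
    if pp_group.is_first_rank:
        # Only the first rank holds embed_tokens; push its weight downstream.
        pp_group.send(model.embed_tokens.weight, dst=pp_group.last_rank)
    elif pp_group.is_last_rank:
        # Only the last rank needs the weight (for the tied lm_head).
        # Before the fix this branch was a bare `else:`, so middle ranks also
        # posted a recv that no sender would ever match -> deadlock.
        emb_token_weight = pp_group.recv(
            size=(config.vocab_size, config.hidden_size),
            dtype=next(model.parameters()).dtype,
        )
        # Assumed follow-up: bind the received weight to the tied lm_head.
        model.lm_head.weight.data.copy_(emb_token_weight)
    # Middle ranks neither send nor receive; they simply fall through.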