Unverified commit bfe21c3d, authored by Reese Wang, committed by GitHub
Browse files

[JAX] Adapt latest JAX/PAX image (#744)



* value_and_grad requires same shape for input and gradients
Signed-off-by: Reese Wang <rewang@nvidia.com>

* Use high precision layernorm
Signed-off-by: Reese Wang <rewang@nvidia.com>

* Remove local_device_ids as it caused unexpected behaviors
Signed-off-by: Reese Wang <rewang@nvidia.com>

* Revert "Remove local_device_ids as it caused unexpected behaviors"

This reverts commit c54349b2ce1e96ae696cf0d74f5210e55002cf72.
Signed-off-by: Reese Wang <rewang@nvidia.com>

---------
Signed-off-by: Reese Wang <rewang@nvidia.com>
parent d541d208
...@@ -485,7 +485,8 @@ class TestGeLuFP8(TestGeLu): ...@@ -485,7 +485,8 @@ class TestGeLuFP8(TestGeLu):
primitive.defvjp(primitive_fwd, primitive_bwd) primitive.defvjp(primitive_fwd, primitive_bwd)
func = value_and_grad(lambda x, y, z, w: jnp.mean(primitive(x, y, z, w)), (0, 1, 2, 3)) func = value_and_grad(lambda x, y, z, w: jnp.mean(primitive(x, y, z, w)), (0, 1, 2, 3))
return func(inputs, no_use, no_use, no_use) return func(inputs, jnp.transpose(inputs, (2, 0, 1)),
jnp.zeros(inputs.shape[-1], dtype=inputs.dtype), no_use)
@pytest.mark.skipif(not is_fp8_supported, reason=reason) @pytest.mark.skipif(not is_fp8_supported, reason=reason)
@pytest.mark.parametrize('shape', [(32, 2, 64), (64, 2, 256)]) @pytest.mark.parametrize('shape', [(32, 2, 64), (64, 2, 256)])
...@@ -582,7 +583,7 @@ class TestGatedGeLuFP8(TestGatedGeLu): ...@@ -582,7 +583,7 @@ class TestGatedGeLuFP8(TestGatedGeLu):
primitive.defvjp(primitive_fwd, primitive_bwd) primitive.defvjp(primitive_fwd, primitive_bwd)
func = value_and_grad(lambda x, y, z: jnp.mean(primitive(x, y, z)), (0, 1, 2)) func = value_and_grad(lambda x, y, z: jnp.mean(primitive(x, y, z)), (0, 1, 2))
return func(inputs, no_use, no_use) return func(inputs, jnp.transpose(inputs, (1, 2, 0)), no_use)
@pytest.mark.skipif(not is_fp8_supported, reason=reason) @pytest.mark.skipif(not is_fp8_supported, reason=reason)
@pytest.mark.parametrize('shape', [(32, 2, 64), (64, 2, 256)]) @pytest.mark.parametrize('shape', [(32, 2, 64), (64, 2, 256)])
......
...@@ -731,19 +731,18 @@ class LayerNorm(nn.Module): ...@@ -731,19 +731,18 @@ class LayerNorm(nn.Module):
axes=('embed',)) axes=('embed',))
bias = jnp.asarray(bias, self.dtype) bias = jnp.asarray(bias, self.dtype)
y = jnp.asarray(y, self.dtype)
if not self.zero_centered_gamma: if not self.zero_centered_gamma:
z = y * scale + bias z = y * scale + bias
else: else:
z = y * (scale + 1) + bias z = y * (scale + 1.) + bias
else: else:
assert self.layernorm_type == 'rmsnorm' assert self.layernorm_type == 'rmsnorm'
assert not self.zero_centered_gamma assert not self.zero_centered_gamma
mean2 = jnp.mean(lax.square(x), axis=-1, keepdims=True) mean2 = jnp.mean(lax.square(x), axis=-1, keepdims=True)
y = jnp.asarray(x * lax.rsqrt(mean2 + self.epsilon), self.dtype) y = x * lax.rsqrt(mean2 + self.epsilon)
z = y * scale z = y * scale
return z return jnp.asarray(z, self.dtype)
class RelativePositionBiases(nn.Module): class RelativePositionBiases(nn.Module):
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment