Unverified Commit f796aaa8 authored by PanZezhong1725's avatar PanZezhong1725 Committed by GitHub
Browse files

Merge pull request #569 from pengcheng888/issue/565

issue/565 - 将一些算子调整到 infinicore.nn.functional.py 文件中
parents 581eddb1 7fa37247
......@@ -9,6 +9,9 @@ python/infinicore/lib/*.so
# Vscode
.vscode/
# Pycharm
.idea/
# Python
__pycache__/
*.egg-info/
......
......@@ -27,13 +27,10 @@ from infinicore.dtype import (
from infinicore.ntops import use_ntops
from infinicore.ops.add import add
from infinicore.ops.attention import attention
from infinicore.ops.causal_softmax import causal_softmax
from infinicore.ops.matmul import matmul
from infinicore.ops.rearrange import rearrange
from infinicore.ops.rms_norm import rms_norm
from infinicore.ops.silu import silu
from infinicore.ops.swiglu import swiglu
from infinicore.tensor import (
Tensor,
empty,
from_blob,
ones,
......@@ -42,6 +39,8 @@ from infinicore.tensor import (
zeros,
)
from infinicore import nn as nn
__all__ = [
# Classes.
"device",
......@@ -74,12 +73,8 @@ __all__ = [
# Operations.
"add",
"attention",
"causal_softmax",
"matmul",
"rearrange",
"rms_norm",
"silu",
"swiglu",
"empty",
"from_blob",
"ones",
......
from infinicore.nn import (
functional as functional,
)
import infinicore
from infinicore.lib import _infinicore
__all__ = ["causal_softmax", "rms_norm", "silu", "swiglu"]
def causal_softmax(
    input: infinicore.Tensor,
    out=None
) -> infinicore.Tensor:
    r"""Apply a causal softmax function.

    When ``out`` is supplied the result is written into it and ``out`` is
    returned; otherwise a freshly allocated tensor is returned.
    """
    if out is not None:
        # In-place variant: fill the caller-provided destination tensor.
        _infinicore.causal_softmax_(out._underlying, input._underlying)
        return out
    return infinicore.Tensor(_infinicore.causal_softmax(input._underlying))
def rms_norm(
    input: infinicore.Tensor,
    normalized_shape: list[int],
    weight: infinicore.Tensor,
    eps: float = 1e-5,
    out=None
) -> infinicore.Tensor:
    r"""Apply Root Mean Square Layer Normalization.

    Args:
        input: Tensor to normalize.
        normalized_shape: Expected shape of ``weight``; must equal
            ``weight.shape``.
        weight: Learnable scale applied after normalization.
        eps: Small constant for numerical stability.
        out: Optional pre-allocated output tensor.

    Returns:
        ``out`` when provided, otherwise a newly allocated tensor.

    Raises:
        ValueError: If ``normalized_shape`` does not match ``weight.shape``.
    """
    # Validate with an explicit exception instead of ``assert`` so the check
    # survives when Python runs with optimizations enabled (-O strips asserts).
    if list(normalized_shape) != list(weight.shape):
        raise ValueError("normalized_shape does not match weight.shape.")
    if out is None:
        return infinicore.Tensor(
            _infinicore.rms_norm(input._underlying, weight._underlying, eps)
        )
    _infinicore.rms_norm_(out._underlying, input._underlying, weight._underlying, eps)
    return out
def silu(input: infinicore.Tensor, inplace: bool = False, out=None) -> infinicore.Tensor:
    r"""Apply the Sigmoid Linear Unit (SiLU) function, element-wise.

    Args:
        input: Input tensor.
        inplace: If True, the result overwrites ``input``.
        out: Optional pre-allocated output tensor; mutually exclusive with
            ``inplace=True``.

    Returns:
        ``input`` when ``inplace`` is True, ``out`` when provided, otherwise
        a newly allocated tensor.

    Raises:
        ValueError: If both ``inplace=True`` and ``out`` are specified.
    """
    if inplace:
        # Previously a caller-supplied ``out`` was silently ignored when
        # ``inplace=True``; reject the ambiguous combination explicitly.
        if out is not None:
            raise ValueError("silu: cannot specify both inplace=True and out.")
        _infinicore.silu_(input._underlying, input._underlying)
        return input
    if out is None:
        return infinicore.Tensor(_infinicore.silu(input._underlying))
    _infinicore.silu_(out._underlying, input._underlying)
    return out
def swiglu(input: infinicore.Tensor, other: infinicore.Tensor, out=None) -> infinicore.Tensor:
    r"""Apply the Swish-Gated Linear Unit (SwiGLU) function, element-wise.

    Args:
        input: First input tensor.
        other: Second input tensor (the gate operand).
        out: Optional pre-allocated output tensor.

    Returns:
        ``out`` when provided, otherwise a newly allocated tensor.
    """
    # Return annotation added for consistency with the sibling functional ops
    # (causal_softmax, rms_norm, silu), which all declare ``-> infinicore.Tensor``.
    if out is None:
        return infinicore.Tensor(_infinicore.swiglu(input._underlying, other._underlying))
    _infinicore.swiglu_(out._underlying, input._underlying, other._underlying)
    return out
from infinicore.lib import _infinicore
from infinicore.tensor import Tensor
def causal_softmax(input, *, out=None):
    """Causal softmax over ``input``; writes into ``out`` when provided."""
    if out is not None:
        _infinicore.causal_softmax_(out._underlying, input._underlying)
        return out
    return Tensor(_infinicore.causal_softmax(input._underlying))
from infinicore.lib import _infinicore
from infinicore.tensor import Tensor
def rms_norm(input, weight, epsilon=1e-5, *, out=None):
    """RMS layer normalization of ``input`` scaled by ``weight``.

    Writes into ``out`` when provided, otherwise allocates a new tensor.
    """
    if out is not None:
        _infinicore.rms_norm_(
            out._underlying, input._underlying, weight._underlying, epsilon
        )
        return out
    result = _infinicore.rms_norm(input._underlying, weight._underlying, epsilon)
    return Tensor(result)
from infinicore.lib import _infinicore
from infinicore.tensor import Tensor
def silu(input, *, out=None):
    """Element-wise SiLU; writes into ``out`` when provided."""
    if out is not None:
        _infinicore.silu_(out._underlying, input._underlying)
        return out
    return Tensor(_infinicore.silu(input._underlying))
from infinicore.lib import _infinicore
from infinicore.tensor import Tensor
def swiglu(input, other, *, out=None):
    """Element-wise SwiGLU of ``input`` and ``other``.

    Writes into ``out`` when provided, otherwise allocates a new tensor.
    """
    if out is not None:
        _infinicore.swiglu_(out._underlying, input._underlying, other._underlying)
        return out
    return Tensor(_infinicore.swiglu(input._underlying, other._underlying))
......@@ -132,7 +132,9 @@ class OpTest(BaseOperatorTest):
return self.torch_causal_softmax(*args, **kwargs)
def infinicore_operator(self, *args, **kwargs):
    """Dispatch to the functional causal_softmax implementation."""
    # The stale pre-refactor `return infinicore.causal_softmax(...)` line made
    # everything after it unreachable; keep only the new functional dispatch.
    import infinicore.nn.functional as F

    return F.causal_softmax(*args, **kwargs)
def main():
......
......@@ -149,7 +149,9 @@ class OpTest(BaseOperatorTest):
def infinicore_operator(self, x, weight, epsilon=_EPSILON, out=None, **kwargs):
    """InfiniCore RMSNorm implementation via the functional API."""
    # The stale pre-refactor `return infinicore.rms_norm(...)` line made the
    # code after it unreachable; keep only the new functional dispatch.
    import infinicore.nn.functional as F

    return F.rms_norm(x, weight.shape, weight, epsilon, out=out)
def main():
......
......@@ -130,7 +130,9 @@ class OpTest(BaseOperatorTest):
def infinicore_operator(self, input, out=None, **kwargs):
    """InfiniCore SiLU implementation via the functional API."""
    # The stale pre-refactor `return infinicore.silu(...)` line made the code
    # after it unreachable; keep only the new functional dispatch.
    import infinicore.nn.functional as F

    return F.silu(input, out=out)
def main():
......
......@@ -146,7 +146,9 @@ class OpTest(BaseOperatorTest):
def infinicore_operator(self, a, b, out=None, **kwargs):
    """InfiniCore SwiGLU implementation via the functional API."""
    # The stale pre-refactor `return infinicore.swiglu(...)` line made the
    # code after it unreachable; keep only the new functional dispatch.
    import infinicore.nn.functional as F

    return F.swiglu(a, b, out=out)
def main():
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment