Commit 7fa37247 authored by pengcheng888

issue/565 - Move the Python operators into the infinicore.nn.functional.py file

parent 39ec8f0e
.gitignore
@@ -9,6 +9,9 @@ python/infinicore/lib/*.so
 # Vscode
 .vscode/
+# Pycharm
+.idea/
 # Python
 __pycache__/
 *.egg-info/
python/infinicore/__init__.py
@@ -27,13 +27,10 @@ from infinicore.dtype import (
 from infinicore.ntops import use_ntops
 from infinicore.ops.add import add
 from infinicore.ops.attention import attention
-from infinicore.ops.causal_softmax import causal_softmax
 from infinicore.ops.matmul import matmul
 from infinicore.ops.rearrange import rearrange
-from infinicore.ops.rms_norm import rms_norm
-from infinicore.ops.silu import silu
-from infinicore.ops.swiglu import swiglu
 from infinicore.tensor import (
+    Tensor,
     empty,
     from_blob,
     ones,
@@ -42,6 +39,8 @@ from infinicore.tensor import (
     zeros,
 )
 
+from infinicore import nn as nn
+
 __all__ = [
     # Classes.
     "device",
@@ -74,12 +73,8 @@ __all__ = [
     # Operations.
     "add",
     "attention",
-    "causal_softmax",
     "matmul",
     "rearrange",
-    "rms_norm",
-    "silu",
-    "swiglu",
     "empty",
     "from_blob",
     "ones",
python/infinicore/nn/__init__.py (new file)
+from infinicore.nn import (
+    functional as functional,
+)
python/infinicore/nn/functional.py (new file)
+import infinicore
+from infinicore.lib import _infinicore
+
+__all__ = ["causal_softmax", "rms_norm", "silu", "swiglu"]
+
+
+def causal_softmax(input: infinicore.Tensor, out=None) -> infinicore.Tensor:
+    r"""Apply a causal softmax function."""
+
+    if out is None:
+        return infinicore.Tensor(_infinicore.causal_softmax(input._underlying))
+
+    _infinicore.causal_softmax_(out._underlying, input._underlying)
+
+    return out
+
+
+def rms_norm(
+    input: infinicore.Tensor,
+    normalized_shape: list[int],
+    weight: infinicore.Tensor,
+    eps: float = 1e-5,
+    out=None,
+) -> infinicore.Tensor:
+    r"""Apply Root Mean Square Layer Normalization."""
+
+    assert normalized_shape == weight.shape, "normalized_shape does not match weight.shape."
+
+    if out is None:
+        return infinicore.Tensor(
+            _infinicore.rms_norm(input._underlying, weight._underlying, eps)
+        )
+
+    _infinicore.rms_norm_(out._underlying, input._underlying, weight._underlying, eps)
+
+    return out
+
+
+def silu(input: infinicore.Tensor, inplace: bool = False, out=None) -> infinicore.Tensor:
+    r"""Apply the Sigmoid Linear Unit (SiLU) function, element-wise."""
+
+    if inplace:
+        _infinicore.silu_(input._underlying, input._underlying)
+        return input
+
+    if out is None:
+        return infinicore.Tensor(_infinicore.silu(input._underlying))
+
+    _infinicore.silu_(out._underlying, input._underlying)
+
+    return out
+
+
+def swiglu(input: infinicore.Tensor, other: infinicore.Tensor, out=None):
+    r"""Apply the Swish-Gated Linear Unit (SwiGLU) function, element-wise."""
+
+    if out is None:
+        return infinicore.Tensor(_infinicore.swiglu(input._underlying, other._underlying))
+
+    _infinicore.swiglu_(out._underlying, input._underlying, other._underlying)
+
+    return out
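All four functions follow the same calling convention: allocate and return a fresh tensor by default, or write into `out=` when a destination is supplied (`silu` additionally offers `inplace=`). A hedged usage sketch; the shapes and the `ones`/`empty` constructor calls are illustrative only:

```python
import infinicore
import infinicore.nn.functional as F

x = infinicore.ones((2, 8))
w = infinicore.ones((8,))

y = F.rms_norm(x, w.shape, w)       # allocating form, eps defaults to 1e-5

buf = infinicore.empty((2, 8))
F.rms_norm(x, w.shape, w, out=buf)  # writes into a preallocated buffer

F.silu(x, inplace=True)             # overwrites x and returns it
```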
python/infinicore/ops/causal_softmax.py (deleted)
-from infinicore.lib import _infinicore
-from infinicore.tensor import Tensor
-
-
-def causal_softmax(input, *, out=None):
-    if out is None:
-        return Tensor(_infinicore.causal_softmax(input._underlying))
-
-    _infinicore.causal_softmax_(out._underlying, input._underlying)
-
-    return out
python/infinicore/ops/rms_norm.py (deleted)
-from infinicore.lib import _infinicore
-from infinicore.tensor import Tensor
-
-
-def rms_norm(input, weight, epsilon=1e-5, *, out=None):
-    if out is None:
-        return Tensor(
-            _infinicore.rms_norm(input._underlying, weight._underlying, epsilon)
-        )
-
-    _infinicore.rms_norm_(
-        out._underlying, input._underlying, weight._underlying, epsilon
-    )
-
-    return out
python/infinicore/ops/silu.py (deleted)
-from infinicore.lib import _infinicore
-from infinicore.tensor import Tensor
-
-
-def silu(input, *, out=None):
-    if out is None:
-        return Tensor(_infinicore.silu(input._underlying))
-
-    _infinicore.silu_(out._underlying, input._underlying)
-
-    return out
python/infinicore/ops/swiglu.py (deleted)
-from infinicore.lib import _infinicore
-from infinicore.tensor import Tensor
-
-
-def swiglu(input, other, *, out=None):
-    if out is None:
-        return Tensor(_infinicore.swiglu(input._underlying, other._underlying))
-
-    _infinicore.swiglu_(out._underlying, input._underlying, other._underlying)
-
-    return out
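Besides the relocation, two public signatures change: `out=` is now an ordinary positional-or-keyword parameter rather than keyword-only, and `rms_norm` gains a leading `normalized_shape` argument while renaming `epsilon` to `eps`. A hedged before/after sketch for call sites (inputs illustrative):

```python
import infinicore
import infinicore.nn.functional as F

x = infinicore.ones((2, 8))
w = infinicore.ones((8,))

# Before this commit: infinicore.rms_norm(x, w, epsilon=1e-5)
# After: the op lives in nn.functional and takes normalized_shape first.
y = F.rms_norm(x, w.shape, w, eps=1e-5)
```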
causal_softmax operator test
@@ -132,7 +132,9 @@ class OpTest(BaseOperatorTest):
         return self.torch_causal_softmax(*args, **kwargs)
 
     def infinicore_operator(self, *args, **kwargs):
-        return infinicore.causal_softmax(*args, **kwargs)
+        import infinicore.nn.functional as F
+
+        return F.causal_softmax(*args, **kwargs)
 
 
 def main():
rms_norm operator test
@@ -149,7 +149,9 @@ class OpTest(BaseOperatorTest):
 
     def infinicore_operator(self, x, weight, epsilon=_EPSILON, out=None, **kwargs):
         """InfiniCore RMSNorm implementation"""
-        return infinicore.rms_norm(x, weight, epsilon, out=out)
+        import infinicore.nn.functional as F
+
+        return F.rms_norm(x, weight.shape, weight, epsilon, out=out)
 
 
 def main():
silu operator test
@@ -130,7 +130,9 @@ class OpTest(BaseOperatorTest):
 
     def infinicore_operator(self, input, out=None, **kwargs):
         """InfiniCore SiLU implementation"""
-        return infinicore.silu(input, out=out)
+        import infinicore.nn.functional as F
+
+        return F.silu(input, out=out)
 
 
 def main():
swiglu operator test
@@ -146,7 +146,9 @@ class OpTest(BaseOperatorTest):
 
     def infinicore_operator(self, a, b, out=None, **kwargs):
         """InfiniCore SwiGLU implementation"""
-        return infinicore.swiglu(a, b, out=out)
+        import infinicore.nn.functional as F
+
+        return F.swiglu(a, b, out=out)
 
 
 def main():