"src/include/blockwise_tensor_slice_op.hip.hpp" did not exist on "569ad66e2a03789c4a1fa6659dc8296b4dfb868b"
profile_gemm_add_add_fastgelu.cpp 6.28 KB
Newer Older
Chao Liu's avatar
Chao Liu committed
1
2
3
// SPDX-License-Identifier: MIT
// Copyright (c) 2018-2022, Advanced Micro Devices, Inc. All rights reserved.

#include <iostream>
#include <numeric>
#include <initializer_list>
#include <cstdlib>

#include "profiler/profile_gemm_add_add_fastgelu_impl.hpp"
#include "profiler_operation_registry.hpp"

#define OP_NAME "gemm_add_add_fastgelu"
#define OP_DESC "GEMM+Add+Add+FastGeLU"
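// Hypothetical usage sketch (the profiler binary name "ckProfiler" and the
// concrete sizes below are assumptions, not taken from this file): the operation
// is selected by OP_NAME, and the remaining 14 arguments follow the help text
// printed below, e.g.
//
//   ckProfiler gemm_add_add_fastgelu 1 0 1 1 0 1 3840 4096 4096 4096 4096 4096 4096 4096
//
// which would profile the fp16, row-major A / row-major B case with verification
// and kernel timing enabled.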

int profile_gemm_add_add_fastgelu(int argc, char* argv[])
{
    enum struct MatrixLayout
    {
        MK_KN_MN_MN_MN, // 0
        MK_NK_MN_MN_MN, // 1
        KM_KN_MN_MN_MN, // 2
        KM_NK_MN_MN_MN, // 3
    };

    enum struct MatrixDataType
    {
        F32_F32_F32_F32_F32,      // 0
        F16_F16_F16_F16_F16,      // 1
        BF16_BF16_BF16_BF16_BF16, // 2
        INT8_INT8_INT8_INT8_INT8, // 3
    };
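
    // The enumerators name the A/B/D0/D1/E element types; the accumulator type is
    // not encoded here and is passed separately when the profile lambda is invoked
    // (F32 for the fp16 path). Only the fp16 combinations are dispatched below; the
    // other data types fall through to the "not implemented" branch.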

    if(argc != 16)
    {
        // clang-format off
        printf("arg1: tensor operation (" OP_NAME ": " OP_DESC ")\n");
        printf("arg2: data type (0: fp32; 1: fp16; 2: bf16; 3: int8)\n");
        printf("arg3: matrix layout (0: E[m, n] = FastGeLU(A[m, k] * B[k, n] + D0[m, n] + D1[m, n]);\n");
        printf("                     1: E[m, n] = FastGeLU(A[m, k] * B[n, k] + D0[m, n] + D1[m, n]);\n");
        printf("                     2: E[m, n] = FastGeLU(A[k, m] * B[k, n] + D0[m, n] + D1[m, n]);\n");
        printf("                     3: E[m, n] = FastGeLU(A[k, m] * B[n, k] + D0[m, n] + D1[m, n]))\n");
        printf("arg4: verification (0: no; 1: yes)\n");
        printf("arg5: initialization (0: no init; 1: integer value; 2: decimal value)\n");
        printf("arg6: print tensor value (0: no; 1: yes)\n");
        printf("arg7: time kernel (0=no, 1=yes)\n");
        printf("arg8 to 15: M, N, K, StrideA, StrideB, StrideD0, StrideD1, StrideE\n");
        // clang-format on
        exit(1);
    }

    const auto data_type       = static_cast<MatrixDataType>(std::stoi(argv[2]));
    const auto layout          = static_cast<MatrixLayout>(std::stoi(argv[3]));
    const bool do_verification = std::stoi(argv[4]);
    const int init_method      = std::stoi(argv[5]);
    const bool do_log          = std::stoi(argv[6]);
    const bool time_kernel     = std::stoi(argv[7]);

    const int M = std::stoi(argv[8]);
    const int N = std::stoi(argv[9]);
    const int K = std::stoi(argv[10]);

    const int StrideA  = std::stoi(argv[11]);
    const int StrideB  = std::stoi(argv[12]);
    const int StrideD0 = std::stoi(argv[13]);
    const int StrideD1 = std::stoi(argv[14]);
    const int StrideE  = std::stoi(argv[15]);

    using F16 = ck::half_t;
    using F32 = float;

    using Row = ck::tensor_layout::gemm::RowMajor;
    using Col = ck::tensor_layout::gemm::ColumnMajor;

    auto profile = [&](auto a_type,
                       auto b_type,
                       auto acc_type,
                       auto d0_type,
                       auto d1_type,
                       auto e_type,
                       auto a_layout,
                       auto b_layout,
                       auto d0_layout,
                       auto d1_layout,
                       auto e_layout) {
        using ADataType   = decltype(a_type);
        using BDataType   = decltype(b_type);
        using AccDataType = decltype(acc_type);
        using D0DataType  = decltype(d0_type);
        using D1DataType  = decltype(d1_type);
        using EDataType   = decltype(e_type);

        using ALayout  = decltype(a_layout);
        using BLayout  = decltype(b_layout);
        using D0Layout = decltype(d0_layout);
        using D1Layout = decltype(d1_layout);
        using ELayout  = decltype(e_layout);

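        // A negative stride on the command line falls back to the packed default:
        // the leading dimension is the contiguous extent of each tensor, i.e.
        // K (M) for row-major (column-major) A, N (K) for B, and N (M) for
        // D0/D1/E.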
        const int DefaultStrideA  = ck::is_same_v<ALayout, Row> ? K : M;
        const int DefaultStrideB  = ck::is_same_v<BLayout, Row> ? N : K;
        const int DefaultStrideD0 = ck::is_same_v<D0Layout, Row> ? N : M;
        const int DefaultStrideD1 = ck::is_same_v<D1Layout, Row> ? N : M;
        const int DefaultStrideE  = ck::is_same_v<ELayout, Row> ? N : M;

        bool pass = ck::profiler::profile_gemm_add_add_fastgelu_impl<ADataType,
                                                                     BDataType,
                                                                     AccDataType,
                                                                     D0DataType,
                                                                     D1DataType,
                                                                     EDataType,
                                                                     ALayout,
                                                                     BLayout,
                                                                     D0Layout,
                                                                     D1Layout,
                                                                     ELayout>(
            do_verification,
            init_method,
            do_log,
            time_kernel,
            M,
            N,
            K,
            (StrideA < 0) ? DefaultStrideA : StrideA,
            (StrideB < 0) ? DefaultStrideB : StrideB,
            (StrideD0 < 0) ? DefaultStrideD0 : StrideD0,
            (StrideD1 < 0) ? DefaultStrideD1 : StrideD1,
            (StrideE < 0) ? DefaultStrideE : StrideE);

        return pass ? 0 : 1;
    };

    if(data_type == MatrixDataType::F16_F16_F16_F16_F16 && layout == MatrixLayout::MK_KN_MN_MN_MN)
    {
        return profile(F16{}, F16{}, F32{}, F16{}, F16{}, F16{}, Row{}, Row{}, Row{}, Row{}, Row{});
    }
    else if(data_type == MatrixDataType::F16_F16_F16_F16_F16 &&
            layout == MatrixLayout::MK_NK_MN_MN_MN)
    {
        return profile(F16{}, F16{}, F32{}, F16{}, F16{}, F16{}, Row{}, Col{}, Row{}, Row{}, Row{});
    }
    else if(data_type == MatrixDataType::F16_F16_F16_F16_F16 &&
            layout == MatrixLayout::KM_KN_MN_MN_MN)
    {
        return profile(F16{}, F16{}, F32{}, F16{}, F16{}, F16{}, Col{}, Row{}, Row{}, Row{}, Row{});
    }
    else if(data_type == MatrixDataType::F16_F16_F16_F16_F16 &&
            layout == MatrixLayout::KM_NK_MN_MN_MN)
    {
        return profile(F16{}, F16{}, F32{}, F16{}, F16{}, F16{}, Col{}, Col{}, Row{}, Row{}, Row{});
    }
    else
    {
        std::cout << "this data_type & layout is not implemented" << std::endl;

        return 1;
    }
}

REGISTER_PROFILER_OPERATION(OP_NAME, OP_DESC, profile_gemm_add_add_fastgelu);