Commit 1c8b0fc2 authored by charlie's avatar charlie


Merge branch 'dyn_ref_multibroadcast' of github.com:ROCmSoftwarePlatform/AMDMIGraphX into dyn_unsqueeze
parents 8ae761df 2fa68ded
......@@ -7,7 +7,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Cancel Previous Runs
uses: styfle/cancel-workflow-action@0.6.0
uses: styfle/cancel-workflow-action@0.11.0
with:
access_token: ${{ github.token }}
tidy:
......@@ -15,9 +15,19 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: |
sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
du . --max-depth=1 -h
ls -la
cd /usr/local
du . --max-depth=1 -h
ls -la
cd /usr/local/lib
echo $(pwd)
du . --max-depth=1 -h
ls -la
- uses: actions/checkout@v3
# In this step, this action saves a list of existing images,
# the cache is created without them in the post run.
......@@ -34,7 +44,7 @@ jobs:
message("::set-output name=timestamp::${current_date}")
- name: Cache files for tidy
uses: pat-s/always-upload-cache@v2.1.3
uses: pat-s/always-upload-cache@v3.0.11
with:
path: tidy-cache
key: tidy-cache-${{ steps.cache_timestamp.outputs.timestamp }}
......@@ -65,8 +75,8 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
# In this step, this action saves a list of existing images,
# the cache is created without them in the post run.
......@@ -110,8 +120,8 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
# In this step, this action saves a list of existing images,
# the cache is created without them in the post run.
......@@ -146,10 +156,10 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Install pyflakes
......@@ -167,10 +177,10 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.8
- name: run License Check
......@@ -198,16 +208,16 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.7
- name: Cache dependencies
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
uses: actions/cache@v2
uses: actions/cache@v3
with:
# This path is specific to Ubuntu
path: ${{ github.workspace }}/cget
......@@ -294,16 +304,16 @@ jobs:
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v2
uses: actions/setup-python@v4
with:
python-version: 3.7
- name: Cache dependencies
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
uses: actions/cache@v2
uses: actions/cache@v3
with:
# This path is specific to Ubuntu
path: ${{ github.workspace }}/cget
......
......@@ -68,13 +68,9 @@ std::vector<std::size_t> compute_broadcasted_lens(std::vector<std::size_t> s0,
std::vector<shape::dynamic_dimension> compute_broadcasted_dyn_dims(shape s0, shape s1)
{
assert(s0.dynamic() or s1.dynamic());
// change both shapes to dynamic_dimension representation
if(not s0.dynamic())
s0 = s0.to_dynamic();
if(not s1.dynamic())
s1 = s1.to_dynamic();
s0 = s0.to_dynamic();
s1 = s1.to_dynamic();
if(s0.ndim() > s1.ndim())
{
std::swap(s0, s1);
......
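Note on the hunk above: the `dynamic()` guards are dropped and both shapes are converted unconditionally, which only works if `to_dynamic()` is a no-op on a shape that is already dynamic. Below is a minimal, self-contained sketch of that pattern under that assumption; `Shape` and `DynDim` are toy stand-ins, not the real migraphx API.

```cpp
#include <cassert>
#include <cstddef>
#include <utility>
#include <vector>

// Toy stand-ins for illustration only; the real migraphx::shape API differs.
struct DynDim
{
    std::size_t min, max, opt;
};

struct Shape
{
    bool is_dynamic;
    std::vector<std::size_t> lens; // used when static
    std::vector<DynDim> dyn_dims;  // used when dynamic

    // Assumed idempotent: a dynamic shape converts to itself, a static
    // shape becomes per-dimension fixed ranges {len, len, 0}.
    Shape to_dynamic() const
    {
        if(is_dynamic)
            return *this;
        Shape result{};
        result.is_dynamic = true;
        for(auto len : lens)
            result.dyn_dims.push_back({len, len, 0});
        return result;
    }

    std::size_t ndim() const { return is_dynamic ? dyn_dims.size() : lens.size(); }
};

// Mirrors the simplified entry of compute_broadcasted_dyn_dims: no guards,
// just convert both operands and make s1 the shape with more dimensions.
std::vector<DynDim> broadcasted_dyn_dims_sketch(Shape s0, Shape s1)
{
    assert(s0.is_dynamic or s1.is_dynamic);
    s0 = s0.to_dynamic();
    s1 = s1.to_dynamic();
    if(s0.ndim() > s1.ndim())
        std::swap(s0, s1);
    // ... pairwise broadcasting of the aligned dimensions would follow here.
    return s1.dyn_dims;
}

int main()
{
    // Same operands as the multibroadcast_2in_static_dyn2 test further down.
    Shape stat{false, {1, 6}, {}};
    Shape dyn{true, {}, {{8, 8, 0}, {6, 6, 0}}};
    return broadcasted_dyn_dims_sketch(stat, dyn).size() == 2 ? 0 : 1;
}
```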
......@@ -142,7 +142,6 @@ struct broadcast
{
return args[0].reshape(dyn_out.computed_shape);
}
std::ptrdiff_t output_alias(const std::vector<shape>&) const { return 0; }
};
......
......@@ -64,7 +64,7 @@ std::size_t compile_miopen::compile(operation& op, instruction_ref ins, bool for
{
op.from_value({{"int8_x4_format", format}});
auto v = op.compile(*ctx, ins->get_shape(), to_shapes(ins->inputs()));
return v.get("workspace", 0);
return v.get<std::size_t>("workspace", 0);
}
void compile_miopen::apply(module& m) const
......
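Note on the hunk above: the only change is the explicit `<std::size_t>` template argument on `get`. Assuming `value::get` deduces its type from the default argument, `v.get("workspace", 0)` would presumably deduce `int` from the literal `0`; spelling out the type keeps the lookup and the function's `std::size_t` return in the same width. A toy illustration of that deduction point (not the migraphx::value API):

```cpp
#include <cstddef>
#include <map>
#include <string>

// Toy key/value store; migraphx::value has a richer interface, this only
// illustrates why the explicit template argument matters.
struct toy_value
{
    std::map<std::string, std::size_t> data;

    // With T deduced from `fallback`, get("workspace", 0) would instantiate
    // get<int>; writing get<std::size_t>(...) keeps the stored value's width.
    template <class T>
    T get(const std::string& key, T fallback) const
    {
        auto it = data.find(key);
        return it == data.end() ? fallback : static_cast<T>(it->second);
    }
};

std::size_t workspace_size(const toy_value& v)
{
    // Mirrors the diff: the explicit argument avoids narrowing through int.
    return v.get<std::size_t>("workspace", 0);
}

int main()
{
    toy_value v;
    v.data["workspace"] = 1u << 20;
    return workspace_size(v) == (1u << 20) ? 0 : 1;
}
```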
......@@ -21,7 +21,7 @@
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <rocblas.h>
#include <rocblas/rocblas.h>
#include <migraphx/gpu/gemm_impl.hpp>
#include <migraphx/reduce_dims.hpp>
#include <migraphx/permutation.hpp>
......
......@@ -25,7 +25,7 @@
#define MIGRAPHX_GUARD_MIGRAPHLIB_ROCBLAS_HPP
#include <migraphx/manage_ptr.hpp>
#include <migraphx/config.hpp>
#include <rocblas.h>
#include <rocblas/rocblas.h>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
......
......@@ -454,6 +454,23 @@ def binary_dyn_brcst_add_test():
return ([node], [arg0, arg1], [arg_out])
@onnx_test
def binary_dyn_brcst_attr_error_test():
arg0 = helper.make_tensor_value_info('0', TensorProto.FLOAT16, [4, 5])
arg1 = helper.make_tensor_value_info('1', TensorProto.FLOAT,
[None, 3, 4, 5])
arg_out = helper.make_tensor_value_info('out', TensorProto.FLOAT,
[None, 3, 4, 5])
node = onnx.helper.make_node('Add',
inputs=['0', '1'],
outputs=['out'],
broadcast=1,
axis=1)
return ([node], [arg0, arg1], [arg_out])
@onnx_test
def binary_dyn_brcst_mul_test():
arg0 = helper.make_tensor_value_info('0', TensorProto.FLOAT,
......
......@@ -559,6 +559,14 @@ TEST_CASE(binary_dyn_brcst_add_test)
EXPECT(p == prog);
}
TEST_CASE(binary_dyn_brcst_attr_error_test)
{
migraphx::onnx_options options;
options.default_dyn_dim_value = {1, 4, 0};
EXPECT(test::throws(
[&] { migraphx::parse_onnx("binary_dyn_brcst_attr_error_test.onnx", options); }));
}
TEST_CASE(binary_dyn_brcst_mul_test)
{
migraphx::program p;
......
......@@ -81,6 +81,14 @@ void throws_shape(const migraphx::shape&, Ts...)
"An expected shape should not be passed to throws_shape function");
}
TEST_CASE(binary_dyn_static_error)
{
migraphx::shape a_shape{migraphx::shape::float_type, {1, 4, 4}};
std::vector<migraphx::shape::dynamic_dimension> b{{1, 1, 0}, {4, 4, 4}, {4, 4, 0}};
migraphx::shape b_shape{migraphx::shape::float_type, b};
throws_shape(migraphx::make_op("add"), a_shape, b_shape);
}
TEST_CASE(broadcast)
{
{
......@@ -1213,6 +1221,21 @@ TEST_CASE(multibroadcast_2in_static_dyn1)
a_shape);
}
TEST_CASE(multibroadcast_2in_static_dyn2)
{
migraphx::shape a_shape{migraphx::shape::float_type, {1, 6}};
std::vector<migraphx::shape::dynamic_dimension> b{{8, 8, 0}, {6, 6, 0}};
migraphx::shape b_shape{migraphx::shape::float_type, b};
expect_shape(migraphx::shape{migraphx::shape::float_type, {{8, 8, 0}, {6, 6, 0}}},
migraphx::make_op("multibroadcast", {{"out_dyn_dims", migraphx::to_value(b)}}),
a_shape,
b_shape);
expect_shape(migraphx::shape{migraphx::shape::float_type, {{8, 8, 0}, {6, 6, 0}}},
migraphx::make_op("multibroadcast", {{"out_dyn_dims", migraphx::to_value(b)}}),
b_shape,
a_shape);
}
TEST_CASE(multibroadcast_2in_static_dyn_error0)
{
// doesn't match on first dimension
......@@ -1259,6 +1282,22 @@ TEST_CASE(multibroadcast_2in_dyn_dyn0)
a_shape);
}
TEST_CASE(multibroadcast_2in_dyn_dyn1)
{
std::vector<migraphx::shape::dynamic_dimension> a{{1, 4, 0}, {2, 4, 2}, {2, 4, 0}};
migraphx::shape a_shape{migraphx::shape::float_type, a};
std::vector<migraphx::shape::dynamic_dimension> b{{2, 4, 2}, {2, 4, 0}};
migraphx::shape b_shape{migraphx::shape::float_type, b};
expect_shape(migraphx::shape{migraphx::shape::float_type, {{1, 4, 0}, {2, 4, 2}, {2, 4, 0}}},
migraphx::make_op("multibroadcast", {{"out_dyn_dims", migraphx::to_value(a)}}),
a_shape,
b_shape);
expect_shape(migraphx::shape{migraphx::shape::float_type, {{1, 4, 0}, {2, 4, 2}, {2, 4, 0}}},
migraphx::make_op("multibroadcast", {{"out_dyn_dims", migraphx::to_value(a)}}),
b_shape,
a_shape);
}
TEST_CASE(multibroadcast_2in_dyn_dyn_error0)
{
// max doesn't match on second dimension of a
......
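The new multibroadcast test cases above pin down how a static or shorter shape broadcasts against dynamic dimensions. A pairwise rule consistent with those expectations, written as a stand-alone sketch (toy `dyn_dim` type; the operator's actual mismatch handling is covered by its own error tests), could look like this:

```cpp
#include <cstddef>
#include <stdexcept>
#include <utility>
#include <vector>

// Toy {min, max, opt} triple; stands in for migraphx::shape::dynamic_dimension.
struct dyn_dim
{
    std::size_t min, max, opt;
    bool is_fixed_one() const { return min == 1 and max == 1; }
    bool operator==(const dyn_dim& o) const
    {
        return min == o.min and max == o.max and opt == o.opt;
    }
};

// Rule inferred from the tests above: missing leading dimensions count as a
// fixed 1, a fixed-1 dimension adopts the other side's range, equal ranges
// pass through, and anything else is treated as a mismatch here.
std::vector<dyn_dim> broadcast_dyn_dims_sketch(std::vector<dyn_dim> a, std::vector<dyn_dim> b)
{
    if(a.size() > b.size())
        std::swap(a, b);
    a.insert(a.begin(), b.size() - a.size(), dyn_dim{1, 1, 0});
    std::vector<dyn_dim> out;
    for(std::size_t i = 0; i < b.size(); ++i)
    {
        if(a[i] == b[i] or a[i].is_fixed_one())
            out.push_back(b[i]);
        else if(b[i].is_fixed_one())
            out.push_back(a[i]);
        else
            throw std::runtime_error("incompatible dynamic dimensions");
    }
    return out;
}

int main()
{
    // Same operands and expectation as multibroadcast_2in_dyn_dyn1 above.
    auto out = broadcast_dyn_dims_sketch({{1, 4, 0}, {2, 4, 2}, {2, 4, 0}},
                                         {{2, 4, 2}, {2, 4, 0}});
    std::vector<dyn_dim> expected = {{1, 4, 0}, {2, 4, 2}, {2, 4, 0}};
    return out == expected ? 0 : 1;
}
```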
......@@ -38,6 +38,27 @@ TEST_CASE(test_shape_default)
EXPECT(s.elements() == 0);
EXPECT(s.bytes() == 0);
}
TEST_CASE(test_dyn_4arg_constructor)
{
migraphx::shape s{migraphx::shape::float_type,
{
1,
4,
4,
},
{
4,
4,
4,
},
{0, 0, 0}};
std::vector<migraphx::shape::dynamic_dimension> expected_dyn_dims = {
{1, 4, 0}, {4, 4, 0}, {4, 4, 0}};
EXPECT(s.dynamic());
EXPECT(s.dyn_dims() == expected_dyn_dims);
}
TEST_CASE(test_shape_assign)
{
migraphx::shape s1{migraphx::shape::float_type, {100, 32, 8, 8}};
......
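The `test_dyn_4arg_constructor` case above implies that the four-argument constructor zips per-dimension mins, maxes, and opt values into `{min, max, opt}` triples. A toy sketch reproducing exactly that expectation (stand-in `dyn_dim` type, not the real constructor):

```cpp
#include <cassert>
#include <cstddef>
#include <vector>

// Toy triple matching the {min, max, opt} values the test expects;
// the real migraphx::shape::dynamic_dimension differs in detail.
struct dyn_dim
{
    std::size_t min, max, opt;
    bool operator==(const dyn_dim& other) const
    {
        return min == other.min and max == other.max and opt == other.opt;
    }
};

// Zips the three per-dimension vectors the way the 4-argument constructor in
// the test appears to: element i of each vector forms one dynamic dimension.
std::vector<dyn_dim> zip_dyn_dims(const std::vector<std::size_t>& mins,
                                  const std::vector<std::size_t>& maxes,
                                  const std::vector<std::size_t>& opts)
{
    assert(mins.size() == maxes.size() and maxes.size() == opts.size());
    std::vector<dyn_dim> result;
    for(std::size_t i = 0; i < mins.size(); ++i)
        result.push_back({mins[i], maxes[i], opts[i]});
    return result;
}

int main()
{
    // Same inputs and expectation as test_dyn_4arg_constructor above.
    auto dims = zip_dyn_dims({1, 4, 4}, {4, 4, 4}, {0, 0, 0});
    std::vector<dyn_dim> expected = {{1, 4, 0}, {4, 4, 0}, {4, 4, 0}};
    return dims == expected ? 0 : 1;
}
```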