src/common: verbose: update dispatch checks in reference implementations
avmanerikar committed Oct 3, 2024
1 parent a9fe108 commit 8393e5c
Showing 11 changed files with 232 additions and 124 deletions.
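The pattern throughout: each "bool ok = cond1 && cond2 && ...;" conjunction ending in "return ok ? status::success : status::unimplemented;" is split into one VDISPATCH_MATMUL / VDISPATCH_CONV statement per condition, each paired with a VERBOSE_* reason string, so the first failing check returns early and can report why the implementation was rejected. Below is a minimal sketch of how such a macro behaves; the real definitions live in oneDNN's src/common/verbose.hpp and also record the primitive kind, source location, and primitive info string. The names VDISPATCH_SKETCH and init_sketch are hypothetical stand-ins, not the library's API:

    #include <cstdio>

    // Hypothetical stand-in for oneDNN's status codes.
    enum class status { success, unimplemented };

    // Sketch of the early-return dispatch-check pattern: if cond fails,
    // log the reason and leave the enclosing function with "unimplemented".
    #define VDISPATCH_SKETCH(cond, msg) \
        do { \
            if (!(cond)) { \
                std::printf("onednn_verbose,create:dispatch,%s\n", (msg)); \
                return status::unimplemented; \
            } \
        } while (0)

    // Each requirement becomes its own statement with its own message,
    // replacing one opaque `bool ok = a && b && ...;` chain.
    status init_sketch(bool is_dense, bool dt_supported) {
        VDISPATCH_SKETCH(is_dense, "unsupported sparse config");
        VDISPATCH_SKETCH(dt_supported, "unsupported datatype");
        return status::success;
    }

    int main() {
        // The first failing check wins: this logs "unsupported datatype"
        // and init_sketch() stops there.
        return init_sketch(true, false) == status::success ? 0 : 1;
    }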
95 changes: 60 additions & 35 deletions src/cpu/matmul/ref_matmul.hpp
@@ -49,25 +49,36 @@ struct ref_matmul_t : public primitive_t {
         const auto bia_type = weights_md(1)->data_type;
         const auto dst_type = dst_md(0)->data_type;

-        bool ok = is_dense_format_kind()
-                && utils::one_of(
-                        src_type, f32, bf16, f16, f8_e5m2, f8_e4m3, f4_e2m1)
-                && utils::one_of(wei_type, f32, bf16, f16, f8_e5m2, f8_e4m3,
-                        f4_e2m1, u8, s8, u4, s4)
-                && utils::one_of(
-                        dst_type, f32, bf16, f16, f8_e5m2, f8_e4m3, f4_e2m1)
-                && (src_type == wei_type
-                        || utils::one_of(wei_type, u8, s8, u4, s4))
-                /* int8 weights decompression support */
-                && IMPLICATION(utils::one_of(wei_type, u8, s8),
-                        attr_.mayiconvert(wei_type, src_type))
-                && IMPLICATION(src_type == f32, dst_type == f32)
-                && IMPLICATION(src_type == bf16,
-                        utils::one_of(dst_type, f32, bf16))
-                && IMPLICATION(
-                        src_type == f16, utils::one_of(dst_type, f32, f16))
-                // TODO: any implication on allowed dst data type for fp8?
-                && IMPLICATION(with_bias(),
+        VDISPATCH_MATMUL(
+                is_dense_format_kind(), VERBOSE_UNSUPPORTED_SPARSE_CFG);
+        VDISPATCH_MATMUL(utils::one_of(src_type, f32, bf16, f16, f8_e5m2,
+                                 f8_e4m3, f4_e2m1),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(utils::one_of(wei_type, f32, bf16, f16, f8_e5m2,
+                                 f8_e4m3, f4_e2m1, u8, s8, u4, s4),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(utils::one_of(dst_type, f32, bf16, f16, f8_e5m2,
+                                 f8_e4m3, f4_e2m1),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(
+                (src_type == wei_type
+                        || utils::one_of(wei_type, u8, s8, u4, s4)),
+                VERBOSE_UNSUPPORTED_DT);
+        /* int8 weights decompression support */
+        VDISPATCH_MATMUL(IMPLICATION(utils::one_of(wei_type, u8, s8),
+                                 attr_.mayiconvert(wei_type, src_type)),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(IMPLICATION(src_type == f32, dst_type == f32),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(IMPLICATION(src_type == bf16,
+                                 utils::one_of(dst_type, f32, bf16)),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(IMPLICATION(src_type == f16,
+                                 utils::one_of(dst_type, f32, f16)),
+                VERBOSE_UNSUPPORTED_DT);
+        // TODO: any implication on allowed dst data type for fp8?
+        VDISPATCH_MATMUL(
+                IMPLICATION(with_bias(),
                         utils::one_of(
                                 bia_type, f32, bf16, f16, f8_e5m2, f8_e4m3)
                                 && IMPLICATION(
@@ -78,31 +89,45 @@ struct ref_matmul_t : public primitive_t {
                                 utils::one_of(bia_type, f32, bf16))
                 // TODO: any implication on allowed bias
                 // data type for fp8?
-                )
-                && platform::has_data_type_support(src_type)
-                && attr()->has_default_values(
-                        smask_t::scales_runtime_data_type
+                        ),
+                VERBOSE_UNSUPPORTED_BIAS_CFG);
+        VDISPATCH_MATMUL(platform::has_data_type_support(src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(
+                attr()->has_default_values(smask_t::scales_runtime_data_type
                                 | smask_t::scales_runtime_groups
                                 | smask_t::zero_points_runtime_data_type
                                 | smask_t::zero_points_runtime_groups
                                 | smask_t::post_ops | smask_t::sum_dt
                                 | smask_t::fpmath_mode | smask_t::dropout
                                 | smask_t::rounding_mode,
-                        dst_type)
-                && attr_.post_ops_.check_sum_consistency(dst_type,
-                        /* is_int8 */ false)
-                && ref_post_ops_t::primitive_kind_ok(attr()->post_ops_)
-                && attr_scales_ok() && set_default_formats()
-                && zero_points_ok()
-                && attr_.set_default_formats(dst_md(0)) == status::success
-                && IMPLICATION(!attr_.dropout_.has_default_values(),
+                        dst_type),
+                VERBOSE_UNSUPPORTED_ATTR);
+        VDISPATCH_MATMUL(attr_.post_ops_.check_sum_consistency(dst_type,
+                                 /* is_int8 */ false),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_MATMUL(
+                ref_post_ops_t::primitive_kind_ok(attr()->post_ops_),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_MATMUL(attr_scales_ok(), VERBOSE_UNSUPPORTED_SCALES_CFG);
+        VDISPATCH_MATMUL(set_default_formats(), VERBOSE_UNSUPPORTED_TAG);
+        VDISPATCH_MATMUL(zero_points_ok(), VERBOSE_UNSUPPORTED_ZP_CFG);
+        VDISPATCH_MATMUL(
+                attr_.set_default_formats(dst_md(0)) == status::success,
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_MATMUL(
+                IMPLICATION(!attr_.dropout_.has_default_values(),
                         utils::one_of(
                                 attr_.dropout_.dropout_desc_.data_type, u8,
-                                s8))
-                && IMPLICATION(!attr_.dropout_.has_default_values(),
+                                s8)),
+                VERBOSE_UNSUPPORTED_ATTR);
+        VDISPATCH_MATMUL(
+                IMPLICATION(!attr_.dropout_.has_default_values(),
                         memory_desc_wrapper(dst_md(0)).similar_to(
-                                attr_.dropout_.dropout_desc_, true, false));
-        return ok ? status::success : status::unimplemented;
+                                attr_.dropout_.dropout_desc_, true, false)),
+                VERBOSE_UNSUPPORTED_ATTR);
+
+        return status::success;
     }

 private:
47 changes: 31 additions & 16 deletions src/cpu/matmul/ref_matmul_int8.hpp
@@ -48,26 +48,41 @@ struct ref_matmul_int8_t : public primitive_t {
         const auto bia_type = weights_md(1)->data_type;
         const auto dst_type = dst_md(0)->data_type;

-        bool ok = is_dense_format_kind() && utils::one_of(src_type, s8, u8)
-                && utils::one_of(wei_type, s8, u8, s4, u4)
-                && IMPLICATION(with_bias(),
-                        utils::one_of(
-                                bia_type, f32, bf16, f16, s32, s8, u8))
-                && utils::one_of(dst_type, f32, bf16, f16, s32, s8, u8)
-                && attr()->has_default_values(
-                        smask_t::scales_runtime_data_type
+        VDISPATCH_MATMUL(
+                is_dense_format_kind(), VERBOSE_UNSUPPORTED_SPARSE_CFG);
+        VDISPATCH_MATMUL(
+                utils::one_of(src_type, s8, u8), VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(utils::one_of(wei_type, s8, u8, s4, u4),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(IMPLICATION(with_bias(),
+                                 utils::one_of(bia_type, f32, bf16, f16,
+                                         s32, s8, u8)),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(
+                utils::one_of(dst_type, f32, bf16, f16, s32, s8, u8),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_MATMUL(
+                attr()->has_default_values(smask_t::scales_runtime_data_type
                                 | smask_t::scales_runtime_groups
                                 | smask_t::zero_points_runtime_data_type
                                 | smask_t::zero_points_runtime_groups
                                 | smask_t::post_ops | smask_t::sum_dt,
-                        dst_type)
-                && attr_.post_ops_.check_sum_consistency(dst_type,
-                        /* is_int8 */ true)
-                && ref_post_ops_t::primitive_kind_ok(attr()->post_ops_)
-                && attr_scales_ok() && attr_zero_points_ok()
-                && set_default_formats()
-                && attr_.set_default_formats(dst_md(0)) == status::success;
-        return ok ? status::success : status::unimplemented;
+                        dst_type),
+                VERBOSE_UNSUPPORTED_ATTR);
+        VDISPATCH_MATMUL(attr_.post_ops_.check_sum_consistency(dst_type,
+                                 /* is_int8 */ true),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_MATMUL(
+                ref_post_ops_t::primitive_kind_ok(attr()->post_ops_),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_MATMUL(attr_scales_ok(), VERBOSE_UNSUPPORTED_SCALES_CFG);
+        VDISPATCH_MATMUL(attr_zero_points_ok(), VERBOSE_UNSUPPORTED_ZP_CFG);
+        VDISPATCH_MATMUL(set_default_formats(), VERBOSE_UNSUPPORTED_TAG);
+        VDISPATCH_MATMUL(
+                attr_.set_default_formats(dst_md(0)) == status::success,
+                VERBOSE_UNSUPPORTED_POSTOP);
+
+        return status::success;
     }

 private:
111 changes: 72 additions & 39 deletions src/cpu/ref_convolution.hpp
@@ -1,5 +1,5 @@
 /*******************************************************************************
-* Copyright 2016-2023 Intel Corporation
+* Copyright 2016-2024 Intel Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -45,23 +45,38 @@ struct ref_convolution_fwd_t : public primitive_t {
         const auto bia_type = weights_md(1)->data_type;
         const auto dst_type = dst_md(0)->data_type;

-        bool ok = is_fwd()
-                && set_default_alg_kind(alg_kind::convolution_direct)
-                && platform::has_data_type_support(src_type)
-                && platform::has_data_type_support(bia_type)
-                && platform::has_data_type_support(dst_type)
-                && utils::one_of(src_type, f32, bf16, f16, f8_e5m2, f8_e4m3)
-                && src_type == wei_type
-                && utils::one_of(dst_type, src_type, f32)
-                && utils::one_of(bia_type, data_type::undef, src_type, f32)
-                && set_default_formats()
-                && attr()->has_default_values(
-                        smask_t::post_ops | smask_t::sum_dt, dst_type)
-                && attr()->post_ops_.check_sum_consistency(
-                        dst_type, /* is_int8 */ false)
-                && post_ops_ok()
-                && attr_.set_default_formats(dst_md(0)) == status::success;
-        return ok ? status::success : status::unimplemented;
+        VDISPATCH_CONV(is_fwd(), VERBOSE_BAD_PROPKIND);
+        VDISPATCH_CONV(set_default_alg_kind(alg_kind::convolution_direct),
+                VERBOSE_BAD_ALGORITHM);
+        VDISPATCH_CONV(platform::has_data_type_support(src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(platform::has_data_type_support(bia_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(platform::has_data_type_support(dst_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(
+                utils::one_of(src_type, f32, bf16, f16, f8_e5m2, f8_e4m3),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(src_type == wei_type, VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(dst_type, src_type, f32),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(
+                utils::one_of(bia_type, data_type::undef, src_type, f32),
+                VERBOSE_UNSUPPORTED_BIAS_CFG);
+        VDISPATCH_CONV(set_default_formats(), VERBOSE_UNSUPPORTED_TAG);
+        VDISPATCH_CONV(
+                attr()->has_default_values(
+                        smask_t::post_ops | smask_t::sum_dt, dst_type),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_CONV(attr()->post_ops_.check_sum_consistency(
+                               dst_type, /* is_int8 */ false),
+                VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_CONV(post_ops_ok(), VERBOSE_UNSUPPORTED_POSTOP);
+        VDISPATCH_CONV(
+                attr_.set_default_formats(dst_md(0)) == status::success,
+                VERBOSE_UNSUPPORTED_POSTOP);
+
+        return status::success;
     }

 protected:
@@ -111,16 +126,24 @@ struct ref_convolution_bwd_data_t : public primitive_t {
         const auto wei_type = weights_md(0)->data_type;
         const auto diff_dst_type = diff_dst_md(0)->data_type;

-        bool ok = desc()->prop_kind == prop_kind::backward_data
-                && set_default_alg_kind(alg_kind::convolution_direct)
-                && platform::has_data_type_support(diff_src_type)
-                && platform::has_data_type_support(diff_dst_type)
-                && utils::one_of(diff_dst_type, f32, bf16, f16)
-                && wei_type == diff_dst_type
-                && utils::one_of(diff_src_type, f32, diff_dst_type)
-                && set_default_formats() && attr()->has_default_values();
-
-        return ok ? status::success : status::unimplemented;
+        VDISPATCH_CONV(desc()->prop_kind == prop_kind::backward_data,
+                VERBOSE_BAD_PROPKIND);
+        VDISPATCH_CONV(set_default_alg_kind(alg_kind::convolution_direct),
+                VERBOSE_BAD_ALGORITHM);
+        VDISPATCH_CONV(platform::has_data_type_support(diff_src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(platform::has_data_type_support(diff_dst_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(diff_dst_type, f32, bf16, f16),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(wei_type == diff_dst_type, VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(diff_src_type, f32, diff_dst_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(set_default_formats(), VERBOSE_UNSUPPORTED_TAG);
+        VDISPATCH_CONV(
+                attr()->has_default_values(), VERBOSE_UNSUPPORTED_ATTR);
+
+        return status::success;
     }

 protected:
@@ -159,17 +182,27 @@ struct ref_convolution_bwd_weights_t : public primitive_t {
         const auto diff_bia_type = diff_weights_md(1)->data_type;
         const auto diff_dst_type = diff_dst_md(0)->data_type;

-        bool ok = desc()->prop_kind == prop_kind::backward_weights
-                && set_default_alg_kind(alg_kind::convolution_direct)
-                && platform::has_data_type_support(src_type)
-                && platform::has_data_type_support(diff_wei_type)
-                && utils::one_of(src_type, f32, bf16, f16)
-                && diff_dst_type == src_type
-                && utils::one_of(diff_wei_type, f32, src_type)
-                && utils::one_of(
-                        diff_bia_type, data_type::undef, f32, src_type)
-                && set_default_formats() && attr()->has_default_values();
-        return ok ? status::success : status::unimplemented;
+        VDISPATCH_CONV(desc()->prop_kind == prop_kind::backward_weights,
+                VERBOSE_BAD_PROPKIND);
+        VDISPATCH_CONV(set_default_alg_kind(alg_kind::convolution_direct),
+                VERBOSE_BAD_ALGORITHM);
+        VDISPATCH_CONV(platform::has_data_type_support(src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(platform::has_data_type_support(diff_wei_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(src_type, f32, bf16, f16),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(diff_dst_type == src_type, VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(diff_wei_type, f32, src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(utils::one_of(diff_bia_type, data_type::undef, f32,
+                               src_type),
+                VERBOSE_UNSUPPORTED_DT);
+        VDISPATCH_CONV(set_default_formats(), VERBOSE_UNSUPPORTED_TAG);
+        VDISPATCH_CONV(
+                attr()->has_default_values(), VERBOSE_UNSUPPORTED_ATTR);
+
+        return status::success;
     }

 protected:
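With the checks split out this way, running with dispatch logging enabled (ONEDNN_VERBOSE=dispatch in recent oneDNN releases) surfaces the specific reason string for the first check a reference implementation fails — VERBOSE_UNSUPPORTED_DT, VERBOSE_UNSUPPORTED_POSTOP, and so on — where the old bool-ok chains collapsed every failure into a bare status::unimplemented with no explanation.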