Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

cpu: Fixes some coverity issues (copy instead of move, const auto instead of const auto&). #2626

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/cpu/ref_fused_convolution.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ struct ref_fused_convolution_fwd_t : public primitive_t {
arg_cache.append_inout_arg(
DNNL_ARG_FROM, sp_begin, from_md, true);
arg_cache.append_inout_arg(DNNL_ARG_TO, sp_end, to_md, false);
args_.push_back(arg_cache);
args_.push_back(std::move(arg_cache));

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I did not see a related Coverity issue in the latest Coverity report. Could you point me to the CID, or check whether these changes are needed?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There are a variety of CIDs represented here, but they are in the minor category. This particular one is 6925574.

// Increment scratchpad offsets
sp_begin = sp_end;
Expand Down Expand Up @@ -340,7 +340,7 @@ struct ref_fused_convolution_fwd_t : public primitive_t {
| DNNL_ARG_SRC_1));
}

args_.push_back(arg_cache);
args_.push_back(std::move(arg_cache));

while (++po_op_iter < end) {
if (utils::one_of(po.entry_[po_op_iter].kind,
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/sycl/stream.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ struct stream_t : public cpu::cpu_stream_t {
register_deps(cgh);
submit_cpu_primitive(this, prim_iface, exec_ctx, cgh);
});
sycl_ctx().set_deps({event});
sycl_ctx().set_deps({std::move(event)});
return status::success;
}

Expand Down
4 changes: 2 additions & 2 deletions src/cpu/sycl/stream_submit_cpu_primitive.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ void init_thunk_params(
thunk_params_t *p, accessor_t acc, accessor_types... accessors) {
p->native_pointers[N - sizeof...(accessor_types) - 1]
= reinterpret_cast<uintptr_t>(&acc[0]);
init_thunk_params<N>(p, accessors...);
init_thunk_params<N>(p, std::move(accessors)...);
}

template <typename... param_types>
Expand All @@ -57,7 +57,7 @@ status_t submit_cpu_primitive_with_params_impl(
constexpr size_t nparams = sizeof...(param_types);

// Extract pointers from params
init_thunk_params<nparams>(&thunk_params, params...);
init_thunk_params<nparams>(&thunk_params, std::move(params)...);

dnnl_impl_sycl_cpu_thunk(&thunk_params);
});
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/brgemm/brgemm_containers.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ status_t brgemm_kernel_container_t::insert(int idx, const brgemm_desc_t *brg) {
CHECK(brgemm_kernel_create(&brg_kernel, *brg));
std::shared_ptr<brgemm_kernel_t> sptr(brg_kernel);
lock_write();
const auto kernel_ret = get_set().insert(sptr);
const auto kernel_ret = get_set().insert(std::move(sptr));
refs_[idx] = kernel_ret.first->get();
unlock_write();
const auto brgemm_ret = brgemm_map_.insert({brg, refs_[idx]});
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_avx2_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ void jit_avx2_1x1_convolution_fwd_t::execute_forward(
const data_t *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_BIAS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
? binary_injector::prepare_binary_args(pd()->jcp_dw_->post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
: std::vector<const void *> {};
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_avx512_common_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ void jit_avx512_common_1x1_convolution_fwd_t<src_type, wei_type,
const dst_data_t *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_BIAS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->dw_conv_pd_
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->dw_conv_pd_
? binary_injector::prepare_binary_args(
pd()->dw_conv_pd_->jcp_.post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_avx512_core_bf16_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ void jit_avx512_core_bf16_1x1_convolution_fwd_t<dst_type>::execute_forward(
const dw_wei_data_t *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_WEIGHTS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_ != nullptr
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_ != nullptr
? binary_injector::prepare_binary_args(pd()->jcp_dw_->post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
: std::vector<const void *> {};
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_avx512_core_x8s8s32x_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ status_t jit_avx512_core_x8s8s32x_1x1_convolution_fwd_t::execute_forward(
const char *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_BIAS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
? binary_injector::prepare_binary_args(pd()->jcp_dw_->post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
: std::vector<const void *> {};
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_brgemm_conv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -556,7 +556,7 @@ status_t brgemm_convolution_fwd_t<isa>::pd_t::init(engine_t *engine) {
|| (jcp_.dst_dt != jcp_.acc_dt) || jcp_.with_sum || jcp_.use_M_mask
|| jcp_.src_zero_point || jcp_.dst_zero_point;

const auto Mv = (jcp_.M_tail > 0 && jcp_.M_tail != jcp_.M)
const auto& Mv = (jcp_.M_tail > 0 && jcp_.M_tail != jcp_.M)
? std::vector<int> {jcp_.M, jcp_.M_tail}
: std::vector<int> {jcp_.M};

Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_sse41_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ void jit_sse41_1x1_convolution_fwd_t::execute_forward(
const data_t *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_BIAS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->dw_conv_pd_ != nullptr
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->dw_conv_pd_ != nullptr
? binary_injector::prepare_binary_args(
pd()->dw_conv_pd_->jcp_.post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
Expand Down
2 changes: 1 addition & 1 deletion src/cpu/x64/jit_uni_x8s8s32x_1x1_convolution.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ status_t jit_uni_x8s8s32x_1x1_convolution_fwd_t<isa>::execute_forward(
const char *, DNNL_ARG_ATTR_POST_OP_DW | DNNL_ARG_BIAS);
const auto post_ops_binary_rhs_arg_vec
= binary_injector::prepare_binary_args(pd()->jcp_.post_ops, ctx);
const auto post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
const auto& post_ops_binary_rhs_arg_vec_dw = pd()->jcp_dw_
? binary_injector::prepare_binary_args(pd()->jcp_dw_->post_ops, ctx,
pd()->jcp_.post_ops.entry_.size() + 1)
: std::vector<const void *> {};
Expand Down
Loading