
Commit 9e06444

Address review comments.

mdfaijul committed Jan 25, 2024
1 parent 273aef3 commit 9e06444

Showing 2 changed files with 9 additions and 8 deletions.
1 change: 0 additions & 1 deletion tensorflow/core/kernels/mkl/mkl_batch_matmul_op.cc
@@ -38,7 +38,6 @@ limitations under the License.
#include "tensorflow/core/platform/errors.h"
#include "tensorflow/core/platform/types.h"
#include "tensorflow/core/util/matmul_bcast.h"
#include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"

namespace tensorflow {

16 changes: 9 additions & 7 deletions tensorflow/core/kernels/mkl/mkl_matmul_ops_common.h
@@ -21,14 +21,14 @@ limitations under the License.
#include <string>
#include <vector>

#include "unsupported/Eigen/CXX11/Tensor" // from @eigen_archive
#include "dnnl.hpp"
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/tensor_util.h"
#include "tensorflow/core/kernels/mkl/mkl_kernel_util.h"
#include "tensorflow/core/util/mkl_util.h"
#include "tensorflow/core/util/onednn_env_vars.h"
#include "unsupported/Eigen/CXX11/Tensor" // from @eigen_archive
#if defined(DNNL_AARCH64_USE_ACL) && defined(ENABLE_ONEDNN_OPENMP)
#include "tensorflow/core/platform/mutex.h"
#endif // DNNL_AARCH64_USE_ACL && ENABLE_ONEDNN_OPENMP
@@ -814,7 +814,7 @@ class MklMatMulPrimitive : public MklPrimitive {
void Execute(const std::shared_ptr<stream>& stream, const Tlhs* a_data,
const Trhs* b_data, const Toutput* c_data,
const MklMatMulParams& matmul_params, void* sp_data,
-const std::vector<void*> binary_op_fusions_data = {}) {
+const std::vector<void*>& binary_op_fusions_data = {}) {
#if defined(DNNL_AARCH64_USE_ACL) && defined(ENABLE_ONEDNN_OPENMP)
mutex_lock lock(primitive_execution_mu_);
#endif
@@ -829,9 +829,10 @@
static_cast<void*>(const_cast<Toutput*>(c_data)), *stream);
context_.sp_mem->set_data_handle(sp_data, *stream);

-for (int i = 0; i < num_post_ops_data; ++i)
+for (int i = 0; i < num_post_ops_data; ++i) {
context_.post_ops_mem[i]->set_data_handle(binary_op_fusions_data[i],
*stream);
+}
#else
context_.a_mem->set_data_handle(
static_cast<void*>(const_cast<Tlhs*>(a_data)));
@@ -861,8 +862,9 @@
context_.b_mem->set_data_handle(DummyData);
context_.c_mem->set_data_handle(DummyData);
context_.sp_mem->set_data_handle(DummyData);
-for (int i = 0; i < num_post_ops_data; ++i)
+for (int i = 0; i < num_post_ops_data; ++i) {
context_.post_ops_mem[i]->set_data_handle(DummyData);
+}
}

std::shared_ptr<dnnl::matmul::primitive_desc> GetPrimitiveDesc() const {
@@ -878,7 +880,7 @@
std::shared_ptr<dnnl::memory> c_mem;
std::shared_ptr<dnnl::memory> sp_mem;

-// Quantization scale related memory
+// Quantization scale related memory.
std::shared_ptr<dnnl::memory> lhs_scale_mem;
std::shared_ptr<dnnl::memory> rhs_scale_mem;
std::shared_ptr<dnnl::memory> dst_scale_mem;
@@ -896,7 +898,7 @@
std::shared_ptr<dnnl::memory::desc> b_md;
std::shared_ptr<dnnl::memory::desc> c_md;

-// Quantization scale related memory descriptors
+// Quantization scale related memory descriptors.
std::shared_ptr<dnnl::memory::desc> lhs_scale_md;
std::shared_ptr<dnnl::memory::desc> rhs_scale_md;
std::shared_ptr<dnnl::memory::desc> dst_scale_md;
@@ -969,7 +971,7 @@
dnnl::primitive_attr post_ops_attr;
dnnl::post_ops post_ops;
std::unordered_map<string, bool> is_scale_set;
-int binary_post_ops_count = 0; // Keep track op binary fusions
+int binary_post_ops_count = 0; // Keep track of binary op fusions.
if (!post_op_params.empty()) {
for (auto const& post_op_param : post_op_params) {
if (post_op_param.name == "lhs_scale") {
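Editor's note on the two recurring fixes in this file (a sketch under stated assumptions, not part of the commit itself): passing binary_op_fusions_data as const std::vector<void*>& rather than by value avoids copying the vector on every Execute() call, and the = {} default still works because the empty temporary binds to the const reference for the duration of the call; the braces added around the single-statement for loops are the matching defensive-style fix. A minimal standalone C++ illustration with hypothetical names, not the TensorFlow API:

#include <cstdio>
#include <vector>

// Pass by const reference: no per-call copy of the vector; `= {}` binds
// an empty temporary when the caller omits the argument.
void Execute(const std::vector<void*>& fusion_data = {}) {
  for (size_t i = 0; i < fusion_data.size(); ++i) {  // Braced body, even
    std::printf("handle %zu: %p\n", i, fusion_data[i]);  // for one statement.
  }
}

int main() {
  int value = 0;
  std::vector<void*> handles = {&value};
  Execute(handles);  // Binds directly to `handles`; nothing is copied.
  Execute();         // Uses the defaulted empty vector.
  return 0;
}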
