[master][submodule] Upgrade oneDNN to v2.5.1 (#20662)
* Change oneDNN build flag names from DNNL_* to ONEDNN_*

* [submodule] Upgrade oneDNN used on master to v2.5.1

As there is currently a bug regarding conv+sum fusion,
some tests have been temporarily adjusted.

* Replace internal link
bartekkuncer authored Jan 25, 2022
1 parent 69e6c04 commit c93c152
Showing 5 changed files with 15 additions and 12 deletions.
2 changes: 1 addition & 1 deletion 3rdparty/onednn
Submodule onednn updated 1406 files
16 changes: 8 additions & 8 deletions CMakeLists.txt
@@ -312,16 +312,16 @@ if(USE_ONEDNN)
endif()

function(load_onednn)
-  set(DNNL_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
-  set(DNNL_BUILD_EXAMPLES OFF CACHE INTERNAL "" FORCE)
-  set(DNNL_ARCH_OPT_FLAGS "" CACHE INTERNAL "" FORCE)
-  set(DNNL_ENABLE_JIT_PROFILING OFF CACHE INTERNAL "" FORCE)
-  set(DNNL_LIBRARY_TYPE STATIC CACHE INTERNAL "" FORCE)
-  set(DNNL_ENABLE_CONCURRENT_EXEC ON CACHE INTERNAL "" FORCE)
-  set(DNNL_ENABLE_PRIMITIVE_CACHE ON CACHE INTERNAL "" FORCE)
+  set(ONEDNN_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
+  set(ONEDNN_BUILD_EXAMPLES OFF CACHE INTERNAL "" FORCE)
+  set(ONEDNN_ARCH_OPT_FLAGS "" CACHE INTERNAL "" FORCE)
+  set(ONEDNN_ENABLE_JIT_PROFILING OFF CACHE INTERNAL "" FORCE)
+  set(ONEDNN_LIBRARY_TYPE STATIC CACHE INTERNAL "" FORCE)
+  set(ONEDNN_ENABLE_CONCURRENT_EXEC ON CACHE INTERNAL "" FORCE)
+  set(ONEDNN_ENABLE_PRIMITIVE_CACHE ON CACHE INTERNAL "" FORCE)

if(NOT USE_OPENMP)
-    set(DNNL_CPU_RUNTIME SEQ CACHE INTERNAL "" FORCE)
+    set(ONEDNN_CPU_RUNTIME SEQ CACHE INTERNAL "" FORCE)
endif()

set(CMAKE_INSTALL_INCLUDEDIR "${CMAKE_INSTALL_INCLUDEDIR}/onednn")
2 changes: 1 addition & 1 deletion tests/cpp/operator/dnnl_test.cc
@@ -101,7 +101,7 @@ static void VerifyDefMem(const dnnl::memory& mem) {

TEST(DNNL_UTIL_FUNC, MemFormat) {
// Check whether the number of format is correct.
-  CHECK_EQ(dnnl_format_tag_last, 385);
+  CHECK_EQ(dnnl_format_tag_last, 503);
CHECK_EQ(dnnl_nchw, 5);
CHECK_EQ(dnnl_oihw, 5);
}
5 changes: 4 additions & 1 deletion tests/python/dnnl/subgraphs/subgraph_common.py
@@ -42,7 +42,10 @@
}
}

-DATA_SHAPE=[(64, 4, 10, 10), (4, 3, 24, 24), (1, 16, 32, 32)]
+DATA_SHAPE=[(64, 4, 10, 10), (4, 4, 24, 24), (1, 16, 32, 32)]
+# The second shape has been temporarily changed from (4, 3, 24, 24) to (4, 4, 24, 24) due to
+# a bug regarding conv+sum fusion when the number of input channels is < 4. It will be reverted
+# as soon as the problem is fixed. Issue: https://github.com/apache/incubator-mxnet/issues/20826.

# Helpers
class RELU6(nn.HybridBlock):
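The workaround comment above pins the second test shape at four input channels until the conv+sum fusion bug is fixed, but nothing in the test helpers enforces that constraint when new shapes are added. Below is a minimal sketch of such a guard, assuming the same NCHW layout as DATA_SHAPE; the helper name `assert_shapes_avoid_fusion_bug` and the `MIN_CHANNELS` constant are hypothetical and not part of this commit.

```python
# Hypothetical guard (not part of this commit): while the conv+sum fusion bug
# affects inputs with fewer than 4 channels, keep every NCHW test shape at or
# above that threshold so the fused graphs stay on the working code path.
MIN_CHANNELS = 4  # threshold taken from the workaround comment above

def assert_shapes_avoid_fusion_bug(shapes, min_channels=MIN_CHANNELS):
    """Raise if any NCHW shape would hit the known conv+sum fusion bug."""
    for shape in shapes:
        _, channels, _, _ = shape  # NCHW: channels is the second dimension
        assert channels >= min_channels, (
            f"shape {shape} has {channels} input channels (< {min_channels}); "
            "see https://github.com/apache/incubator-mxnet/issues/20826")

# Usage with the shapes from this change; (4, 3, 24, 24) would fail the check.
assert_shapes_avoid_fusion_bug([(64, 4, 10, 10), (4, 4, 24, 24), (1, 16, 32, 32)])
```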
2 changes: 1 addition & 1 deletion tools/dependencies/README.md
@@ -57,7 +57,7 @@ The dependencies could be categorized by several groups: BLAS libraries, CPU-bas
| Dependencies | MXNet Version |
| :------------: |:-------------:|
|OpenBLAS| 0.3.9 |
-|oneDNN| 2.3.3 |
+|oneDNN| 2.5.1 |
|CUDA| 10.1 |
|cuDNN| 7.5.1 |
|NCCL| 2.4.2 |
