// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.


#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {

template<typename TString>
15
static std::string _tf(TString filename, bool required = true)
16
{
17
    return findDataFile(std::string("dnn/onnx/") + filename, required);
18 19 20 21 22
}

class Test_ONNX_layers : public DNNTestLayer
{
public:
23 24 25 26
    bool required;

    Test_ONNX_layers() : required(true) { }

27 28 29 30 31 32
    enum Extension
    {
        npy,
        pb
    };

33
    void testONNXModels(const String& basename, const Extension ext = npy,
34
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
D
Dmitry Kurtaev 已提交
35
                        bool checkNoFallbacks = true, int numInps = 1)
36
    {
37
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
D
Dmitry Kurtaev 已提交
38 39
        std::vector<Mat> inps(numInps);
        Mat ref;
40
        if (ext == npy) {
D
Dmitry Kurtaev 已提交
41 42
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
43 44 45
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
D
Dmitry Kurtaev 已提交
46 47
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
48 49 50 51 52
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

D
Dmitry Kurtaev 已提交
53
        checkBackend(&inps[0], &ref);
54 55 56 57 58 59
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

60
        std::vector<String> inputNames;
D
Dmitry Kurtaev 已提交
61
        for (int i = 0; i < numInps; ++i)
62 63 64 65 66
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
67 68 69 70 71 72
        Mat out = net.forward("");

        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
D
Dmitry Kurtaev 已提交
73
            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
74 75 76 77 78 79 80 81
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
82
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
83 84
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
85 86 87
    }
};

88 89 90 91 92 93 94 95
TEST_P(Test_ONNX_layers, InstanceNorm)
{
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

96 97
TEST_P(Test_ONNX_layers, MaxPooling)
{
98 99 100 101
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
102
    testONNXModels("maxpooling", npy, 0, 0, false, false);
103 104 105
}
// Two chained MaxPool layers.
TEST_P(Test_ONNX_layers, MaxPooling_2)
{
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

// Basic 2D Convolution import.
TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}

114 115
TEST_P(Test_ONNX_layers, Convolution3D)
{
116
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
117
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
118
#endif
119 120
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
121 122 123
    testONNXModels("conv3d");
    testONNXModels("conv3d_bias");
}
124 125 126

TEST_P(Test_ONNX_layers, Two_convolution)
{
127
#if defined(INF_ENGINE_RELEASE)
128
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
129 130
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
131
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
132 133
#endif
    // Reference output values are in range [-0.855, 0.611]
134 135 136
    testONNXModels("two_convolution");
}

137 138
TEST_P(Test_ONNX_layers, Deconvolution)
{
139 140 141 142 143
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
144 145
}

146 147
TEST_P(Test_ONNX_layers, Deconvolution3D)
{
148 149
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
150
#endif
151
    if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
152 153 154 155 156 157 158
        throw SkipTestException("Only DLIE backend on CPU is supported");
    testONNXModels("deconv3d");
    testONNXModels("deconv3d_bias");
    testONNXModels("deconv3d_pad");
    testONNXModels("deconv3d_adjpad");
}

159 160 161 162 163 164 165 166
TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

// Gemm/Linear layer; OpenCL FP16 on the OpenCV backend is skipped.
TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

// ReLU activation import.
TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

176 177 178 179 180
TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

181 182 183 184 185
TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}

186 187 188
TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
189 190
    testONNXModels("reduce_mean_axis1");
    testONNXModels("reduce_mean_axis2");
191 192 193 194
}

// ReduceMean over a 5D (3D spatial) tensor; CPU only.
TEST_P(Test_ONNX_layers, ReduceMean3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("reduce_mean3d");
}

204 205 206 207 208
TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

209 210 211 212 213
TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}

214 215
TEST_P(Test_ONNX_layers, Concatenation)
{
216
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
217
    {
218 219 220
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
221
    }
222 223 224
    testONNXModels("concatenation");
}

225 226 227
TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
228
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
229
#endif
230 231 232 233
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
234 235 236
    testONNXModels("eltwise3d");
}

237 238 239 240 241
TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}

242 243
TEST_P(Test_ONNX_layers, MaxPooling3D)
{
244
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
245
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
246
#endif
247 248 249 250
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
251 252
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
253
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
254 255 256 257
}

// 3D AveragePool; CPU only.
TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("ave_pool3d");
}

270 271 272
TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
273
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
274
#endif
275 276 277 278
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
279 280 281 282 283
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("pool_conv_3d");
}

284 285 286 287 288
TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

L
Liubov Batanina 已提交
289 290
TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
291
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
292
    {
293 294 295
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
296
    }
L
Liubov Batanina 已提交
297 298 299
    testONNXModels("batch_norm_3d");
}

300 301 302 303 304 305 306 307 308
TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("frozenBatchNorm2d");
}

309 310 311 312 313 314 315
TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("batch_norm_subgraph");
}

316 317
TEST_P(Test_ONNX_layers, Transpose)
{
318
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
319
    {
320 321 322
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
323
    }
324 325 326
    testONNXModels("transpose");
}

327 328
TEST_P(Test_ONNX_layers, Multiplication)
{
329 330
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
331 332
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
333 334 335
    testONNXModels("mul");
}

336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359
TEST_P(Test_ONNX_layers, MatMul)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("matmul_2d");
    testONNXModels("matmul_3d");
    testONNXModels("matmul_4d");
}

// Expand (broadcast) over batch and channel dimensions.
TEST_P(Test_ONNX_layers, Expand)
{
    testONNXModels("expand_batch");
    testONNXModels("expand_channels");
}

// Expand over spatial dimensions; IE backends skipped (ngraph Multiply bug).
TEST_P(Test_ONNX_layers, ExpandHW)
{
    // ngraph::op::v1::Multiply bug
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("expand_hw");
}

360 361
TEST_P(Test_ONNX_layers, Constant)
{
362
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
363
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
364
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
365
       applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
366
#endif
367 368 369
    testONNXModels("constant");
}

D
Dmitry Kurtaev 已提交
370 371
TEST_P(Test_ONNX_layers, Padding)
{
372 373 374
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("padding", npy, 0, 0, false, false);
#else
D
Dmitry Kurtaev 已提交
375
    testONNXModels("padding");
376
#endif
D
Dmitry Kurtaev 已提交
377 378
}

379 380 381
TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
382 383 384
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
385 386
}

387 388
TEST_P(Test_ONNX_layers, ResizeUnfused)
{
389 390
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
391
    testONNXModels("upsample_unfused_torch1.2");
392 393 394
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
395
    testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
396 397
}

398 399 400 401 402 403 404 405 406 407
TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
    testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
}

408 409
TEST_P(Test_ONNX_layers, MultyInputs)
{
D
Dmitry Kurtaev 已提交
410 411
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}
412

D
Dmitry Kurtaev 已提交
413 414 415 416 417
TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
418 419
}

420 421 422 423 424
TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize", npy, 0, 0, false, true, 2);
}

425 426 427 428 429 430 431 432 433
TEST_P(Test_ONNX_layers, Div)
{
    const String model =  _tf("models/div.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

434 435 436 437
    // Reference output values range is -68.80928, 2.991873. So to avoid computational
    // difference for FP16 we'll perform reversed division (just swap inputs).
    Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
438
    Mat ref  = blobFromNPY(_tf("data/output_div.npy"));
439
    cv::divide(1.0, ref, ref);
440 441 442 443 444 445 446 447 448 449
    checkBackend(&inp1, &ref);

    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

450 451
TEST_P(Test_ONNX_layers, DynamicReshape)
{
452
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
453 454
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

455
    testONNXModels("dynamic_reshape");
456 457
    testONNXModels("dynamic_reshape_opset_11");
    testONNXModels("flatten_by_prod");
458
    testONNXModels("flatten_const");
459
}
460

461 462 463 464 465
TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}

466 467
TEST_P(Test_ONNX_layers, Squeeze)
{
468 469
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
470 471 472 473 474 475
    testONNXModels("squeeze");
}

// ReduceL2, including subgraph forms produced by normalization exports.
TEST_P(Test_ONNX_layers, ReduceL2)
{
    testONNXModels("reduceL2");
    testONNXModels("reduceL2_subgraph");
    testONNXModels("reduceL2_subgraph_2");
}

480 481 482 483 484 485 486 487 488 489 490 491
TEST_P(Test_ONNX_layers, Split)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_1");
    testONNXModels("split_2");
    testONNXModels("split_3");
    testONNXModels("split_4");
}

492 493
TEST_P(Test_ONNX_layers, Slice)
{
494 495 496
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("slice", npy, 0, 0, false, false);
#else
497
    testONNXModels("slice");
498
    testONNXModels("slice_opset_11");
499
#endif
500 501
}

D
dianlujitao 已提交
502 503 504
TEST_P(Test_ONNX_layers, Softmax)
{
    testONNXModels("softmax");
D
Dmitry Kurtaev 已提交
505
    testONNXModels("log_softmax", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
506
    testONNXModels("softmax_unfused");
D
dianlujitao 已提交
507 508
}

509 510
TEST_P(Test_ONNX_layers, Split_EltwiseMax)
{
511 512 513 514
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
515 516 517
    testONNXModels("split_max");
}

D
Dmitry Kurtaev 已提交
518 519
TEST_P(Test_ONNX_layers, LSTM)
{
520
    testONNXModels("lstm", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
521 522
}

D
Dmitry Kurtaev 已提交
523 524
TEST_P(Test_ONNX_layers, LSTM_bidirectional)
{
525
    testONNXModels("lstm_bidirectional", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
526 527
}

528 529 530 531 532 533
TEST_P(Test_ONNX_layers, Pad2d_Unfused)
{
    testONNXModels("ReflectionPad2d");
    testONNXModels("ZeroPad2d");
}

534 535
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());

536 537 538 539 540 541
class Test_ONNX_nets : public Test_ONNX_layers
{
public:
    Test_ONNX_nets() { required = false; }
};

542 543
TEST_P(Test_ONNX_nets, Alexnet)
{
544
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
545 546
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
547
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
548 549
#endif

550
    const String model =  _tf("models/alexnet.onnx", false);
551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(out, ref, "", default_l1,  default_lInf);
567
    expectNoFallbacksFromIE(net);
568 569 570 571 572 573 574 575 576
}

// SqueezeNet with protobuf-serialized reference tensors.
TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}

// GoogLeNet on a two-image batch against Caffe reference probabilities.
TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    const String model = _tf("models/googlenet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

// CaffeNet classification; MYRIAD X skipped on IE 2019R3.
TEST_P(Test_ONNX_nets, CaffeNet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("caffenet", pb);
}

// R-CNN ILSVRC13 network; relaxed L1 tolerance.
TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
}

// VGG16 with batch normalization; compared through a softmax.
TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3Gb

    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}

// ZFNet-512 classification.
TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
    testONNXModels("zfnet512", pb);
}

// ResNet-18 v1; compared through a softmax, fallback check relaxed on MYRIAD.
TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

// ResNet-50 v1; compared through a softmax, fallback check relaxed on MYRIAD.
TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

// ResNet-101 DUC/HDC segmentation network (very long test; many targets skipped).
TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
    {
        if (backend == DNN_BACKEND_OPENCV)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
        throw SkipTestException("Test is disabled for OpenCL targets");
    }
    testONNXModels("resnet101_duc_hdc", pb);
}

// Tiny YOLO v2 detection network with relaxed tolerances on FP16/MYRIAD.
TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
            && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
    )
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
    testONNXModels("tiny_yolo2", pb, l1, lInf);
}

// MNIST CNN; compared through a softmax due to huge raw output magnitudes.
TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}

// MobileNet v2; compared through a softmax.
TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}

// LResNet100E-IR face recognition network with per-backend tolerances.
TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
#endif
        CV_TEST_TAG_DEBUG_LONG
    );
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    }

    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}

// FER+ emotion recognition network with per-backend tolerances.
TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    double l1 = default_l1;
    double lInf = default_lInf;

    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
    testONNXModels("emotion_ferplus", pb, l1, lInf);
}

// Inception v2; compared through a softmax.
TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}

// DenseNet-121; compared through a softmax, fallback check relaxed on MYRIAD.
TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

807 808
TEST_P(Test_ONNX_nets, Inception_v1)
{
809
#if defined(INF_ENGINE_RELEASE)
810 811
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
812
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
813
#endif
814 815
    testONNXModels("inception_v1", pb);
}
816

817 818
TEST_P(Test_ONNX_nets, Shufflenet)
{
819
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
820
    {
821 822 823
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
824
    }
825 826 827
    testONNXModels("shufflenet", pb);
}

828 829
TEST_P(Test_ONNX_nets, Resnet34_kinetics)
{
830
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
831
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
832
#endif
833 834 835 836
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
837 838
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
839

A
Alexander Alekhin 已提交
840
    String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857
    Mat image0 = imread(findDataFile("dnn/dog416.png"));
    Mat image1 = imread(findDataFile("dnn/street.png"));

    Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
    Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));

    std::vector<Mat> images_0(16, image0);
    std::vector<Mat> images_1(16, image1);
    Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
    Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);

    Net permute;
    LayerParams lp;
    int order[] = {1, 0, 2, 3};
    lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
    permute.addLayerToPrev("perm", "Permute", lp);

858 859 860
    permute.setPreferableBackend(backend);
    permute.setPreferableTarget(target);

861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892
    permute.setInput(blob0);
    Mat input0 = permute.forward().clone();

    permute.setInput(blob1);
    Mat input1 = permute.forward().clone();

    int dims[] = {1, 3, 16, 112, 112};
    input0 = input0.reshape(0, 5, &dims[0]);
    input1 = input1.reshape(0, 5, &dims[0]);

    Net net = readNetFromONNX(onnxmodel);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // output range [-5, 11]
    float l1 = 0.0013;
    float lInf = 0.009;

    checkBackend(&input0, &ref0);
    net.setInput(input0);
    Mat out = net.forward().clone();
    normAssert(ref0, out, "", l1, lInf);

    checkBackend(&input1, &ref1);
    net.setInput(input1);
    out = net.forward().clone();
    normAssert(ref1, out, "", l1, lInf);

    expectNoFallbacksFromIE(net);
}

893 894 895
INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());

}} // namespace