// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.


#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {

// Resolve a test resource path below the "dnn/onnx/" test-data directory.
// When 'required' is true, findDataFile() fails the test if the file is absent;
// otherwise a missing file is tolerated (used for optional large models).
template<typename TString>
static std::string _tf(TString filename, bool required = true)
{
    return findDataFile(std::string("dnn/onnx/") + filename, required);
}

// Fixture for single-layer / small-graph ONNX import tests.
// Parameterized (via DNNTestLayer) over backend/target pairs.
class Test_ONNX_layers : public DNNTestLayer
{
public:
    // Whether missing model files are treated as test failures
    // (Test_ONNX_nets relaxes this to allow optional large models).
    bool required;

    Test_ONNX_layers() : required(true) { }

    // Format of the reference input/output tensors on disk.
    enum Extension
    {
        npy,  // NumPy .npy blobs
        pb    // ONNX TensorProto .pb files
    };

    // Load models/<basename>.onnx, run it on the reference input(s) and
    // compare with the reference output.
    //   l1/lInf          - tolerance overrides; 0 means "use the fixture defaults"
    //   useSoftmax       - pass both output and reference through a Softmax layer
    //                      before comparison (reduces FP16 score differences)
    //   checkNoFallbacks - additionally require that IE backends ran without
    //                      falling back to the OpenCV implementation
    //   numInps          - number of network inputs; multi-input models use
    //                      files suffixed "_0", "_1", ...
    void testONNXModels(const String& basename, const Extension ext = npy,
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
                        bool checkNoFallbacks = true, int numInps = 1)
    {
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
        std::vector<Mat> inps(numInps);
        Mat ref;
        if (ext == npy) {
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

        // May skip the test for unsupported backend/target combinations.
        checkBackend(&inps[0], &ref);
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

        // Inputs are named "0", "1", ... to match the importer's convention.
        std::vector<String> inputNames;
        for (int i = 0; i < numInps; ++i)
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
        Mat out = net.forward("");

        if (useSoftmax)
        {
            // Normalize both tensors with a CPU Softmax before comparing.
            LayerParams lp;
            Net netSoftmax;
            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
    }
};

TEST_P(Test_ONNX_layers, InstanceNorm)
{
    // On MYRIAD, IE fallbacks are tolerated (checkNoFallbacks=false).
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

TEST_P(Test_ONNX_layers, MaxPooling)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // checkNoFallbacks=false: IE fallbacks are tolerated for this model.
    testONNXModels("maxpooling", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, MaxPooling_2)
{
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}

TEST_P(Test_ONNX_layers, Convolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("conv3d");
    testONNXModels("conv3d_bias");
}
124 125 126

TEST_P(Test_ONNX_layers, Two_convolution)
{
127
#if defined(INF_ENGINE_RELEASE)
128
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
129 130
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
131
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
132 133
#endif
    // Reference output values are in range [-0.855, 0.611]
134 135 136
    testONNXModels("two_convolution");
}

TEST_P(Test_ONNX_layers, Deconvolution)
{
    // checkNoFallbacks=false for all deconvolution variants.
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Deconvolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
        throw SkipTestException("Only DLIE backend on CPU is supported");
    testONNXModels("deconv3d");
    testONNXModels("deconv3d_bias");
    testONNXModels("deconv3d_pad");
    testONNXModels("deconv3d_adjpad");
}

TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}

TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
    testONNXModels("reduce_mean_axis1");
    testONNXModels("reduce_mean_axis2");
}

TEST_P(Test_ONNX_layers, ReduceMean3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("reduce_mean3d");
}

TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}

TEST_P(Test_ONNX_layers, Concatenation)
{
    // NN Builder backend is skipped on all non-CPU targets.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("concatenation");
}

TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    testONNXModels("eltwise3d");
}

TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}

TEST_P(Test_ONNX_layers, MaxPooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("ave_pool3d");
}

TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("pool_conv_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
    // NN Builder backend is skipped on all non-CPU targets.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("batch_norm_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
{
    testONNXModels("frozenBatchNorm2d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
{
    testONNXModels("batch_norm_subgraph");
}

TEST_P(Test_ONNX_layers, Transpose)
{
    // NN Builder backend is skipped on all non-CPU targets.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("transpose");
}

TEST_P(Test_ONNX_layers, Multiplication)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("mul");
}

TEST_P(Test_ONNX_layers, MatMul)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("matmul_2d");
    testONNXModels("matmul_3d");
    testONNXModels("matmul_4d");
}

TEST_P(Test_ONNX_layers, Expand)
{
    testONNXModels("expand_batch");
    testONNXModels("expand_channels");
}

TEST_P(Test_ONNX_layers, ExpandHW)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("expand_hw");
}

TEST_P(Test_ONNX_layers, Constant)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
       applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("constant");
}

TEST_P(Test_ONNX_layers, Padding)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    // Older IE releases: tolerate fallbacks (checkNoFallbacks=false).
    testONNXModels("padding", npy, 0, 0, false, false);
#else
    testONNXModels("padding");
#endif
}

TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
}

TEST_P(Test_ONNX_layers, ResizeUnfused)
{
    // "Unfused" variants come from PyTorch exports where the resize is
    // expressed as a subgraph rather than a single op.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("upsample_unfused_torch1.2");
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
    testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
}

TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    // numInps=2: models take two inputs (input_*_0.npy, input_*_1.npy).
    testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
    testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, MultyInputs)
{
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}
TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Div)
{
    const String model =  _tf("models/div.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Reference output values range is -68.80928, 2.991873. So to avoid computational
    // difference for FP16 we'll perform reversed division (just swap inputs).
    Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
    Mat ref  = blobFromNPY(_tf("data/output_div.npy"));
    // Invert the reference to match the swapped-input division above.
    cv::divide(1.0, ref, ref);
    checkBackend(&inp1, &ref);

    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_layers, DynamicReshape)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("dynamic_reshape");
    testONNXModels("dynamic_reshape_opset_11");
    testONNXModels("flatten_by_prod");
    testONNXModels("flatten_const");
}

TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}

TEST_P(Test_ONNX_layers, Squeeze)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("squeeze");
}

TEST_P(Test_ONNX_layers, ReduceL2)
{
    testONNXModels("reduceL2");
    testONNXModels("reduceL2_subgraph");
    testONNXModels("reduceL2_subgraph_2");
}

TEST_P(Test_ONNX_layers, Split)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_1");
    testONNXModels("split_2");
    testONNXModels("split_3");
    testONNXModels("split_4");
}

TEST_P(Test_ONNX_layers, Slice)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    // Older IE releases: tolerate fallbacks (checkNoFallbacks=false).
    testONNXModels("slice", npy, 0, 0, false, false);
#else
    testONNXModels("slice");
    testONNXModels("slice_opset_11");
#endif
}

TEST_P(Test_ONNX_layers, Softmax)
{
    testONNXModels("softmax");
    testONNXModels("log_softmax", npy, 0, 0, false, false);
    testONNXModels("softmax_unfused");
}

TEST_P(Test_ONNX_layers, Split_EltwiseMax)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_max");
}

TEST_P(Test_ONNX_layers, LSTM)
{
    testONNXModels("lstm", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, LSTM_bidirectional)
{
    testONNXModels("lstm_bidirectional", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Pad2d_Unfused)
{
    testONNXModels("ReflectionPad2d");
    testONNXModels("ZeroPad2d");
}

INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());

// Fixture for whole-network ONNX import tests. Model files are optional
// (required=false): missing large models skip instead of failing.
class Test_ONNX_nets : public Test_ONNX_layers
{
public:
    Test_ONNX_nets() { required = false; }
};

TEST_P(Test_ONNX_nets, Alexnet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

    const String model =  _tf("models/alexnet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Reference probabilities were produced by the Caffe AlexNet model.
    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(out, ref, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}

TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    const String model = _tf("models/googlenet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Batch of two images to exercise batched inference.
    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, CaffeNet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("caffenet", pb);
}

TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
}

TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3Gb

    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}

TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
    testONNXModels("zfnet512", pb);
}

TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
    {
        if (backend == DNN_BACKEND_OPENCV)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
        throw SkipTestException("Test is disabled for OpenCL targets");
    }
    testONNXModels("resnet101_duc_hdc", pb);
}

TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
            && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
    )
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    // Slightly relaxed tolerances for nGraph + OpenCL FP16 on IE 2020.4.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.018f; lInf = 0.16f;
    }
#endif

    testONNXModels("tiny_yolo2", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
#endif
        CV_TEST_TAG_DEBUG_LONG
    );
    // Both IE backends are skipped on all non-CPU targets.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    }

    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    double l1 = default_l1;
    double lInf = default_lInf;

    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    // Relaxed tolerances for nGraph + OpenCL FP16 on IE 2020.4.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.012f; lInf = 0.035f;
    }
#endif

    testONNXModels("emotion_ferplus", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, Inception_v1)
{
#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
    testONNXModels("inception_v1", pb);
}
823

824 825
TEST_P(Test_ONNX_nets, Shufflenet)
{
    // NN Builder backend is skipped on all non-CPU targets.
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("shufflenet", pb);
}

// 3D ResNet-34 action recognition: each input is a 16-frame clip built by
// repeating a still image, permuted and reshaped to a 5D NCDHW blob.
TEST_P(Test_ONNX_nets, Resnet34_kinetics)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");

    String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
    Mat image0 = imread(findDataFile("dnn/dog416.png"));
    Mat image1 = imread(findDataFile("dnn/street.png"));

    Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
    Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));

    // Build a 16-frame "clip" from each still image.
    std::vector<Mat> images_0(16, image0);
    std::vector<Mat> images_1(16, image1);
    Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
    Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);

    // Use a Permute layer to swap the first two axes of the 4D blob,
    // then reshape the result to the 5D input the model expects.
    Net permute;
    LayerParams lp;
    int order[] = {1, 0, 2, 3};
    lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
    permute.addLayerToPrev("perm", "Permute", lp);

    permute.setPreferableBackend(backend);
    permute.setPreferableTarget(target);

    permute.setInput(blob0);
    Mat input0 = permute.forward().clone();

    permute.setInput(blob1);
    Mat input1 = permute.forward().clone();

    int dims[] = {1, 3, 16, 112, 112};
    input0 = input0.reshape(0, 5, &dims[0]);
    input1 = input1.reshape(0, 5, &dims[0]);

    Net net = readNetFromONNX(onnxmodel);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // output range [-5, 11]
    float l1 = 0.0013;
    float lInf = 0.009;

    checkBackend(&input0, &ref0);
    net.setInput(input0);
    Mat out = net.forward().clone();
    normAssert(ref0, out, "", l1, lInf);

    checkBackend(&input1, &ref1);
    net.setInput(input1);
    out = net.forward().clone();
    normAssert(ref1, out, "", l1, lInf);

    expectNoFallbacksFromIE(net);
}

INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());

}} // namespace