// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {

template<typename TString>
15
static std::string _tf(TString filename, bool required = true)
16
{
17
    return findDataFile(std::string("dnn/onnx/") + filename, required);
18 19 20 21 22
}

class Test_ONNX_layers : public DNNTestLayer
{
public:
23 24 25 26
    bool required;

    Test_ONNX_layers() : required(true) { }

27 28 29 30 31 32
    enum Extension
    {
        npy,
        pb
    };

33
    void testONNXModels(const String& basename, const Extension ext = npy,
34
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
D
Dmitry Kurtaev 已提交
35
                        bool checkNoFallbacks = true, int numInps = 1)
36
    {
37
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
D
Dmitry Kurtaev 已提交
38 39
        std::vector<Mat> inps(numInps);
        Mat ref;
40
        if (ext == npy) {
D
Dmitry Kurtaev 已提交
41 42
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
43 44 45
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
D
Dmitry Kurtaev 已提交
46 47
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
48 49 50 51 52
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

D
Dmitry Kurtaev 已提交
53
        checkBackend(&inps[0], &ref);
54 55 56 57 58 59
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

60
        std::vector<String> inputNames;
D
Dmitry Kurtaev 已提交
61
        for (int i = 0; i < numInps; ++i)
62 63 64 65 66
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
67 68 69 70 71 72
        Mat out = net.forward("");

        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
D
Dmitry Kurtaev 已提交
73
            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
74 75 76 77 78 79 80 81
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
82
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
83 84
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
85 86 87
    }
};

TEST_P(Test_ONNX_layers, InstanceNorm)
{
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

TEST_P(Test_ONNX_layers, MaxPooling)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, MaxPooling_2)
{
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}

// Convolution whose weights arrive as a second network input (not constants);
// run twice with different tensors to verify the importer does not bake them in.
TEST_P(Test_ONNX_layers, Convolution_variable_weight)
{
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    String basename = "conv_variable_w";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

// Same as above, but the bias is also a (named) network input.
TEST_P(Test_ONNX_layers, Convolution_variable_weight_bias)
{
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    String basename = "conv_variable_wb";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat bias = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_2.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");
        net.setInput(bias, "bias");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

170 171 172 173 174 175 176 177 178 179 180
TEST_P(Test_ONNX_layers, Gather)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("gather");
    // GPU plugin unsupported slice for constant
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("gather_scalar", npy, 0, 0, false, false);
}

181 182
TEST_P(Test_ONNX_layers, Convolution3D)
{
183
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
184
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
185
#endif
186 187
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
188 189 190
    testONNXModels("conv3d");
    testONNXModels("conv3d_bias");
}
191 192 193

TEST_P(Test_ONNX_layers, Two_convolution)
{
194
#if defined(INF_ENGINE_RELEASE)
195
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
196 197
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
198
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
199 200
#endif
    // Reference output values are in range [-0.855, 0.611]
201 202 203
    testONNXModels("two_convolution");
}

204 205
TEST_P(Test_ONNX_layers, Deconvolution)
{
206 207 208 209 210
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
211 212
}

213 214
TEST_P(Test_ONNX_layers, Deconvolution3D)
{
215 216
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
217
#endif
218
    if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
219 220 221 222 223 224 225
        throw SkipTestException("Only DLIE backend on CPU is supported");
    testONNXModels("deconv3d");
    testONNXModels("deconv3d_bias");
    testONNXModels("deconv3d_pad");
    testONNXModels("deconv3d_adjpad");
}

TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

248 249 250 251 252
TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}

253 254 255
TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
256 257
    testONNXModels("reduce_mean_axis1");
    testONNXModels("reduce_mean_axis2");
258 259
}

260 261 262 263 264
TEST_P(Test_ONNX_layers, ReduceSum)
{
    testONNXModels("reduce_sum");
}

L
Liubov Batanina 已提交
265 266 267 268 269
TEST_P(Test_ONNX_layers, ReduceMaxGlobal)
{
    testONNXModels("reduce_max");
}

270 271
TEST_P(Test_ONNX_layers, ReduceMean3D)
{
272 273 274 275
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
276 277 278 279 280
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("reduce_mean3d");
}

281 282 283 284 285
TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

286 287 288 289 290
TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}

L
Liubov Batanina 已提交
291 292 293 294 295
TEST_P(Test_ONNX_layers, Power)
{
    testONNXModels("pow2", npy, 0, 0, false, false);
}

296 297
TEST_P(Test_ONNX_layers, Concatenation)
{
298
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
299
    {
300 301 302
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
303
    }
304 305 306
    testONNXModels("concatenation");
}

307 308 309
TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
310
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
311
#endif
312 313 314 315
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
316 317 318
    testONNXModels("eltwise3d");
}

319 320 321 322 323
TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}

TEST_P(Test_ONNX_layers, MaxPooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("ave_pool3d");
}

TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("pool_conv_3d");
}

366 367 368 369 370
TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

L
Liubov Batanina 已提交
371 372
TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
373
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
374
    {
375 376 377
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
378
    }
L
Liubov Batanina 已提交
379 380 381
    testONNXModels("batch_norm_3d");
}

382 383 384 385 386
TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
{
    testONNXModels("frozenBatchNorm2d");
}

387 388 389 390 391
TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
{
    testONNXModels("batch_norm_subgraph");
}

392 393
TEST_P(Test_ONNX_layers, Transpose)
{
394
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
395
    {
396 397 398
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
399
    }
400 401 402
    testONNXModels("transpose");
}

403 404
TEST_P(Test_ONNX_layers, Multiplication)
{
405 406
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
407 408
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
409 410 411
    testONNXModels("mul");
}

412 413 414 415 416 417 418 419 420 421
TEST_P(Test_ONNX_layers, MatMul)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("matmul_2d");
    testONNXModels("matmul_3d");
    testONNXModels("matmul_4d");
}

TEST_P(Test_ONNX_layers, MatMulAdd)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("matmul_add");
}

TEST_P(Test_ONNX_layers, Expand)
{
    testONNXModels("expand_batch");
    testONNXModels("expand_channels");
    testONNXModels("expand_neg_batch");
}

TEST_P(Test_ONNX_layers, ExpandHW)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("expand_hw");
}

445 446
TEST_P(Test_ONNX_layers, Constant)
{
447
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
448
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
449
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
450
       applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
451
#endif
452 453 454
    testONNXModels("constant");
}

D
Dmitry Kurtaev 已提交
455 456
TEST_P(Test_ONNX_layers, Padding)
{
457 458 459
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("padding", npy, 0, 0, false, false);
#else
D
Dmitry Kurtaev 已提交
460
    testONNXModels("padding");
461
#endif
D
Dmitry Kurtaev 已提交
462 463
}

464 465 466
TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
467 468 469
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
470 471
}

472 473
TEST_P(Test_ONNX_layers, ResizeUnfused)
{
474 475
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
476
    testONNXModels("upsample_unfused_torch1.2");
477 478 479
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
480
    testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
481 482
}

483 484 485 486 487 488 489 490 491 492
TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
    testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
}

493 494
TEST_P(Test_ONNX_layers, MultyInputs)
{
D
Dmitry Kurtaev 已提交
495 496
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}
497

D
Dmitry Kurtaev 已提交
498 499 500 501 502
TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
503 504
}

505 506 507 508 509
TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize", npy, 0, 0, false, true, 2);
}

510 511 512 513 514 515 516 517 518
TEST_P(Test_ONNX_layers, Div)
{
    const String model =  _tf("models/div.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

519 520 521 522
    // Reference output values range is -68.80928, 2.991873. So to avoid computational
    // difference for FP16 we'll perform reversed division (just swap inputs).
    Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
523
    Mat ref  = blobFromNPY(_tf("data/output_div.npy"));
524
    cv::divide(1.0, ref, ref);
525 526 527 528 529 530 531 532 533 534
    checkBackend(&inp1, &ref);

    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

535 536
TEST_P(Test_ONNX_layers, DynamicReshape)
{
537
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
538 539
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

540
    testONNXModels("dynamic_reshape");
541 542
    testONNXModels("dynamic_reshape_opset_11");
    testONNXModels("flatten_by_prod");
543
    testONNXModels("flatten_const");
544
}
545

546 547 548 549 550
TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}

TEST_P(Test_ONNX_layers, Squeeze)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("squeeze");
}

TEST_P(Test_ONNX_layers, ReduceL2)
{
    testONNXModels("reduceL2");
    testONNXModels("reduceL2_subgraph");
    testONNXModels("reduceL2_subgraph_2");
}

TEST_P(Test_ONNX_layers, Split)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_1");
    testONNXModels("split_2");
    testONNXModels("split_3");
    testONNXModels("split_4");
}

577 578
TEST_P(Test_ONNX_layers, Slice)
{
579 580 581
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("slice", npy, 0, 0, false, false);
#else
582
    testONNXModels("slice");
583
    testONNXModels("slice_opset_11");
584
#endif
585 586
}

D
dianlujitao 已提交
587 588 589
TEST_P(Test_ONNX_layers, Softmax)
{
    testONNXModels("softmax");
D
Dmitry Kurtaev 已提交
590
    testONNXModels("log_softmax", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
591
    testONNXModels("softmax_unfused");
D
dianlujitao 已提交
592 593
}

594 595
TEST_P(Test_ONNX_layers, Split_EltwiseMax)
{
596 597 598 599
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
600 601 602
    testONNXModels("split_max");
}

D
Dmitry Kurtaev 已提交
603 604
TEST_P(Test_ONNX_layers, LSTM)
{
605
    testONNXModels("lstm", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
606 607
}

D
Dmitry Kurtaev 已提交
608 609
TEST_P(Test_ONNX_layers, LSTM_bidirectional)
{
610
    testONNXModels("lstm_bidirectional", npy, 0, 0, false, false);
D
Dmitry Kurtaev 已提交
611 612
}

613 614 615 616 617 618
TEST_P(Test_ONNX_layers, Pad2d_Unfused)
{
    testONNXModels("ReflectionPad2d");
    testONNXModels("ZeroPad2d");
}

TEST_P(Test_ONNX_layers, LinearWithConstant)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
#endif
    testONNXModels("lin_with_constant");
}

TEST_P(Test_ONNX_layers, MatmulWithTwoInputs)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
#endif
    testONNXModels("matmul_with_two_inputs");
}

TEST_P(Test_ONNX_layers, ResizeOpset11_Torch1_6)
{
    testONNXModels("resize_opset11_torch1.6");
}

644 645
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());

646 647 648 649 650 651
class Test_ONNX_nets : public Test_ONNX_layers
{
public:
    Test_ONNX_nets() { required = false; }
};

TEST_P(Test_ONNX_nets, Alexnet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

    const String model =  _tf("models/alexnet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(out, ref, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}

TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    const String model = _tf("models/googlenet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Batch of two images to also exercise batched inference.
    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, CaffeNet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("caffenet", pb);
}

TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
}

TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3Gb

    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}

TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
    testONNXModels("zfnet512", pb);
}

TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
    {
        if (backend == DNN_BACKEND_OPENCV)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
        throw SkipTestException("Test is disabled for OpenCL targets");
    }
    testONNXModels("resnet101_duc_hdc", pb);
}

TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
            && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
    )
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.018f; lInf = 0.16f;
    }
#endif

    testONNXModels("tiny_yolo2", pb, l1, lInf);
}

// Small MNIST CNN; raw logits are huge, so compare after Softmax
// (useSoftmax=true squashes outputs into [0; 1] before normAssert).
TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}

// MobileNetV2 classifier; compared after Softmax to tame the wide logit range.
TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}

// LResNet100E-IR face-recognition model. Memory tag depends on target/build;
// IE backends skip unsupported targets; tolerances relaxed per backend/target.
TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
#endif
        CV_TEST_TAG_DEBUG_LONG
    );
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    }

    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}

// FER+ emotion-recognition model; per-backend tolerance tuning,
// Myriad X skipped on IE backends.
TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    double l1 = default_l1;
    double lInf = default_lInf;

    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.012f; lInf = 0.035f;
    }
#endif

    testONNXModels("emotion_ferplus", pb, l1, lInf);
}

// Inception v2 classifier; compared after Softmax (useSoftmax=true).
TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}

// DenseNet-121 classifier; compared after Softmax. Fallback checks are
// disabled on Myriad (last argument false there).
TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

931 932
TEST_P(Test_ONNX_nets, Inception_v1)
{
933
#if defined(INF_ENGINE_RELEASE)
934 935
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
936
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
937
#endif
938 939
    testONNXModels("inception_v1", pb);
}
940

941 942
TEST_P(Test_ONNX_nets, Shufflenet)
{
943
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
944
    {
945 946 947
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
948
    }
949 950 951
    testONNXModels("shufflenet", pb);
}

952 953
TEST_P(Test_ONNX_nets, Resnet34_kinetics)
{
954
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
955
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
956
#endif
957 958 959 960
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
961 962
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
963

A
Alexander Alekhin 已提交
964
    String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
965 966 967 968 969 970 971 972 973 974 975 976 977 978 979 980 981
    Mat image0 = imread(findDataFile("dnn/dog416.png"));
    Mat image1 = imread(findDataFile("dnn/street.png"));

    Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
    Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));

    std::vector<Mat> images_0(16, image0);
    std::vector<Mat> images_1(16, image1);
    Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
    Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);

    Net permute;
    LayerParams lp;
    int order[] = {1, 0, 2, 3};
    lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
    permute.addLayerToPrev("perm", "Permute", lp);

982 983 984
    permute.setPreferableBackend(backend);
    permute.setPreferableTarget(target);

985 986 987 988 989 990 991 992 993 994 995 996 997 998 999 1000 1001 1002 1003 1004 1005 1006 1007 1008 1009 1010 1011 1012 1013 1014 1015 1016
    permute.setInput(blob0);
    Mat input0 = permute.forward().clone();

    permute.setInput(blob1);
    Mat input1 = permute.forward().clone();

    int dims[] = {1, 3, 16, 112, 112};
    input0 = input0.reshape(0, 5, &dims[0]);
    input1 = input1.reshape(0, 5, &dims[0]);

    Net net = readNetFromONNX(onnxmodel);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // output range [-5, 11]
    float l1 = 0.0013;
    float lInf = 0.009;

    checkBackend(&input0, &ref0);
    net.setInput(input0);
    Mat out = net.forward().clone();
    normAssert(ref0, out, "", l1, lInf);

    checkBackend(&input1, &ref1);
    net.setInput(input1);
    out = net.forward().clone();
    normAssert(ref1, out, "", l1, lInf);

    expectNoFallbacksFromIE(net);
}

1017 1018 1019
INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());

}} // namespace