// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.


#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {

// Resolve a test-data file path under the "dnn/onnx/" test data directory.
// When 'required' is true, a missing file makes findDataFile fail the test.
template<typename TString>
static std::string _tf(TString filename, bool required = true)
{
    return findDataFile(std::string("dnn/onnx/") + filename, required);
}

class Test_ONNX_layers : public DNNTestLayer
{
public:
23 24 25 26
    bool required;

    Test_ONNX_layers() : required(true) { }

27 28 29 30 31 32
    enum Extension
    {
        npy,
        pb
    };

33
    void testONNXModels(const String& basename, const Extension ext = npy,
34
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
D
Dmitry Kurtaev 已提交
35
                        bool checkNoFallbacks = true, int numInps = 1)
36
    {
37
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
D
Dmitry Kurtaev 已提交
38 39
        std::vector<Mat> inps(numInps);
        Mat ref;
40
        if (ext == npy) {
D
Dmitry Kurtaev 已提交
41 42
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
43 44 45
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
D
Dmitry Kurtaev 已提交
46 47
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
48 49 50 51 52
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

D
Dmitry Kurtaev 已提交
53
        checkBackend(&inps[0], &ref);
54 55 56 57 58 59
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

60
        std::vector<String> inputNames;
D
Dmitry Kurtaev 已提交
61
        for (int i = 0; i < numInps; ++i)
62 63 64 65 66
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
67 68 69 70 71 72
        Mat out = net.forward("");

        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
D
Dmitry Kurtaev 已提交
73
            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
74 75 76 77 78 79 80 81
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
82
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
83 84
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
85 86 87
    }
};

88 89 90 91 92 93 94 95
// InstanceNormalization import; MYRIAD may fall back, so skip fallback check there.
TEST_P(Test_ONNX_layers, InstanceNorm)
{
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

TEST_P(Test_ONNX_layers, MaxPooling)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("maxpooling", npy, 0, 0, false, false);
}
TEST_P(Test_ONNX_layers, MaxPooling_2)
{
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}
// Convolution whose weights arrive as a second network input (not constants).
TEST_P(Test_ONNX_layers, Convolution_variable_weight)
{
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    String basename = "conv_variable_w";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Two iterations verify the net can be re-run with different weights.
    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

// Same as above but bias is a third network input.
TEST_P(Test_ONNX_layers, Convolution_variable_weight_bias)
{
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    String basename = "conv_variable_wb";
    Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    for (int i = 0; i < 2; i++)
    {
        Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
        Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
        Mat bias = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_2.npy"));
        Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));

        net.setInput(input, "0");
        net.setInput(weights, "1");
        net.setInput(bias, "bias");

        Mat out = net.forward();
        normAssert(ref, out, "", default_l1, default_lInf);
    }
}

TEST_P(Test_ONNX_layers, Gather)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("gather");
    // GPU plugin unsupported slice for constant
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("gather_scalar", npy, 0, 0, false, false);
}
// 3D convolution is CPU-only in this backend matrix.
TEST_P(Test_ONNX_layers, Convolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("conv3d");
    testONNXModels("conv3d_bias");
}

TEST_P(Test_ONNX_layers, Two_convolution)
{
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    // Reference output values are in range [-0.855, 0.611]
    testONNXModels("two_convolution");
}
// Transposed-convolution variants; fallback checks relaxed for all of them.
TEST_P(Test_ONNX_layers, Deconvolution)
{
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Deconvolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
        throw SkipTestException("Only DLIE backend on CPU is supported");
    testONNXModels("deconv3d");
    testONNXModels("deconv3d_bias");
    testONNXModels("deconv3d_pad");
    testONNXModels("deconv3d_adjpad");
}
// Simple single-layer import checks.
TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}
// ReduceMean over different axes, plus 3D variant (CPU-only).
TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
    testONNXModels("reduce_mean_axis1");
    testONNXModels("reduce_mean_axis2");
}

TEST_P(Test_ONNX_layers, ReduceMean3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("reduce_mean3d");
}

TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}
// Concat import; NN Builder backend unsupported on non-CPU targets.
TEST_P(Test_ONNX_layers, Concatenation)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("concatenation");
}

TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    testONNXModels("eltwise3d");
}

TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}
// 3D pooling/convolution combinations; all are CPU-only in this matrix.
TEST_P(Test_ONNX_layers, MaxPooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("ave_pool3d");
}

TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("pool_conv_3d");
}
// BatchNorm import variants: fused, 3D, unfused and subgraph forms.
TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("batch_norm_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
{
    testONNXModels("frozenBatchNorm2d");
}

TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
{
    testONNXModels("batch_norm_subgraph");
}
// Transpose and element-wise multiplication imports.
TEST_P(Test_ONNX_layers, Transpose)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("transpose");
}

TEST_P(Test_ONNX_layers, Multiplication)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("mul");
}
// MatMul for 2/3/4-D tensors, plus Expand (broadcast) variants.
TEST_P(Test_ONNX_layers, MatMul)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("matmul_2d");
    testONNXModels("matmul_3d");
    testONNXModels("matmul_4d");
}

TEST_P(Test_ONNX_layers, Expand)
{
    testONNXModels("expand_batch");
    testONNXModels("expand_channels");
}

TEST_P(Test_ONNX_layers, ExpandHW)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("expand_hw");
}
// Constant node import, and Pad (reference mode differs per IE release).
TEST_P(Test_ONNX_layers, Constant)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
       applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("constant");
}

TEST_P(Test_ONNX_layers, Padding)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("padding", npy, 0, 0, false, false);
#else
    testONNXModels("padding");
#endif
}
// Resize/Upsample imports, including unfused subgraph exports from PyTorch.
TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
}

TEST_P(Test_ONNX_layers, ResizeUnfused)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("upsample_unfused_torch1.2");
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
    testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
}

TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
    testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
}
// Multi-input networks (numInps = 2).
TEST_P(Test_ONNX_layers, MultyInputs)
{
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize", npy, 0, 0, false, true, 2);
}
// Element-wise division; inputs are swapped and the reference inverted to keep
// values in an FP16-friendly range.
TEST_P(Test_ONNX_layers, Div)
{
    const String model =  _tf("models/div.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Reference output values range is -68.80928, 2.991873. So to avoid computational
    // difference for FP16 we'll perform reversed division (just swap inputs).
    Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
    Mat ref  = blobFromNPY(_tf("data/output_div.npy"));
    cv::divide(1.0, ref, ref);
    checkBackend(&inp1, &ref);

    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}
// Reshape-family imports: dynamic shapes, unsqueeze, squeeze, ReduceL2, Split.
TEST_P(Test_ONNX_layers, DynamicReshape)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("dynamic_reshape");
    testONNXModels("dynamic_reshape_opset_11");
    testONNXModels("flatten_by_prod");
    testONNXModels("flatten_const");
}

TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}

TEST_P(Test_ONNX_layers, Squeeze)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("squeeze");
}

TEST_P(Test_ONNX_layers, ReduceL2)
{
    testONNXModels("reduceL2");
    testONNXModels("reduceL2_subgraph");
    testONNXModels("reduceL2_subgraph_2");
}

TEST_P(Test_ONNX_layers, Split)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_1");
    testONNXModels("split_2");
    testONNXModels("split_3");
    testONNXModels("split_4");
}
// Slice (opset-dependent), Softmax variants, and Split feeding Eltwise max.
TEST_P(Test_ONNX_layers, Slice)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("slice", npy, 0, 0, false, false);
#else
    testONNXModels("slice");
    testONNXModels("slice_opset_11");
#endif
}

TEST_P(Test_ONNX_layers, Softmax)
{
    testONNXModels("softmax");
    testONNXModels("log_softmax", npy, 0, 0, false, false);
    testONNXModels("softmax_unfused");
}

TEST_P(Test_ONNX_layers, Split_EltwiseMax)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_max");
}
// Recurrent layers and reflection/zero padding subgraphs.
TEST_P(Test_ONNX_layers, LSTM)
{
    testONNXModels("lstm", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, LSTM_bidirectional)
{
    testONNXModels("lstm_bidirectional", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Pad2d_Unfused)
{
    testONNXModels("ReflectionPad2d");
    testONNXModels("ZeroPad2d");
}
// Run every Test_ONNX_layers test across all available backend/target pairs.
INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());

// Whole-network tests: model files are optional (downloaded separately),
// so 'required' is relaxed to avoid hard failures on missing data.
class Test_ONNX_nets : public Test_ONNX_layers
{
public:
    Test_ONNX_nets() { required = false; }
};

// AlexNet classification against Caffe reference probabilities.
TEST_P(Test_ONNX_nets, Alexnet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

    const String model =  _tf("models/alexnet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(out, ref, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}
TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}

// GoogLeNet on a two-image batch against Caffe reference probabilities.
TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    const String model = _tf("models/googlenet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}
// CaffeNet with protobuf reference tensors; MYRIAD X skipped on IE 2019R3.
TEST_P(Test_ONNX_nets, CaffeNet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("caffenet", pb);
}
// R-CNN (ILSVRC13) with relaxed l1 tolerance.
TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
}
// Large classification nets compared through Softmax where outputs are logits.
TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3Gb

    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}

TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
    testONNXModels("zfnet512", pb);
}

TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}
// Very long-running segmentation net; skipped on OpenCL and NN Builder targets.
TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
    {
        if (backend == DNN_BACKEND_OPENCV)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
        throw SkipTestException("Test is disabled for OpenCL targets");
    }
    testONNXModels("resnet101_duc_hdc", pb);
}
// Tiny YOLO v2 detection with per-target tolerances.
TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
            && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
    )
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.018f; lInf = 0.16f;
    }
#endif

    testONNXModels("tiny_yolo2", pb, l1, lInf);
}
// Classification nets with wide logit ranges; compared after Softmax.
TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}
// Face-recognition net with backend-specific tolerances.
TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
#endif
        CV_TEST_TAG_DEBUG_LONG
    );
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    }

    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}
// Emotion FER+ classification network; per-backend/target accuracy thresholds
// tuned to the known output range, Myriad X skipped on IE builds.
TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    double l1 = default_l1;
    double lInf = default_lInf;

    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.012f; lInf = 0.035f;
    }
#endif

    testONNXModels("emotion_ferplus", pb, l1, lInf);
}

// Inception v2 classification; Softmax appended (useSoftmax=true) before comparison.
TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}

// DenseNet-121 classification; fallback check disabled on Myriad where
// partial IE offload is expected.
TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

881 882
TEST_P(Test_ONNX_nets, Inception_v1)
{
883
#if defined(INF_ENGINE_RELEASE)
884 885
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
886
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
887
#endif
888 889
    testONNXModels("inception_v1", pb);
}
890

891 892
TEST_P(Test_ONNX_nets, Shufflenet)
{
893
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
894
    {
895 896 897
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
898
    }
899 900 901
    testONNXModels("shufflenet", pb);
}

902 903
TEST_P(Test_ONNX_nets, Resnet34_kinetics)
{
904
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
905
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
906
#endif
907 908 909 910
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
911 912
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
913

A
Alexander Alekhin 已提交
914
    String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931
    Mat image0 = imread(findDataFile("dnn/dog416.png"));
    Mat image1 = imread(findDataFile("dnn/street.png"));

    Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
    Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));

    std::vector<Mat> images_0(16, image0);
    std::vector<Mat> images_1(16, image1);
    Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
    Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);

    Net permute;
    LayerParams lp;
    int order[] = {1, 0, 2, 3};
    lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
    permute.addLayerToPrev("perm", "Permute", lp);

932 933 934
    permute.setPreferableBackend(backend);
    permute.setPreferableTarget(target);

935 936 937 938 939 940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 966
    permute.setInput(blob0);
    Mat input0 = permute.forward().clone();

    permute.setInput(blob1);
    Mat input1 = permute.forward().clone();

    int dims[] = {1, 3, 16, 112, 112};
    input0 = input0.reshape(0, 5, &dims[0]);
    input1 = input1.reshape(0, 5, &dims[0]);

    Net net = readNetFromONNX(onnxmodel);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // output range [-5, 11]
    float l1 = 0.0013;
    float lInf = 0.009;

    checkBackend(&input0, &ref0);
    net.setInput(input0);
    Mat out = net.forward().clone();
    normAssert(ref0, out, "", l1, lInf);

    checkBackend(&input1, &ref1);
    net.setInput(input1);
    out = net.forward().clone();
    normAssert(ref1, out, "", l1, lInf);

    expectNoFallbacksFromIE(net);
}

967 968 969
INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());

}} // namespace