// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.


#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/dnn/shape_utils.hpp>
namespace opencv_test { namespace {

// Resolve a test resource path below "dnn/onnx/" in the OpenCV test data tree.
// @param filename  path relative to dnn/onnx/ (String or const char*)
// @param required  when true, a missing file aborts the test with an error;
//                  when false, the test is skipped instead
template<typename TString>
static std::string _tf(TString filename, bool required = true)
{
    return findDataFile(std::string("dnn/onnx/") + filename, required);
}

class Test_ONNX_layers : public DNNTestLayer
{
public:
23 24 25 26
    bool required;

    Test_ONNX_layers() : required(true) { }

27 28 29 30 31 32
    enum Extension
    {
        npy,
        pb
    };

33
    void testONNXModels(const String& basename, const Extension ext = npy,
34
                        const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
D
Dmitry Kurtaev 已提交
35
                        bool checkNoFallbacks = true, int numInps = 1)
36
    {
37
        String onnxmodel = _tf("models/" + basename + ".onnx", required);
D
Dmitry Kurtaev 已提交
38 39
        std::vector<Mat> inps(numInps);
        Mat ref;
40
        if (ext == npy) {
D
Dmitry Kurtaev 已提交
41 42
            for (int i = 0; i < numInps; ++i)
                inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
43 44 45
            ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
        }
        else if (ext == pb) {
D
Dmitry Kurtaev 已提交
46 47
            for (int i = 0; i < numInps; ++i)
                inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
48 49 50 51 52
            ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
        }
        else
            CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");

D
Dmitry Kurtaev 已提交
53
        checkBackend(&inps[0], &ref);
54 55 56 57 58 59
        Net net = readNetFromONNX(onnxmodel);
        ASSERT_FALSE(net.empty());

        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);

60
        std::vector<String> inputNames;
D
Dmitry Kurtaev 已提交
61
        for (int i = 0; i < numInps; ++i)
62 63 64 65 66
            inputNames.push_back(format("%d", i));
        net.setInputsNames(inputNames);

        for (int i = 0; i < numInps; ++i)
            net.setInput(inps[i], inputNames[i]);
67 68 69 70 71 72 73 74 75 76 77 78 79 80 81
        Mat out = net.forward("");

        if (useSoftmax)
        {
            LayerParams lp;
            Net netSoftmax;
            netSoftmax.addLayerToPrev("softmaxLayer", "SoftMax", lp);
            netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);

            netSoftmax.setInput(out);
            out = netSoftmax.forward();

            netSoftmax.setInput(ref);
            ref = netSoftmax.forward();
        }
82
        normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
83 84
        if (checkNoFallbacks)
            expectNoFallbacksFromIE(net);
85 86 87
    }
};

88 89 90 91 92 93 94 95
TEST_P(Test_ONNX_layers, InstanceNorm)
{
    if (target == DNN_TARGET_MYRIAD)
        testONNXModels("instancenorm", npy, 0, 0, false, false);
    else
        testONNXModels("instancenorm", npy);
}

TEST_P(Test_ONNX_layers, MaxPooling)
{
    testONNXModels("maxpooling", npy, 0, 0, false, false);
    testONNXModels("two_maxpooling", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, Convolution)
{
    testONNXModels("convolution");
}

TEST_P(Test_ONNX_layers, Convolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("conv3d");
    testONNXModels("conv3d_bias");
}

TEST_P(Test_ONNX_layers, Two_convolution)
{
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    // Reference output values are in range [-0.855, 0.611]
    testONNXModels("two_convolution");
}

TEST_P(Test_ONNX_layers, Deconvolution)
{
    testONNXModels("deconvolution", npy, 0, 0, false, false);
    testONNXModels("two_deconvolution", npy, 0, 0, false, false);
    testONNXModels("deconvolution_group", npy, 0, 0, false, false);
    testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
    testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
}

139 140
TEST_P(Test_ONNX_layers, Deconvolution3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
        throw SkipTestException("Only DLIE backend on CPU is supported");
    testONNXModels("deconv3d");
    testONNXModels("deconv3d_bias");
    testONNXModels("deconv3d_pad");
    testONNXModels("deconv3d_adjpad");
}

TEST_P(Test_ONNX_layers, Dropout)
{
    testONNXModels("dropout");
}

TEST_P(Test_ONNX_layers, Linear)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    testONNXModels("linear");
}

TEST_P(Test_ONNX_layers, ReLU)
{
    testONNXModels("ReLU");
}

TEST_P(Test_ONNX_layers, Clip)
{
    testONNXModels("clip", npy);
}

TEST_P(Test_ONNX_layers, Shape)
{
    testONNXModels("shape_of_constant");
}

TEST_P(Test_ONNX_layers, ReduceMean)
{
    testONNXModels("reduce_mean");
}

TEST_P(Test_ONNX_layers, ReduceMean3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("reduce_mean3d");
}

TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
{
    testONNXModels("maxpooling_sigmoid");
}

TEST_P(Test_ONNX_layers, Cast)
{
    testONNXModels("cast");
}

205 206
TEST_P(Test_ONNX_layers, Concatenation)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("concatenation");
}

TEST_P(Test_ONNX_layers, Eltwise3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    testONNXModels("eltwise3d");
}

TEST_P(Test_ONNX_layers, AveragePooling)
{
    testONNXModels("average_pooling");
}

TEST_P(Test_ONNX_layers, MaxPooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("max_pool3d", npy, 0, 0, false, false);
}

TEST_P(Test_ONNX_layers, AvePooling3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("ave_pool3d");
}

261 262 263
TEST_P(Test_ONNX_layers, PoolConv3D)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");
    testONNXModels("pool_conv_3d");
}

TEST_P(Test_ONNX_layers, BatchNormalization)
{
    testONNXModels("batch_norm");
}

TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("batch_norm_3d");
}

TEST_P(Test_ONNX_layers, Transpose)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("transpose");
}

TEST_P(Test_ONNX_layers, Multiplication)
{
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("mul");
}

TEST_P(Test_ONNX_layers, Constant)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
       applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("constant");
}

D
Dmitry Kurtaev 已提交
321 322
TEST_P(Test_ONNX_layers, Padding)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("padding", npy, 0, 0, false, false);
#else
    testONNXModels("padding");
#endif
}

TEST_P(Test_ONNX_layers, Resize)
{
    testONNXModels("resize_nearest");
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("resize_bilinear");
}

TEST_P(Test_ONNX_layers, ResizeUnfused)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("upsample_unfused_torch1.2");
    testONNXModels("upsample_unfused_opset9_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.4");
    testONNXModels("resize_nearest_unfused_opset11_torch1.3");
}

TEST_P(Test_ONNX_layers, MultyInputs)
{
    testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Broadcast)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, DynamicResize)
{
    testONNXModels("dynamic_resize", npy, 0, 0, false, true, 2);
}

TEST_P(Test_ONNX_layers, Div)
{
    const String model =  _tf("models/div.onnx");
    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Reference output values range is -68.80928, 2.991873. So to avoid computational
    // difference for FP16 we'll perform reversed division (just swap inputs).
    Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
    Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
    Mat ref  = blobFromNPY(_tf("data/output_div.npy"));
    cv::divide(1.0, ref, ref);
    checkBackend(&inp1, &ref);

    net.setInput(inp1, "0");
    net.setInput(inp2, "1");
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

390 391
TEST_P(Test_ONNX_layers, DynamicReshape)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    testONNXModels("dynamic_reshape");
    testONNXModels("dynamic_reshape_opset_11");
    testONNXModels("flatten_by_prod");
    testONNXModels("flatten_const");
}

TEST_P(Test_ONNX_layers, Reshape)
{
    testONNXModels("unsqueeze");
}

TEST_P(Test_ONNX_layers, Squeeze)
{
    testONNXModels("squeeze");
}

TEST_P(Test_ONNX_layers, ReduceL2)
{
    testONNXModels("reduceL2");
}

TEST_P(Test_ONNX_layers, Split)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_1");
    testONNXModels("split_2");
    testONNXModels("split_3");
    testONNXModels("split_4");
}

TEST_P(Test_ONNX_layers, Slice)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    testONNXModels("slice", npy, 0, 0, false, false);
#else
    testONNXModels("slice");
    testONNXModels("slice_opset_11");
#endif
}

TEST_P(Test_ONNX_layers, Softmax)
{
    testONNXModels("softmax");
    testONNXModels("log_softmax", npy, 0, 0, false, false);
    testONNXModels("softmax_unfused");
}

TEST_P(Test_ONNX_layers, Split_EltwiseMax)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    testONNXModels("split_max");
}

TEST_P(Test_ONNX_layers, LSTM)
{
    testONNXModels("lstm");
}

TEST_P(Test_ONNX_layers, LSTM_bidirectional)
{
    testONNXModels("lstm_bidirectional");
}

INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());

466 467 468 469 470 471
// Fixture for full-network ONNX tests: unlike layer tests, model files are
// optional (large downloads), so missing models skip rather than fail.
class Test_ONNX_nets : public Test_ONNX_layers
{
public:
    Test_ONNX_nets() { required = false; }
};

472 473
TEST_P(Test_ONNX_nets, Alexnet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

    const String model =  _tf("models/alexnet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    Mat inp = imread(_tf("../grace_hopper_227.png"));
    Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(out, ref, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, Squeezenet)
{
    testONNXModels("squeezenet", pb);
}

TEST_P(Test_ONNX_nets, Googlenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);

    const String model = _tf("models/googlenet.onnx", false);

    Net net = readNetFromONNX(model);
    ASSERT_FALSE(net.empty());

    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // Batch of two images to exercise batched inference.
    std::vector<Mat> images;
    images.push_back( imread(_tf("../googlenet_0.png")) );
    images.push_back( imread(_tf("../googlenet_1.png")) );
    Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
    Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
    checkBackend(&inp, &ref);

    net.setInput(inp);
    ASSERT_FALSE(net.empty());
    Mat out = net.forward();

    normAssert(ref, out, "", default_l1,  default_lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(Test_ONNX_nets, CaffeNet)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    testONNXModels("caffenet", pb);
}

TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
{
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
#else
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
#endif

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
        && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    // Reference output values are in range [-4.992, -1.161]
    testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
}

TEST_P(Test_ONNX_nets, VGG16_bn)
{
    applyTestTag(CV_TEST_TAG_MEMORY_6GB);  // > 2.3Gb

    // output range: [-16; 27], after Softmax [0; 0.67]
    const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
    testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
}

TEST_P(Test_ONNX_nets, ZFNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_2GB);
    testONNXModels("zfnet512", pb);
}

TEST_P(Test_ONNX_nets, ResNet18v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-16; 22], after Softmax [0, 0.51]
    testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet50v1)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-67; 75], after Softmax [0, 0.98]
    testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
{
    applyTestTag(CV_TEST_TAG_VERYLONG);

#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
#endif
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
    {
        if (backend == DNN_BACKEND_OPENCV)
            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
        throw SkipTestException("Test is disabled for OpenCL targets");
    }
    testONNXModels("resnet101_duc_hdc", pb);
}

TEST_P(Test_ONNX_nets, TinyYolov2)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    if (cvtest::skipUnstableTests)
        throw SkipTestException("Skip unstable test");
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
            && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
    )
        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);

    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
    )
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    // output range: [-11; 8]
    double l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.017 : default_l1;
    double lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.14 : default_lInf;
    testONNXModels("tiny_yolo2", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, CNN_MNIST)
{
    // output range: [-1952; 6574], after Softmax [0; 1]
    testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, MobileNet_v2)
{
    // output range: [-166; 317], after Softmax [0; 1]
    testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, LResNet100E_IR)
{
    applyTestTag(
#if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
        CV_TEST_TAG_MEMORY_2GB,
#else
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
#endif
        CV_TEST_TAG_DEBUG_LONG
    );
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    }

    double l1 = default_l1;
    double lInf = default_lInf;
    // output range: [-3; 3]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) {
        l1 = 0.009;
        lInf = 0.035;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU) {
        l1 = 4.6e-5;
        lInf = 1.9e-4;
    }
    testONNXModels("LResNet100E_IR", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, Emotion_ferplus)
{
#if defined(INF_ENGINE_RELEASE)
    if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
                     backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
                     CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
                     CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
#endif

    double l1 = default_l1;
    double lInf = default_lInf;

    // Output values are in range [-2.011, 2.111]
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        l1 = 0.007;
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 0.021;
        lInf = 0.034;
    }
    else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
        l1 = 2.4e-4;
        lInf = 6e-4;
    }
    testONNXModels("emotion_ferplus", pb, l1, lInf);
}

TEST_P(Test_ONNX_nets, Inception_v2)
{
    testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
}

TEST_P(Test_ONNX_nets, DenseNet121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);

    // output range: [-87; 138], after Softmax [0; 1]
    testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
}

TEST_P(Test_ONNX_nets, Inception_v1)
{
#if defined(INF_ENGINE_RELEASE)
    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
         backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
    testONNXModels("inception_v1", pb);
}

TEST_P(Test_ONNX_nets, Shufflenet)
{
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
    {
        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_OPENCL)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
        if (target == DNN_TARGET_MYRIAD)      applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    }
    testONNXModels("shufflenet", pb);
}

758 759
TEST_P(Test_ONNX_nets, Resnet34_kinetics)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);  // Only CPU on DLIE backend is supported
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);  // Only CPU on DLIE backend is supported
    if (target != DNN_TARGET_CPU)
        throw SkipTestException("Only CPU is supported");

    String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
    Mat image0 = imread(findDataFile("dnn/dog416.png"));
    Mat image1 = imread(findDataFile("dnn/street.png"));

    Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
    Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));

    // Each clip is the same frame repeated 16 times (the model consumes
    // 16-frame video clips).
    std::vector<Mat> images_0(16, image0);
    std::vector<Mat> images_1(16, image1);
    Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
    Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);

    // Reorder NCHW -> CNHW via a Permute layer before reshaping to 5-D input.
    Net permute;
    LayerParams lp;
    int order[] = {1, 0, 2, 3};
    lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
    permute.addLayerToPrev("perm", "Permute", lp);

    permute.setPreferableBackend(backend);
    permute.setPreferableTarget(target);

    permute.setInput(blob0);
    Mat input0 = permute.forward().clone();

    permute.setInput(blob1);
    Mat input1 = permute.forward().clone();

    int dims[] = {1, 3, 16, 112, 112};
    input0 = input0.reshape(0, 5, &dims[0]);
    input1 = input1.reshape(0, 5, &dims[0]);

    Net net = readNetFromONNX(onnxmodel);
    ASSERT_FALSE(net.empty());
    net.setPreferableBackend(backend);
    net.setPreferableTarget(target);

    // output range [-5, 11]
    float l1 = 0.0013;
    float lInf = 0.009;

    checkBackend(&input0, &ref0);
    net.setInput(input0);
    Mat out = net.forward().clone();
    normAssert(ref0, out, "", l1, lInf);

    checkBackend(&input1, &ref1);
    net.setInput(input1);
    out = net.forward().clone();
    normAssert(ref1, out, "", l1, lInf);

    expectNoFallbacksFromIE(net);
}

INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());

}} // namespace