Skip to content

Commit b95e93c

Browse files
committed
Merge remote-tracking branch 'upstream/3.4' into merge-3.4
2 parents b58b2c5 + 24790e4 commit b95e93c

43 files changed

Lines changed: 895 additions & 355 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

modules/calib3d/CMakeLists.txt

Lines changed: 3 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -3,4 +3,6 @@ set(debug_modules "")
33
if(DEBUG_opencv_calib3d)
44
list(APPEND debug_modules opencv_highgui)
55
endif()
6-
ocv_define_module(calib3d opencv_imgproc opencv_features2d opencv_flann ${debug_modules} WRAP java python)
6+
ocv_define_module(calib3d opencv_imgproc opencv_features2d opencv_flann ${debug_modules}
7+
WRAP java python js
8+
)

modules/core/include/opencv2/core/hal/intrin_neon.hpp

Lines changed: 16 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -875,13 +875,27 @@ OPENCV_HAL_IMPL_NEON_ROTATE_OP(v_int64x2, s64)
875875
OPENCV_HAL_IMPL_NEON_ROTATE_OP(v_float64x2, f64)
876876
#endif
877877

878+
#if defined(__clang__) && defined(__aarch64__)
879+
// avoid LD2 instruction. details: https://github.com/opencv/opencv/issues/14863
880+
#define OPENCV_HAL_IMPL_NEON_LOAD_LOW_OP(_Tpvec, _Tp, suffix) \
881+
inline _Tpvec v_load_low(const _Tp* ptr) \
882+
{ \
883+
typedef uint64 CV_DECL_ALIGNED(1) unaligned_uint64; \
884+
uint64 v = *(unaligned_uint64*)ptr; \
885+
return _Tpvec(v_reinterpret_as_##suffix(v_uint64x2(v, (uint64)123456))); \
886+
}
887+
#else
888+
#define OPENCV_HAL_IMPL_NEON_LOAD_LOW_OP(_Tpvec, _Tp, suffix) \
889+
inline _Tpvec v_load_low(const _Tp* ptr) \
890+
{ return _Tpvec(vcombine_##suffix(vld1_##suffix(ptr), vdup_n_##suffix((_Tp)0))); }
891+
#endif
892+
878893
#define OPENCV_HAL_IMPL_NEON_LOADSTORE_OP(_Tpvec, _Tp, suffix) \
879894
inline _Tpvec v_load(const _Tp* ptr) \
880895
{ return _Tpvec(vld1q_##suffix(ptr)); } \
881896
inline _Tpvec v_load_aligned(const _Tp* ptr) \
882897
{ return _Tpvec(vld1q_##suffix(ptr)); } \
883-
inline _Tpvec v_load_low(const _Tp* ptr) \
884-
{ return _Tpvec(vcombine_##suffix(vld1_##suffix(ptr), vdup_n_##suffix((_Tp)0))); } \
898+
OPENCV_HAL_IMPL_NEON_LOAD_LOW_OP(_Tpvec, _Tp, suffix) \
885899
inline _Tpvec v_load_halves(const _Tp* ptr0, const _Tp* ptr1) \
886900
{ return _Tpvec(vcombine_##suffix(vld1_##suffix(ptr0), vld1_##suffix(ptr1))); } \
887901
inline void v_store(_Tp* ptr, const _Tpvec& a) \

modules/core/src/kmeans.cpp

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -238,7 +238,7 @@ double cv::kmeans( InputArray _data, int K,
238238

239239
attempts = std::max(attempts, 1);
240240
CV_Assert( data0.dims <= 2 && type == CV_32F && K > 0 );
241-
CV_Assert( N >= K );
241+
CV_CheckGE(N, K, "Number of clusters should be more than number of elements");
242242

243243
Mat data(N, dims, CV_32F, data0.ptr(), isrow ? dims * sizeof(float) : static_cast<size_t>(data0.step));
244244

modules/dnn/perf/perf_caffe.cpp

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -38,7 +38,7 @@ namespace opencv_test {
3838

3939
static caffe::Net<float>* initNet(std::string proto, std::string weights)
4040
{
41-
proto = findDataFile(proto, false);
41+
proto = findDataFile(proto);
4242
weights = findDataFile(weights, false);
4343

4444
#ifdef HAVE_CLCAFFE

modules/dnn/perf/perf_net.cpp

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -35,7 +35,7 @@ class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<Backend, Target>
3535

3636
weights = findDataFile(weights, false);
3737
if (!proto.empty())
38-
proto = findDataFile(proto, false);
38+
proto = findDataFile(proto);
3939
if (backend == DNN_BACKEND_HALIDE)
4040
{
4141
if (halide_scheduler == "disabled")
@@ -198,10 +198,10 @@ PERF_TEST_P_(DNNTestNetwork, YOLOv3)
198198
{
199199
if (backend == DNN_BACKEND_HALIDE)
200200
throw SkipTestException("");
201-
Mat sample = imread(findDataFile("dnn/dog416.png", false));
201+
Mat sample = imread(findDataFile("dnn/dog416.png"));
202202
Mat inp;
203203
sample.convertTo(inp, CV_32FC3);
204-
processNet("dnn/yolov3.cfg", "dnn/yolov3.weights", "", inp / 255);
204+
processNet("dnn/yolov3.weights", "dnn/yolov3.cfg", "", inp / 255);
205205
}
206206

207207
PERF_TEST_P_(DNNTestNetwork, EAST_text_detection)

modules/dnn/src/caffe/caffe_io.cpp

Lines changed: 6 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -1137,7 +1137,12 @@ bool ReadProtoFromBinaryFile(const char* filename, Message* proto) {
11371137

11381138
bool ReadProtoFromTextBuffer(const char* data, size_t len, Message* proto) {
11391139
ArrayInputStream input(data, len);
1140-
return google::protobuf::TextFormat::Parse(&input, proto);
1140+
#ifndef OPENCV_DNN_EXTERNAL_PROTOBUF
1141+
return google::protobuf::TextFormat::Parser(true).Parse(&input, proto);
1142+
#else
1143+
return google::protobuf::TextFormat::Parser().Parse(&input, proto);
1144+
#endif
1145+
11411146
}
11421147

11431148

modules/dnn/src/layers/elementwise_layers.cpp

Lines changed: 26 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -831,24 +831,46 @@ struct BNLLFunctor
831831
for( int i = 0; i < len; i++ )
832832
{
833833
float x = srcptr[i];
834-
dstptr[i] = log(1.f + exp(-abs(x)));
834+
// https://github.com/BVLC/caffe/blame/1.0/src/caffe/layers/bnll_layer.cpp#L17
835+
dstptr[i] = x > 0 ? x + log(1. + exp(-x)) : log(1. + exp(x));
835836
}
836837
}
837838
}
838839

839840
#ifdef HAVE_OPENCL
840841
bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
841842
{
842-
// TODO: implement OCL version
843-
return false;
843+
std::vector<UMat> inputs;
844+
std::vector<UMat> outputs;
845+
846+
inps.getUMatVector(inputs);
847+
outs.getUMatVector(outputs);
848+
String buildopt = oclGetTMacro(inputs[0]);
849+
850+
for (size_t i = 0; i < inputs.size(); i++)
851+
{
852+
UMat& src = inputs[i];
853+
UMat& dst = outputs[i];
854+
855+
ocl::Kernel kernel("BNLLForward", ocl::dnn::activations_oclsrc, buildopt);
856+
kernel.set(0, (int)src.total());
857+
kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
858+
kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
859+
860+
size_t gSize = src.total();
861+
CV_Assert(kernel.run(1, &gSize, NULL, false));
862+
}
863+
864+
return true;
844865
}
845866
#endif
846867

847868
#ifdef HAVE_HALIDE
848869
void attachHalide(const Halide::Expr& input, Halide::Func& top)
849870
{
850871
Halide::Var x("x"), y("y"), c("c"), n("n");
851-
top(x, y, c, n) = log(1.0f + exp(-abs(input)));
872+
// https://github.com/BVLC/caffe/blame/1.0/src/caffe/layers/bnll_layer.cpp#L17
873+
top(x, y, c, n) = max(input, 0) + log(1.0f + exp(-abs(input)));
852874
}
853875
#endif // HAVE_HALIDE
854876

modules/dnn/src/layers/eltwise_layer.cpp

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -140,7 +140,7 @@ class EltwiseLayerImpl CV_FINAL : public EltwiseLayer
140140
const std::vector<float>& coeffs, EltwiseOp op,
141141
const ActivationLayer* activ, int nstripes)
142142
{
143-
CV_Check(dst.dims, 1 < dst.dims && dst.dims <= 4, ""); CV_CheckTypeEQ(dst.type(), CV_32FC1, ""); CV_Assert(dst.isContinuous());
143+
CV_Check(dst.dims, 1 < dst.dims && dst.dims <= 5, ""); CV_CheckTypeEQ(dst.type(), CV_32FC1, ""); CV_Assert(dst.isContinuous());
144144
CV_Assert(coeffs.empty() || coeffs.size() == (size_t)nsrcs);
145145

146146
for( int i = 0; i < nsrcs; i++ )
@@ -156,9 +156,9 @@ class EltwiseLayerImpl CV_FINAL : public EltwiseLayer
156156
p.dst = &dst;
157157
p.op = op;
158158
p.nstripes = nstripes;
159-
p.channels = (dst.dims == 4 ? dst.size[1] : 1);
160-
p.planeSize = (dst.dims >= 3 ? dst.size[dst.dims - 1] * dst.size[dst.dims - 2] :
161-
dst.size[dst.dims - 1]);
159+
p.channels = (dst.dims >= 4 ? dst.size[1] : 1);
160+
161+
p.planeSize = dst.total(dst.dims >= 4 ? 2 : 1);
162162
CV_Assert(dst.total() == dst.size[0] * p.channels * p.planeSize);
163163

164164
bool simpleCoeffs = true;

modules/dnn/src/op_inf_engine.cpp

Lines changed: 36 additions & 16 deletions
Original file line number · Diff line number · Diff line change
@@ -410,6 +410,14 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
410410
enginePtr = dispatcher.getSuitablePlugin(targetDevice);
411411
sharedPlugins[targetDevice] = enginePtr;
412412

413+
std::vector<std::string> candidates;
414+
415+
std::string param_pluginPath = utils::getConfigurationParameterString("OPENCV_DNN_IE_EXTRA_PLUGIN_PATH", "");
416+
if (!param_pluginPath.empty())
417+
{
418+
candidates.push_back(param_pluginPath);
419+
}
420+
413421
if (targetDevice == InferenceEngine::TargetDevice::eCPU ||
414422
targetDevice == InferenceEngine::TargetDevice::eFPGA)
415423
{
@@ -423,24 +431,36 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
423431
{
424432
if (!haveFeature[i])
425433
continue;
426-
#ifdef _WIN32
427-
std::string libName = "cpu_extension" + suffixes[i] + ".dll";
428-
#elif defined(__APPLE__)
429-
std::string libName = "libcpu_extension" + suffixes[i] + ".dylib";
430-
#else
431-
std::string libName = "libcpu_extension" + suffixes[i] + ".so";
432-
#endif // _WIN32
433-
try
434-
{
435-
InferenceEngine::IExtensionPtr extension =
436-
InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
437-
enginePtr->AddExtension(extension, 0);
438-
break;
439-
}
440-
catch(...) {}
434+
#ifdef _WIN32
435+
candidates.push_back("cpu_extension" + suffixes[i] + ".dll");
436+
#elif defined(__APPLE__)
437+
candidates.push_back("libcpu_extension" + suffixes[i] + ".so"); // built as loadable module
438+
candidates.push_back("libcpu_extension" + suffixes[i] + ".dylib"); // built as shared library
439+
#else
440+
candidates.push_back("libcpu_extension" + suffixes[i] + ".so");
441+
#endif // _WIN32
441442
}
442-
// Some of networks can work without a library of extra layers.
443443
}
444+
bool found = false;
445+
for (size_t i = 0; i != candidates.size(); ++i)
446+
{
447+
const std::string& libName = candidates[i];
448+
try
449+
{
450+
InferenceEngine::IExtensionPtr extension =
451+
InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
452+
enginePtr->AddExtension(extension, 0);
453+
CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << libName);
454+
found = true;
455+
break;
456+
}
457+
catch(...) {}
458+
}
459+
if (!found && !candidates.empty())
460+
{
461+
CV_LOG_WARNING(NULL, "DNN-IE: Can't load extension plugin (extra layers for some networks). Specify path via OPENCV_DNN_IE_EXTRA_PLUGIN_PATH parameter");
462+
}
463+
// Some of networks can work without a library of extra layers.
444464
}
445465
plugin = InferenceEngine::InferencePlugin(enginePtr);
446466

modules/dnn/src/opencl/activations.cl

Lines changed: 2 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -98,7 +98,8 @@ __kernel void SigmoidForward(const int count, __global const T* in, __global T*
9898
__kernel void BNLLForward(const int n, __global const T* in, __global T* out) {
9999
int index = get_global_id(0);
100100
if (index < n) {
101-
out[index] = in[index] > 0 ? in[index] + log(1.0f + exp(-in[index])) : log(1.0f + exp(in[index]));
101+
T x = in[index];
102+
out[index] = x > 0 ? x + log(1.0f + exp(-x)) : log(1.0f + exp(x));
102103
}
103104
}
104105

0 commit comments

Comments (0)