Merge pull request #99 from fdeschenes/experimental/opencv3

Basic OpenCV 3.4.1 DNN Support
ser1zw 2018-07-29 15:50:14 +09:00 committed by GitHub
commit bcf9dc5a0f
29 changed files with 6044 additions and 34 deletions


@ -3,7 +3,7 @@
An OpenCV wrapper for Ruby.
* Web site: <https://github.com/ruby-opencv/ruby-opencv>
* Ruby 2.x and OpenCV 3.2.0 are supported.
* Ruby 2.x and OpenCV 3.3.1 are supported.
## Requirement
@ -87,6 +87,32 @@ window.show(image)
Cv::wait_key
```
### Image Classification
A sample that classifies objects in an image.
```ruby
require 'opencv'
classes = []
File.open("./examples/synset_words.txt", "r") do |f|
f.each_line { |line|
_, value = line.strip.split(" ", 2)
classes << value.split(",", 2).first
}
f.close
end
net = Cv::Dnn.read_net_from_caffe("./examples/bvlc_googlenet.prototxt", "./examples/bvlc_googlenet.caffemodel")
blob = Cv::Dnn.blob_from_image(Cv.imread("./examples/images/stuff.jpg", Cv::IMREAD_UNCHANGED),
                               size: Cv::Size.new(224, 224), mean: Cv::Scalar.new(104, 117, 123))
net.input = blob
predictions = net.forward
for i in 0..(predictions.cols - 1)
confidence = predictions.at(0, i)[0]
puts "#{classes[i]} #{confidence}" if confidence > 0.1
end
```
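The loop above prints every class whose confidence exceeds 0.1. As a hedged follow-up sketch that relies only on the `classes` and `predictions` objects from the sample (no additional API), the scores can also be ranked to report the strongest matches:
```ruby
# Illustrative only: pair each class label with its confidence,
# then print the five most likely classes.
scores = (0...predictions.cols).map { |i| [classes[i], predictions.at(0, i)[0]] }
scores.sort_by { |_, confidence| -confidence }.first(5).each do |name, confidence|
  puts format("%-30s %.4f", name, confidence)
end
```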
For more samples, see examples/*.rb
## LICENSE:
@ -94,4 +120,3 @@ For more samples, see examples/*.rb
The MIT License
see LICENSE.txt

Binary file not shown.

File diff suppressed because it is too large

1000
examples/synset_words.txt Normal file

File diff suppressed because it is too large


@ -17,7 +17,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_cascadeclassifier_type = {
"CascadeClassifier",
{ 0, free_cascadeclassifier, memsize_cascadeclassifier, 0 },
{ 0, free_cascadeclassifier, memsize_cascadeclassifier, },
0,
0,
0

92
ext/opencv/dnn.cpp Normal file

@ -0,0 +1,92 @@
#include "opencv2/dnn.hpp"
#include "opencv.hpp"
#include "mat.hpp"
#include "size.hpp"
#include "scalar.hpp"
#include "dnn_net.hpp"
#include "dnn_layer.hpp"
#include "error.hpp"
/*
* Document-class: Cv::Dnn
*/
namespace rubyopencv {
namespace Dnn {
VALUE rb_module = Qnil;
VALUE rb_module_dnn() {
return rb_module;
}
/*
* Creates a 4-dimensional blob from an image. Optionally resizes and crops the image from the center, subtracts mean values, scales values by scale_factor, and swaps the Blue and Red channels.
*
* @overload blob_from_image(image, options = {})
* @param image [Mat] Input image (with 1-, 3- or 4-channels)
* @param options [Hash] Options
* @option options [Number] :scale_factor (1.0) Multiplier for image values
* @option options [Size] :size Spatial size for output image
* @option options [Scalar] :mean Scalar with mean values which are subtracted from channels. Values are intended to be in (mean-R, mean-G, mean-B) order if image has BGR ordering and swap_rb is true
* @option options [Boolean] :swap_rb (true) Indicates whether the first and last channels of a 3-channel image should be swapped
* @option options [Boolean] :crop (true) Indicates whether the image will be cropped after resizing
* @return [Mat] 4-dimensional Mat with NCHW dimensions order
*/
VALUE rb_blob_from_image(int argc, VALUE *argv, VALUE self) {
VALUE image, options;
rb_scan_args(argc, argv, "11", &image, &options);
cv::Mat *b = NULL;
try {
double scale_factor = 1.0;
cv::Size size;
cv::Scalar mean;
bool swap_rb = true;
bool crop = true;
if (!NIL_P(options)) {
Check_Type(options, T_HASH);
scale_factor = NUM2DBL_DEFAULT(HASH_LOOKUP(options, "scale_factor"), scale_factor);
swap_rb = RTEST_DEFAULT(HASH_LOOKUP(options, "swap_rb"), (bool)swap_rb);
crop = RTEST_DEFAULT(HASH_LOOKUP(options, "crop"), (bool)crop);
VALUE tmp = Qnil;
tmp = HASH_LOOKUP(options, "size");
if (!NIL_P(tmp)) {
size = *(Size::obj2size(tmp));
}
tmp = HASH_LOOKUP(options, "mean");
if (!NIL_P(tmp)) {
mean = *(Scalar::obj2scalar(tmp));
}
}
b = new cv::Mat(cv::dnn::blobFromImage(*Mat::obj2mat(image), scale_factor, size, mean, swap_rb, crop));
} catch(cv::Exception& e) {
delete b;
Error::raise(e);
}
return Mat::mat2obj(b);
}
void init() {
VALUE opencv = rb_define_module("Cv");
rb_module = rb_define_module_under(opencv, "Dnn");
rb_define_singleton_method(rb_module, "blob_from_image", RUBY_METHOD_FUNC(rb_blob_from_image), -1);
rb_define_singleton_method(rb_module, "read_net", RUBY_METHOD_FUNC(Dnn::Net::rb_read_net), -1); // in ext/opencv/dnn_net.cpp
rb_define_singleton_method(rb_module, "read_net_from_caffe", RUBY_METHOD_FUNC(Dnn::Net::rb_read_net_from_caffe), 2); // in ext/opencv/dnn_net.cpp
rb_define_singleton_method(rb_module, "read_net_from_tensorflow", RUBY_METHOD_FUNC(Dnn::Net::rb_read_net_from_tensorflow), 2); // in ext/opencv/dnn_net.cpp
rb_define_singleton_method(rb_module, "read_net_from_torch", RUBY_METHOD_FUNC(Dnn::Net::rb_read_net_from_torch), -1); // in ext/opencv/dnn_net.cpp
rb_define_singleton_method(rb_module, "read_net_from_darknet", RUBY_METHOD_FUNC(Dnn::Net::rb_read_net_from_darknet), 2); // in ext/opencv/dnn_net.cpp
Dnn::Net::init();
Dnn::Layer::init();
}
}
}
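A minimal usage sketch for the `Cv::Dnn.blob_from_image` binding defined above. The option keys and their defaults (`scale_factor` 1.0, `swap_rb` true, `crop` true) come from `rb_blob_from_image`; the image path is borrowed from the README example and is only an assumption here:
```ruby
require 'opencv'

image = Cv.imread("./examples/images/stuff.jpg", Cv::IMREAD_UNCHANGED)

# Every key is optional; omitted keys fall back to the defaults listed above.
blob = Cv::Dnn.blob_from_image(image,
                               scale_factor: 1.0,
                               size: Cv::Size.new(224, 224),
                               mean: Cv::Scalar.new(104, 117, 123),
                               swap_rb: true,
                               crop: true)
# The result is a 4-dimensional Mat in NCHW order, suitable for Net#input=.
```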

14
ext/opencv/dnn.hpp Normal file

@ -0,0 +1,14 @@
#ifndef RUBY_OPENCV_DNN_H
#define RUBY_OPENCV_DNN_H
/*
* Document-class: Cv::Dnn
*/
namespace rubyopencv {
namespace Dnn {
void init();
VALUE rb_module_dnn();
}
}
#endif // RUBY_OPENCV_DNN_H

76
ext/opencv/dnn_layer.cpp Normal file

@ -0,0 +1,76 @@
#include "opencv2/dnn.hpp"
#include "opencv.hpp"
#include "error.hpp"
/*
* Document-class: Cv::Dnn::Layer
*/
namespace rubyopencv {
namespace Dnn {
namespace Layer {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_layer_type = {
"Dnn::Layer", { 0, 0, 0, }, 0, 0, 0
};
VALUE layer2obj(cv::dnn::Layer* ptr) {
return TypedData_Wrap_Struct(rb_klass, &opencv_layer_type, ptr);
}
cv::dnn::Layer* obj2layer(VALUE obj) {
cv::dnn::Layer* ptr = NULL;
TypedData_Get_Struct(obj, cv::dnn::Layer, &opencv_layer_type, ptr);
return ptr;
}
VALUE rb_allocate(VALUE klass) {
cv::dnn::Layer* ptr = NULL;
return TypedData_Wrap_Struct(klass, &opencv_layer_type, ptr);
}
VALUE rb_initialize(VALUE self) {
return self;
}
/*
* Returns the layer name
* @overload name
* @return [String] Layer name
*/
VALUE rb_name(VALUE self) {
cv::dnn::Layer* selfptr = obj2layer(self);
return rb_str_new_cstr(selfptr->name.c_str());
}
/*
* Returns the layer type
*
* @overload type
* @return [String] Layer type
*/
VALUE rb_type(VALUE self) {
cv::dnn::Layer* selfptr = obj2layer(self);
return rb_str_new_cstr(selfptr->type.c_str());
}
void init() {
VALUE opencv = rb_define_module("Cv");
VALUE dnn = rb_define_module_under(opencv, "Dnn");
rb_klass = rb_define_class_under(dnn, "Layer", rb_cData);
rb_define_alloc_func(rb_klass, rb_allocate);
rb_define_private_method(rb_klass, "initialize", RUBY_METHOD_FUNC(rb_initialize), 0);
rb_define_method(rb_klass, "name", RUBY_METHOD_FUNC(rb_name), 0);
rb_define_method(rb_klass, "type", RUBY_METHOD_FUNC(rb_type), 0);
#if 0
rb_define_attr(rb_klass, "name", 1, 0);
rb_define_attr(rb_klass, "type", 1, 0);
#endif
}
}
}
}
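A brief, hedged sketch of how the `Cv::Dnn::Layer` accessors above are reached from Ruby. It assumes the GoogLeNet Caffe files referenced elsewhere in this commit are present:
```ruby
require 'opencv'

net = Cv::Dnn.read_net_from_caffe("./examples/bvlc_googlenet.prototxt",
                                  "./examples/bvlc_googlenet.caffemodel")

# Net#layers (bound in dnn_net.cpp) wraps each cv::dnn::Layer in the
# Cv::Dnn::Layer class above, which exposes #name and #type.
net.layers.each do |layer|
  puts "#{layer.name}: #{layer.type}"
end
```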

16
ext/opencv/dnn_layer.hpp Normal file

@ -0,0 +1,16 @@
#ifndef RUBY_OPENCV_DNN_LAYER_H
#define RUBY_OPENCV_DNN_LAYER_H
/*
* Document-class: Cv::Dnn::Layer
*/
namespace rubyopencv {
namespace Dnn {
namespace Layer {
void init();
VALUE layer2obj(cv::dnn::Layer* ptr);
}
}
}
#endif // RUBY_OPENCV_DNN_LAYER_H

340
ext/opencv/dnn_net.cpp Normal file

@ -0,0 +1,340 @@
#include "opencv2/dnn.hpp"
#include "opencv.hpp"
#include "mat.hpp"
#include "error.hpp"
#include "dnn_layer.hpp"
/*
* Document-class: Cv::Dnn::Net
*/
namespace rubyopencv {
namespace Dnn {
namespace Net {
VALUE rb_klass = Qnil;
void free_net(void* ptr) {
delete (cv::dnn::Net*)ptr;
}
size_t memsize_net(const void* ptr) {
return sizeof(cv::dnn::Net);
}
rb_data_type_t opencv_net_type = {
"Dnn::Net", { 0, free_net, memsize_net, }, 0, 0, 0
};
VALUE net2obj(cv::dnn::Net* ptr) {
return TypedData_Wrap_Struct(rb_klass, &opencv_net_type, ptr);
}
cv::dnn::Net* obj2net(VALUE obj) {
cv::dnn::Net* ptr = NULL;
TypedData_Get_Struct(obj, cv::dnn::Net, &opencv_net_type, ptr);
return ptr;
}
VALUE rb_allocate(VALUE klass) {
cv::dnn::Net* ptr = new cv::dnn::Net();
return TypedData_Wrap_Struct(klass, &opencv_net_type, ptr);
}
cv::dnn::Net* rb_read_net_internal(VALUE model, VALUE config, VALUE framework) {
cv::dnn::Net* dataptr = NULL;
try {
cv::dnn::Net net = cv::dnn::readNet(StringValueCStr(model), CSTR_DEFAULT(config, ""), CSTR_DEFAULT(framework, ""));
dataptr = new cv::dnn::Net(net);
} catch(cv::Exception& e) {
delete dataptr;
Error::raise(e);
}
return dataptr;
}
/*
* Creates or reads a deep learning network
*
* @overload new(model = nil, config = nil, framework = nil)
* @param model [String] Binary file containing trained weights
* @param config [String] Text file containing the network configuration
* @param framework [String] Explicit framework name tag to determine a format
* @return [Net] Network object
* @opencv_func cv::dnn::Net
* @example
* net1 = Dnn::Net.new
* net2 = Dnn::Net.new("bvlc_googlenet.caffemodel", "bvlc_googlenet.prototxt")
*/
VALUE rb_initialize(int argc, VALUE *argv, VALUE self) {
VALUE model, config, framework;
rb_scan_args(argc, argv, "03", &model, &config, &framework);
if (argc > 0) {
RTYPEDDATA_DATA(self) = rb_read_net_internal(model, config, framework);
}
return self;
}
void rb_set_input_internal(VALUE self, VALUE blob, VALUE name) {
cv::dnn::Net* selfptr = obj2net(self);
try {
selfptr->setInput(*Mat::obj2mat(blob), CSTR_DEFAULT(name, ""));
} catch(cv::Exception& e) {
Error::raise(e);
}
}
/*
* Sets the new input value for the network
*
* @overload input=(blob)
* @param blob [Mat] A blob of CV_32F or CV_8U depth
* @return [nil]
*/
VALUE rb_set_input_equals(VALUE self, VALUE blob) {
rb_set_input_internal(self, blob, Qnil);
return Qnil;
}
/*
* Sets the new input value for the network
*
* @overload input(blob, name = nil)
* @param blob [Mat] A blob of CV_32F or CV_8U depth
* @param name [String] Name of an input layer
* @return [Net] The current network
*/
VALUE rb_set_input(int argc, VALUE *argv, VALUE self) {
VALUE blob, name;
rb_scan_args(argc, argv, "11", &blob, &name);
rb_set_input_internal(self, blob, name);
return self;
}
/*
* Runs forward pass
*
* @overload forward(output_name = nil)
* @param output_name [String] Name of the layer for which output is needed
* @return [Mat] Blob for first output
*/
VALUE rb_forward(int argc, VALUE *argv, VALUE self) {
VALUE output_name;
rb_scan_args(argc, argv, "01", &output_name);
cv::dnn::Net* selfptr = obj2net(self);
cv::Mat* m = NULL;
try {
m = new cv::Mat(selfptr->forward(CSTR_DEFAULT(output_name, "")));
} catch(cv::Exception& e) {
delete m;
Error::raise(e);
}
return Mat::rb_clone(Mat::mat2obj(m));
}
/*
* Returns whether or not the network is empty
*
* @overload empty?
* @return [Boolean] Whether or not the network is empty
*/
VALUE rb_empty(VALUE self) {
cv::dnn::Net* selfptr = obj2net(self);
return selfptr->empty() ? Qtrue : Qfalse;
}
/*
* Returns an array of layers loaded in this model
*
* @overload layers
* @return [Array<Layer>] Loaded layers
*/
VALUE rb_get_layers(VALUE self) {
cv::dnn::Net* selfptr = obj2net(self);
long size = selfptr->getLayerNames().size();
VALUE layers = rb_ary_new_capa(size);
for (long i = 0; i < size; i++) {
VALUE layer = Dnn::Layer::layer2obj(selfptr->getLayer((int)i + 1));
rb_ary_store(layers, i, layer);
}
return layers;
}
/*
* Enables or disables layer fusion in the network
*
* @overload fusion=(fusion)
* @param fusion [Boolean] Whether or not fusion should be enabled
* @return [Net] The current network
*/
VALUE rb_enable_fusion(VALUE self, VALUE fusion) {
cv::dnn::Net* selfptr = obj2net(self);
selfptr->enableFusion(RTEST(fusion) ? true : false);
return self;
}
/*
* Asks the network to use a specific computation backend where it is supported
*
* @overload preferable_backend=(backend_id)
* @param backend_id [Integer] The preferable backend identifier
* @return [Net] The current network
*/
VALUE rb_set_preferable_backend(VALUE self, VALUE backend_id) {
cv::dnn::Net* selfptr = obj2net(self);
selfptr->setPreferableBackend(NUM2INT(backend_id));
return self;
}
/*
* Asks the network to run computations on a specific target device
*
* @overload preferable_target=(target_id)
* @param target_id [Integer] The preferable target identifier
* @return [Net] The current network
*/
VALUE rb_set_preferable_target(VALUE self, VALUE target_id) {
cv::dnn::Net* selfptr = obj2net(self);
selfptr->setPreferableTarget(NUM2INT(target_id));
return self;
}
/*
* Reads a deep learning network represented in one of the supported formats.
*
* @overload read_net(model = nil, config = nil, framework = nil)
* @param model [String] Binary file containing trained weights
* @param config [String] Text file containing the network configuration
* @param framework [String] Explicit framework name tag to determine a format
* @return [Net] Network object
*/
VALUE rb_read_net(int argc, VALUE *argv, VALUE self) {
VALUE model, config, framework;
rb_scan_args(argc, argv, "12", &model, &config, &framework);
return net2obj(rb_read_net_internal(model, config, framework));
}
/*
* Reads a network model stored in Caffe framework's format
*
* @overload read_net_from_caffe(prototxt, caffe_model)
* @param prototxt [String] Path to the .prototxt file with text description of the network architecture
* @param caffe_model [String] Path to the .caffemodel file with learned network
* @return [Net] Network object
*/
VALUE rb_read_net_from_caffe(VALUE self, VALUE prototxt, VALUE caffe_model) {
cv::dnn::Net *net = NULL;
try {
net = new cv::dnn::Net(cv::dnn::readNetFromCaffe(StringValueCStr(prototxt), StringValueCStr(caffe_model)));
} catch(cv::Exception& e) {
delete net;
Error::raise(e);
}
return net2obj(net);
}
/*
* Reads a network model stored in TensorFlow framework's format
*
* @overload read_net_from_tensorflow(model, config)
* @param model [String] Path to the .pb file with binary protobuf description of the network architecture
* @param config [String] Path to the .pbtxt file that contains text graph definition in protobuf format
* @return [Net] Network object
*/
VALUE rb_read_net_from_tensorflow(VALUE self, VALUE model, VALUE config) {
cv::dnn::Net *net = NULL;
try {
net = new cv::dnn::Net(cv::dnn::readNetFromTensorflow(StringValueCStr(model), StringValueCStr(config)));
} catch(cv::Exception& e) {
delete net;
Error::raise(e);
}
return net2obj(net);
}
/*
* Reads a network model stored in Torch7 framework's format
*
* @overload read_net_from_torch(model, binary = true)
* @param model [String] Path to the file, dumped from Torch by using torch.save() function
* @param binary [Boolean] Specifies whether the network was serialized in ASCII or binary mode
* @return [Net] Network object
*/
VALUE rb_read_net_from_torch(int argc, VALUE *argv, VALUE self) {
VALUE model, binary;
rb_scan_args(argc, argv, "11", &model, &binary);
cv::dnn::Net *net = NULL;
try {
net = new cv::dnn::Net(cv::dnn::readNetFromTorch(StringValueCStr(model), RTEST_DEFAULT(binary, true)));
} catch(cv::Exception& e) {
delete net;
Error::raise(e);
}
return net2obj(net);
}
/*
* Reads a network model stored in Darknet model files
*
* @overload read_net_from_darknet(cfg_file, darknet_model)
* @param cfg_file [String] Path to the .cfg file with text description of the network architecture
* @param darknet_model [String] Path to the .weights file with learned network
* @return [Net] Network object
*/
VALUE rb_read_net_from_darknet(VALUE self, VALUE cfg_file, VALUE darknet_model) {
cv::dnn::Net *net = NULL;
try {
net = new cv::dnn::Net(cv::dnn::readNetFromDarknet(StringValueCStr(cfg_file), StringValueCStr(darknet_model)));
} catch(cv::Exception& e) {
delete net;
Error::raise(e);
}
return net2obj(net);
}
void init() {
VALUE opencv = rb_define_module("Cv");
VALUE dnn = rb_define_module_under(opencv, "Dnn");
rb_klass = rb_define_class_under(dnn, "Net", rb_cData);
rb_define_alloc_func(rb_klass, rb_allocate);
rb_define_method(rb_klass, "initialize", RUBY_METHOD_FUNC(rb_initialize), -1);
rb_define_method(rb_klass, "input=", RUBY_METHOD_FUNC(rb_set_input_equals), 1);
rb_define_method(rb_klass, "input", RUBY_METHOD_FUNC(rb_set_input), -1);
rb_define_method(rb_klass, "fusion=", RUBY_METHOD_FUNC(rb_enable_fusion), 1);
rb_define_method(rb_klass, "preferable_backend=", RUBY_METHOD_FUNC(rb_set_preferable_backend), 1);
rb_define_method(rb_klass, "preferable_target=", RUBY_METHOD_FUNC(rb_set_preferable_target), 1);
rb_define_method(rb_klass, "forward", RUBY_METHOD_FUNC(rb_forward), -1);
rb_define_method(rb_klass, "empty?", RUBY_METHOD_FUNC(rb_empty), 0);
rb_define_method(rb_klass, "layers", RUBY_METHOD_FUNC(rb_get_layers), 0);
#if 0
rb_define_attr(rb_klass, "layers", 1, 0);
rb_define_attr(rb_klass, "input", 0, 1);
rb_define_attr(rb_klass, "fusion", 0, 1);
rb_define_attr(rb_klass, "preferable_backend", 0, 1);
rb_define_attr(rb_klass, "preferable_target", 0, 1);
#endif
}
}
}
}
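An illustrative sketch of the `Cv::Dnn::Net` API defined above. Apart from the GoogLeNet files used in the README, the model file names in the comments are placeholders, not files shipped with this commit:
```ruby
require 'opencv'

# Generic reader; the model file is given first, the config second.
net = Cv::Dnn.read_net("./examples/bvlc_googlenet.caffemodel",
                       "./examples/bvlc_googlenet.prototxt")

# Framework-specific readers bound above (placeholder file names):
#   Cv::Dnn.read_net_from_tensorflow("graph.pb", "graph.pbtxt")
#   Cv::Dnn.read_net_from_torch("model.t7")
#   Cv::Dnn.read_net_from_darknet("net.cfg", "net.weights")

blob = Cv::Dnn.blob_from_image(Cv.imread("./examples/images/stuff.jpg"),
                               size: Cv::Size.new(224, 224),
                               mean: Cv::Scalar.new(104, 117, 123))
net.input = blob            # Net#input= sets the blob without a layer name
predictions = net.forward   # runs the forward pass and returns a Mat
puts predictions.cols unless net.empty?
```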

21
ext/opencv/dnn_net.hpp Normal file

@ -0,0 +1,21 @@
#ifndef RUBY_OPENCV_DNN_NET_H
#define RUBY_OPENCV_DNN_NET_H
/*
* Document-class: Cv::Dnn::Net
*/
namespace rubyopencv {
namespace Dnn {
namespace Net {
void init();
VALUE net2obj(cv::dnn::Net* ptr);
VALUE rb_read_net(int argc, VALUE *argv, VALUE self);
VALUE rb_read_net_from_caffe(VALUE self, VALUE prototxt, VALUE caffe_model);
VALUE rb_read_net_from_tensorflow(VALUE self, VALUE model, VALUE config);
VALUE rb_read_net_from_torch(int argc, VALUE *argv, VALUE self);
VALUE rb_read_net_from_darknet(VALUE self, VALUE cfg_file, VALUE darknet_model);
}
}
}
#endif // RUBY_OPENCV_DNN_NET_H


@ -21,10 +21,10 @@ def cv_version_suffix(incdir)
major + minor + subminor
end
incdir, libdir = dir_config("opencv", "/usr/local/include", "/usr/local/lib")
incdir, _ = dir_config("opencv", "/usr/local/include", "/usr/local/lib")
opencv_headers = ["opencv2/core.hpp", "opencv2/highgui.hpp", "opencv2/imgcodecs.hpp", "opencv2/imgproc.hpp", "opencv2/objdetect.hpp", "opencv2/videoio.hpp"]
opencv_libraries = ["opencv_core", "opencv_highgui", "opencv_imgcodecs", "opencv_imgproc", "opencv_objdetect", "opencv_videoio"]
opencv_headers = ["opencv2/core.hpp", "opencv2/highgui.hpp", "opencv2/imgcodecs.hpp", "opencv2/imgproc.hpp", "opencv2/objdetect.hpp", "opencv2/videoio.hpp", "opencv2/dnn.hpp"]
opencv_libraries = ["opencv_core", "opencv_highgui", "opencv_imgcodecs", "opencv_imgproc", "opencv_objdetect", "opencv_videoio", "opencv_dnn"]
puts ">> Check the required libraries..."
if $mswin or $mingw


@ -2,10 +2,12 @@
#include <sstream>
#include "opencv2/highgui.hpp"
#include "opencv.hpp"
#include "mat.hpp"
#include "mat_imgproc.hpp"
#include "mat_drawing.hpp"
#include "scalar.hpp"
#include "size.hpp"
#include "rect.hpp"
#include "error.hpp"
@ -20,7 +22,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_mat_type = {
"Mat",
{ 0, free_mat, memsize_mat, 0 },
{ 0, free_mat, memsize_mat, },
0,
0,
0
@ -233,7 +235,7 @@ namespace rubyopencv {
/*
* Loads an image from a file.
*
* @overload imread(filename, flags)
* @overload imread(filename, flags = IMREAD_UNCHANGED)
* @param filename [String] Name of file to be loaded.
* @param flags [Integer] Flags specifying the color type of a loaded image:
* - CV_LOAD_IMAGE_ANYDEPTH - If set, return 16-bit/32-bit image when the input has the corresponding depth, otherwise convert it to 8-bit.
@ -245,8 +247,10 @@ namespace rubyopencv {
* @return [Mat] Loaded image
* @opencv_func cv::imread
*/
VALUE rb_imread(VALUE self, VALUE filename, VALUE flags) {
return rb_imread_internal(self, filename, flags, rb_klass);
VALUE rb_imread(int argc, VALUE *argv, VALUE self) {
VALUE filename, flags;
rb_scan_args(argc, argv, "11", &filename, &flags);
return rb_imread_internal(self, filename, NUM2INT_DEFAULT(flags, cv::IMREAD_UNCHANGED), rb_klass);
}
VALUE rb_imread_as(VALUE self, VALUE filename, VALUE flags, VALUE klass) {
@ -258,7 +262,7 @@ namespace rubyopencv {
if (!NIL_P(params)) {
Check_Type(params, T_ARRAY);
int size = RARRAY_LEN(params);
long size = RARRAY_LEN(params);
for (long i = 0; i < size; i++) {
VALUE n = rb_ary_entry(params, i);
params_value.push_back(NUM2INT(n));
@ -365,8 +369,10 @@ namespace rubyopencv {
* @return [CvMat] Loaded matrix
* @opencv_func cv::imdecode
*/
VALUE rb_imdecode(VALUE self, VALUE buf, VALUE flags) {
return rb_imdecode_internal(self, buf, flags, rb_klass);
VALUE rb_imdecode(int argc, VALUE *argv, VALUE self) {
VALUE buf, flags;
rb_scan_args(argc, argv, "11", &buf, &flags);
return rb_imdecode_internal(self, buf, NUM2INT_DEFAULT(flags, cv::IMREAD_UNCHANGED), rb_klass);
}
VALUE rb_imdecode_as(VALUE self, VALUE buf, VALUE flags, VALUE klass) {
@ -417,6 +423,20 @@ namespace rubyopencv {
return INT2NUM(dataptr->depth());
}
VALUE rb_size(int argc, VALUE *argv, VALUE self) {
VALUE i;
rb_scan_args(argc, argv, "01", &i);
const cv::Mat* dataptr = obj2mat(self);
if (NIL_P(i)) {
cv::Size *s = new cv::Size(dataptr->size());
return Size::size2obj(s);
} else {
return INT2NUM(dataptr->size[NUM2INT(i)]);
}
}
/*
* Returns number of channels of the matrix.
*
@ -1218,10 +1238,11 @@ namespace rubyopencv {
rb_define_alias(rb_klass, "height", "rows");
rb_define_method(rb_klass, "cols", RUBY_METHOD_FUNC(rb_cols), 0);
rb_define_alias(rb_klass, "width", "cols");
rb_define_method(rb_klass, "dims", RUBY_METHOD_FUNC(rb_dims), 0);
rb_define_method(rb_klass, "depth", RUBY_METHOD_FUNC(rb_depth), 0);
rb_define_method(rb_klass, "channels", RUBY_METHOD_FUNC(rb_channels), 0);
rb_define_method(rb_klass, "size", RUBY_METHOD_FUNC(rb_size), -1);
rb_define_method(rb_klass, "[]", RUBY_METHOD_FUNC(rb_aref), -2);
rb_define_alias(rb_klass, "at", "[]");


@ -9,8 +9,8 @@
namespace rubyopencv {
namespace Mat {
void init();
VALUE rb_imread(VALUE self, VALUE filename, VALUE flags);
VALUE rb_imdecode(VALUE self, VALUE buf, VALUE flags);
VALUE rb_imread(int argc, VALUE *argv, VALUE self);
VALUE rb_imdecode(int argc, VALUE *argv, VALUE self);
VALUE rb_imwrite_internal(VALUE filename, VALUE img, VALUE params);
VALUE rb_clone(VALUE self);
VALUE rb_add_weighted(int argc, VALUE *argv, VALUE self);
@ -21,7 +21,7 @@ namespace rubyopencv {
cv::Mat* obj2mat(VALUE obj);
VALUE mat2obj(cv::Mat* ptr);
VALUE mat2obj(cv::Mat* ptr, VALUE klass);
class RubyMatAllocator: public cv::MatAllocator {
public:
RubyMatAllocator() {}


@ -12,6 +12,7 @@
#include "scalar.hpp"
#include "cascadeclassifier.hpp"
#include "dnn.hpp"
#include "videocapture.hpp"
#include "error.hpp"
@ -92,6 +93,7 @@ namespace rubyopencv {
Size::init();
Scalar::init();
CascadeClassifier::init();
Dnn::init();
VideoCapture::init();
Window::init();
Trackbar::init();
@ -99,9 +101,9 @@ namespace rubyopencv {
rb_define_module_function(rb_module, "build_information", RUBY_METHOD_FUNC(rb_build_information), 0);
rb_define_singleton_method(rb_module, "imread", RUBY_METHOD_FUNC(Mat::rb_imread), 2); // in ext/opencv/mat.cpp
rb_define_singleton_method(rb_module, "imread", RUBY_METHOD_FUNC(Mat::rb_imread), -1); // in ext/opencv/mat.cpp
rb_define_singleton_method(rb_module, "imwrite", RUBY_METHOD_FUNC(rb_imwrite), -1);
rb_define_singleton_method(rb_module, "imdecode", RUBY_METHOD_FUNC(Mat::rb_imdecode), 2); // in ext/opencv/mat.cpp
rb_define_singleton_method(rb_module, "imdecode", RUBY_METHOD_FUNC(Mat::rb_imdecode), -1); // in ext/opencv/mat.cpp
rb_define_singleton_method(rb_module, "wait_key", RUBY_METHOD_FUNC(Window::rb_wait_key), -1); // in ext/opencv/window.cpp
rb_define_singleton_method(rb_module, "add_weighted", RUBY_METHOD_FUNC(Mat::rb_add_weighted), -1); // in ext/opencv/mat.cpp


@ -15,6 +15,14 @@ namespace rubyopencv {
return NIL_P(value) ? (default_value) : NUM2DBL(value);
}
inline std::string CSTR_DEFAULT(VALUE value, std::string default_value) {
return NIL_P(value) ? (default_value) : StringValueCStr(value);
}
inline bool RTEST_DEFAULT(VALUE value, bool default_value) {
return NIL_P(value) ? (default_value) : (RTEST(value) ? true : false);
}
inline VALUE HASH_LOOKUP(VALUE hash, std::string key) {
return rb_hash_lookup(hash, ID2SYM(rb_intern(key.c_str())));
}


@ -1,6 +1,7 @@
// -*- mode: c++; coding: utf-8 -*-
#include "ruby.h"
#include "opencv2/core.hpp"
#include "opencv2/dnn.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
@ -318,7 +319,7 @@ namespace rubyopencv {
rb_define_const(rb_module, "BORDER_REFLECT101", INT2FIX(cv::BORDER_REFLECT101));
rb_define_const(rb_module, "BORDER_DEFAULT", INT2FIX(cv::BORDER_DEFAULT));
rb_define_const(rb_module, "BORDER_ISOLATED", INT2FIX(cv::BORDER_ISOLATED));
rb_define_const(rb_module, "EVENT_MOUSEMOVE", INT2FIX(cv::EVENT_MOUSEMOVE));
rb_define_const(rb_module, "EVENT_LBUTTONDOWN", INT2FIX(cv::EVENT_LBUTTONDOWN));
rb_define_const(rb_module, "EVENT_RBUTTONDOWN", INT2FIX(cv::EVENT_RBUTTONDOWN));
@ -387,6 +388,15 @@ namespace rubyopencv {
rb_define_const(rb_module, "THRESH_OTSU", INT2FIX(cv::THRESH_OTSU));
rb_define_const(rb_module, "THRESH_TRIANGLE", INT2FIX(cv::THRESH_TRIANGLE));
rb_define_const(rb_module, "DNN_BACKEND_DEFAULT", INT2FIX(cv::dnn::DNN_BACKEND_DEFAULT));
rb_define_const(rb_module, "DNN_BACKEND_INFERENCE_ENGINE", INT2FIX(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE));
rb_define_const(rb_module, "DNN_BACKEND_OPENCV", INT2FIX(cv::dnn::DNN_BACKEND_OPENCV));
rb_define_const(rb_module, "DNN_BACKEND_HALIDE", INT2FIX(cv::dnn::DNN_BACKEND_HALIDE));
rb_define_const(rb_module, "DNN_TARGET_CPU", INT2FIX(cv::dnn::DNN_TARGET_CPU));
rb_define_const(rb_module, "DNN_TARGET_OPENCL", INT2FIX(cv::dnn::DNN_TARGET_OPENCL));
rb_define_const(rb_module, "DNN_TARGET_OPENCL_FP16", INT2FIX(cv::dnn::DNN_TARGET_OPENCL_FP16));
rb_define_const(rb_module, "ADAPTIVE_THRESH_MEAN_C", INT2FIX(cv::ADAPTIVE_THRESH_MEAN_C));
rb_define_const(rb_module, "ADAPTIVE_THRESH_GAUSSIAN_C", INT2FIX(cv::ADAPTIVE_THRESH_GAUSSIAN_C));
}
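A short, hedged sketch tying the new `DNN_BACKEND_*` / `DNN_TARGET_*` constants to the `preferable_backend=` / `preferable_target=` setters from dnn_net.cpp. Whether a particular backend or target is usable depends on how the local OpenCV build was compiled:
```ruby
require 'opencv'

net = Cv::Dnn.read_net_from_caffe("./examples/bvlc_googlenet.prototxt",
                                  "./examples/bvlc_googlenet.caffemodel")

# Hint which implementation and device OpenCV should prefer for this network.
net.preferable_backend = Cv::DNN_BACKEND_DEFAULT
net.preferable_target  = Cv::DNN_TARGET_CPU
```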


@ -12,7 +12,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_point_type = {
"Point",
{ 0, free_point, memsize_point, 0 },
{ 0, free_point, memsize_point, },
0,
0,
0
@ -169,4 +169,3 @@ namespace rubyopencv {
}
}
}


@ -15,7 +15,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_rect_type = {
"Rect",
{ 0, free_rect, memsize_rect, 0 },
{ 0, free_rect, memsize_rect, },
0,
0,
0


@ -11,7 +11,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_scalar_type = {
"Scalar",
{ 0, free_scalar, memsize_scalar, 0 },
{ 0, free_scalar, memsize_scalar, },
0,
0,
0
@ -28,7 +28,7 @@ namespace rubyopencv {
VALUE klass() {
return rb_klass;
}
cv::Scalar* obj2scalar(VALUE obj) {
cv::Scalar* ptr = NULL;
TypedData_Get_Struct(obj, cv::Scalar, &opencv_scalar_type, ptr);
@ -114,7 +114,7 @@ namespace rubyopencv {
return rb_ary_new3(4, rb_aref(self, INT2FIX(0)), rb_aref(self, INT2FIX(1)),
rb_aref(self, INT2FIX(2)), rb_aref(self, INT2FIX(3)));
}
void init() {
VALUE opencv = rb_define_module("Cv");


@ -13,7 +13,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_size_type = {
"Size",
{ 0, free_size, memsize_size, 0 },
{ 0, free_size, memsize_size, },
0,
0,
0


@ -6,6 +6,7 @@ namespace rubyopencv {
namespace Size {
void init();
cv::Size* obj2size(VALUE obj);
VALUE size2obj(cv::Size* ptr);
}
}
#endif // RUBY_OPENCV_SIZE_H


@ -14,7 +14,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_trackbar_type = {
"Trackbar",
{ mark_trackbar, free_trackbar, 0, 0 },
{ mark_trackbar, free_trackbar, 0, },
0,
0,
0


@ -18,7 +18,7 @@ namespace rubyopencv {
VALUE rb_klass = Qnil;
rb_data_type_t opencv_videocapture_type = {
"VideoCapture",
{ 0, free_videocapture, memsize_videocapture, 0 },
{ 0, free_videocapture, memsize_videocapture, },
0,
0,
0


@ -12,7 +12,7 @@ namespace rubyopencv {
VALUE rb_klass;
rb_data_type_t opencv_window_type = {
"Window",
{ mark_window, free_window, 0, 0 },
{ mark_window, free_window, 0, },
0,
0,
0
@ -43,7 +43,7 @@ namespace rubyopencv {
window_t* w = obj2window(obj);
return StringValueCStr(w->name);
}
/*
* Creates a window.
*
@ -63,8 +63,8 @@ namespace rubyopencv {
char* name_str = StringValueCStr(name);
if (cvGetWindowHandle(name_str) != NULL) {
rb_raise(rb_eStandardError, "window name should be unique.");
}
}
int mode = CV_WINDOW_AUTOSIZE;
if (argc == 2) {
Check_Type(flags, T_FIXNUM);


@ -12,6 +12,8 @@ class OpenCVTestCase < Test::Unit::TestCase
FILENAME_LENA32x32 = SAMPLE_DIR + 'lena-32x32.jpg'
FILENAME_GIRLS_PLAY_AND_PLANT_FLOWERS_IN_THE_PARK = SAMPLE_DIR + 'girls-play-and-plant-flowers-in-the-park-725x480.jpg'
HAARCASCADE_FRONTALFACE_ALT = SAMPLE_DIR + 'haarcascade_frontalface_alt.xml'
BVLC_GOOGLENET_CAFFEMODEL = SAMPLE_DIR + 'bvlc_googlenet.caffemodel'
BVLC_GOOGLENET_PROTXT = SAMPLE_DIR + 'bvlc_googlenet.prototxt'
AVI_SAMPLE = SAMPLE_DIR + 'movie_sample.avi'
DUMMY_OBJ = Digest::MD5.new # dummy object for argument type check test
@ -102,4 +104,3 @@ class OpenCVTestCase < Test::Unit::TestCase
end
end
end

Binary file not shown.

File diff suppressed because it is too large

72
test/test_dnn.rb Normal file

@ -0,0 +1,72 @@
#!/usr/bin/env ruby
# -*- mode: ruby; coding: utf-8 -*-
require 'opencv'
require File.expand_path(File.dirname(__FILE__)) + '/helper'
include Cv
class TestDnn < OpenCVTestCase
def test_read_net
c = Dnn.read_net(BVLC_GOOGLENET_PROTXT, BVLC_GOOGLENET_CAFFEMODEL)
assert_equal(Dnn::Net, c.class)
assert_raise(TypeError) {
Dnn.read_net(DUMMY_OBJ)
}
end
def test_read_net_from_caffe
c = Dnn.read_net_from_caffe(BVLC_GOOGLENET_PROTXT, BVLC_GOOGLENET_CAFFEMODEL)
assert_equal(Dnn::Net, c.class)
assert_raise(ArgumentError) {
Dnn.read_net_from_caffe(DUMMY_OBJ)
}
end
def test_blob_from_image
b = blob_from_image
assert_equal(Mat, b.class)
assert_equal(224, b.size(2))
assert_equal(224, b.size(3))
end
def test_net_initialize
c = Dnn::Net.new
assert_equal(Dnn::Net, c.class)
c = bvlc_googlenet
assert_equal(Dnn::Net, c.class)
assert_raise(TypeError) {
Dnn::Net.new(DUMMY_OBJ)
}
end
def test_net_empty
c = bvlc_googlenet
assert_equal(false, c.empty?)
c = Dnn::Net.new
assert_equal(true, c.empty?)
end
def test_net_forward
c = bvlc_googlenet
c.input = blob_from_image
m = c.forward
assert_equal(Mat, m.class)
end
private
def bvlc_googlenet
Dnn::Net.new(BVLC_GOOGLENET_PROTXT, BVLC_GOOGLENET_CAFFEMODEL)
end
def blob_from_image
i = Cv.imread(FILENAME_GIRLS_PLAY_AND_PLANT_FLOWERS_IN_THE_PARK)
Dnn.blob_from_image(i, size: Size.new(224, 224), mean: Scalar.new(104, 117, 123))
end
end