diff --git a/examples/matching_to_many_images/matching_to_many_images.rb b/examples/matching_to_many_images/matching_to_many_images.rb
index 0d70b1f..72d0a1c 100644
--- a/examples/matching_to_many_images/matching_to_many_images.rb
+++ b/examples/matching_to_many_images/matching_to_many_images.rb
@@ -4,9 +4,9 @@ include OpenCV
data = File.dirname(__FILE__)
-query = IplImage.load File.join(data, 'query.png'), CV_LOAD_IMAGE_GRAYSCALE
+query = CvMat.load File.join(data, 'query.png'), CV_LOAD_IMAGE_GRAYSCALE
image_files = ['1.png', '2.png', '3.png'].map{|f| File.join(data, 'train', f)}
-images = image_files.map{|f| IplImage.load f, CV_LOAD_IMAGE_GRAYSCALE}
+images = image_files.map{|f| CvMat.load f, CV_LOAD_IMAGE_GRAYSCALE}
matchs = query.match_descriptors(images)
diff --git a/ext/opencv/cvmat.cpp b/ext/opencv/cvmat.cpp
index 159d7ed..6ac93da 100644
--- a/ext/opencv/cvmat.cpp
+++ b/ext/opencv/cvmat.cpp
@@ -352,6 +352,7 @@ void define_ruby_class()
rb_define_method(rb_klass, "resize", RUBY_METHOD_FUNC(rb_resize), -1);
rb_define_method(rb_klass, "warp_affine", RUBY_METHOD_FUNC(rb_warp_affine), -1);
rb_define_singleton_method(rb_klass, "rotation_matrix2D", RUBY_METHOD_FUNC(rb_rotation_matrix2D), 3);
+ rb_define_singleton_method(rb_klass, "get_perspective_transform", RUBY_METHOD_FUNC(rb_get_perspective_transform), 2);
rb_define_method(rb_klass, "warp_perspective", RUBY_METHOD_FUNC(rb_warp_perspective), -1);
rb_define_singleton_method(rb_klass, "find_homography", RUBY_METHOD_FUNC(rb_find_homograpy), -1);
rb_define_method(rb_klass, "remap", RUBY_METHOD_FUNC(rb_remap), -1);
@@ -4053,6 +4054,40 @@ rb_rotation_matrix2D(VALUE self, VALUE center, VALUE angle, VALUE scale)
return map_matrix;
}
+/*
+ * call-seq:
+ * CvMat.get_perspective_transform(from_points,to_points) -> cvmat
+ *
+ * Calculates a perspective transform from four pairs of the corresponding points.
+ * Returns a matrix suitable for use with warp_perspective
+ */
+VALUE
+rb_get_perspective_transform(VALUE self, VALUE source, VALUE dest)
+{
+ Check_Type(source, T_ARRAY);
+ Check_Type(dest, T_ARRAY);
+
+ int count = RARRAY_LEN(source);
+
+ CvPoint2D32f* source_buff = ALLOCA_N(CvPoint2D32f, count);
+ CvPoint2D32f* dest_buff = ALLOCA_N(CvPoint2D32f, count);
+
+ for (int i = 0; i < count; i++) {
+ source_buff[i] = *(CVPOINT2D32F(RARRAY_PTR(source)[i]));
+ dest_buff[i] = *(CVPOINT2D32F(RARRAY_PTR(dest)[i]));
+ }
+
+ VALUE map_matrix = new_object(cvSize(3, 3), CV_MAKETYPE(CV_32F, 1));
+
+ try {
+ cvGetPerspectiveTransform(source_buff, dest_buff, CVMAT(map_matrix));
+ }
+ catch (cv::Exception& e) {
+ raise_cverror(e);
+ }
+ return map_matrix;
+}
+
/*
* call-seq:
* warp_perspective(map_matrix[,flags = CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS][,fillval=0])) -> cvmat
@@ -5351,7 +5386,6 @@ rb_match_descriptors(int argc, VALUE *argv, VALUE self)
{
VALUE images, detector_type, descriptor_type, matcher_type;
rb_scan_args(argc, argv, "13", &images, &detector_type, &descriptor_type, &matcher_type);
-
if (RARRAY_LEN(images) == 0) {
return rb_hash_new();
}
@@ -5365,54 +5399,58 @@ rb_match_descriptors(int argc, VALUE *argv, VALUE self)
matcher_type = rb_str_new2("FlannBased");
}
- cv::Mat queryImage = CVMAT(self);
- std::vector<cv::Mat> trainImages;
- for(int i=0; i < RARRAY_LEN(images); i++) {
- trainImages.push_back(CVMAT_WITH_CHECK(RARRAY_PTR(images)[i]));
- }
-
- cv::Ptr<cv::FeatureDetector> featureDetector = cv::FeatureDetector::create(RSTRING_PTR(detector_type));
- if (featureDetector.empty()) {
- rb_raise(rb_eArgError, "Could not create feature detector by given detector type: %s", RSTRING_PTR(detector_type));
- }
- cv::Ptr<cv::DescriptorExtractor> descriptorExtractor = cv::DescriptorExtractor::create(RSTRING_PTR(descriptor_type));
- if (descriptorExtractor.empty()) {
- rb_raise(rb_eArgError, "Could not create descriptor extractor by given descriptor type: %s", RSTRING_PTR(descriptor_type));
- }
- cv::Ptr<cv::DescriptorMatcher> descriptorMatcher;
- try {
- descriptorMatcher = cv::DescriptorMatcher::create(RSTRING_PTR(matcher_type));
- }
- catch(cv::Exception& e) {
- rb_raise(rb_eArgError, "Could not create descriptor matcher by given matcher type: %s", RSTRING_PTR(matcher_type));
- }
-
- std::vector<cv::KeyPoint> queryKeypoints;
- std::vector<std::vector<cv::KeyPoint> > trainKeypoints;
- featureDetector->detect(queryImage, queryKeypoints);
- featureDetector->detect(trainImages, trainKeypoints);
-
- cv::Mat queryDescriptors;
- std::vector<cv::Mat> trainDescriptors;
- descriptorExtractor->compute(queryImage, queryKeypoints, queryDescriptors);
- descriptorExtractor->compute(trainImages, trainKeypoints, trainDescriptors);
-
- std::vector<cv::DMatch> matches;
- descriptorMatcher->add(trainDescriptors);
- descriptorMatcher->train();
- descriptorMatcher->match(queryDescriptors, matches);
-
VALUE _matches = rb_hash_new();
- for (size_t i=0; i<matches.size(); i++) {
-   VALUE match = INT2FIX(matches[i].imgIdx);
-   VALUE count = rb_hash_lookup(_matches, match);
-   if (NIL_P(count)) {
-     count = INT2FIX(1);
-   }
-   else {
-     count = INT2FIX(FIX2INT(count) + 1);
+ try {
+   cv::Mat queryImage = CVMAT(self);
+   std::vector<cv::Mat> trainImages;
+ for(int i = 0, n = RARRAY_LEN(images); i < n; i++) {
+ trainImages.push_back(CVMAT_WITH_CHECK(RARRAY_PTR(images)[i]));
+ }
+
+ cv::Ptr<cv::FeatureDetector> featureDetector = cv::FeatureDetector::create(StringValueCStr(detector_type));
+ if (featureDetector.empty()) {
+ rb_raise(rb_eArgError, "Could not create feature detector by given detector type: %s", StringValueCStr(detector_type));
+ }
+ cv::Ptr<cv::DescriptorExtractor> descriptorExtractor = cv::DescriptorExtractor::create(StringValueCStr(descriptor_type));
+ if (descriptorExtractor.empty()) {
+ rb_raise(rb_eArgError, "Could not create descriptor extractor by given descriptor type: %s", StringValueCStr(descriptor_type));
+ }
+ cv::Ptr<cv::DescriptorMatcher> descriptorMatcher;
+ try {
+ descriptorMatcher = cv::DescriptorMatcher::create(StringValueCStr(matcher_type));
+ }
+ catch(cv::Exception& e) {
+ rb_raise(rb_eArgError, "Could not create descriptor matcher by given matcher type: %s", StringValueCStr(matcher_type));
+ }
+
+ std::vector<cv::KeyPoint> queryKeypoints;
+ std::vector<std::vector<cv::KeyPoint> > trainKeypoints;
+ featureDetector->detect(queryImage, queryKeypoints);
+ featureDetector->detect(trainImages, trainKeypoints);
+ cv::Mat queryDescriptors;
+ std::vector<cv::Mat> trainDescriptors;
+ descriptorExtractor->compute(queryImage, queryKeypoints, queryDescriptors);
+ descriptorExtractor->compute(trainImages, trainKeypoints, trainDescriptors);
+ std::vector<cv::DMatch> matches;
+ descriptorMatcher->add(trainDescriptors);
+ descriptorMatcher->train();
+ descriptorMatcher->match(queryDescriptors, matches);
+
+ for (size_t i = 0, n = matches.size(); i < n; i++) {
+ VALUE match = INT2FIX(matches[i].imgIdx);
+ VALUE count = rb_hash_lookup(_matches, match);
+ if (NIL_P(count)) {
+ count = INT2FIX(1);
+ } else {
+ count = INT2FIX(FIX2INT(count) + 1);
+ }
+ rb_hash_aset(_matches, match, count);
}
- rb_hash_aset(_matches, match, count);
}
+ catch (cv::Exception& e) {
+ raise_cverror(e);
+ }
+
return _matches;
}
diff --git a/ext/opencv/cvmat.h b/ext/opencv/cvmat.h
index a65bb2a..900be16 100644
--- a/ext/opencv/cvmat.h
+++ b/ext/opencv/cvmat.h
@@ -175,6 +175,7 @@ VALUE rb_quadrangle_sub_pix(int argc, VALUE *argv, VALUE self);
VALUE rb_resize(int argc, VALUE *argv, VALUE self);
VALUE rb_warp_affine(int argc, VALUE *argv, VALUE self);
VALUE rb_rotation_matrix2D(VALUE self, VALUE center, VALUE angle, VALUE scale);
+VALUE rb_get_perspective_transform(VALUE self, VALUE source, VALUE dest);
VALUE rb_warp_perspective(int argc, VALUE *argv, VALUE self);
VALUE rb_find_homograpy(int argc, VALUE *argv, VALUE self);
VALUE rb_remap(int argc, VALUE *argv, VALUE self);
diff --git a/test/test_cvmat_imageprocessing.rb b/test/test_cvmat_imageprocessing.rb
index e3bc9f0..05a4019 100755
--- a/test/test_cvmat_imageprocessing.rb
+++ b/test/test_cvmat_imageprocessing.rb
@@ -450,6 +450,40 @@ class TestCvMat_imageprocessing < OpenCVTestCase
# snap mat0, mat1, mat2, mat3, mat4
end
+ def test_get_perspective_transform
+ from = [
+ OpenCV::CvPoint2D32f.new(540, 382),
+ OpenCV::CvPoint2D32f.new(802, 400),
+ OpenCV::CvPoint2D32f.new(850, 731),
+ OpenCV::CvPoint2D32f.new(540, 731),
+ ]
+ to = [
+ OpenCV::CvPoint2D32f.new(0, 0),
+ OpenCV::CvPoint2D32f.new(233, 0),
+ OpenCV::CvPoint2D32f.new(233, 310),
+ OpenCV::CvPoint2D32f.new(0, 310),
+ ]
+ transform = OpenCV::CvMat.get_perspective_transform(from, to)
+ assert_equal 3, transform.rows
+ assert_equal 3, transform.columns
+ expected = [
+ 0.923332154750824,
+ 0.0,
+ 0.0,
+ 1.4432899320127035e-15,
+ 0.0,
+ 0.0,
+ -498.599365234375,
+ 0.0,
+ 0.0,
+ ]
+ 3.times do |i|
+ 3.times do |j|
+ assert_in_delta(expected.shift, transform[i][j], 0.001)
+ end
+ end
+ end
+
def test_rotation_matrix2D
mat1 = CvMat.rotation_matrix2D(CvPoint2D32f.new(10, 20), 60, 2.0)
expected = [1.0, 1.73205, -34.64102,
diff --git a/test/test_cvmat_matching.rb b/test/test_cvmat_matching.rb
index cd50016..27d8006 100755
--- a/test/test_cvmat_matching.rb
+++ b/test/test_cvmat_matching.rb
@@ -18,7 +18,7 @@ class TestCvMat_matching < OpenCVTestCase
end
def read_test_image(*path)
- IplImage.load File.join(data_dir, *path), CV_LOAD_IMAGE_GRAYSCALE
+ CvMat.load File.join(data_dir, *path), CV_LOAD_IMAGE_GRAYSCALE
end
def test_match_descriptors