
added error handling to some methods

Added error handling to the methods of the following classes (a sketch of the try/catch wrapping pattern follows the commit metadata below):
 -CvBox2D
 -CvCapture
 -CvChain
 -CvCircle32f
 -CvConnectedComp
 -CvContour
 -CvFont
 -CvHaarClassifierCascade
 -CvHuMoments
 -CvLine
 -CvMoments
 -CvPoint
ser1zw 2011-07-22 00:47:54 +09:00
parent a7e180a5db
commit ea55e0d42e
14 changed files with 353 additions and 182 deletions
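Every converted method follows the same shape: the OpenCV C API call is wrapped in a try block, and any cv::Exception is translated into a Ruby exception through the extension's raise_cverror() helper, so a failure surfaces as a rescuable Ruby error instead of aborting the interpreter. Below is a minimal sketch of that shape, assuming the extension's usual "opencv.h" header and the existing raise_cverror() and CVCAPTURE() helpers; the getter function itself is a made-up illustration, not a method added by this commit.

#include "opencv.h"  // extension header: Ruby C API, OpenCV, raise_cverror(), CVCAPTURE() (assumed)

// Hypothetical property getter showing the error-handling pattern used throughout this commit:
// run the OpenCV call inside try, convert a thrown cv::Exception into a Ruby exception.
static VALUE
rb_get_property_example(VALUE self, int prop_id)
{
  double result = 0;
  try {
    // CVCAPTURE() unwraps the Ruby object to the underlying CvCapture*
    result = cvGetCaptureProperty(CVCAPTURE(self), prop_id);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);  // raises on the Ruby side; control does not return here
  }
  return rb_float_new(result);
}

The capture property accessors in the diff below apply the same idea once, in the shared rb_get_capture_property / rb_set_capture_property helpers, and the individual getters and setters (fps, width, height, brightness, and so on) simply delegate to them.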

View file

@@ -126,8 +126,6 @@ rb_size(VALUE self)
VALUE
rb_set_size(VALUE self, VALUE value)
{
if (!cCvSize2D32f::rb_compatible_q(rb_klass, value))
rb_raise(rb_eArgError, "object is not compatible %s.", rb_class2name(cCvSize2D32f::rb_class()));
CVBOX2D(self)->size = VALUE_TO_CVSIZE2D32F(value);
return self;
}
@@ -167,9 +165,14 @@ rb_points(VALUE self)
{
const int n = 4;
CvPoint2D32f p[n];
cvBoxPoints(*CVBOX2D(self), p);
try {
cvBoxPoints(*CVBOX2D(self), p);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
VALUE points = rb_ary_new2(n);
for(int i = 0; i < n; i++)
for (int i = 0; i < n; ++i)
rb_ary_store(points, i, cCvPoint2D32f::new_object(p[i]));
return points;
}

View file

@@ -120,25 +120,30 @@ cvcapture_free(void *ptr)
VALUE
rb_open(int argc, VALUE *argv, VALUE self)
{
VALUE device, i;
VALUE device, interface;
rb_scan_args(argc, argv, "01", &device);
CvCapture *capture = 0;
switch (TYPE(device)) {
case T_STRING:
capture = cvCaptureFromFile(StringValueCStr(device));
break;
case T_FIXNUM:
capture = cvCaptureFromCAM(FIX2INT(device));
break;
case T_SYMBOL:
i = rb_hash_aref(rb_const_get(rb_class(), rb_intern("INTERFACE")), device);
if (NIL_P(i))
rb_raise(rb_eArgError, "undefined interface.");
capture = cvCaptureFromCAM(NUM2INT(i));
break;
case T_NIL:
capture = cvCaptureFromCAM(CV_CAP_ANY);
break;
try {
switch (TYPE(device)) {
case T_STRING:
capture = cvCaptureFromFile(StringValueCStr(device));
break;
case T_FIXNUM:
capture = cvCaptureFromCAM(FIX2INT(device));
break;
case T_SYMBOL:
interface = rb_hash_aref(rb_const_get(rb_class(), rb_intern("INTERFACE")), device);
if (NIL_P(interface))
rb_raise(rb_eArgError, "undefined interface.");
capture = cvCaptureFromCAM(NUM2INT(interface));
break;
case T_NIL:
capture = cvCaptureFromCAM(CV_CAP_ANY);
break;
}
}
catch (cv::Exception& e) {
raise_cverror(e);
}
if (!capture)
rb_raise(rb_eStandardError, "Invalid capture format.");
@@ -161,7 +166,14 @@ rb_open(int argc, VALUE *argv, VALUE self)
VALUE
rb_grab(VALUE self)
{
return cvGrabFrame(CVCAPTURE(self)) ? Qtrue : Qfalse;
int grab = 0;
try {
grab = cvGrabFrame(CVCAPTURE(self));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return grab ? Qtrue : Qfalse;
}
/*
@@ -173,14 +185,21 @@ rb_grab(VALUE self)
VALUE
rb_retrieve(VALUE self)
{
IplImage *frame = cvRetrieveFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
VALUE image = cIplImage::new_object(cvSize(frame->width, frame->height), CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
VALUE image = Qnil;
try {
IplImage *frame = cvRetrieveFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
image = cIplImage::new_object(cvSize(frame->width, frame->height),
CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return image;
}
@@ -193,153 +212,181 @@ rb_retrieve(VALUE self)
VALUE
rb_query(VALUE self)
{
IplImage *frame = cvQueryFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
VALUE image = cIplImage::new_object(cvSize(frame->width, frame->height), CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
VALUE image = Qnil;
try {
IplImage *frame = cvQueryFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
image = cIplImage::new_object(cvSize(frame->width, frame->height),
CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return image;
}
VALUE
rb_get_capture_property(VALUE self, int id)
{
double result = 0;
try {
result = cvGetCaptureProperty(CVCAPTURE(self), id);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
VALUE
rb_set_capture_property(VALUE self, int id, VALUE value)
{
double result = 0;
try {
result = cvSetCaptureProperty(CVCAPTURE(self), id, NUM2DBL(value));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
/*
* Get film current position in milliseconds or video capture timestamp.
*/
VALUE
rb_get_millisecond(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_MSEC));
return rb_get_capture_property(self, CV_CAP_PROP_POS_MSEC);
}
/*
* Set film current position in milliseconds or video capture timestamp.
*/
VALUE
rb_set_millisecond(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_MSEC, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_MSEC, value);
}
/*
* Get 0-based index of the frame to be decoded/captured next
*/
VALUE
rb_get_frames(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_FRAMES));
return rb_get_capture_property(self, CV_CAP_PROP_POS_FRAMES);
}
/*
* Set 0-based index of the frame to be decoded/captured next
*/
VALUE
rb_set_frames(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_FRAMES, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_FRAMES, value);
}
/*
* Get relative position of video file (0 - start of the film, 1 - end of the film)
*/
VALUE
rb_get_avi_ratio(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_AVI_RATIO));
return rb_get_capture_property(self, CV_CAP_PROP_POS_AVI_RATIO);
}
/*
* Set relative position of video file (0 - start of the film, 1 - end of the film)
*/
VALUE
rb_set_avi_ratio(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_AVI_RATIO, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_AVI_RATIO, value);
}
/*
* Get size of frames in the video stream.
*/
VALUE
rb_get_size(VALUE self)
{
return cCvSize::new_object(cvSize((int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH),
(int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT)));
CvSize size;
try {
CvCapture* self_ptr = CVCAPTURE(self);
size = cvSize((int)cvGetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_WIDTH),
(int)cvGetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_HEIGHT));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvSize::new_object(size);
}
/*
* Set size of frames in the video stream.
*/
VALUE
rb_set_size(VALUE self, VALUE value)
{
double result;
if (cCvSize::rb_compatible_q(cCvSize::rb_class(), value)) {
int width = NUM2INT(rb_funcall(value, rb_intern("width"), 0));
int height = NUM2INT(rb_funcall(value, rb_intern("height"), 0));
cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH, width);
result = cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT, height);
double result = 0;
CvSize size = VALUE_TO_CVSIZE(value);
try {
CvCapture* self_ptr = CVCAPTURE(self);
cvSetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_WIDTH, size.width);
result = cvSetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_HEIGHT, size.height);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
else
raise_compatible_typeerror(value, cCvSize::rb_class());
return DBL2NUM(result);
}
/*
* Get width of frames in the video stream.
*/
VALUE
rb_get_width(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_WIDTH);
}
/*
* Set width of frames in the video stream.
*/
VALUE
rb_set_width(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FRAME_WIDTH, value);
}
/*
* Get height of frames in the video stream.
*/
VALUE
rb_get_height(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_HEIGHT);
}
/*
* Set height of frames in the video stream.
*/
VALUE
rb_set_height(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FRAME_HEIGHT, value);
}
/*
* Get frame rate
*/
VALUE
rb_get_fps(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FPS));
return rb_get_capture_property(self, CV_CAP_PROP_FPS);
}
/*
* Set frame rate
*/
VALUE
rb_set_fps(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FPS, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FPS, value);
}
/*
* Get 4-character code of codec. See http://www.fourcc.org/
*/
@@ -351,106 +398,101 @@ rb_get_fourcc(VALUE self)
sprintf(str, "%s", (char*)&fourcc);
return rb_str_new2(str);
}
/*
* Get number of frames in video file.
*/
VALUE
rb_get_frame_count(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_COUNT));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_COUNT);
}
/*
* Get the format of the Mat objects returned by CvCapture#retrieve
*/
VALUE
rb_get_format(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FORMAT));
return rb_get_capture_property(self, CV_CAP_PROP_FORMAT);
}
/*
* Get a backend-specific value indicating the current capture mode
*/
VALUE
rb_get_mode(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_MODE));
return rb_get_capture_property(self, CV_CAP_PROP_MODE);
}
/*
* Get brightness of the image (only for cameras)
*/
VALUE
rb_get_brightness(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_BRIGHTNESS));
return rb_get_capture_property(self, CV_CAP_PROP_BRIGHTNESS);
}
/*
* Get contrast of the image (only for cameras)
*/
VALUE
rb_get_contrast(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONTRAST));
return rb_get_capture_property(self, CV_CAP_PROP_CONTRAST);
}
/*
* Get saturation of the image (only for cameras)
*/
VALUE
rb_get_saturation(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_SATURATION));
return rb_get_capture_property(self, CV_CAP_PROP_SATURATION);
}
/*
* Get hue of the image (only for cameras)
*/
VALUE
rb_get_hue(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_HUE));
return rb_get_capture_property(self, CV_CAP_PROP_HUE);
}
/*
* Get gain of the image (only for cameras)
*/
VALUE
rb_get_gain(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_GAIN));
return rb_get_capture_property(self, CV_CAP_PROP_GAIN);
}
/*
* Get exposure (only for cameras)
*/
VALUE
rb_get_exposure(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_EXPOSURE));
return rb_get_capture_property(self, CV_CAP_PROP_EXPOSURE);
}
/*
* Get boolean flags indicating whether images should be converted to RGB
*/
VALUE
rb_get_convert_rgb(VALUE self)
{
int flag = (int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONVERT_RGB);
return (flag == 1) ? Qtrue : Qfalse;
int flag = 0;
try {
flag = (int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONVERT_RGB);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return flag ? Qtrue : Qfalse;
}
/*
* Get TOWRITE (note: only supported by DC1394 v 2.x backend currently)
*/
VALUE
rb_get_rectification(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_RECTIFICATION));
return rb_get_capture_property(self, CV_CAP_PROP_RECTIFICATION);
}
__NAMESPACE_END_CVCAPTURE
__NAMESPACE_END_OPENCV

View file

@@ -12,7 +12,7 @@
#include "opencv.h"
#define __NAMESPACE_BEGIN_CVCAPTURE namespace cCvCapture{
#define __NAMESPACE_BEGIN_CVCAPTURE namespace cCvCapture {
#define __NAMESPACE_END_CVCAPTURE }
__NAMESPACE_BEGIN_OPENCV

View file

@@ -92,9 +92,13 @@ rb_initialize(int argc, VALUE *argv, VALUE self)
}
else
storage = rb_cvCreateMemStorage(0);
DATA_PTR(self) = (CvChain*)cvCreateSeq(CV_SEQ_ELTYPE_CODE, sizeof(CvChain),
sizeof(char), storage);
try {
DATA_PTR(self) = (CvChain*)cvCreateSeq(CV_SEQ_ELTYPE_CODE, sizeof(CvChain),
sizeof(char), storage);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return self;
}
@@ -136,10 +140,15 @@ rb_codes(VALUE self)
CvChainPtReader reader;
int total = chain->total;
VALUE ary = rb_ary_new2(total);
cvStartReadChainPoints(chain, &reader);
for (int i = 0; i < total; ++i) {
CV_READ_SEQ_ELEM(reader.code, (*((CvSeqReader*)&(reader))));
rb_ary_store(ary, i, CHR2FIX(reader.code));
try {
cvStartReadChainPoints(chain, &reader);
for (int i = 0; i < total; ++i) {
CV_READ_SEQ_ELEM(reader.code, (*((CvSeqReader*)&(reader))));
rb_ary_store(ary, i, CHR2FIX(reader.code));
}
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return ary;
}
@@ -158,10 +167,15 @@ rb_points(VALUE self)
CvPoint p = chain->origin;
int total = chain->total;
VALUE ary = rb_ary_new2(total);
cvStartReadChainPoints(chain, &reader);
for (int i = 0; i < total; ++i) {
CV_READ_CHAIN_POINT(p, reader);
rb_ary_store(ary, i, cCvPoint::new_object(p));
try {
cvStartReadChainPoints(chain, &reader);
for (int i = 0; i < total; ++i) {
CV_READ_CHAIN_POINT(p, reader);
rb_ary_store(ary, i, cCvPoint::new_object(p));
}
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return ary;
}
@@ -211,9 +225,14 @@ VALUE
new_object()
{
VALUE storage = cCvMemStorage::new_object();
CvSeq *seq = cvCreateSeq(CV_SEQ_CHAIN_CONTOUR, sizeof(CvChain), sizeof(char), CVMEMSTORAGE(storage));
VALUE object = cCvSeq::new_sequence(cCvChain::rb_class(), seq, T_FIXNUM, storage);
return object;
CvSeq *seq = NULL;
try {
seq = cvCreateSeq(CV_SEQ_CHAIN_CONTOUR, sizeof(CvChain), sizeof(char), CVMEMSTORAGE(storage));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvSeq::new_sequence(cCvChain::rb_class(), seq, T_FIXNUM, storage);
}
__NAMESPACE_END_CVCHAIN

View file

@@ -105,9 +105,14 @@ rb_initialize(int argc, VALUE *argv, VALUE self)
}
else
storage = rb_cvCreateMemStorage(0);
DATA_PTR(self) = (CvContour*)cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvContour),
sizeof(CvPoint), storage);
try {
DATA_PTR(self) = (CvContour*)cvCreateSeq(CV_SEQ_ELTYPE_POINT, sizeof(CvContour),
sizeof(CvPoint), storage);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return self;
}
@@ -180,7 +185,14 @@ rb_approx_poly(int argc, VALUE *argv, VALUE self)
VALUE
rb_bounding_rect(VALUE self)
{
return cCvRect::new_object(cvBoundingRect(CVCONTOUR(self), 1));
CvRect rect;
try {
rect = cvBoundingRect(CVCONTOUR(self), 1);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvRect::new_object(rect);
}
/*
@@ -199,7 +211,13 @@ rb_create_tree(int argc, VALUE *argv, VALUE self)
VALUE threshold, storage;
rb_scan_args(argc, argv, "01", &threshold);
storage = cCvMemStorage::new_object();
CvContourTree *tree = cvCreateContourTree(CVSEQ(self), CVMEMSTORAGE(storage), IF_DBL(threshold, 0.0));
CvContourTree *tree = NULL;
try {
tree = cvCreateContourTree(CVSEQ(self), CVMEMSTORAGE(storage), IF_DBL(threshold, 0.0));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvSeq::new_sequence(cCvContourTree::rb_class(), (CvSeq*)tree, cCvPoint::rb_class(), storage);
}
@@ -212,7 +230,13 @@ rb_create_tree(int argc, VALUE *argv, VALUE self)
VALUE
rb_in_q(VALUE self, VALUE point)
{
double n = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 0);
double n = 0;
try {
n = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 0);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return n == 0 ? Qnil : n > 0 ? Qtrue : Qfalse;
}
@@ -225,7 +249,14 @@ rb_in_q(VALUE self, VALUE point)
VALUE
rb_measure_distance(VALUE self, VALUE point)
{
return rb_float_new(cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 1));
double distance = 0;
try {
distance = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 1);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(distance);
}
/*
@@ -248,8 +279,13 @@ rb_point_polygon_test(VALUE self, VALUE point, VALUE measure_dist)
else
measure_dist_flag = NUM2INT(measure_dist);
dist = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), measure_dist_flag);
try {
dist = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), measure_dist_flag);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
/* cvPointPolygonTest returns 100, -100 or 0 when measure_dist = 0 */
if ((!measure_dist_flag) && ((int)dist) != 0)
dist = (dist > 0) ? 1 : -1;

View file

@@ -128,13 +128,18 @@ rb_initialize(int argc, VALUE *argv, VALUE self)
}
font_option = FONT_OPTION(font_option);
cvInitFont(CVFONT(self),
(FIX2INT(face) | FO_ITALIC(font_option)),
FO_HSCALE(font_option),
FO_VSCALE(font_option),
FO_SHEAR(font_option),
FO_THICKNESS(font_option),
FO_LINE_TYPE(font_option));
try {
cvInitFont(CVFONT(self),
(FIX2INT(face) | FO_ITALIC(font_option)),
FO_HSCALE(font_option),
FO_VSCALE(font_option),
FO_SHEAR(font_option),
FO_THICKNESS(font_option),
FO_LINE_TYPE(font_option));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return self;
}

View file

@@ -77,8 +77,14 @@ cvhaarclassifiercascade_free(void* ptr)
VALUE
rb_load(VALUE klass, VALUE path)
{
CvHaarClassifierCascade *cascade = (CvHaarClassifierCascade*)cvLoad(StringValueCStr(path), 0, 0, 0);
if(!CV_IS_HAAR_CLASSIFIER(cascade))
CvHaarClassifierCascade *cascade = NULL;
try {
cascade = (CvHaarClassifierCascade*)cvLoad(StringValueCStr(path), 0, 0, 0);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
if (!CV_IS_HAAR_CLASSIFIER(cascade))
rb_raise(rb_eArgError, "invalid format haar classifier cascade file.");
return Data_Wrap_Struct(klass, 0, cvhaarclassifiercascade_free, cascade);
}
@@ -120,9 +126,6 @@ rb_detect_objects(int argc, VALUE *argv, VALUE self)
VALUE image, options;
rb_scan_args(argc, argv, "11", &image, &options);
if (!rb_obj_is_kind_of(image, cCvMat::rb_class()))
raise_typeerror(image, cCvMat::rb_class());
double scale_factor;
int flags, min_neighbors;
CvSize min_size, max_size;
@@ -145,12 +148,18 @@ rb_detect_objects(int argc, VALUE *argv, VALUE self)
storage_val = CHECK_CVMEMSTORAGE(LOOKUP_CVMETHOD(options, "storage"));
}
CvSeq *seq = cvHaarDetectObjects(CVMAT(image), CVHAARCLASSIFIERCASCADE(self), CVMEMSTORAGE(storage_val),
scale_factor, min_neighbors, flags, min_size, max_size);
VALUE result = cCvSeq::new_sequence(cCvSeq::rb_class(), seq, cCvAvgComp::rb_class(), storage_val);
if (rb_block_given_p()) {
for(int i = 0; i < seq->total; ++i)
rb_yield(REFER_OBJECT(cCvAvgComp::rb_class(), cvGetSeqElem(seq, i), storage_val));
VALUE result = Qnil;
try {
CvSeq *seq = cvHaarDetectObjects(CVMAT_WITH_CHECK(image), CVHAARCLASSIFIERCASCADE(self), CVMEMSTORAGE(storage_val),
scale_factor, min_neighbors, flags, min_size, max_size);
result = cCvSeq::new_sequence(cCvSeq::rb_class(), seq, cCvAvgComp::rb_class(), storage_val);
if (rb_block_given_p()) {
for(int i = 0; i < seq->total; ++i)
rb_yield(REFER_OBJECT(cCvAvgComp::rb_class(), cvGetSeqElem(seq, i), storage_val));
}
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return result;
}

View file

@@ -28,7 +28,8 @@ VALUE rb_load(VALUE klass, VALUE path);
VALUE rb_detect_objects(int argc, VALUE *argv, VALUE self);
__NAMESPACE_END_CVHAARCLASSIFERCASCADE
inline CvHaarClassifierCascade *CVHAARCLASSIFIERCASCADE(VALUE object) {
inline CvHaarClassifierCascade*
CVHAARCLASSIFIERCASCADE(VALUE object) {
CvHaarClassifierCascade *ptr;
Data_Get_Struct(object, CvHaarClassifierCascade, ptr);
return ptr;

View file

@@ -85,11 +85,12 @@ rb_allocate(VALUE klass)
VALUE
rb_initialize(VALUE self, VALUE src_moments)
{
if (rb_obj_is_kind_of(src_moments, cCvMoments::rb_class()))
try {
cvGetHuMoments(CVMOMENTS(src_moments), CVHUMOMENTS(self));
else
rb_raise(rb_eTypeError, "argument 1 (src_moments) should be %s.",
rb_class2name(cCvMoments::rb_class()));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return self;
}
@@ -124,8 +125,13 @@ rb_to_ary(VALUE self)
VALUE
new_object(CvMoments *src_moments)
{
VALUE object = rb_allocate(rb_class());
cvGetHuMoments(src_moments, CVHUMOMENTS(object));
VALUE object = rb_allocate(rb_klass);
try {
cvGetHuMoments(src_moments, CVHUMOMENTS(object));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return object;
}

View file

@@ -95,8 +95,14 @@ rb_initialize(int argc, VALUE *argv, VALUE self)
VALUE src, is_binary;
rb_scan_args(argc, argv, "02", &src, &is_binary);
if (!NIL_P(src)) {
if (rb_obj_is_kind_of(src, cCvMat::rb_class()) || rb_obj_is_kind_of(src, cCvSeq::rb_class()))
cvMoments(CVARR(src), CVMOMENTS(self), TRUE_OR_FALSE(is_binary, 0));
if (rb_obj_is_kind_of(src, cCvMat::rb_class()) || rb_obj_is_kind_of(src, cCvSeq::rb_class())) {
try {
cvMoments(CVARR(src), CVMOMENTS(self), TRUE_OR_FALSE(is_binary, 0));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
}
else
rb_raise(rb_eTypeError, "argument 1 (src) should be %s or %s.",
rb_class2name(cCvMat::rb_class()), rb_class2name(cCvSeq::rb_class()));
@@ -138,7 +144,14 @@ CVMOMENTS_ACCESSOR(inv_sqrt_m00);
VALUE
rb_spatial(VALUE self, VALUE x_order, VALUE y_order)
{
return rb_float_new(cvGetSpatialMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order)));
double result = 0;
try {
result = cvGetSpatialMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
/*
@@ -154,7 +167,14 @@ rb_spatial(VALUE self, VALUE x_order, VALUE y_order)
VALUE
rb_central(VALUE self, VALUE x_order, VALUE y_order)
{
return rb_float_new(cvGetCentralMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order)));
double result = 0;
try {
result = cvGetCentralMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
/*
@@ -168,7 +188,14 @@ rb_central(VALUE self, VALUE x_order, VALUE y_order)
VALUE
rb_normalized_central(VALUE self, VALUE x_order, VALUE y_order)
{
return rb_float_new(cvGetNormalizedCentralMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order)));
double result = 0;
try {
result = cvGetNormalizedCentralMoment(CVMOMENTS(self), NUM2INT(x_order), NUM2INT(y_order));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
/*
@@ -203,11 +230,18 @@ VALUE
rb_gravity_center(VALUE self)
{
CvMoments *moments = CVMOMENTS(self);
double
m00 = cvGetSpatialMoment(moments, 0, 0),
m10 = cvGetSpatialMoment(moments, 1, 0),
CvPoint2D32f point;
double m00 = 0, m01 = 0, m10 = 0;
try {
m00 = cvGetSpatialMoment(moments, 0, 0);
m10 = cvGetSpatialMoment(moments, 1, 0);
m01 = cvGetSpatialMoment(moments, 0, 1);
return cCvPoint2D32f::new_object(cvPoint2D32f(m10 / m00, m01 / m00));
point = cvPoint2D32f(m10 / m00, m01 / m00);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvPoint2D32f::new_object(point);
}
/*
@@ -220,12 +254,17 @@ VALUE
rb_angle(VALUE self)
{
CvMoments *moments = CVMOMENTS(self);
double
m11 = cvGetCentralMoment(moments, 1, 1),
m20 = cvGetCentralMoment(moments, 2, 0),
m02 = cvGetCentralMoment(moments, 0, 2),
mangle = 0.5 * atan(2 * m11 / (m20 - m02));
if(cvIsNaN(mangle) || cvIsInf(mangle))
double m11 = 0, m20 = 0, m02 = 0;
try {
m11 = cvGetCentralMoment(moments, 1, 1);
m20 = cvGetCentralMoment(moments, 2, 0);
m02 = cvGetCentralMoment(moments, 0, 2);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
double mangle = 0.5 * atan(2 * m11 / (m20 - m02));
if (cvIsNaN(mangle) || cvIsInf(mangle))
return Qnil;
else
return rb_float_new(mangle);
@@ -234,8 +273,13 @@ rb_angle(VALUE self)
VALUE
new_object(CvArr *arr, int is_binary = 0)
{
VALUE object = rb_allocate(rb_class());
cvMoments(arr, CVMOMENTS(object), is_binary);
VALUE object = rb_allocate(rb_klass);
try {
cvMoments(arr, CVMOMENTS(object), is_binary);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return object;
}

View file

@@ -62,6 +62,14 @@ CVMOMENTS(VALUE object)
return ptr;
}
inline CvMoments*
CVMOMENTS_WITH_CHECK(VALUE object)
{
if (!rb_obj_is_kind_of(object, cCvMoments::rb_class()))
raise_typeerror(object, cCvMoments::rb_class());
return CVMOMENTS(object);
}
__NAMESPACE_END_OPENCV
#endif // RUBY_OPENCV_CVMOMENTS_H

View file

@@ -104,24 +104,19 @@ rb_allocate(VALUE klass)
VALUE
rb_initialize(int argc, VALUE *argv, VALUE self)
{
VALUE obj, x, y;
CvPoint* self_ptr = CVPOINT(self);
switch (argc) {
case 0:
break;
case 1:
obj = argv[0];
if (rb_compatible_q(rb_klass, obj)) {
CVPOINT(self)->x = NUM2INT(rb_funcall(rb_funcall(obj, rb_intern("x"), 0), rb_intern("to_i"), 0));
CVPOINT(self)->y = NUM2INT(rb_funcall(rb_funcall(obj, rb_intern("y"), 0), rb_intern("to_i"), 0));
}
else {
rb_raise(rb_eArgError, "object is not compatible %s.", rb_class2name(rb_klass));
}
case 1: {
CvPoint point = VALUE_TO_CVPOINT(argv[0]);
self_ptr->x = point.x;
self_ptr->y = point.y;
break;
}
case 2:
x = argv[0], y = argv[1];
CVPOINT(self)->x = NUM2INT(x);
CVPOINT(self)->y = NUM2INT(y);
self_ptr->x = NUM2INT(argv[0]);
self_ptr->y = NUM2INT(argv[1]);
break;
default:
rb_raise(rb_eArgError, "wrong number of arguments (%d for 0..2)", argc);
@@ -207,7 +202,8 @@ rb_to_s(VALUE self)
VALUE
rb_to_ary(VALUE self)
{
return rb_ary_new3(2, rb_x(self), rb_y(self));
CvPoint* self_ptr = CVPOINT(self);
return rb_ary_new3(2, INT2NUM(self_ptr->x), INT2NUM(self_ptr->y));
}
VALUE

View file

@@ -39,13 +39,15 @@ VALUE new_object(CvPoint point);
__NAMESPACE_END_CVPOINT
inline CvPoint *CVPOINT(VALUE object){
inline CvPoint*
CVPOINT(VALUE object){
CvPoint *ptr;
Data_Get_Struct(object, CvPoint, ptr);
return ptr;
}
inline CvPoint VALUE_TO_CVPOINT(VALUE object){
inline CvPoint
VALUE_TO_CVPOINT(VALUE object){
if (cCvPoint::rb_compatible_q(cCvPoint::rb_class(), object)) {
return cvPoint(NUM2INT(rb_funcall(object, rb_intern("x"), 0)),
NUM2INT(rb_funcall(object, rb_intern("y"), 0)));

View file

@@ -47,8 +47,8 @@ class TestCvPoint < OpenCVTestCase
assert_equal(10, point.x)
assert_equal(20, point.y)
assert_raise(ArgumentError) {
CvPoint.new('string')
assert_raise(TypeError) {
CvPoint.new(DUMMY_OBJ)
}
assert_raise(ArgumentError) {
CvPoint.new(1, 2, 3)