
added error handling to some methods

Added error handling to the methods of the following classes (the try/catch pattern is sketched below the commit details):
 -CvBox2D
 -CvCapture
 -CvChain
 -CvCircle32f
 -CvConnectedComp
 -CvContour
 -CvFont
 -CvHaarclassifierCascade
 -CvHuMoments
 -CvLine
 -CvMoments
 -CvPoint
ser1zw 2011-07-22 00:47:54 +09:00
parent a7e180a5db
commit ea55e0d42e
14 changed files with 353 additions and 182 deletions
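The pattern applied throughout the diff is the same everywhere: the OpenCV call is wrapped in a C++ try/catch, and a caught cv::Exception is converted into a Ruby exception via the extension's raise_cverror() helper rather than aborting the interpreter. A minimal sketch of that wrapper (the rb_example_getter name is illustrative only; CVCAPTURE, raise_cverror and the CV_CAP_PROP_* constant are taken from the code below):

VALUE
rb_example_getter(VALUE self)
{
  double result = 0;
  try {
    // OpenCV C API calls may throw cv::Exception internally.
    result = cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FPS);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);  // raises on the Ruby side; does not return
  }
  return rb_float_new(result);
}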


@@ -120,25 +120,30 @@ cvcapture_free(void *ptr)
VALUE
rb_open(int argc, VALUE *argv, VALUE self)
{
VALUE device, i;
VALUE device, interface;
rb_scan_args(argc, argv, "01", &device);
CvCapture *capture = 0;
switch (TYPE(device)) {
case T_STRING:
capture = cvCaptureFromFile(StringValueCStr(device));
break;
case T_FIXNUM:
capture = cvCaptureFromCAM(FIX2INT(device));
break;
case T_SYMBOL:
i = rb_hash_aref(rb_const_get(rb_class(), rb_intern("INTERFACE")), device);
if (NIL_P(i))
rb_raise(rb_eArgError, "undefined interface.");
capture = cvCaptureFromCAM(NUM2INT(i));
break;
case T_NIL:
capture = cvCaptureFromCAM(CV_CAP_ANY);
break;
try {
switch (TYPE(device)) {
case T_STRING:
capture = cvCaptureFromFile(StringValueCStr(device));
break;
case T_FIXNUM:
capture = cvCaptureFromCAM(FIX2INT(device));
break;
case T_SYMBOL:
interface = rb_hash_aref(rb_const_get(rb_class(), rb_intern("INTERFACE")), device);
if (NIL_P(interface))
rb_raise(rb_eArgError, "undefined interface.");
capture = cvCaptureFromCAM(NUM2INT(interface));
break;
case T_NIL:
capture = cvCaptureFromCAM(CV_CAP_ANY);
break;
}
}
catch (cv::Exception& e) {
raise_cverror(e);
}
if (!capture)
rb_raise(rb_eStandardError, "Invalid capture format.");
@@ -161,7 +166,14 @@ rb_open(int argc, VALUE *argv, VALUE self)
VALUE
rb_grab(VALUE self)
{
return cvGrabFrame(CVCAPTURE(self)) ? Qtrue : Qfalse;
int grab = 0;
try {
grab = cvGrabFrame(CVCAPTURE(self));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return grab ? Qtrue : Qfalse;
}
/*
@@ -173,14 +185,21 @@ rb_grab(VALUE self)
VALUE
rb_retrieve(VALUE self)
{
IplImage *frame = cvRetrieveFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
VALUE image = cIplImage::new_object(cvSize(frame->width, frame->height), CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
VALUE image = Qnil;
try {
IplImage *frame = cvRetrieveFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
image = cIplImage::new_object(cvSize(frame->width, frame->height),
CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return image;
}
@@ -193,153 +212,181 @@ rb_retrieve(VALUE self)
VALUE
rb_query(VALUE self)
{
IplImage *frame = cvQueryFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
VALUE image = cIplImage::new_object(cvSize(frame->width, frame->height), CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
VALUE image = Qnil;
try {
IplImage *frame = cvQueryFrame(CVCAPTURE(self));
if (!frame)
return Qnil;
image = cIplImage::new_object(cvSize(frame->width, frame->height),
CV_MAKETYPE(CV_8U, frame->nChannels));
if (frame->origin == IPL_ORIGIN_TL)
cvCopy(frame, CVARR(image));
else
cvFlip(frame, CVARR(image));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return image;
}
VALUE
rb_get_capture_property(VALUE self, int id)
{
double result = 0;
try {
result = cvGetCaptureProperty(CVCAPTURE(self), id);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
VALUE
rb_set_capture_property(VALUE self, int id, VALUE value)
{
double result = 0;
try {
result = cvSetCaptureProperty(CVCAPTURE(self), id, NUM2DBL(value));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return rb_float_new(result);
}
/*
* Get film current position in milliseconds or video capture timestamp.
*/
VALUE
rb_get_millisecond(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_MSEC));
return rb_get_capture_property(self, CV_CAP_PROP_POS_MSEC);
}
/*
* Set film current position in milliseconds or video capture timestamp.
*/
VALUE
rb_set_millisecond(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_MSEC, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_MSEC, value);
}
/*
* Get 0-based index of the frame to be decoded/captured next
*/
VALUE
rb_get_frames(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_FRAMES));
return rb_get_capture_property(self, CV_CAP_PROP_POS_FRAMES);
}
/*
* Set 0-based index of the frame to be decoded/captured next
*/
VALUE
rb_set_frames(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_FRAMES, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_FRAMES, value);
}
/*
* Get relative position of video file (0 - start of the film, 1 - end of the film)
*/
VALUE
rb_get_avi_ratio(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_AVI_RATIO));
return rb_get_capture_property(self, CV_CAP_PROP_POS_AVI_RATIO);
}
/*
* Set relative position of video file (0 - start of the film, 1 - end of the film)
*/
VALUE
rb_set_avi_ratio(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_POS_AVI_RATIO, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_POS_AVI_RATIO, value);
}
/*
* Get size of frames in the video stream.
*/
VALUE
rb_get_size(VALUE self)
{
return cCvSize::new_object(cvSize((int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH),
(int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT)));
CvSize size;
try {
CvCapture* self_ptr = CVCAPTURE(self);
size = cvSize((int)cvGetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_WIDTH),
(int)cvGetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_HEIGHT));
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return cCvSize::new_object(size);
}
/*
* Set size of frames in the video stream.
*/
VALUE
rb_set_size(VALUE self, VALUE value)
{
double result;
if (cCvSize::rb_compatible_q(cCvSize::rb_class(), value)) {
int width = NUM2INT(rb_funcall(value, rb_intern("width"), 0));
int height = NUM2INT(rb_funcall(value, rb_intern("height"), 0));
cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH, width);
result = cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT, height);
double result = 0;
CvSize size = VALUE_TO_CVSIZE(value);
try {
CvCapture* self_ptr = CVCAPTURE(self);
cvSetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_WIDTH, size.width);
result = cvSetCaptureProperty(self_ptr, CV_CAP_PROP_FRAME_HEIGHT, size.height);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
else
raise_compatible_typeerror(value, cCvSize::rb_class());
return DBL2NUM(result);
}
/*
* Get width of frames in the video stream.
*/
VALUE
rb_get_width(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_WIDTH);
}
/*
* Set width of frames in the video stream.
*/
VALUE
rb_set_width(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_WIDTH, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FRAME_WIDTH, value);
}
/*
* Get height of frames in the video stream.
*/
VALUE
rb_get_height(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_HEIGHT);
}
/*
* Set height of frames in the video stream.
*/
VALUE
rb_set_height(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_HEIGHT, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FRAME_HEIGHT, value);
}
/*
* Get frame rate
*/
VALUE
rb_get_fps(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FPS));
return rb_get_capture_property(self, CV_CAP_PROP_FPS);
}
/*
* Set frame rate
*/
VALUE
rb_set_fps(VALUE self, VALUE value)
{
return rb_float_new(cvSetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FPS, NUM2DBL(value)));
return rb_set_capture_property(self, CV_CAP_PROP_FPS, value);
}
/*
* Get 4-character code of codec. see http://www.fourcc.org/
*/
@@ -351,106 +398,101 @@ rb_get_fourcc(VALUE self)
sprintf(str, "%s", (char*)&fourcc);
return rb_str_new2(str);
}
/*
* Get number of frames in video file.
*/
VALUE
rb_get_frame_count(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FRAME_COUNT));
return rb_get_capture_property(self, CV_CAP_PROP_FRAME_COUNT);
}
/*
* Get the format of the Mat objects returned by CvCapture#retrieve
*/
VALUE
rb_get_format(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_FORMAT));
return rb_get_capture_property(self, CV_CAP_PROP_FORMAT);
}
/*
* Get a backend-specific value indicating the current capture mode
*/
VALUE
rb_get_mode(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_MODE));
return rb_get_capture_property(self, CV_CAP_PROP_MODE);
}
/*
* Get brightness of the image (only for cameras)
*/
VALUE
rb_get_brightness(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_BRIGHTNESS));
return rb_get_capture_property(self, CV_CAP_PROP_BRIGHTNESS);
}
/*
* Get contrast of the image (only for cameras)
*/
VALUE
rb_get_contrast(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONTRAST));
return rb_get_capture_property(self, CV_CAP_PROP_CONTRAST);
}
/*
* Get saturation of the image (only for cameras)
*/
VALUE
rb_get_saturation(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_SATURATION));
return rb_get_capture_property(self, CV_CAP_PROP_SATURATION);
}
/*
* Get hue of the image (only for cameras)
*/
VALUE
rb_get_hue(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_HUE));
return rb_get_capture_property(self, CV_CAP_PROP_HUE);
}
/*
* Get gain of the image (only for cameras)
*/
VALUE
rb_get_gain(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_GAIN));
return rb_get_capture_property(self, CV_CAP_PROP_GAIN);
}
/*
* Get exposure (only for cameras)
*/
VALUE
rb_get_exposure(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_EXPOSURE));
return rb_get_capture_property(self, CV_CAP_PROP_EXPOSURE);
}
/*
* Get boolean flags indicating whether images should be converted to RGB
*/
VALUE
rb_get_convert_rgb(VALUE self)
{
int flag = (int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONVERT_RGB);
return (flag == 1) ? Qtrue : Qfalse;
int flag = 0;
try {
flag = (int)cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_CONVERT_RGB);
}
catch (cv::Exception& e) {
raise_cverror(e);
}
return flag ? Qtrue : Qfalse;
}
/*
* Get TOWRITE (note: only supported by DC1394 v 2.x backend currently)
*/
VALUE
rb_get_rectification(VALUE self)
{
return rb_float_new(cvGetCaptureProperty(CVCAPTURE(self), CV_CAP_PROP_RECTIFICATION));
return rb_get_capture_property(self, CV_CAP_PROP_RECTIFICATION);
}
__NAMESPACE_END_CVCAPTURE
__NAMESPACE_END_OPENCV
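The repeated property getters and setters are consolidated into the new rb_get_capture_property / rb_set_capture_property helpers, so the try/catch only has to live in one place. A hedged sketch of what a further accessor could look like when reusing the helper (rb_set_exposure is hypothetical and not part of this diff; CV_CAP_PROP_EXPOSURE is the same constant used by rb_get_exposure above):

/*
 * Set exposure (only for cameras) - hypothetical counterpart to rb_get_exposure
 */
VALUE
rb_set_exposure(VALUE self, VALUE value)
{
  return rb_set_capture_property(self, CV_CAP_PROP_EXPOSURE, value);
}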