* ext/json: merge JSON 1.7.7.
This includes security fix. [CVE-2013-0269]
https://github.com/flori/json/commit/d0a62f3ced7560daba2ad546d83f0479a5ae2cf2
https://groups.google.com/d/topic/rubyonrails-security/4_YvCpLzL58/discussion
git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@39208 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
This commit is contained in:
parent f1194eb9b0
commit 062d2ee6f7

20 changed files with 269 additions and 99 deletions
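
For context (a sketch, not taken from the diff, assuming JSON 1.7.7 semantics): the practical effect of the CVE-2013-0269 fix is that the parser no longer instantiates the class named in a "json_class" member unless the caller opts in, and the default nesting limit rises from 19 to 100.

    require 'json'
    require 'json/add/range'   # an addition is needed only for the opt-in round trip

    payload = (1..10).to_json  # => '{"json_class":"Range","a":[1,10,false]}'

    JSON.parse(payload)                              # plain Hash; nothing is instantiated by default
    JSON.parse(payload, :create_additions => true)   # => 1..10, only on explicit opt-in
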
@@ -1,3 +1,10 @@
+Tue Feb 12 12:02:35 2013 NARUSE, Yui <naruse@ruby-lang.org>
+
+* ext/json: merge JSON 1.7.7.
+  This includes security fix. [CVE-2013-0269]
+  https://github.com/flori/json/commit/d0a62f3ced7560daba2ad546d83f0479a5ae2cf2
+  https://groups.google.com/d/topic/rubyonrails-security/4_YvCpLzL58/discussion
+
 Mon Feb 11 23:08:48 2013 Tanaka Akira <akr@fsij.org>

 * configure.in: enable rb_cv_page_size_log test for MirOS BSD.

NEWS
@@ -229,6 +229,9 @@ with all sufficient information, see the ChangeLog file.
 * added IO#wait_writable method.
 * added IO#wait_readable method as alias of IO#wait.

+* json
+  * updated to 1.7.7.
+
 * net/http
 * new features:
 * Proxies are now automatically detected from the http_proxy environment

@@ -3,7 +3,6 @@
 #define _FBUFFER_H_

 #include "ruby.h"
-#include <assert.h>

 #ifndef RHASH_SIZE
 #define RHASH_SIZE(hsh) (RHASH(hsh)->tbl->num_entries)
@@ -166,11 +165,8 @@ static FBuffer *fbuffer_dup(FBuffer *fb)
 unsigned long len = fb->len;
 FBuffer *result;

-assert(len > 0);
-if (len > 0) {
-result = fbuffer_alloc(len);
-fbuffer_append(result, FBUFFER_PAIR(fb));
-}
+result = fbuffer_alloc(len);
+fbuffer_append(result, FBUFFER_PAIR(fb));
 return result;
 }

@@ -522,7 +522,7 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 unsigned long len;
 Check_Type(tmp, T_STRING);
 len = RSTRING_LEN(tmp);
-state->indent = fstrndup(RSTRING_PTR(tmp), len);
+state->indent = fstrndup(RSTRING_PTR(tmp), len + 1);
 state->indent_len = len;
 }
 tmp = rb_hash_aref(opts, ID2SYM(i_space));
@@ -530,7 +530,7 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 unsigned long len;
 Check_Type(tmp, T_STRING);
 len = RSTRING_LEN(tmp);
-state->space = fstrndup(RSTRING_PTR(tmp), len);
+state->space = fstrndup(RSTRING_PTR(tmp), len + 1);
 state->space_len = len;
 }
 tmp = rb_hash_aref(opts, ID2SYM(i_space_before));
@@ -538,7 +538,7 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 unsigned long len;
 Check_Type(tmp, T_STRING);
 len = RSTRING_LEN(tmp);
-state->space_before = fstrndup(RSTRING_PTR(tmp), len);
+state->space_before = fstrndup(RSTRING_PTR(tmp), len + 1);
 state->space_before_len = len;
 }
 tmp = rb_hash_aref(opts, ID2SYM(i_array_nl));
@@ -546,7 +546,7 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 unsigned long len;
 Check_Type(tmp, T_STRING);
 len = RSTRING_LEN(tmp);
-state->array_nl = fstrndup(RSTRING_PTR(tmp), len);
+state->array_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
 state->array_nl_len = len;
 }
 tmp = rb_hash_aref(opts, ID2SYM(i_object_nl));
@@ -554,11 +554,11 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 unsigned long len;
 Check_Type(tmp, T_STRING);
 len = RSTRING_LEN(tmp);
-state->object_nl = fstrndup(RSTRING_PTR(tmp), len);
+state->object_nl = fstrndup(RSTRING_PTR(tmp), len + 1);
 state->object_nl_len = len;
 }
 tmp = ID2SYM(i_max_nesting);
-state->max_nesting = 19;
+state->max_nesting = 100;
 if (option_given_p(opts, tmp)) {
 VALUE max_nesting = rb_hash_aref(opts, tmp);
 if (RTEST(max_nesting)) {
@@ -598,6 +598,18 @@ static VALUE cState_configure(VALUE self, VALUE opts)
 return self;
 }

+static void set_state_ivars(VALUE hash, VALUE state)
+{
+VALUE ivars = rb_obj_instance_variables(state);
+int i = 0;
+for (i = 0; i < RARRAY_LEN(ivars); i++) {
+VALUE key = rb_funcall(rb_ary_entry(ivars, i), i_to_s, 0);
+long key_len = RSTRING_LEN(key);
+VALUE value = rb_iv_get(state, StringValueCStr(key));
+rb_hash_aset(hash, rb_str_intern(rb_str_substr(key, 1, key_len - 1)), value);
+}
+}
+
 /*
 * call-seq: to_h
 *
@@ -608,6 +620,7 @@ static VALUE cState_to_h(VALUE self)
 {
 VALUE result = rb_hash_new();
 GET_STATE(self);
+set_state_ivars(result, self);
 rb_hash_aset(result, ID2SYM(i_indent), rb_str_new(state->indent, state->indent_len));
 rb_hash_aset(result, ID2SYM(i_space), rb_str_new(state->space, state->space_len));
 rb_hash_aset(result, ID2SYM(i_space_before), rb_str_new(state->space_before, state->space_before_len));
@@ -629,14 +642,33 @@ static VALUE cState_to_h(VALUE self)
 */
 static VALUE cState_aref(VALUE self, VALUE name)
 {
-GET_STATE(self);
+name = rb_funcall(name, i_to_s, 0);
 if (RTEST(rb_funcall(self, i_respond_to_p, 1, name))) {
 return rb_funcall(self, i_send, 1, name);
 } else {
-return Qnil;
+return rb_ivar_get(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)));
 }
 }

+/*
+* call-seq: []=(name, value)
+*
+* Set the attribute name to value.
+*/
+static VALUE cState_aset(VALUE self, VALUE name, VALUE value)
+{
+VALUE name_writer;
+
+name = rb_funcall(name, i_to_s, 0);
+name_writer = rb_str_cat2(rb_str_dup(name), "=");
+if (RTEST(rb_funcall(self, i_respond_to_p, 1, name_writer))) {
+return rb_funcall(self, i_send, 2, name_writer, value);
+} else {
+rb_ivar_set(self, rb_intern_str(rb_str_concat(rb_str_new2("@"), name)), value);
+}
+return Qnil;
+}
+
 static void generate_json_object(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
 {
 char *object_nl = state->object_nl;
@@ -908,7 +940,7 @@ static VALUE cState_initialize(int argc, VALUE *argv, VALUE self)
 {
 VALUE opts;
 GET_STATE(self);
-state->max_nesting = 19;
+state->max_nesting = 100;
 state->buffer_initial_length = FBUFFER_INITIAL_LENGTH_DEFAULT;
 rb_scan_args(argc, argv, "01", &opts);
 if (!NIL_P(opts)) cState_configure(self, opts);
@@ -970,7 +1002,7 @@ static VALUE cState_from_state_s(VALUE self, VALUE opts)
 static VALUE cState_indent(VALUE self)
 {
 GET_STATE(self);
-return state->indent ? rb_str_new2(state->indent) : rb_str_new2("");
+return state->indent ? rb_str_new(state->indent, state->indent_len) : rb_str_new2("");
 }

 /*
@@ -1007,7 +1039,7 @@ static VALUE cState_indent_set(VALUE self, VALUE indent)
 static VALUE cState_space(VALUE self)
 {
 GET_STATE(self);
-return state->space ? rb_str_new2(state->space) : rb_str_new2("");
+return state->space ? rb_str_new(state->space, state->space_len) : rb_str_new2("");
 }

 /*
@@ -1044,7 +1076,7 @@ static VALUE cState_space_set(VALUE self, VALUE space)
 static VALUE cState_space_before(VALUE self)
 {
 GET_STATE(self);
-return state->space_before ? rb_str_new2(state->space_before) : rb_str_new2("");
+return state->space_before ? rb_str_new(state->space_before, state->space_before_len) : rb_str_new2("");
 }

 /*
@@ -1081,7 +1113,7 @@ static VALUE cState_space_before_set(VALUE self, VALUE space_before)
 static VALUE cState_object_nl(VALUE self)
 {
 GET_STATE(self);
-return state->object_nl ? rb_str_new2(state->object_nl) : rb_str_new2("");
+return state->object_nl ? rb_str_new(state->object_nl, state->object_nl_len) : rb_str_new2("");
 }

 /*
@@ -1117,7 +1149,7 @@ static VALUE cState_object_nl_set(VALUE self, VALUE object_nl)
 static VALUE cState_array_nl(VALUE self)
 {
 GET_STATE(self);
-return state->array_nl ? rb_str_new2(state->array_nl) : rb_str_new2("");
+return state->array_nl ? rb_str_new(state->array_nl, state->array_nl_len) : rb_str_new2("");
 }

 /*
@@ -1327,7 +1359,9 @@ void Init_generator()
 rb_define_method(cState, "configure", cState_configure, 1);
 rb_define_alias(cState, "merge", "configure");
 rb_define_method(cState, "to_h", cState_to_h, 0);
+rb_define_alias(cState, "to_hash", "to_h");
 rb_define_method(cState, "[]", cState_aref, 1);
+rb_define_method(cState, "[]=", cState_aset, 2);
 rb_define_method(cState, "generate", cState_generate, 1);

 mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods");

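The generator hunks above give the State object Hash-like access ([], []= and a to_hash alias backed by instance variables) and raise its default max_nesting from 19 to 100. A rough sketch of the resulting behaviour (not from the diff, assuming the C generator built from this tree):

    require 'json'

    state = JSON.state.new           # generator State with the new defaults
    state.max_nesting                # => 100 (was 19)

    state[:indent] = '  '            # []= dispatches to the indent= writer when one exists
    state['indent']                  # => "  "; [] falls back to @-prefixed ivars otherwise
    state.to_h[:max_nesting]         # => 100; to_hash is now an alias of to_h
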
@@ -2,7 +2,6 @@
 #define _GENERATOR_H_

 #include <string.h>
-#include <assert.h>
 #include <math.h>
 #include <ctype.h>

@@ -14,6 +13,14 @@
 #include "re.h"
 #endif

+#ifndef rb_intern_str
+#define rb_intern_str(string) SYM2ID(rb_str_intern(string))
+#endif
+
+#ifndef rb_obj_instance_variables
+#define rb_obj_instance_variables(object) rb_funcall(object, rb_intern("instance_variables"), 0)
+#endif
+
 #define option_given_p(opts, key) RTEST(rb_funcall(opts, i_key_p, 1, key))

 /* unicode defintions */

@@ -4,10 +4,16 @@ end
 defined?(::BigDecimal) or require 'bigdecimal'

 class BigDecimal
+# Import a JSON Marshalled object.
+#
+# method used for JSON marshalling support.
 def self.json_create(object)
 BigDecimal._load object['b']
 end

+# Marshal the object to JSON.
+#
+# method used for JSON marshalling support.
 def as_json(*)
 {
 JSON.create_id => self.class.name,
@@ -15,6 +21,7 @@ class BigDecimal
 }
 end

+# return the JSON value
 def to_json(*)
 as_json.to_json
 end

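The BigDecimal addition above only gains documentation comments, but under the new parser defaults its round trip also needs an explicit opt-in. A small sketch (not from the diff), assuming json/add/bigdecimal from this tree:

    require 'json/add/bigdecimal'

    b    = BigDecimal('3.141', 23)
    json = b.to_json                             # '{"json_class":"BigDecimal","b":"..."}'

    JSON.parse(json)                             # plain Hash by default
    JSON.parse(json, :create_additions => true)  # the BigDecimal again
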
@@ -139,7 +139,7 @@ module JSON
 # keys:
 # * *max_nesting*: The maximum depth of nesting allowed in the parsed data
 # structures. Disable depth checking with :max_nesting => false. It defaults
-# to 19.
+# to 100.
 # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
 # defiance of RFC 4627 to be parsed by the Parser. This option defaults
 # to false.
@@ -199,7 +199,7 @@ module JSON
 # encountered. This options defaults to false.
 # * *max_nesting*: The maximum depth of nesting allowed in the data
 # structures from which JSON is to be generated. Disable depth checking
-# with :max_nesting => false, it defaults to 19.
+# with :max_nesting => false, it defaults to 100.
 #
 # See also the fast_generate for the fastest creation method with the least
 # amount of sanity checks, and the pretty_generate method for some
@@ -299,21 +299,28 @@ module JSON
 attr_accessor :load_default_options
 end
 self.load_default_options = {
-:max_nesting => false,
-:allow_nan => true,
-:quirks_mode => true,
+:max_nesting => false,
+:allow_nan => true,
+:quirks_mode => true,
+:create_additions => true,
 }

 # Load a ruby data structure from a JSON _source_ and return it. A source can
 # either be a string-like object, an IO-like object, or an object responding
 # to the read method. If _proc_ was given, it will be called with any nested
-# Ruby object as an argument recursively in depth first order. The default
-# options for the parser can be changed via the load_default_options method.
+# Ruby object as an argument recursively in depth first order. To modify the
+# default options pass in the optional _options_ argument as well.
 #
+# BEWARE: This method is meant to serialise data from trusted user input,
+# like from your own database server or clients under your control, it could
+# be dangerous to allow untrusted users to pass JSON sources into it. The
+# default options for the parser can be changed via the load_default_options
+# method.
+#
 # This method is part of the implementation of the load/dump interface of
 # Marshal and YAML.
-def load(source, proc = nil)
-opts = load_default_options
+def load(source, proc = nil, options = {})
+opts = load_default_options.merge options
 if source.respond_to? :to_str
 source = source.to_str
 elsif source.respond_to? :to_io

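JSON.load above gains an optional options argument that is merged over load_default_options, which now explicitly keeps :create_additions => true (hence the new BEWARE paragraph). A short usage sketch mirroring the new test_load_with_options test:

    require 'json'

    JSON.load('{"foo":"bar"}', nil, :symbolize_names => true)  # => {:foo=>"bar"}
    JSON.load_default_options[:create_additions]               # => true, so only load trusted input
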
@@ -5,12 +5,34 @@ module JSON
 class << self
 alias [] new

+def json_creatable?
+@json_creatable
+end
+
+attr_writer :json_creatable
+
 def json_create(data)
 data = data.dup
 data.delete JSON.create_id
 self[data]
 end

+def from_hash(object)
+case
+when object.respond_to?(:to_hash)
+result = new
+object.to_hash.each do |key, value|
+result[key] = from_hash(value)
+end
+result
+when object.respond_to?(:to_ary)
+object.to_ary.map { |a| from_hash(a) }
+else
+object
+end
+end
 end
+self.json_creatable = false
+
 def to_hash
 table

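JSON::GenericObject above gains an opt-in json_creatable flag (false by default, so parsing untrusted input can no longer create it) and a recursive from_hash constructor. A sketch of the intended use, mirroring the new tests:

    require 'json'
    require 'json/generic_object'

    o = JSON::GenericObject.from_hash(
      :foo => { :bar => { :baz => true }, :quux => [ { :foobar => true } ] })
    o.foo.bar.baz                           # => true
    o.foo.quux.first.foobar                 # => true

    JSON::GenericObject.json_creatable?     # => false until switched on explicitly
    JSON::GenericObject.json_creatable = true
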
@@ -1,6 +1,6 @@
 module JSON
 # JSON version
-VERSION = '1.7.5'
+VERSION = '1.7.7'
 VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc:
 VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
 VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:

@@ -1618,7 +1618,7 @@ static VALUE convert_encoding(VALUE source)
 * _opts_ can have the following keys:
 * * *max_nesting*: The maximum depth of nesting allowed in the parsed data
 * structures. Disable depth checking with :max_nesting => false|nil|0, it
-* defaults to 19.
+* defaults to 100.
 * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
 * defiance of RFC 4627 to be parsed by the Parser. This option defaults to
 * false.
@@ -1655,7 +1655,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 json->max_nesting = 0;
 }
 } else {
-json->max_nesting = 19;
+json->max_nesting = 100;
 }
 tmp = ID2SYM(i_allow_nan);
 if (option_given_p(opts, tmp)) {
@@ -1680,7 +1680,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 if (option_given_p(opts, tmp)) {
 json->create_additions = RTEST(rb_hash_aref(opts, tmp));
 } else {
-json->create_additions = 1;
+json->create_additions = 0;
 }
 tmp = ID2SYM(i_create_id);
 if (option_given_p(opts, tmp)) {
@@ -1709,7 +1709,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 }
 }
 } else {
-json->max_nesting = 19;
+json->max_nesting = 100;
 json->allow_nan = 0;
 json->create_additions = 1;
 json->create_id = rb_funcall(mJSON, i_create_id, 0);

@@ -602,7 +602,7 @@ static VALUE convert_encoding(VALUE source)
 * _opts_ can have the following keys:
 * * *max_nesting*: The maximum depth of nesting allowed in the parsed data
 * structures. Disable depth checking with :max_nesting => false|nil|0, it
-* defaults to 19.
+* defaults to 100.
 * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
 * defiance of RFC 4627 to be parsed by the Parser. This option defaults to
 * false.
@@ -639,7 +639,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 json->max_nesting = 0;
 }
 } else {
-json->max_nesting = 19;
+json->max_nesting = 100;
 }
 tmp = ID2SYM(i_allow_nan);
 if (option_given_p(opts, tmp)) {
@@ -664,7 +664,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 if (option_given_p(opts, tmp)) {
 json->create_additions = RTEST(rb_hash_aref(opts, tmp));
 } else {
-json->create_additions = 1;
+json->create_additions = 0;
 }
 tmp = ID2SYM(i_create_id);
 if (option_given_p(opts, tmp)) {
@@ -693,7 +693,7 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
 }
 }
 } else {
-json->max_nesting = 19;
+json->max_nesting = 100;
 json->allow_nan = 0;
 json->create_additions = 1;
 json->create_id = rb_funcall(mJSON, i_create_id, 0);

@@ -1 +1 @@
[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]
[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
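
The fixture above grows from 20 to 101 nested arrays because the default max_nesting moves from 19 to 100. Roughly, parsing now behaves like this (a sketch, not taken from the test suite):

    require 'json'

    JSON.parse('[' * 100 + '1' + ']' * 100)    # 100 levels: accepted under the new default
    begin
      JSON.parse('[' * 101 + '1' + ']' * 101)  # 101 levels: exceeds the default limit
    rescue JSON::NestingError
      # raised: nesting of 101 is too deep
    end
    JSON.parse('[' * 101 + '1' + ']' * 101, :max_nesting => false)  # depth checking disabled
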
@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')
@@ -329,12 +329,12 @@ class TestJSON < Test::Unit::TestCase
 def test_generate_core_subclasses_with_new_to_json
 obj = SubHash2["foo" => SubHash2["bar" => true]]
 obj_json = JSON(obj)
-obj_again = JSON(obj_json)
+obj_again = JSON.parse(obj_json, :create_additions => true)
 assert_kind_of SubHash2, obj_again
 assert_kind_of SubHash2, obj_again['foo']
 assert obj_again['foo']['bar']
 assert_equal obj, obj_again
-assert_equal ["foo"], JSON(JSON(SubArray2["foo"]))
+assert_equal ["foo"], JSON(JSON(SubArray2["foo"]), :create_additions => true)
 end

 def test_generate_core_subclasses_with_default_to_json
@@ -446,12 +446,12 @@ EOT
 assert_raises(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 }
 assert_raises(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse }
 assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2)
too_deep = '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]'
too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
 too_deep_ary = eval too_deep
 assert_raises(JSON::NestingError) { JSON.parse too_deep }
 assert_raises(JSON::NestingError) { JSON.parser.new(too_deep).parse }
-assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 19 }
-ok = JSON.parse too_deep, :max_nesting => 20
+assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 100 }
+ok = JSON.parse too_deep, :max_nesting => 101
 assert_equal too_deep_ary, ok
 ok = JSON.parse too_deep, :max_nesting => nil
 assert_equal too_deep_ary, ok
@@ -462,8 +462,8 @@ EOT
 assert_raises(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 }
 assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2)
 assert_raises(JSON::NestingError) { JSON.generate too_deep_ary }
-assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 19 }
-ok = JSON.generate too_deep_ary, :max_nesting => 20
+assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 100 }
+ok = JSON.generate too_deep_ary, :max_nesting => 101
 assert_equal too_deep, ok
 ok = JSON.generate too_deep_ary, :max_nesting => nil
 assert_equal too_deep, ok
@@ -493,19 +493,25 @@ EOT
 assert_equal nil, JSON.load('')
 end

+def test_load_with_options
+small_hash = JSON("foo" => 'bar')
+symbol_hash = { :foo => 'bar' }
+assert_equal symbol_hash, JSON.load(small_hash, nil, :symbolize_names => true)
+end
+
 def test_dump
too_deep = '[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]'
too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
 assert_equal too_deep, JSON.dump(eval(too_deep))
 assert_kind_of String, Marshal.dump(eval(too_deep))
-assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 19) }
-assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 19) }
-assert_equal too_deep, JSON.dump(eval(too_deep), 20)
-assert_kind_of String, Marshal.dump(eval(too_deep), 20)
+assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 100) }
+assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 100) }
+assert_equal too_deep, JSON.dump(eval(too_deep), 101)
+assert_kind_of String, Marshal.dump(eval(too_deep), 101)
 output = StringIO.new
 JSON.dump(eval(too_deep), output)
 assert_equal too_deep, output.string
 output = StringIO.new
-JSON.dump(eval(too_deep), output, 20)
+JSON.dump(eval(too_deep), output, 101)
 assert_equal too_deep, output.string
 end

@@ -73,11 +73,19 @@ class TestJSONAddition < Test::Unit::TestCase
 a = A.new(666)
 assert A.json_creatable?
 json = generate(a)
-a_again = JSON.parse(json)
+a_again = JSON.parse(json, :create_additions => true)
 assert_kind_of a.class, a_again
 assert_equal a, a_again
 end

+def test_extended_json_default
+a = A.new(666)
+assert A.json_creatable?
+json = generate(a)
+a_hash = JSON.parse(json)
+assert_kind_of Hash, a_hash
+end
+
 def test_extended_json_disabled
 a = A.new(666)
 assert A.json_creatable?
@@ -104,7 +112,7 @@ class TestJSONAddition < Test::Unit::TestCase
 c = C.new
 assert !C.json_creatable?
 json = generate(c)
-assert_raises(ArgumentError, NameError) { JSON.parse(json) }
+assert_raises(ArgumentError, NameError) { JSON.parse(json, :create_additions => true) }
 end

 def test_raw_strings
@@ -122,7 +130,7 @@ class TestJSONAddition < Test::Unit::TestCase
 assert_match(/\A\{.*\}\z/, json)
 assert_match(/"json_class":"String"/, json)
assert_match(/"raw":\[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255\]/, json)
-raw_again = JSON.parse(json)
+raw_again = JSON.parse(json, :create_additions => true)
 assert_equal raw, raw_again
 end

@@ -130,17 +138,17 @@ class TestJSONAddition < Test::Unit::TestCase

 def test_core
 t = Time.now
-assert_equal t, JSON(JSON(t))
+assert_equal t, JSON(JSON(t), :create_additions => true)
 d = Date.today
-assert_equal d, JSON(JSON(d))
+assert_equal d, JSON(JSON(d), :create_additions => true)
 d = DateTime.civil(2007, 6, 14, 14, 57, 10, Rational(1, 12), 2299161)
-assert_equal d, JSON(JSON(d))
-assert_equal 1..10, JSON(JSON(1..10))
-assert_equal 1...10, JSON(JSON(1...10))
-assert_equal "a".."c", JSON(JSON("a".."c"))
-assert_equal "a"..."c", JSON(JSON("a"..."c"))
+assert_equal d, JSON(JSON(d), :create_additions => true)
+assert_equal 1..10, JSON(JSON(1..10), :create_additions => true)
+assert_equal 1...10, JSON(JSON(1...10), :create_additions => true)
+assert_equal "a".."c", JSON(JSON("a".."c"), :create_additions => true)
+assert_equal "a"..."c", JSON(JSON("a"..."c"), :create_additions => true)
 s = MyJsonStruct.new 4711, 'foot'
-assert_equal s, JSON(JSON(s))
+assert_equal s, JSON(JSON(s), :create_additions => true)
 struct = Struct.new :foo, :bar
 s = struct.new 4711, 'foot'
 assert_raises(JSONError) { JSON(s) }
@@ -148,41 +156,41 @@ class TestJSONAddition < Test::Unit::TestCase
 raise TypeError, "test me"
 rescue TypeError => e
 e_json = JSON.generate e
-e_again = JSON e_json
+e_again = JSON e_json, :create_additions => true
 assert_kind_of TypeError, e_again
 assert_equal e.message, e_again.message
 assert_equal e.backtrace, e_again.backtrace
 end
-assert_equal(/foo/, JSON(JSON(/foo/)))
-assert_equal(/foo/i, JSON(JSON(/foo/i)))
+assert_equal(/foo/, JSON(JSON(/foo/), :create_additions => true))
+assert_equal(/foo/i, JSON(JSON(/foo/i), :create_additions => true))
 end

 def test_utc_datetime
 now = Time.now
-d = DateTime.parse(now.to_s) # usual case
-assert_equal d, JSON.parse(d.to_json)
+d = DateTime.parse(now.to_s, :create_additions => true) # usual case
+assert_equal d, JSON.parse(d.to_json, :create_additions => true)
 d = DateTime.parse(now.utc.to_s) # of = 0
-assert_equal d, JSON.parse(d.to_json)
+assert_equal d, JSON.parse(d.to_json, :create_additions => true)
 d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(1,24))
-assert_equal d, JSON.parse(d.to_json)
+assert_equal d, JSON.parse(d.to_json, :create_additions => true)
 d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(12,24))
-assert_equal d, JSON.parse(d.to_json)
+assert_equal d, JSON.parse(d.to_json, :create_additions => true)
 end

 def test_rational_complex
-assert_equal Rational(2, 9), JSON(JSON(Rational(2, 9)))
-assert_equal Complex(2, 9), JSON(JSON(Complex(2, 9)))
+assert_equal Rational(2, 9), JSON.parse(JSON(Rational(2, 9)), :create_additions => true)
+assert_equal Complex(2, 9), JSON.parse(JSON(Complex(2, 9)), :create_additions => true)
 end

 def test_bigdecimal
-assert_equal BigDecimal('3.141', 23), JSON(JSON(BigDecimal('3.141', 23)))
-assert_equal BigDecimal('3.141', 666), JSON(JSON(BigDecimal('3.141', 666)))
+assert_equal BigDecimal('3.141', 23), JSON(JSON(BigDecimal('3.141', 23)), :create_additions => true)
+assert_equal BigDecimal('3.141', 666), JSON(JSON(BigDecimal('3.141', 666)), :create_additions => true)
 end

 def test_ostruct
 o = OpenStruct.new
 # XXX this won't work; o.foo = { :bar => true }
 o.foo = { 'bar' => true }
-assert_equal o, JSON(JSON(o))
+assert_equal o, JSON.parse(JSON(o), :create_additions => true)
 end
 end

@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')

@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')

@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')
@@ -130,7 +130,7 @@ EOT
 :quirks_mode => false,
 :depth => 0,
 :indent => " ",
-:max_nesting => 19,
+:max_nesting => 100,
 :object_nl => "\n",
 :space => " ",
 :space_before => "",
@@ -147,7 +147,7 @@ EOT
 :quirks_mode => false,
 :depth => 0,
 :indent => "",
-:max_nesting => 19,
+:max_nesting => 100,
 :object_nl => "",
 :space => "",
 :space_before => "",
@@ -200,7 +200,7 @@ EOT
 s = JSON.state.new
 assert_equal 0, s.depth
 assert_raises(JSON::NestingError) { ary.to_json(s) }
-assert_equal 19, s.depth
+assert_equal 100, s.depth
 end

 def test_buffer_initial_length
@@ -228,6 +228,30 @@ EOT
 EOS
 end if GC.respond_to?(:stress=)

+def test_configure_using_configure_and_merge
+numbered_state = {
+:indent => "1",
+:space => '2',
+:space_before => '3',
+:object_nl => '4',
+:array_nl => '5'
+}
+state1 = JSON.state.new
+state1.merge(numbered_state)
+assert_equal '1', state1.indent
+assert_equal '2', state1.space
+assert_equal '3', state1.space_before
+assert_equal '4', state1.object_nl
+assert_equal '5', state1.array_nl
+state2 = JSON.state.new
+state2.configure(numbered_state)
+assert_equal '1', state2.indent
+assert_equal '2', state2.space
+assert_equal '3', state2.space_before
+assert_equal '4', state2.object_nl
+assert_equal '5', state2.array_nl
+end
+
 if defined?(JSON::Ext::Generator)
 def test_broken_bignum # [ruby-core:38867]
 pid = fork do
@@ -249,4 +273,29 @@ EOT
 # introducing race conditions of tests are run in parallel
 end
 end
+
+def test_hash_likeness_set_symbol
+state = JSON.state.new
+assert_equal nil, state[:foo]
+assert_equal nil.class, state[:foo].class
+assert_equal nil, state['foo']
+state[:foo] = :bar
+assert_equal :bar, state[:foo]
+assert_equal :bar, state['foo']
+state_hash = state.to_hash
+assert_kind_of Hash, state_hash
+assert_equal :bar, state_hash[:foo]
+end
+
+def test_hash_likeness_set_string
+state = JSON.state.new
+assert_equal nil, state[:foo]
+assert_equal nil, state['foo']
+state['foo'] = :bar
+assert_equal :bar, state[:foo]
+assert_equal :bar, state['foo']
+state_hash = state.to_hash
+assert_kind_of Hash, state_hash
+assert_equal :bar, state_hash[:foo]
+end
 end

@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')
@@ -20,16 +20,41 @@ class TestJSONGenericObject < Test::Unit::TestCase
 end

 def test_generate_json
-assert_equal @go, JSON(JSON(@go))
+switch_json_creatable do
+assert_equal @go, JSON(JSON(@go), :create_additions => true)
+end
 end

 def test_parse_json
-assert_equal @go, l = JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }')
-assert_equal 1, l.a
-assert_equal @go, l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject)
-assert_equal 1, l.a
-assert_equal GenericObject[:a => GenericObject[:b => 2]],
-l = JSON('{ "a": { "b": 2 } }', :object_class => GenericObject)
-assert_equal 2, l.a.b
+assert_kind_of Hash, JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+switch_json_creatable do
+assert_equal @go, l = JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+assert_equal 1, l.a
+assert_equal @go, l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject)
+assert_equal 1, l.a
+assert_equal GenericObject[:a => GenericObject[:b => 2]],
+l = JSON('{ "a": { "b": 2 } }', :object_class => GenericObject)
+assert_equal 2, l.a.b
+end
 end

+def test_from_hash
+result = GenericObject.from_hash(
+:foo => { :bar => { :baz => true }, :quux => [ { :foobar => true } ] })
+assert_kind_of GenericObject, result.foo
+assert_kind_of GenericObject, result.foo.bar
+assert_equal true, result.foo.bar.baz
+assert_kind_of GenericObject, result.foo.quux.first
+assert_equal true, result.foo.quux.first.foobar
+assert_equal true, GenericObject.from_hash(true)
+end
+
+private
+
+def switch_json_creatable
+JSON::GenericObject.json_creatable = true
+yield
+ensure
+JSON::GenericObject.json_creatable = false
+end
 end

@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')
@@ -27,14 +27,13 @@ class TestJSONStringMatching < Test::Unit::TestCase
 t = TestTime.new
 t_json = [ t ].to_json
 assert_equal [ t ],
-JSON.parse(t_json,
+JSON.parse(t_json, :create_additions => true,
 :match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
 assert_equal [ t.strftime('%FT%T%z') ],
-JSON.parse(t_json,
+JSON.parse(t_json, :create_additions => true,
 :match_string => { /\A\d{3}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
 assert_equal [ t.strftime('%FT%T%z') ],
 JSON.parse(t_json,
-:match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime },
-:create_additions => false)
+:match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
 end
 end

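The test above reflects that :match_string conversion is now also gated behind :create_additions => true. A sketch with a hypothetical stand-in class (the real test uses its own TestTime helper):

    require 'json'
    require 'time'

    class StampedTime < Time
      def self.json_create(string)   # called for parsed strings matching the pattern
        parse(string)
      end
    end

    t_json  = '["2013-02-12T12:02:35+0900"]'
    pattern = { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => StampedTime }

    JSON.parse(t_json, :match_string => pattern)                             # strings stay plain strings
    JSON.parse(t_json, :match_string => pattern, :create_additions => true)  # json_create is consulted
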
@@ -1,5 +1,5 @@
 #!/usr/bin/env ruby
-# -*- coding: utf-8 -*-
+# encoding: utf-8

 require 'test/unit'
 require File.join(File.dirname(__FILE__), 'setup_variant')