diff --git a/CHANGES b/CHANGES
index f0e6c6eb..bba29be6 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,7 @@
+2010-05-05 (1.4.3)
+  * Fixed some test assertions, from Ruby r27587 and r27590, patch by nobu.
+  * Fixed issue http://github.com/flori/json/issues/#issue/20 reported by
+    electronicwhisper@github. Thx!
 2010-04-26 (1.4.2)
   * Applied patch from naruse Yui NARUSE to make building with Microsoft
     Visual C possible again.
diff --git a/VERSION b/VERSION
index 9df886c4..428b770e 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.4.2
+1.4.3
diff --git a/ext/json/ext/generator/generator.c b/ext/json/ext/generator/generator.c
index b398cac3..f9a7d060 100644
--- a/ext/json/ext/generator/generator.c
+++ b/ext/json/ext/generator/generator.c
@@ -738,150 +738,132 @@ static VALUE cState_aref(VALUE self, VALUE name)
 static void generate_json(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj, long depth)
 {
     VALUE tmp;
-    switch (TYPE(obj)) {
-        case T_HASH:
-            {
-                char *object_nl = state->object_nl;
-                long object_nl_len = state->object_nl_len;
-                char *indent = state->indent;
-                long indent_len = state->indent_len;
-                long max_nesting = state->max_nesting;
-                char *delim = FBUFFER_PTR(state->object_delim);
-                long delim_len = FBUFFER_LEN(state->object_delim);
-                char *delim2 = FBUFFER_PTR(state->object_delim2);
-                long delim2_len = FBUFFER_LEN(state->object_delim2);
-                int i, j;
-                VALUE key, key_to_s, keys;
-                depth++;
-                if (max_nesting != 0 && depth > max_nesting) {
-                    fbuffer_free(buffer);
-                    rb_raise(eNestingError, "nesting of %ld is too deep", depth);
-                }
-                fbuffer_append_char(buffer, '{');
-                keys = rb_funcall(obj, rb_intern("keys"), 0);
-                for(i = 0; i < RARRAY_LEN(keys); i++) {
-                    if (i > 0) fbuffer_append(buffer, delim, delim_len);
-                    if (object_nl) {
-                        fbuffer_append(buffer, object_nl, object_nl_len);
-                    }
-                    if (indent) {
-                        for (j = 0; j < depth; j++) {
-                            fbuffer_append(buffer, indent, indent_len);
-                        }
-                    }
-                    key = rb_ary_entry(keys, i);
-                    key_to_s = rb_funcall(key, i_to_s, 0);
-                    Check_Type(key_to_s, T_STRING);
-                    generate_json(buffer, Vstate, state, key_to_s, depth);
-                    fbuffer_append(buffer, delim2, delim2_len);
-                    generate_json(buffer, Vstate, state, rb_hash_aref(obj, key), depth);
-                }
-                depth--;
-                if (object_nl) {
-                    fbuffer_append(buffer, object_nl, object_nl_len);
-                    if (indent) {
-                        for (j = 0; j < depth; j++) {
-                            fbuffer_append(buffer, indent, indent_len);
-                        }
-                    }
+    VALUE klass = CLASS_OF(obj);
+    if (klass == rb_cHash) {
+        char *object_nl = state->object_nl;
+        long object_nl_len = state->object_nl_len;
+        char *indent = state->indent;
+        long indent_len = state->indent_len;
+        long max_nesting = state->max_nesting;
+        char *delim = FBUFFER_PTR(state->object_delim);
+        long delim_len = FBUFFER_LEN(state->object_delim);
+        char *delim2 = FBUFFER_PTR(state->object_delim2);
+        long delim2_len = FBUFFER_LEN(state->object_delim2);
+        int i, j;
+        VALUE key, key_to_s, keys;
+        depth++;
+        if (max_nesting != 0 && depth > max_nesting) {
+            fbuffer_free(buffer);
+            rb_raise(eNestingError, "nesting of %ld is too deep", depth);
+        }
+        fbuffer_append_char(buffer, '{');
+        keys = rb_funcall(obj, rb_intern("keys"), 0);
+        for(i = 0; i < RARRAY_LEN(keys); i++) {
+            if (i > 0) fbuffer_append(buffer, delim, delim_len);
+            if (object_nl) {
+                fbuffer_append(buffer, object_nl, object_nl_len);
+            }
+            if (indent) {
+                for (j = 0; j < depth; j++) {
+                    fbuffer_append(buffer, indent, indent_len);
                 }
-                fbuffer_append_char(buffer, '}');
             }
-            break;
-        case T_ARRAY:
-            {
-                char *array_nl = state->array_nl;
-                long array_nl_len = state->array_nl_len;
-                char *indent = state->indent;
-                long indent_len = state->indent_len;
-                long max_nesting = state->max_nesting;
-                char *delim = FBUFFER_PTR(state->array_delim);
-                long delim_len = FBUFFER_LEN(state->array_delim);
-                int i, j;
-                depth++;
-                if (max_nesting != 0 && depth > max_nesting) {
-                    fbuffer_free(buffer);
-                    rb_raise(eNestingError, "nesting of %ld is too deep", depth);
+            key = rb_ary_entry(keys, i);
+            key_to_s = rb_funcall(key, i_to_s, 0);
+            Check_Type(key_to_s, T_STRING);
+            generate_json(buffer, Vstate, state, key_to_s, depth);
+            fbuffer_append(buffer, delim2, delim2_len);
+            generate_json(buffer, Vstate, state, rb_hash_aref(obj, key), depth);
+        }
+        depth--;
+        if (object_nl) {
+            fbuffer_append(buffer, object_nl, object_nl_len);
+            if (indent) {
+                for (j = 0; j < depth; j++) {
+                    fbuffer_append(buffer, indent, indent_len);
                 }
-                fbuffer_append_char(buffer, '[');
-                if (array_nl) fbuffer_append(buffer, array_nl, array_nl_len);
-                for(i = 0; i < RARRAY_LEN(obj); i++) {
-                    if (i > 0) fbuffer_append(buffer, delim, delim_len);
-                    if (indent) {
-                        for (j = 0; j < depth; j++) {
-                            fbuffer_append(buffer, indent, indent_len);
-                        }
-                    }
-                    generate_json(buffer, Vstate, state, rb_ary_entry(obj, i), depth);
+            }
+        }
+        fbuffer_append_char(buffer, '}');
+    } else if (klass == rb_cArray) {
+        char *array_nl = state->array_nl;
+        long array_nl_len = state->array_nl_len;
+        char *indent = state->indent;
+        long indent_len = state->indent_len;
+        long max_nesting = state->max_nesting;
+        char *delim = FBUFFER_PTR(state->array_delim);
+        long delim_len = FBUFFER_LEN(state->array_delim);
+        int i, j;
+        depth++;
+        if (max_nesting != 0 && depth > max_nesting) {
+            fbuffer_free(buffer);
+            rb_raise(eNestingError, "nesting of %ld is too deep", depth);
+        }
+        fbuffer_append_char(buffer, '[');
+        if (array_nl) fbuffer_append(buffer, array_nl, array_nl_len);
+        for(i = 0; i < RARRAY_LEN(obj); i++) {
+            if (i > 0) fbuffer_append(buffer, delim, delim_len);
+            if (indent) {
+                for (j = 0; j < depth; j++) {
+                    fbuffer_append(buffer, indent, indent_len);
                 }
-                depth--;
-                if (array_nl) {
-                    fbuffer_append(buffer, array_nl, array_nl_len);
-                    if (indent) {
-                        for (j = 0; j < depth; j++) {
-                            fbuffer_append(buffer, indent, indent_len);
-                        }
-                    }
+            }
+            generate_json(buffer, Vstate, state, rb_ary_entry(obj, i), depth);
+        }
+        depth--;
+        if (array_nl) {
+            fbuffer_append(buffer, array_nl, array_nl_len);
+            if (indent) {
+                for (j = 0; j < depth; j++) {
+                    fbuffer_append(buffer, indent, indent_len);
                 }
-                fbuffer_append_char(buffer, ']');
             }
-            break;
-        case T_STRING:
-            fbuffer_append_char(buffer, '"');
+        }
+        fbuffer_append_char(buffer, ']');
+    } else if (klass == rb_cString) {
+        fbuffer_append_char(buffer, '"');
 #ifdef HAVE_RUBY_ENCODING_H
-            obj = rb_funcall(obj, i_encode, 1, CEncoding_UTF_8);
+        obj = rb_funcall(obj, i_encode, 1, CEncoding_UTF_8);
 #endif
-            if (state->ascii_only) {
-                convert_UTF8_to_JSON_ASCII(buffer, obj);
-            } else {
-                convert_UTF8_to_JSON(buffer, obj);
-            }
-            fbuffer_append_char(buffer, '"');
-            break;
-        case T_NIL:
-            fbuffer_append(buffer, "null", 4);
-            break;
-        case T_FALSE:
-            fbuffer_append(buffer, "false", 5);
-            break;
-        case T_TRUE:
-            fbuffer_append(buffer, "true", 4);
-            break;
-        case T_FIXNUM:
-            fbuffer_append_long(buffer, FIX2LONG(obj));
-            break;
-        case T_BIGNUM:
-            tmp = rb_funcall(obj, i_to_s, 0);
-            fbuffer_append(buffer, RSTRING_PAIR(tmp));
-            break;
-        case T_FLOAT:
-            {
-                double value = RFLOAT_VALUE(obj);
-                char allow_nan = state->allow_nan;
-                tmp = rb_funcall(obj, i_to_s, 0);
-                if (!allow_nan) {
-                    if (isinf(value)) {
-                        fbuffer_free(buffer);
-                        rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp));
-                    } else if (isnan(value)) {
-                        fbuffer_free(buffer);
-                        rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp));
-                    }
-                }
-                fbuffer_append(buffer, RSTRING_PAIR(tmp));
-            }
-            break;
-        default:
-            if (rb_respond_to(obj, i_to_json)) {
-                tmp = rb_funcall(obj, i_to_json, 2, Vstate, INT2FIX(depth + 1));
-                Check_Type(tmp, T_STRING);
-                fbuffer_append(buffer, RSTRING_PAIR(tmp));
-            } else {
-                tmp = rb_funcall(obj, i_to_s, 0);
-                Check_Type(tmp, T_STRING);
-                generate_json(buffer, Vstate, state, tmp, depth + 1);
+        if (state->ascii_only) {
+            convert_UTF8_to_JSON_ASCII(buffer, obj);
+        } else {
+            convert_UTF8_to_JSON(buffer, obj);
+        }
+        fbuffer_append_char(buffer, '"');
+    } else if (obj == Qnil) {
+        fbuffer_append(buffer, "null", 4);
+    } else if (obj == Qfalse) {
+        fbuffer_append(buffer, "false", 5);
+    } else if (obj == Qtrue) {
+        fbuffer_append(buffer, "true", 4);
+    } else if (klass == rb_cFixnum) {
+        fbuffer_append_long(buffer, FIX2LONG(obj));
+    } else if (klass == rb_cBignum) {
+        tmp = rb_funcall(obj, i_to_s, 0);
+        fbuffer_append(buffer, RSTRING_PAIR(tmp));
+    } else if (klass == rb_cFloat) {
+        double value = RFLOAT_VALUE(obj);
+        char allow_nan = state->allow_nan;
+        tmp = rb_funcall(obj, i_to_s, 0);
+        if (!allow_nan) {
+            if (isinf(value)) {
+                fbuffer_free(buffer);
+                rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp));
+            } else if (isnan(value)) {
+                fbuffer_free(buffer);
+                rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp));
             }
-            break;
+        }
+        fbuffer_append(buffer, RSTRING_PAIR(tmp));
+    } else if (rb_respond_to(obj, i_to_json)) {
+        tmp = rb_funcall(obj, i_to_json, 2, Vstate, INT2FIX(depth + 1));
+        Check_Type(tmp, T_STRING);
+        fbuffer_append(buffer, RSTRING_PAIR(tmp));
+    } else {
+        tmp = rb_funcall(obj, i_to_s, 0);
+        Check_Type(tmp, T_STRING);
+        generate_json(buffer, Vstate, state, tmp, depth + 1);
     }
 }

diff --git a/lib/json/version.rb b/lib/json/version.rb
index de7690b5..a06d1578 100644
--- a/lib/json/version.rb
+++ b/lib/json/version.rb
@@ -1,6 +1,6 @@
 module JSON
   # JSON version
-  VERSION = '1.4.2'
+  VERSION = '1.4.3'
   VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc:
   VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
   VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
diff --git a/tests/test_json.rb b/tests/test_json.rb
index f5a432f8..19b742f0 100755
--- a/tests/test_json.rb
+++ b/tests/test_json.rb
@@ -173,7 +173,18 @@ def test_parse_object
     assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } '))
   end

-  class SubHash < Hash; end
+  class SubHash < Hash
+    def to_json(*a)
+      {
+        JSON.create_id => self.class.name,
+      }.merge(self).to_json(*a)
+    end
+
+    def self.json_create(o)
+      o.delete JSON.create_id
+      new.merge(o)
+    end
+  end

   def test_parse_object_custom_class
     res = parse('{}', :object_class => SubHash)
@@ -181,6 +192,16 @@ def test_parse_object_custom_class
     assert_equal(SubHash, res.class)
   end

+  def test_generation_of_core_subclasses
+    obj = SubHash.new.merge( "foo" => SubHash.new.merge("bar" => true))
+    obj_json = JSON(obj)
+    obj_again = JSON(obj_json)
+    assert_kind_of SubHash, obj_again
+    assert_kind_of SubHash, obj_again['foo']
+    assert obj_again['foo']['bar']
+    assert_equal obj, obj_again
+  end
+
   def test_parser_reset
     parser = Parser.new(@json)
     assert_equal(@hash, parser.parse)
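Note on the generator change above: generate_json() now dispatches on CLASS_OF(obj) instead of TYPE(obj), so only instances of the exact core classes take the C fast path, while subclasses of Hash, Array, String and friends fall through to the rb_respond_to(obj, i_to_json) branch and keep their custom to_json. A rough Ruby sketch of the behavior this enables follows; the SubHash class mirrors the one added to tests/test_json.rb, and on json releases newer than 1.4.x the parse step needs :create_additions => true.

require 'json'

class SubHash < Hash
  def to_json(*a)
    # Tag the payload with json_class so it can be revived on parse.
    { JSON.create_id => self.class.name }.merge(self).to_json(*a)
  end

  def self.json_create(o)
    o.delete JSON.create_id
    new.merge(o)
  end
end

obj = SubHash.new.merge("foo" => "bar")   # Hash#merge preserves the subclass

# 1.4.2 sent SubHash into the T_HASH branch and emitted a plain {"foo":"bar"},
# ignoring the custom to_json; with this patch the json_class tag is included.
json = JSON.generate(obj)

# Round-tripping revives the subclass (1.4.x parser defaults; newer versions
# require JSON.parse(json, :create_additions => true)).
p JSON(json).class   # => SubHash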