* ext/json/*, test/json/*: Update json-2.0.1.
  Changes of 2.0.0: https://github.com/flori/json/blob/f679ebd0c69a94e3e70a897ac9a229f5779c2ee1/CHANGES.md#2015-09-11-200
  Changes of 2.0.1: https://github.com/flori/json/blob/f679ebd0c69a94e3e70a897ac9a229f5779c2ee1/CHANGES.md#2016-07-01-201
  [Feature #12542][ruby-dev:49706][fix GH-1395]

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@55576 b2dd03c8-39d4-4d8f-98ff-823fe69b080e

commit 1130658738, parent 5eff15d1bd
@@ -1,3 +1,10 @@
+Tue Jul 5 20:49:30 2016  SHIBATA Hiroshi  <hsbt@ruby-lang.org>
+
+	* ext/json/*, test/json/*: Update json-2.0.1.
+	  Changes of 2.0.0: https://github.com/flori/json/blob/f679ebd0c69a94e3e70a897ac9a229f5779c2ee1/CHANGES.md#2015-09-11-200
+	  Changes of 2.0.1: https://github.com/flori/json/blob/f679ebd0c69a94e3e70a897ac9a229f5779c2ee1/CHANGES.md#2016-07-01-201
+	  [Feature #12542][ruby-dev:49706][fix GH-1395]
+
 Tue Jul 5 19:39:49 2016  Naohisa Goto  <ngotogenome@gmail.com>
 
 	* string.c (rb_str_change_terminator_length): New function to change
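For context: json 2.0 drops the old quirks_mode switch entirely, so single JSON values can be parsed and generated directly. A minimal sketch of the behavior difference (assumed from this diff, not part of the patch itself):

    require 'json'

    # json 2.x parses and generates any JSON value, not only objects/arrays:
    JSON.parse('null')          # => nil
    JSON.parse('23')            # => 23
    JSON.generate('a string')   # => "\"a string\""

    # json 1.x required :quirks_mode => true for the calls above.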
@@ -20,7 +20,7 @@ static VALUE mJSON, mExt, mGenerator, cState, mGeneratorMethods, mObject,
 
 static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before,
           i_object_nl, i_array_nl, i_max_nesting, i_allow_nan, i_ascii_only,
-          i_quirks_mode, i_pack, i_unpack, i_create_id, i_extend, i_key_p,
+          i_pack, i_unpack, i_create_id, i_extend, i_key_p,
           i_aref, i_send, i_respond_to_p, i_match, i_keys, i_depth,
           i_buffer_initial_length, i_dup;
 
@@ -222,6 +222,7 @@ static void convert_UTF8_to_JSON_ASCII(FBuffer *buffer, VALUE string)
             unicode_escape_to_buffer(buffer, buf, (UTF16)((ch & halfMask) + UNI_SUR_LOW_START));
         }
     }
+    RB_GC_GUARD(string);
 }
 
 /* Converts string to a JSON string in FBuffer buffer, where only the
@@ -641,8 +642,6 @@ static VALUE cState_configure(VALUE self, VALUE opts)
     state->allow_nan = RTEST(tmp);
     tmp = rb_hash_aref(opts, ID2SYM(i_ascii_only));
     state->ascii_only = RTEST(tmp);
-    tmp = rb_hash_aref(opts, ID2SYM(i_quirks_mode));
-    state->quirks_mode = RTEST(tmp);
     return self;
 }
 
@@ -676,7 +675,6 @@ static VALUE cState_to_h(VALUE self)
     rb_hash_aset(result, ID2SYM(i_array_nl), rb_str_new(state->array_nl, state->array_nl_len));
     rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse);
     rb_hash_aset(result, ID2SYM(i_ascii_only), state->ascii_only ? Qtrue : Qfalse);
-    rb_hash_aset(result, ID2SYM(i_quirks_mode), state->quirks_mode ? Qtrue : Qfalse);
     rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting));
     rb_hash_aset(result, ID2SYM(i_depth), LONG2FIX(state->depth));
     rb_hash_aset(result, ID2SYM(i_buffer_initial_length), LONG2FIX(state->buffer_initial_length));
@@ -853,7 +851,6 @@ static void generate_json_integer(FBuffer *buffer, VALUE Vstate, JSON_Generator_
         generate_json_bignum(buffer, Vstate, state, obj);
     }
 #endif
 
 static void generate_json_float(FBuffer *buffer, VALUE Vstate, JSON_Generator_State *state, VALUE obj)
 {
     double value = RFLOAT_VALUE(obj);
@@ -943,21 +940,6 @@ static VALUE cState_partial_generate(VALUE self, VALUE obj)
     return fbuffer_to_s(buffer);
 }
 
-/*
- * This function returns true if string is either a JSON array or JSON object.
- * It might suffer from false positives, e. g. syntactically incorrect JSON in
- * the string or certain UTF-8 characters on the right hand side.
- */
-static int isArrayOrObject(VALUE string)
-{
-    long string_len = RSTRING_LEN(string);
-    char *p = RSTRING_PTR(string), *q = p + string_len - 1;
-    if (string_len < 2) return 0;
-    for (; p < q && isspace((unsigned char)*p); p++);
-    for (; q > p && isspace((unsigned char)*q); q--);
-    return (*p == '[' && *q == ']') || (*p == '{' && *q == '}');
-}
-
 /*
  * call-seq: generate(obj)
  *
@@ -969,9 +951,6 @@ static VALUE cState_generate(VALUE self, VALUE obj)
 {
     VALUE result = cState_partial_generate(self, obj);
     GET_STATE(self);
-    if (!state->quirks_mode && !isArrayOrObject(result)) {
-        rb_raise(eGeneratorError, "only generation of JSON objects or arrays allowed");
-    }
     return result;
 }
 
@@ -990,8 +969,6 @@ static VALUE cState_generate(VALUE self, VALUE obj)
 * * *allow_nan*: true if NaN, Infinity, and -Infinity should be
 *   generated, otherwise an exception is thrown, if these values are
 *   encountered. This options defaults to false.
- * * *quirks_mode*: Enables quirks_mode for parser, that is for example
- *   generating single JSON values instead of documents is possible.
 * * *buffer_initial_length*: sets the initial length of the generator's
 *   internal buffer.
 */
@@ -1298,29 +1275,6 @@ static VALUE cState_ascii_only_p(VALUE self)
     return state->ascii_only ? Qtrue : Qfalse;
 }
 
-/*
- * call-seq: quirks_mode?
- *
- * Returns true, if quirks mode is enabled. Otherwise returns false.
- */
-static VALUE cState_quirks_mode_p(VALUE self)
-{
-    GET_STATE(self);
-    return state->quirks_mode ? Qtrue : Qfalse;
-}
-
-/*
- * call-seq: quirks_mode=(enable)
- *
- * If set to true, enables the quirks_mode mode.
- */
-static VALUE cState_quirks_mode_set(VALUE self, VALUE enable)
-{
-    GET_STATE(self);
-    state->quirks_mode = RTEST(enable);
-    return Qnil;
-}
-
 /*
  * call-seq: depth
  *
@@ -1409,9 +1363,6 @@ void Init_generator(void)
     rb_define_method(cState, "check_circular?", cState_check_circular_p, 0);
     rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0);
     rb_define_method(cState, "ascii_only?", cState_ascii_only_p, 0);
-    rb_define_method(cState, "quirks_mode?", cState_quirks_mode_p, 0);
-    rb_define_method(cState, "quirks_mode", cState_quirks_mode_p, 0);
-    rb_define_method(cState, "quirks_mode=", cState_quirks_mode_set, 1);
     rb_define_method(cState, "depth", cState_depth, 0);
     rb_define_method(cState, "depth=", cState_depth_set, 1);
     rb_define_method(cState, "buffer_initial_length", cState_buffer_initial_length, 0);
@@ -1468,7 +1419,6 @@ void Init_generator(void)
     i_max_nesting = rb_intern("max_nesting");
     i_allow_nan = rb_intern("allow_nan");
     i_ascii_only = rb_intern("ascii_only");
-    i_quirks_mode = rb_intern("quirks_mode");
     i_depth = rb_intern("depth");
     i_buffer_initial_length = rb_intern("buffer_initial_length");
     i_pack = rb_intern("pack");
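The generator side of the same cleanup: JSON::State loses quirks_mode?, quirks_mode= and the :quirks_mode key in #to_h, and State#generate no longer rejects non-object/array values. A small illustration (behavior assumed from the removed code above, not shown in the patch):

    state = JSON::State.new
    state.to_h.key?(:quirks_mode)      # => false in 2.0.1 (was true in 1.8.3)
    state.respond_to?(:quirks_mode?)   # => false
    state.generate('plain text')       # => "\"plain text\"" (1.8.3 raised GeneratorError without quirks_mode)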
@@ -73,7 +73,6 @@ typedef struct JSON_Generator_StateStruct {
     long max_nesting;
     char allow_nan;
     char ascii_only;
-    char quirks_mode;
     long depth;
     long buffer_initial_length;
 } JSON_Generator_State;
Binary file not shown.
@@ -4,12 +4,12 @@ require 'json/generic_object'
 
 module JSON
   class << self
-    # If _object_ is string-like, parse the string and return the parsed result
-    # as a Ruby data structure. Otherwise generate a JSON text from the Ruby
-    # data structure object and return it.
+    # If _object_ is string-like, parse the string and return the parsed
+    # result as a Ruby data structure. Otherwise generate a JSON text from the
+    # Ruby data structure object and return it.
     #
-    # The _opts_ argument is passed through to generate/parse respectively. See
-    # generate and parse for their documentation.
+    # The _opts_ argument is passed through to generate/parse respectively.
+    # See generate and parse for their documentation.
     def [](object, opts = {})
       if object.respond_to? :to_str
         JSON.parse(object.to_str, opts)
@@ -25,7 +25,7 @@ module JSON
     # Set the JSON parser class _parser_ to be used by JSON.
     def parser=(parser) # :nodoc:
       @parser = parser
-      remove_const :Parser if JSON.const_defined_in?(self, :Parser)
+      remove_const :Parser if const_defined?(:Parser, false)
       const_set :Parser, parser
     end
 
@@ -36,8 +36,8 @@ module JSON
     def deep_const_get(path) # :nodoc:
       path.to_s.split(/::/).inject(Object) do |p, c|
         case
         when c.empty? then p
-        when JSON.const_defined_in?(p, c) then p.const_get(c)
+        when p.const_defined?(c, true) then p.const_get(c)
         else
           begin
             p.const_missing(c)
@@ -139,10 +139,10 @@ module JSON
  # _opts_ can have the following
  # keys:
  # * *max_nesting*: The maximum depth of nesting allowed in the parsed data
- #   structures. Disable depth checking with :max_nesting => false. It defaults
- #   to 100.
+ #   structures. Disable depth checking with :max_nesting => false. It
+ #   defaults to 100.
  # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
- #   defiance of RFC 4627 to be parsed by the Parser. This option defaults
+ #   defiance of RFC 7159 to be parsed by the Parser. This option defaults
  #   to false.
  # * *symbolize_names*: If set to true, returns symbols for the names
  #   (keys) in a JSON object. Otherwise strings are returned. Strings are
@@ -162,11 +162,11 @@ module JSON
  #
  # _opts_ can have the following keys:
  # * *max_nesting*: The maximum depth of nesting allowed in the parsed data
- #   structures. Enable depth checking with :max_nesting => anInteger. The parse!
- #   methods defaults to not doing max depth checking: This can be dangerous
- #   if someone wants to fill up your stack.
+ #   structures. Enable depth checking with :max_nesting => anInteger. The
+ #   parse! methods defaults to not doing max depth checking: This can be
+ #   dangerous if someone wants to fill up your stack.
  # * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in
- #   defiance of RFC 4627 to be parsed by the Parser. This option defaults
+ #   defiance of RFC 7159 to be parsed by the Parser. This option defaults
  #   to true.
  # * *create_additions*: If set to false, the Parser doesn't create
  #   additions even if a matching class and create_id was found. This option
@@ -175,7 +175,7 @@ module JSON
    opts = {
      :max_nesting => false,
      :allow_nan => true
-    }.update(opts)
+    }.merge(opts)
    Parser.new(source, opts).parse
  end
 
@@ -296,13 +296,13 @@ module JSON
    # The global default options for the JSON.load method:
    #  :max_nesting: false
    #  :allow_nan: true
-    #  :quirks_mode: true
+    #  :allow_blank: true
    attr_accessor :load_default_options
  end
  self.load_default_options = {
    :max_nesting => false,
    :allow_nan => true,
-    :quirks_mode => true,
+    :allow_blank => true,
    :create_additions => true,
  }
 
@@ -329,7 +329,7 @@ module JSON
    elsif source.respond_to?(:read)
      source = source.read
    end
-    if opts[:quirks_mode] && (source.nil? || source.empty?)
+    if opts[:allow_blank] && (source.nil? || source.empty?)
      source = 'null'
    end
    result = parse(source, opts)
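The :quirks_mode entry in load_default_options is replaced by :allow_blank, which only controls the nil/empty-source shortcut above. A short sketch of the resulting behavior (assumed, not part of the patch):

    require 'json'
    JSON.load(nil)     # => nil   (:allow_blank turns a blank source into 'null')
    JSON.load('')      # => nil
    JSON.load('[1]')   # => [1]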
@@ -358,13 +358,12 @@ module JSON
    # The global default options for the JSON.dump method:
    #  :max_nesting: false
    #  :allow_nan: true
-    #  :quirks_mode: true
+    #  :allow_blank: true
    attr_accessor :dump_default_options
  end
  self.dump_default_options = {
    :max_nesting => false,
    :allow_nan => true,
-    :quirks_mode => true,
  }
 
  # Dumps _obj_ as a JSON string, i.e. calls generate on the object and returns
@@ -403,37 +402,9 @@ module JSON
      raise ArgumentError, "exceed depth limit"
    end
 
-  # Swap consecutive bytes of _string_ in place.
-  def self.swap!(string) # :nodoc:
-    0.upto(string.size / 2) do |i|
-      break unless string[2 * i + 1]
-      string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i]
-    end
-    string
-  end
-
-  # Shortcut for iconv.
-  if ::String.method_defined?(:encode)
-    # Encodes string using Ruby's _String.encode_
-    def self.iconv(to, from, string)
-      string.encode(to, from)
-    end
-  else
-    require 'iconv'
-    # Encodes string using _iconv_ library
-    def self.iconv(to, from, string)
-      Iconv.conv(to, from, string)
-    end
-  end
-
-  if ::Object.method(:const_defined?).arity == 1
-    def self.const_defined_in?(modul, constant)
-      modul.const_defined?(constant)
-    end
-  else
-    def self.const_defined_in?(modul, constant)
-      modul.const_defined?(constant, false)
-    end
+  # Encodes string using Ruby's _String.encode_
+  def self.iconv(to, from, string)
+    string.encode(to, from)
  end
 end
 
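JSON.swap! and the Iconv fallback are gone; JSON.iconv is now a thin wrapper around String#encode. Usage stays the same, as this sketch (assumed behavior) shows:

    utf16 = 'json'.encode('utf-16be')
    JSON.iconv('utf-8', 'utf-16be', utf16)   # => "json", via String#encode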
@@ -1,10 +1,4 @@
 # frozen_string_literal: false
-if ENV['SIMPLECOV_COVERAGE'].to_i == 1
-  require 'simplecov'
-  SimpleCov.start do
-    add_filter "/tests/"
-  end
-end
 require 'json/common'
 
 module JSON
ext/json/lib/json/ext/.keep (new empty file)
@@ -48,6 +48,14 @@ module JSON
      table
    end
 
+    def [](name)
+      __send__(name)
+    end unless method_defined?(:[])
+
+    def []=(name, value)
+      __send__("#{name}=", value)
+    end unless method_defined?(:[]=)
+
    def |(other)
      self.class[other.to_hash.merge(to_hash)]
    end
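JSON::GenericObject (an OpenStruct subclass) gains Hash-style readers and writers, defined only when OpenStruct does not already provide them. Sketch of the resulting access (assumed behavior):

    obj = JSON::GenericObject.from_hash('foo' => 'bar')
    obj[:foo]        # => "bar"   (delegates to __send__(:foo))
    obj[:baz] = 42   # delegates to __send__("baz=", 42)
    obj.baz          # => 42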
@@ -1,7 +1,7 @@
 # frozen_string_literal: false
 module JSON
  # JSON version
-  VERSION = '1.8.3'
+  VERSION = '2.0.1'
  VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
File diff suppressed because it is too large.
@@ -34,11 +34,9 @@ typedef struct JSON_ParserStruct {
    char *memo;
    VALUE create_id;
    int max_nesting;
-    int current_nesting;
    int allow_nan;
    int parsing_name;
    int symbolize_names;
-    int quirks_mode;
    VALUE object_class;
    VALUE array_class;
    int create_additions;
@@ -58,11 +56,11 @@ typedef struct JSON_ParserStruct {
 
 static UTF32 unescape_unicode(const unsigned char *p);
 static int convert_UTF32_to_UTF8(char *buf, UTF32 ch);
-static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result);
-static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting);
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting);
 static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result);
 static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result);
-static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result);
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting);
 static VALUE json_string_unescape(VALUE result, char *string, char *stringEnd);
 static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result);
 static VALUE convert_encoding(VALUE source);
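Nesting is now tracked through a current_nesting parameter instead of a field in JSON_ParserStruct; the user-visible :max_nesting option behaves as before, e.g. (grounded in the test suite removed/updated by this patch):

    JSON.parse('[[]]', :max_nesting => 1)   # raises JSON::NestingError
    JSON.parse('[[]]', :max_nesting => 2)   # => [[]]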
@@ -2,7 +2,7 @@
 #include "parser.h"
 
 #if defined HAVE_RUBY_ENCODING_H
-# define EXC_ENCODING UTF_8,
+# define EXC_ENCODING rb_utf8_encoding(),
 # ifndef HAVE_RB_ENC_RAISE
 static void
 enc_raise(rb_encoding *enc, VALUE exc, const char *fmt, ...)
@@ -87,17 +87,11 @@ static int convert_UTF32_to_UTF8(char *buf, UTF32 ch)
    return len;
 }
 
-#ifdef HAVE_RUBY_ENCODING_H
-static rb_encoding *UTF_8, *UTF_16BE, *UTF_16LE, *UTF_32BE, *UTF_32LE;
-#else
-static ID i_iconv;
-#endif
-
 static VALUE mJSON, mExt, cParser, eParserError, eNestingError;
 static VALUE CNaN, CInfinity, CMinusInfinity;
 
 static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
-          i_chr, i_max_nesting, i_allow_nan, i_symbolize_names, i_quirks_mode,
+          i_chr, i_max_nesting, i_allow_nan, i_symbolize_names,
          i_object_class, i_array_class, i_key_p, i_deep_const_get, i_match,
          i_match_string, i_aset, i_aref, i_leftshift;
 
@@ -137,7 +131,7 @@ static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
 
    action parse_value {
        VALUE v = Qnil;
-        char *np = JSON_parse_value(json, fpc, pe, &v);
+        char *np = JSON_parse_value(json, fpc, pe, &v, current_nesting);
        if (np == NULL) {
            fhold; fbreak;
        } else {
@@ -170,14 +164,14 @@ static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions,
    ) @exit;
 }%%
 
-static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result)
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
 {
    int cs = EVIL;
    VALUE last_name = Qnil;
    VALUE object_class = json->object_class;
 
-    if (json->max_nesting && json->current_nesting > json->max_nesting) {
-        rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+    if (json->max_nesting && current_nesting > json->max_nesting) {
+        rb_raise(eNestingError, "nesting of %d is too deep", current_nesting);
    }
 
    *result = NIL_P(object_class) ? rb_hash_new() : rb_class_new_instance(0, 0, object_class);
@@ -243,7 +237,7 @@ static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *resu
 
    action parse_number {
        char *np;
-        if(pe > fpc + 9 - json->quirks_mode && !strncmp(MinusInfinity, fpc, 9)) {
+        if(pe > fpc + 8 && !strncmp(MinusInfinity, fpc, 9)) {
            if (json->allow_nan) {
                *result = CMinusInfinity;
                fexec p + 10;
@@ -261,23 +255,19 @@ static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *resu
 
    action parse_array {
        char *np;
-        json->current_nesting++;
-        np = JSON_parse_array(json, fpc, pe, result);
-        json->current_nesting--;
+        np = JSON_parse_array(json, fpc, pe, result, current_nesting + 1);
        if (np == NULL) { fhold; fbreak; } else fexec np;
    }
 
    action parse_object {
        char *np;
-        json->current_nesting++;
-        np = JSON_parse_object(json, fpc, pe, result);
-        json->current_nesting--;
+        np = JSON_parse_object(json, fpc, pe, result, current_nesting + 1);
        if (np == NULL) { fhold; fbreak; } else fexec np;
    }
 
    action exit { fhold; fbreak; }
 
-main := (
+main := ignore* (
    Vnull @parse_null |
    Vfalse @parse_false |
    Vtrue @parse_true |
@@ -287,10 +277,10 @@
    begin_string >parse_string |
    begin_array >parse_array |
    begin_object >parse_object
-) %*exit;
+) ignore* %*exit;
 }%%
 
-static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result)
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
 {
    int cs = EVIL;
 
@@ -377,7 +367,7 @@ static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *resul
 
    action parse_value {
        VALUE v = Qnil;
-        char *np = JSON_parse_value(json, fpc, pe, &v);
+        char *np = JSON_parse_value(json, fpc, pe, &v, current_nesting);
        if (np == NULL) {
            fhold; fbreak;
        } else {
@@ -400,13 +390,13 @@ static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *resul
    end_array @exit;
 }%%
 
-static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result)
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result, int current_nesting)
 {
    int cs = EVIL;
    VALUE array_class = json->array_class;
 
-    if (json->max_nesting && json->current_nesting > json->max_nesting) {
-        rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting);
+    if (json->max_nesting && current_nesting > json->max_nesting) {
+        rb_raise(eNestingError, "nesting of %d is too deep", current_nesting);
    }
    *result = NIL_P(array_class) ? rb_ary_new() : rb_class_new_instance(0, 0, array_class);
 
@@ -548,6 +538,8 @@ static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *resu
 
    if (json->symbolize_names && json->parsing_name) {
      *result = rb_str_intern(*result);
+    } else {
+      rb_str_resize(*result, RSTRING_LEN(*result));
    }
    if (cs >= JSON_string_first_final) {
        return p + 1;
@@ -570,41 +562,13 @@ static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *resu
 
 static VALUE convert_encoding(VALUE source)
 {
-  const char *ptr = RSTRING_PTR(source);
-  long len = RSTRING_LEN(source);
-  if (len < 2) {
-    rb_raise(eParserError, "A JSON text must at least contain two octets!");
-  }
 #ifdef HAVE_RUBY_ENCODING_H
-  {
-    rb_encoding *enc = rb_enc_get(source);
-    if (enc == rb_ascii8bit_encoding()) {
-      if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
-        source = rb_str_conv_enc(source, UTF_32BE, rb_utf8_encoding());
-      } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
-        source = rb_str_conv_enc(source, UTF_16BE, rb_utf8_encoding());
-      } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
-        source = rb_str_conv_enc(source, UTF_32LE, rb_utf8_encoding());
-      } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
-        source = rb_str_conv_enc(source, UTF_16LE, rb_utf8_encoding());
-      } else {
-        source = rb_str_dup(source);
-        FORCE_UTF8(source);
-      }
-    } else {
-      source = rb_str_conv_enc(source, NULL, rb_utf8_encoding());
-    }
-  }
-#else
-  if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) {
-    source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32be"), source);
-  } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) {
-    source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16be"), source);
-  } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) {
-    source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-32le"), source);
-  } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) {
-    source = rb_funcall(mJSON, i_iconv, 3, rb_str_new2("utf-8"), rb_str_new2("utf-16le"), source);
-  }
+  rb_encoding *enc = rb_enc_get(source);
+  if (enc == rb_ascii8bit_encoding()) {
+    FORCE_UTF8(source);
+  } else {
+    source = rb_str_conv_enc(source, NULL, rb_utf8_encoding());
+  }
 #endif
  return source;
 }
@@ -627,8 +591,9 @@ static VALUE convert_encoding(VALUE source)
 * defiance of RFC 4627 to be parsed by the Parser. This option defaults to
 * false.
 * * *symbolize_names*: If set to true, returns symbols for the names
- *   (keys) in a JSON object. Otherwise strings are returned, which is also
- *   the default.
+ *   (keys) in a JSON object. Otherwise strings are returned, which is
+ *   also the default. It's not possible to use this option in
+ *   conjunction with the *create_additions* option.
 * * *create_additions*: If set to false, the Parser doesn't create
 *   additions even if a matching class and create_id was found. This option
 *   defaults to false.
@@ -679,19 +644,17 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
        } else {
            json->symbolize_names = 0;
        }
-        tmp = ID2SYM(i_quirks_mode);
-        if (option_given_p(opts, tmp)) {
-            VALUE quirks_mode = rb_hash_aref(opts, tmp);
-            json->quirks_mode = RTEST(quirks_mode) ? 1 : 0;
-        } else {
-            json->quirks_mode = 0;
-        }
        tmp = ID2SYM(i_create_additions);
        if (option_given_p(opts, tmp)) {
            json->create_additions = RTEST(rb_hash_aref(opts, tmp));
        } else {
            json->create_additions = 0;
        }
+        if (json->symbolize_names && json->create_additions) {
+            rb_raise(rb_eArgError,
+                "options :symbolize_names and :create_additions cannot be "
+                " used in conjunction");
+        }
        tmp = ID2SYM(i_create_id);
        if (option_given_p(opts, tmp)) {
            json->create_id = rb_hash_aref(opts, tmp);
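New in 2.0.x: the parser rejects :symbolize_names combined with :create_additions, as the added check above shows. A sketch of the user-visible effect (assumed, with the message wording taken from the C source):

    JSON.parse('{"a":1}', :symbolize_names => true)   # => {:a=>1}
    JSON.parse('{"a":1}', :symbolize_names => true, :create_additions => true)
    # raises ArgumentError: options :symbolize_names and :create_additions cannot be used in conjunction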
@@ -728,11 +691,8 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
        json->object_class = Qnil;
        json->array_class = Qnil;
    }
+    source = convert_encoding(StringValue(source));
    StringValue(source);
-    if (!json->quirks_mode) {
-        source = convert_encoding(source);
-    }
-    json->current_nesting = 0;
    json->len = RSTRING_LEN(source);
    json->source = RSTRING_PTR(source);;
    json->Vsource = source;
@ -746,56 +706,8 @@ static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self)
|
|||||||
|
|
||||||
include JSON_common;
|
include JSON_common;
|
||||||
|
|
||||||
action parse_object {
|
|
||||||
char *np;
|
|
||||||
json->current_nesting = 1;
|
|
||||||
np = JSON_parse_object(json, fpc, pe, &result);
|
|
||||||
if (np == NULL) { fhold; fbreak; } else fexec np;
|
|
||||||
}
|
|
||||||
|
|
||||||
action parse_array {
|
|
||||||
char *np;
|
|
||||||
json->current_nesting = 1;
|
|
||||||
np = JSON_parse_array(json, fpc, pe, &result);
|
|
||||||
if (np == NULL) { fhold; fbreak; } else fexec np;
|
|
||||||
}
|
|
||||||
|
|
||||||
main := ignore* (
|
|
||||||
begin_object >parse_object |
|
|
||||||
begin_array >parse_array
|
|
||||||
) ignore*;
|
|
||||||
}%%
|
|
||||||
|
|
||||||
static VALUE cParser_parse_strict(VALUE self)
|
|
||||||
{
|
|
||||||
char *p, *pe;
|
|
||||||
int cs = EVIL;
|
|
||||||
VALUE result = Qnil;
|
|
||||||
GET_PARSER;
|
|
||||||
|
|
||||||
%% write init;
|
|
||||||
p = json->source;
|
|
||||||
pe = p + json->len;
|
|
||||||
%% write exec;
|
|
||||||
|
|
||||||
if (cs >= JSON_first_final && p == pe) {
|
|
||||||
return result;
|
|
||||||
} else {
|
|
||||||
rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p);
|
|
||||||
return Qnil;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
%%{
|
|
||||||
machine JSON_quirks_mode;
|
|
||||||
|
|
||||||
write data;
|
|
||||||
|
|
||||||
include JSON_common;
|
|
||||||
|
|
||||||
action parse_value {
|
action parse_value {
|
||||||
char *np = JSON_parse_value(json, fpc, pe, &result);
|
char *np = JSON_parse_value(json, fpc, pe, &result, 0);
|
||||||
if (np == NULL) { fhold; fbreak; } else fexec np;
|
if (np == NULL) { fhold; fbreak; } else fexec np;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -804,26 +716,6 @@ static VALUE cParser_parse_strict(VALUE self)
|
|||||||
) ignore*;
|
) ignore*;
|
||||||
}%%
|
}%%
|
||||||
|
|
||||||
static VALUE cParser_parse_quirks_mode(VALUE self)
|
|
||||||
{
|
|
||||||
char *p, *pe;
|
|
||||||
int cs = EVIL;
|
|
||||||
VALUE result = Qnil;
|
|
||||||
GET_PARSER;
|
|
||||||
|
|
||||||
%% write init;
|
|
||||||
p = json->source;
|
|
||||||
pe = p + json->len;
|
|
||||||
%% write exec;
|
|
||||||
|
|
||||||
if (cs >= JSON_quirks_mode_first_final && p == pe) {
|
|
||||||
return result;
|
|
||||||
} else {
|
|
||||||
rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p);
|
|
||||||
return Qnil;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* call-seq: parse()
|
* call-seq: parse()
|
||||||
*
|
*
|
||||||
@ -832,12 +724,21 @@ static VALUE cParser_parse_quirks_mode(VALUE self)
|
|||||||
*/
|
*/
|
||||||
static VALUE cParser_parse(VALUE self)
|
static VALUE cParser_parse(VALUE self)
|
||||||
{
|
{
|
||||||
|
char *p, *pe;
|
||||||
|
int cs = EVIL;
|
||||||
|
VALUE result = Qnil;
|
||||||
GET_PARSER;
|
GET_PARSER;
|
||||||
|
|
||||||
if (json->quirks_mode) {
|
%% write init;
|
||||||
return cParser_parse_quirks_mode(self);
|
p = json->source;
|
||||||
|
pe = p + json->len;
|
||||||
|
%% write exec;
|
||||||
|
|
||||||
|
if (cs >= JSON_first_final && p == pe) {
|
||||||
|
return result;
|
||||||
} else {
|
} else {
|
||||||
return cParser_parse_strict(self);
|
rb_enc_raise(EXC_ENCODING eParserError, "%u: unexpected token at '%s'", __LINE__, p);
|
||||||
|
return Qnil;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -895,18 +796,6 @@ static VALUE cParser_source(VALUE self)
|
|||||||
return rb_str_dup(json->Vsource);
|
return rb_str_dup(json->Vsource);
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* call-seq: quirks_mode?()
|
|
||||||
*
|
|
||||||
* Returns a true, if this parser is in quirks_mode, false otherwise.
|
|
||||||
*/
|
|
||||||
static VALUE cParser_quirks_mode_p(VALUE self)
|
|
||||||
{
|
|
||||||
GET_PARSER;
|
|
||||||
return json->quirks_mode ? Qtrue : Qfalse;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void Init_parser(void)
|
void Init_parser(void)
|
||||||
{
|
{
|
||||||
rb_require("json/common");
|
rb_require("json/common");
|
||||||
@ -919,7 +808,6 @@ void Init_parser(void)
|
|||||||
rb_define_method(cParser, "initialize", cParser_initialize, -1);
|
rb_define_method(cParser, "initialize", cParser_initialize, -1);
|
||||||
rb_define_method(cParser, "parse", cParser_parse, 0);
|
rb_define_method(cParser, "parse", cParser_parse, 0);
|
||||||
rb_define_method(cParser, "source", cParser_source, 0);
|
rb_define_method(cParser, "source", cParser_source, 0);
|
||||||
rb_define_method(cParser, "quirks_mode?", cParser_quirks_mode_p, 0);
|
|
||||||
|
|
||||||
CNaN = rb_const_get(mJSON, rb_intern("NaN"));
|
CNaN = rb_const_get(mJSON, rb_intern("NaN"));
|
||||||
CInfinity = rb_const_get(mJSON, rb_intern("Infinity"));
|
CInfinity = rb_const_get(mJSON, rb_intern("Infinity"));
|
||||||
@ -933,7 +821,6 @@ void Init_parser(void)
|
|||||||
i_max_nesting = rb_intern("max_nesting");
|
i_max_nesting = rb_intern("max_nesting");
|
||||||
i_allow_nan = rb_intern("allow_nan");
|
i_allow_nan = rb_intern("allow_nan");
|
||||||
i_symbolize_names = rb_intern("symbolize_names");
|
i_symbolize_names = rb_intern("symbolize_names");
|
||||||
i_quirks_mode = rb_intern("quirks_mode");
|
|
||||||
i_object_class = rb_intern("object_class");
|
i_object_class = rb_intern("object_class");
|
||||||
i_array_class = rb_intern("array_class");
|
i_array_class = rb_intern("array_class");
|
||||||
i_match = rb_intern("match");
|
i_match = rb_intern("match");
|
||||||
@ -943,15 +830,6 @@ void Init_parser(void)
|
|||||||
i_aset = rb_intern("[]=");
|
i_aset = rb_intern("[]=");
|
||||||
i_aref = rb_intern("[]");
|
i_aref = rb_intern("[]");
|
||||||
i_leftshift = rb_intern("<<");
|
i_leftshift = rb_intern("<<");
|
||||||
#ifdef HAVE_RUBY_ENCODING_H
|
|
||||||
UTF_8 = rb_utf8_encoding();
|
|
||||||
UTF_16BE = rb_enc_find("utf-16be");
|
|
||||||
UTF_16LE = rb_enc_find("utf-16le");
|
|
||||||
UTF_32BE = rb_enc_find("utf-32be");
|
|
||||||
UTF_32LE = rb_enc_find("utf-32le");
|
|
||||||
#else
|
|
||||||
i_iconv = rb_intern("iconv");
|
|
||||||
#endif
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -1,10 +0,0 @@
|
|||||||
RAGEL = ragel
|
|
||||||
|
|
||||||
.SUFFIXES: .rl
|
|
||||||
|
|
||||||
.rl.c:
|
|
||||||
$(RAGEL) -G2 $<
|
|
||||||
$(BASERUBY) -pli -e '$$_.sub!(/[ \t]+$$/, "")' \
|
|
||||||
-e '$$_.sub!(/^static const int (JSON_.*=.*);$$/, "enum {\\1};")' $@
|
|
||||||
|
|
||||||
parser.c:
|
|
@@ -1 +0,0 @@
-"A JSON payload should be an object or array, not a string."

test/json/fixtures/obsolete_fail1.json (new file)
@@ -0,0 +1 @@
+"A JSON payload should be an object or array, not a string."
@@ -10,3 +10,13 @@ else
  $:.unshift 'ext', 'lib'
  require 'json'
 end
+
+require 'test/unit'
+begin
+  require 'byebug'
+rescue LoadError
+end
+if ENV['START_SIMPLECOV'].to_i == 1
+  require 'simplecov'
+  SimpleCov.start
+end
@ -1,577 +0,0 @@
|
|||||||
#!/usr/bin/env ruby
|
|
||||||
# encoding: utf-8
|
|
||||||
# frozen_string_literal: false
|
|
||||||
|
|
||||||
require 'test/unit'
|
|
||||||
require File.join(File.dirname(__FILE__), 'setup_variant')
|
|
||||||
require 'stringio'
|
|
||||||
require 'tempfile'
|
|
||||||
require 'ostruct'
|
|
||||||
|
|
||||||
unless Array.method_defined?(:permutation)
|
|
||||||
begin
|
|
||||||
require 'enumerator'
|
|
||||||
require 'permutation'
|
|
||||||
class Array
|
|
||||||
def permutation
|
|
||||||
Permutation.for(self).to_enum.map { |x| x.project }
|
|
||||||
end
|
|
||||||
end
|
|
||||||
rescue LoadError
|
|
||||||
warn "Skipping permutation tests."
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
class TestJSON < Test::Unit::TestCase
|
|
||||||
include JSON
|
|
||||||
|
|
||||||
def setup
|
|
||||||
@ary = [1, "foo", 3.14, 4711.0, 2.718, nil, [1,-2,3], false, true].map do
|
|
||||||
|x| [x]
|
|
||||||
end
|
|
||||||
@ary_to_parse = ["1", '"foo"', "3.14", "4711.0", "2.718", "null",
|
|
||||||
"[1,-2,3]", "false", "true"].map do
|
|
||||||
|x| "[#{x}]"
|
|
||||||
end
|
|
||||||
@hash = {
|
|
||||||
'a' => 2,
|
|
||||||
'b' => 3.141,
|
|
||||||
'c' => 'c',
|
|
||||||
'd' => [ 1, "b", 3.14 ],
|
|
||||||
'e' => { 'foo' => 'bar' },
|
|
||||||
'g' => "\"\0\037",
|
|
||||||
'h' => 1000.0,
|
|
||||||
'i' => 0.001
|
|
||||||
}
|
|
||||||
@json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\
|
|
||||||
'"g":"\\"\\u0000\\u001f","h":1.0E3,"i":1.0E-3}'
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_construction
|
|
||||||
parser = JSON::Parser.new('test')
|
|
||||||
assert_equal 'test', parser.source
|
|
||||||
end
|
|
||||||
|
|
||||||
def assert_equal_float(expected, is)
|
|
||||||
assert_in_delta(expected.first, is.first, 1e-2)
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_simple_arrays
|
|
||||||
assert_equal([], parse('[]'))
|
|
||||||
assert_equal([], parse(' [ ] '))
|
|
||||||
assert_equal([nil], parse('[null]'))
|
|
||||||
assert_equal([false], parse('[false]'))
|
|
||||||
assert_equal([true], parse('[true]'))
|
|
||||||
assert_equal([-23], parse('[-23]'))
|
|
||||||
assert_equal([23], parse('[23]'))
|
|
||||||
assert_equal([0.23], parse('[0.23]'))
|
|
||||||
assert_equal([0.0], parse('[0e0]'))
|
|
||||||
assert_raise(JSON::ParserError) { parse('[+23.2]') }
|
|
||||||
assert_raise(JSON::ParserError) { parse('[+23]') }
|
|
||||||
assert_raise(JSON::ParserError) { parse('[.23]') }
|
|
||||||
assert_raise(JSON::ParserError) { parse('[023]') }
|
|
||||||
assert_equal_float [3.141], parse('[3.141]')
|
|
||||||
assert_equal_float [-3.141], parse('[-3.141]')
|
|
||||||
assert_equal_float [3.141], parse('[3141e-3]')
|
|
||||||
assert_equal_float [3.141], parse('[3141.1e-3]')
|
|
||||||
assert_equal_float [3.141], parse('[3141E-3]')
|
|
||||||
assert_equal_float [3.141], parse('[3141.0E-3]')
|
|
||||||
assert_equal_float [-3.141], parse('[-3141.0e-3]')
|
|
||||||
assert_equal_float [-3.141], parse('[-3141e-3]')
|
|
||||||
assert_raise(ParserError) { parse('[NaN]') }
|
|
||||||
assert parse('[NaN]', :allow_nan => true).first.nan?
|
|
||||||
assert_raise(ParserError) { parse('[Infinity]') }
|
|
||||||
assert_equal [1.0/0], parse('[Infinity]', :allow_nan => true)
|
|
||||||
assert_raise(ParserError) { parse('[-Infinity]') }
|
|
||||||
assert_equal [-1.0/0], parse('[-Infinity]', :allow_nan => true)
|
|
||||||
assert_equal([""], parse('[""]'))
|
|
||||||
assert_equal(["foobar"], parse('["foobar"]'))
|
|
||||||
assert_equal([{}], parse('[{}]'))
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_simple_objects
|
|
||||||
assert_equal({}, parse('{}'))
|
|
||||||
assert_equal({}, parse(' { } '))
|
|
||||||
assert_equal({ "a" => nil }, parse('{ "a" : null}'))
|
|
||||||
assert_equal({ "a" => nil }, parse('{"a":null}'))
|
|
||||||
assert_equal({ "a" => false }, parse('{ "a" : false } '))
|
|
||||||
assert_equal({ "a" => false }, parse('{"a":false}'))
|
|
||||||
assert_raise(JSON::ParserError) { parse('{false}') }
|
|
||||||
assert_equal({ "a" => true }, parse('{"a":true}'))
|
|
||||||
assert_equal({ "a" => true }, parse(' { "a" : true } '))
|
|
||||||
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
|
|
||||||
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
|
|
||||||
assert_equal({ "a" => 23 }, parse('{"a":23 } '))
|
|
||||||
assert_equal({ "a" => 23 }, parse(' { "a" : 23 } '))
|
|
||||||
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
|
|
||||||
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_json_primitive_values
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('') }
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('', :quirks_mode => true) }
|
|
||||||
assert_raise(TypeError) { JSON::Parser.new(nil).parse }
|
|
||||||
assert_raise(TypeError) { JSON::Parser.new(nil, :quirks_mode => true).parse }
|
|
||||||
assert_raise(TypeError) { JSON.parse(nil) }
|
|
||||||
assert_raise(TypeError) { JSON.parse(nil, :quirks_mode => true) }
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse(' /* foo */ ') }
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse(' /* foo */ ', :quirks_mode => true) }
|
|
||||||
parser = JSON::Parser.new('null')
|
|
||||||
assert_equal false, parser.quirks_mode?
|
|
||||||
assert_raise(JSON::ParserError) { parser.parse }
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('null') }
|
|
||||||
assert_equal nil, JSON.parse('null', :quirks_mode => true)
|
|
||||||
parser = JSON::Parser.new('null', :quirks_mode => true)
|
|
||||||
assert_equal true, parser.quirks_mode?
|
|
||||||
assert_equal nil, parser.parse
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('false') }
|
|
||||||
assert_equal false, JSON.parse('false', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('true') }
|
|
||||||
assert_equal true, JSON.parse('true', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('23') }
|
|
||||||
assert_equal 23, JSON.parse('23', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('1') }
|
|
||||||
assert_equal 1, JSON.parse('1', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('3.141') }
|
|
||||||
assert_in_delta 3.141, JSON.parse('3.141', :quirks_mode => true), 1E-3
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('18446744073709551616') }
|
|
||||||
assert_equal 2 ** 64, JSON.parse('18446744073709551616', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('"foo"') }
|
|
||||||
assert_equal 'foo', JSON.parse('"foo"', :quirks_mode => true)
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('NaN', :allow_nan => true) }
|
|
||||||
assert JSON.parse('NaN', :quirks_mode => true, :allow_nan => true).nan?
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('Infinity', :allow_nan => true) }
|
|
||||||
assert JSON.parse('Infinity', :quirks_mode => true, :allow_nan => true).infinite?
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('-Infinity', :allow_nan => true) }
|
|
||||||
assert JSON.parse('-Infinity', :quirks_mode => true, :allow_nan => true).infinite?
|
|
||||||
assert_raise(JSON::ParserError) { JSON.parse('[ 1, ]', :quirks_mode => true) }
|
|
||||||
end
|
|
||||||
|
|
||||||
if Array.method_defined?(:permutation)
|
|
||||||
def test_parse_more_complex_arrays
|
|
||||||
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
|
|
||||||
a.permutation.each do |perm|
|
|
||||||
json = pretty_generate(perm)
|
|
||||||
assert_equal perm, parse(json)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_complex_objects
|
|
||||||
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
|
|
||||||
a.permutation.each do |perm|
|
|
||||||
s = "a"
|
|
||||||
orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h }
|
|
||||||
json = pretty_generate(orig_obj)
|
|
||||||
assert_equal orig_obj, parse(json)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_arrays
|
|
||||||
assert_equal([1,2,3], parse('[1,2,3]'))
|
|
||||||
assert_equal([1.2,2,3], parse('[1.2,2,3]'))
|
|
||||||
assert_equal([[],[[],[]]], parse('[[],[[],[]]]'))
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_values
|
|
||||||
assert_equal([""], parse('[""]'))
|
|
||||||
assert_equal(["\\"], parse('["\\\\"]'))
|
|
||||||
assert_equal(['"'], parse('["\""]'))
|
|
||||||
assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]'))
|
|
||||||
assert_equal(["\"\b\n\r\t\0\037"],
|
|
||||||
parse('["\"\b\n\r\t\u0000\u001f"]'))
|
|
||||||
for i in 0 ... @ary.size
|
|
||||||
assert_equal(@ary[i], parse(@ary_to_parse[i]))
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_array
|
|
||||||
assert_equal([], parse('[]'))
|
|
||||||
assert_equal([], parse(' [ ] '))
|
|
||||||
assert_equal([1], parse('[1]'))
|
|
||||||
assert_equal([1], parse(' [ 1 ] '))
|
|
||||||
assert_equal(@ary,
|
|
||||||
parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]]'\
|
|
||||||
',[false],[true]]'))
|
|
||||||
assert_equal(@ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2]\s
|
|
||||||
, [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] }))
|
|
||||||
end
|
|
||||||
|
|
||||||
class SubArray < Array
|
|
||||||
def <<(v)
|
|
||||||
@shifted = true
|
|
||||||
super
|
|
||||||
end
|
|
||||||
|
|
||||||
def shifted?
|
|
||||||
@shifted
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
class SubArray2 < Array
|
|
||||||
def to_json(*a)
|
|
||||||
{
|
|
||||||
JSON.create_id => self.class.name,
|
|
||||||
'ary' => to_a,
|
|
||||||
}.to_json(*a)
|
|
||||||
end
|
|
||||||
|
|
||||||
def self.json_create(o)
|
|
||||||
o.delete JSON.create_id
|
|
||||||
o['ary']
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
class SubArrayWrapper
|
|
||||||
def initialize
|
|
||||||
@data = []
|
|
||||||
end
|
|
||||||
|
|
||||||
attr_reader :data
|
|
||||||
|
|
||||||
def [](index)
|
|
||||||
@data[index]
|
|
||||||
end
|
|
||||||
|
|
||||||
def <<(value)
|
|
||||||
@data << value
|
|
||||||
@shifted = true
|
|
||||||
end
|
|
||||||
|
|
||||||
def shifted?
|
|
||||||
@shifted
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_array_custom_array_derived_class
|
|
||||||
res = parse('[1,2]', :array_class => SubArray)
|
|
||||||
assert_equal([1,2], res)
|
|
||||||
assert_equal(SubArray, res.class)
|
|
||||||
assert res.shifted?
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_parse_array_custom_non_array_derived_class
|
|
||||||
res = parse('[1,2]', :array_class => SubArrayWrapper)
|
|
||||||
assert_equal([1,2], res.data)
|
|
||||||
-    assert_equal(SubArrayWrapper, res.class)
-    assert res.shifted?
-  end
-
-  def test_parse_object
-    assert_equal({}, parse('{}'))
-    assert_equal({}, parse(' { } '))
-    assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}'))
-    assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } '))
-  end
-
-  class SubHash < Hash
-    def []=(k, v)
-      @item_set = true
-      super
-    end
-
-    def item_set?
-      @item_set
-    end
-  end
-
-  class SubHash2 < Hash
-    def to_json(*a)
-      {
-        JSON.create_id => self.class.name,
-      }.merge(self).to_json(*a)
-    end
-
-    def self.json_create(o)
-      o.delete JSON.create_id
-      self[o]
-    end
-  end
-
-  class SubOpenStruct < OpenStruct
-    def [](k)
-      __send__(k)
-    end
-
-    def []=(k, v)
-      @item_set = true
-      __send__("#{k}=", v)
-    end
-
-    def item_set?
-      @item_set
-    end
-  end
-
-  def test_parse_object_custom_hash_derived_class
-    res = parse('{"foo":"bar"}', :object_class => SubHash)
-    assert_equal({"foo" => "bar"}, res)
-    assert_equal(SubHash, res.class)
-    assert res.item_set?
-  end
-
-  def test_parse_object_custom_non_hash_derived_class
-    res = parse('{"foo":"bar"}', :object_class => SubOpenStruct)
-    assert_equal "bar", res.foo
-    assert_equal(SubOpenStruct, res.class)
-    assert res.item_set?
-  end
-
-  def test_parse_generic_object
-    res = parse('{"foo":"bar", "baz":{}}', :object_class => JSON::GenericObject)
-    assert_equal(JSON::GenericObject, res.class)
-    assert_equal "bar", res.foo
-    assert_equal "bar", res["foo"]
-    assert_equal "bar", res[:foo]
-    assert_equal "bar", res.to_hash[:foo]
-    assert_equal(JSON::GenericObject, res.baz.class)
-  end
-
-  def test_generate_core_subclasses_with_new_to_json
-    obj = SubHash2["foo" => SubHash2["bar" => true]]
-    obj_json = JSON(obj)
-    obj_again = JSON.parse(obj_json, :create_additions => true)
-    assert_kind_of SubHash2, obj_again
-    assert_kind_of SubHash2, obj_again['foo']
-    assert obj_again['foo']['bar']
-    assert_equal obj, obj_again
-    assert_equal ["foo"], JSON(JSON(SubArray2["foo"]), :create_additions => true)
-  end
-
-  def test_generate_core_subclasses_with_default_to_json
-    assert_equal '{"foo":"bar"}', JSON(SubHash["foo" => "bar"])
-    assert_equal '["foo"]', JSON(SubArray["foo"])
-  end
-
-  def test_generate_of_core_subclasses
-    obj = SubHash["foo" => SubHash["bar" => true]]
-    obj_json = JSON(obj)
-    obj_again = JSON(obj_json)
-    assert_kind_of Hash, obj_again
-    assert_kind_of Hash, obj_again['foo']
-    assert obj_again['foo']['bar']
-    assert_equal obj, obj_again
-  end
-
-  def test_parser_reset
-    parser = Parser.new(@json)
-    assert_equal(@hash, parser.parse)
-    assert_equal(@hash, parser.parse)
-  end
-
-  def test_comments
-    json = <<EOT
-{
-  "key1":"value1", // eol comment
-  "key2":"value2" /* multi line
-   * comment */,
-  "key3":"value3" /* multi line
-   // nested eol comment
-   * comment */
-}
-EOT
-    assert_equal(
-      { "key1" => "value1", "key2" => "value2", "key3" => "value3" },
-      parse(json))
-    json = <<EOT
-{
-  "key1":"value1" /* multi line
-   // nested eol comment
-   /* illegal nested multi line comment */
-   * comment */
-}
-EOT
-    assert_raise(ParserError) { parse(json) }
-    json = <<EOT
-{
-  "key1":"value1" /* multi line
-   // nested eol comment
-   closed multi comment */
-   and again, throw an Error */
-}
-EOT
-    assert_raise(ParserError) { parse(json) }
-    json = <<EOT
-{
-  "key1":"value1" /*/*/
-}
-EOT
-    assert_equal({ "key1" => "value1" }, parse(json))
-  end
-
-  def test_backslash
-    data = [ '\\.(?i:gif|jpe?g|png)$' ]
-    json = '["\\\\.(?i:gif|jpe?g|png)$"]'
-    assert_equal json, JSON.generate(data)
-    assert_equal data, JSON.parse(json)
-    #
-    data = [ '\\"' ]
-    json = '["\\\\\""]'
-    assert_equal json, JSON.generate(data)
-    assert_equal data, JSON.parse(json)
-    #
-    json = '["/"]'
-    data = JSON.parse(json)
-    assert_equal ['/'], data
-    assert_equal json, JSON.generate(data)
-    #
-    json = '["\""]'
-    data = JSON.parse(json)
-    assert_equal ['"'], data
-    assert_equal json, JSON.generate(data)
-    json = '["\\\'"]'
-    data = JSON.parse(json)
-    assert_equal ["'"], data
-    assert_equal '["\'"]', JSON.generate(data)
-  end
-
-  def test_wrong_inputs
-    assert_raise(ParserError) { JSON.parse('"foo"') }
-    assert_raise(ParserError) { JSON.parse('123') }
-    assert_raise(ParserError) { JSON.parse('[] bla') }
-    assert_raise(ParserError) { JSON.parse('[] 1') }
-    assert_raise(ParserError) { JSON.parse('[] []') }
-    assert_raise(ParserError) { JSON.parse('[] {}') }
-    assert_raise(ParserError) { JSON.parse('{} []') }
-    assert_raise(ParserError) { JSON.parse('{} {}') }
-    assert_raise(ParserError) { JSON.parse('[NULL]') }
-    assert_raise(ParserError) { JSON.parse('[FALSE]') }
-    assert_raise(ParserError) { JSON.parse('[TRUE]') }
-    assert_raise(ParserError) { JSON.parse('[07] ') }
-    assert_raise(ParserError) { JSON.parse('[0a]') }
-    assert_raise(ParserError) { JSON.parse('[1.]') }
-    assert_raise(ParserError) { JSON.parse(' ') }
-  end
-
-  def test_nesting
-    assert_raise(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 }
-    assert_raise(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse }
-    assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2)
-    too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
-    too_deep_ary = eval too_deep
-    assert_raise(JSON::NestingError) { JSON.parse too_deep }
-    assert_raise(JSON::NestingError) { JSON.parser.new(too_deep).parse }
-    assert_raise(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 100 }
-    ok = JSON.parse too_deep, :max_nesting => 101
-    assert_equal too_deep_ary, ok
-    ok = JSON.parse too_deep, :max_nesting => nil
-    assert_equal too_deep_ary, ok
-    ok = JSON.parse too_deep, :max_nesting => false
-    assert_equal too_deep_ary, ok
-    ok = JSON.parse too_deep, :max_nesting => 0
-    assert_equal too_deep_ary, ok
-    assert_raise(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 }
-    assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2)
-    assert_raise(JSON::NestingError) { JSON.generate too_deep_ary }
-    assert_raise(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 100 }
-    ok = JSON.generate too_deep_ary, :max_nesting => 101
-    assert_equal too_deep, ok
-    ok = JSON.generate too_deep_ary, :max_nesting => nil
-    assert_equal too_deep, ok
-    ok = JSON.generate too_deep_ary, :max_nesting => false
-    assert_equal too_deep, ok
-    ok = JSON.generate too_deep_ary, :max_nesting => 0
-    assert_equal too_deep, ok
-  end
-
-  def test_symbolize_names
-    assert_equal({ "foo" => "bar", "baz" => "quux" },
-      JSON.parse('{"foo":"bar", "baz":"quux"}'))
-    assert_equal({ :foo => "bar", :baz => "quux" },
-      JSON.parse('{"foo":"bar", "baz":"quux"}', :symbolize_names => true))
-  end
-
-  def test_load
-    assert_equal @hash, JSON.load(@json)
-    tempfile = Tempfile.open('json')
-    tempfile.write @json
-    tempfile.rewind
-    assert_equal @hash, JSON.load(tempfile)
-    stringio = StringIO.new(@json)
-    stringio.rewind
-    assert_equal @hash, JSON.load(stringio)
-    assert_equal nil, JSON.load(nil)
-    assert_equal nil, JSON.load('')
-  ensure
-    tempfile.close!
-  end
-
-  def test_load_with_options
-    small_hash  = JSON("foo" => 'bar')
-    symbol_hash = { :foo => 'bar' }
-    assert_equal symbol_hash, JSON.load(small_hash, nil, :symbolize_names => true)
-  end
-
-  def test_dump
-    too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
-    assert_equal too_deep, JSON.dump(eval(too_deep))
-    assert_kind_of String, Marshal.dump(eval(too_deep))
-    assert_raise(ArgumentError) { JSON.dump(eval(too_deep), 100) }
-    assert_raise(ArgumentError) { Marshal.dump(eval(too_deep), 100) }
-    assert_equal too_deep, JSON.dump(eval(too_deep), 101)
-    assert_kind_of String, Marshal.dump(eval(too_deep), 101)
-    output = StringIO.new
-    JSON.dump(eval(too_deep), output)
-    assert_equal too_deep, output.string
-    output = StringIO.new
-    JSON.dump(eval(too_deep), output, 101)
-    assert_equal too_deep, output.string
-  end
-
-  def test_dump_should_modify_defaults
-    max_nesting = JSON.dump_default_options[:max_nesting]
-    JSON.dump([], StringIO.new, 10)
-    assert_equal max_nesting, JSON.dump_default_options[:max_nesting]
-  end
-
-  def test_big_integers
-    json1 = JSON([orig = (1 << 31) - 1])
-    assert_equal orig, JSON[json1][0]
-    json2 = JSON([orig = 1 << 31])
-    assert_equal orig, JSON[json2][0]
-    json3 = JSON([orig = (1 << 62) - 1])
-    assert_equal orig, JSON[json3][0]
-    json4 = JSON([orig = 1 << 62])
-    assert_equal orig, JSON[json4][0]
-    json5 = JSON([orig = 1 << 64])
-    assert_equal orig, JSON[json5][0]
-  end
-
-  if defined?(JSON::Ext::Parser)
-    def test_allocate
-      parser = JSON::Ext::Parser.new("{}")
-      assert_raise(TypeError, '[ruby-core:35079]') {parser.__send__(:initialize, "{}")}
-      parser = JSON::Ext::Parser.allocate
-      assert_raise(TypeError, '[ruby-core:35079]') {parser.source}
-    end
-  end
-
-  def test_argument_encoding
-    source = "{}".force_encoding("ascii-8bit")
-    JSON::Parser.new(source)
-    assert_equal Encoding::ASCII_8BIT, source.encoding
-  end if defined?(Encoding::ASCII_8BIT)
-
-  def test_error_message_encoding
-    bug10705 = '[ruby-core:67386] [Bug #10705]'
-    json = "\"\xE2\x88\x9A\"".force_encoding(Encoding::UTF_8)
-    e = assert_raise(JSON::ParserError) {
-      JSON.parse(json)
-    }
-    assert_equal(Encoding::UTF_8, e.message.encoding, bug10705)
-    assert_include(e.message, json, bug10705)
-  end if defined?(Encoding::UTF_8)
-
-  if EnvUtil.gc_stress_to_class?
-    def assert_no_memory_leak(code, *rest, **opt)
-      code = "8.times {20_000.times {begin #{code}; rescue NoMemoryError; end}; GC.start}"
-      super(["-rjson/ext/parser"],
-            "GC.add_stress_to_class(JSON::Ext::Parser); "\
-            "#{code}", code, *rest, rss: true, limit: 1.1, **opt)
-    end
-
-    def test_no_memory_leak_allocate
-      assert_no_memory_leak("JSON::Ext::Parser.allocate")
-    end
-  end
-end
@@ -1,9 +1,5 @@
-#!/usr/bin/env ruby
-# -*- coding:utf-8 -*-
 # frozen_string_literal: false
+require 'test_helper'
-require 'test/unit'
-require File.join(File.dirname(__FILE__), 'setup_variant')
 require 'json/add/core'
 require 'json/add/complex'
 require 'json/add/rational'
@@ -74,7 +70,7 @@ class TestJSONAddition < Test::Unit::TestCase
     a = A.new(666)
     assert A.json_creatable?
     json = generate(a)
-    a_again = JSON.parse(json, :create_additions => true)
+    a_again = parse(json, :create_additions => true)
     assert_kind_of a.class, a_again
     assert_equal a, a_again
   end
@@ -83,7 +79,7 @@ class TestJSONAddition < Test::Unit::TestCase
     a = A.new(666)
     assert A.json_creatable?
     json = generate(a)
-    a_hash = JSON.parse(json)
+    a_hash = parse(json)
     assert_kind_of Hash, a_hash
   end

@@ -91,10 +87,10 @@ class TestJSONAddition < Test::Unit::TestCase
     a = A.new(666)
     assert A.json_creatable?
     json = generate(a)
-    a_again = JSON.parse(json, :create_additions => true)
+    a_again = parse(json, :create_additions => true)
     assert_kind_of a.class, a_again
     assert_equal a, a_again
-    a_hash = JSON.parse(json, :create_additions => false)
+    a_hash = parse(json, :create_additions => false)
     assert_kind_of Hash, a_hash
     assert_equal(
       {"args"=>[666], "json_class"=>"TestJSONAddition::A"}.sort_by { |k,| k },
@@ -106,14 +102,14 @@ class TestJSONAddition < Test::Unit::TestCase
     b = B.new
     assert !B.json_creatable?
     json = generate(b)
-    assert_equal({ "json_class"=>"TestJSONAddition::B" }, JSON.parse(json))
+    assert_equal({ "json_class"=>"TestJSONAddition::B" }, parse(json))
   end

   def test_extended_json_fail2
     c = C.new
     assert !C.json_creatable?
     json = generate(c)
-    assert_raise(ArgumentError, NameError) { JSON.parse(json, :create_additions => true) }
+    assert_raise(ArgumentError, NameError) { parse(json, :create_additions => true) }
   end

   def test_raw_strings
@@ -131,7 +127,7 @@ class TestJSONAddition < Test::Unit::TestCase
     assert_match(/\A\{.*\}\z/, json)
     assert_match(/"json_class":"String"/, json)
     assert_match(/"raw":\[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255\]/, json)
-    raw_again = JSON.parse(json, :create_additions => true)
+    raw_again = parse(json, :create_additions => true)
     assert_equal raw, raw_again
   end

@@ -168,19 +164,19 @@ class TestJSONAddition < Test::Unit::TestCase

   def test_utc_datetime
     now = Time.now
     d = DateTime.parse(now.to_s, :create_additions => true) # usual case
-    assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+    assert_equal d, parse(d.to_json, :create_additions => true)
     d = DateTime.parse(now.utc.to_s) # of = 0
-    assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+    assert_equal d, parse(d.to_json, :create_additions => true)
     d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(1,24))
-    assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+    assert_equal d, parse(d.to_json, :create_additions => true)
     d = DateTime.civil(2008, 6, 17, 11, 48, 32, Rational(12,24))
-    assert_equal d, JSON.parse(d.to_json, :create_additions => true)
+    assert_equal d, parse(d.to_json, :create_additions => true)
   end

   def test_rational_complex
-    assert_equal Rational(2, 9), JSON.parse(JSON(Rational(2, 9)), :create_additions => true)
+    assert_equal Rational(2, 9), parse(JSON(Rational(2, 9)), :create_additions => true)
-    assert_equal Complex(2, 9), JSON.parse(JSON(Complex(2, 9)), :create_additions => true)
+    assert_equal Complex(2, 9), parse(JSON(Complex(2, 9)), :create_additions => true)
   end

   def test_bigdecimal
@@ -192,6 +188,6 @@ class TestJSONAddition < Test::Unit::TestCase
     o = OpenStruct.new
     # XXX this won't work; o.foo = { :bar => true }
     o.foo = { 'bar' => true }
-    assert_equal o, JSON.parse(JSON(o), :create_additions => true)
+    assert_equal o, parse(JSON(o), :create_additions => true)
   end
 end
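As a minimal sketch of the round-trip exercised by the addition tests above (the Point class here is illustrative only, not part of the json gem or of this commit): a class embeds JSON.create_id in its to_json payload and is only revived when :create_additions => true is passed explicitly.

    require 'json'

    class Point
      attr_reader :x, :y

      def initialize(x, y)
        @x, @y = x, y
      end

      def to_json(*args)
        # embed the class name under JSON.create_id ("json_class" by default)
        { JSON.create_id => self.class.name, 'x' => x, 'y' => y }.to_json(*args)
      end

      def self.json_create(hash)
        new(hash['x'], hash['y'])
      end
    end

    json = JSON.generate(Point.new(1, 2))
    JSON.parse(json)                              # => plain Hash (safe default)
    JSON.parse(json, :create_additions => true)   # => a Point instance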
test/json/test_json_common_interface.rb (new file, 126 lines)
@@ -0,0 +1,126 @@
+# frozen_string_literal: false
+require 'test_helper'
+require 'stringio'
+require 'tempfile'
+
+class TestJSONCommonInterface < Test::Unit::TestCase
+  include JSON
+
+  def setup
+    @hash = {
+      'a' => 2,
+      'b' => 3.141,
+      'c' => 'c',
+      'd' => [ 1, "b", 3.14 ],
+      'e' => { 'foo' => 'bar' },
+      'g' => "\"\0\037",
+      'h' => 1000.0,
+      'i' => 0.001
+    }
+    @json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\
+      '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}'
+  end
+
+  def test_index
+    assert_equal @json, JSON[@hash]
+    assert_equal @hash, JSON[@json]
+  end
+
+  def test_parser
+    assert_match /::Parser\z/, JSON.parser.name
+  end
+
+  def test_generator
+    assert_match /::Generator\z/, JSON.generator.name
+  end
+
+  def test_state
+    assert_match /::Generator::State\z/, JSON.state.name
+  end
+
+  def test_create_id
+    assert_equal 'json_class', JSON.create_id
+    JSON.create_id = 'foo_bar'
+    assert_equal 'foo_bar', JSON.create_id
+  ensure
+    JSON.create_id = 'json_class'
+  end
+
+  def test_deep_const_get
+    assert_raise(ArgumentError) { JSON.deep_const_get('Nix::Da') }
+    assert_equal File::SEPARATOR, JSON.deep_const_get('File::SEPARATOR')
+  end
+
+  def test_parse
+    assert_equal [ 1, 2, 3, ], JSON.parse('[ 1, 2, 3 ]')
+  end
+
+  def test_parse_bang
+    assert_equal [ 1, NaN, 3, ], JSON.parse!('[ 1, NaN, 3 ]')
+  end
+
+  def test_generate
+    assert_equal '[1,2,3]', JSON.generate([ 1, 2, 3 ])
+  end
+
+  def test_fast_generate
+    assert_equal '[1,2,3]', JSON.generate([ 1, 2, 3 ])
+  end
+
+  def test_pretty_generate
+    assert_equal "[\n  1,\n  2,\n  3\n]", JSON.pretty_generate([ 1, 2, 3 ])
+  end
+
+  def test_load
+    assert_equal @hash, JSON.load(@json)
+    tempfile = Tempfile.open('@json')
+    tempfile.write @json
+    tempfile.rewind
+    assert_equal @hash, JSON.load(tempfile)
+    stringio = StringIO.new(@json)
+    stringio.rewind
+    assert_equal @hash, JSON.load(stringio)
+    assert_equal nil, JSON.load(nil)
+    assert_equal nil, JSON.load('')
+  ensure
+    tempfile.close!
+  end
+
+  def test_load_with_options
+    json = '{ "foo": NaN }'
+    assert JSON.load(json, nil, :allow_nan => true)['foo'].nan?
+  end
+
+  def test_load_null
+    assert_equal nil, JSON.load(nil, nil, :allow_blank => true)
+    assert_raise(TypeError) { JSON.load(nil, nil, :allow_blank => false) }
+    assert_raise(JSON::ParserError) { JSON.load('', nil, :allow_blank => false) }
+  end
+
+  def test_dump
+    too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
+    assert_equal too_deep, dump(eval(too_deep))
+    assert_kind_of String, Marshal.dump(eval(too_deep))
+    assert_raise(ArgumentError) { dump(eval(too_deep), 100) }
+    assert_raise(ArgumentError) { Marshal.dump(eval(too_deep), 100) }
+    assert_equal too_deep, dump(eval(too_deep), 101)
+    assert_kind_of String, Marshal.dump(eval(too_deep), 101)
+    output = StringIO.new
+    dump(eval(too_deep), output)
+    assert_equal too_deep, output.string
+    output = StringIO.new
+    dump(eval(too_deep), output, 101)
+    assert_equal too_deep, output.string
+  end
+
+  def test_dump_should_modify_defaults
+    max_nesting = JSON.dump_default_options[:max_nesting]
+    dump([], StringIO.new, 10)
+    assert_equal max_nesting, JSON.dump_default_options[:max_nesting]
+  end
+
+  def test_JSON
+    assert_equal @json, JSON(@hash)
+    assert_equal @hash, JSON(@json)
+  end
+end
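A short usage sketch of the common interface covered by the new test file above; the data and IO objects are arbitrary examples. JSON.dump writes to anything responding to #write, and JSON.load accepts strings as well as IO-like objects.

    require 'json'
    require 'stringio'

    data = { 'a' => 2, 'b' => 3.141 }

    io = StringIO.new
    JSON.dump(data, io)    # serialize straight into the IO object
    io.rewind
    JSON.load(io)          # => {"a"=>2, "b"=>3.141}

    JSON.load(nil)         # => nil (blank input is allowed by default)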
@@ -1,44 +1,33 @@
-#!/usr/bin/env ruby
 # encoding: utf-8
 # frozen_string_literal: false
+require 'test_helper'
-require 'test/unit'
-require File.join(File.dirname(__FILE__), 'setup_variant')

 class TestJSONEncoding < Test::Unit::TestCase
   include JSON

   def setup
-    @utf_8 = '["© ≠ €!"]'
-    @parsed = [ "© ≠ €!" ]
-    @generated = '["\u00a9 \u2260 \u20ac!"]'
+    @utf_8 = '"© ≠ €!"'
+    @ascii_8bit = @utf_8.dup.force_encoding('ascii-8bit')
+    @parsed = "© ≠ €!"
+    @generated = '"\u00a9 \u2260 \u20ac!"'
     if String.method_defined?(:encode)
-      @utf_16_data = [@parsed.first.encode('utf-16be', 'utf-8')]
-      @utf_8_ascii_8bit = @utf_8.dup.force_encoding(Encoding::ASCII_8BIT)
+      @utf_16_data = @parsed.encode('utf-16be', 'utf-8')
       @utf_16be = @utf_8.encode('utf-16be', 'utf-8')
-      @utf_16be_ascii_8bit = @utf_16be.dup.force_encoding(Encoding::ASCII_8BIT)
       @utf_16le = @utf_8.encode('utf-16le', 'utf-8')
-      @utf_16le_ascii_8bit = @utf_16le.dup.force_encoding(Encoding::ASCII_8BIT)
       @utf_32be = @utf_8.encode('utf-32be', 'utf-8')
-      @utf_32be_ascii_8bit = @utf_32be.dup.force_encoding(Encoding::ASCII_8BIT)
       @utf_32le = @utf_8.encode('utf-32le', 'utf-8')
-      @utf_32le_ascii_8bit = @utf_32le.dup.force_encoding(Encoding::ASCII_8BIT)
     else
       require 'iconv'
-      @utf_16_data = Iconv.iconv('utf-16be', 'utf-8', @parsed.first)
-      @utf_8_ascii_8bit = @utf_8.dup
+      @utf_16_data, = Iconv.iconv('utf-16be', 'utf-8', @parsed)
       @utf_16be, = Iconv.iconv('utf-16be', 'utf-8', @utf_8)
-      @utf_16be_ascii_8bit = @utf_16be.dup
       @utf_16le, = Iconv.iconv('utf-16le', 'utf-8', @utf_8)
-      @utf_16le_ascii_8bit = @utf_16le.dup
       @utf_32be, = Iconv.iconv('utf-32be', 'utf-8', @utf_8)
-      @utf_32be_ascii_8bit = @utf_32be.dup
       @utf_32le, = Iconv.iconv('utf-32le', 'utf-8', @utf_8)
-      @utf_32le_ascii_8bit = @utf_32le.dup
     end
   end

   def test_parse
+    assert_equal @parsed, JSON.parse(@ascii_8bit)
     assert_equal @parsed, JSON.parse(@utf_8)
     assert_equal @parsed, JSON.parse(@utf_16be)
     assert_equal @parsed, JSON.parse(@utf_16le)
@@ -46,21 +35,71 @@ class TestJSONEncoding < Test::Unit::TestCase
     assert_equal @parsed, JSON.parse(@utf_32le)
   end

-  def test_parse_ascii_8bit
-    assert_equal @parsed, JSON.parse(@utf_8_ascii_8bit)
-    assert_equal @parsed, JSON.parse(@utf_16be_ascii_8bit)
-    assert_equal @parsed, JSON.parse(@utf_16le_ascii_8bit)
-    assert_equal @parsed, JSON.parse(@utf_32be_ascii_8bit)
-    assert_equal @parsed, JSON.parse(@utf_32le_ascii_8bit)
-  end
-
   def test_generate
     assert_equal @generated, JSON.generate(@parsed, :ascii_only => true)
-    if defined?(::Encoding)
-      assert_equal @generated, JSON.generate(@utf_16_data, :ascii_only => true)
-    else
-      # XXX checking of correct utf8 data is not as strict (yet?) without :ascii_only
-      assert_raise(JSON::GeneratorError) { JSON.generate(@utf_16_data, :ascii_only => true) }
-    end
+    assert_equal @generated, JSON.generate(@utf_16_data, :ascii_only => true)
+  end
+
+  def test_unicode
+    assert_equal '""', ''.to_json
+    assert_equal '"\\b"', "\b".to_json
+    assert_equal '"\u0001"', 0x1.chr.to_json
+    assert_equal '"\u001f"', 0x1f.chr.to_json
+    assert_equal '" "', ' '.to_json
+    assert_equal "\"#{0x7f.chr}\"", 0x7f.chr.to_json
+    utf8 = [ "© ≠ €! \01" ]
+    json = '["© ≠ €! \u0001"]'
+    assert_equal json, utf8.to_json(:ascii_only => false)
+    assert_equal utf8, parse(json)
+    json = '["\u00a9 \u2260 \u20ac! \u0001"]'
+    assert_equal json, utf8.to_json(:ascii_only => true)
+    assert_equal utf8, parse(json)
+    utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
+    json = "[\"\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212\"]"
+    assert_equal utf8, parse(json)
+    assert_equal json, utf8.to_json(:ascii_only => false)
+    utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
+    assert_equal utf8, parse(json)
+    json = "[\"\\u3042\\u3044\\u3046\\u3048\\u304a\"]"
+    assert_equal json, utf8.to_json(:ascii_only => true)
+    assert_equal utf8, parse(json)
+    utf8 = ['საქართველო']
+    json = '["საქართველო"]'
+    assert_equal json, utf8.to_json(:ascii_only => false)
+    json = "[\"\\u10e1\\u10d0\\u10e5\\u10d0\\u10e0\\u10d7\\u10d5\\u10d4\\u10da\\u10dd\"]"
+    assert_equal json, utf8.to_json(:ascii_only => true)
+    assert_equal utf8, parse(json)
+    assert_equal '["Ã"]', generate(["Ã"], :ascii_only => false)
+    assert_equal '["\\u00c3"]', generate(["Ã"], :ascii_only => true)
+    assert_equal ["€"], parse('["\u20ac"]')
+    utf8 = ["\xf0\xa0\x80\x81"]
+    json = "[\"\xf0\xa0\x80\x81\"]"
+    assert_equal json, generate(utf8, :ascii_only => false)
+    assert_equal utf8, parse(json)
+    json = '["\ud840\udc01"]'
+    assert_equal json, generate(utf8, :ascii_only => true)
+    assert_equal utf8, parse(json)
+  end
+
+  def test_chars
+    (0..0x7f).each do |i|
+      json = '["\u%04x"]' % i
+      if RUBY_VERSION >= "1.9."
+        i = i.chr
+      end
+      assert_equal i, parse(json).first[0]
+      if i == ?\b
+        generated = generate(["" << i])
+        assert '["\b"]' == generated || '["\10"]' == generated
+      elsif [?\n, ?\r, ?\t, ?\f].include?(i)
+        assert_equal '[' << ('' << i).dump << ']', generate(["" << i])
+      elsif i.chr < 0x20.chr
+        assert_equal json, generate(["" << i])
+      end
+    end
+    assert_raise(JSON::GeneratorError) do
+      generate(["\x80"], :ascii_only => true)
+    end
+    assert_equal "\302\200", parse('["\u0080"]').first
   end
 end
test/json/test_json_ext_parser.rb (new file, 28 lines)
@@ -0,0 +1,28 @@
+# frozen_string_literal: false
+require 'test_helper'
+
+class TestJSONExtParser < Test::Unit::TestCase
+  if defined?(JSON::Ext::Parser)
+    def test_allocate
+      parser = JSON::Ext::Parser.new("{}")
+      assert_raise(TypeError, '[ruby-core:35079]') do
+        parser.__send__(:initialize, "{}")
+      end
+      parser = JSON::Ext::Parser.allocate
+      assert_raise(TypeError, '[ruby-core:35079]') { parser.source }
+    end
+  end
+
+  if EnvUtil.gc_stress_to_class?
+    def assert_no_memory_leak(code, *rest, **opt)
+      code = "8.times {20_000.times {begin #{code}; rescue NoMemoryError; end}; GC.start}"
+      super(["-rjson/ext/parser"],
+            "GC.add_stress_to_class(JSON::Ext::Parser); "\
+            "#{code}", code, *rest, rss: true, limit: 1.1, **opt)
+    end
+
+    def test_no_memory_leak_allocate
+      assert_no_memory_leak("JSON::Ext::Parser.allocate")
+    end
+  end
+end
@@ -1,13 +1,9 @@
-#!/usr/bin/env ruby
-# encoding: utf-8
 # frozen_string_literal: false
+require 'test_helper'
-require 'test/unit'
-require File.join(File.dirname(__FILE__), 'setup_variant')

 class TestJSONFixtures < Test::Unit::TestCase
   def setup
-    fixtures = File.join(File.dirname(__FILE__), 'fixtures/*.json')
+    fixtures = File.join(File.dirname(__FILE__), 'fixtures/{fail,pass}.json')
     passed, failed = Dir[fixtures].partition { |f| f['pass'] }
     @passed = passed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort
     @failed = failed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort
@@ -2,10 +2,9 @@
 # encoding: utf-8
 # frozen_string_literal: false

-require 'test/unit'
+require 'test_helper'
-require File.join(File.dirname(__FILE__), 'setup_variant')

-class TestJSONGenerate < Test::Unit::TestCase
+class TestJSONGenerator < Test::Unit::TestCase
   include JSON

   def setup
@@ -43,23 +42,23 @@ EOT

   def test_generate
     json = generate(@hash)
-    assert_equal(JSON.parse(@json2), JSON.parse(json))
+    assert_equal(parse(@json2), parse(json))
     json = JSON[@hash]
-    assert_equal(JSON.parse(@json2), JSON.parse(json))
+    assert_equal(parse(@json2), parse(json))
     parsed_json = parse(json)
     assert_equal(@hash, parsed_json)
     json = generate({1=>2})
     assert_equal('{"1":2}', json)
     parsed_json = parse(json)
     assert_equal({"1"=>2}, parsed_json)
-    assert_raise(GeneratorError) { generate(666) }
-    assert_equal '666', generate(666, :quirks_mode => true)
+    assert_equal '666', generate(666)
   end

   def test_generate_pretty
     json = pretty_generate(@hash)
-    # hashes aren't (insertion) ordered on every ruby implementation assert_equal(@json3, json)
-    assert_equal(JSON.parse(@json3), JSON.parse(json))
+    # hashes aren't (insertion) ordered on every ruby implementation
+    # assert_equal(@json3, json)
+    assert_equal(parse(@json3), parse(json))
     parsed_json = parse(json)
     assert_equal(@hash, parsed_json)
     json = pretty_generate({1=>2})
@@ -70,8 +69,7 @@ EOT
 EOT
     parsed_json = parse(json)
     assert_equal({"1"=>2}, parsed_json)
-    assert_raise(GeneratorError) { pretty_generate(666) }
-    assert_equal '666', pretty_generate(666, :quirks_mode => true)
+    assert_equal '666', pretty_generate(666)
   end

   def test_generate_custom
@@ -89,30 +87,26 @@ EOT

   def test_fast_generate
     json = fast_generate(@hash)
-    assert_equal(JSON.parse(@json2), JSON.parse(json))
+    assert_equal(parse(@json2), parse(json))
     parsed_json = parse(json)
     assert_equal(@hash, parsed_json)
     json = fast_generate({1=>2})
     assert_equal('{"1":2}', json)
     parsed_json = parse(json)
     assert_equal({"1"=>2}, parsed_json)
-    assert_raise(GeneratorError) { fast_generate(666) }
-    assert_equal '666', fast_generate(666, :quirks_mode => true)
+    assert_equal '666', fast_generate(666)
   end

   def test_own_state
     state = State.new
     json = generate(@hash, state)
-    assert_equal(JSON.parse(@json2), JSON.parse(json))
+    assert_equal(parse(@json2), parse(json))
     parsed_json = parse(json)
     assert_equal(@hash, parsed_json)
     json = generate({1=>2}, state)
     assert_equal('{"1":2}', json)
     parsed_json = parse(json)
     assert_equal({"1"=>2}, parsed_json)
-    assert_raise(GeneratorError) { generate(666, state) }
-    state.quirks_mode = true
-    assert state.quirks_mode?
     assert_equal '666', generate(666, state)
   end

@@ -141,7 +135,6 @@ EOT
       :array_nl => "\n",
       :ascii_only => false,
       :buffer_initial_length => 1024,
-      :quirks_mode => false,
       :depth => 0,
       :indent => " ",
       :max_nesting => 100,
@@ -158,7 +151,6 @@ EOT
       :array_nl => "",
       :ascii_only => false,
       :buffer_initial_length => 1024,
-      :quirks_mode => false,
      :depth => 0,
       :indent => "",
       :max_nesting => 100,
@@ -175,7 +167,6 @@ EOT
       :array_nl => "",
       :ascii_only => false,
       :buffer_initial_length => 1024,
-      :quirks_mode => false,
       :depth => 0,
       :indent => "",
       :max_nesting => 0,
@@ -206,7 +197,7 @@ EOT
   def test_depth
     ary = []; ary << ary
     assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth
-    assert_raise(JSON::NestingError) { JSON.generate(ary) }
+    assert_raise(JSON::NestingError) { generate(ary) }
     assert_equal 0, JSON::SAFE_STATE_PROTOTYPE.depth
     assert_equal 0, JSON::PRETTY_STATE_PROTOTYPE.depth
     assert_raise(JSON::NestingError) { JSON.pretty_generate(ary) }
@@ -286,7 +277,7 @@ EOT
   if defined?(JSON::Ext::Generator)
     def test_broken_bignum # [ruby-core:38867]
       pid = fork do
-        Integer.class_eval do
+        Bignum.class_eval do
           def to_s
           end
         end
@@ -332,17 +323,54 @@ EOT

   def test_json_generate
     assert_raise JSON::GeneratorError do
-      assert_equal true, JSON.generate(["\xea"])
+      assert_equal true, generate(["\xea"])
     end
   end

+  def test_nesting
+    too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
+    too_deep_ary = eval too_deep
+    assert_raise(JSON::NestingError) { generate too_deep_ary }
+    assert_raise(JSON::NestingError) { generate too_deep_ary, :max_nesting => 100 }
+    ok = generate too_deep_ary, :max_nesting => 101
+    assert_equal too_deep, ok
+    ok = generate too_deep_ary, :max_nesting => nil
+    assert_equal too_deep, ok
+    ok = generate too_deep_ary, :max_nesting => false
+    assert_equal too_deep, ok
+    ok = generate too_deep_ary, :max_nesting => 0
+    assert_equal too_deep, ok
+  end
+
+  def test_backslash
+    data = [ '\\.(?i:gif|jpe?g|png)$' ]
+    json = '["\\\\.(?i:gif|jpe?g|png)$"]'
+    assert_equal json, generate(data)
+    #
+    data = [ '\\"' ]
+    json = '["\\\\\""]'
+    assert_equal json, generate(data)
+    #
+    data = [ '/' ]
+    json = '["/"]'
+    assert_equal json, generate(data)
+    #
+    data = ['"']
+    json = '["\""]'
+    assert_equal json, generate(data)
+    #
+    data = ["'"]
+    json = '["\\\'"]'
+    assert_equal '["\'"]', generate(data)
+  end
+
   def test_string_subclass
     s = Class.new(String) do
       def to_s; self; end
       undef to_json
     end
     assert_nothing_raised(SystemStackError) do
-      assert_equal '[""]', JSON.generate([s.new])
+      assert_equal '["foo"]', JSON.generate([s.new('foo')])
     end
   end
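The generator changes above remove the old quirks_mode switch; under json 2.x the behaviour it used to gate is simply the default. A quick sketch (the return values in the comments reflect my reading of the updated tests):

    require 'json'

    JSON.generate(666)          # => "666"  (json 1.x raised GeneratorError here without :quirks_mode)
    JSON.pretty_generate(666)   # => "666"
    JSON.generate([1, [2]], :max_nesting => 2)   # => "[1,[2]]"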
@@ -1,9 +1,6 @@
-#!/usr/bin/env ruby
-# encoding: utf-8
 # frozen_string_literal: false
+require 'test_helper'

-require 'test/unit'
-require File.join(File.dirname(__FILE__), 'setup_variant')
 class TestJSONGenericObject < Test::Unit::TestCase
   include JSON

@@ -27,11 +24,20 @@ class TestJSONGenericObject < Test::Unit::TestCase
   end

   def test_parse_json
-    assert_kind_of Hash, JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+    assert_kind_of Hash,
+      JSON(
+        '{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }',
+        :create_additions => true
+      )
     switch_json_creatable do
-      assert_equal @go, l = JSON('{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }', :create_additions => true)
+      assert_equal @go, l =
+        JSON(
+          '{ "json_class": "JSON::GenericObject", "a": 1, "b": 2 }',
+          :create_additions => true
+        )
       assert_equal 1, l.a
-      assert_equal @go, l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject)
+      assert_equal @go,
+        l = JSON('{ "a": 1, "b": 2 }', :object_class => GenericObject)
       assert_equal 1, l.a
       assert_equal GenericObject[:a => GenericObject[:b => 2]],
         l = JSON('{ "a": { "b": 2 } }', :object_class => GenericObject)
test/json/test_json_parser.rb (new file, 448 lines)
@@ -0,0 +1,448 @@
|
# encoding: utf-8
|
||||||
|
# frozen_string_literal: false
|
||||||
|
require 'test_helper'
|
||||||
|
require 'stringio'
|
||||||
|
require 'tempfile'
|
||||||
|
require 'ostruct'
|
||||||
|
|
||||||
|
class TestJSONParser < Test::Unit::TestCase
|
||||||
|
include JSON
|
||||||
|
|
||||||
|
def test_construction
|
||||||
|
parser = JSON::Parser.new('test')
|
||||||
|
assert_equal 'test', parser.source
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_argument_encoding
|
||||||
|
source = "{}".encode("UTF-16")
|
||||||
|
JSON::Parser.new(source)
|
||||||
|
assert_equal Encoding::UTF_16, source.encoding
|
||||||
|
end if defined?(Encoding::UTF_16)
|
||||||
|
|
||||||
|
def test_error_message_encoding
|
||||||
|
bug10705 = '[ruby-core:67386] [Bug #10705]'
|
||||||
|
json = ".\"\xE2\x88\x9A\"".force_encoding(Encoding::UTF_8)
|
||||||
|
e = assert_raise(JSON::ParserError) {
|
||||||
|
JSON::Ext::Parser.new(json).parse
|
||||||
|
}
|
||||||
|
assert_equal(Encoding::UTF_8, e.message.encoding, bug10705)
|
||||||
|
assert_include(e.message, json, bug10705)
|
||||||
|
end if defined?(Encoding::UTF_8) and defined?(JSON::Ext::Parser)
|
||||||
|
|
||||||
|
def test_parsing
|
||||||
|
parser = JSON::Parser.new('"test"')
|
||||||
|
assert_equal 'test', parser.parse
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parser_reset
|
||||||
|
parser = Parser.new('{"a":"b"}')
|
||||||
|
assert_equal({ 'a' => 'b' }, parser.parse)
|
||||||
|
assert_equal({ 'a' => 'b' }, parser.parse)
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_simple_arrays
|
||||||
|
assert_equal([], parse('[]'))
|
||||||
|
assert_equal([], parse(' [ ] '))
|
||||||
|
assert_equal([ nil ], parse('[null]'))
|
||||||
|
assert_equal([ false ], parse('[false]'))
|
||||||
|
assert_equal([ true ], parse('[true]'))
|
||||||
|
assert_equal([ -23 ], parse('[-23]'))
|
||||||
|
assert_equal([ 23 ], parse('[23]'))
|
||||||
|
assert_equal_float([ 0.23 ], parse('[0.23]'))
|
||||||
|
assert_equal_float([ 0.0 ], parse('[0e0]'))
|
||||||
|
assert_equal([""], parse('[""]'))
|
||||||
|
assert_equal(["foobar"], parse('["foobar"]'))
|
||||||
|
assert_equal([{}], parse('[{}]'))
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_simple_objects
|
||||||
|
assert_equal({}, parse('{}'))
|
||||||
|
assert_equal({}, parse(' { } '))
|
||||||
|
assert_equal({ "a" => nil }, parse('{ "a" : null}'))
|
||||||
|
assert_equal({ "a" => nil }, parse('{"a":null}'))
|
||||||
|
assert_equal({ "a" => false }, parse('{ "a" : false } '))
|
||||||
|
assert_equal({ "a" => false }, parse('{"a":false}'))
|
||||||
|
assert_raise(JSON::ParserError) { parse('{false}') }
|
||||||
|
assert_equal({ "a" => true }, parse('{"a":true}'))
|
||||||
|
assert_equal({ "a" => true }, parse(' { "a" : true } '))
|
||||||
|
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
|
||||||
|
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
|
||||||
|
assert_equal({ "a" => 23 }, parse('{"a":23 } '))
|
||||||
|
assert_equal({ "a" => 23 }, parse(' { "a" : 23 } '))
|
||||||
|
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
|
||||||
|
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_numbers
|
||||||
|
assert_raise(JSON::ParserError) { parse('+23.2') }
|
||||||
|
assert_raise(JSON::ParserError) { parse('+23') }
|
||||||
|
assert_raise(JSON::ParserError) { parse('.23') }
|
||||||
|
assert_raise(JSON::ParserError) { parse('023') }
|
||||||
|
assert_equal 23, parse('23')
|
||||||
|
assert_equal -23, parse('-23')
|
||||||
|
assert_equal_float 3.141, parse('3.141')
|
||||||
|
assert_equal_float -3.141, parse('-3.141')
|
||||||
|
assert_equal_float 3.141, parse('3141e-3')
|
||||||
|
assert_equal_float 3.141, parse('3141.1e-3')
|
||||||
|
assert_equal_float 3.141, parse('3141E-3')
|
||||||
|
assert_equal_float 3.141, parse('3141.0E-3')
|
||||||
|
assert_equal_float -3.141, parse('-3141.0e-3')
|
||||||
|
assert_equal_float -3.141, parse('-3141e-3')
|
||||||
|
assert_raise(ParserError) { parse('NaN') }
|
||||||
|
assert parse('NaN', :allow_nan => true).nan?
|
||||||
|
assert_raise(ParserError) { parse('Infinity') }
|
||||||
|
assert_equal 1.0/0, parse('Infinity', :allow_nan => true)
|
||||||
|
assert_raise(ParserError) { parse('-Infinity') }
|
||||||
|
assert_equal -1.0/0, parse('-Infinity', :allow_nan => true)
|
||||||
|
end
|
||||||
|
|
||||||
|
if Array.method_defined?(:permutation)
|
||||||
|
def test_parse_more_complex_arrays
|
||||||
|
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
|
||||||
|
a.permutation.each do |perm|
|
||||||
|
json = pretty_generate(perm)
|
||||||
|
assert_equal perm, parse(json)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_complex_objects
|
||||||
|
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
|
||||||
|
a.permutation.each do |perm|
|
||||||
|
s = "a"
|
||||||
|
orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h }
|
||||||
|
json = pretty_generate(orig_obj)
|
||||||
|
assert_equal orig_obj, parse(json)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_arrays
|
||||||
|
assert_equal([1,2,3], parse('[1,2,3]'))
|
||||||
|
assert_equal([1.2,2,3], parse('[1.2,2,3]'))
|
||||||
|
assert_equal([[],[[],[]]], parse('[[],[[],[]]]'))
|
||||||
|
assert_equal([], parse('[]'))
|
||||||
|
assert_equal([], parse(' [ ] '))
|
||||||
|
assert_equal([1], parse('[1]'))
|
||||||
|
assert_equal([1], parse(' [ 1 ] '))
|
||||||
|
ary = [[1], ["foo"], [3.14], [4711.0], [2.718], [nil],
|
||||||
|
[[1, -2, 3]], [false], [true]]
|
||||||
|
assert_equal(ary,
|
||||||
|
parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]],[false],[true]]'))
|
||||||
|
assert_equal(ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2]\s
|
||||||
|
, [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] }))
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_json_primitive_values
|
||||||
|
assert_raise(JSON::ParserError) { parse('') }
|
||||||
|
assert_raise(TypeError) { parse(nil) }
|
||||||
|
assert_raise(JSON::ParserError) { parse(' /* foo */ ') }
|
||||||
|
assert_equal nil, parse('null')
|
||||||
|
assert_equal false, parse('false')
|
||||||
|
assert_equal true, parse('true')
|
||||||
|
assert_equal 23, parse('23')
|
||||||
|
assert_equal 1, parse('1')
|
||||||
|
assert_equal_float 3.141, parse('3.141'), 1E-3
|
||||||
|
assert_equal 2 ** 64, parse('18446744073709551616')
|
||||||
|
assert_equal 'foo', parse('"foo"')
|
||||||
|
assert parse('NaN', :allow_nan => true).nan?
|
||||||
|
assert parse('Infinity', :allow_nan => true).infinite?
|
||||||
|
assert parse('-Infinity', :allow_nan => true).infinite?
|
||||||
|
assert_raise(JSON::ParserError) { parse('[ 1, ]') }
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_some_strings
|
||||||
|
assert_equal([""], parse('[""]'))
|
||||||
|
assert_equal(["\\"], parse('["\\\\"]'))
|
||||||
|
assert_equal(['"'], parse('["\""]'))
|
||||||
|
assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]'))
|
||||||
|
assert_equal(
|
||||||
|
["\"\b\n\r\t\0\037"],
|
||||||
|
parse('["\"\b\n\r\t\u0000\u001f"]')
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_big_integers
|
||||||
|
json1 = JSON(orig = (1 << 31) - 1)
|
||||||
|
assert_equal orig, parse(json1)
|
||||||
|
json2 = JSON(orig = 1 << 31)
|
||||||
|
assert_equal orig, parse(json2)
|
||||||
|
json3 = JSON(orig = (1 << 62) - 1)
|
||||||
|
assert_equal orig, parse(json3)
|
||||||
|
json4 = JSON(orig = 1 << 62)
|
||||||
|
assert_equal orig, parse(json4)
|
||||||
|
json5 = JSON(orig = 1 << 64)
|
||||||
|
assert_equal orig, parse(json5)
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_some_wrong_inputs
|
||||||
|
assert_raise(ParserError) { parse('[] bla') }
|
||||||
|
assert_raise(ParserError) { parse('[] 1') }
|
||||||
|
assert_raise(ParserError) { parse('[] []') }
|
||||||
|
assert_raise(ParserError) { parse('[] {}') }
|
||||||
|
assert_raise(ParserError) { parse('{} []') }
|
||||||
|
assert_raise(ParserError) { parse('{} {}') }
|
||||||
|
assert_raise(ParserError) { parse('[NULL]') }
|
||||||
|
assert_raise(ParserError) { parse('[FALSE]') }
|
||||||
|
assert_raise(ParserError) { parse('[TRUE]') }
|
||||||
|
assert_raise(ParserError) { parse('[07] ') }
|
||||||
|
assert_raise(ParserError) { parse('[0a]') }
|
||||||
|
assert_raise(ParserError) { parse('[1.]') }
|
||||||
|
assert_raise(ParserError) { parse(' ') }
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_symbolize_names
|
||||||
|
assert_equal({ "foo" => "bar", "baz" => "quux" },
|
||||||
|
parse('{"foo":"bar", "baz":"quux"}'))
|
||||||
|
assert_equal({ :foo => "bar", :baz => "quux" },
|
||||||
|
parse('{"foo":"bar", "baz":"quux"}', :symbolize_names => true))
|
||||||
|
assert_raise(ArgumentError) do
|
||||||
|
parse('{}', :symbolize_names => true, :create_additions => true)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_comments
|
||||||
|
json = <<EOT
|
||||||
|
{
|
||||||
|
"key1":"value1", // eol comment
|
||||||
|
"key2":"value2" /* multi line
|
||||||
|
* comment */,
|
||||||
|
"key3":"value3" /* multi line
|
||||||
|
// nested eol comment
|
||||||
|
* comment */
|
||||||
|
}
|
||||||
|
EOT
|
||||||
|
assert_equal(
|
||||||
|
{ "key1" => "value1", "key2" => "value2", "key3" => "value3" },
|
||||||
|
parse(json))
|
||||||
|
json = <<EOT
|
||||||
|
{
|
||||||
|
"key1":"value1" /* multi line
|
||||||
|
// nested eol comment
|
||||||
|
/* illegal nested multi line comment */
|
||||||
|
* comment */
|
||||||
|
}
|
||||||
|
EOT
|
||||||
|
assert_raise(ParserError) { parse(json) }
|
||||||
|
json = <<EOT
|
||||||
|
{
|
||||||
|
"key1":"value1" /* multi line
|
||||||
|
// nested eol comment
|
||||||
|
closed multi comment */
|
||||||
|
and again, throw an Error */
|
||||||
|
}
|
||||||
|
EOT
|
||||||
|
assert_raise(ParserError) { parse(json) }
|
||||||
|
json = <<EOT
|
||||||
|
{
|
||||||
|
"key1":"value1" /*/*/
|
||||||
|
}
|
||||||
|
EOT
|
||||||
|
assert_equal({ "key1" => "value1" }, parse(json))
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_nesting
|
||||||
|
too_deep = '[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]'
|
||||||
|
too_deep_ary = eval too_deep
|
||||||
|
assert_raise(JSON::NestingError) { parse too_deep }
|
||||||
|
assert_raise(JSON::NestingError) { parse too_deep, :max_nesting => 100 }
|
||||||
|
ok = parse too_deep, :max_nesting => 101
|
||||||
|
assert_equal too_deep_ary, ok
|
||||||
|
ok = parse too_deep, :max_nesting => nil
|
||||||
|
assert_equal too_deep_ary, ok
|
||||||
|
ok = parse too_deep, :max_nesting => false
|
||||||
|
assert_equal too_deep_ary, ok
|
||||||
|
ok = parse too_deep, :max_nesting => 0
|
||||||
|
assert_equal too_deep_ary, ok
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_backslash
|
||||||
|
data = [ '\\.(?i:gif|jpe?g|png)$' ]
|
||||||
|
json = '["\\\\.(?i:gif|jpe?g|png)$"]'
|
||||||
|
assert_equal data, parse(json)
|
||||||
|
#
|
||||||
|
data = [ '\\"' ]
|
||||||
|
json = '["\\\\\""]'
|
||||||
|
assert_equal data, parse(json)
|
||||||
|
#
|
||||||
|
json = '["/"]'
|
||||||
|
data = [ '/' ]
|
||||||
|
assert_equal data, parse(json)
|
||||||
|
#
|
||||||
|
json = '["\""]'
|
||||||
|
data = ['"']
|
||||||
|
assert_equal data, parse(json)
|
||||||
|
#
|
||||||
|
json = '["\\\'"]'
|
||||||
|
data = ["'"]
|
||||||
|
assert_equal data, parse(json)
|
||||||
|
end
|
||||||
|
|
||||||
|
|
||||||
|
class SubArray < Array
|
||||||
|
def <<(v)
|
||||||
|
@shifted = true
|
||||||
|
super
|
||||||
|
end
|
||||||
|
|
||||||
|
def shifted?
|
||||||
|
@shifted
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class SubArray2 < Array
|
||||||
|
def to_json(*a)
|
||||||
|
{
|
||||||
|
JSON.create_id => self.class.name,
|
||||||
|
'ary' => to_a,
|
||||||
|
}.to_json(*a)
|
||||||
|
end
|
||||||
|
|
||||||
|
def self.json_create(o)
|
||||||
|
o.delete JSON.create_id
|
||||||
|
o['ary']
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
class SubArrayWrapper
|
||||||
|
def initialize
|
||||||
|
@data = []
|
||||||
|
end
|
||||||
|
|
||||||
|
attr_reader :data
|
||||||
|
|
||||||
|
def [](index)
|
||||||
|
@data[index]
|
||||||
|
end
|
||||||
|
|
||||||
|
def <<(value)
|
||||||
|
@data << value
|
||||||
|
@shifted = true
|
||||||
|
end
|
||||||
|
|
||||||
|
def shifted?
|
||||||
|
@shifted
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_array_custom_array_derived_class
|
||||||
|
res = parse('[1,2]', :array_class => SubArray)
|
||||||
|
assert_equal([1,2], res)
|
||||||
|
assert_equal(SubArray, res.class)
|
||||||
|
assert res.shifted?
|
||||||
|
end
|
||||||
|
|
||||||
|
def test_parse_array_custom_non_array_derived_class
|
||||||
|
res = parse('[1,2]', :array_class => SubArrayWrapper)
|
||||||
|
assert_equal([1,2], res.data)
|
||||||
|
assert_equal(SubArrayWrapper, res.class)
|
||||||
|
assert res.shifted?
|
||||||
|
end
|
||||||
|
|
||||||
|
  def test_parse_object
    assert_equal({}, parse('{}'))
    assert_equal({}, parse(' { } '))
    assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}'))
    assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } '))
  end

  class SubHash < Hash
    def []=(k, v)
      @item_set = true
      super
    end

    def item_set?
      @item_set
    end
  end

  class SubHash2 < Hash
    def to_json(*a)
      {
        JSON.create_id => self.class.name,
      }.merge(self).to_json(*a)
    end

    def self.json_create(o)
      o.delete JSON.create_id
      self[o]
    end
  end

  class SubOpenStruct < OpenStruct
    def [](k)
      __send__(k)
    end

    def []=(k, v)
      @item_set = true
      __send__("#{k}=", v)
    end

    def item_set?
      @item_set
    end
  end

  def test_parse_object_custom_hash_derived_class
    res = parse('{"foo":"bar"}', :object_class => SubHash)
    assert_equal({"foo" => "bar"}, res)
    assert_equal(SubHash, res.class)
    assert res.item_set?
  end

  def test_parse_object_custom_non_hash_derived_class
    res = parse('{"foo":"bar"}', :object_class => SubOpenStruct)
    assert_equal "bar", res.foo
    assert_equal(SubOpenStruct, res.class)
    assert res.item_set?
  end

  def test_parse_generic_object
    res = parse(
      '{"foo":"bar", "baz":{}}',
      :object_class => JSON::GenericObject
    )
    assert_equal(JSON::GenericObject, res.class)
    assert_equal "bar", res.foo
    assert_equal "bar", res["foo"]
    assert_equal "bar", res[:foo]
    assert_equal "bar", res.to_hash[:foo]
    assert_equal(JSON::GenericObject, res.baz.class)
  end

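Likewise, :object_class swaps the container used for JSON objects; the tests above only rely on an argument-less .new plus #[]= (SubOpenStruct also adds #[] for its own assertions). JSON::GenericObject, tested directly above, gives OpenStruct-style access out of the box. A short sketch:

    require 'json'
    require 'json/generic_object'  # explicit require, in case it is not loaded lazily

    res = JSON.parse('{"foo":"bar", "baz":{}}', :object_class => JSON::GenericObject)
    res.foo        # => "bar"
    res['foo']     # => "bar"
    res.baz.class  # => JSON::GenericObject
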
  def test_generate_core_subclasses_with_new_to_json
    obj = SubHash2["foo" => SubHash2["bar" => true]]
    obj_json = JSON(obj)
    obj_again = parse(obj_json, :create_additions => true)
    assert_kind_of SubHash2, obj_again
    assert_kind_of SubHash2, obj_again['foo']
    assert obj_again['foo']['bar']
    assert_equal obj, obj_again
    assert_equal ["foo"],
      JSON(JSON(SubArray2["foo"]), :create_additions => true)
  end

  def test_generate_core_subclasses_with_default_to_json
    assert_equal '{"foo":"bar"}', JSON(SubHash["foo" => "bar"])
    assert_equal '["foo"]', JSON(SubArray["foo"])
  end

  def test_generate_of_core_subclasses
    obj = SubHash["foo" => SubHash["bar" => true]]
    obj_json = JSON(obj)
    obj_again = JSON(obj_json)
    assert_kind_of Hash, obj_again
    assert_kind_of Hash, obj_again['foo']
    assert obj_again['foo']['bar']
    assert_equal obj, obj_again
  end

  private

  def assert_equal_float(expected, actual, delta = 1e-2)
    Array === expected and expected = expected.first
    Array === actual and actual = actual.first
    assert_in_delta(expected, actual, delta)
  end
end

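The SubHash2/SubArray2 round trips above use the create_id protocol: to_json embeds JSON.create_id (the "json_class" key by default), and parsing with :create_additions => true hands the decoded hash to that class's json_create. A self-contained sketch with a hypothetical Point class, not taken from the gem:

    require 'json'

    class Point                    # hypothetical example class
      attr_reader :x, :y

      def initialize(x, y)
        @x, @y = x, y
      end

      def ==(other)
        other.is_a?(Point) && x == other.x && y == other.y
      end

      def to_json(*args)
        { JSON.create_id => self.class.name, 'x' => x, 'y' => y }.to_json(*args)
      end

      def self.json_create(hash)   # receives the parsed hash, create_id key included
        new(hash['x'], hash['y'])
      end
    end

    json = JSON.generate(Point.new(1, 2))
    # => "{\"json_class\":\"Point\",\"x\":1,\"y\":2}"
    JSON.parse(json, :create_additions => true) == Point.new(1, 2)  # => true
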
@ -1,10 +1,5 @@
#!/usr/bin/env ruby
# encoding: utf-8
# frozen_string_literal: false

require 'test/unit'
require File.join(File.dirname(__FILE__), 'setup_variant')
require 'stringio'
require 'test_helper'
require 'time'

class TestJSONStringMatching < Test::Unit::TestCase
@ -27,14 +22,17 @@ class TestJSONStringMatching < Test::Unit::TestCase
  def test_match_date
    t = TestTime.new
    t_json = [ t ].to_json
    time_regexp = /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/
    assert_equal [ t ],
      JSON.parse(t_json, :create_additions => true,
        :match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
      parse(
        t_json,
        :create_additions => true,
        :match_string => { time_regexp => TestTime }
      )
    assert_equal [ t.strftime('%FT%T%z') ],
      JSON.parse(t_json, :create_additions => true,
        :match_string => { /\A\d{3}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
    assert_equal [ t.strftime('%FT%T%z') ],
      JSON.parse(t_json,
        :match_string => { /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}[+-]\d{4}\z/ => TestTime })
      parse(
        t_json,
        :match_string => { time_regexp => TestTime }
      )
  end
end

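For context, the :match_string option exercised in this hunk maps a Regexp to a class: with :create_additions enabled, any parsed string matching the pattern is passed to that class's json_create, while without :create_additions the string comes back untouched (the second assertion above). The TestTime helper is defined outside this hunk, so the sketch below uses a hypothetical stand-in:

    require 'json'

    class StampedString            # hypothetical stand-in for TestTime
      attr_reader :raw

      def initialize(raw)
        @raw = raw
      end

      def self.json_create(string) # receives the matching string itself
        new(string)
      end
    end

    json = '["2016-07-05T20:49:30+0900"]'
    parsed = JSON.parse(json, :create_additions => true,
                              :match_string => { /\A\d{4}-\d{2}-\d{2}T/ => StampedString })
    parsed.first.class  # => StampedString
    parsed.first.raw    # => "2016-07-05T20:49:30+0900"

    # Without :create_additions the matching string is left as-is.
    JSON.parse(json, :match_string => { /\A\d{4}-\d{2}-\d{2}T/ => StampedString })
    # => ["2016-07-05T20:49:30+0900"]
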
@ -1,73 +0,0 @@
#!/usr/bin/env ruby
# encoding: utf-8
# frozen_string_literal: false

require 'test/unit'
require File.join(File.dirname(__FILE__), 'setup_variant')

class TestJSONUnicode < Test::Unit::TestCase
  include JSON

  def test_unicode
    assert_equal '""', ''.to_json
    assert_equal '"\\b"', "\b".to_json
    assert_equal '"\u0001"', 0x1.chr.to_json
    assert_equal '"\u001f"', 0x1f.chr.to_json
    assert_equal '" "', ' '.to_json
    assert_equal "\"#{0x7f.chr}\"", 0x7f.chr.to_json
    utf8 = [ "© ≠ €! \01" ]
    json = '["© ≠ €! \u0001"]'
    assert_equal json, utf8.to_json(:ascii_only => false)
    assert_equal utf8, parse(json)
    json = '["\u00a9 \u2260 \u20ac! \u0001"]'
    assert_equal json, utf8.to_json(:ascii_only => true)
    assert_equal utf8, parse(json)
    utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
    json = "[\"\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212\"]"
    assert_equal utf8, parse(json)
    assert_equal json, utf8.to_json(:ascii_only => false)
    utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"]
    assert_equal utf8, parse(json)
    json = "[\"\\u3042\\u3044\\u3046\\u3048\\u304a\"]"
    assert_equal json, utf8.to_json(:ascii_only => true)
    assert_equal utf8, parse(json)
    utf8 = ['საქართველო']
    json = '["საქართველო"]'
    assert_equal json, utf8.to_json(:ascii_only => false)
    json = "[\"\\u10e1\\u10d0\\u10e5\\u10d0\\u10e0\\u10d7\\u10d5\\u10d4\\u10da\\u10dd\"]"
    assert_equal json, utf8.to_json(:ascii_only => true)
    assert_equal utf8, parse(json)
    assert_equal '["Ã"]', JSON.generate(["Ã"], :ascii_only => false)
    assert_equal '["\\u00c3"]', JSON.generate(["Ã"], :ascii_only => true)
    assert_equal ["€"], JSON.parse('["\u20ac"]')
    utf8 = ["\xf0\xa0\x80\x81"]
    json = "[\"\xf0\xa0\x80\x81\"]"
    assert_equal json, JSON.generate(utf8, :ascii_only => false)
    assert_equal utf8, JSON.parse(json)
    json = '["\ud840\udc01"]'
    assert_equal json, JSON.generate(utf8, :ascii_only => true)
    assert_equal utf8, JSON.parse(json)
  end

  def test_chars
    (0..0x7f).each do |i|
      json = '["\u%04x"]' % i
      if RUBY_VERSION >= "1.9."
        i = i.chr
      end
      assert_equal i, JSON.parse(json).first[0]
      if i == ?\b
        generated = JSON.generate(["" << i])
        assert '["\b"]' == generated || '["\10"]' == generated
      elsif [?\n, ?\r, ?\t, ?\f].include?(i)
        assert_equal '[' << ('' << i).dump << ']', JSON.generate(["" << i])
      elsif i.chr < 0x20.chr
        assert_equal json, JSON.generate(["" << i])
      end
    end
    assert_raise(JSON::GeneratorError) do
      JSON.generate(["\x80"], :ascii_only => true)
    end
    assert_equal "\302\200", JSON.parse('["\u0080"]').first
  end
end

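This hunk removes the old standalone unicode test file. A compact sketch of the :ascii_only generator behaviour its assertions pin down; the "# =>" comments are illustrative inspect output:

    require 'json'

    # Non-ASCII characters can be forced into \uXXXX escapes on generation;
    # the parser accepts both the raw and the escaped form.
    JSON.generate(["€"], :ascii_only => true)    # => "[\"\\u20ac\"]"
    JSON.generate(["€"], :ascii_only => false)   # => "[\"€\"]"
    JSON.parse('["\u20ac"]')                     # => ["€"]

    # Characters outside the BMP are emitted as surrogate pairs when escaping.
    JSON.generate(["\u{20001}"], :ascii_only => true)  # => "[\"\\ud840\\udc01\"]"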