# frozen_string_literal: true

require_relative 'helper'

require 'date'
module Psych
  # Exercises Psych::ScalarScanner, which converts YAML scalar strings into
  # native Ruby objects (Time, Date, Integer, Float, Symbol, nil, booleans)
  # and leaves unrecognized or malformed scalars as plain Strings.
  class TestScalarScanner < TestCase
    attr_reader :ss

    def setup
      super
      @ss = Psych::ScalarScanner.new ClassLoader.new
    end

    # Valid timestamps in several YAML timestamp formats (ISO 8601 with 'T'
    # or 't', space-separated, partial-hour offsets) all resolve to the
    # equivalent UTC Time.
    def test_scan_time
      { '2001-12-15T02:59:43.1Z' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-14t21:59:43.10-05:00' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-14 21:59:43.10 -5' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-15 2:59:43.10' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2011-02-24 11:17:06 -0800' => Time.utc(2011, 02, 24, 19, 17, 06)
      }.each do |time_str, time|
        assert_equal time, @ss.tokenize(time_str)
      end
    end

    # Timestamp-shaped strings with out-of-range components (month 92,
    # hour 90, second 73, ...) are left as plain Strings rather than raising.
    def test_scan_bad_time
      [ '2001-12-15T02:59:73.1Z',
        '2001-12-14t90:59:43.10-05:00',
        '2001-92-14 21:59:43.10 -5',
        '2001-12-15 92:59:43.10',
        '2011-02-24 81:17:06 -0800',
      ].each do |time_str|
        assert_equal time_str, @ss.tokenize(time_str)
      end
    end

    # Date-shaped strings with impossible month/day values stay Strings.
    def test_scan_bad_dates
      x = '2000-15-01'
      assert_equal x, @ss.tokenize(x)

      x = '2000-10-51'
      assert_equal x, @ss.tokenize(x)

      x = '2000-10-32'
      assert_equal x, @ss.tokenize(x)
    end

    # A single-digit month with a valid boundary day still parses as a Date.
    def test_scan_good_edge_date
      x = '2000-1-31'
      assert_equal Date.strptime(x, '%Y-%m-%d'), @ss.tokenize(x)
    end

    # November 31st does not exist, so the scalar stays a String.
    def test_scan_bad_edge_date
      x = '2000-11-31'
      assert_equal x, @ss.tokenize(x)
    end

    def test_scan_date
      date = '1980-12-16'
      token = @ss.tokenize date
      assert_equal 1980, token.year
      assert_equal 12, token.month
      assert_equal 16, token.day
    end

    def test_scan_inf
      assert_equal(1 / 0.0, ss.tokenize('.inf'))
    end

    def test_scan_plus_inf
      assert_equal(1 / 0.0, ss.tokenize('+.inf'))
    end

    def test_scan_minus_inf
      assert_equal(-1 / 0.0, ss.tokenize('-.inf'))
    end

    def test_scan_nan
      assert ss.tokenize('.nan').nan?
    end

    def test_scan_float_with_exponent_but_no_fraction
      assert_equal(0.0, ss.tokenize('0.E+0'))
    end

    # 'null', '~', and the empty string are all YAML spellings of nil.
    def test_scan_null
      assert_nil ss.tokenize('null')
      assert_nil ss.tokenize('~')
      assert_nil ss.tokenize('')
    end

    def test_scan_symbol
      assert_equal :foo, ss.tokenize(':foo')
    end

    # Colon-separated strings that are not valid sexagesimal numbers
    # (hex digits, too many segments) must remain Strings.
    def test_scan_not_sexagesimal
      assert_equal '00:00:00:00:0f', ss.tokenize('00:00:00:00:0f')
      assert_equal '00:00:00:00:00', ss.tokenize('00:00:00:00:00')
      assert_equal '00:00:00:00:00.0', ss.tokenize('00:00:00:00:00.0')
    end

    # 190:20:30.15 == 190*3600 + 20*60 + 30.15 == 685230.15
    def test_scan_sexagesimal_float
      assert_equal 685230.15, ss.tokenize('190:20:30.15')
    end

    def test_scan_sexagesimal_int
      assert_equal 685230, ss.tokenize('190:20:30')
    end

    def test_scan_float
      assert_equal 1.2, ss.tokenize('1.2')
    end

    def test_scan_true
      assert_equal true, ss.tokenize('true')
    end

    def test_scan_strings_starting_with_underscores
      assert_equal "_100", ss.tokenize('_100')
    end

    def test_scan_strings_ending_with_underscores
      assert_equal "100_", ss.tokenize('100_')
    end

    def test_scan_int_commas_and_underscores
      # NB: This test is to ensure backward compatibility with prior Psych versions,
      # not to test against any actual YAML specification.
      assert_equal 123_456_789, ss.tokenize('123_456_789')
      assert_equal 123_456_789, ss.tokenize('123,456,789')
      assert_equal 123_456_789, ss.tokenize('1_2,3,4_5,6_789')

      assert_equal 1, ss.tokenize('1')
      assert_equal 1, ss.tokenize('+1')
      assert_equal(-1, ss.tokenize('-1'))

      assert_equal 0b010101010, ss.tokenize('0b010101010')
      assert_equal 0b010101010, ss.tokenize('0b0,1_0,1_,0,1_01,0')

      assert_equal 01234567, ss.tokenize('01234567')
      assert_equal 01234567, ss.tokenize('0_,,,1_2,_34567')

      assert_equal 0x123456789abcdef, ss.tokenize('0x123456789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x12_,34,_56,_789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x_12_,34,_56,_789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x12_,34,_56,_789abcdef__')
    end

    # A bare dot is not a float (and not .inf/.nan): stays a String.
    def test_scan_dot
      assert_equal '.', ss.tokenize('.')
    end

    def test_scan_plus_dot
      assert_equal '+.', ss.tokenize('+.')
    end
  end
end