From 6017de0314b5115183c04b81562bbe7fdb71959a Mon Sep 17 00:00:00 2001
From: duerst
Date: Sun, 19 Oct 2014 00:38:40 +0000
Subject: [PATCH] lib/unicode_normalize/normalize.rb: Importing from
 https://github.com/duerst/eprun/blob/master/lib/normalize.rb.

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@48005 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
---
 ChangeLog                          |   5 +
 lib/unicode_normalize/normalize.rb | 176 +++++++++++++++++++++++++++++
 2 files changed, 181 insertions(+)
 create mode 100644 lib/unicode_normalize/normalize.rb

diff --git a/ChangeLog b/ChangeLog
index ddbc6e8b5a..df03a91453 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,8 @@
+Sun Oct 19 09:38:38 2014  Martin Duerst
+
+	* lib/unicode_normalize/normalize.rb: Importing from
+	  https://github.com/duerst/eprun/blob/master/lib/normalize.rb.
+
 Sat Oct 18 20:40:52 2014  Kazuki Tsujimoto
 
 	* vm_core.h, proc.c, vm_backtrace.c, vm_trace.c:
diff --git a/lib/unicode_normalize/normalize.rb b/lib/unicode_normalize/normalize.rb
new file mode 100644
index 0000000000..4f0fdbc78a
--- /dev/null
+++ b/lib/unicode_normalize/normalize.rb
@@ -0,0 +1,176 @@
+# coding: utf-8
+
+# Copyright 2010-2013 Ayumu Nojima (野島 歩) and Martin J. Dürst (duerst@it.aoyama.ac.jp)
+# available under the same licence as Ruby itself
+# (see http://www.ruby-lang.org/en/LICENSE.txt)
+
+require_relative 'normalize_tables'
+
+
+module Normalize
+  ## Constant for max hash capacity to avoid DoS attack
+  MAX_HASH_LENGTH = 18000 # enough for all test cases, otherwise tests get slow
+
+  ## Regular Expressions and Hash Constants
+  REGEXP_D = Regexp.compile(REGEXP_D_STRING, Regexp::EXTENDED)
+  REGEXP_C = Regexp.compile(REGEXP_C_STRING, Regexp::EXTENDED)
+  REGEXP_K = Regexp.compile(REGEXP_K_STRING, Regexp::EXTENDED)
+  NF_HASH_D = Hash.new do |hash, key|
+    hash.delete hash.first[0] if hash.length>MAX_HASH_LENGTH # prevent DoS attack
+    hash[key] = Normalize.nfd_one(key)
+  end
+  NF_HASH_C = Hash.new do |hash, key|
+    hash.delete hash.first[0] if hash.length>MAX_HASH_LENGTH # prevent DoS attack
+    hash[key] = Normalize.nfc_one(key)
+  end
+  NF_HASH_K = Hash.new do |hash, key|
+    hash.delete hash.first[0] if hash.length>MAX_HASH_LENGTH # prevent DoS attack
+    hash[key] = Normalize.nfkd_one(key)
+  end
+
+  ## Constants For Hangul
+  SBASE = 0xAC00
+  LBASE = 0x1100
+  VBASE = 0x1161
+  TBASE = 0x11A7
+  LCOUNT = 19
+  VCOUNT = 21
+  TCOUNT = 28
+  NCOUNT = VCOUNT * TCOUNT
+  SCOUNT = LCOUNT * NCOUNT
+
+  # Unicode-based encodings (except UTF-8)
+  UNICODE_ENCODINGS = [Encoding::UTF_16BE, Encoding::UTF_16LE, Encoding::UTF_32BE, Encoding::UTF_32LE,
+                       Encoding::GB18030, Encoding::UCS_2BE, Encoding::UCS_4BE]
+
+  ## Hangul Algorithm
+  def Normalize.hangul_decomp_one(target)
+    sIndex = target.ord - SBASE
+    return target if sIndex < 0 || sIndex >= SCOUNT
+    l = LBASE + sIndex / NCOUNT
+    v = VBASE + (sIndex % NCOUNT) / TCOUNT
+    t = TBASE + sIndex % TCOUNT
+    (t==TBASE ? [l, v] : [l, v, t]).pack('U*') + target[1..-1]
+  end
+
+  def Normalize.hangul_comp_one(string)
+    length = string.length
+    if length>1 and 0 <= (lead =string[0].ord-LBASE) and lead < LCOUNT and
+                    0 <= (vowel=string[1].ord-VBASE) and vowel < VCOUNT
+      lead_vowel = SBASE + (lead * VCOUNT + vowel) * TCOUNT
+      if length>2 and 0 <= (trail=string[2].ord-TBASE) and trail < TCOUNT
+        (lead_vowel + trail).chr(Encoding::UTF_8) + string[3..-1]
+      else
+        lead_vowel.chr(Encoding::UTF_8) + string[2..-1]
+      end
+    else
+      string
+    end
+  end
+
+  ## Canonical Ordering
+  def Normalize.canonical_ordering_one(string)
+    sorting = string.each_char.collect { |c| [c, CLASS_TABLE[c]] }
+    (sorting.length-2).downto(0) do |i| # bubble sort
+      (0..i).each do |j|
+        later_class = sorting[j+1].last
+        if 0