class Lingo::Attendee::Tokenizer
Constants
- ALNUM
- DIGIT
- NESTS
- OTHER
- PROTO
- RULES
Public Class Methods
append(*rules)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 144
# Appends the given rules at the end of the rule set (delegates to _insert
# with index -1, i.e. Array#insert append semantics).
def append(*rules)
  _insert(-1, rules)
end
delete(*names)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 132
# Removes every rule whose name matches one of +names+ from RULES.
# Uses rules(name) to collect all entries for a name (names may repeat).
def delete(*names)
  names.map { |name|
    rules(name).each { |rule| RULES.delete(rule) } }
end
insert(*rules)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 140
# Prepends the given rules at the front of the rule set (index 0).
def insert(*rules)
  _insert(0, rules)
end
insert_after(name, *rules)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 152
# Inserts the given rules directly after the rule named +name+
# (offset -1 positions them past the found index; see _insert_name).
def insert_after(name, *rules)
  _insert_name(name, rules, -1)
end
insert_before(name, *rules)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 148
# Inserts the given rules directly before the rule named +name+
# (offset 0 keeps them at the found index; see _insert_name).
def insert_before(name, *rules)
  _insert_name(name, rules, 0)
end
replace(name, expr = nil) { |*rule| ... }
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 136
# Replaces the expression (second element) of every rule named +name+.
# The new expression is +expr+, or, when +expr+ is nil/false, the value
# yielded from the block for each rule.
def replace(name, expr = nil)
  rules(name).each { |rule| rule[1] = expr || yield(*rule) }
end
rule(name)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 124
# Returns the first rule (a [name, expr] pair) named +name+, or nil.
def rule(name)
  RULES.assoc(name)
end
rules(name = nil)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 128
# With +name+: returns all rules bearing that name (names may repeat).
# Without: returns the list of all rule names, in order.
def rules(name = nil)
  name ? RULES.select { |rule,| rule == name } : RULES.map(&:first)
end
Private Class Methods
_insert(index, rules)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 158
# Inserts +rules+ into RULES at +index+. A trailing Hash is flattened
# into [name, expr] pairs first (splatting a Hash yields its pairs).
def _insert(index, rules)
  rules.push(*rules.pop) if rules.last.is_a?(Hash)
  RULES.insert(index, *rules)
end
_insert_name(name, rules, offset)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 163
# Inserts +rules+ relative to the rule named +name+: at index - offset
# when found (offset 0 => before, -1 => after), else at +offset+ itself.
def _insert_name(name, rules, offset)
  index = RULES.index(rule(name))
  _insert(index ? index - offset : offset, rules)
end
Public Instance Methods
control(cmd, filename = nil, *)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 201
# Reacts to pipeline commands: new file => reset state with its name,
# LIR record => reset without line counting, end of line => bump line
# number, end of file => drop any open override/nesting state.
def control(cmd, filename = nil, *)
  case cmd
    when :FILE then reset(filename)
    when :LIR  then reset(nil, nil)
    when :EOL  then @linenum += 1 if @linenum
    when :EOF  then @override.clear; @nest.clear
  end
end
init()
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 170
# Initializes the tokenizer from its configuration: reads the 'space',
# 'tags' and 'wiki' flags and the 'skip-tags' list, prunes HTML/Wiki
# rules and nestings unless enabled, and precompiles a combined regexp
# (@nest_re) with one named capture per nesting construct.
def init
  @space = get_key('space', false)
  @tags  = get_key('tags',  false)
  @wiki  = get_key('wiki',  false)

  @skip_tags = get_ary('skip-tags', '', :downcase)
  # any skip-tags imply that tag processing must be on
  @tags = true unless @skip_tags.empty?

  skip = []
  skip << TA_HTML unless @tags
  skip << TA_WIKI unless @wiki

  # work on copies so the class-level RULES/NESTS stay untouched
  [@rules = RULES.dup, @nests = NESTS.dup].each { |hash|
    hash.delete_if { |name, _| skip.include?(Token.clean(name)) } }

  @override, @nest, nest_re = [], [], []

  @nests.each { |name, re|
    # plain strings become anchored regexps; non-first entries may
    # match anywhere in the line ('.*?' prefix)
    re.map!.with_index { |r, i| r.is_a?(Regexp) ? r :
      /^#{'.*?' if i > 0}#{Regexp.escape(r)}/ }

    # strip the '^' anchor so the opener can be embedded below
    nest_re << "(?<#{name}>#{Regexp.new(
      re[0].source.sub(/^\^/, ''), re[0].options)})" }

  # '_' captures the text leading up to the first nesting opener
  @nest_re = /^(?<_>.*?)(?:#{nest_re.join('|')})/

  reset
end
process(line, offset)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 210
# Tokenizes one input line starting at byte +offset+ and, when reading
# from a named file, signals end-of-line downstream.
def process(line, offset)
  @offset = offset
  tokenize(line)

  command(:EOL, @filename) if @filename
end
Private Instance Methods
forward_nest(match, nest, rest)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 289
# Emits the opening token of a nested construct, pushes it on the nest
# stack and continues tokenizing the remainder. For HTML with skip-tags
# configured, remembers the tag name so its content can be skipped.
def forward_nest(match, nest, rest)
  if overriding?(nest)
    tag = rest[/^[^\s>]*/].downcase
    @override << tag if @skip_tags.include?(tag)
  end

  forward_token(match, nest, rest)

  @nest << nest
  tokenize(rest)
end
forward_token(form, attr, rest = '')
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 301
# Builds and forwards a Token for +form+. While a skip-tag override is
# active the attribute is forced to TA_SKIP. The token's byte offset is
# derived from @offset minus the unconsumed bytes of form and rest.
def forward_token(form, attr, rest = '')
  forward(Token.new(form, @override.empty? ? attr : TA_SKIP,
    @position += 1, @offset - form.bytesize - rest.bytesize))
end
overriding?(nest)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 306
# True when +nest+ is the HTML nesting and skip-tags are configured,
# i.e. tag content overriding may apply.
def overriding?(nest)
  nest == TA_HTML && !@skip_tags.empty?
end
reset(filename = nil, linenum = 1)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 218
# Resets per-file state: current filename, line number (nil disables
# line counting, e.g. for LIR input), token position and byte offset.
def reset(filename = nil, linenum = 1)
  @filename, @linenum, @position, @offset = filename, linenum, -1, 0
end
tokenize(line)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 222
# Dispatches a line to plain-line or in-nest tokenization and wraps any
# unexpected error in a TokenizeError carrying file/line context
# (already-wrapped errors are re-raised untouched).
def tokenize(line)
  @nest.empty? ? tokenize_line(line) : tokenize_nest(line)
rescue => err
  raise err if err.is_a?(TokenizeError)
  raise TokenizeError.new(line, @filename, @linenum, err)
end
tokenize_line(line)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 229
# Repeatedly applies the token rules to the line, consuming matched
# prefixes. Breaks out when a rule makes no progress (rest as long as
# the input); any unmatched remainder is handed to tokenize_open.
def tokenize_line(line)
  while (length = line.length) > 0 && tokenize_rule(line) { |rest|
    length == rest.length ? break : line = rest }
  end

  tokenize_open(line) unless line.empty?
end
tokenize_nest(line)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 249
# Tokenizes a line while inside a nested construct. Compares the next
# nesting opener (via @nest_re) against the closer of the current nest
# and handles whichever comes first; with neither, the whole line is
# one token of the current nest.
def tokenize_nest(line)
  mdo = @nest_re.match(line)
  mdc = @nests[@nest.last].last.match(line)

  if mdo && (!mdc || mdo[0].length < mdc[0].length)
    # a (possibly different) nest opens before the current one closes
    rest = mdo.post_match
    nest = @nests.keys.find { |name| mdo[name] }
    text = mdo[nest]
    lead = mdo[:_]

    forward_token(lead, @nest.last, text + rest) unless lead.empty?
    forward_nest(text, nest, rest)
  elsif mdc
    # the current nest closes here; pop it and emit the closing token
    rest = mdc.post_match
    nest = @nest.pop
    text = mdc[0]

    forward_token(text, nest, rest)

    if overriding?(nest)
      # leaving the skipped tag ends the override
      @override.pop if text.downcase.end_with?("/#{@override.last}>")
    end

    tokenize(rest)
  else
    forward_token(line, @nest.last)
  end
end
tokenize_open(line)
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 279
# Handles a line fragment no regular rule matched: if a nesting opener
# matches, enter that nest; otherwise consume one OTHER token and
# continue tokenizing the remainder.
def tokenize_open(line)
  @nests.each { |nest, (open_re, _)|
    next unless line =~ open_re
    return forward_nest($&, nest, $') }

  tokenize_rule(line, OTHER) { |rest| line = rest }

  tokenize(line)
end
tokenize_rule(line, rules = @rules) { |rest| ... }
click to toggle source
# File lib/lingo/attendee/tokenizer.rb, line 238
# Finds the first rule whose expression matches the start of the line,
# forwards the matched token (space tokens only when @space is set) and
# yields the unmatched remainder. Returns nil when no rule matches.
def tokenize_rule(line, rules = @rules)
  rules.find { |name, expr|
    next unless line =~ expr
    rest = $'

    forward_token($&, name, rest) if name != TA_SPACE || @space

    yield rest
  }
end