Lrama v0.5.8

yui-knk 2023-10-26 07:54:47 +09:00 committed by Yuichiro Kaneko
parent c8d162c889
commit 7d159a8787
5 changed files with 210 additions and 283 deletions


@@ -8,7 +8,7 @@ module Lrama
 warning = Lrama::Warning.new
 text = options.y.read
 options.y.close if options.y != STDIN
-grammar = Lrama::Parser.new(text).parse
+grammar = Lrama::Parser.new(text, options.grammar_file).parse
 states = Lrama::States.new(grammar, warning, trace_state: (options.trace_opts[:automaton] || options.trace_opts[:closure]))
 states.compute
 context = Lrama::Context.new(states)

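The only change in this first file (the command entry point) is threading the grammar file path into the parser; the new error reporting further down uses it to prefix messages with file, line, and column. A minimal sketch of the new call shape, assuming the lrama gem is installed and that `require "lrama"` loads the parser; the grammar file name is hypothetical:

```ruby
require "lrama"

path = "parse.y"        # hypothetical grammar file
text = File.read(path)

# Two-argument initializer introduced by this commit: the path is only
# used for error reporting, the grammar itself still comes from `text`.
grammar = Lrama::Parser.new(text, path).parse
```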

@@ -308,39 +308,45 @@ module Lrama
 @nterms ||= @symbols.select(&:nterm?)
 end
+def scan_reference(scanner)
+start = scanner.pos
+case
+# $ references
+# It need to wrap an identifier with brackets to use ".-" for identifiers
+when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
+tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
+return [:dollar, "$", tag, start, scanner.pos - 1]
+when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
+tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
+return [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
+when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
+tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
+return [:dollar, scanner[2], tag, start, scanner.pos - 1]
+when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
+tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
+return [:dollar, scanner[2], tag, start, scanner.pos - 1]
+# @ references
+# It need to wrap an identifier with brackets to use ".-" for identifiers
+when scanner.scan(/@\$/) # @$
+return [:at, "$", nil, start, scanner.pos - 1]
+when scanner.scan(/@(\d+)/) # @1
+return [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
+when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
+return [:at, scanner[1], nil, start, scanner.pos - 1]
+when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
+return [:at, scanner[1], nil, start, scanner.pos - 1]
+end
+end
 def extract_references
 unless initial_action.nil?
 scanner = StringScanner.new(initial_action.s_value)
 references = []
 while !scanner.eos? do
-start = scanner.pos
-case
-# $ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, "$", tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-# @ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/@\$/) # @$
-references << [:at, "$", nil, start, scanner.pos - 1]
-when scanner.scan(/@(\d+)/) # @1
-references << [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
-when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
-when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
+if reference = scan_reference(scanner)
+references << reference
 else
 scanner.getch
 end
@@ -355,33 +361,8 @@ module Lrama
 references = []
 while !scanner.eos? do
-start = scanner.pos
-case
-# $ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, "$", tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-# @ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/@\$/) # @$
-references << [:at, "$", nil, start, scanner.pos - 1]
-when scanner.scan(/@(\d+)/) # @1
-references << [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
-when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
-when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
+if reference = scan_reference(scanner)
+references << reference
 else
 scanner.getch
 end
@@ -396,33 +377,8 @@ module Lrama
 references = []
 while !scanner.eos? do
-start = scanner.pos
-case
-# $ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, "$", tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-# @ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/@\$/) # @$
-references << [:at, "$", nil, start, scanner.pos - 1]
-when scanner.scan(/@(\d+)/) # @1
-references << [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
-when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
-when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
+if reference = scan_reference(scanner)
+references << reference
 else
 scanner.getch
 end
@@ -440,34 +396,9 @@ module Lrama
 references = []
 while !scanner.eos? do
-start = scanner.pos
 case
-# $ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, "$", tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, Integer(scanner[2]), tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?([a-zA-Z_][a-zA-Z0-9_]*)/) # $foo, $expr, $<long>program (named reference without brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-when scanner.scan(/\$(<[a-zA-Z0-9_]+>)?\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # $expr.right, $expr-right, $<long>program (named reference with brackets)
-tag = scanner[1] ? Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Tag, s_value: scanner[1]) : nil
-references << [:dollar, scanner[2], tag, start, scanner.pos - 1]
-# @ references
-# It need to wrap an identifier with brackets to use ".-" for identifiers
-when scanner.scan(/@\$/) # @$
-references << [:at, "$", nil, start, scanner.pos - 1]
-when scanner.scan(/@(\d+)/) # @1
-references << [:at, Integer(scanner[1]), nil, start, scanner.pos - 1]
-when scanner.scan(/@([a-zA-Z][a-zA-Z0-9_]*)/) # @foo, @expr (named reference without brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
-when scanner.scan(/@\[([a-zA-Z_.][-a-zA-Z0-9_.]*)\]/) # @expr.right, @expr-right (named reference with brackets)
-references << [:at, scanner[1], nil, start, scanner.pos - 1]
+when reference = scan_reference(scanner)
+references << reference
 when scanner.scan(/\/\*/)
 scanner.scan_until(/\*\//)
 else
@@ -482,14 +413,6 @@ module Lrama
 end
 end
-def create_token(type, s_value, line, column)
-t = Token.new(type: type, s_value: s_value)
-t.line = line
-t.column = column
-return t
-end
 private
 def find_nterm_by_id!(id)

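The hunks above pull the four near-identical `case scanner.scan(...)` chains into a single `scan_reference(scanner)` helper that each `extract_references` loop now calls. A minimal standalone sketch of the same StringScanner technique, with simplified patterns and hypothetical names rather than Lrama's actual API (the real code also handles `<tag>` casts, bracketed names, and start/end offsets):

```ruby
require "strscan"

# Returns [kind, value] for the reference at the scanner's position, or nil.
def scan_reference(scanner)
  case
  when scanner.scan(/\$\$/)    then [:dollar, "$"]
  when scanner.scan(/\$(\d+)/) then [:dollar, Integer(scanner[1])]
  when scanner.scan(/@\$/)     then [:at, "$"]
  when scanner.scan(/@(\d+)/)  then [:at, Integer(scanner[1])]
  end
end

def extract_references(code)
  scanner = StringScanner.new(code)
  references = []
  until scanner.eos?
    if (reference = scan_reference(scanner))
      references << reference
    else
      scanner.getch # not part of a reference, skip one character
    end
  end
  references
end

p extract_references("$$ = $1 + $2; /* @1 */")
# => [[:dollar, "$"], [:dollar, 1], [:dollar, 2], [:at, 1]]
```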

@@ -61,7 +61,6 @@ module Lrama
 o.separator ''
 o.separator 'Output:'
 o.on('-H', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v }
-o.on('-h=[FILE]', 'also produce a header file named FILE (deprecated)') {|v| @options.header = true; @options.header_file = v }
 o.on('-d', 'also produce a header file') { @options.header = true }
 o.on('-r', '--report=THINGS', Array, 'also produce details on the automaton') {|v| @report = v }
 o.on('--report-file=FILE', 'also produce details on the automaton output to a file named FILE') {|v| @options.report_file = v }
@@ -74,7 +73,7 @@
 o.separator ''
 o.separator 'Other options:'
 o.on('-V', '--version', "output version information and exit") {|v| puts "lrama #{Lrama::VERSION}"; exit 0 }
-o.on('--help', "display this help and exit") {|v| puts o; exit 0 }
+o.on('-h', '--help', "display this help and exit") {|v| puts o; exit 0 }
 o.separator ''
 o.parse!(argv)
 end

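These two option-parser hunks retire `-h` as a deprecated alias for `--header` and reassign it to `--help`. A small illustrative OptionParser sketch of that kind of short-flag reassignment, using a hypothetical option set rather than Lrama's full CLI:

```ruby
require "optparse"

options = { header: false }
parser = OptionParser.new do |o|
  o.banner = "Usage: mytool [options]"
  # '-H' keeps producing a header file; '-h' is now help only.
  o.on("-H", "--header=[FILE]", "also produce a header file named FILE") do |v|
    options[:header] = true
    options[:header_file] = v
  end
  o.on("-h", "--help", "display this help and exit") do
    puts o
    exit 0
  end
end

parser.parse!(ARGV)
```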

@@ -668,12 +668,13 @@ end
 module Lrama
 class Parser < Racc::Parser
-module_eval(<<'...end parser.y/module_eval...', 'parser.y', 387)
+module_eval(<<'...end parser.y/module_eval...', 'parser.y', 388)
 include Lrama::Report::Duration
-def initialize(text)
+def initialize(text, path)
 @text = text
+@path = path
 end
 def parse
@@ -696,74 +697,78 @@ def next_token
 end
 def on_error(error_token_id, error_value, value_stack)
-raise ParseError, sprintf("\n%d:%d: parse error on value %s (%s)",
-@lexer.line, @lexer.column, error_value.inspect, token_to_str(error_token_id) || '?')
+source = @text.split("\n")[error_value.line - 1]
+raise ParseError, <<~ERROR
+#{@path}:#{@lexer.line}:#{@lexer.column}: parse error on value #{error_value.inspect} (#{token_to_str(error_token_id) || '?'})
+#{source}
+#{' ' * @lexer.column}^
+ERROR
 end
 ...end parser.y/module_eval...
 ##### State transition tables begin ###
racc_action_table = [ racc_action_table = [
77, 6, 41, 42, 7, 83, 65, 56, 41, 42, 82, 132, 83, 42, 42, 41, 41, 65, 65, 42,
68, 41, 42, 3, 134, 8, 20, 22, 23, 24, 42, 41, 41, 131, 56, 68, 3, 8, 38, 134,
25, 26, 27, 28, 29, 30, 65, 82, 41, 42, 134, 42, 42, 41, 41, 65, 61, 68, 38, 6,
65, 132, 135, 32, 38, 84, 41, 42, 134, 68, 32, 7, 84, 77, 135, 135, 20, 22, 23, 24,
41, 42, 65, 68, 41, 42, 131, 61, 41, 42, 25, 26, 27, 28, 29, 30, 20, 22, 23, 24,
39, 9, 41, 42, 44, 14, 135, 12, 65, 13, 25, 26, 27, 28, 29, 30, 9, 39, 44, 14,
15, 16, 17, 18, 41, 42, 19, 20, 22, 23, 12, 13, 15, 16, 17, 18, 46, 46, 19, 20,
24, 25, 26, 27, 28, 29, 30, 65, 46, 65, 22, 23, 24, 25, 26, 27, 28, 29, 30, 42,
41, 42, 65, 46, 65, 41, 42, 65, 41, 42, 42, 41, 41, 46, 68, 68, 42, 42, 41, 41,
65, 41, 42, 65, 41, 42, 65, 41, 42, 41, 65, 153, 42, 42, 41, 41, 65, 153, 42, 42,
42, 41, 42, 65, 41, 42, 41, 42, 153, 41, 41, 41, 65, 153, 42, 42, 41, 41, 65, 153,
42, 153, 41, 42, 153, 41, 42, 153, 41, 42, 42, 42, 41, 41, 65, 153, 42, 42, 41, 41,
65, 68, 41, 42, 153, 41, 42, 20, 22, 23, 65, 153, 42, 42, 41, 41, 65, 65, 42, 42,
24, 25, 26, 27, 28, 29, 30, 41, 42, 41, 41, 41, 65, 65, 42, 42, 41, 41, 65, 65,
42, 153, 41, 42, 46, 38, 41, 42, 41, 42, 42, 42, 41, 41, 42, 42, 41, 41, 42, 42,
41, 42, 41, 42, 41, 42, 49, 50, 51, 52, 41, 41, 42, 42, 41, 41, 49, 50, 51, 52,
53, 74, 78, 80, 85, 85, 85, 92, 96, 97, 53, 74, 78, 80, 85, 85, 85, 92, 96, 97,
105, 106, 108, 109, 110, 111, 112, 113, 116, 118, 105, 106, 108, 109, 110, 111, 112, 113, 116, 118,
119, 122, 123, 124, 138, 139, 140, 141, 142, 143, 119, 122, 123, 124, 138, 139, 140, 141, 142, 143,
122, 80, 148, 149, 156, 160, 161, 80, 80 ] 122, 80, 148, 149, 156, 160, 161, 80, 80 ]
racc_action_check = [ racc_action_check = [
33, 2, 13, 13, 2, 40, 121, 25, 25, 25, 40, 120, 40, 121, 144, 121, 144, 121, 144, 25,
27, 27, 27, 1, 121, 3, 33, 33, 33, 33, 27, 25, 27, 120, 25, 27, 1, 3, 9, 121,
33, 33, 33, 33, 33, 33, 26, 40, 121, 121, 144, 26, 28, 26, 28, 26, 26, 28, 33, 2,
144, 120, 121, 7, 33, 40, 55, 55, 144, 28, 7, 2, 40, 33, 121, 144, 9, 9, 9, 9,
28, 28, 60, 29, 29, 29, 120, 26, 26, 26, 9, 9, 9, 9, 9, 9, 33, 33, 33, 33,
12, 4, 144, 144, 14, 4, 144, 4, 61, 4, 33, 33, 33, 33, 33, 33, 4, 12, 14, 4,
4, 4, 4, 4, 60, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 15, 16, 4, 4,
4, 4, 4, 4, 4, 4, 4, 97, 15, 99, 4, 4, 4, 4, 4, 4, 4, 4, 4, 29,
61, 61, 117, 16, 135, 56, 56, 141, 66, 66, 30, 29, 30, 17, 29, 30, 141, 13, 141, 13,
142, 67, 67, 143, 68, 68, 150, 71, 71, 97, 141, 141, 142, 55, 142, 55, 142, 142, 143, 56,
97, 99, 99, 154, 117, 117, 135, 135, 141, 141, 143, 56, 143, 143, 150, 66, 150, 66, 150, 150,
141, 142, 142, 142, 143, 143, 143, 150, 150, 150, 154, 67, 154, 67, 154, 154, 155, 68, 155, 68,
155, 30, 30, 30, 154, 154, 154, 9, 9, 9, 155, 155, 60, 61, 60, 61, 60, 61, 97, 99,
9, 9, 9, 9, 9, 9, 9, 72, 72, 73, 97, 99, 97, 99, 117, 135, 117, 135, 117, 135,
73, 155, 155, 155, 17, 9, 92, 92, 94, 94, 71, 72, 71, 72, 73, 92, 73, 92, 94, 100,
100, 100, 102, 102, 114, 114, 18, 20, 22, 23, 94, 100, 102, 114, 102, 114, 18, 20, 22, 23,
24, 31, 36, 37, 45, 47, 48, 54, 58, 59, 24, 31, 36, 37, 45, 47, 48, 54, 58, 59,
79, 80, 86, 87, 88, 89, 90, 91, 95, 103, 79, 80, 86, 87, 88, 89, 90, 91, 95, 103,
104, 105, 106, 107, 125, 126, 127, 128, 129, 130, 104, 105, 106, 107, 125, 126, 127, 128, 129, 130,
131, 133, 136, 137, 146, 157, 159, 160, 161 ] 131, 133, 136, 137, 146, 157, 159, 160, 161 ]
racc_action_pointer = [ racc_action_pointer = [
nil, 13, -2, 15, 49, nil, nil, 26, nil, 109, nil, 16, 19, 17, 47, nil, nil, 23, nil, 14,
nil, nil, 41, -27, 47, 63, 68, 129, 141, nil, nil, nil, 51, 84, 51, 47, 48, 64, 137, nil,
142, nil, 143, 144, 145, -21, 19, -18, 11, 15, 138, nil, 139, 140, 141, 6, 18, 7, 19, 76,
93, 157, nil, -2, nil, nil, 145, 129, nil, nil, 77, 159, nil, 24, nil, nil, 141, 128, nil, nil,
-2, nil, nil, nil, nil, 149, nil, 150, 151, nil, -5, nil, nil, nil, nil, 145, nil, 146, 147, nil,
nil, nil, nil, nil, 139, 7, 56, nil, 159, 141, nil, nil, nil, nil, 159, 90, 96, nil, 162, 161,
35, 51, nil, nil, nil, nil, 59, 62, 65, nil, 119, 120, nil, nil, nil, nil, 102, 108, 114, nil,
nil, 68, 108, 110, nil, nil, nil, nil, nil, 139, nil, 137, 138, 141, nil, nil, nil, nil, nil, 138,
142, nil, nil, nil, nil, nil, 168, 169, 170, 171, 166, nil, nil, nil, nil, nil, 170, 171, 172, 173,
172, 173, 117, nil, 119, 171, nil, 70, nil, 72, 174, 175, 142, nil, 145, 171, nil, 125, nil, 126,
121, nil, 123, 174, 176, 166, 147, 179, nil, nil, 146, nil, 149, 168, 178, 162, 146, 181, nil, nil,
nil, nil, nil, nil, 125, nil, nil, 75, nil, nil, nil, nil, nil, nil, 150, nil, nil, 131, nil, nil,
14, -1, nil, nil, nil, 168, 169, 170, 171, 172, -20, 0, nil, nil, nil, 164, 165, 166, 167, 168,
173, 175, nil, 157, nil, 77, 188, 177, nil, nil, 169, 171, nil, 156, nil, 132, 190, 173, nil, nil,
nil, 80, 83, 86, 23, nil, 190, nil, nil, nil, nil, 83, 89, 95, 1, nil, 192, nil, nil, nil,
89, nil, nil, nil, 96, 113, nil, 179, nil, 180, 101, nil, nil, nil, 107, 113, nil, 175, nil, 176,
163, 164, nil, nil ] 162, 163, nil, nil ]
racc_action_default = [ racc_action_default = [
-2, -104, -8, -104, -104, -3, -4, -104, 164, -104, -2, -104, -8, -104, -104, -3, -4, -104, 164, -104,
@ -943,41 +948,41 @@ racc_shift_n = 164
racc_token_table = { racc_token_table = {
false => 0, false => 0,
:error => 1, :error => 1,
"%%" => 2, :C_DECLARATION => 2,
"%{" => 3, :CHARACTER => 3,
:C_DECLARATION => 4, :IDENT_COLON => 4,
"%}" => 5, :IDENTIFIER => 5,
"%require" => 6, :INTEGER => 6,
:STRING => 7, :STRING => 7,
"%expect" => 8, :TAG => 8,
:INTEGER => 9, "%%" => 9,
"%define" => 10, "%{" => 10,
"%param" => 11, "%}" => 11,
"%lex-param" => 12, "%require" => 12,
"%parse-param" => 13, "%expect" => 13,
"%initial-action" => 14, "%define" => 14,
"{" => 15, "%param" => 15,
"}" => 16, "%lex-param" => 16,
";" => 17, "%parse-param" => 17,
"%union" => 18, "%initial-action" => 18,
"%destructor" => 19, "{" => 19,
"%printer" => 20, "}" => 20,
"%error-token" => 21, ";" => 21,
"%token" => 22, "%union" => 22,
"%type" => 23, "%destructor" => 23,
"%left" => 24, "%printer" => 24,
"%right" => 25, "%error-token" => 25,
"%precedence" => 26, "%token" => 26,
"%nonassoc" => 27, "%type" => 27,
:TAG => 28, "%left" => 28,
:IDENTIFIER => 29, "%right" => 29,
:CHARACTER => 30, "%precedence" => 30,
":" => 31, "%nonassoc" => 31,
"|" => 32, ":" => 32,
"%prec" => 33, "|" => 33,
"[" => 34, "%prec" => 34,
"]" => 35, "[" => 35,
:IDENT_COLON => 36, "]" => 36,
"{...}" => 37 } "{...}" => 37 }
racc_nt_base = 38 racc_nt_base = 38
@ -1004,14 +1009,18 @@ Ractor.make_shareable(Racc_arg) if defined?(Ractor)
Racc_token_to_s_table = [ Racc_token_to_s_table = [
"$end", "$end",
"error", "error",
"C_DECLARATION",
"CHARACTER",
"IDENT_COLON",
"IDENTIFIER",
"INTEGER",
"STRING",
"TAG",
"\"%%\"", "\"%%\"",
"\"%{\"", "\"%{\"",
"C_DECLARATION",
"\"%}\"", "\"%}\"",
"\"%require\"", "\"%require\"",
"STRING",
"\"%expect\"", "\"%expect\"",
"INTEGER",
"\"%define\"", "\"%define\"",
"\"%param\"", "\"%param\"",
"\"%lex-param\"", "\"%lex-param\"",
@ -1030,15 +1039,11 @@ Racc_token_to_s_table = [
"\"%right\"", "\"%right\"",
"\"%precedence\"", "\"%precedence\"",
"\"%nonassoc\"", "\"%nonassoc\"",
"TAG",
"IDENTIFIER",
"CHARACTER",
"\":\"", "\":\"",
"\"|\"", "\"|\"",
"\"%prec\"", "\"%prec\"",
"\"[\"", "\"[\"",
"\"]\"", "\"]\"",
"IDENT_COLON",
"\"{...}\"", "\"{...}\"",
"$start", "$start",
"input", "input",
@ -1109,7 +1114,7 @@ Racc_debug_parser = false
# reduce 3 omitted # reduce 3 omitted
module_eval(<<'.,.,', 'parser.y', 9) module_eval(<<'.,.,', 'parser.y', 10)
def _reduce_4(val, _values, result) def _reduce_4(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '%}' @lexer.end_symbol = '%}'
@ -1119,7 +1124,7 @@ module_eval(<<'.,.,', 'parser.y', 9)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 15) module_eval(<<'.,.,', 'parser.y', 16)
def _reduce_5(val, _values, result) def _reduce_5(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1128,7 +1133,7 @@ module_eval(<<'.,.,', 'parser.y', 15)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 20) module_eval(<<'.,.,', 'parser.y', 21)
def _reduce_6(val, _values, result) def _reduce_6(val, _values, result)
@grammar.prologue = val[2].s_value @grammar.prologue = val[2].s_value
@ -1138,7 +1143,7 @@ module_eval(<<'.,.,', 'parser.y', 20)
# reduce 7 omitted # reduce 7 omitted
module_eval(<<'.,.,', 'parser.y', 24) module_eval(<<'.,.,', 'parser.y', 25)
def _reduce_8(val, _values, result) def _reduce_8(val, _values, result)
result = "" result = ""
result result
@ -1149,7 +1154,7 @@ module_eval(<<'.,.,', 'parser.y', 24)
# reduce 10 omitted # reduce 10 omitted
module_eval(<<'.,.,', 'parser.y', 28) module_eval(<<'.,.,', 'parser.y', 29)
def _reduce_11(val, _values, result) def _reduce_11(val, _values, result)
@grammar.expect = val[1] @grammar.expect = val[1]
result result
@ -1162,7 +1167,7 @@ module_eval(<<'.,.,', 'parser.y', 28)
# reduce 14 omitted # reduce 14 omitted
module_eval(<<'.,.,', 'parser.y', 34) module_eval(<<'.,.,', 'parser.y', 35)
def _reduce_15(val, _values, result) def _reduce_15(val, _values, result)
val[1].each {|token| val[1].each {|token|
token.references = [] token.references = []
@ -1173,7 +1178,7 @@ module_eval(<<'.,.,', 'parser.y', 34)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 41) module_eval(<<'.,.,', 'parser.y', 42)
def _reduce_16(val, _values, result) def _reduce_16(val, _values, result)
val[1].each {|token| val[1].each {|token|
token.references = [] token.references = []
@ -1184,7 +1189,7 @@ module_eval(<<'.,.,', 'parser.y', 41)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 48) module_eval(<<'.,.,', 'parser.y', 49)
def _reduce_17(val, _values, result) def _reduce_17(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1193,7 +1198,7 @@ module_eval(<<'.,.,', 'parser.y', 48)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 53) module_eval(<<'.,.,', 'parser.y', 54)
def _reduce_18(val, _values, result) def _reduce_18(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1202,7 +1207,7 @@ module_eval(<<'.,.,', 'parser.y', 53)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 58) module_eval(<<'.,.,', 'parser.y', 59)
def _reduce_19(val, _values, result) def _reduce_19(val, _values, result)
@grammar.initial_action = @grammar.build_code(:initial_action, val[3]) @grammar.initial_action = @grammar.build_code(:initial_action, val[3])
@ -1212,7 +1217,7 @@ module_eval(<<'.,.,', 'parser.y', 58)
# reduce 20 omitted # reduce 20 omitted
module_eval(<<'.,.,', 'parser.y', 64) module_eval(<<'.,.,', 'parser.y', 65)
def _reduce_21(val, _values, result) def _reduce_21(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1221,7 +1226,7 @@ module_eval(<<'.,.,', 'parser.y', 64)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 69) module_eval(<<'.,.,', 'parser.y', 70)
def _reduce_22(val, _values, result) def _reduce_22(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1230,7 +1235,7 @@ module_eval(<<'.,.,', 'parser.y', 69)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 74) module_eval(<<'.,.,', 'parser.y', 75)
def _reduce_23(val, _values, result) def _reduce_23(val, _values, result)
@grammar.set_union(@grammar.build_code(:union, val[3]), val[3].line) @grammar.set_union(@grammar.build_code(:union, val[3]), val[3].line)
@ -1240,7 +1245,7 @@ module_eval(<<'.,.,', 'parser.y', 74)
# reduce 24 omitted # reduce 24 omitted
module_eval(<<'.,.,', 'parser.y', 79) module_eval(<<'.,.,', 'parser.y', 80)
def _reduce_25(val, _values, result) def _reduce_25(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1249,7 +1254,7 @@ module_eval(<<'.,.,', 'parser.y', 79)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 84) module_eval(<<'.,.,', 'parser.y', 85)
def _reduce_26(val, _values, result) def _reduce_26(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1260,7 +1265,7 @@ module_eval(<<'.,.,', 'parser.y', 84)
# reduce 27 omitted # reduce 27 omitted
module_eval(<<'.,.,', 'parser.y', 90) module_eval(<<'.,.,', 'parser.y', 91)
def _reduce_28(val, _values, result) def _reduce_28(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1269,7 +1274,7 @@ module_eval(<<'.,.,', 'parser.y', 90)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 95) module_eval(<<'.,.,', 'parser.y', 96)
def _reduce_29(val, _values, result) def _reduce_29(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1278,7 +1283,7 @@ module_eval(<<'.,.,', 'parser.y', 95)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 100) module_eval(<<'.,.,', 'parser.y', 101)
def _reduce_30(val, _values, result) def _reduce_30(val, _values, result)
@grammar.add_printer(ident_or_tags: val[6], code: @grammar.build_code(:printer, val[3]), lineno: val[3].line) @grammar.add_printer(ident_or_tags: val[6], code: @grammar.build_code(:printer, val[3]), lineno: val[3].line)
@ -1286,7 +1291,7 @@ module_eval(<<'.,.,', 'parser.y', 100)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 104) module_eval(<<'.,.,', 'parser.y', 105)
def _reduce_31(val, _values, result) def _reduce_31(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1295,7 +1300,7 @@ module_eval(<<'.,.,', 'parser.y', 104)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 109) module_eval(<<'.,.,', 'parser.y', 110)
def _reduce_32(val, _values, result) def _reduce_32(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1304,7 +1309,7 @@ module_eval(<<'.,.,', 'parser.y', 109)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 114) module_eval(<<'.,.,', 'parser.y', 115)
def _reduce_33(val, _values, result) def _reduce_33(val, _values, result)
@grammar.add_error_token(ident_or_tags: val[6], code: @grammar.build_code(:error_token, val[3]), lineno: val[3].line) @grammar.add_error_token(ident_or_tags: val[6], code: @grammar.build_code(:error_token, val[3]), lineno: val[3].line)
@ -1314,7 +1319,7 @@ module_eval(<<'.,.,', 'parser.y', 114)
# reduce 34 omitted # reduce 34 omitted
module_eval(<<'.,.,', 'parser.y', 120) module_eval(<<'.,.,', 'parser.y', 121)
def _reduce_35(val, _values, result) def _reduce_35(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1326,7 +1331,7 @@ module_eval(<<'.,.,', 'parser.y', 120)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 128) module_eval(<<'.,.,', 'parser.y', 129)
def _reduce_36(val, _values, result) def _reduce_36(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1340,7 +1345,7 @@ module_eval(<<'.,.,', 'parser.y', 128)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 138) module_eval(<<'.,.,', 'parser.y', 139)
def _reduce_37(val, _values, result) def _reduce_37(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1354,7 +1359,7 @@ module_eval(<<'.,.,', 'parser.y', 138)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 148) module_eval(<<'.,.,', 'parser.y', 149)
def _reduce_38(val, _values, result) def _reduce_38(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1368,7 +1373,7 @@ module_eval(<<'.,.,', 'parser.y', 148)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 158) module_eval(<<'.,.,', 'parser.y', 159)
def _reduce_39(val, _values, result) def _reduce_39(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1382,7 +1387,7 @@ module_eval(<<'.,.,', 'parser.y', 158)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 169) module_eval(<<'.,.,', 'parser.y', 170)
def _reduce_40(val, _values, result) def _reduce_40(val, _values, result)
val[0].each {|token_declaration| val[0].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true)
@ -1392,7 +1397,7 @@ module_eval(<<'.,.,', 'parser.y', 169)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 175) module_eval(<<'.,.,', 'parser.y', 176)
def _reduce_41(val, _values, result) def _reduce_41(val, _values, result)
val[1].each {|token_declaration| val[1].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true)
@ -1402,7 +1407,7 @@ module_eval(<<'.,.,', 'parser.y', 175)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 181) module_eval(<<'.,.,', 'parser.y', 182)
def _reduce_42(val, _values, result) def _reduce_42(val, _values, result)
val[2].each {|token_declaration| val[2].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true)
@ -1412,21 +1417,21 @@ module_eval(<<'.,.,', 'parser.y', 181)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 186) module_eval(<<'.,.,', 'parser.y', 187)
def _reduce_43(val, _values, result) def _reduce_43(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 187) module_eval(<<'.,.,', 'parser.y', 188)
def _reduce_44(val, _values, result) def _reduce_44(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 189) module_eval(<<'.,.,', 'parser.y', 190)
def _reduce_45(val, _values, result) def _reduce_45(val, _values, result)
result = val result = val
result result
@ -1441,7 +1446,7 @@ module_eval(<<'.,.,', 'parser.y', 189)
# reduce 49 omitted # reduce 49 omitted
module_eval(<<'.,.,', 'parser.y', 199) module_eval(<<'.,.,', 'parser.y', 200)
def _reduce_50(val, _values, result) def _reduce_50(val, _values, result)
result = [{tag: nil, tokens: val[0]}] result = [{tag: nil, tokens: val[0]}]
@ -1449,7 +1454,7 @@ module_eval(<<'.,.,', 'parser.y', 199)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 203) module_eval(<<'.,.,', 'parser.y', 204)
def _reduce_51(val, _values, result) def _reduce_51(val, _values, result)
result = [{tag: val[0], tokens: val[1]}] result = [{tag: val[0], tokens: val[1]}]
@ -1457,7 +1462,7 @@ module_eval(<<'.,.,', 'parser.y', 203)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 207) module_eval(<<'.,.,', 'parser.y', 208)
def _reduce_52(val, _values, result) def _reduce_52(val, _values, result)
result = val[0].append({tag: val[1], tokens: val[2]}) result = val[0].append({tag: val[1], tokens: val[2]})
@ -1465,14 +1470,14 @@ module_eval(<<'.,.,', 'parser.y', 207)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 210) module_eval(<<'.,.,', 'parser.y', 211)
def _reduce_53(val, _values, result) def _reduce_53(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 211) module_eval(<<'.,.,', 'parser.y', 212)
def _reduce_54(val, _values, result) def _reduce_54(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -1483,7 +1488,7 @@ module_eval(<<'.,.,', 'parser.y', 211)
# reduce 56 omitted # reduce 56 omitted
module_eval(<<'.,.,', 'parser.y', 218) module_eval(<<'.,.,', 'parser.y', 219)
def _reduce_57(val, _values, result) def _reduce_57(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1492,7 +1497,7 @@ module_eval(<<'.,.,', 'parser.y', 218)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 223) module_eval(<<'.,.,', 'parser.y', 224)
def _reduce_58(val, _values, result) def _reduce_58(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1501,7 +1506,7 @@ module_eval(<<'.,.,', 'parser.y', 223)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 228) module_eval(<<'.,.,', 'parser.y', 229)
def _reduce_59(val, _values, result) def _reduce_59(val, _values, result)
result = val[0].append(val[3]) result = val[0].append(val[3])
@ -1509,7 +1514,7 @@ module_eval(<<'.,.,', 'parser.y', 228)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 232) module_eval(<<'.,.,', 'parser.y', 233)
def _reduce_60(val, _values, result) def _reduce_60(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '}' @lexer.end_symbol = '}'
@ -1518,7 +1523,7 @@ module_eval(<<'.,.,', 'parser.y', 232)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 237) module_eval(<<'.,.,', 'parser.y', 238)
def _reduce_61(val, _values, result) def _reduce_61(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1527,7 +1532,7 @@ module_eval(<<'.,.,', 'parser.y', 237)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 242) module_eval(<<'.,.,', 'parser.y', 243)
def _reduce_62(val, _values, result) def _reduce_62(val, _values, result)
result = [val[2]] result = [val[2]]
@ -1535,7 +1540,7 @@ module_eval(<<'.,.,', 'parser.y', 242)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 247) module_eval(<<'.,.,', 'parser.y', 248)
def _reduce_63(val, _values, result) def _reduce_63(val, _values, result)
result = [{tag: nil, tokens: val[0]}] result = [{tag: nil, tokens: val[0]}]
@ -1543,7 +1548,7 @@ module_eval(<<'.,.,', 'parser.y', 247)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 251) module_eval(<<'.,.,', 'parser.y', 252)
def _reduce_64(val, _values, result) def _reduce_64(val, _values, result)
result = [{tag: val[0], tokens: val[1]}] result = [{tag: val[0], tokens: val[1]}]
@ -1551,7 +1556,7 @@ module_eval(<<'.,.,', 'parser.y', 251)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 255) module_eval(<<'.,.,', 'parser.y', 256)
def _reduce_65(val, _values, result) def _reduce_65(val, _values, result)
result = val[0].append({tag: nil, tokens: val[1]}) result = val[0].append({tag: nil, tokens: val[1]})
@ -1559,14 +1564,14 @@ module_eval(<<'.,.,', 'parser.y', 255)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 258) module_eval(<<'.,.,', 'parser.y', 259)
def _reduce_66(val, _values, result) def _reduce_66(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 259) module_eval(<<'.,.,', 'parser.y', 260)
def _reduce_67(val, _values, result) def _reduce_67(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -1575,14 +1580,14 @@ module_eval(<<'.,.,', 'parser.y', 259)
# reduce 68 omitted # reduce 68 omitted
module_eval(<<'.,.,', 'parser.y', 263) module_eval(<<'.,.,', 'parser.y', 264)
def _reduce_69(val, _values, result) def _reduce_69(val, _values, result)
raise "Ident after %prec" if @prec_seen raise "Ident after %prec" if @prec_seen
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 264) module_eval(<<'.,.,', 'parser.y', 265)
def _reduce_70(val, _values, result) def _reduce_70(val, _values, result)
raise "Char after %prec" if @prec_seen raise "Char after %prec" if @prec_seen
result result
@ -1597,7 +1602,7 @@ module_eval(<<'.,.,', 'parser.y', 264)
# reduce 74 omitted # reduce 74 omitted
module_eval(<<'.,.,', 'parser.y', 274) module_eval(<<'.,.,', 'parser.y', 275)
def _reduce_75(val, _values, result) def _reduce_75(val, _values, result)
lhs = val[0] lhs = val[0]
lhs.alias = val[1] lhs.alias = val[1]
@ -1609,7 +1614,7 @@ module_eval(<<'.,.,', 'parser.y', 274)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 283) module_eval(<<'.,.,', 'parser.y', 284)
def _reduce_76(val, _values, result) def _reduce_76(val, _values, result)
result = [{rhs: val[0], lineno: val[0].first&.line || @lexer.line - 1}] result = [{rhs: val[0], lineno: val[0].first&.line || @lexer.line - 1}]
@ -1617,7 +1622,7 @@ module_eval(<<'.,.,', 'parser.y', 283)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 287) module_eval(<<'.,.,', 'parser.y', 288)
def _reduce_77(val, _values, result) def _reduce_77(val, _values, result)
result = val[0].append({rhs: val[2], lineno: val[2].first&.line || @lexer.line - 1}) result = val[0].append({rhs: val[2], lineno: val[2].first&.line || @lexer.line - 1})
@ -1627,7 +1632,7 @@ module_eval(<<'.,.,', 'parser.y', 287)
# reduce 78 omitted # reduce 78 omitted
module_eval(<<'.,.,', 'parser.y', 293) module_eval(<<'.,.,', 'parser.y', 294)
def _reduce_79(val, _values, result) def _reduce_79(val, _values, result)
result = [] result = []
@prec_seen = false @prec_seen = false
@ -1637,17 +1642,17 @@ module_eval(<<'.,.,', 'parser.y', 293)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 299) module_eval(<<'.,.,', 'parser.y', 300)
def _reduce_80(val, _values, result) def _reduce_80(val, _values, result)
token = val[1] token = val[1]
val[1].alias = val[2] token.alias = val[2]
result = val[0].append(token) result = val[0].append(token)
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 305) module_eval(<<'.,.,', 'parser.y', 306)
def _reduce_81(val, _values, result) def _reduce_81(val, _values, result)
if @prec_seen if @prec_seen
raise "Multiple User_code after %prec" if @code_after_prec raise "Multiple User_code after %prec" if @code_after_prec
@ -1660,7 +1665,7 @@ module_eval(<<'.,.,', 'parser.y', 305)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 314) module_eval(<<'.,.,', 'parser.y', 315)
def _reduce_82(val, _values, result) def _reduce_82(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1669,7 +1674,7 @@ module_eval(<<'.,.,', 'parser.y', 314)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 319) module_eval(<<'.,.,', 'parser.y', 320)
def _reduce_83(val, _values, result) def _reduce_83(val, _values, result)
token = val[3] token = val[3]
token.alias = val[6] token.alias = val[6]
@ -1679,7 +1684,7 @@ module_eval(<<'.,.,', 'parser.y', 319)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 325) module_eval(<<'.,.,', 'parser.y', 326)
def _reduce_84(val, _values, result) def _reduce_84(val, _values, result)
if @prec_seen if @prec_seen
raise "Multiple User_code after %prec" if @code_after_prec raise "Multiple User_code after %prec" if @code_after_prec
@ -1692,7 +1697,7 @@ module_eval(<<'.,.,', 'parser.y', 325)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 334) module_eval(<<'.,.,', 'parser.y', 335)
def _reduce_85(val, _values, result) def _reduce_85(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1701,7 +1706,7 @@ module_eval(<<'.,.,', 'parser.y', 334)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 339) module_eval(<<'.,.,', 'parser.y', 340)
def _reduce_86(val, _values, result) def _reduce_86(val, _values, result)
token = val[2] token = val[2]
token.alias = val[5] token.alias = val[5]
@ -1711,7 +1716,7 @@ module_eval(<<'.,.,', 'parser.y', 339)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 345) module_eval(<<'.,.,', 'parser.y', 346)
def _reduce_87(val, _values, result) def _reduce_87(val, _values, result)
sym = @grammar.find_symbol_by_id!(val[2]) sym = @grammar.find_symbol_by_id!(val[2])
result = val[0].append(sym) result = val[0].append(sym)
@ -1723,7 +1728,7 @@ module_eval(<<'.,.,', 'parser.y', 345)
# reduce 88 omitted # reduce 88 omitted
module_eval(<<'.,.,', 'parser.y', 351) module_eval(<<'.,.,', 'parser.y', 352)
def _reduce_89(val, _values, result) def _reduce_89(val, _values, result)
result = val[1].s_value result = val[1].s_value
result result
@ -1734,7 +1739,7 @@ module_eval(<<'.,.,', 'parser.y', 351)
# reduce 91 omitted # reduce 91 omitted
module_eval(<<'.,.,', 'parser.y', 358) module_eval(<<'.,.,', 'parser.y', 359)
def _reduce_92(val, _values, result) def _reduce_92(val, _values, result)
@lexer.status = :c_declaration @lexer.status = :c_declaration
@lexer.end_symbol = '\Z' @lexer.end_symbol = '\Z'
@ -1744,7 +1749,7 @@ module_eval(<<'.,.,', 'parser.y', 358)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 364) module_eval(<<'.,.,', 'parser.y', 365)
def _reduce_93(val, _values, result) def _reduce_93(val, _values, result)
@lexer.status = :initial @lexer.status = :initial
@lexer.end_symbol = nil @lexer.end_symbol = nil
@ -1764,14 +1769,14 @@ module_eval(<<'.,.,', 'parser.y', 364)
# reduce 98 omitted # reduce 98 omitted
module_eval(<<'.,.,', 'parser.y', 376) module_eval(<<'.,.,', 'parser.y', 377)
def _reduce_99(val, _values, result) def _reduce_99(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 377) module_eval(<<'.,.,', 'parser.y', 378)
def _reduce_100(val, _values, result) def _reduce_100(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -1782,7 +1787,7 @@ module_eval(<<'.,.,', 'parser.y', 377)
# reduce 102 omitted # reduce 102 omitted
module_eval(<<'.,.,', 'parser.y', 382) module_eval(<<'.,.,', 'parser.y', 383)
def _reduce_103(val, _values, result) def _reduce_103(val, _values, result)
result = Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Ident, s_value: val[0]) result = Lrama::Lexer::Token.new(type: Lrama::Lexer::Token::Ident, s_value: val[0])
result result

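The `on_error` rewrite near the top of this file's diff turns the old bare line:column message into a three-line report: the grammar file path with line and column, the offending source line, and a caret under the column. A minimal standalone sketch of that formatting technique (the method and variable names here are hypothetical; only the heredoc layout follows the diff):

```ruby
# Builds an error message the way the new on_error does: a
# path:line:column header, the source line, and a caret at the column.
def parse_error_message(path, text, line, column, value, token_name)
  source = text.split("\n")[line - 1]
  <<~ERROR
    #{path}:#{line}:#{column}: parse error on value #{value.inspect} (#{token_name})
    #{source}
    #{' ' * column}^
  ERROR
end

text = "%expect 0\n%%\nprogram: /* empty */\n"
puts parse_error_message("parse.y", text, 2, 0, "%%", '"%%"')
# parse.y:2:0: parse error on value "%%" ("%%")
# %%
# ^
```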

@@ -1,3 +1,3 @@
 module Lrama
-VERSION = "0.5.7".freeze
+VERSION = "0.5.8".freeze
 end