Lrama v0.6.4

This commit is contained in:
yui-knk 2024-03-22 08:17:51 +09:00 committed by Yuichiro Kaneko
parent b4d73e9f80
commit c5045830b7
17 changed files with 340 additions and 107 deletions

View File

@ -1,5 +1,69 @@
# NEWS for Lrama # NEWS for Lrama
## Lrama 0.6.4 (2024-03-22)
### Parameterizing rules (preceded, terminated, delimited)
Support `preceded`, `terminated` and `delimited` rules.
```
program: preceded(opening, X)
// Expanded to
program: preceded_opening_X
preceded_opening_X: opening X
```
```
program: terminated(X, closing)
// Expanded to
program: terminated_X_closing
terminated_X_closing: X closing
```
```
program: delimited(opening, X, closing)
// Expanded to
program: delimited_opening_X_closing
delimited_opening_X_closing: opening X closing
```
https://github.com/ruby/lrama/pull/382
### Support `%destructor` declaration
Users can specify code for freeing semantic value resources by using `%destructor`.
In general, these resources are freed by actions or after parsing.
However, if a syntax error happens during parsing, these codes may not be executed.
Code associated with `%destructor` is executed when a semantic value is popped from the stack by an error.
```
%token <val1> NUM
%type <val2> expr2
%type <val3> expr
%destructor {
printf("destructor for val1: %d\n", $$);
} <val1> // destructor for TAG
%destructor {
printf("destructor for val2: %d\n", $$);
} <val2>
%destructor {
printf("destructor for expr: %d\n", $$);
} expr // destructor for symbol
```
Bison supports this feature from 1.75b.
https://github.com/ruby/lrama/pull/385
## Lrama 0.6.3 (2024-02-15) ## Lrama 0.6.3 (2024-02-15)
### Bring Your Own Stack ### Bring Your Own Stack
@ -34,6 +98,8 @@ primary: k_if expr_value then compstmt if_tail k_end
} }
``` ```
https://github.com/ruby/lrama/pull/367
## Lrama 0.6.2 (2024-01-27) ## Lrama 0.6.2 (2024-01-27)
### %no-stdlib directive ### %no-stdlib directive
@ -51,7 +117,7 @@ Allow to pass an instantiated rule to other parameterizing rules.
``` ```
%rule constant(X) : X %rule constant(X) : X
; ;
%rule option(Y) : /* empty */ %rule option(Y) : /* empty */
| Y | Y

View File

@ -3,6 +3,7 @@ require "lrama/grammar/auxiliary"
require "lrama/grammar/binding" require "lrama/grammar/binding"
require "lrama/grammar/code" require "lrama/grammar/code"
require "lrama/grammar/counter" require "lrama/grammar/counter"
require "lrama/grammar/destructor"
require "lrama/grammar/error_token" require "lrama/grammar/error_token"
require "lrama/grammar/parameterizing_rule" require "lrama/grammar/parameterizing_rule"
require "lrama/grammar/percent_code" require "lrama/grammar/percent_code"
@ -34,7 +35,7 @@ module Lrama
def_delegators "@symbols_resolver", :symbols, :nterms, :terms, :add_nterm, :add_term, def_delegators "@symbols_resolver", :symbols, :nterms, :terms, :add_nterm, :add_term,
:find_symbol_by_number!, :find_symbol_by_id!, :token_to_symbol, :find_symbol_by_number!, :find_symbol_by_id!, :token_to_symbol,
:find_symbol_by_s_value!, :fill_symbol_number, :fill_nterm_type, :find_symbol_by_s_value!, :fill_symbol_number, :fill_nterm_type,
:fill_printer, :fill_error_token, :sort_by_number! :fill_printer, :fill_destructor, :fill_error_token, :sort_by_number!
def initialize(rule_counter) def initialize(rule_counter)
@ -43,6 +44,7 @@ module Lrama
# Code defined by "%code" # Code defined by "%code"
@percent_codes = [] @percent_codes = []
@printers = [] @printers = []
@destructors = []
@error_tokens = [] @error_tokens = []
@symbols_resolver = Grammar::Symbols::Resolver.new @symbols_resolver = Grammar::Symbols::Resolver.new
@types = [] @types = []
@ -65,6 +67,10 @@ module Lrama
@percent_codes << PercentCode.new(id.s_value, code.s_value) @percent_codes << PercentCode.new(id.s_value, code.s_value)
end end
# Records one `%destructor` declaration. `ident_or_tags` is the list of
# symbol identifiers and/or tags the destructor applies to; `token_code`
# is the user code block and `lineno` its source line (used for #line).
def add_destructor(ident_or_tags:, token_code:, lineno:)
@destructors << Destructor.new(ident_or_tags: ident_or_tags, token_code: token_code, lineno: lineno)
end
def add_printer(ident_or_tags:, token_code:, lineno:) def add_printer(ident_or_tags:, token_code:, lineno:)
@printers << Printer.new(ident_or_tags: ident_or_tags, token_code: token_code, lineno: lineno) @printers << Printer.new(ident_or_tags: ident_or_tags, token_code: token_code, lineno: lineno)
end end
@ -345,6 +351,7 @@ module Lrama
fill_symbol_number fill_symbol_number
fill_nterm_type(@types) fill_nterm_type(@types)
fill_printer(@printers) fill_printer(@printers)
fill_destructor(@destructors)
fill_error_token(@error_tokens) fill_error_token(@error_tokens)
sort_by_number! sort_by_number!
end end

View File

@ -1,4 +1,5 @@
require "forwardable" require "forwardable"
require "lrama/grammar/code/destructor_code"
require "lrama/grammar/code/initial_action_code" require "lrama/grammar/code/initial_action_code"
require "lrama/grammar/code/no_reference_code" require "lrama/grammar/code/no_reference_code"
require "lrama/grammar/code/printer_code" require "lrama/grammar/code/printer_code"

View File

@ -0,0 +1,40 @@
module Lrama
class Grammar
class Code
# Code subclass for `%destructor` bodies: translates the `$$` / `@$`
# references inside the user code into the C names available in the
# generated destructor (yyvaluep / yylocationp). Numbered references
# ($1, @1, $:1) have no meaning in a destructor and raise.
class DestructorCode < Code
# @tag is the symbol's type tag; its `member` selects the union field
# that `$$` expands to.
def initialize(type:, token_code:, tag:)
super(type: type, token_code: token_code)
@tag = tag
end
private
# * ($$) *yyvaluep
# * (@$) *yylocationp
# * ($:$) error
# * ($1) error
# * (@1) error
# * ($:1) error
# Maps a single parsed reference to its C expression, or raises for
# references that are invalid inside a destructor (see table above).
def reference_to_c(ref)
case
when ref.type == :dollar && ref.name == "$" # $$
member = @tag.member
"((*yyvaluep).#{member})"
when ref.type == :at && ref.name == "$" # @$
"(*yylocationp)"
when ref.type == :index && ref.name == "$" # $:$
raise "$:#{ref.value} can not be used in #{type}."
when ref.type == :dollar # $n
raise "$#{ref.value} can not be used in #{type}."
when ref.type == :at # @n
raise "@#{ref.value} can not be used in #{type}."
when ref.type == :index # $:n
raise "$:#{ref.value} can not be used in #{type}."
else
raise "Unexpected. #{self}, #{ref}"
end
end
end
end
end
end

View File

@ -0,0 +1,9 @@
module Lrama
class Grammar
# Value object for one `%destructor` declaration:
# ident_or_tags — symbols/tags the destructor is attached to,
# token_code — the user code block, lineno — its source line.
class Destructor < Struct.new(:ident_or_tags, :token_code, :lineno, keyword_init: true)
# Returns the destructor's user code translated to C for a symbol with
# the given tag (references like `$$` resolved via DestructorCode).
def translated_code(tag)
Code::DestructorCode.new(type: :destructor, token_code: token_code, tag: tag).translated_code
end
end
end
end

View File

@ -115,12 +115,12 @@ module Lrama
@replaced_rhs << lhs_token @replaced_rhs << lhs_token
parameterizing_rule_resolver.created_lhs_list << lhs_token parameterizing_rule_resolver.created_lhs_list << lhs_token
parameterizing_rule.rhs_list.each do |r| parameterizing_rule.rhs_list.each do |r|
rule_builder = RuleBuilder.new(@rule_counter, @midrule_action_counter, i, lhs_tag: token.lhs_tag, skip_preprocess_references: true) rule_builder = RuleBuilder.new(@rule_counter, @midrule_action_counter, lhs_tag: token.lhs_tag, skip_preprocess_references: true)
rule_builder.lhs = lhs_token rule_builder.lhs = lhs_token
r.symbols.each { |sym| rule_builder.add_rhs(bindings.resolve_symbol(sym)) } r.symbols.each { |sym| rule_builder.add_rhs(bindings.resolve_symbol(sym)) }
rule_builder.line = line rule_builder.line = line
rule_builder.user_code = r.user_code
rule_builder.precedence_sym = r.precedence_sym rule_builder.precedence_sym = r.precedence_sym
rule_builder.user_code = r.user_code
rule_builder.complete_input rule_builder.complete_input
rule_builder.setup_rules(parameterizing_rule_resolver) rule_builder.setup_rules(parameterizing_rule_resolver)
@rule_builders_for_parameterizing_rules << rule_builder @rule_builders_for_parameterizing_rules << rule_builder

View File

@ -8,6 +8,9 @@
**********************************************************************/ **********************************************************************/
// -------------------------------------------------------------------
// Options
/* /*
* program: option(number) * program: option(number)
* *
@ -21,6 +24,45 @@
| X | X
; ;
// -------------------------------------------------------------------
// Sequences
/*
* program: preceded(opening, X)
*
* =>
*
* program: preceded_opening_X
* preceded_opening_X: opening X
*/
%rule preceded(opening, X): opening X { $$ = $2; }
;
/*
* program: terminated(X, closing)
*
* =>
*
* program: terminated_X_closing
* terminated_X_closing: X closing
*/
%rule terminated(X, closing): X closing { $$ = $1; }
;
/*
* program: delimited(opening, X, closing)
*
* =>
*
* program: delimited_opening_X_closing
* delimited_opening_X_closing: opening X closing
*/
%rule delimited(opening, X, closing): opening X closing { $$ = $2; }
;
// -------------------------------------------------------------------
// Lists
/* /*
* program: list(number) * program: list(number)
* *

View File

@ -7,11 +7,12 @@
module Lrama module Lrama
class Grammar class Grammar
class Symbol class Symbol
attr_accessor :id, :alias_name, :tag, :number, :token_id, :nullable, :precedence, :printer, :error_token, :first_set, :first_set_bitmap attr_accessor :id, :alias_name, :tag, :number, :token_id, :nullable, :precedence,
:printer, :destructor, :error_token, :first_set, :first_set_bitmap
attr_reader :term attr_reader :term
attr_writer :eof_symbol, :error_symbol, :undef_symbol, :accept_symbol attr_writer :eof_symbol, :error_symbol, :undef_symbol, :accept_symbol
def initialize(id:, term:, alias_name: nil, number: nil, tag: nil, token_id: nil, nullable: nil, precedence: nil, printer: nil) def initialize(id:, term:, alias_name: nil, number: nil, tag: nil, token_id: nil, nullable: nil, precedence: nil, printer: nil, destructor: nil)
@id = id @id = id
@alias_name = alias_name @alias_name = alias_name
@number = number @number = number
@ -21,6 +22,7 @@ module Lrama
@nullable = nullable @nullable = nullable
@precedence = precedence @precedence = precedence
@printer = printer @printer = printer
@destructor = destructor
end end
def term? def term?

View File

@ -58,7 +58,7 @@ module Lrama
end end
def find_symbol_by_s_value!(s_value) def find_symbol_by_s_value!(s_value)
find_symbol_by_s_value(s_value) || (raise "Symbol not found: #{s_value}") find_symbol_by_s_value(s_value) || (raise "Symbol not found. value: `#{s_value}`")
end end
def find_symbol_by_id(id) def find_symbol_by_id(id)
@ -68,7 +68,7 @@ module Lrama
end end
def find_symbol_by_id!(id) def find_symbol_by_id!(id)
find_symbol_by_id(id) || (raise "Symbol not found: #{id}") find_symbol_by_id(id) || (raise "Symbol not found. #{id}")
end end
def find_symbol_by_token_id(token_id) def find_symbol_by_token_id(token_id)
@ -78,7 +78,7 @@ module Lrama
def find_symbol_by_number!(number) def find_symbol_by_number!(number)
sym = symbols[number] sym = symbols[number]
raise "Symbol not found: #{number}" unless sym raise "Symbol not found. number: `#{number}`" unless sym
raise "[BUG] Symbol number mismatch. #{number}, #{sym}" if sym.number != number raise "[BUG] Symbol number mismatch. #{number}, #{sym}" if sym.number != number
sym sym
@ -118,6 +118,23 @@ module Lrama
end end
end end
# Attaches each declared destructor to every symbol it names: an Ident
# entry matches on the symbol's id, a Tag entry on the symbol's tag.
# When several destructors match one symbol, the last one wins (plain
# assignment, no accumulation). Raises on any other token type.
def fill_destructor(destructors)
symbols.each do |sym|
destructors.each do |destructor|
destructor.ident_or_tags.each do |ident_or_tag|
case ident_or_tag
when Lrama::Lexer::Token::Ident
sym.destructor = destructor if sym.id == ident_or_tag
when Lrama::Lexer::Token::Tag
sym.destructor = destructor if sym.tag == ident_or_tag
else
raise "Unknown token type. #{destructor}"
end
end
end
end
end
def fill_error_token(error_tokens) def fill_error_token(error_tokens)
symbols.each do |sym| symbols.each do |sym|
error_tokens.each do |token| error_tokens.each do |token|
@ -154,7 +171,7 @@ module Lrama
def find_nterm_by_id!(id) def find_nterm_by_id!(id)
@nterms.find do |s| @nterms.find do |s|
s.id == id s.id == id
end || (raise "Symbol not found: #{id}") end || (raise "Symbol not found. #{id}")
end end
def fill_terms_number def fill_terms_number

View File

@ -21,6 +21,7 @@ module Lrama
%define %define
%require %require
%printer %printer
%destructor
%lex-param %lex-param
%parse-param %parse-param
%initial-action %initial-action

View File

@ -18,7 +18,7 @@ module Lrama
end end
def to_s def to_s
"#{super} location: #{location}" "value: `#{s_value}`, location: #{location}"
end end
def referred_by?(string) def referred_by?(string)

View File

@ -64,9 +64,18 @@ module Lrama
o.on('-H', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v } o.on('-H', '--header=[FILE]', 'also produce a header file named FILE') {|v| @options.header = true; @options.header_file = v }
o.on('-d', 'also produce a header file') { @options.header = true } o.on('-d', 'also produce a header file') { @options.header = true }
o.on('-r', '--report=THINGS', Array, 'also produce details on the automaton') {|v| @report = v } o.on('-r', '--report=THINGS', Array, 'also produce details on the automaton') {|v| @report = v }
o.on_tail ''
o.on_tail 'Valid Reports:'
o.on_tail " #{VALID_REPORTS.join(' ')}"
o.on('--report-file=FILE', 'also produce details on the automaton output to a file named FILE') {|v| @options.report_file = v } o.on('--report-file=FILE', 'also produce details on the automaton output to a file named FILE') {|v| @options.report_file = v }
o.on('-o', '--output=FILE', 'leave output to FILE') {|v| @options.outfile = v } o.on('-o', '--output=FILE', 'leave output to FILE') {|v| @options.outfile = v }
o.on('--trace=THINGS', Array, 'also output trace logs at runtime') {|v| @trace = v } o.on('--trace=THINGS', Array, 'also output trace logs at runtime') {|v| @trace = v }
o.on_tail ''
o.on_tail 'Valid Traces:'
o.on_tail " #{VALID_TRACES.join(' ')}"
o.on('-v', 'reserved, do nothing') { } o.on('-v', 'reserved, do nothing') { }
o.separator '' o.separator ''
o.separator 'Error Recovery:' o.separator 'Error Recovery:'
@ -75,20 +84,22 @@ module Lrama
o.separator 'Other options:' o.separator 'Other options:'
o.on('-V', '--version', "output version information and exit") {|v| puts "lrama #{Lrama::VERSION}"; exit 0 } o.on('-V', '--version', "output version information and exit") {|v| puts "lrama #{Lrama::VERSION}"; exit 0 }
o.on('-h', '--help', "display this help and exit") {|v| puts o; exit 0 } o.on('-h', '--help', "display this help and exit") {|v| puts o; exit 0 }
o.separator '' o.on_tail
o.parse!(argv) o.parse!(argv)
end end
end end
BISON_REPORTS = %w[states itemsets lookaheads solved counterexamples cex all none]
OTHER_REPORTS = %w[verbose]
NOT_SUPPORTED_REPORTS = %w[cex none]
VALID_REPORTS = BISON_REPORTS + OTHER_REPORTS - NOT_SUPPORTED_REPORTS
def validate_report(report) def validate_report(report)
bison_list = %w[states itemsets lookaheads solved counterexamples cex all none] list = VALID_REPORTS
others = %w[verbose]
list = bison_list + others
not_supported = %w[cex none]
h = { grammar: true } h = { grammar: true }
report.each do |r| report.each do |r|
if list.include?(r) && !not_supported.include?(r) if list.include?(r)
h[r.to_sym] = true h[r.to_sym] = true
else else
raise "Invalid report option \"#{r}\"." raise "Invalid report option \"#{r}\"."
@ -96,7 +107,7 @@ module Lrama
end end
if h[:all] if h[:all]
(bison_list - not_supported).each do |r| (BISON_REPORTS - NOT_SUPPORTED_REPORTS).each do |r|
h[r.to_sym] = true h[r.to_sym] = true
end end
@ -106,12 +117,14 @@ module Lrama
return h return h
end end
VALID_TRACES = %w[
none locations scan parse automaton bitsets
closure grammar rules resource sets muscles tools
m4-early m4 skeleton time ielr cex all
]
def validate_trace(trace) def validate_trace(trace)
list = %w[ list = VALID_TRACES
none locations scan parse automaton bitsets
closure grammar rules resource sets muscles tools
m4-early m4 skeleton time ielr cex all
]
h = {} h = {}
trace.each do |t| trace.each do |t|

View File

@ -18,6 +18,7 @@ module Lrama
@trace_opts = nil @trace_opts = nil
@report_opts = nil @report_opts = nil
@y = STDIN @y = STDIN
@debug = false
end end
end end
end end

View File

@ -150,6 +150,25 @@ module Lrama
str str
end end
# Emits the C `case` arms of the destructor switch: one arm per symbol
# that has a destructor, wrapping the translated user code in #line
# directives so C diagnostics point back at the grammar file.
def symbol_actions_for_destructor
str = ""
@grammar.symbols.each do |sym|
# Symbols without a %destructor contribute nothing.
next unless sym.destructor
str << <<-STR
    case #{sym.enum_name}: /* #{sym.comment} */
#line #{sym.destructor.lineno} "#{@grammar_file_path}"
         {#{sym.destructor.translated_code(sym.tag)}}
#line [@oline@] [@ofile@]
        break;

    STR
end
str
end
# b4_user_initial_action # b4_user_initial_action
def user_initial_action(comment = "") def user_initial_action(comment = "")
return "" unless @grammar.initial_action return "" unless @grammar.initial_action

View File

@ -658,7 +658,7 @@ end
module Lrama module Lrama
class Parser < Racc::Parser class Parser < Racc::Parser
module_eval(<<'...end parser.y/module_eval...', 'parser.y', 521) module_eval(<<'...end parser.y/module_eval...', 'parser.y', 528)
include Lrama::Report::Duration include Lrama::Report::Duration
@ -933,7 +933,7 @@ racc_reduce_table = [
1, 63, :_reduce_none, 1, 63, :_reduce_none,
0, 76, :_reduce_29, 0, 76, :_reduce_29,
0, 77, :_reduce_30, 0, 77, :_reduce_30,
7, 63, :_reduce_none, 7, 63, :_reduce_31,
0, 78, :_reduce_32, 0, 78, :_reduce_32,
0, 79, :_reduce_33, 0, 79, :_reduce_33,
7, 63, :_reduce_34, 7, 63, :_reduce_34,
@ -1424,9 +1424,19 @@ module_eval(<<'.,.,', 'parser.y', 96)
end end
.,., .,.,
# reduce 31 omitted module_eval(<<'.,.,', 'parser.y', 100)
def _reduce_31(val, _values, result)
@grammar.add_destructor(
ident_or_tags: val[6],
token_code: val[3],
lineno: val[3].line
)
module_eval(<<'.,.,', 'parser.y', 101) result
end
.,.,
module_eval(<<'.,.,', 'parser.y', 108)
def _reduce_32(val, _values, result) def _reduce_32(val, _values, result)
begin_c_declaration("}") begin_c_declaration("}")
@ -1434,7 +1444,7 @@ module_eval(<<'.,.,', 'parser.y', 101)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 105) module_eval(<<'.,.,', 'parser.y', 112)
def _reduce_33(val, _values, result) def _reduce_33(val, _values, result)
end_c_declaration end_c_declaration
@ -1442,7 +1452,7 @@ module_eval(<<'.,.,', 'parser.y', 105)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 109) module_eval(<<'.,.,', 'parser.y', 116)
def _reduce_34(val, _values, result) def _reduce_34(val, _values, result)
@grammar.add_printer( @grammar.add_printer(
ident_or_tags: val[6], ident_or_tags: val[6],
@ -1454,7 +1464,7 @@ module_eval(<<'.,.,', 'parser.y', 109)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 117) module_eval(<<'.,.,', 'parser.y', 124)
def _reduce_35(val, _values, result) def _reduce_35(val, _values, result)
begin_c_declaration("}") begin_c_declaration("}")
@ -1462,7 +1472,7 @@ module_eval(<<'.,.,', 'parser.y', 117)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 121) module_eval(<<'.,.,', 'parser.y', 128)
def _reduce_36(val, _values, result) def _reduce_36(val, _values, result)
end_c_declaration end_c_declaration
@ -1470,7 +1480,7 @@ module_eval(<<'.,.,', 'parser.y', 121)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 125) module_eval(<<'.,.,', 'parser.y', 132)
def _reduce_37(val, _values, result) def _reduce_37(val, _values, result)
@grammar.add_error_token( @grammar.add_error_token(
ident_or_tags: val[6], ident_or_tags: val[6],
@ -1482,7 +1492,7 @@ module_eval(<<'.,.,', 'parser.y', 125)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 133) module_eval(<<'.,.,', 'parser.y', 140)
def _reduce_38(val, _values, result) def _reduce_38(val, _values, result)
@grammar.after_shift = val[1] @grammar.after_shift = val[1]
@ -1490,7 +1500,7 @@ module_eval(<<'.,.,', 'parser.y', 133)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 137) module_eval(<<'.,.,', 'parser.y', 144)
def _reduce_39(val, _values, result) def _reduce_39(val, _values, result)
@grammar.before_reduce = val[1] @grammar.before_reduce = val[1]
@ -1498,7 +1508,7 @@ module_eval(<<'.,.,', 'parser.y', 137)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 141) module_eval(<<'.,.,', 'parser.y', 148)
def _reduce_40(val, _values, result) def _reduce_40(val, _values, result)
@grammar.after_reduce = val[1] @grammar.after_reduce = val[1]
@ -1506,7 +1516,7 @@ module_eval(<<'.,.,', 'parser.y', 141)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 145) module_eval(<<'.,.,', 'parser.y', 152)
def _reduce_41(val, _values, result) def _reduce_41(val, _values, result)
@grammar.after_shift_error_token = val[1] @grammar.after_shift_error_token = val[1]
@ -1514,7 +1524,7 @@ module_eval(<<'.,.,', 'parser.y', 145)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 149) module_eval(<<'.,.,', 'parser.y', 156)
def _reduce_42(val, _values, result) def _reduce_42(val, _values, result)
@grammar.after_pop_stack = val[1] @grammar.after_pop_stack = val[1]
@ -1524,7 +1534,7 @@ module_eval(<<'.,.,', 'parser.y', 149)
# reduce 43 omitted # reduce 43 omitted
module_eval(<<'.,.,', 'parser.y', 155) module_eval(<<'.,.,', 'parser.y', 162)
def _reduce_44(val, _values, result) def _reduce_44(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1536,7 +1546,7 @@ module_eval(<<'.,.,', 'parser.y', 155)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 163) module_eval(<<'.,.,', 'parser.y', 170)
def _reduce_45(val, _values, result) def _reduce_45(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1550,7 +1560,7 @@ module_eval(<<'.,.,', 'parser.y', 163)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 173) module_eval(<<'.,.,', 'parser.y', 180)
def _reduce_46(val, _values, result) def _reduce_46(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1564,7 +1574,7 @@ module_eval(<<'.,.,', 'parser.y', 173)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 183) module_eval(<<'.,.,', 'parser.y', 190)
def _reduce_47(val, _values, result) def _reduce_47(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1578,7 +1588,7 @@ module_eval(<<'.,.,', 'parser.y', 183)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 193) module_eval(<<'.,.,', 'parser.y', 200)
def _reduce_48(val, _values, result) def _reduce_48(val, _values, result)
val[1].each {|hash| val[1].each {|hash|
hash[:tokens].each {|id| hash[:tokens].each {|id|
@ -1592,7 +1602,7 @@ module_eval(<<'.,.,', 'parser.y', 193)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 204) module_eval(<<'.,.,', 'parser.y', 211)
def _reduce_49(val, _values, result) def _reduce_49(val, _values, result)
val[0].each {|token_declaration| val[0].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: nil, replace: true)
@ -1602,7 +1612,7 @@ module_eval(<<'.,.,', 'parser.y', 204)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 210) module_eval(<<'.,.,', 'parser.y', 217)
def _reduce_50(val, _values, result) def _reduce_50(val, _values, result)
val[1].each {|token_declaration| val[1].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[0], replace: true)
@ -1612,7 +1622,7 @@ module_eval(<<'.,.,', 'parser.y', 210)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 216) module_eval(<<'.,.,', 'parser.y', 223)
def _reduce_51(val, _values, result) def _reduce_51(val, _values, result)
val[2].each {|token_declaration| val[2].each {|token_declaration|
@grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true) @grammar.add_term(id: token_declaration[0], alias_name: token_declaration[2], token_id: token_declaration[1], tag: val[1], replace: true)
@ -1622,28 +1632,28 @@ module_eval(<<'.,.,', 'parser.y', 216)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 221) module_eval(<<'.,.,', 'parser.y', 228)
def _reduce_52(val, _values, result) def _reduce_52(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 222) module_eval(<<'.,.,', 'parser.y', 229)
def _reduce_53(val, _values, result) def _reduce_53(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 224) module_eval(<<'.,.,', 'parser.y', 231)
def _reduce_54(val, _values, result) def _reduce_54(val, _values, result)
result = val result = val
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 228) module_eval(<<'.,.,', 'parser.y', 235)
def _reduce_55(val, _values, result) def _reduce_55(val, _values, result)
rule = Grammar::ParameterizingRule::Rule.new(val[1].s_value, val[3], val[6]) rule = Grammar::ParameterizingRule::Rule.new(val[1].s_value, val[3], val[6])
@grammar.add_parameterizing_rule(rule) @grammar.add_parameterizing_rule(rule)
@ -1652,21 +1662,21 @@ module_eval(<<'.,.,', 'parser.y', 228)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 232) module_eval(<<'.,.,', 'parser.y', 239)
def _reduce_56(val, _values, result) def _reduce_56(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 233) module_eval(<<'.,.,', 'parser.y', 240)
def _reduce_57(val, _values, result) def _reduce_57(val, _values, result)
result = val[0].append(val[2]) result = val[0].append(val[2])
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 237) module_eval(<<'.,.,', 'parser.y', 244)
def _reduce_58(val, _values, result) def _reduce_58(val, _values, result)
builder = val[0] builder = val[0]
result = [builder] result = [builder]
@ -1675,7 +1685,7 @@ module_eval(<<'.,.,', 'parser.y', 237)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 242) module_eval(<<'.,.,', 'parser.y', 249)
def _reduce_59(val, _values, result) def _reduce_59(val, _values, result)
builder = val[2] builder = val[2]
result = val[0].append(builder) result = val[0].append(builder)
@ -1684,7 +1694,7 @@ module_eval(<<'.,.,', 'parser.y', 242)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 248) module_eval(<<'.,.,', 'parser.y', 255)
def _reduce_60(val, _values, result) def _reduce_60(val, _values, result)
reset_precs reset_precs
result = Grammar::ParameterizingRule::Rhs.new result = Grammar::ParameterizingRule::Rhs.new
@ -1693,7 +1703,7 @@ module_eval(<<'.,.,', 'parser.y', 248)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 253) module_eval(<<'.,.,', 'parser.y', 260)
def _reduce_61(val, _values, result) def _reduce_61(val, _values, result)
reset_precs reset_precs
result = Grammar::ParameterizingRule::Rhs.new result = Grammar::ParameterizingRule::Rhs.new
@ -1702,7 +1712,7 @@ module_eval(<<'.,.,', 'parser.y', 253)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 258) module_eval(<<'.,.,', 'parser.y', 265)
def _reduce_62(val, _values, result) def _reduce_62(val, _values, result)
token = val[1] token = val[1]
token.alias_name = val[2] token.alias_name = val[2]
@ -1714,7 +1724,7 @@ module_eval(<<'.,.,', 'parser.y', 258)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 266) module_eval(<<'.,.,', 'parser.y', 273)
def _reduce_63(val, _values, result) def _reduce_63(val, _values, result)
builder = val[0] builder = val[0]
builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], location: @lexer.location, args: [val[1]]) builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], location: @lexer.location, args: [val[1]])
@ -1724,7 +1734,7 @@ module_eval(<<'.,.,', 'parser.y', 266)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 272) module_eval(<<'.,.,', 'parser.y', 279)
def _reduce_64(val, _values, result) def _reduce_64(val, _values, result)
builder = val[0] builder = val[0]
builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[3]) builder.symbols << Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[3])
@ -1734,7 +1744,7 @@ module_eval(<<'.,.,', 'parser.y', 272)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 278) module_eval(<<'.,.,', 'parser.y', 285)
def _reduce_65(val, _values, result) def _reduce_65(val, _values, result)
if @prec_seen if @prec_seen
on_action_error("multiple User_code after %prec", val[0]) if @code_after_prec on_action_error("multiple User_code after %prec", val[0]) if @code_after_prec
@ -1746,7 +1756,7 @@ module_eval(<<'.,.,', 'parser.y', 278)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 286) module_eval(<<'.,.,', 'parser.y', 293)
def _reduce_66(val, _values, result) def _reduce_66(val, _values, result)
end_c_declaration end_c_declaration
@ -1754,7 +1764,7 @@ module_eval(<<'.,.,', 'parser.y', 286)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 290) module_eval(<<'.,.,', 'parser.y', 297)
def _reduce_67(val, _values, result) def _reduce_67(val, _values, result)
user_code = val[3] user_code = val[3]
user_code.alias_name = val[6] user_code.alias_name = val[6]
@ -1766,7 +1776,7 @@ module_eval(<<'.,.,', 'parser.y', 290)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 298) module_eval(<<'.,.,', 'parser.y', 305)
def _reduce_68(val, _values, result) def _reduce_68(val, _values, result)
sym = @grammar.find_symbol_by_id!(val[2]) sym = @grammar.find_symbol_by_id!(val[2])
@prec_seen = true @prec_seen = true
@ -1786,7 +1796,7 @@ module_eval(<<'.,.,', 'parser.y', 298)
# reduce 72 omitted # reduce 72 omitted
module_eval(<<'.,.,', 'parser.y', 313) module_eval(<<'.,.,', 'parser.y', 320)
def _reduce_73(val, _values, result) def _reduce_73(val, _values, result)
result = [{tag: nil, tokens: val[0]}] result = [{tag: nil, tokens: val[0]}]
@ -1794,7 +1804,7 @@ module_eval(<<'.,.,', 'parser.y', 313)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 317) module_eval(<<'.,.,', 'parser.y', 324)
def _reduce_74(val, _values, result) def _reduce_74(val, _values, result)
result = [{tag: val[0], tokens: val[1]}] result = [{tag: val[0], tokens: val[1]}]
@ -1802,7 +1812,7 @@ module_eval(<<'.,.,', 'parser.y', 317)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 321) module_eval(<<'.,.,', 'parser.y', 328)
def _reduce_75(val, _values, result) def _reduce_75(val, _values, result)
result = val[0].append({tag: val[1], tokens: val[2]}) result = val[0].append({tag: val[1], tokens: val[2]})
@ -1810,14 +1820,14 @@ module_eval(<<'.,.,', 'parser.y', 321)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 324) module_eval(<<'.,.,', 'parser.y', 331)
def _reduce_76(val, _values, result) def _reduce_76(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 325) module_eval(<<'.,.,', 'parser.y', 332)
def _reduce_77(val, _values, result) def _reduce_77(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -1828,7 +1838,7 @@ module_eval(<<'.,.,', 'parser.y', 325)
# reduce 79 omitted # reduce 79 omitted
module_eval(<<'.,.,', 'parser.y', 332) module_eval(<<'.,.,', 'parser.y', 339)
def _reduce_80(val, _values, result) def _reduce_80(val, _values, result)
begin_c_declaration("}") begin_c_declaration("}")
@ -1836,7 +1846,7 @@ module_eval(<<'.,.,', 'parser.y', 332)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 336) module_eval(<<'.,.,', 'parser.y', 343)
def _reduce_81(val, _values, result) def _reduce_81(val, _values, result)
end_c_declaration end_c_declaration
@ -1844,7 +1854,7 @@ module_eval(<<'.,.,', 'parser.y', 336)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 340) module_eval(<<'.,.,', 'parser.y', 347)
def _reduce_82(val, _values, result) def _reduce_82(val, _values, result)
result = val[0].append(val[3]) result = val[0].append(val[3])
@ -1852,7 +1862,7 @@ module_eval(<<'.,.,', 'parser.y', 340)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 344) module_eval(<<'.,.,', 'parser.y', 351)
def _reduce_83(val, _values, result) def _reduce_83(val, _values, result)
begin_c_declaration("}") begin_c_declaration("}")
@ -1860,7 +1870,7 @@ module_eval(<<'.,.,', 'parser.y', 344)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 348) module_eval(<<'.,.,', 'parser.y', 355)
def _reduce_84(val, _values, result) def _reduce_84(val, _values, result)
end_c_declaration end_c_declaration
@ -1868,7 +1878,7 @@ module_eval(<<'.,.,', 'parser.y', 348)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 352) module_eval(<<'.,.,', 'parser.y', 359)
def _reduce_85(val, _values, result) def _reduce_85(val, _values, result)
result = [val[2]] result = [val[2]]
@ -1876,7 +1886,7 @@ module_eval(<<'.,.,', 'parser.y', 352)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 357) module_eval(<<'.,.,', 'parser.y', 364)
def _reduce_86(val, _values, result) def _reduce_86(val, _values, result)
result = [{tag: nil, tokens: val[0]}] result = [{tag: nil, tokens: val[0]}]
@ -1884,7 +1894,7 @@ module_eval(<<'.,.,', 'parser.y', 357)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 361) module_eval(<<'.,.,', 'parser.y', 368)
def _reduce_87(val, _values, result) def _reduce_87(val, _values, result)
result = [{tag: val[0], tokens: val[1]}] result = [{tag: val[0], tokens: val[1]}]
@ -1892,7 +1902,7 @@ module_eval(<<'.,.,', 'parser.y', 361)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 365) module_eval(<<'.,.,', 'parser.y', 372)
def _reduce_88(val, _values, result) def _reduce_88(val, _values, result)
result = val[0].append({tag: val[1], tokens: val[2]}) result = val[0].append({tag: val[1], tokens: val[2]})
@ -1900,14 +1910,14 @@ module_eval(<<'.,.,', 'parser.y', 365)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 368) module_eval(<<'.,.,', 'parser.y', 375)
def _reduce_89(val, _values, result) def _reduce_89(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 369) module_eval(<<'.,.,', 'parser.y', 376)
def _reduce_90(val, _values, result) def _reduce_90(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -1916,14 +1926,14 @@ module_eval(<<'.,.,', 'parser.y', 369)
# reduce 91 omitted # reduce 91 omitted
module_eval(<<'.,.,', 'parser.y', 373) module_eval(<<'.,.,', 'parser.y', 380)
def _reduce_92(val, _values, result) def _reduce_92(val, _values, result)
on_action_error("ident after %prec", val[0]) if @prec_seen on_action_error("ident after %prec", val[0]) if @prec_seen
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 374) module_eval(<<'.,.,', 'parser.y', 381)
def _reduce_93(val, _values, result) def _reduce_93(val, _values, result)
on_action_error("char after %prec", val[0]) if @prec_seen on_action_error("char after %prec", val[0]) if @prec_seen
result result
@ -1938,7 +1948,7 @@ module_eval(<<'.,.,', 'parser.y', 374)
# reduce 97 omitted # reduce 97 omitted
module_eval(<<'.,.,', 'parser.y', 384) module_eval(<<'.,.,', 'parser.y', 391)
def _reduce_98(val, _values, result) def _reduce_98(val, _values, result)
lhs = val[0] lhs = val[0]
lhs.alias_name = val[1] lhs.alias_name = val[1]
@ -1952,7 +1962,7 @@ module_eval(<<'.,.,', 'parser.y', 384)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 395) module_eval(<<'.,.,', 'parser.y', 402)
def _reduce_99(val, _values, result) def _reduce_99(val, _values, result)
builder = val[0] builder = val[0]
if !builder.line if !builder.line
@ -1964,7 +1974,7 @@ module_eval(<<'.,.,', 'parser.y', 395)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 403) module_eval(<<'.,.,', 'parser.y', 410)
def _reduce_100(val, _values, result) def _reduce_100(val, _values, result)
builder = val[2] builder = val[2]
if !builder.line if !builder.line
@ -1978,7 +1988,7 @@ module_eval(<<'.,.,', 'parser.y', 403)
# reduce 101 omitted # reduce 101 omitted
module_eval(<<'.,.,', 'parser.y', 413) module_eval(<<'.,.,', 'parser.y', 420)
def _reduce_102(val, _values, result) def _reduce_102(val, _values, result)
reset_precs reset_precs
result = Grammar::RuleBuilder.new(@rule_counter, @midrule_action_counter) result = Grammar::RuleBuilder.new(@rule_counter, @midrule_action_counter)
@ -1987,7 +1997,7 @@ module_eval(<<'.,.,', 'parser.y', 413)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 418) module_eval(<<'.,.,', 'parser.y', 425)
def _reduce_103(val, _values, result) def _reduce_103(val, _values, result)
reset_precs reset_precs
result = Grammar::RuleBuilder.new(@rule_counter, @midrule_action_counter) result = Grammar::RuleBuilder.new(@rule_counter, @midrule_action_counter)
@ -1996,7 +2006,7 @@ module_eval(<<'.,.,', 'parser.y', 418)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 423) module_eval(<<'.,.,', 'parser.y', 430)
def _reduce_104(val, _values, result) def _reduce_104(val, _values, result)
token = val[1] token = val[1]
token.alias_name = val[2] token.alias_name = val[2]
@ -2008,7 +2018,7 @@ module_eval(<<'.,.,', 'parser.y', 423)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 431) module_eval(<<'.,.,', 'parser.y', 438)
def _reduce_105(val, _values, result) def _reduce_105(val, _values, result)
token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], location: @lexer.location, args: [val[1]], lhs_tag: val[3]) token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[2], location: @lexer.location, args: [val[1]], lhs_tag: val[3])
builder = val[0] builder = val[0]
@ -2020,7 +2030,7 @@ module_eval(<<'.,.,', 'parser.y', 431)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 439) module_eval(<<'.,.,', 'parser.y', 446)
def _reduce_106(val, _values, result) def _reduce_106(val, _values, result)
token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[3], lhs_tag: val[5]) token = Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[3], lhs_tag: val[5])
builder = val[0] builder = val[0]
@ -2032,7 +2042,7 @@ module_eval(<<'.,.,', 'parser.y', 439)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 447) module_eval(<<'.,.,', 'parser.y', 454)
def _reduce_107(val, _values, result) def _reduce_107(val, _values, result)
if @prec_seen if @prec_seen
on_action_error("multiple User_code after %prec", val[0]) if @code_after_prec on_action_error("multiple User_code after %prec", val[0]) if @code_after_prec
@ -2044,7 +2054,7 @@ module_eval(<<'.,.,', 'parser.y', 447)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 455) module_eval(<<'.,.,', 'parser.y', 462)
def _reduce_108(val, _values, result) def _reduce_108(val, _values, result)
end_c_declaration end_c_declaration
@ -2052,7 +2062,7 @@ module_eval(<<'.,.,', 'parser.y', 455)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 459) module_eval(<<'.,.,', 'parser.y', 466)
def _reduce_109(val, _values, result) def _reduce_109(val, _values, result)
user_code = val[3] user_code = val[3]
user_code.alias_name = val[6] user_code.alias_name = val[6]
@ -2064,7 +2074,7 @@ module_eval(<<'.,.,', 'parser.y', 459)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 467) module_eval(<<'.,.,', 'parser.y', 474)
def _reduce_110(val, _values, result) def _reduce_110(val, _values, result)
sym = @grammar.find_symbol_by_id!(val[2]) sym = @grammar.find_symbol_by_id!(val[2])
@prec_seen = true @prec_seen = true
@ -2076,49 +2086,49 @@ module_eval(<<'.,.,', 'parser.y', 467)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 474) module_eval(<<'.,.,', 'parser.y', 481)
def _reduce_111(val, _values, result) def _reduce_111(val, _values, result)
result = "option" result = "option"
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 475) module_eval(<<'.,.,', 'parser.y', 482)
def _reduce_112(val, _values, result) def _reduce_112(val, _values, result)
result = "nonempty_list" result = "nonempty_list"
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 476) module_eval(<<'.,.,', 'parser.y', 483)
def _reduce_113(val, _values, result) def _reduce_113(val, _values, result)
result = "list" result = "list"
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 478) module_eval(<<'.,.,', 'parser.y', 485)
def _reduce_114(val, _values, result) def _reduce_114(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 479) module_eval(<<'.,.,', 'parser.y', 486)
def _reduce_115(val, _values, result) def _reduce_115(val, _values, result)
result = val[0].append(val[2]) result = val[0].append(val[2])
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 480) module_eval(<<'.,.,', 'parser.y', 487)
def _reduce_116(val, _values, result) def _reduce_116(val, _values, result)
result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[0])] result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[1].s_value, location: @lexer.location, args: val[0])]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 481) module_eval(<<'.,.,', 'parser.y', 488)
def _reduce_117(val, _values, result) def _reduce_117(val, _values, result)
result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[0].s_value, location: @lexer.location, args: val[2])] result = [Lrama::Lexer::Token::InstantiateRule.new(s_value: val[0].s_value, location: @lexer.location, args: val[2])]
result result
@ -2127,7 +2137,7 @@ module_eval(<<'.,.,', 'parser.y', 481)
# reduce 118 omitted # reduce 118 omitted
module_eval(<<'.,.,', 'parser.y', 484) module_eval(<<'.,.,', 'parser.y', 491)
def _reduce_119(val, _values, result) def _reduce_119(val, _values, result)
result = val[1].s_value result = val[1].s_value
result result
@ -2138,7 +2148,7 @@ module_eval(<<'.,.,', 'parser.y', 484)
# reduce 121 omitted # reduce 121 omitted
module_eval(<<'.,.,', 'parser.y', 491) module_eval(<<'.,.,', 'parser.y', 498)
def _reduce_122(val, _values, result) def _reduce_122(val, _values, result)
begin_c_declaration('\Z') begin_c_declaration('\Z')
@grammar.epilogue_first_lineno = @lexer.line + 1 @grammar.epilogue_first_lineno = @lexer.line + 1
@ -2147,7 +2157,7 @@ module_eval(<<'.,.,', 'parser.y', 491)
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 496) module_eval(<<'.,.,', 'parser.y', 503)
def _reduce_123(val, _values, result) def _reduce_123(val, _values, result)
end_c_declaration end_c_declaration
@grammar.epilogue = val[2].s_value @grammar.epilogue = val[2].s_value
@ -2166,14 +2176,14 @@ module_eval(<<'.,.,', 'parser.y', 496)
# reduce 128 omitted # reduce 128 omitted
module_eval(<<'.,.,', 'parser.y', 507) module_eval(<<'.,.,', 'parser.y', 514)
def _reduce_129(val, _values, result) def _reduce_129(val, _values, result)
result = [val[0]] result = [val[0]]
result result
end end
.,., .,.,
module_eval(<<'.,.,', 'parser.y', 508) module_eval(<<'.,.,', 'parser.y', 515)
def _reduce_130(val, _values, result) def _reduce_130(val, _values, result)
result = val[0].append(val[1]) result = val[0].append(val[1])
result result
@ -2184,7 +2194,7 @@ module_eval(<<'.,.,', 'parser.y', 508)
# reduce 132 omitted # reduce 132 omitted
module_eval(<<'.,.,', 'parser.y', 513) module_eval(<<'.,.,', 'parser.y', 520)
def _reduce_133(val, _values, result) def _reduce_133(val, _values, result)
result = Lrama::Lexer::Token::Ident.new(s_value: val[0]) result = Lrama::Lexer::Token::Ident.new(s_value: val[0])
result result

View File

@ -1,3 +1,3 @@
module Lrama module Lrama
VERSION = "0.6.3".freeze VERSION = "0.6.4".freeze
end end

View File

@ -1145,7 +1145,12 @@ yydestruct (const char *yymsg,
YY_SYMBOL_PRINT (yymsg, yykind, yyvaluep, yylocationp<%= output.user_args %>); YY_SYMBOL_PRINT (yymsg, yykind, yyvaluep, yylocationp<%= output.user_args %>);
YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
YY_USE (yykind); switch (yykind)
{
<%= output.symbol_actions_for_destructor -%>
default:
break;
}
YY_IGNORE_MAYBE_UNINITIALIZED_END YY_IGNORE_MAYBE_UNINITIALIZED_END
} }