[ruby/yarp] Move lex compat into its own file
https://github.com/ruby/yarp/commit/e90f88f21f

parent 3e8aa3d1cc
commit 7d11f58b6e

lib/yarp.rb (25 changed lines)
@@ -303,6 +303,8 @@ module YARP
   autoload :DesugarCompiler, "yarp/desugar_compiler"
   autoload :Dispatcher, "yarp/dispatcher"
   autoload :DSL, "yarp/dsl"
+  autoload :LexCompat, "yarp/lex_compat"
+  autoload :LexRipper, "yarp/lex_compat"
   autoload :MutationCompiler, "yarp/mutation_compiler"
   autoload :NodeInspector, "yarp/node_inspector"
   autoload :RipperCompat, "yarp/ripper_compat"
@@ -311,10 +313,25 @@ module YARP
   autoload :Serialize, "yarp/serialize"
   autoload :Visitor, "yarp/visitor"
 
-  # Marking this as private so that consumers don't see it. It makes it a little
-  # annoying for testing since you have to const_get it to access the methods,
-  # but at least this way it's clear it's not meant for consumers.
+  # Some of these constants are not meant to be exposed, so marking them as
+  # private here.
   private_constant :Debug
+  private_constant :LexCompat
+  private_constant :LexRipper
+
+  # Returns an array of tokens that closely resembles that of the Ripper lexer.
+  # The only difference is that since we don't keep track of lexer state in the
+  # same way, it's going to always return the NONE state.
+  def self.lex_compat(source, filepath = "")
+    LexCompat.new(source, filepath).result
+  end
+
+  # This lexes with the Ripper lex. It drops any space events but otherwise
+  # returns the same tokens. Raises SyntaxError if the syntax in source is
+  # invalid.
+  def self.lex_ripper(source)
+    LexRipper.new(source).result
+  end
 
   # Load the serialized AST using the source as a reference into a tree.
   def self.load(source, serialized)
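Taken together, the hunk above turns YARP.lex_compat and YARP.lex_ripper into thin wrappers over the now-autoloaded LexCompat and LexRipper constants. A minimal usage sketch of the two entry points follows; it is illustrative only, not part of the commit, and assumes (per the doc comments above) that both methods return Ripper-style token arrays.

    # Illustrative only: compare YARP's Ripper-compatible lex with filtered Ripper output.
    require "yarp"

    source = <<~RUBY
      foo = 1
      puts foo + 2
    RUBY

    yarp_tokens   = YARP.lex_compat(source)  # lexer state is always reported as NONE
    ripper_tokens = YARP.lex_ripper(source)  # Ripper.lex output with :on_sp events dropped

    yarp_tokens.zip(ripper_tokens).each do |yarp_token, ripper_token|
      puts format("%-60s %-60s", yarp_token.inspect, ripper_token.inspect)
    end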
@@ -322,9 +339,7 @@ module YARP
   end
 end
 
-require_relative "yarp/lex_compat"
 require_relative "yarp/node"
 
 require_relative "yarp/parse_result/comments"
 require_relative "yarp/parse_result/newlines"
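One effect of swapping the eager require_relative for the autoload entries added earlier is that yarp/lex_compat is no longer loaded until one of the lexing entry points is actually used. A quick, illustrative way to observe that (not from the commit):

    require "yarp"

    # Nothing has referenced LexCompat or LexRipper yet, so the autoload has not fired.
    puts $LOADED_FEATURES.any? { |path| path.include?("yarp/lex_compat") }  # expected: false

    # Resolving the LexCompat constant inside lex_compat triggers the autoload.
    YARP.lex_compat("1 + 2")
    puts $LOADED_FEATURES.any? { |path| path.include?("yarp/lex_compat") }  # expected: true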
@@ -795,48 +795,44 @@ module YARP
     end
   end
 
-  # The constant that wraps the behavior of the lexer to match Ripper's output
-  # is an implementation detail, so we don't want it to be public.
-  private_constant :LexCompat
-
-  # Returns an array of tokens that closely resembles that of the Ripper lexer.
-  # The only difference is that since we don't keep track of lexer state in the
-  # same way, it's going to always return the NONE state.
-  def self.lex_compat(source, filepath = "")
-    LexCompat.new(source, filepath).result
-  end
-
-  # This lexes with the Ripper lex. It drops any space events but otherwise
-  # returns the same tokens. Raises SyntaxError if the syntax in source is
-  # invalid.
-  def self.lex_ripper(source)
-    previous = []
-    results = []
-
-    Ripper.lex(source, raise_errors: true).each do |token|
-      case token[1]
-      when :on_sp
-        # skip
-      when :on_tstring_content
-        if previous[1] == :on_tstring_content && (token[2].start_with?("\#$") || token[2].start_with?("\#@"))
-          previous[2] << token[2]
-        else
-          results << token
-          previous = token
-        end
-      when :on_words_sep
-        if previous[1] == :on_words_sep
-          previous[2] << token[2]
-        else
-          results << token
-          previous = token
-        end
-      else
-        results << token
-        previous = token
-      end
-    end
-
-    results
-  end
+  # This is a class that wraps the Ripper lexer to produce almost exactly the
+  # same tokens.
+  class LexRipper
+    attr_reader :source
+
+    def initialize(source)
+      @source = source
+    end
+
+    def result
+      previous = []
+      results = []
+
+      Ripper.lex(source, raise_errors: true).each do |token|
+        case token[1]
+        when :on_sp
+          # skip
+        when :on_tstring_content
+          if previous[1] == :on_tstring_content && (token[2].start_with?("\#$") || token[2].start_with?("\#@"))
+            previous[2] << token[2]
+          else
+            results << token
+            previous = token
+          end
+        when :on_words_sep
+          if previous[1] == :on_words_sep
+            previous[2] << token[2]
+          else
+            results << token
+            previous = token
+          end
+        else
+          results << token
+          previous = token
+        end
+      end
+
+      results
+    end
+  end
 end
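For reference, the merging in LexRipper#result exists because Ripper sometimes splits what YARP reports as a single token. The snippet below is illustrative only; exact token boundaries can differ between Ruby versions. It shows two inputs that tend to produce the runs of :on_words_sep and :on_tstring_content events the method folds back together:

    require "ripper"

    # A %w list spanning several lines can make Ripper emit consecutive
    # :on_words_sep tokens; LexRipper#result concatenates such runs.
    pp Ripper.lex("%w[\n  foo\n  bar\n]")

    # An escaped \#$ (or \#@) inside a double-quoted string can make Ripper split
    # the :on_tstring_content into two tokens; LexRipper#result joins them again.
    pp Ripper.lex('"abc \#$def"')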