# typed: true
# frozen_string_literal: true

begin
  gem("rubocop", ">= 1.63.0")
rescue LoadError
  $stderr.puts("AST translation turned off because RuboCop >= 1.63.0 is required")
  return
end

require "prism/translation/parser/rubocop"

# Patch for RuboCop's ProcessedSource so that we can pass an existing parse result to RuboCop without having to
# re-parse the file (see the usage sketch after this module)
module ProcessedSourcePatch
  extend T::Sig

  sig do
    params(
      source: String,
      ruby_version: Float,
      path: T.nilable(String),
      parser_engine: Symbol,
      prism_result: T.nilable(Prism::ParseLexResult),
    ).void
  end
  def initialize(source, ruby_version, path = nil, parser_engine: :parser_whitequark, prism_result: nil)
    @prism_result = prism_result

    # Invoking super will end up invoking our patched version of tokenize, which avoids re-parsing the file
    super(source, ruby_version, path, parser_engine: parser_engine)
  end

  sig { params(parser: T.untyped).returns(T::Array[T.untyped]) }
  def tokenize(parser)
    begin
      # This is where we need to pass the existing result to prevent a re-parse
      ast, comments, tokens = parser.tokenize(@buffer, parse_result: @prism_result)
      # Normalize a falsy AST (e.g., for an empty file) to `nil`
      ast ||= nil
    rescue Parser::SyntaxError
      comments = []
      tokens = []
    end

    ast&.complete!
    tokens.map! { |t| RuboCop::AST::Token.from_parser_token(t) }

    [ast, comments, tokens]
  end

  RuboCop::AST::ProcessedSource.prepend(self)
end
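
# A minimal usage sketch (the file name, source, and Ruby version below are illustrative assumptions, not part of this
# patch): build a ProcessedSource from a Prism result we already have, so RuboCop does not parse the file a second
# time.
#
#   source = File.read("example.rb")
#   prism_result = Prism.parse_lex(source, filepath: "example.rb")
#
#   processed_source = RuboCop::AST::ProcessedSource.new(
#     source,
#     RUBY_VERSION.to_f,
#     "example.rb",
#     parser_engine: :parser_prism,
#     prism_result: prism_result,
#   )
#   processed_source.ast # built from `prism_result`, without re-parsing `source`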

# This patch allows Prism's translation parser to accept an existing parse result in `tokenize`. This doesn't match the
# method's original signature, but there's no other way to allow reusing the result of a previous parse (see the sketch
# after this module)
module TranslatorPatch
  extend T::Sig
  extend T::Helpers

  requires_ancestor { Prism::Translation::Parser }

  sig do
    params(
      source_buffer: ::Parser::Source::Buffer,
      recover: T::Boolean,
      parse_result: T.nilable(Prism::ParseLexResult),
    ).returns(T::Array[T.untyped])
  end
  def tokenize(source_buffer, recover = false, parse_result: nil)
    @source_buffer = source_buffer
    source = source_buffer.source

    offset_cache = build_offset_cache(source)
    # Reuse the parse result handed in by the caller (if any) instead of lexing the source again
    result = if parse_result
      parse_result
    else
      begin
        unwrap(
          Prism.parse_lex(source, filepath: source_buffer.name, version: convert_for_prism(version)),
          offset_cache,
        )
      rescue ::Parser::SyntaxError
        raise unless recover
      end
    end

    program, tokens = result.value
    ast = build_ast(program, offset_cache) if result.success?

    [
      ast,
      build_comments(result.comments, offset_cache),
      build_tokens(tokens, offset_cache),
    ]
  ensure
    @source_buffer = nil
  end

  Prism::Translation::Parser.prepend(self)
end
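
# A minimal sketch of calling the patched `tokenize` directly (the file name and the use of Prism's Ruby 3.4
# translation parser with its default builder are illustrative assumptions):
#
#   source = File.read("example.rb")
#   buffer = Parser::Source::Buffer.new("example.rb", source: source)
#   result = Prism.parse_lex(source, filepath: "example.rb")
#
#   parser = Prism::Translation::Parser34.new
#   ast, comments, tokens = parser.tokenize(buffer, parse_result: result)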