Skip to content

Commit e7ee0ca

Browse files
committed
Use a backward-compatible tokenizer function
1 parent 0a9d163 commit e7ee0ca

File tree

1 file changed

+19
-8
lines changed

1 file changed

+19
-8
lines changed

pyls/plugins/importmagic_lint.py

Lines changed: 19 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import re
44
import sys
55
import tokenize
6-
from io import BytesIO
76
from concurrent.futures import ThreadPoolExecutor
87
import importmagic
98
from pyls import hookimpl, lsp, _utils
@@ -21,6 +20,19 @@
2120
_index_cache = {}
2221

2322

23+
class _SourceReader():
24+
# Used to tokenize python source code
25+
def __init__(self, source):
26+
self.lines = re.findall(r'[^\n]*\n', source)
27+
# To pop lines later
28+
self.lines.reverse()
29+
30+
def readline(self):
31+
if self.lines:
32+
return self.lines.pop()
33+
return ''
34+
35+
2436
def _build_index(paths):
2537
"""Build index of symbols from python modules.
2638
"""
@@ -66,12 +78,11 @@ def _get_imports_list(source, index=None):
6678

6779
def _tokenize(source):
6880
"""Tokenize python source code.
81+
Returns only NAME tokens.
6982
"""
70-
stream = BytesIO(source.encode())
71-
tokens = tokenize.tokenize(stream.readline)
72-
if tokens is None:
73-
return []
74-
return list(tokens)
83+
readline = _SourceReader(source).readline
84+
filter_name = lambda token: token[0] == tokenize.NAME
85+
return filter(filter_name, tokenize.generate_tokens(readline))
7586

7687

7788
def _search_symbol(source, symbol):
@@ -94,8 +105,8 @@ def _search_symbol(source, symbol):
94105
}
95106
}
96107
"""
97-
symbol_tokens = _tokenize(symbol)
98-
source_tokens = _tokenize(source)
108+
symbol_tokens = list(_tokenize(symbol))
109+
source_tokens = list(_tokenize(source))
99110

100111
get_str = lambda token: token[1]
101112
symbol_tokens_str = list(map(get_str, symbol_tokens))

0 commit comments

Comments
 (0)