
Commit 226dc59

use backward compatible tokenizer func
1 parent: 0a9d163

File tree

1 file changed (+7, -8 lines)


pyls/plugins/importmagic_lint.py

+7 -8
@@ -3,7 +3,7 @@
 import re
 import sys
 import tokenize
-from io import BytesIO
+from io import StringIO
 from concurrent.futures import ThreadPoolExecutor
 import importmagic
 from pyls import hookimpl, lsp, _utils
@@ -66,12 +66,11 @@ def _get_imports_list(source, index=None):
 
 def _tokenize(source):
     """Tokenize python source code.
+    Returns only NAME tokens.
     """
-    stream = BytesIO(source.encode())
-    tokens = tokenize.tokenize(stream.readline)
-    if tokens is None:
-        return []
-    return list(tokens)
+    readline = StringIO(source).readline
+    filter_name = lambda token: token[0] == tokenize.NAME
+    return filter(filter_name, tokenize.generate_tokens(readline))
 
 
 def _search_symbol(source, symbol):
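
The "backward compatible tokenizer func" here is tokenize.generate_tokens, which accepts a text-mode readline in both Python 2 and Python 3, whereas Python 3's tokenize.tokenize expects a bytes readline; that is why the BytesIO/encode() round-trip is replaced by StringIO. A minimal standalone sketch of the same pattern (not part of the commit; name_tokens is a hypothetical stand-in for _tokenize, and it returns a list rather than the lazy filter object used in the plugin):

import tokenize
from io import StringIO

def name_tokens(source):
    # Same approach as the patched _tokenize: generate_tokens reads text,
    # so no encode()/BytesIO round-trip is needed.
    readline = StringIO(source).readline
    return [tok for tok in tokenize.generate_tokens(readline)
            if tok[0] == tokenize.NAME]

print([tok[1] for tok in name_tokens("import os\nprint(os.getcwd())\n")])
# -> ['import', 'os', 'print', 'os', 'getcwd']
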
@@ -94,8 +93,8 @@ def _search_symbol(source, symbol):
         }
     }
     """
-    symbol_tokens = _tokenize(symbol)
-    source_tokens = _tokenize(source)
+    symbol_tokens = list(_tokenize(symbol))
+    source_tokens = list(_tokenize(source))
 
     get_str = lambda token: token[1]
     symbol_tokens_str = list(map(get_str, symbol_tokens))
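
On Python 3 the rewritten _tokenize returns a lazy filter object rather than a list, so _search_symbol now wraps the result in list() before the tokens are reused. A small illustration of the one-shot iterator behaviour (hypothetical values, not taken from the plugin):

tokens = filter(lambda tok: tok % 2, [1, 2, 3])
print(list(tokens))  # [1, 3]
print(list(tokens))  # [] -- the iterator is already exhausted
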
