@@ -3,7 +3,6 @@
 import re
 import sys
 import tokenize
-from io import BytesIO
 from concurrent.futures import ThreadPoolExecutor
 import importmagic
 from pyls import hookimpl, lsp, _utils
@@ -21,6 +20,19 @@
 _index_cache = {}


+class _SourceReader():
+    # Feeds python source to tokenize, one line per readline() call
+    def __init__(self, source):
+        self.lines = re.findall(r'[^\n]*\n', source)
+        # Reversed so readline() can pop the next line off the end
+        self.lines.reverse()
+
+    def readline(self):
+        if self.lines:
+            return self.lines.pop()
+        return ''
+
+
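For context: tokenize.generate_tokens() accepts any readline callable that returns one str line per call and '' at end of input, which is exactly the protocol _SourceReader implements. A minimal sketch of that protocol, using io.StringIO as a stand-in for _SourceReader (the sample source string is illustrative, not from this commit):

    import io
    import tokenize

    source = 'total = price * qty\n'
    # io.StringIO.readline behaves like _SourceReader.readline:
    # one line per call, then '' once the input is exhausted.
    tokens = tokenize.generate_tokens(io.StringIO(source).readline)
    print([tok[1] for tok in tokens if tok[0] == tokenize.NAME])
    # -> ['total', 'price', 'qty']

Note that the re.findall(r'[^\n]*\n', source) split only captures lines terminated by '\n', so a final unterminated line is never fed to the tokenizer.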
 def _build_index(paths):
     """Build index of symbols from python modules.
     """
@@ -66,12 +78,11 @@ def _get_imports_list(source, index=None):
 
 def _tokenize(source):
     """Tokenize python source code.
+    Returns only NAME tokens.
     """
-    stream = BytesIO(source.encode())
-    tokens = tokenize.tokenize(stream.readline)
-    if tokens is None:
-        return []
-    return list(tokens)
+    readline = _SourceReader(source).readline
+    filter_name = lambda token: token[0] == tokenize.NAME
+    return filter(filter_name, tokenize.generate_tokens(readline))
 
 
 def _search_symbol(source, symbol):
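On what the rewritten _tokenize yields: each item is a 5-tuple of (type, string, start, end, line), and filtering on tokenize.NAME keeps identifiers and keywords only. A standalone sketch with the same shape as the patched function, again with io.StringIO standing in for _SourceReader:

    import io
    import tokenize

    def _tokenize(source):
        readline = io.StringIO(source).readline
        filter_name = lambda token: token[0] == tokenize.NAME
        return filter(filter_name, tokenize.generate_tokens(readline))

    print([tok[1] for tok in _tokenize('import os\n')])
    # -> ['import', 'os'] (keywords are NAME tokens too)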
@@ -94,8 +105,8 @@ def _search_symbol(source, symbol):
     }
     }
     """
-    symbol_tokens = _tokenize(symbol)
-    source_tokens = _tokenize(source)
+    symbol_tokens = list(_tokenize(symbol))
+    source_tokens = list(_tokenize(source))
 
     get_str = lambda token: token[1]
     symbol_tokens_str = list(map(get_str, symbol_tokens))
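The list() wrappers are needed because _tokenize now returns a filter object, a lazy one-shot iterator: it would come up silently empty on a second pass, and _search_symbol presumably needs the tokens more than once (the map() over symbol_tokens above is at least one pass). A quick illustration of that pitfall:

    tokens = filter(str.isalpha, ['a', '1', 'b'])
    print(list(tokens))  # ['a', 'b']
    print(list(tokens))  # [] -- the iterator is already exhausted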