-
Notifications
You must be signed in to change notification settings - Fork 17
Expand file tree
/
Copy path full-analysis.jl
More file actions
392 lines (344 loc) · 14.8 KB
/
full-analysis.jl
File metadata and controls
392 lines (344 loc) · 14.8 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
"""
    start_analysis_workers!(server::Server)

Spawn one analysis worker task per slot of `worker_tasks`, each running
`analysis_worker(server)`. A worker that dies with an exception logs the error
instead of failing silently.
"""
function start_analysis_workers!(server::Server)
    worker_tasks = server.state.analysis_manager.worker_tasks
    for slot in eachindex(worker_tasks)
        worker_tasks[slot] = Threads.@spawn :default try
            analysis_worker(server)
        catch err
            # Without this handler a crashed worker task would vanish unnoticed.
            @error "Critical error happened in analysis worker"
            Base.display_error(stderr, err, catch_backtrace())
        end
    end
    return nothing
end
"""
    get_analysis_info(manager::AnalysisManager, uri::URI)

Return the cached analysis info for `uri`, or `nothing` when the file has not
been analyzed (or classified) yet.
"""
function get_analysis_info(manager::AnalysisManager, uri::URI)
    cache = load(manager.cache)
    return get(cache, uri, nothing)
end
# Bookkeeping for a `WorkDoneProgressCreateRequest` issued before starting a full
# analysis: when the client's response arrives, the stored fields are used to kick
# off `request_analysis!` under the created progress token.
struct RequestAnalysisCaller <: RequestCaller
    uri::URI              # document whose analysis was requested
    onsave::Bool          # whether the request came from a save (vs. an open)
    token::ProgressToken  # progress token created for this analysis run
end
# For this request kind, the progress token doubles as the cancellation handle.
function cancellable_token(caller::RequestAnalysisCaller)
    return caller.token
end
"""
    request_analysis_on_open!(server::Server, uri::URI)

Trigger a full analysis for a newly opened `uri`. When the client supports
work-done progress, first ask it to create a progress token and defer the
actual analysis to the response handler; otherwise analyze immediately.
"""
function request_analysis_on_open!(server::Server, uri::URI)
    if !supports(server, :window, :workDoneProgress)
        return request_analysis!(server, uri)
    end
    id = String(gensym(:WorkDoneProgressCreateRequest_request_analysis_on_open!))
    token = String(gensym(:WorkDoneProgressCreateRequest_request_analysis_on_open!))
    addrequest!(server, id=>RequestAnalysisCaller(uri, #=onsave=#false, token))
    params = WorkDoneProgressCreateParams(; token)
    send(server, WorkDoneProgressCreateRequest(; id, params))
end
"""
    request_analysis_on_save!(server::Server, uri::URI)

Trigger a full analysis after `uri` is saved. Mirrors
`request_analysis_on_open!` but records `onsave=true` so the generation counter
is bumped (and debouncing applies) once the analysis actually runs.
"""
function request_analysis_on_save!(server::Server, uri::URI)
    if !supports(server, :window, :workDoneProgress)
        return request_analysis!(server, uri; onsave=true)
    end
    id = String(gensym(:WorkDoneProgressCreateRequest_request_analysis_on_save!))
    token = String(gensym(:WorkDoneProgressCreateRequest_request_analysis_on_save!))
    addrequest!(server, id=>RequestAnalysisCaller(uri, #=onsave=#true, token))
    params = WorkDoneProgressCreateParams(; token)
    send(server, WorkDoneProgressCreateRequest(; id, params))
end
# Resolve a `WorkDoneProgressCreateRequest` reply: run the deferred analysis
# under the token that was created for it.
function handle_request_analysis_response(
    server::Server, request_caller::RequestAnalysisCaller, cancel_flag::CancelFlag
)
    cancellable_token = CancellableToken(request_caller.token, cancel_flag)
    # Each response message handler needs to be written synchronously, so we use `wait=true`
    return request_analysis!(server, request_caller.uri;
        cancellable_token, onsave = request_caller.onsave, wait = true)
end
"""
    request_analysis!(server::Server, uri::URI; kwargs...)

Schedule a full analysis for `uri`.

Keyword arguments:
- `cancellable_token`: progress/cancellation token to attach, if any.
- `onsave`: `true` when triggered by a document save; bumps the entry's
  generation counter and honors the `full_analysis.debounce` configuration.
- `wait`: block until the scheduled analysis completes.
- `notify`: whether diagnostics are published after the run. # used by tests

Files classified as `OutOfScope` are only recorded in the cache and never
queued. If an analysis for the same entry is already in flight, this request
replaces any pending follow-up and is re-queued when the running one finishes.
"""
function request_analysis!(
    server::Server, uri::URI;
    cancellable_token::Union{Nothing,CancellableToken} = nothing,
    onsave::Bool = false,
    wait::Bool = false,
    notify::Bool = true, # used by tests
)
    manager = server.state.analysis_manager
    analysis_info = get_analysis_info(server.state.analysis_manager, uri)
    prev_analysis_result = nothing
    if isnothing(analysis_info)
        # First time we see this file: decide how it should be analyzed.
        entry = lookup_analysis_entry(server.state, uri)
    elseif analysis_info isa OutOfScope
        entry = analysis_info
    else
        analysis_result = analysis_info::AnalysisResult # cached analysis result
        entry = analysis_result.entry
        prev_analysis_result = analysis_result
    end
    if entry isa OutOfScope
        local outofscope = entry
        # Record the out-of-scope classification (copy-on-write) and bail out;
        # such files are never queued for analysis.
        store!(manager.cache) do cache
            if get(cache, uri, nothing) === outofscope
                cache, nothing # already recorded; avoid an unnecessary copy
            else
                local new_cache = copy(cache)
                new_cache[uri] = outofscope
                new_cache, nothing
            end
        end
        return nothing
    end
    entry = entry::AnalysisEntry
    if onsave
        # A save invalidates previous results for this entry: bump the generation.
        generation = increment_generation!(manager, entry)
    else
        generation = get_generation(manager, entry)
    end
    # One-slot channel signaled by `resolve_analysis_request` once this request
    # has been handled (whether or not the analysis actually ran).
    completion = Channel{Nothing}(1)
    request = AnalysisRequest(
        entry, uri, generation, cancellable_token, notify, prev_analysis_result, completion)
    # Check if already analyzing and handle pending requests
    should_queue = store!(manager.pending_analyses) do analyses
        if haskey(analyses, request.entry)
            # Replace any existing pending request with this new one
            local new_analyses = copy(analyses)
            new_analyses[request.entry] = request
            new_analyses, false # Don't queue - just update pending
        else
            analyses, true # Not analyzing - should queue
        end
    end
    should_queue || @goto wait_or_return # Request saved as pending
    debounce = get_config(server.state.config_manager, :full_analysis, :debounce)
    if onsave && debounce isa Float64 && debounce > 0
        local delay::Float64 = debounce
        store!(manager.debounced) do debounced
            # Cancel existing timer if any
            if haskey(debounced, request.entry)
                close(debounced[request.entry])
            end
            local new_debounced = copy(debounced)
            # Set debounce timer
            new_debounced[request.entry] = Timer(delay) do _
                # Timer fired: drop ourselves from the debounce table first…
                store!(manager.debounced) do debounced′
                    local new_debounced′ = copy(debounced′)
                    delete!(new_debounced′, request.entry)
                    return new_debounced′, nothing
                end
                # Queue the request after debounce period
                queue_request!(manager, request)
            end
            return new_debounced, nothing
        end
    else
        queue_request!(manager, request)
    end
    @label wait_or_return
    wait && take!(completion)
    nothing
end
"""
    queue_request!(manager::AnalysisManager, request::AnalysisRequest)

Mark `request.entry` as in-flight (with no pending follow-up yet) and hand the
request over to the worker queue.
"""
function queue_request!(manager::AnalysisManager, request::AnalysisRequest)
    store!(manager.pending_analyses) do current
        updated = copy(current)
        # `nothing` means "analysis running, nothing queued behind it yet".
        updated[request.entry] = nothing
        return updated, nothing
    end
    return put!(manager.queue, request)
end
# Analysis queue processing implementation (analysis serialized per AnalysisEntry)
"""
    analysis_worker(server::Server)

Worker loop: repeatedly pull the next `AnalysisRequest` off the manager queue
and resolve it. Runs until the enclosing task is torn down.
"""
function analysis_worker(server::Server)
    # Note: Currently single worker, but designed for future multi-worker scaling.
    # When multiple workers exist, the per-entry serialization ensures correctness.
    while true
        next = take!(server.state.analysis_manager.queue)
        @tryinvokelatest resolve_analysis_request(server, next)
        # Give the GC an opportunity to run between (potentially long) analyses.
        GC.safepoint()
    end
end
"""
    resolve_analysis_request(server::Server, request::AnalysisRequest)

Run one queued analysis request to completion, then re-queue any request that
arrived for the same entry while this one was running.

The analysis itself is skipped — though completion is still signaled — when the
request's generation has already been analyzed, or when any previously analyzed
file currently has parse errors.
"""
function resolve_analysis_request(server::Server, request::AnalysisRequest)
    manager = server.state.analysis_manager
    is_staled_request(manager, request) || @goto next_request # skip analysis if the analyzed generation is still latest
    has_any_parse_errors(server, request) && @goto next_request
    # `@something … @goto next_request`: any analysis error is logged and treated
    # as "no result" so the pending-request bookkeeping below still runs.
    analysis_result = @something try
        execute_analysis_request(server, request)
    catch err
        @error "Error in `execute_analysis_request` for " request
        Base.display_error(stderr, err, catch_backtrace())
        nothing
    end @goto next_request
    update_analysis_cache!(manager, analysis_result)
    mark_analyzed_generation!(manager, request)
    request.notify && notify_diagnostics!(server)
    @label next_request
    put!(request.completion, nothing) # Notify the completion callback
    # Check for pending request and re-queue if needed
    pending_request = store!(manager.pending_analyses) do analyses
        if haskey(analyses, request.entry)
            new_analyses = copy(analyses)
            pending = pop!(new_analyses, request.entry)
            if pending !== nothing
                # Re-mark as analyzing before queueing the pending request
                new_analyses[request.entry] = nothing
            end
            return new_analyses, pending
        end
        return analyses, nothing
    end
    if pending_request !== nothing
        put!(manager.queue, pending_request)
    end
end
"""
    increment_generation!(manager::AnalysisManager, entry::AnalysisEntry) -> Int

Atomically bump and return the "current generation" counter for `entry`
(starting from 1 for a previously unseen entry).
"""
function increment_generation!(manager::AnalysisManager, @nospecialize entry::AnalysisEntry)
    # NOTE(review): wrapping in `Some{AnalysisEntry}` appears intended to give the
    # closure below a concretely-typed capture despite `@nospecialize` — confirm
    # before simplifying.
    some = Some{AnalysisEntry}(entry)
    store!(manager.current_generations) do generations
        new_generations = copy(generations)
        generation = get(new_generations, some.value, 0) + 1
        new_generations[some.value] = generation
        return new_generations, generation
    end
end
"""
    get_generation(manager::AnalysisManager, entry::AnalysisEntry) -> Int

Return the most recently requested generation for `entry` (0 when the entry has
never had its generation bumped).
"""
function get_generation(manager::AnalysisManager, @nospecialize entry::AnalysisEntry)
    generations = load(manager.current_generations)
    return get(generations, entry, 0)
end
# `true` when the generation this request was created for has not been analyzed
# yet — i.e. the request is still worth running. (-1 sentinel: never analyzed.)
function is_staled_request(manager::AnalysisManager, request::AnalysisRequest)
    last_analyzed = get(load(manager.analyzed_generations), request.entry, -1)
    return last_analyzed != request.generation
end
"""
    has_any_parse_errors(server::Server, request::AnalysisRequest) -> Bool

Return `true` when any file covered by the previous analysis currently has
syntax diagnostics in its saved parse stream. A fresh request (no previous
result) reports `false`, since nothing is known about the sources yet.
"""
function has_any_parse_errors(server::Server, request::AnalysisRequest)
    prev = request.prev_analysis_result
    isnothing(prev) && return false # fresh analysis, no knowledge about the sources
    for uri in analyzed_file_uris(prev)
        fi = get_saved_file_info(server.state, uri)
        # Files without saved info contribute nothing, matching the original
        # `@something … return false` inside the `any` closure.
        isnothing(fi) && continue
        isempty(fi.parsed_stream.diagnostics) || return true
    end
    return false
end
"""
    update_analysis_cache!(manager::AnalysisManager, analysis_result::AnalysisResult)

Point every file covered by `analysis_result` at that result in the
copy-on-write analysis cache.
"""
function update_analysis_cache!(manager::AnalysisManager, analysis_result::AnalysisResult)
    covered = analyzed_file_uris(analysis_result)
    store!(manager.cache) do cache
        updated = copy(cache)
        for uri in covered
            updated[uri] = analysis_result
        end
        return updated, nothing
    end
end
# Record that `request.generation` has now been analyzed for its entry, so a
# later identical-generation request is skipped by `is_staled_request`.
function mark_analyzed_generation!(manager::AnalysisManager, request::AnalysisRequest)
    store!(manager.analyzed_generations) do gens
        updated = copy(gens)
        updated[request.entry] = request.generation
        return updated, nothing
    end
end
"""
    begin_full_analysis_progress(server::Server, request::AnalysisRequest)

Send a `WorkDoneProgressBegin` notification for `request` when it carries a
cancellable progress token; no-op otherwise.
"""
function begin_full_analysis_progress(server::Server, request::AnalysisRequest)
    cancellable_token = @something request.cancellable_token return nothing
    uri = entryuri(request.entry)
    # `uri2filename` can return `nothing` for unsupported URIs: guard like the
    # other call sites instead of letting `basename(nothing)` raise a MethodError.
    filename = @something uri2filename(uri) error(lazy"Unsupported URI: $uri")
    # "Reanalyzing" only when a previous result exists for this file set.
    pre = isnothing(request.prev_analysis_result) ? "Analyzing" : "Reanalyzing"
    title = "$(pre) $(basename(filename)) [$(entrykind(request.entry))]"
    send_progress(server, cancellable_token.token,
        WorkDoneProgressBegin(;
            title,
            cancellable = true,
            message = "Full analysis initiated",
            percentage = 0))
    yield_to_endpoint()
end
# Counterpart to `begin_full_analysis_progress`: close out the progress UI
# when a cancellable token was attached to this request.
function end_full_analysis_progress(server::Server, request::AnalysisRequest)
    ct = request.cancellable_token
    isnothing(ct) && return nothing
    return send_progress(server, ct.token,
        WorkDoneProgressEnd(; message = "Full analysis finished"))
end
"""
    analyze_parsed_if_exist(server::Server, request::AnalysisRequest, args...)

Run the JET analysis for `request`: on the already-parsed saved buffer when one
exists, otherwise directly from the file on disk. Progress begin/end
notifications bracket the analysis in both cases.
"""
function analyze_parsed_if_exist(server::Server, request::AnalysisRequest, args...)
    uri = entryuri(request.entry)
    jetconfigs = entryjetconfigs(request.entry)
    fi = get_saved_file_info(server.state, uri)
    # Select the analysis closure first (URI validation happens before any
    # progress notification, preserving the original error ordering).
    if isnothing(fi)
        filepath = @something uri2filepath(uri) error(lazy"Unsupported URI: $uri")
        analyze = () -> JET.analyze_and_report_file!(
            LSInterpreter(server, request), filepath, args...; jetconfigs...)
    else
        filename = @something uri2filename(uri) error(lazy"Unsupported URI: $uri")
        parsed = fi.syntax_node
        analyze = () -> JET.analyze_and_report_expr!(
            LSInterpreter(server, request), parsed, filename, args...; jetconfigs...)
    end
    begin_full_analysis_progress(server, request)
    try
        return analyze()
    finally
        end_full_analysis_progress(server, request)
    end
end
# A full analysis counts as successful when JET recorded no top-level error
# reports in the result.
is_full_analysis_successful(result) = isempty(result.res.toplevel_error_reports)
# Refresh `AnalyzerState(analyzer).world` to the current world counter so that
# `analyzer` can infer any newly defined methods in subsequent analyses.
function update_analyzer_world(analyzer::LSAnalyzer)
    oldstate = JET.AnalyzerState(analyzer)
    newstate = JET.AnalyzerState(oldstate; world = Base.get_world_counter())
    return JET.AbstractAnalyzer(analyzer, newstate)
end
"""
    new_analysis_result(request::AnalysisRequest, result) -> AnalysisResult

Package a raw JET `result` into an `AnalysisResult` for the cache.

When the full analysis failed and a previous result exists, the previous
result's `actual2virtual`/`analyzer`/`analyzed_file_infos` are carried over
(only the freshly computed diagnostics are new); otherwise everything comes
from `result` and the analyzer's world age is refreshed.
"""
function new_analysis_result(request::AnalysisRequest, result)
    analyzed_file_infos = Dict{URI,JET.AnalyzedFileInfo}(
        # `filepath` is an absolute path (since `path` is specified as absolute)
        filename2uri(filepath) => analyzed_file_info
        for (filepath, analyzed_file_info) in result.res.analyzed_files)
    uri2diagnostics = jet_result_to_diagnostics(keys(analyzed_file_infos), result)
    (; entry, prev_analysis_result) = request
    if !is_full_analysis_successful(result) && !isnothing(prev_analysis_result)
        # NOTE: this destructuring deliberately shadows `analyzed_file_infos`
        # with the previous result's value on the failure path.
        (; actual2virtual, analyzer, analyzed_file_infos) = prev_analysis_result
    else
        actual2virtual = result.res.actual2virtual::JET.Actual2Virtual
        analyzer = update_analyzer_world(result.analyzer)
    end
    return AnalysisResult(entry, uri2diagnostics, analyzer, analyzed_file_infos, actual2virtual)
end
"""
    lookup_analysis_entry(state::ServerState, uri::URI)

Decide how `uri` should be analyzed, returning either an `OutOfScope` marker or
a concrete `AnalysisEntry`: standalone script, script-in-environment, package
source, or package test suite, depending on where the file lives relative to
its project environment.
"""
function lookup_analysis_entry(state::ServerState, uri::URI)
    maybe_env_path = find_analysis_env_path(state, uri)
    if maybe_env_path isa OutOfScope
        return maybe_env_path
    end
    env_path = maybe_env_path
    if isnothing(env_path)
        # No project environment found: analyze as a standalone script.
        return ScriptAnalysisEntry(uri)
    elseif uri.scheme == "untitled"
        # Unsaved buffer: analyze as a script within the discovered environment.
        return ScriptInEnvAnalysisEntry(env_path, uri)
    end
    pkgname = find_pkg_name(env_path)
    filepath = uri2filepath(uri)::String # uri.scheme === "file"
    filekind, filedir = find_package_directory(filepath, env_path)
    if filekind === :src
        # Package source file: analyze the whole package from its root source
        # file. The `return nothing`s inside the closure make `@something` fall
        # back to script-in-env analysis when the package cannot be resolved.
        return @something activate_do(env_path) do
            pkgenv = @lock Base.require_lock @something Base.identify_package_env(pkgname) begin
                @warn "Failed to identify package environment" pkgname
                return nothing
            end
            pkgid, env = pkgenv
            pkgfile = @something Base.locate_package(pkgid, env) begin
                @warn "Expected a package to have a source file" pkgname
                return nothing
            end
            pkgfileuri = filepath2uri(pkgfile)
            PackageSourceAnalysisEntry(env_path, pkgfileuri, pkgid)
        end ScriptInEnvAnalysisEntry(env_path, uri)
    elseif filekind === :test
        # Test file: analyze the package test suite from `test/runtests.jl`.
        runtestsfile = joinpath(filedir, "runtests.jl")
        runtestsuri = filepath2uri(runtestsfile)
        return PackageTestAnalysisEntry(env_path, runtestsuri)
    elseif filekind === :docs # TODO
    elseif filekind === :ext # TODO
    else
        @assert filekind === :script
    end
    # `:docs`/`:ext` (not yet specially handled) and plain `:script` files fall
    # through to script-in-env analysis.
    return ScriptInEnvAnalysisEntry(env_path, uri)
end
"""
    execute_analysis_request(server::Server, request::AnalysisRequest)

Dispatch on the concrete `AnalysisEntry` kind, run the corresponding JET
analysis (inside the entry's environment when it has one), and wrap the raw
result into an `AnalysisResult`.
"""
function execute_analysis_request(server::Server, request::AnalysisRequest)
    entry = request.entry
    if entry isa ScriptAnalysisEntry
        result = analyze_parsed_if_exist(server, request)
    elseif entry isa ScriptInEnvAnalysisEntry || entry isa PackageTestAnalysisEntry
        # Both kinds analyze the entry file itself, just inside its environment.
        result = activate_do(entryenvpath(entry)) do
            analyze_parsed_if_exist(server, request)
        end
    elseif entry isa PackageSourceAnalysisEntry
        result = activate_do(entryenvpath(entry)) do
            analyze_parsed_if_exist(server, request, entry.pkgid)
        end
    else
        error("Unsupported analysis entry $entry")
    end
    ret = new_analysis_result(request, result)
    # TODO Request fallback analysis in cases this script was not analyzed by the analysis entry
    # request.uri ∉ analyzed_file_uris(ret)
    return ret
end