Skip to content

Commit cdd4ea8

Browse files
fix(lsp): correct semantic token delta encoding for multiline tokens
1 parent 7218bc9 commit cdd4ea8

File tree

2 files changed

+94
-25
lines changed

2 files changed

+94
-25
lines changed

internal/lsp/token_encoder.go

Lines changed: 39 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,9 @@ type TokenEncoder struct {
1717
Tokens []lang.SemanticToken
1818
ClientCaps lsp.SemanticTokensClientCapabilities
1919

20-
// lastEncodedTokenIdx tracks index of the last encoded token
21-
// so we can account for any skipped tokens in calculating diff
22-
lastEncodedTokenIdx int
20+
lastEncodedLine int
21+
lastEncodedStartChar int
22+
hasEncodedToken bool
2323
}
2424

2525
func (te *TokenEncoder) Encode() []uint32 {
@@ -54,17 +54,8 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
5454

5555
tokenLineDelta := token.Range.End.Line - token.Range.Start.Line
5656

57-
previousLine := 0
58-
previousStartChar := 0
59-
if i > 0 {
60-
previousLine = te.Tokens[te.lastEncodedTokenIdx].Range.End.Line - 1
61-
currentLine := te.Tokens[i].Range.End.Line - 1
62-
if currentLine == previousLine {
63-
previousStartChar = te.Tokens[te.lastEncodedTokenIdx].Range.Start.Column - 1
64-
}
65-
}
66-
6757
if tokenLineDelta == 0 || false /* te.clientCaps.MultilineTokenSupport */ {
58+
previousLine, previousStartChar := te.lastPosition(token.Range.Start.Line - 1)
6859
deltaLine := token.Range.Start.Line - 1 - previousLine
6960
tokenLength := token.Range.End.Byte - token.Range.Start.Byte
7061
deltaStartChar := token.Range.Start.Column - 1 - previousStartChar
@@ -76,23 +67,31 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
7667
uint32(tokenTypeIdx),
7768
uint32(modifierBitMask),
7869
}...)
70+
71+
te.recordPosition(token.Range.Start.Line-1, token.Range.Start.Column-1)
7972
} else {
8073
// Add entry for each line of a multiline token
8174
for tokenLine := token.Range.Start.Line - 1; tokenLine <= token.Range.End.Line-1; tokenLine++ {
82-
deltaLine := tokenLine - previousLine
83-
84-
deltaStartChar := 0
85-
if tokenLine == token.Range.Start.Line-1 {
86-
deltaStartChar = token.Range.Start.Column - 1 - previousStartChar
87-
}
88-
75+
startChar := 0
8976
lineBytes := bytes.TrimRight(te.Lines[tokenLine].Bytes, "\n\r")
9077
length := len(lineBytes)
78+
if tokenLine == token.Range.Start.Line-1 {
79+
startChar = token.Range.Start.Column - 1
80+
length -= startChar
81+
}
9182

9283
if tokenLine == token.Range.End.Line-1 {
93-
length = token.Range.End.Column - 1
84+
length = token.Range.End.Column - 1 - startChar
9485
}
9586

87+
if length <= 0 {
88+
continue
89+
}
90+
91+
previousLine, previousStartChar := te.lastPosition(tokenLine)
92+
deltaLine := tokenLine - previousLine
93+
deltaStartChar := startChar - previousStartChar
94+
9695
data = append(data, []uint32{
9796
uint32(deltaLine),
9897
uint32(deltaStartChar),
@@ -101,15 +100,31 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
101100
uint32(modifierBitMask),
102101
}...)
103102

104-
previousLine = tokenLine
103+
te.recordPosition(tokenLine, startChar)
105104
}
106105
}
107106

108-
te.lastEncodedTokenIdx = i
109-
110107
return data
111108
}
112109

110+
func (te *TokenEncoder) lastPosition(tokenLine int) (int, int) {
111+
if !te.hasEncodedToken {
112+
return 0, 0
113+
}
114+
115+
if tokenLine == te.lastEncodedLine {
116+
return te.lastEncodedLine, te.lastEncodedStartChar
117+
}
118+
119+
return te.lastEncodedLine, 0
120+
}
121+
122+
func (te *TokenEncoder) recordPosition(line, startChar int) {
123+
te.lastEncodedLine = line
124+
te.lastEncodedStartChar = startChar
125+
te.hasEncodedToken = true
126+
}
127+
113128
func (te *TokenEncoder) resolveTokenType(token lang.SemanticToken) (semtok.TokenType, bool) {
114129
switch token.Type {
115130
case lang.TokenBlockType:

internal/lsp/token_encoder_test.go

Lines changed: 55 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import (
99
"github.com/google/go-cmp/cmp"
1010
"github.com/hashicorp/hcl-lang/lang"
1111
"github.com/hashicorp/hcl/v2"
12+
"github.com/hashicorp/terraform-ls/internal/lsp/semtok"
1213
"github.com/hashicorp/terraform-ls/internal/protocol"
1314
"github.com/hashicorp/terraform-ls/internal/source"
1415
)
@@ -164,7 +165,7 @@ func TestTokenEncoder_multiLineTokens(t *testing.T) {
164165
}
165166
data := te.Encode()
166167
expectedData := []uint32{
167-
1, 2, 24, 9, 0,
168+
1, 2, 22, 9, 0,
168169
1, 0, 15, 9, 0,
169170
1, 0, 11, 9, 0,
170171
}
@@ -175,6 +176,59 @@ func TestTokenEncoder_multiLineTokens(t *testing.T) {
175176
}
176177
}
177178

179+
func TestTokenEncoder_multiLineTokensAfterInterpolation(t *testing.T) {
180+
bytes := []byte(" ${local.foo}\n ${local.bar}\n")
181+
legend := TokenTypesLegend(serverTokenTypes.AsStrings())
182+
stringTokenTypeIdx := legend.Index(semtok.TokenType(lang.TokenString))
183+
referenceTokenTypeIdx := legend.Index(semtok.TokenType(lang.TokenReferenceStep))
184+
185+
te := &TokenEncoder{
186+
Lines: source.MakeSourceLines("test.tf", bytes),
187+
Tokens: []lang.SemanticToken{
188+
{
189+
Type: lang.TokenString,
190+
Range: hcl.Range{
191+
Filename: "test.tf",
192+
Start: hcl.Pos{Line: 1, Column: 15, Byte: 14},
193+
End: hcl.Pos{Line: 2, Column: 3, Byte: 17},
194+
},
195+
},
196+
{
197+
Type: lang.TokenReferenceStep,
198+
Range: hcl.Range{
199+
Filename: "test.tf",
200+
Start: hcl.Pos{Line: 2, Column: 5, Byte: 19},
201+
End: hcl.Pos{Line: 2, Column: 10, Byte: 24},
202+
},
203+
},
204+
{
205+
Type: lang.TokenReferenceStep,
206+
Range: hcl.Range{
207+
Filename: "test.tf",
208+
Start: hcl.Pos{Line: 2, Column: 11, Byte: 25},
209+
End: hcl.Pos{Line: 2, Column: 14, Byte: 28},
210+
},
211+
},
212+
},
213+
ClientCaps: protocol.SemanticTokensClientCapabilities{
214+
TokenTypes: serverTokenTypes.AsStrings(),
215+
TokenModifiers: serverTokenModifiers.AsStrings(),
216+
},
217+
}
218+
219+
data := te.Encode()
220+
expectedData := []uint32{
221+
1, 0, 2, uint32(stringTokenTypeIdx), 0,
222+
0, 4, 5, uint32(referenceTokenTypeIdx), 0,
223+
0, 6, 3, uint32(referenceTokenTypeIdx), 0,
224+
}
225+
226+
if diff := cmp.Diff(expectedData, data); diff != "" {
227+
t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v",
228+
expectedData, data)
229+
}
230+
}
231+
178232
func TestTokenEncoder_deltaStartCharBug(t *testing.T) {
179233
bytes := []byte(`resource "aws_iam_role_policy" "firehose_s3_access" {
180234
}

0 commit comments

Comments (0)