Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 39 additions & 24 deletions internal/lsp/token_encoder.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ type TokenEncoder struct {
Tokens []lang.SemanticToken
ClientCaps lsp.SemanticTokensClientCapabilities

// lastEncodedTokenIdx tracks index of the last encoded token
// so we can account for any skipped tokens in calculating diff
lastEncodedTokenIdx int
lastEncodedLine int
lastEncodedStartChar int
hasEncodedToken bool
}

func (te *TokenEncoder) Encode() []uint32 {
Expand Down Expand Up @@ -54,17 +54,8 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {

tokenLineDelta := token.Range.End.Line - token.Range.Start.Line

previousLine := 0
previousStartChar := 0
if i > 0 {
previousLine = te.Tokens[te.lastEncodedTokenIdx].Range.End.Line - 1
currentLine := te.Tokens[i].Range.End.Line - 1
if currentLine == previousLine {
previousStartChar = te.Tokens[te.lastEncodedTokenIdx].Range.Start.Column - 1
}
}

if tokenLineDelta == 0 || false /* te.clientCaps.MultilineTokenSupport */ {
previousLine, previousStartChar := te.lastPosition(token.Range.Start.Line - 1)
deltaLine := token.Range.Start.Line - 1 - previousLine
tokenLength := token.Range.End.Byte - token.Range.Start.Byte
deltaStartChar := token.Range.Start.Column - 1 - previousStartChar
Expand All @@ -76,23 +67,31 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
uint32(tokenTypeIdx),
uint32(modifierBitMask),
}...)

te.recordPosition(token.Range.Start.Line-1, token.Range.Start.Column-1)
} else {
// Add entry for each line of a multiline token
for tokenLine := token.Range.Start.Line - 1; tokenLine <= token.Range.End.Line-1; tokenLine++ {
deltaLine := tokenLine - previousLine

deltaStartChar := 0
if tokenLine == token.Range.Start.Line-1 {
deltaStartChar = token.Range.Start.Column - 1 - previousStartChar
}

startChar := 0
lineBytes := bytes.TrimRight(te.Lines[tokenLine].Bytes, "\n\r")
length := len(lineBytes)
if tokenLine == token.Range.Start.Line-1 {
startChar = token.Range.Start.Column - 1
length -= startChar
}

if tokenLine == token.Range.End.Line-1 {
length = token.Range.End.Column - 1
length = token.Range.End.Column - 1 - startChar
}

if length <= 0 {
continue
}

previousLine, previousStartChar := te.lastPosition(tokenLine)
deltaLine := tokenLine - previousLine
deltaStartChar := startChar - previousStartChar

data = append(data, []uint32{
uint32(deltaLine),
uint32(deltaStartChar),
Expand All @@ -101,15 +100,31 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
uint32(modifierBitMask),
}...)

previousLine = tokenLine
te.recordPosition(tokenLine, startChar)
}
}

te.lastEncodedTokenIdx = i

return data
}

// lastPosition reports the (line, startChar) pair that the next token
// entry on tokenLine should be delta-encoded against.
//
// Before any entry has been emitted the base is the document origin
// (0, 0). When the previous entry sits on the same line its start
// character carries over as well; on any later line only the line
// carries over and the start character base resets to 0, matching the
// LSP relative encoding rules.
func (te *TokenEncoder) lastPosition(tokenLine int) (int, int) {
	switch {
	case !te.hasEncodedToken:
		return 0, 0
	case tokenLine == te.lastEncodedLine:
		return te.lastEncodedLine, te.lastEncodedStartChar
	default:
		return te.lastEncodedLine, 0
	}
}

// recordPosition remembers where the most recent token entry was
// emitted so that subsequent entries can be delta-encoded against it
// via lastPosition.
func (te *TokenEncoder) recordPosition(line, startChar int) {
	te.hasEncodedToken = true
	te.lastEncodedLine, te.lastEncodedStartChar = line, startChar
}

func (te *TokenEncoder) resolveTokenType(token lang.SemanticToken) (semtok.TokenType, bool) {
switch token.Type {
case lang.TokenBlockType:
Expand Down
56 changes: 55 additions & 1 deletion internal/lsp/token_encoder_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl-lang/lang"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/terraform-ls/internal/lsp/semtok"
"github.com/hashicorp/terraform-ls/internal/protocol"
"github.com/hashicorp/terraform-ls/internal/source"
)
Expand Down Expand Up @@ -164,7 +165,7 @@ func TestTokenEncoder_multiLineTokens(t *testing.T) {
}
data := te.Encode()
expectedData := []uint32{
1, 2, 24, 9, 0,
1, 2, 22, 9, 0,
1, 0, 15, 9, 0,
1, 0, 11, 9, 0,
}
Expand All @@ -175,6 +176,59 @@ func TestTokenEncoder_multiLineTokens(t *testing.T) {
}
}

// TestTokenEncoder_multiLineTokensAfterInterpolation verifies that the
// deltas of tokens following a multi-line token are computed against the
// last line segment that was actually emitted — including the case where
// the multi-line token's first line segment has zero length and is
// skipped entirely.
func TestTokenEncoder_multiLineTokensAfterInterpolation(t *testing.T) {
	// NOTE(review): the diff rendering may have collapsed leading
	// whitespace here; the Byte offsets below imply the string literal
	// has two leading spaces per line — confirm against the repository.
	bytes := []byte("  ${local.foo}\n  ${local.bar}\n")
	legend := TokenTypesLegend(serverTokenTypes.AsStrings())
	stringTokenTypeIdx := legend.Index(semtok.TokenType(lang.TokenString))
	referenceTokenTypeIdx := legend.Index(semtok.TokenType(lang.TokenReferenceStep))

	te := &TokenEncoder{
		Lines: source.MakeSourceLines("test.tf", bytes),
		Tokens: []lang.SemanticToken{
			// Multi-line string token starting at the very end of line 1
			// (on the newline) and ending after the leading spaces of
			// line 2 — its line-1 segment has zero visible length.
			{
				Type: lang.TokenString,
				Range: hcl.Range{
					Filename: "test.tf",
					Start: hcl.Pos{Line: 1, Column: 15, Byte: 14},
					End: hcl.Pos{Line: 2, Column: 3, Byte: 17},
				},
			},
			// "local" reference step on line 2.
			{
				Type: lang.TokenReferenceStep,
				Range: hcl.Range{
					Filename: "test.tf",
					Start: hcl.Pos{Line: 2, Column: 5, Byte: 19},
					End: hcl.Pos{Line: 2, Column: 10, Byte: 24},
				},
			},
			// "bar" reference step on line 2.
			{
				Type: lang.TokenReferenceStep,
				Range: hcl.Range{
					Filename: "test.tf",
					Start: hcl.Pos{Line: 2, Column: 11, Byte: 25},
					End: hcl.Pos{Line: 2, Column: 14, Byte: 28},
				},
			},
		},
		ClientCaps: protocol.SemanticTokensClientCapabilities{
			TokenTypes: serverTokenTypes.AsStrings(),
			TokenModifiers: serverTokenModifiers.AsStrings(),
		},
	}

	data := te.Encode()
	// Each entry is (deltaLine, deltaStartChar, length, type, modifiers).
	// The string token's zero-length line-1 segment is skipped, so the
	// first emitted entry is its line-2 segment (length 2, at char 0);
	// the two reference tokens then delta against the previous entry on
	// the same line (4-0=4 and 10-4=6 respectively).
	expectedData := []uint32{
		1, 0, 2, uint32(stringTokenTypeIdx), 0,
		0, 4, 5, uint32(referenceTokenTypeIdx), 0,
		0, 6, 3, uint32(referenceTokenTypeIdx), 0,
	}

	if diff := cmp.Diff(expectedData, data); diff != "" {
		t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v",
			expectedData, data)
	}
}

func TestTokenEncoder_deltaStartCharBug(t *testing.T) {
bytes := []byte(`resource "aws_iam_role_policy" "firehose_s3_access" {
}
Expand Down
Loading