diff --git a/grammars/xml.cson b/grammars/xml.cson
index be5fabd..ce30b98 100644
--- a/grammars/xml.cson
+++ b/grammars/xml.cson
@@ -406,8 +406,8 @@
         'captures':
           '0':
             'name': 'punctuation.definition.comment.xml'
         'end': '--%>'
         'name': 'comment.block.xml'
       }
       {
         'begin': '<!--'
         'captures':
           '0':
             'name': 'punctuation.definition.comment.xml'
         'end': '-->'
         'name': 'comment.block.xml'
         'patterns': [
           {
             'begin': '--(?!>)'
-            'captures':
-              '0':
-                'name': 'invalid.illegal.bad-comments-or-CDATA.xml'
-            'end': '--'
+            'end': '(?=-->)'
+            'name': 'invalid.illegal.bad-comments-or-CDATA.xml'
           }
         ]
       }
diff --git a/spec/xml-spec.coffee b/spec/xml-spec.coffee
index 6320738..7250c78 100644
--- a/spec/xml-spec.coffee
+++ b/spec/xml-spec.coffee
@@ -25,18 +25,36 @@ describe "XML grammar", ->
     expect(lines[2][1]).toEqual value: '<!--', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']

+  it 'tokenizes comments', ->
+    {tokens} = grammar.tokenizeLine('<!-- comment -->')
+    expect(tokens[0]).toEqual value: '<!--', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
+    expect(tokens[1]).toEqual value: ' comment ', scopes: ['text.xml', 'comment.block.xml']
+    expect(tokens[2]).toEqual value: '-->', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
+
   it 'tokenizes comment endings with more than two dashes as invalid', ->
     {tokens} = grammar.tokenizeLine('<!-- comment --->')
     expect(tokens[0]).toEqual value: '<!--', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
     expect(tokens[1]).toEqual value: ' comment ', scopes: ['text.xml', 'comment.block.xml']
     expect(tokens[2]).toEqual value: '--->', scopes: ['text.xml', 'comment.block.xml', 'invalid.illegal.bad-comments-or-CDATA.xml']

   it 'tokenizes comments with two dashes not followed by ">" as invalid', ->
     {tokens} = grammar.tokenizeLine('<!-- -- comment -->')
     expect(tokens[0]).toEqual value: '<!--', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
     expect(tokens[1]).toEqual value: ' ', scopes: ['text.xml', 'comment.block.xml']
+    expect(tokens[2]).toEqual value: '-- comment ', scopes: ['text.xml', 'comment.block.xml', 'invalid.illegal.bad-comments-or-CDATA.xml']
+    expect(tokens[3]).toEqual value: '-->', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
+
+  it 'tokenizes after invalid comment only if comment was properly closed', ->
+    {tokens} = grammar.tokenizeLine('<!-- comment --->--><n></n>')
+    expect(tokens[0]).toEqual value: '<!--', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
+    expect(tokens[1]).toEqual value: ' comment ', scopes: ['text.xml', 'comment.block.xml']
+    expect(tokens[2]).toEqual value: '--->', scopes: ['text.xml', 'comment.block.xml', 'invalid.illegal.bad-comments-or-CDATA.xml']
+    expect(tokens[3]).toEqual value: '-->', scopes: ['text.xml', 'comment.block.xml', 'punctuation.definition.comment.xml']
+    expect(tokens[4]).toEqual value: '<', scopes: ['text.xml', 'meta.tag.no-content.xml', 'punctuation.definition.tag.xml']
+    expect(tokens[5]).toEqual value: 'n', scopes: ['text.xml', 'meta.tag.no-content.xml', 'entity.name.tag.xml', 'entity.name.tag.localname.xml']
+    expect(tokens[6]).toEqual value: '>', scopes: ['text.xml', 'meta.tag.no-content.xml', 'punctuation.definition.tag.xml']
+    expect(tokens[7]).toEqual value: '</', scopes: ['text.xml', 'meta.tag.no-content.xml', 'punctuation.definition.tag.xml']
+    expect(tokens[8]).toEqual value: 'n', scopes: ['text.xml', 'meta.tag.no-content.xml', 'entity.name.tag.xml', 'entity.name.tag.localname.xml']
+    expect(tokens[9]).toEqual value: '>', scopes: ['text.xml', 'meta.tag.no-content.xml', 'punctuation.definition.tag.xml']
+
it "tokenizes empty element meta.tag.no-content.xml", ->
{tokens} = grammar.tokenizeLine('')