fix tokenizing js comments
This commit is contained in:
parent
c9badb0a64
commit
983771a2be
2 changed files with 11 additions and 0 deletions
|
|
@ -154,6 +154,9 @@ var JavaScriptHighlightRules = function() {
|
|||
// makes sure we don't mix up regexps with the division operator
|
||||
"regex_allowed": [
|
||||
{
|
||||
token : "comment",
|
||||
regex : "\\/\\/.*$"
|
||||
}, {
|
||||
token: "string.regexp",
|
||||
regex: "\\/(?:(?:\\[(?:\\\\]|[^\\]])+\\])"
|
||||
+ "|(?:\\\\/|[^\\]/]))*"
|
||||
|
|
|
|||
|
|
@ -142,6 +142,14 @@ module.exports = {
|
|||
"test tokenize identifier with umlauts": function() {
|
||||
var tokens = this.tokenizer.getLineTokens("füße", "start").tokens;
|
||||
assert.equal(1, tokens.length);
|
||||
},
|
||||
|
||||
"test // is not a regexp": function() {
|
||||
var tokens = this.tokenizer.getLineTokens("{ // 123", "start").tokens;
|
||||
assert.equal(3, tokens.length);
|
||||
assert.equal("lparen", tokens[0].type);
|
||||
assert.equal("text", tokens[1].type);
|
||||
assert.equal("comment", tokens[2].type);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue