fix tokenizing js comments

This commit is contained in:
Fabian Jakobs 2011-09-06 15:55:27 +02:00
commit 983771a2be
2 changed files with 11 additions and 0 deletions

View file

@@ -154,6 +154,9 @@ var JavaScriptHighlightRules = function() {
// makes sure we don't mix up regexps with the division operator
"regex_allowed": [
{
token : "comment",
regex : "\\/\\/.*$"
}, {
token: "string.regexp",
regex: "\\/(?:(?:\\[(?:\\\\]|[^\\]])+\\])"
+ "|(?:\\\\/|[^\\]/]))*"

View file

@@ -142,6 +142,14 @@ module.exports = {
"test tokenize identifier with umlauts": function() {
var tokens = this.tokenizer.getLineTokens("füße", "start").tokens;
assert.equal(1, tokens.length);
},
"test // is not a regexp": function() {
var tokens = this.tokenizer.getLineTokens("{ // 123", "start").tokens;
assert.equal(3, tokens.length);
assert.equal("lparen", tokens[0].type);
assert.equal("text", tokens[1].type);
assert.equal("comment", tokens[2].type);
}
};