Fix CoffeeScript arrow-function highlight regex (require balanced parens, escape "-" in character class) and add tokenizer tests

This commit is contained in:
ukyo 2012-09-30 18:58:45 +09:00
commit 706652048f
2 changed files with 114 additions and 1 deletions

View file

@ -160,7 +160,14 @@ define(function(require, exports, module) {
"entity.name.function", "text", "keyword.operator", "text",
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
],
regex : "(" + identifier + ")(\\s*)(=|:)(\\s*)(\\(?)([^)]*)(\\)?)(\\s*)([-=]>)"
regex : "(" + identifier + ")(\\s*)(=|:)(\\s*)(\\()([^)]*)(\\))(\\s*)([\\-=]>)"
}, {
//play = ->
//play : ->
token : [
"entity.name.function", "text", "keyword.operator", "text", "storage.type"
],
regex : "(" + identifier + ")(\\s*)(=|:)(\\s*)([\\-=]>)"
}, {
//(args) ->
token : [

View file

@ -48,7 +48,113 @@ module.exports = {
assert.equal(tokens.length, 1);
assert.equal(tokens[0].type, "keyword");
},
"test: tokenize function: 'foo = (args) ->'": function() {
var tokens = this.tokenizer.getLineTokens("foo = (args) ->", "start").tokens;
console.log(tokens);
assert.equal(tokens.length, 9);
[
"entity.name.function", "text", "keyword.operator", "text",
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function: 'window.foo = (args) ->'": function() {
var tokens = this.tokenizer.getLineTokens("window.foo = (args) ->", "start").tokens;
console.log(tokens);
assert.equal(tokens.length, 11);
[
"variable.language", "punctuation.operator", "entity.name.function", "text", "keyword.operator", "text",
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function: 'foo : (args) ->'": function() {
var tokens = this.tokenizer.getLineTokens("foo : (args) ->", "start").tokens;
assert.equal(tokens.length, 9);
[
"entity.name.function", "text", "keyword.operator", "text",
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function: 'foo = ->'": function() {
var tokens = this.tokenizer.getLineTokens("foo = ->", "start").tokens;
assert.equal(tokens.length, 5);
[
"entity.name.function", "text", "keyword.operator", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function: 'foo : ->'": function() {
var tokens = this.tokenizer.getLineTokens("foo : ->", "start").tokens;
assert.equal(tokens.length, 5);
[
"entity.name.function", "text", "keyword.operator", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function: '(args) ->'": function() {
var tokens = this.tokenizer.getLineTokens("(args) ->", "start").tokens;
assert.equal(tokens.length, 5);
[
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize function(callback): 'foo bar: 1, (args) ->'": function() {
var tokens = this.tokenizer.getLineTokens("foo bar: 1, (args) ->", "start").tokens;
assert.equal(tokens.length, 13);
[
"identifier", "text", "identifier", "punctuation.operator", "text", "constant.numeric", "punctuation.operator", "text",
"paren.lparen", "variable.parameter", "paren.rparen", "text", "storage.type"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize class: 'class Foo'": function() {
var tokens = this.tokenizer.getLineTokens("class Foo", "start").tokens;
assert.equal(tokens.length, 3);
[
"keyword", "text", "language.support.class"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize class 'class Foo extends Bar'": function() {
var tokens = this.tokenizer.getLineTokens("class Foo extends Bar", "start").tokens;
assert.equal(tokens.length, 7);
[
"keyword", "text", "language.support.class", "text", "keyword", "text", "language.support.class"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
"test: tokenize illegal name property: 'foo.static.function'": function() {
var tokens = this.tokenizer.getLineTokens("foo.static.function", "start").tokens;
assert.equal(tokens.length, 5);
[
"identifier", "punctuation.operator", "identifier", "punctuation.operator", "identifier"
].forEach(function(type, i) {
assert.equal(tokens[i].type, type);
});
},
// TODO: re-enable once string interpolation highlighting is implemented (disabled via the "!" test-name prefix)
"!test tokenize string with interpolation": function() {
var tokens = this.tokenizer.getLineTokens('"#{ 22 / 7 } is a decent approximation of π"', "start").tokens;