diff --git a/lib/ace/mode/html_highlight_rules_test.js b/lib/ace/mode/html_highlight_rules_test.js
index 1b1431fc..19c96f78 100644
--- a/lib/ace/mode/html_highlight_rules_test.js
+++ b/lib/ace/mode/html_highlight_rules_test.js
@@ -45,46 +45,146 @@ define(function(require, exports, module) {
var HtmlMode = require("./html").Mode;
var assert = require("../test/assertions");
+var testData = {
+ "test: tokenize embedded script" : [{
 text: "<script a='a'>var</script>'123'",
+ state: ["start", "start"],
+ tokens: [{
+ type: "meta.tag",
+ value: "<"
+ }, {
+ type: "meta.tag.tag-name.script",
+ value: "script"
+ }, {
+ type: "text",
+ value: " "
+ }, {
+ type: "entity.other.attribute-name",
+ value: "a"
+ }, {
+ type: "keyword.operator",
+ value: "="
+ }, {
+ type: "string",
+ value: "'a'"
+ }, {
+ type: "meta.tag",
+ value: ">"
+ }, {
+ type: "storage.type",
+ value: "var"
+ }, {
+ type: "meta.tag",
 value: "</"
+ }, {
+ type: "meta.tag.tag-name.script",
+ value: "script"
+ }, {
+ type: "meta.tag",
+ value: ">"
+ }, {
+ type: "text",
+ value: "'123'"
+ }]
+ }],
+
+ "test: tokenize multiline attribute value with double quotes": [{
 text: "def\">",
+ state: [ "tag_qqstring", "start" ],
+ tokens: [ {
+ type: "string",
+ value: "def\""
+ }, {
+ type: "meta.tag",
+ value: ">"
+ }
+ ]
+ }],
+
+ "test: tokenize multiline attribute value with single quotes": [{
 text: "def\"'>",
+ state: [ "tag_qstring", "start" ],
+ tokens: [ {
+ type: "string",
+ value: "def\"'"
+ }, {
+ type: "meta.tag",
+ value: ">"
+ }
+ ]
+ }]
+};
+
+function generateTest(exampleData) {
+ return function testTokenizer() {
+ for (var i = 0; i < exampleData.length; i++) {
+ var s = exampleData[i];
+ var lineTokens = tokenizer.getLineTokens(s.text, s.state[0]);
+
+ assert.equal(
+ JSON.stringify(lineTokens, null, 4),
+ JSON.stringify({tokens:s.tokens, state: s.state[1]}, null, 4)
+ );
+ }
+ }
+}
+
+var tokenizer;
module.exports = {
setUp : function() {
- this.tokenizer = new HtmlMode().getTokenizer();
- },
-
- "test: tokenize embedded script" : function() {
- var line = "<script a='a'>var</script>'123'";
- var tokens = this.tokenizer.getLineTokens(line, "start").tokens;
-
- assert.equal(12, tokens.length);
- assert.equal("meta.tag", tokens[0].type);
- assert.equal("meta.tag.script", tokens[1].type);
- assert.equal("text", tokens[2].type);
- assert.equal("entity.other.attribute-name", tokens[3].type);
- assert.equal("keyword.operator", tokens[4].type);
- assert.equal("string", tokens[5].type);
- assert.equal("meta.tag", tokens[6].type);
- assert.equal("storage.type", tokens[7].type);
- assert.equal("meta.tag", tokens[8].type);
- assert.equal("meta.tag.script", tokens[9].type);
- assert.equal("meta.tag", tokens[10].type);
- assert.equal("text", tokens[11].type);
- },
-
- "test: tokenize multiline attribute value with double quotes": function() {
- var line1 = this.tokenizer.getLineTokens('<a href="abc', "start");
- var t1 = line1.tokens;
- var t2 = this.tokenizer.getLineTokens('def">', line1.state).tokens;
- assert.equal(t1[t1.length-1].type, "string");
- assert.equal(t2[0].type, "string");
- },
-
- "test: tokenize multiline attribute value with single quotes": function() {
- var line1 = this.tokenizer.getLineTokens("<a href='abc", "start");
- var t1 = line1.tokens;
- var t2 = this.tokenizer.getLineTokens('def"\'>', line1.state).tokens;
- assert.equal(t1[t1.length-1].type, "string");
- assert.equal(t2[0].type, "string");
+ tokenizer = new HtmlMode().getTokenizer();
}
-};
+}
+
+for (var i in testData) {
+ module.exports[i] = generateTest(testData[i])
+}
});
diff --git a/lib/ace/mode/xml_highlight_rules_test.js b/lib/ace/mode/xml_highlight_rules_test.js
index 8f764b2d..19e142c9 100644
--- a/lib/ace/mode/xml_highlight_rules_test.js
+++ b/lib/ace/mode/xml_highlight_rules_test.js
@@ -45,57 +45,169 @@ define(function(require, exports, module) {
var XmlMode = require("./xml").Mode;
var assert = require("../test/assertions");
-module.exports = {
-
- name: "XML Tokenizer",
-
- setUp : function() {
- this.tokenizer = new XmlMode().getTokenizer();
- },
+var testData = {
+ "test: tokenize1" : [{
 text: "<Juhu>//Juhu Kinners</Kinners>",
+ state: ["start", "start"],
+ tokens: [
+ {
+ type: "meta.tag",
+ value: "<"
+ },
+ {
+ type: "meta.tag.tag-name",
+ value: "Juhu"
+ },
+ {
+ type: "meta.tag",
+ value: ">"
+ },
+ {
+ type: "text",
+ value: "//Juhu Kinners"
+ },
+ {
+ type: "meta.tag",
 value: "</"
+ },
+ {
+ type: "meta.tag.tag-name",
+ value: "Kinners"
+ },
+ {
+ type: "meta.tag",
+ value: ">"
+ }
+ ]
+ }],
- "test: tokenize1" : function() {
- var line = "<Juhu>//Juhu Kinners</Kinners>";
- var tokens = this.tokenizer.getLineTokens(line, "start").tokens;
+ "test: two tags in the same lines should be in separate tokens": [{
 text: "<Juhu><Kinners>",
+ state: [ "start", "start"],
+ tokens: [
+ {
+ type: "meta.tag",
+ value: "<"
+ },
+ {
+ type: "meta.tag.tag-name",
+ value: "Juhu"
+ },
+ {
+ type: "meta.tag",
+ value: ">"
+ },
+ {
+ type: "meta.tag",
+ value: "<"
+ },
+ {
+ type: "meta.tag.tag-name",
+ value: "Kinners"
+ },
+ {
+ type: "meta.tag",
+ value: ">"
+ }
+ ]
+ }],
- assert.equal(3, tokens.length);
- assert.equal("meta.tag", tokens[0].type);
- assert.equal("text", tokens[1].type);
- assert.equal("meta.tag", tokens[2].type);
- },
-
- "test: two tags in the same lines should be in separate tokens" : function() {
- var line = "<Juhu><Kinners>";
- var tokens = this.tokenizer.getLineTokens(line, "start").tokens;
-
- assert.equal(2, tokens.length);
- assert.equal("meta.tag", tokens[0].type);
- assert.equal("meta.tag", tokens[1].type);
-
- assert.equal("<Juhu>", tokens[0].value);
- assert.equal("<Kinners>", tokens[1].value);
- },
-
- "test: multiline attributes": function() {
- var multiLine = ['<el a="x',
-     'y" b="{',
-     '}"/>'];
-
- var state = "start";
- var multiLineTokens = multiLine.map(function(line) {
- var tokens = this.tokenizer.getLineTokens(line, state);
- state = tokens.state;
- return tokens.tokens;
- }, this);
-
- assert.equal(multiLineTokens[0].length, 5);
- assert.equal(multiLineTokens[1].length, 5);
- assert.equal(multiLineTokens[2].length, 2);
-
- assert.equal(multiLineTokens[0][4].type, "string");
- assert.equal(multiLineTokens[1][0].type, "string");
- assert.equal(multiLineTokens[1][4].type, "string");
- assert.equal(multiLineTokens[2][0].type, "string");
- }
+ "test: multiline attributes": [{
 text: "}\"/>",
+ state: ["tag_qqstring", "start"],
+ tokens: [
+ {
+ type: "string",
+ value: "}\""
+ },
+ {
+ type: "meta.tag",
+ value: "/>"
+ }
+ ]
+ }]
};
+function generateTest(exampleData) {
+ return function testTokenizer() {
+ for (var i = 0; i < exampleData.length; i++) {
+ var s = exampleData[i];
+ var lineTokens = tokenizer.getLineTokens(s.text, s.state[0]);
+
+ assert.equal(
+ JSON.stringify(lineTokens, null, 4),
+ JSON.stringify({tokens:s.tokens, state: s.state[1]}, null, 4)
+ );
+ }
+ }
+}
+
+var tokenizer;
+module.exports = {
+ name: "XML Tokenizer",
+
+ setUp : function() {
+ tokenizer = new XmlMode().getTokenizer();
+ }
+}
+
+for (var i in testData) {
+ module.exports[i] = generateTest(testData[i])
+}
});
if (typeof module !== "undefined" && module === require.main) {