Merge pull request #2186 from ajaxorg/highlighter-fixes

Highlighter fixes
This commit is contained in:
Lennart Kats 2014-10-11 17:16:48 +02:00
commit 3af776df12
18 changed files with 1180 additions and 189 deletions

View file

@ -184,7 +184,7 @@ function jsFileList(path, filter) {
filter = /_test/;
return fs.readdirSync(path).map(function(x) {
if (x.slice(-3) == ".js" && !filter.test(x) && !/\s/.test(x))
if (x.slice(-3) == ".js" && !filter.test(x) && !/\s|BASE|(\b|_)dummy(\b|_)/.test(x))
return x.slice(0, -3);
}).filter(Boolean);
}

View file

@ -0,0 +1,12 @@
-- Kitchen-sink demo document for the Elm mode: an analog clock.
{- Ace {- 4 -} Elm -}
-- NOTE(review): the nested {- -} block comment above deliberately
-- exercises the highlighter's nested-comment handling.
main = lift clock (every second)
clock t = collage 400 400 [ filled lightGrey (ngon 12 110)
, outlined (solid grey) (ngon 12 110)
, hand orange 100 t
, hand charcoal 100 (t/60)
, hand charcoal 60 (t/720) ]
-- Draws one clock hand of the given colour and length at time `time`.
hand clr len time =
let angle = degrees (90 - 6 * inSeconds time)
in traced (solid clr) <| segment (0,0) (len * cos angle, len * sin angle)

View file

@ -65,8 +65,11 @@ var supportedModes = {
Diff: ["diff|patch"],
Dockerfile: ["^Dockerfile"],
Dot: ["dot"],
Dummy: ["dummy"],
DummySyntax: ["dummy"],
Eiffel: ["e"],
EJS: ["ejs"],
Elm: ["elm"],
Erlang: ["erl|hrl"],
Forth: ["frt|fs|ldr"],
FTL: ["ftl"],

View file

@ -35,22 +35,28 @@ var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var DocCommentHighlightRules = function() {
this.$rules = {
"start" : [ {
token : "comment.doc.tag",
regex : "@[\\w\\d_]+" // TODO: fix email addresses
}, {
token : "comment.doc.tag",
regex : "\\bTODO\\b"
}, {
defaultToken : "comment.doc"
},
DocCommentHighlightRules.getTagRule(),
{
defaultToken : "comment.doc",
caseInsensitive: true
}]
};
};
oop.inherits(DocCommentHighlightRules, TextHighlightRules);
/**
 * Returns a shared highlight rule for TODO/FIXME/XXX/HACK markers inside
 * doc comments; reused by other modes (e.g. the JavaScript comment states).
 * @param {string} [start] - unused; kept for signature symmetry with
 *     getStartRule/getEndRule.
 * @returns {{token: string, regex: string}} a single tokenizer rule.
 */
DocCommentHighlightRules.getTagRule = function(start) {
    return {
        token : "comment.doc.tag.storage.type",
        regex : "\\b(?:TODO|FIXME|XXX|HACK)\\b"
    };
}; // semicolon added: function-expression assignments are statements
DocCommentHighlightRules.getStartRule = function(start) {
return {
token : "comment.doc", // doc comment

58
lib/ace/mode/elm.js Normal file
View file

@ -0,0 +1,58 @@
/* ***** BEGIN LICENSE BLOCK *****
* Distributed under the BSD license:
*
* Copyright (c) 2012, Ajax.org B.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Ajax.org B.V. nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
/*
THIS FILE WAS AUTOGENERATED BY mode.tmpl.js
*/
define(function(require, exports, module) {
"use strict";

// Elm editing mode: wires the Elm highlighter and a C-style folder
// into Ace's plain-text base mode.
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var ElmHighlightRules = require("./elm_highlight_rules").ElmHighlightRules;
// TODO: pick appropriate fold mode
var CStyleFoldMode = require("./folding/cstyle").FoldMode;

var Mode = function() {
    this.HighlightRules = ElmHighlightRules;
    this.foldingRules = new CStyleFoldMode();
};
oop.inherits(Mode, TextMode);

(function() {
    // Comment syntax used by the toggle-comment commands.
    this.lineCommentStart = "--";
    this.blockComment = {start: "{-", end: "-}"};
    // Extra logic goes here.
    this.$id = "ace/mode/elm";
}).call(Mode.prototype);

exports.Mode = Mode;
});

View file

@ -0,0 +1,162 @@
/* ***** BEGIN LICENSE BLOCK *****
* Distributed under the BSD license:
*
* Copyright (c) 2012, Ajax.org B.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Ajax.org B.V. nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
// TODO check with https://github.com/deadfoxygrandpa/Elm.tmLanguage
define(function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
// Syntax highlighter for Elm. Rule order within each state matters:
// earlier rules win, so strings/chars are matched before operators.
var ElmHighlightRules = function() {
var keywordMapper = this.createKeywordMapper({
"keyword": "as|case|class|data|default|deriving|do|else|export|foreign|" +
"hiding|jsevent|if|import|in|infix|infixl|infixr|instance|let|" +
"module|newtype|of|open|then|type|where|_|port|\u03BB"
}, "identifier");
// Escapes allowed in char/string literals: \123, \', \", \\, \&, \t, \r, \n, \b, \v, \f
var escapeRe = /\\(\d+|['"\\&trnbvf])/;
var smallRe = /[a-z_]/.source;
var largeRe = /[A-Z]/.source;
var idRe = /[a-z_A-Z0-9\']/.source;
this.$rules = {
start: [{
token: "string.start",
regex: '"',
next: "string"
}, {
// character literal, e.g. 'a' or '\n' (trailing quote optional while typing)
token: "string.character",
regex: "'(?:" + escapeRe.source + "|.)'?"
}, {
// hex, octal, decimal and float/exponent literals
regex: /0(?:[xX][0-9A-Fa-f]+|[oO][0-7]+)|\d+(\.\d+)?([eE][-+]?\d*)?/,
token: "constant.numeric"
}, {
token : "keyword",
regex : /\.\.|\||:|=|\\|\"|->|<-|\u2192/
}, {
token : "keyword.operator",
regex : /[-!#$%&*+.\/<=>?@\\^|~:\u03BB\u2192]+/
}, {
token : "operator.punctuation",
regex : /[,;`]/
}, {
// Capitalized identifier: "Module." prefix vs. a constructor/type name
regex : largeRe + idRe + "+\\.?",
token : function(value) {
if (value[value.length - 1] == ".")
return "entity.name.function";
return "constant.language";
}
}, {
// lower-case identifier at start of line: a top-level definition
regex : "^" + smallRe + idRe + "+",
token : function(value) {
return "constant.language";
}
}, {
token : keywordMapper,
regex : "[\\w\\xff-\\u218e\\u2455-\\uffff]+\\b"
}, {
// "{-" opens a nestable block comment, "{-#" a doc/pragma comment
regex: "{-#?",
token: "comment.start",
onMatch: function(value, currentState, stack) {
this.next = value.length == 2 ? "blockComment" : "docComment";
return this.token;
}
}, {
// Elm's embedded-markdown literal: [markdown| ... |]
token: "variable.language",
regex: /\[markdown\|/,
next: "markdown"
}, {
token: "paren.lparen",
regex: /[\[({]/
}, {
token: "paren.rparen",
regex: /[\])}]/
}, ],
markdown: [{
regex: /\|\]/,
next: "start"
}, {
defaultToken : "string"
}],
// blockComment/docComment push themselves to support nested {- {- -} -}
blockComment: [{
regex: "{-",
token: "comment.start",
push: "blockComment"
}, {
regex: "-}",
token: "comment.end",
next: "pop"
}, {
defaultToken: "comment"
}],
docComment: [{
regex: "{-",
token: "comment.start",
push: "docComment"
}, {
regex: "-}",
token: "comment.end",
next: "pop"
}, {
defaultToken: "doc.comment"
}],
string: [{
token: "constant.language.escape",
regex: escapeRe,
}, {
// backslash before whitespace/EOL starts a multi-line string gap
token: "text",
regex: /\\(\s|$)/,
next: "stringGap"
}, {
token: "string.end",
regex: '"',
next: "start"
}],
stringGap: [{
token: "text",
regex: /\\/,
next: "string"
}, {
// anything other than the closing backslash is an error in a gap
token: "error",
regex: "",
next: "start"
}],
};
this.normalizeRules();
};
oop.inherits(ElmHighlightRules, TextHighlightRules);
exports.ElmHighlightRules = ElmHighlightRules;
});

View file

@ -302,20 +302,24 @@ var JavaScriptHighlightRules = function(options) {
}
],
"comment_regex_allowed" : [
DocCommentHighlightRules.getTagRule(),
{token : "comment", regex : "\\*\\/", next : "start"},
{defaultToken : "comment"}
{defaultToken : "comment", caseInsensitive: true}
],
"comment" : [
DocCommentHighlightRules.getTagRule(),
{token : "comment", regex : "\\*\\/", next : "no_regex"},
{defaultToken : "comment"}
{defaultToken : "comment", caseInsensitive: true}
],
"line_comment_regex_allowed" : [
DocCommentHighlightRules.getTagRule(),
{token : "comment", regex : "$|^", next : "start"},
{defaultToken : "comment"}
{defaultToken : "comment", caseInsensitive: true}
],
"line_comment" : [
DocCommentHighlightRules.getTagRule(),
{token : "comment", regex : "$|^", next : "no_regex"},
{defaultToken : "comment"}
{defaultToken : "comment", caseInsensitive: true}
],
"qqstring" : [
{

View file

@ -29,10 +29,6 @@
* ***** END LICENSE BLOCK ***** */
/* This file was autogenerated from https://raw.github.com/dbp/sublime-rust/master/Rust.tmLanguage (uuid: ) */
/****************************************************************************************
* IT MIGHT NOT BE PERFECT ...But it's a good start from an existing *.tmlanguage file. *
* fileTypes *
****************************************************************************************/
define(function(require, exports, module) {
"use strict";
@ -55,6 +51,35 @@ var RustHighlightRules = function() {
next: 'pop' },
{ include: '#rust_escaped_character' },
{ defaultToken: 'string.quoted.single.source.rust' } ] },
{
stateName: "bracketedComment",
onMatch : function(value, currentState, stack){
stack.unshift(this.next, value.length - 1, currentState);
return "string.quoted.raw.source.rust";
},
regex : /r#*"/,
next : [
{
onMatch : function(value, currentState, stack) {
var token = "string.quoted.raw.source.rust";
if (value.length >= stack[1]) {
if (value.length > stack[1])
token = "invalid";
stack.shift();
stack.shift();
this.next = stack.shift();
} else {
this.next = "";
}
return token;
},
regex : /"#*/,
next : "start"
}, {
defaultToken : "string.quoted.raw.source.rust"
}
]
},
{ token: 'string.quoted.double.source.rust',
regex: '"',
push:

26
tool/Readme.md Normal file
View file

@ -0,0 +1,26 @@
Helper Scripts for Ace
======================
To use these scripts you need to install Node.js and run `npm install` in this directory.
# add_mode.js
Run
```
node add_mode.js ModeName "extension1|extension2|^FullName"
```
to create all the files needed for a new mode named `ModeName`
This adds stubs for:
`ace/mode/mode_name.js`
`ace/mode/mode_name_highlight_rules.js`
`ace/snippets/mode_name.js`
`ace/demo/kitchen_sink/docs/mode_name.extension1`
and adds entry for the new mode to `ace/ext/modelist.js`
# tmlanguage.js
```
node tmlanguage.js ./templates/dummy.JSON-tmLanguage
```

View file

@ -2,100 +2,105 @@ var fs = require('fs');
var lib = require('./lib');
var path = require('path');
var args = process.argv.slice(2);
function main(displayName, extRe) {
var name = lib.snakeCase(displayName).replace(/[^\w]/g, "");
/** demo **/
var demoFileExt = extRe.split("|")[0] || name;
var demoFileName = demoFileExt[0] == "^" ? demoFileExt.substr(1) : name + "." + demoFileExt;
var demoFilePath = lib.AceRoot + "demo/kitchen-sink/docs/" + demoFileName;
fs.writeFileSync(demoFilePath, "TODO add a nice demo!", "utf8");
console.log("Created demo file at: " + path.normalize(demoFilePath));
var displayName = args[0];
var extRe = args[1];
if (!displayName || ! extRe) {
console.log("Usage: ModeName ext1|ext2");
process.exit(1);
/** mode **/
var template = fs.readFileSync(__dirname + "/templates/mode.js", "utf8");
var modePath = lib.AceLib + "ace/mode/" + name + ".js";
var text = lib.fillTemplate(template, {
languageHighlightFilename: name,
languagename: name,
lineCommentStart: "TODO",
blockCommentStart: "TODO",
blockCommentEnd: "TODO"
});
fs.writeFileSync(modePath, text);
console.log("Created mode file at: " + path.normalize(modePath));
/** highlight rules **/
template = fs.readFileSync(__dirname + "/templates/highlight_rules.js", "utf8");
var hlPath = lib.AceLib + "ace/mode/" + name + "_highlight_rules.js";
template = template.replace(/\/\* THIS[\s\S]*?\*{3}\/\s*/, "");
text = lib.fillTemplate(template, {
language: name,
languageTokens: '{\n\
start: [{\n\
token: "string.start",\n\
regex: \'"\',\n\
next: "qstring"\n\
}],\n\
qstring: [{\n\
token: "escape",\n\
regex: /\\\\./,\n\
}, {\n\
token: "string.end",\n\
regex: \'"\',\n\
next: "start"\n\
}],\n\
}'
});
fs.writeFileSync(hlPath, text);
console.log("Created mode file at: " + path.normalize(hlPath));
/** snippets **/
template = fs.readFileSync(__dirname + "/templates/snippets.js", "utf8");
var snipetsPath = lib.AceLib + "ace/snippets/" + name + ".js";
text = lib.fillTemplate(template, {
languagename: name,
snippets: ""
});
fs.writeFileSync(snipetsPath, text);
console.log("Created snippets file at: " + path.normalize(snipetsPath));
/** modelist **/
var modelistPath = lib.AceLib + "ace/ext/modelist.js";
var modelist = fs.readFileSync(modelistPath, "utf8").replace(/\r\n?/g, "\n");
modelist = modelist.replace(/(supportedModes = {\n)([\s\S]*?)(\n^};)/m, function(_, m1, m2, m3) {
var langs = m2.split(/,\n/);
var offset = langs[0].trim().indexOf("[");
var padding = Array(Math.max(offset - displayName.length - 1, 0) + 1).join(" ");
var newLang = " " + displayName + ":" + padding + "[\"" + extRe + "\"]";
langs = langs.concat(newLang).map(function(x) {
return {
value: x,
id: x.match(/[^"':\s]+/)[0].toLowerCase()
};
});
langs[langs.length - 1].isNew = true;
langs = langs.filter(function(x) {
console.log(x.id, displayName)
return x.id != displayName.toLowerCase() || x.isNew;
});
langs = langs.sort(function(a, b) {
return a.id.localeCompare(b.id);
}).map(function(x) {
return x.value;
});
return m1 + langs.join(",\n") + m3;
});
fs.writeFileSync(modelistPath, modelist, "utf8");
console.log("Updated modelist at: " + path.normalize(modelistPath));
}
var name = lib.snakeCase(displayName).replace(/[^\w]/g, "");
/** demo **/
var demoFileExt = extRe.split("|")[0] || name;
var demoFileName = demoFileExt[0] == "^" ? demoFileExt.substr(1) : name + "." + demoFileExt;
var demoFilePath = lib.AceRoot + "demo/kitchen-sink/docs/" + demoFileName;
fs.writeFileSync(demoFilePath, "TODO add a nice demo!", "utf8");
console.log("Created demo file at: " + path.normalize(demoFilePath));
/** mode **/
var template = fs.readFileSync(__dirname + "/templates/mode.js", "utf8");
var modePath = lib.AceLib + "ace/mode/" + name + ".js";
var text = lib.fillTemplate(template, {
languageHighlightFilename: name,
languagename: name,
lineCommentStart: "TODO",
blockCommentStart: "TODO",
blockCommentEnd: "TODO"
});
fs.writeFileSync(modePath, text);
console.log("Created mode file at: " + path.normalize(modePath));
/** highlight rules **/
template = fs.readFileSync(__dirname + "/templates/highlight_rules.js", "utf8");
var hlPath = lib.AceLib + "ace/mode/" + name + "_highlight_rules.js";
template = template.replace(/\/\* THIS[\s\S]*?\*{3}\/\s*/, "");
text = lib.fillTemplate(template, {
language: name,
languageTokens: '{\n\
start: [{\n\
token: "string.start",\n\
regex: \'"\',\n\
next: "qstring"\n\
}],\n\
qstring: [{\n\
token: "escape",\n\
regex: /\\\\./,\n\
}, {\n\
token: "string.end",\n\
regex: \'"\',\n\
next: "start"\n\
}],\n\
}'
});
fs.writeFileSync(hlPath, text);
console.log("Created mode file at: " + path.normalize(hlPath));
/** snippets **/
template = fs.readFileSync(__dirname + "/templates/snippets.js", "utf8");
var snipetsPath = lib.AceLib + "ace/snippets/" + name + ".js";
text = lib.fillTemplate(template, {
languagename: name,
snippets: ""
});
fs.writeFileSync(snipetsPath, text);
console.log("Created snippets file at: " + path.normalize(snipetsPath));
/** modelist **/
var modelistPath = lib.AceLib + "ace/ext/modelist.js";
var modelist = fs.readFileSync(modelistPath, "utf8").replace(/\r\n?/g, "\n");
modelist = modelist.replace(/(supportedModes = {\n)([\s\S]*?)(\n^};)/m, function(_, m1, m2, m3) {
var langs = m2.split(/,\n/);
var offset = langs[0].trim().indexOf("[");
var padding = Array(Math.max(offset - displayName.length - 1, 0) + 1).join(" ");
var newLang = " " + displayName + ":" + padding + "[\"" + extRe + "\"]";
langs = langs.concat(newLang).map(function(x) {
return {
value: x,
id: x.match(/[^"':\s]+/)[0].toLowerCase()
};
});
langs[langs.length - 1].isNew = true;
langs = langs.filter(function(x) {
return x.id != name || x.isNew;
});
langs = langs.sort(function(a, b) {
return a.id.localeCompare(b.id);
}).map(function(x) {
return x.value;
});
return m1 + langs.join(",\n") + m3;
});
fs.writeFileSync(modelistPath, modelist, "utf8");
console.log("Updated modelist at: " + path.normalize(modelistPath));
if (!module.parent) {
var args = process.argv.slice(2);
var displayName = args[0];
var extRe = args[1];
if (!displayName || ! extRe) {
console.log("Usage: ModeName ext1|ext2");
process.exit(1);
}
} else {
module.exports = main;
}

View file

@ -5,13 +5,18 @@ var url = require("url");
var https = require("https");
var http = require("http");
exports.parsePlist = function(themeXml, callback) {
var result = "";
plist.parseString(themeXml, function(_, theme) {
result = theme[0];
callback && callback(theme[0]);
});
return result;
exports.parsePlist = function(xmlOrJSON, callback) {
var json;
if (xmlOrJSON[0] == "<") {
plist.parseString(xmlOrJSON, function(_, result) {
json = result[0];
});
} else {
xmlOrJSON = xmlOrJSON.replace(/^\s*\/\/.*/gm, "");
json = JSON.parse(xmlOrJSON)
}
callback && callback(json);
return json;
};
exports.formatJSON = function(object, initialIndent) {

View file

@ -94,7 +94,7 @@ document.getElementById("perfTest").onclick = function() {
}
var tk = new Tokenizer(currentRules);
var testPerf = function(lines, tk){
var testPerf = function(lines, tk) {
var state = "start";
for (var i=0, l = lines.length; i <l; i++) {
state = tk.getLineTokens(lines[i], state).state;
@ -126,7 +126,7 @@ util.bindDropdown("themeEl", function(value) {
function getDeps(src, path) {
var deps = [];
src.replace(/require\((['"])(.*?)\1/g, function(a,b,c){
src.replace(/require\((['"])(.*?)\1/g, function(a,b,c) {
if (c[0] == ".") {
var base = path.split("/");
c.split("/").forEach(function(part) {
@ -151,7 +151,7 @@ function run() {
var path = "ace/mode/new";
var deps = getDeps(src, path);
window.require.undef(path);
src = src.replace("define(", 'define("' + path +'", ["require","exports","module",' + deps +'],');
src = src.replace("define(", 'define("' + path +'", ["require","exports","module",' + deps +'],');
try {
eval(src);
require(["ace/mode/new"], function(e) {

114
tool/regexp_tokenizer.js Normal file
View file

@ -0,0 +1,114 @@
/***** regexp tokenizer */
// Builds an Ace tokenizer whose "language" is Oniguruma/TextMate regular
// expression syntax itself, so *.tmLanguage regexes can be parsed
// token-by-token and rewritten by tool/tmlanguage.js.
require("amd-loader");
var lib = require("./lib");
// NOTE(review): the first Tokenizer require is dead code — the second
// declaration immediately overwrites it; presumably kept to switch back
// once the max-token-count limitation is fixed.
var Tokenizer = require(lib.AceLib+ "ace/tokenizer").Tokenizer;
var Tokenizer = require(lib.AceLib + "ace/tokenizer_dev").Tokenizer; // todo can't use tokenizer because of max token count
var TextHighlightRules = require(lib.AceLib + "ace/mode/text_highlight_rules").TextHighlightRules;
var r = new TextHighlightRules()
r.$rules = {
start: [
// anchors: ^ $ \b \B \A \Z \z \G
{token: "anchor", regex: /[\^\$]|\\[bBAZzG]/, merge:false},
// numbered (\1..\9) and named (\k<name>, \k'name') backreferences
{token: "backRef", regex: /\\([1-9]|k(<\w+\b[+-]?\d>|'\w+\b[+-]?\d'))/, merge:false},
{include: "charTypes", merge:false},
{token: "charclass", regex: /\[\^?/, push: "charclass", merge:false},
{token: "alternation", regex: /\|/, merge:false},
{include: "quantifiers", merge:false},
{include: "groups", merge:false},
{include: "xGroup", merge:true}
],
charTypes: [
// single-character escapes: control chars, octal, hex
{token: "char", regex: /\\([tvnrbfae]|[0-8]{1,3}|x[\dA-Fa-f]{2}|x7[\dA-Fa-f]{7})/, merge:false}, // todo \cx
{token: "charType", regex: /\.|\\[wWsSdDhH]/, merge:false},
{token: "charProperty", regex: /\\p{\w+}/, merge:false},
{token: "char", regex: /\\./, merge:false},
],
quantifiers: [
// ? * + {n,m} with optional lazy (?) or possessive (+) suffix
{token: "quantifier", regex: /([?*+]|{\d+\b,?\d*}|{,\d+})[?+]?/, merge:false}
],
charclass: [
{include: "charTypes", merge:false},
{token: "charclass.start", regex: /\[\^?/, push: "charclass", merge:false},
{token: "charclass.end", regex: /\]/, next: "pop", merge:false}
],
groups: [
// matches every group opener — (, (?:, (?=, (?!, (?<=, (?<!, (?<name>,
// ('name', inline-flag groups (?imx-…) — and the closer ")".
// onMatch emits a rich token object that records group number/name and
// tracks free-spacing (?x) scope on the tokenizer stack.
{token: "group", regex: /[(]([?](#|[imx\-]+:?|:|=|!|<=|<!|>|<\w+>|'\w+'|))?|[)]/,
onMatch: function(val, state, stack) {
if (!stack.groupNumber)
stack.groupNumber = 1;
var isStart = val !== ")";
var t = {depth:0,type: isStart ? "group.start" : "group.end", value: val};
t.groupType = val[2];
if (val == "(") {
t.number = stack.groupNumber++;
t.isGroup = true
} else if (t.groupType == "'" || (t.groupType == "<" && val.slice(-1) == ">")) {
t.name = val.slice(2, -1)
t.isGroup = true
} else if (t.groupType == ":") {
t.isGroup = true
}
// track whether an "x" flag (free-spacing mode) is turned on or off
if (t.groupType && val.indexOf("x") != -1) {
var minus = val.indexOf("-");
if (minus == -1 || minus > val.indexOf("x"))
stack.xGroup = t;
else
stack.xGroup = null;
} else if (!isStart && stack.xGroup && stack.xGroup == stack[0]) {
if (stack.xGroup.value.slice(-1) == ":")
stack.xGroup = null;
}
// maintain the open-group stack and link start/end tokens together
if (isStart) {
if (stack.groupDepth) {
stack[0].hasChildren = true
}
stack.groupDepth = (stack.groupDepth||0)+1;
stack.unshift(t)
} else {
stack.groupDepth --;
t.start = stack.shift(t)
t.start.end = t
}
return [t]
}, merge:false
}
],
// free-spacing mode: whitespace is insignificant and # starts a comment
xGroup: [
{token: "text", regex:/\s+/, onMatch: function(val, state, stack) {
return stack.xGroup ? [] : "text"
}, merge: true},
{token: "text", regex: /#/, onMatch: function(val, state, stack) {
if (stack.xGroup) {
this.next = "comment";
stack.unshift(state);
return [];
}
this.next = "";
return "text";
}, merge: true}
],
// swallow the rest of a (?x) # comment, then restore the saved state
comment: [{
regex: "[^\n\r]*|^", token: "", onMatch: function(val, state, stack) {
this.next = stack.shift();
return [];
}
}]
}
r.normalizeRules()
var tmReTokenizer = new Tokenizer(r.getRules());
// Tokenizes a regex source string into the token objects defined above.
function tokenize(str) {
return tmReTokenizer.getLineTokens(str).tokens;
}
// Reassembles tokens back into a regex source string.
function toStr(tokens) { return tokens.map(function(x){return x.value}).join("")}
exports.tokenize = tokenize;
exports.toStr = toStr;
exports.tmReTokenizer = tmReTokenizer;

View file

@ -0,0 +1,30 @@
// Smoke tests for tool/regexp_tokenizer.js: round-trips Oniguruma regexes
// through the tokenizer and checks free-spacing (?x) handling.
require("amd-loader");
var assert = require("assert");
var tk = require("./regexp_tokenizer");
var tokenize = tk.tokenize;
var toStr = tk.toStr;
// Debug helper: prints tokens without their start/end bookkeeping links.
var logTokens = function(tokens) {
tokens.forEach(function(x) {
delete x.end
delete x.start
})
console.log(tokens)
}
// Inside (?x), whitespace and # comments are dropped; the (?-x) section
// and the bracketed [ ] space must survive untouched.
assert.equal(toStr(
tokenize("(?x)c + +\n\
# comment\n\
(?-x) # (?x: 1 \n\
(2) [ ] # a \n\
3 4) c#"
)),
"(?x)c++(?-x) # (?x:1(2)[ ]34) c#"
)
// A trailing (?x) comment with no following pattern is removed entirely.
assert.equal(toStr(
tokenize("(?x)\n\
u # comment\n\
")),
"(?x)u"
)

View file

@ -0,0 +1,45 @@
// [PackageDev] target_format: plist, ext: tmLanguage
{
"name": "Dummy",
"scopeName": "source.dummy",
"fileTypes": ["dummy"],
"patterns": [
{
"include": "#string"
}, {
"include": "#escapes"
}
],
"repository": {
"escapes": {
"patterns": [
{
"match": "\\\\[nrt\\\\\\$\\\"']",
"name": "keyword.dummy"
}
]
},
"string": {
"beginCaptures": {
"0": {
"name": "punctuation.definition.string.begin.dummy"
}
},
"endCaptures": {
"0": {
"name": "punctuation.definition.string.end.dummy"
}
},
"contentName": "meta.string-contents.quoted.double.dummy",
"name": "string.quoted.double.dummy",
"end": "'''",
"begin": "'''",
"patterns": [
{
"include": "#escapes"
}
],
"comment": "This is a comment"
}
}
}

View file

@ -28,17 +28,11 @@
*
* ***** END LICENSE BLOCK ***** */
/* THIS FILE WAS AUTOGENERATED FROM %name% (UUID: %uuid%) */
/****************************************************************
* IT MIGHT NOT BE PERFECT, PARTICULARLY: *
* IN DECIDING STATES TO TRANSITION TO, *
* IGNORING WHITESPACE, *
* IGNORING GROUPS WITH ?:, *
* EXTENDING EXISTING MODES, *
* GATHERING KEYWORDS, OR *
* DECIDING WHEN TO USE PUSH. *
* ...But it's a good start from an existing *.tmlanguage file. *
****************************************************************/
/* This file was autogenerated from %name% (uuid: %uuid%) */
/****************************************************************************************
* IT MIGHT NOT BE PERFECT ...But it's a good start from an existing *.tmlanguage file. *
* fileTypes *
****************************************************************************************/
define(function(require, exports, module) {
"use strict";
@ -55,6 +49,9 @@ var %language%HighlightRules = function() {
this.normalizeRules();
};
%language%HighlightRules.metaData = %metaData%
oop.inherits(%language%HighlightRules, TextHighlightRules);
exports.%language%HighlightRules = %language%HighlightRules;

View file

@ -48,8 +48,8 @@ var Mode = function() {
oop.inherits(Mode, TextMode);
(function() {
this.lineCommentStart = "%lineCommentStart%";
this.blockComment = {start: "%blockCommentStart%", end: "%blockCommentEnd%"};
// this.lineCommentStart = "%lineCommentStart%";
// this.blockComment = {start: "%blockCommentStart%", end: "%blockCommentEnd%"};
// Extra logic goes here.
this.$id = "ace/mode/%languageHighlightFilename%"
}).call(Mode.prototype);

View file

@ -1,8 +1,330 @@
require("amd-loader");
var fs = require("fs");
var util = require("util");
var lib = require("./lib");
var pathlib = require("path");
var parseLanguage = lib.parsePlist;
var tk = require("./regexp_tokenizer");
var tokenize = tk.tokenize;
var toStr = tk.toStr;
/** Returns the final element of `array` (undefined when empty). */
function last(array) {
    return array[array.length - 1];
}
/**
 * Rewrites Oniguruma's \h / \H (hex-digit) escapes, which JavaScript
 * regexps lack, into equivalent character classes.
 * \h becomes [\da-fA-F] (or the bare range inside a character class);
 * \H becomes [^\da-fA-F], which has no in-class equivalent and is only
 * warned about there. Mutates and returns `tokens`.
 */
function convertHexEscape(tokens) {
    var insideClass = false;
    for (var i = 0; i < tokens.length; i++) {
        var tok = tokens[i];
        if (tok.type == "charclass") {
            insideClass = true;
        } else if (tok.type == "charclass.end") {
            insideClass = false;
        } else if (tok.type == "charType") {
            if (tok.value == "\\h") {
                tok.type = "text";
                tok.value = insideClass ? "\\da-fA-F" : "[\\da-fA-F]";
            } else if (tok.value == "\\H") {
                if (insideClass) {
                    // a complement class cannot be embedded in [...]
                    console.warn("can't convert \\H in charclass");
                } else {
                    tok.type = "text";
                    tok.value = "[^\\da-fA-F]";
                }
            }
        }
    }
    return tokens;
}
// Ace tokenizes line-by-line, so a literal \n can never match; replace
// each \n in the regex with "$" (end of line). Runs of \n (and their
// quantifiers) collapse into one "$"; an optional \n (\n?, \n*, {,n})
// is removed entirely.
function convertNewLinesTo$(str) {
var tokens = tokenize(str);
for (var i = 0; i < tokens.length; i++) {
var t= tokens[i];
if (t.type == "char" && t.value == "\\n") {
var p = tokens[i + 1] || {};
if (p.type != "quantifier") {
t.value = "$";
// swallow consecutive \n tokens and their quantifiers
while (p.value == "\\n" || p.type == "quantifier") {
p.value = "";
p = tokens[++i + 1] || {};
}
} else if (/\?|\*|{,|{0,/.test(p.value)) {
// \n was optional: drop both the char and its quantifier
t.value = p.value = "";
} else
p.value = "";
}
}
// collapse any "$$..." produced by adjacent replacements
return toStr(tokens).replace(/[$]+/g, "$");
}
// Converts Oniguruma-only constructs to JS-compatible ones: \h/\H hex
// escapes (via convertHexEscape) and possessive quantifiers (a++, a*+,
// a?+, {n,m}+), which JS lacks — the trailing "+" is dropped, making the
// quantifier greedy instead of possessive.
function convertCharacterTypes(str) {
var tokens = tokenize(str);
tokens = convertHexEscape(tokens);
var warn = false;
tokens.forEach(function(t){
if (t.type == "quantifier") {
var val = t.value;
if (val.slice(-1) == "+" && val.length > 1) {
t.value = val.slice(0, -1);
warn = val;
}
}
});
if (warn)
console.log("converted possesive quantifier " + warn + " to *");
return toStr(tokens);
}
// Strips inline flag groups like (?i), (?x:, (?m-s) from the regex,
// since JS regexps do not support them. When an "i" flag is seen and a
// rule object is supplied, rule.caseInsensitive is set so the Ace
// tokenizer can honor it instead. Returns the cleaned regex source.
function removeInlineFlags(str, rule) {
var tokens = tokenize(str);
var caseInsensitive = false;
tokens.forEach(function(t, i) {
if (t.type == "group.start" && /[imsx]/.test(t.value)) {
if (/i/.test(t.value))
caseInsensitive = true;
t.value = t.value.replace(/[imsx\-]/g, "");
// "(?i)" with nothing else collapses to "(?)" — drop the pair
var next = tokens[i + 1];
if (next && next.type == "group.end") {
t.value = next.value = "";
}
}
});
if (caseInsensitive && rule)
rule.caseInsensitive = true;
return toStr(tokens);
}
// Turns every plain capturing group "(" into a non-capturing "(?:"
// so the regex can be embedded without shifting group numbers.
function convertToNonCapturingGroups(str) {
var tokens = tokenize(str);
tokens.forEach(function(t, i) {
if (t.type == "group.start" && t.value == "(")
t.value += "?:";
});
return toStr(tokens);
}
// Removes redundant "(?:...)" wrappers: a group spanning the whole regex,
// or any group that contains no alternation and is not followed by a
// quantifier, since such a wrapper changes nothing.
function simplifyNonCapturingGroups(str) {
var tokens = tokenize(str);
// special case: the entire regex wrapped in one (?: ... )
var t = tokens[0];
if (t.type == "group.start" && t.value == "(?:"
&& t.end == last(tokens)) {
t.value = t.end.value = "";
}
var i = 0;
function iter(f) {
for (i = 0; i < tokens.length; i++)
f(tokens[i]);
}
// walk tokens between the current position and `end`; `f` may return a
// larger index to skip over a nested group it has handled
function iterGroup(end, f) {
for (var i1 = i + 1; i1 < tokens.length; i1++) {
var t = tokens[i1];
if (t == end)
break;
var index = f && f(t);
if (index > i1)
i1 = index;
}
return i1;
}
iter(function (t) {
if (t.type == "group.start" && t.value == "(?:") {
if (!t.end)
return console.error("malformed regex: " + str);
var canRemove = true;
// a quantifier after the group applies to its whole content — keep it
var next = tokens[tokens.indexOf(t.end, i) + 1];
if (next && next.type == "quantifier")
return;
// a top-level "|" inside the group makes the wrapper significant
iterGroup(t.end, function(t) {
if (t.type == "alternation")
canRemove = false;
else if (t.type == "group.start" && t.end)
return iterGroup(t.end);
});
if (canRemove)
t.value = t.end.value = "";
}
});
return toStr(tokens);
}
// JS regexps (at the time) had no lookbehind: strip (?<=...) / (?<!...)
// groups entirely. If the removed content contained "^", re-insert it as
// a plain non-capturing group so line-start anchoring is preserved.
function removeLookBehinds(str) {
var tokens = tokenize(str);
var toRemove = null;
tokens.forEach(function(t, i) {
// a group opener containing "<" marks a lookbehind; remember its closer
if (!toRemove && t.type == "group.start" && /</.test(t.value)) {
toRemove = t.end;
toRemove.content = [];
}
if (toRemove) {
toRemove.content.push(t.value);
t.value = "";
}
if (t == toRemove) {
// content minus the "(?<=" opener and ")" closer
var c = toRemove.content.slice(1, -1).join("");
if (/\^/.test(c))
toRemove.value = "(?:" + c +")";
toRemove = null;
}
});
return toStr(tokens);
}
// tmLanguage "end" patterns may backreference capture groups from the
// "begin" pattern (e.g. matching the same quote character). Ace's rules
// cannot do that, so inline each referenced begin-group's source into the
// end regex as a non-capturing group. Mutates rule.end in place.
// NOTE(review): the inlined source matches any value of the group, not
// the exact text captured at runtime — an approximation.
function convertBeginEndBackrefs(rule) {
if (!/\\\d/.test(rule.end))
return;
var startTokens = tokenize(rule.begin);
var endTokens = tokenize(rule.end);
var groups = {};
// collect the source text of each numbered capture group in `begin`
startTokens.forEach(function(t, i) {
if (t.number && t.end && t.type == "group.start") {
var endIndex = startTokens.indexOf(t.end, i + 1);
var content = startTokens.slice(i+1, endIndex);
groups[t.number] = toStr(content);
}
});
// splice those sources in place of \1, \2, ... in `end`
endTokens.forEach(function(t) {
if (t.type == "backRef") {
var num = t.value.substr(1);
if (groups[num])
t.value = "(?:" + groups[num] + ")";
}
});
rule.end = toStr(endTokens);
console.warn("Begin-End-Backreference is detected", rule);
}
// Diagnostics only: warns when the regex uses named captures or
// backreferences, which the converter does not implement.
function checkForNamedCaptures(str) {
var tokens = tokenize(str);
tokens.forEach(function(t) {
if (t.type == "group.start" && t.name)
console.warn("named capture not implemented", str);
if (t.type == "backRef")
console.warn("backRef not implemented ", str);
});
}
// Rewrites `regex` so that every span of text is wrapped in a capturing
// group whose position lines up with a token name, producing Ace's
// parallel {names, regex} form from tmLanguage `captures` (a map from
// capture number to scope name). `defaultName` is used for spans not
// covered by any capture. Statement order here is load-bearing: the three
// iter() passes below must run in sequence over the same token array.
function fixGroups(captures, defaultName, regex) {
var tokens = tokenize(regex);
var opened = [], isStart = true, i = 0;
// insert a synthetic "(" before the current token
function open() {
var t = {value: "(", type: "group.start", isGroup: true};
opened.push(t);
tokens.splice(i++, 0, t);
}
// insert a synthetic ")" matching the most recent open()
// NOTE(review): the closer's type is "group.start", not "group.end" —
// looks like a typo, but later passes may rely on it; confirm before
// changing.
function close() {
var t = {value: ")", type: "group.start"};
t.start = opened.pop();
t.start.end = t;
tokens.splice(i++, 0, t);
}
function tryOpen(){if (isStart) {open(); isStart = false}}
function tryClose(){if (opened.length) close()}
// advance i past the current group's matching end token
function skip(t) {
var i1 = tokens.indexOf(t.end, i);
if (i1 > i)
i = i1;
}
function lst(t) {return t[t.length - 1]}
function iter(f) {
for (i = 0; i < tokens.length; i++)
f(tokens[i]);
}
function iterGroup(end, f) {
for (var i1 = i + 1; i1 < tokens.length; i1++) {
var t = tokens[i1];
if (t == end)
break;
f(t);
}
}
function peek() { return tokens[i + 1] || {}}
// groupify
// Pass 1: wrap every maximal ungrouped run of tokens in a new group,
// closing at group boundaries and alternations.
iter(function(t){
if (t.type == "group.start") {
tryClose();
isStart = true;
if (!t.hasChildren || t.isSpecial)
skip(t);
} else if (t.type == "group.end") {
isStart = true;
tryClose();
} else if (t.type == "alternation") {
isStart = true;
tryClose();
} else if (t.type != "anchor" && t.type != "quantifier"){
tryOpen();
}
});
tryClose();
// remove redundand groups
// Pass 2: assign token names from `captures`, demote groups that don't
// correspond to a capture to non-capturing, and track nested capture
// scopes via the `names` stack.
var names = [defaultName];
iter(function(t){
if (t.type == "group.start" && !t.isSpecial) {
var captureName = captures[t.number];
if (!t.hasChildren) {
t.tokenName = captureName || lst(names);
skip(t);
} else {
var hasCapture = false;
iterGroup(t.end, function(t1) {
if (t1.type == "group.start" && captures[t1.number])
hasCapture = true;
});
if (hasCapture) {
// keep inner captures; this outer group becomes non-capturing
t.value = "(?:";
if (captureName) {
names.push(captureName);
t.isTokenGroup = true;
}
} else {
t.tokenName = captureName || lst(names);
// demote all nested groups — the outer one carries the name
iterGroup(t.end, function(t1) {
if (t1.value == "(")
t1.value = "(?:";
});
}
}
} else if (t.type == "group.end") {
if (t.start.isTokenGroup)
names.pop();
}
});
// wrap capturing groups with quantifier
// Pass 3: "(...)?" would make the capture optional in a way Ace can't
// map to a token; rewrite as "((?:...))?"-style wrapping.
iter(function(t){
if (t.type == "group.end" && t.start.value == "(" && peek().type == "quantifier") {
peek().value += ")";
t.start.value += "(?:";
}
});
// collect token names in the order their capturing groups appear
names = [];
tokens.forEach(function(t) {
if (t.value == "(" || t.value == "((?:" )
t.tokenName && names.push(t.tokenName);
});
return {
names: names,
regex: toStr(tokens)
};
}
/***** converter */
function logDebug(string, obj) {
console.log(string, obj);
@ -13,7 +335,6 @@ function logDebug(string, obj) {
// for tracking token states
var states = {start: []};
var stateName = "start";
function processRules(rules){
if (rules.patterns)
@ -40,29 +361,38 @@ function processPatterns(pl) {
return pl.map(processPattern);
}
// Converts one TextMate pattern object (match / begin+end / include) into an
// Ace tokenizer rule, recursing into nested patterns for begin/end pairs.
// NOTE(review): this block contains merged-diff residue — both the pre- and
// post-merge versions of several statements survive (the two `else if
// (p.begin...)` heads, the duplicated `next.push(endRule)` and
// `simpleRule(p.match...)` lines) and a raw diff hunk marker appears before
// the closing brace. As written this is NOT valid JavaScript; reconcile
// against the upstream commit before editing further.
function processPattern(p) {
if (p.end == "(?!\\G)" && p.patterns && p.patterns.length == 1) {
var rule = processPattern(p.patterns[0]);
}
else if (p.begin && p.end) {
var rule = simpleRule(p.begin, p.name, p.beginCaptures || p.captures)
else if (p.begin != null && p.end != null) {
convertBeginEndBackrefs(p);
var rule = simpleRule(p.begin, p.name, p.beginCaptures || p.captures);
var next = processPatterns(p.patterns || []);
var endRule = simpleRule(p.end, p.name, p.endCaptures || p.captures);
endRule.next = "pop";
next.push(endRule);
if (p.applyEndPatternLast)
next.push(endRule);
else
next.unshift(endRule);
if (p.name || p.contentName)
next.push({defaultToken: p.name || p.contentName});
rule.push = next;
rule = removeIncludeSelf(rule);
}
else if (p.match) {
var rule = simpleRule(p.match, p.name, p.captures)
var rule = simpleRule(p.match, p.name, p.captures);
}
else if (p.include) {
var rule = {include: p.include};
}
else {
var rule = {todo: p};
}
if (p.comment)
rule.comment = (rule.comment || "") + p.comment;
@ -73,106 +403,269 @@ function processPattern(p) {
}
// Builds a single {token, regex} rule from a TextMate regex and its captures,
// falling back to fixGroups() when several capture groups carry tokens.
// NOTE(review): merged-diff residue — `var rule` and `var origRegex` are each
// declared twice (old vs new version), so the regex would be transformed
// twice, and both the old manual capture-fill loop and the new fixGroups()
// path are present. Reconcile with the upstream commit.
function simpleRule(regex, name, captures) {
name = name || "text";
var rule = {};
var rule = {token: "", regex: ""};
var origRegex = regex
regex = transformRegExp(regex, rule);
var origRegex = regex;
regex = transformRegExp(origRegex, rule);
if (captures) {
// tokenArray[i] is the token for capture group i.
var tokenArray = [];
Object.keys(captures).forEach(function(x){
tokenArray[x] = captures[x] && captures[x].name;
});
if (tokenArray.length == 1) {
name = tokenArray[0];
} else {
for (var i = 0; i < tokenArray.length; i++)
if (!tokenArray[i])
tokenArray[i] = name;
name = tokenArray;
rule.todo = "fix grouping";
var fixed = fixGroups(tokenArray, name, regex);
name = fixed.names;
regex = fixed.regex;
if (name.length == 1)
name = name[0];
}
}
if (typeof name == "string")
regex = convertToNonCapturingGroups(regex);
regex = simplifyNonCapturingGroups(regex);
// Probe whether the transformed pattern is a valid JS RegExp; if not, keep
// the original source around for manual fixing.
try {new RegExp(regex);} catch(e) {
rule.TODO = "FIXME: regexp doesn't have js equivalent";
rule.originalRegex = origRegex
rule.originalRegex = origRegex;
// lookbehinds are mostly used to force ordering
// regex = removeLookBehinds(regex);
}
rule.token = name;
rule.regex = regex;
return rule;
}
// Rewrites a rule whose pushed sub-rules contain `include: "$self"` so the
// tokenizer can approximate recursive inclusion without a real state push.
// NOTE(review): merged-diff residue — the old unconditional early return
// (`return {todo: rule};`) and the new `complexSelfInclude` handling are both
// present, leaving everything after the first return dead. Reconcile with
// the upstream commit.
function removeIncludeSelf(rule) {
if (!rule.push)
return rule;
var hasSelfInclude = false;
var escapeRule = null;
var complexSelfInclude = false;
// Classify the pushed sub-rules: $self include, pop rule, or anything else.
rule.push.forEach(function(sub) {
if (sub.include == "$self") {
hasSelfInclude = true;
} else if (sub.defaultToken) {
return;
} else if (sub.next == "pop") {
escapeRule = sub;
} else
complexSelfInclude = true;
});
if (hasSelfInclude) {
console.warn("can't convert include $self");
return {todo: rule};
if (complexSelfInclude) {
console.warn("can't convert include $self");
rule.toDo = "include $self not fully supported";
return rule;
}
console.warn("include $self not fully supported");
delete rule.push;
delete escapeRule.next;
rule.includeSelf = true;
escapeRule.includeSelf = true;
return [rule, escapeRule];
}
return rule;
}
// regex transformation
// Strips Oniguruma's (?x) extended-mode prefix: removes insignificant
// whitespace and `#` comments while preserving escapes and character classes.
// NOTE(review): merged-diff residue — the statements after `return str;`
// (the tokenize/toStr rewrite from the newer version) are unreachable dead
// code. Reconcile with the upstream commit.
function removeXFlag(str) {
if (str && str.slice(0,4) == "(?x)") {
str = str.replace(/\\.|\[([^\]\\]|\\.)*?\]|\s+|(?:#[^\n]*)/g, function(s) {
if (s[0] == "[")
return s;
if (s[0] == "\\")
return /[#\s]/.test(s[1]) ? s[1] : s;
return "";
}).substr(4);
}
return str;
var tokens = tokenize(str);
return toStr(tokens);
}
// Massages an Oniguruma-style regex toward a JS-compatible one and records
// detected flags (e.g. caseInsensitive) on `rule`.
// NOTE(review): merged-diff residue — the old inline newline/flag
// replacements AND the newer helper calls (convertNewLinesTo$,
// removeInlineFlags, convertCharacterTypes) are both present, so the string
// would be transformed twice. Reconcile with the upstream commit.
function transformRegExp(str, rule) {
str = removeXFlag(str);
//str = str.replace(/\\n\$|\$\\n/g, '$');
str = str.replace(/\\n(?!\?).?/g, '$'); // replace newlines by $ except if its postfixed by ?
if (/\(\?[i]\:|\(?\w*i\w*\)/g.test(str)) {
str = str.replace(/\(\?[ims\-]\:/g, "(?:"); // checkForInvariantRegex
str = str.replace(/\(\?[imsx\-]\)/g, "");
rule && (rule.caseInsensitive = true);
}
str = convertNewLinesTo$(str);
str = removeInlineFlags(str, rule);
// Normalize \x{..} / \u{..} brace escapes to the bare \xNN / \uNNNN form.
str = str.replace(/(\\[xu]){([a-fA-F\d]+)}/g, '$1$2');
str = convertCharacterTypes(str, rule);
checkForNamedCaptures(str);
return str;
}
//
// Runs the converter over a grammar's full rule tree.
// NOTE(review): merged-diff residue — the second `return` (new version) is
// unreachable; the first still JSON-formats here while newer callers format
// the result themselves (see convertTmLanguage). Reconcile with upstream.
function extractPatterns(tmRules) {
var patterns = processRules(tmRules);
return lib.restoreJSONComments(lib.formatJSON(patterns, " "));
return processRules(tmRules);
}
// Statically walks the generated state graph and reports reference cycles
// (state A pushes/includes B which eventually leads back to A) to stderr so
// the author can break them by hand; nothing is fixed automatically.
function detectLoops(states) {
var data = {};
var keys = Object.keys(states);
var flattenedStates = {};
// Record that `item` (a state node) references state `name`, once.
function addRef(item, name) {
if (item.refs.indexOf(name) == -1)
item.refs.push(name);
}
// Produce a unique synthetic id for an inline (anonymous) next-state array.
function anonStateId(name, next) {
var i = 0, old = name;
while (flattenedStates[name] || states[name]) {
name = old + "_" + i++;
}
// console.log(old, name)
return name;
}
// Register `rules` under `key` (first registration wins) and return them.
function addState(key, rules) {
if (rules && !flattenedStates[key])
flattenedStates[key] = rules;
return rules || flattenedStates[key];
}
// Build the reference graph. NOTE: `keys` grows while this loop runs —
// anonymous inline states are appended and processed in later iterations.
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
var state = addState(key, states[key]);
var item = data[key] || (data[key] = {/* name: key, */ refs: []});
state.forEach(function(rule) {
var next = rule.push || rule.next;
if (next == "pop") {
// nothing
} else if (typeof next == "string") {
addRef(item, next);
} else if (next) {
// Inline rule array: register it under a synthetic name and queue it.
var anonId = anonStateId(key, next);
addState(anonId, next);
if (rule.push)
addRef(item, anonId);
keys.push(anonId);
} else if (rule.include) {
addRef(item, rule.include);
}
});
}
var cycles = [];
// Depth-first walk accumulating the current path; revisiting a state (or
// hitting the $self/$base sentinels) marks a cycle, which is deduplicated
// by its stringified path before being recorded.
function addPath(start, path) {
var node = data[start];
path.push(start);
// Debug leftover: logs states that are referenced but never defined.
if (!node || !node.refs)
console.log(start);
var i = path.indexOf(start);
if (i > -1 && i != path.length - 1 || start == "$self" || start == "$base") {
if (i != -1)
path = path.slice(i);
for (var j = 0; j < cycles.length; j++) {
if (cycles[j] + "" == path + "")
return;
}
return cycles.push(path);
}
// Give up on dead ends and pathologically deep paths.
if (!node || !node.refs || !node.refs.length || path.length>30)
return;
node.refs.forEach(function(x) {
addPath(x, path.concat());
});
}
addPath("start", []);
console.error(cycles.join("\n"));
}
// Smoke-test a freshly generated mode file: load it, instantiate the first
// exported Mode constructor, and tokenize one line. Any failure is logged to
// the console; this never throws.
function test(fileName) {
    console.log("testing highlighter");
    try {
        var exported = require(fileName);
        var firstKey = Object.keys(exported)[0];
        var Mode = exported[firstKey];
        var instance = new Mode();
        instance.getTokenizer().getLineTokens("hello world");
    } catch (err) {
        console.log(err);
    }
}
// Scan the converted highlighter states for a rule whose (string) token
// mentions "comment" and record its regex as the line-comment starter.
// When several rules match, the last one visited wins — same iteration
// order as the original implementation.
function guessComment(patterns) {
    var comment = {};
    for (var stateName in patterns) {
        patterns[stateName].forEach(function(rule) {
            var token = rule.token;
            if (typeof token != "string")
                return;
            if (/\bcomment\b/.test(token))
                comment.line = rule.regex;
        });
    }
    return comment;
}
// cli stuff
// File templates the converter fills in (via lib.fillTemplate) when emitting
// a generated mode module and its highlight-rules module.
var modeTemplate = fs.readFileSync(__dirname + "/templates/mode.js", "utf8");
var modeHighlightTemplate = fs.readFileSync(__dirname + "/templates/highlight_rules.js", "utf8");
// NOTE(review): orphaned head of the pre-merge convertLanguageFile() — its
// body and closing braces are gone (superseded by fetchAndConvert /
// convertTmLanguage below). Dead merged-diff residue; remove once reconciled
// with the upstream commit.
function convertLanguageFile(name) {
var path = /^(\/|\w:)/.test(name) ? name : process.cwd() + "/" + name
var tmLanguage = fs.readFileSync(path, "utf8");
parseLanguage(tmLanguage, function(language) {
var languageHighlightFilename = language.name.replace(/[-_]/g, "").toLowerCase();
var languageNameSanitized = language.name.replace(/-/g, "");
// Convert a tmLanguage grammar given either a URL or a local path.
// URLs are downloaded asynchronously (github "blob" links are rewritten to
// their raw form first); local names are resolved against the current
// working directory unless already absolute.
function fetchAndConvert(name) {
    console.log("Converting " + name);
    var isUrl = /^http/.test(name);
    if (isUrl) {
        if (/:\/\/github.com/.test(name))
            name = name.replace(/\/blob\//, "/").replace("github.com", "raw.github.com");
        return lib.download(name, function(data) {
            convertTmLanguage(name, data);
        });
    }
    var isAbsolute = /^(\/|\w:)/.test(name);
    var path = isAbsolute ? name : process.cwd() + "/" + name;
    var langStr = fs.readFileSync(path, "utf8");
    convertTmLanguage(name, langStr);
}
// NOTE(review): more residue from the removed convertLanguageFile(); these
// reference `languageHighlightFilename`/`name` from the orphaned fragment
// above and are used nowhere in the new code path. Remove on reconciliation.
var languageHighlightFile = __dirname + "/../lib/ace/mode/" + languageHighlightFilename + "_highlight_rules.js";
var languageModeFile = __dirname + "/../lib/ace/mode/" + languageHighlightFilename + ".js";
console.log("Converting " + name + " to " + languageHighlightFile);
// Parses a tmLanguage grammar string, converts its patterns to Ace rules,
// fills the mode/highlight-rules templates, and (outside devMode) writes the
// generated files and smoke-tests the new mode.
// NOTE(review): merged-diff residue — extractPatterns is called twice, the
// fillTemplate argument lists mix old and new fields (the bare
// `languageHighlightFilename` line, `languageTokens: patterns.trim()`,
// `name: name` without comma), a raw diff hunk marker survives mid-function,
// and both old (languageHighlightFile/languageModeFile) and new
// (highlighterFile/modeFile) writeFileSync calls are present. Not valid as
// written; reconcile against the upstream commit.
function convertTmLanguage(name, langStr) {
parseLanguage(langStr, function(language) {
var highlighterFilename = lib.snakeCase(language.name).replace(/[^\w]/g, "");
var languageNameSanitized = lib.camelCase(language.name).replace(/[^\w]/g, "");
require("./add_mode")(languageNameSanitized, (language.fileTypes || []).join("|"));
var highlighterFile = pathlib.normalize(lib.AceLib + "ace/mode/" + highlighterFilename + "_highlight_rules.js");
var modeFile = pathlib.normalize(lib.AceLib + "ace/mode/" + highlighterFilename + ".js");
if (devMode) {
console.log(util.inspect(language.patterns, false, 4));
console.log(util.inspect(language.repository, false, 4));
}
var patterns = extractPatterns(language);
detectLoops(patterns);
// var uuid = language.uuid
delete language.uuid;
delete language.patterns;
delete language.repository;
var comment = guessComment(patterns);
var languageMode = lib.fillTemplate(modeTemplate, {
language: languageNameSanitized,
languageHighlightFilename: languageHighlightFilename
languageHighlightFilename: highlighterFilename,
lineCommentStart: JSON.stringify(comment.line || "//"),
blockCommentStart: JSON.stringify(comment.start || "/*"),
blockCommentEnd: JSON.stringify(comment.end || "*/")
});
var patterns = extractPatterns(language);
var languageHighlightRules = lib.fillTemplate(modeHighlightTemplate, {
language: languageNameSanitized,
languageTokens: patterns.trim(),
languageTokens: lib.formatJSON(patterns, " ").trim(),
uuid: language.uuid,
name: name
name: name,
metaData: lib.formatJSON(language, " ").trim()
});
if (devMode) {
@ -181,17 +674,23 @@ function convertLanguageFile(name) {
console.log("Not writing, 'cause we're in dev mode, baby.");
}
else {
fs.writeFileSync(languageHighlightFile, languageHighlightRules);
fs.writeFileSync(languageModeFile, languageMode);
fs.writeFileSync(highlighterFile, languageHighlightRules);
fs.writeFileSync(modeFile, languageMode);
console.log("created file " + highlighterFile);
test(modeFile);
}
});
}
// CLI entry point: convert the file named on the command line, or export
// fetchAndConvert when loaded as a library.
// NOTE(review): merged-diff residue — the old unconditional CLI block and
// the new `module.parent`-guarded version coexist (leaving the first
// `if (tmLanguageFile === undefined) {` unclosed), and the trailing
// convertLanguageFile(...) call targets the removed function. Not valid as
// written; reconcile against the upstream commit.
var args = process.argv.splice(2);
var tmLanguageFile = args[0];
var devMode = args[1];
if (tmLanguageFile === undefined) {
console.error("Please pass in a language file via the command line.");
process.exit(1);
if (!module.parent) {
var args = process.argv.splice(2);
var tmLanguageFile = args[0];
var devMode = args[1];
if (tmLanguageFile === undefined) {
console.error("Usage: node tmlanguage.js path/or/url/to/syntax.file");
process.exit(1);
}
fetchAndConvert(tmLanguageFile);
} else {
exports.fetchAndConvert = fetchAndConvert;
}
convertLanguageFile(tmLanguageFile);