Merge branch 'master' of github.com:ajaxorg/ace

This commit is contained in:
Garen Torikian 2012-11-20 10:49:38 -08:00
commit f0189716c0
9 changed files with 175 additions and 7 deletions

View file

@ -1,4 +1,4 @@
2012.09.17, Version 0.1.0
2012.09.17, Version 1.0.0
* New Features
- Multiple cursors and selections (https://c9.io/site/blog/2012/08/be-an-armenian-warrior-with-block-selection-on-steroids/)

View file

@ -131,7 +131,7 @@ env.editor.commands.addCommands([{
}, {
name: "execute",
bindKey: "ctrl+enter",
exec: function(editor) {
exec: function(editor) {
try {
var r = eval(editor.getCopyText()||editor.getValue());
} catch(e) {

View file

@ -90,6 +90,7 @@ var docs = {
"docs/lisp.lisp": "Lisp",
"docs/liquid.liquid": "Liquid",
"docs/lua.lua": "Lua",
"docs/lucene.lucene": "Lucene",
"docs/luapage.lp": "LuaPage",
"docs/Makefile": "Makefile",
"docs/markdown.md": {name: "Markdown", wrapped: true},
@ -184,13 +185,13 @@ module.exports = {
ownSource: prepareDocList(ownSource),
hugeDocs: prepareDocList(hugeDocs),
initDoc: initDoc,
loadDoc: loadDoc
loadDoc: loadDoc
};
module.exports.all = {
"Mode Examples": module.exports.docs,
"Huge documents": module.exports.hugeDocs,
"Huge documents": module.exports.hugeDocs,
"own source": module.exports.ownSource
};
});

View file

@ -0,0 +1 @@
(title:"foo bar" AND body:"quick fox") OR title:fox

View file

@ -63,6 +63,7 @@ var modesByName = {
liquid: ["Liquid" , "liquid"],
lua: ["Lua" , "lua"],
luapage: ["LuaPage" , "lp"], // http://keplerproject.github.com/cgilua/manual.html#templates
lucene: ["Lucene" , "lucene"],
makefile: ["Makefile" , "^GNUmakefile|^makefile|^Makefile|^OCamlMakefile|make"],
markdown: ["Markdown" , "md|markdown"],
objectivec: ["Objective-C" , "m"],

16
lib/ace/mode/lucene.js Normal file
View file

@ -0,0 +1,16 @@
// Ace editor mode for the Lucene query syntax.
// Wires the Lucene highlight rules into a Tokenizer; all other editing
// behaviour is inherited unchanged from the plain-text mode.
define(function(require, exports, module) {
'use strict';
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var Tokenizer = require("../tokenizer").Tokenizer;
var LuceneHighlightRules = require("./lucene_highlight_rules").LuceneHighlightRules;
// Constructor: builds the mode's tokenizer from the Lucene highlight rules.
var Mode = function() {
this.$tokenizer = new Tokenizer(new LuceneHighlightRules().getRules());
};
oop.inherits(Mode, TextMode);
exports.Mode = Mode;
});

View file

@ -0,0 +1,49 @@
// Highlight rules for the Lucene query-parser syntax
// (field prefixes, boolean operators, wildcards, phrases, proximity).
define(function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
// NOTE: the unused require of ../lib/lang was removed — nothing in this
// module referenced it.
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;

var LuceneHighlightRules = function() {
    this.$rules = {
        "start" : [
            {
                // "-term" excludes a term from the result set
                token : "constant.character.negation",
                regex : "[\\-]"
            }, {
                // "?" single-character wildcard
                token : "constant.character.interro",
                regex : "[\\?]"
            }, {
                // "*" multi-character wildcard
                token : "constant.character.asterisk",
                regex : "[\\*]"
            }, {
                // "~N" proximity / fuzziness suffix, e.g. "foo bar"~10
                token : "constant.character.proximity",
                regex : "~[0-9]+\\b"
            }, {
                // boolean operators (Lucene requires upper case)
                token : "keyword.operator",
                regex : "(?:AND|OR|NOT)\\b"
            }, {
                token : "paren.lparen",
                regex : "[\\(]"
            }, {
                token : "paren.rparen",
                regex : "[\\)]"
            }, {
                // field prefix, e.g. title:
                token : "keyword",
                regex : "[\\S]+:"
            }, {
                // quoted phrase (non-greedy so adjacent phrases stay separate)
                token : "string",
                regex : '".*?"'
            }, {
                token : "text",
                regex : "\\s+"
            }
        ]
    };
};

oop.inherits(LuceneHighlightRules, TextHighlightRules);

exports.LuceneHighlightRules = LuceneHighlightRules;
});

View file

@ -0,0 +1,99 @@
if (typeof process !== "undefined") {
require("amd-loader");
}
define(function(require, exports, module) {
"use strict";
var LuceneMode = require("./lucene").Mode;
var assert = require("../test/assertions");
module.exports = {
name: "Lucene Tokenizer",
setUp : function() {
this.tokenizer = new LuceneMode().getTokenizer();
},
"test: recognises AND as keyword" : function() {
var tokens = this.tokenizer.getLineTokens("AND", "start").tokens;
assert.equal("keyword.operator", tokens[0].type);
},
"test: recognises OR as keyword" : function() {
var tokens = this.tokenizer.getLineTokens("OR", "start").tokens;
assert.equal("keyword.operator", tokens[0].type);
},
"test: recognises NOT as keyword" : function() {
var tokens = this.tokenizer.getLineTokens("NOT", "start").tokens;
assert.equal("keyword.operator", tokens[0].type);
},
'test: recognises "hello this is dog" as string' : function() {
var tokens = this.tokenizer.getLineTokens('"hello this is dog"', "start").tokens;
assert.equal("string", tokens[0].type);
},
'test: recognises -"hello this is dog" as negation with string' : function() {
var tokens = this.tokenizer.getLineTokens('-"hello this is dog"', "start").tokens;
assert.equal("constant.character.negation", tokens[0].type);
assert.equal("string", tokens[1].type);
},
'test: recognises ~100 as text with proximity' : function() {
var tokens = this.tokenizer.getLineTokens('~100', "start").tokens;
assert.equal("constant.character.proximity", tokens[0].type);
},
'test: recognises "hello this is dog"~100 as string with proximity' : function() {
var tokens = this.tokenizer.getLineTokens('"hello this is dog"~100', "start").tokens;
assert.equal("string", tokens[0].type);
assert.equal("constant.character.proximity", tokens[1].type);
},
'test: recognises raw:"hello this is dog" as keyword' : function() {
var tokens = this.tokenizer.getLineTokens('raw:"hello this is dog"', "start").tokens;
assert.equal("keyword", tokens[0].type);
},
'test: recognises raw:foo as"keyword' : function() {
var tokens = this.tokenizer.getLineTokens('raw:foo', "start").tokens;
assert.equal("keyword", tokens[0].type);
},
'test: recognises "(" as opening parenthesis' : function() {
var tokens = this.tokenizer.getLineTokens('(', "start").tokens;
assert.equal("paren.lparen", tokens[0].type);
},
'test: recognises ")" as closing parenthesis' : function() {
var tokens = this.tokenizer.getLineTokens(')', "start").tokens;
assert.equal("paren.rparen", tokens[0].type);
},
'test: recognises foo* as text with asterisk' : function() {
var tokens = this.tokenizer.getLineTokens('foo*', "start").tokens;
assert.equal("text", tokens[0].type);
assert.equal("constant.character.asterisk", tokens[1].type);
},
'test: recognises foo? as text with interro' : function() {
var tokens = this.tokenizer.getLineTokens('foo?', "start").tokens;
assert.equal("text", tokens[0].type);
assert.equal("constant.character.interro", tokens[1].type);
},
'test: recognises single word as text' : function() {
var tokens = this.tokenizer.getLineTokens(' foo', "start").tokens;
assert.equal("text", tokens[0].type);
}
};
});
if (typeof module !== "undefined" && module === require.main) {
require("asyncjs").test.testcase(module.exports).exec();
}

View file

@ -33,6 +33,7 @@ var testNames = [
"ace/mode/javascript_test",
"ace/mode/javascript_highlight_rules_test",
"ace/mode/javascript_worker_test",
"ace/mode/lucene_highlight_rules_test",
"ace/mode/python_test",
"ace/mode/ruby_highlight_rules_test",
"ace/mode/text_test",
@ -60,7 +61,7 @@ for (var i in testNames) {
var nav = document.createElement("div");
nav.innerHTML = html.join("");
nav.style.cssText = "position:absolute;right:0;top:0";
nav.style.cssText = "position:absolute;right:0;top:0";
document.body.appendChild(nav);
if (location.search)
@ -72,7 +73,7 @@ require(testNames, function() {
module.href = x;
return module;
});
async.list(tests)
.expand(function(test) {
return AsyncTest.testcase(test)