Merge pull request #1220 from wcandillon/master
XQuery syntax highlighting improvements
This commit is contained in:
commit
8d07cf88d1
17 changed files with 5462 additions and 1463 deletions
|
|
@ -230,7 +230,7 @@ var BackgroundTokenizer = function(tokenizer, editor) {
|
|||
var overflow = {value: line.substr(MAX_LINE_LENGTH), type: "text"};
|
||||
line = line.slice(0, MAX_LINE_LENGTH);
|
||||
}
|
||||
var data = this.tokenizer.getLineTokens(line, state);
|
||||
var data = this.tokenizer.getLineTokens(line, state, row);
|
||||
if (overflow) {
|
||||
data.tokens.push(overflow);
|
||||
data.state = "start";
|
||||
|
|
|
|||
|
|
@ -11,9 +11,9 @@ function generateTestData() {
|
|||
var docs = fs.readdirSync(cwd + root);
|
||||
var specialDocs = fs.readdirSync(cwd);
|
||||
var modes = fs.readdirSync(cwd + "../").filter(function(x){
|
||||
return /^\w+_highlight_rules.js$/.test(x);
|
||||
return !/(_highlight_rules|behaviour|worker)\.js$/.test(x) && /\.js$/.test(x);
|
||||
}).map(function(x) {
|
||||
return x.replace(/_highlight_rules.js$/, "");
|
||||
return x.replace(/\.js$/, "");
|
||||
});
|
||||
|
||||
console.log("Docs:", docs);
|
||||
|
|
|
|||
|
|
@ -1,46 +1,44 @@
|
|||
[[
|
||||
"start",
|
||||
"[\"start\"]",
|
||||
["keyword","xquery"],
|
||||
["text"," "],
|
||||
["keyword","version"],
|
||||
["text"," "],
|
||||
["string","\"1.0\""],
|
||||
["string","\""],
|
||||
["string","1.0"],
|
||||
["string","\""],
|
||||
["text",";"]
|
||||
],[
|
||||
"start"
|
||||
"[\"start\"]"
|
||||
],[
|
||||
"start",
|
||||
"[\"start\"]",
|
||||
["keyword","let"],
|
||||
["text"," "],
|
||||
["variable","$message"],
|
||||
["text"," "],
|
||||
["keyword.operator",":="],
|
||||
["text"," "],
|
||||
["string","\"Hello World!\""]
|
||||
["string","\""],
|
||||
["string","Hello World!"],
|
||||
["string","\""]
|
||||
],[
|
||||
"start",
|
||||
"[\"start\",\"StartTag\",\"TagContent\"]",
|
||||
["keyword","return"],
|
||||
["text"," "],
|
||||
["text","<"],
|
||||
["meta.tag","results"],
|
||||
["text",">"]
|
||||
["meta.tag","<results"],
|
||||
["meta.tag",">"]
|
||||
],[
|
||||
"start",
|
||||
"[\"start\",\"StartTag\",\"TagContent\"]",
|
||||
["text"," "],
|
||||
["text","<"],
|
||||
["meta.tag","message"],
|
||||
["text",">"],
|
||||
["lparen","{"],
|
||||
["meta.tag","<message"],
|
||||
["meta.tag",">"],
|
||||
["text","{"],
|
||||
["variable","$message"],
|
||||
["rparen","}"],
|
||||
["text","</"],
|
||||
["meta.tag","message"],
|
||||
["text",">"]
|
||||
["text","}"],
|
||||
["meta.tag","</message>"]
|
||||
],[
|
||||
"start",
|
||||
["text","</"],
|
||||
["meta.tag","results"],
|
||||
["text",">"]
|
||||
"[\"start\"]",
|
||||
["meta.tag","</results>"]
|
||||
],[
|
||||
"start"
|
||||
"[\"start\"]"
|
||||
]]
|
||||
|
|
@ -1,66 +1,88 @@
|
|||
/*
|
||||
* eXide - web-based XQuery IDE
|
||||
*
|
||||
* Copyright (C) 2011 Wolfgang Meier
|
||||
*
|
||||
* This program is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
define(function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../../lib/oop");
|
||||
var Behaviour = require('../behaviour').Behaviour;
|
||||
var CstyleBehaviour = require('./cstyle').CstyleBehaviour;
|
||||
var XmlBehaviour = require("../behaviour/xml").XmlBehaviour;
|
||||
var TokenIterator = require("../../token_iterator").TokenIterator;
|
||||
|
||||
var XQueryBehaviour = function (parent) {
|
||||
function hasType(token, type) {
|
||||
var hasType = true;
|
||||
var typeList = token.type.split('.');
|
||||
var needleList = type.split('.');
|
||||
needleList.forEach(function(needle){
|
||||
if (typeList.indexOf(needle) == -1) {
|
||||
hasType = false;
|
||||
return false;
|
||||
}
|
||||
});
|
||||
return hasType;
|
||||
}
|
||||
|
||||
var XQueryBehaviour = function () {
|
||||
|
||||
this.inherit(CstyleBehaviour, ["braces", "parens", "string_dquotes"]); // Get string behaviour
|
||||
this.parent = parent;
|
||||
this.inherit(XmlBehaviour); // Get xml behaviour
|
||||
|
||||
// this.add("brackets", "insertion", function (state, action, editor, session, text) {
|
||||
// if (text == "\n") {
|
||||
// var cursor = editor.getCursorPosition();
|
||||
// var line = session.doc.getLine(cursor.row);
|
||||
// var rightChars = line.substring(cursor.column, cursor.column + 2);
|
||||
// if (rightChars == '</') {
|
||||
// var indent = this.$getIndent(session.doc.getLine(cursor.row)) + session.getTabString();
|
||||
// var next_indent = this.$getIndent(session.doc.getLine(cursor.row));
|
||||
//
|
||||
// return {
|
||||
// text: '\n' + indent + '\n' + next_indent,
|
||||
// selection: [1, indent.length, 1, indent.length]
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// return false;
|
||||
// });
|
||||
this.add("autoclosing", "insertion", function (state, action, editor, session, text) {
|
||||
if (text == '>') {
|
||||
var position = editor.getCursorPosition();
|
||||
var iterator = new TokenIterator(session, position.row, position.column);
|
||||
var token = iterator.getCurrentToken();
|
||||
var atCursor = false;
|
||||
if (!token || !hasType(token, 'meta.tag') && !(hasType(token, 'text') && token.value.match('/'))){
|
||||
do {
|
||||
token = iterator.stepBackward();
|
||||
} while (token && (hasType(token, 'string') || hasType(token, 'keyword.operator') || hasType(token, 'entity.attribute-name') || hasType(token, 'text')));
|
||||
} else {
|
||||
atCursor = true;
|
||||
}
|
||||
var previous = iterator.stepBackward();
|
||||
if (!token || !hasType(token, 'meta.tag') || (previous !== null && previous.value.match('/'))) {
|
||||
return
|
||||
}
|
||||
var tag = token.value.substring(1);
|
||||
if (atCursor){
|
||||
var tag = tag.substring(0, position.column - token.start);
|
||||
}
|
||||
|
||||
return {
|
||||
text: '>' + '</' + tag + '>',
|
||||
selection: [1, 1]
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check for open tag if user enters / and auto-close it.
|
||||
// this.add("slash", "insertion", function (state, action, editor, session, text) {
|
||||
// if (text == "/") {
|
||||
// var cursor = editor.getCursorPosition();
|
||||
// var line = session.doc.getLine(cursor.row);
|
||||
// if (cursor.column > 0 && line.charAt(cursor.column - 1) == "<") {
|
||||
// line = line.substring(0, cursor.column) + "/" + line.substring(cursor.column);
|
||||
// var lines = session.doc.getAllLines();
|
||||
// lines[cursor.row] = line;
|
||||
// // call mode helper to close the tag if possible
|
||||
// parent.exec("closeTag", lines.join(session.doc.getNewLineCharacter()), cursor.row);
|
||||
// }
|
||||
// }
|
||||
// return false;
|
||||
// });
|
||||
}
|
||||
oop.inherits(XQueryBehaviour, Behaviour);
|
||||
|
||||
|
|
|
|||
|
|
@ -1,86 +1,86 @@
|
|||
define(function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../lib/oop");
|
||||
var TextMode = require("./text").Mode;
|
||||
var Tokenizer = require("../tokenizer").Tokenizer;
|
||||
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
|
||||
|
||||
var SnippetHighlightRules = function() {
|
||||
|
||||
var builtins = "SELECTION|CURRENT_WORD|SELECTED_TEXT|CURRENT_LINE|LINE_INDEX|" +
|
||||
"LINE_NUMBER|SOFT_TABS|TAB_SIZE|FILENAME|FILEPATH|FULLNAME";
|
||||
|
||||
this.$rules = {
|
||||
"start" : [
|
||||
{token:"constant.language.escape", regex: /\\[\$}`\\]/},
|
||||
{token:"keyword", regex: "\\$(?:TM_)?(?:" + builtins + ")\\b"},
|
||||
{token:"variable", regex: "\\$\\w+"},
|
||||
{token: function(value, state, stack) {
|
||||
if (stack[1])
|
||||
stack[1]++;
|
||||
else
|
||||
stack.unshift("start", 1);
|
||||
return this.tokenName;
|
||||
}, tokenName: "markup.list", regex: "\\${", next: "varDecl"},
|
||||
{token: function(value, state, stack) {
|
||||
if (!stack[1])
|
||||
return "text";
|
||||
stack[1]--;
|
||||
if (!stack[1])
|
||||
stack.splice(0,2);
|
||||
return this.tokenName;
|
||||
}, tokenName: "markup.list", regex: "}"},
|
||||
{token: "doc,comment", regex:/^\${2}-{5,}$/}
|
||||
],
|
||||
"varDecl" : [
|
||||
{regex: /\d+\b/, token: "constant.numeric"},
|
||||
{token:"keyword", regex: "(?:TM_)?(?:" + builtins + ")\\b"},
|
||||
{token:"variable", regex: "\\w+"},
|
||||
{regex: /:/, token: "punctuation.operator", next: "start"},
|
||||
{regex: /\//, token: "string.regex", next: "regexp"},
|
||||
{regex: "", next: "start"}
|
||||
],
|
||||
"regexp" : [
|
||||
{regex: /\\./, token: "escape"},
|
||||
{regex: /\[/, token: "regex.start", next: "charClass"},
|
||||
{regex: "/", token: "string.regex", next: "format"},
|
||||
//{"default": "string.regex"},
|
||||
{"token": "string.regex", regex:"."},
|
||||
],
|
||||
charClass : [
|
||||
{regex: "\\.", token: "escape"},
|
||||
{regex: "\\]", token: "regex.end", next: "regexp"},
|
||||
{"token": "string.regex", regex:"."},
|
||||
],
|
||||
"format" : [
|
||||
{regex: /\\[ulULE]/, token: "keyword"},
|
||||
{regex: /\$\d+/, token: "variable"},
|
||||
{regex: "/[gim]*:?", token: "string.regex", next: "start"},
|
||||
// {"default": "string"},
|
||||
{"token": "string", regex:"."},
|
||||
]
|
||||
};
|
||||
};
|
||||
|
||||
oop.inherits(SnippetHighlightRules, TextHighlightRules);
|
||||
|
||||
exports.SnippetHighlightRules = SnippetHighlightRules;
|
||||
|
||||
|
||||
var Mode = function() {
|
||||
var highlighter = new SnippetHighlightRules();
|
||||
|
||||
this.$tokenizer = new Tokenizer(highlighter.getRules());
|
||||
};
|
||||
oop.inherits(Mode, TextMode);
|
||||
|
||||
(function() {
|
||||
this.getNextLineIndent = function(state, line, tab) {
|
||||
return this.$getIndent(line);
|
||||
};
|
||||
}).call(Mode.prototype);
|
||||
exports.Mode = Mode;
|
||||
|
||||
|
||||
});
|
||||
define(function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../lib/oop");
|
||||
var TextMode = require("./text").Mode;
|
||||
var Tokenizer = require("../tokenizer").Tokenizer;
|
||||
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
|
||||
|
||||
var SnippetHighlightRules = function() {
|
||||
|
||||
var builtins = "SELECTION|CURRENT_WORD|SELECTED_TEXT|CURRENT_LINE|LINE_INDEX|" +
|
||||
"LINE_NUMBER|SOFT_TABS|TAB_SIZE|FILENAME|FILEPATH|FULLNAME";
|
||||
|
||||
this.$rules = {
|
||||
"start" : [
|
||||
{token:"constant.language.escape", regex: /\\[\$}`\\]/},
|
||||
{token:"keyword", regex: "\\$(?:TM_)?(?:" + builtins + ")\\b"},
|
||||
{token:"variable", regex: "\\$\\w+"},
|
||||
{token: function(value, state, stack) {
|
||||
if (stack[1])
|
||||
stack[1]++;
|
||||
else
|
||||
stack.unshift("start", 1);
|
||||
return this.tokenName;
|
||||
}, tokenName: "markup.list", regex: "\\${", next: "varDecl"},
|
||||
{token: function(value, state, stack) {
|
||||
if (!stack[1])
|
||||
return "text";
|
||||
stack[1]--;
|
||||
if (!stack[1])
|
||||
stack.splice(0,2);
|
||||
return this.tokenName;
|
||||
}, tokenName: "markup.list", regex: "}"},
|
||||
{token: "doc,comment", regex:/^\${2}-{5,}$/}
|
||||
],
|
||||
"varDecl" : [
|
||||
{regex: /\d+\b/, token: "constant.numeric"},
|
||||
{token:"keyword", regex: "(?:TM_)?(?:" + builtins + ")\\b"},
|
||||
{token:"variable", regex: "\\w+"},
|
||||
{regex: /:/, token: "punctuation.operator", next: "start"},
|
||||
{regex: /\//, token: "string.regex", next: "regexp"},
|
||||
{regex: "", next: "start"}
|
||||
],
|
||||
"regexp" : [
|
||||
{regex: /\\./, token: "escape"},
|
||||
{regex: /\[/, token: "regex.start", next: "charClass"},
|
||||
{regex: "/", token: "string.regex", next: "format"},
|
||||
//{"default": "string.regex"},
|
||||
{"token": "string.regex", regex:"."},
|
||||
],
|
||||
charClass : [
|
||||
{regex: "\\.", token: "escape"},
|
||||
{regex: "\\]", token: "regex.end", next: "regexp"},
|
||||
{"token": "string.regex", regex:"."},
|
||||
],
|
||||
"format" : [
|
||||
{regex: /\\[ulULE]/, token: "keyword"},
|
||||
{regex: /\$\d+/, token: "variable"},
|
||||
{regex: "/[gim]*:?", token: "string.regex", next: "start"},
|
||||
// {"default": "string"},
|
||||
{"token": "string", regex:"."},
|
||||
]
|
||||
};
|
||||
};
|
||||
|
||||
oop.inherits(SnippetHighlightRules, TextHighlightRules);
|
||||
|
||||
exports.SnippetHighlightRules = SnippetHighlightRules;
|
||||
|
||||
|
||||
var Mode = function() {
|
||||
var highlighter = new SnippetHighlightRules();
|
||||
|
||||
this.$tokenizer = new Tokenizer(highlighter.getRules());
|
||||
};
|
||||
oop.inherits(Mode, TextMode);
|
||||
|
||||
(function() {
|
||||
this.getNextLineIndent = function(state, line, tab) {
|
||||
return this.$getIndent(line);
|
||||
};
|
||||
}).call(Mode.prototype);
|
||||
exports.Mode = Mode;
|
||||
|
||||
|
||||
});
|
||||
|
|
@ -33,17 +33,15 @@ define(function(require, exports, module) {
|
|||
var WorkerClient = require("../worker/worker_client").WorkerClient;
|
||||
var oop = require("../lib/oop");
|
||||
var TextMode = require("./text").Mode;
|
||||
var Tokenizer = require("../tokenizer").Tokenizer;
|
||||
var XQueryHighlightRules = require("./xquery_highlight_rules").XQueryHighlightRules;
|
||||
//var XQueryBehaviour = require("./behaviour/xquery").XQueryBehaviour;
|
||||
var XQueryLexer = require("./xquery/XQueryLexer").XQueryLexer;
|
||||
var Range = require("../range").Range;
|
||||
var CstyleBehaviour = require("./behaviour/cstyle").CstyleBehaviour;
|
||||
var XQueryBehaviour = require("./behaviour/xquery").XQueryBehaviour;
|
||||
var CStyleFoldMode = require("./folding/cstyle").FoldMode;
|
||||
|
||||
|
||||
var Mode = function(parent) {
|
||||
this.$tokenizer = new Tokenizer(new XQueryHighlightRules().getRules());
|
||||
this.$behaviour = new CstyleBehaviour(parent);
|
||||
this.$tokenizer = new XQueryLexer();
|
||||
this.$behaviour = new XQueryBehaviour();
|
||||
this.foldingRules = new CStyleFoldMode();
|
||||
};
|
||||
|
||||
|
|
@ -133,21 +131,21 @@ oop.inherits(Mode, TextMode);
|
|||
});
|
||||
|
||||
worker.on("ok", function(e) {
|
||||
session.clearAnnotations();
|
||||
session.clearAnnotations();
|
||||
});
|
||||
|
||||
worker.on("highlight", function(tokens) {
|
||||
if(that.$deltas.length > 0) return;
|
||||
|
||||
var firstRow = 0;
|
||||
var lastRow = session.getLength() - 1;
|
||||
that.$tokenizer.tokens = tokens.data.tokens;
|
||||
that.$tokenizer.lines = session.getDocument().getAllLines();
|
||||
session.bgTokenizer.lines = [];
|
||||
session.bgTokenizer.states = [];
|
||||
|
||||
var lines = tokens.data.lines;
|
||||
var states = tokens.data.states;
|
||||
|
||||
session.bgTokenizer.lines = lines;
|
||||
session.bgTokenizer.states = states;
|
||||
session.bgTokenizer.fireUpdateEvent(firstRow, lastRow);
|
||||
var rows = Object.keys(that.$tokenizer.tokens);
|
||||
for(var i=0; i < rows.length; i++) {
|
||||
var row = parseInt(rows[i]);
|
||||
session.bgTokenizer.fireUpdateEvent(row, row);
|
||||
}
|
||||
});
|
||||
|
||||
return worker;
|
||||
|
|
|
|||
|
|
@ -1,129 +0,0 @@
|
|||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
var CommentHandler = exports.CommentHandler = function(code) {
|
||||
|
||||
var ast = null;
|
||||
var ptr = null;
|
||||
var remains = code;
|
||||
var cursor = 0;
|
||||
var lineCursor = 0;
|
||||
var line = 0;
|
||||
var col = 0;
|
||||
|
||||
function createNode(name){
|
||||
return { name: name, children: [], getParent: null, pos: { sl: 0, sc: 0, el: 0, ec: 0 } };
|
||||
}
|
||||
|
||||
function pushNode(name, begin){
|
||||
var node = createNode(name);
|
||||
if(ast === null) {
|
||||
ast = node;
|
||||
ptr = node;
|
||||
} else {
|
||||
node.getParent = ptr;
|
||||
ptr.children.push(node);
|
||||
ptr = ptr.children[ptr.children.length - 1];
|
||||
}
|
||||
}
|
||||
|
||||
function popNode(name, end){
|
||||
|
||||
if(ptr.children.length > 0) {
|
||||
var s = ptr.children[0];
|
||||
var e = ptr.children[ptr.children.length - 1];
|
||||
ptr.pos.sl = s.pos.sl;
|
||||
ptr.pos.sc = s.pos.sc;
|
||||
ptr.pos.el = e.pos.el;
|
||||
ptr.pos.ec = e.pos.ec;
|
||||
}
|
||||
|
||||
if(ptr.getParent !== null) {
|
||||
ptr = ptr.getParent;
|
||||
for(var i in ptr.children) {
|
||||
delete ptr.children[i].getParent;
|
||||
}
|
||||
} else {
|
||||
delete ptr.getParent;
|
||||
}
|
||||
}
|
||||
|
||||
this.peek = function() {
|
||||
return ptr;
|
||||
};
|
||||
|
||||
this.getParseTree = function() {
|
||||
return ast;
|
||||
};
|
||||
|
||||
this.reset = function(input) {};
|
||||
|
||||
this.startNonterminal = function(name, begin) {
|
||||
pushNode(name, begin);
|
||||
};
|
||||
|
||||
this.endNonterminal = function(name, end) {
|
||||
popNode(name, end);
|
||||
};
|
||||
|
||||
this.terminal = function(name, begin, end) {
|
||||
name = (name.substring(0, 1) === "'" && name.substring(name.length - 1) === "'") ? "TOKEN" : name;
|
||||
pushNode(name, begin);
|
||||
setValue(ptr, begin, end);
|
||||
popNode(name, end);
|
||||
};
|
||||
|
||||
this.whitespace = function(begin, end) {
|
||||
var name = "WS";
|
||||
pushNode(name, begin);
|
||||
setValue(ptr, begin, end);
|
||||
popNode(name, end);
|
||||
};
|
||||
|
||||
function setValue(node, begin, end) {
|
||||
var e = end - cursor;
|
||||
ptr.value = remains.substring(0, e);
|
||||
var sl = line;
|
||||
var sc = line === 0 ? lineCursor : lineCursor - 1;
|
||||
var el = sl + ptr.value.split("\n").length - 1;
|
||||
var lastIdx = ptr.value.lastIndexOf("\n");
|
||||
var ec = lastIdx === -1 ? sc + ptr.value.length : ptr.value.substring(lastIdx).length;
|
||||
remains = remains.substring(e);
|
||||
cursor = end;
|
||||
lineCursor = lastIdx === -1 ? lineCursor + (ptr.value.length) : ec;
|
||||
line = el;
|
||||
ptr.pos.sl = sl;
|
||||
ptr.pos.sc = sc;
|
||||
ptr.pos.el = el;
|
||||
ptr.pos.ec = ec;
|
||||
}
|
||||
};
|
||||
});
|
||||
|
|
@ -1,371 +0,0 @@
|
|||
// This file was generated on Wed Dec 12, 2012 20:06 (UTC+01) by REx v5.20 which is Copyright (c) 1979-2012 by Gunther Rademacher <grd@gmx.net>
|
||||
// REx command line: CommentParser.ebnf -tree -javascript -a xqlint
|
||||
|
||||
// line 2 "CommentParser.ebnf"
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
var CommentParser = exports.CommentParser = function CommentParser(string, parsingEventHandler)
|
||||
{
|
||||
init(string, parsingEventHandler);
|
||||
// line 40 "CommentParser.js"
|
||||
var self = this;
|
||||
|
||||
this.ParseException = function(b, e, s, o, x)
|
||||
{
|
||||
var
|
||||
begin = b,
|
||||
end = e,
|
||||
state = s,
|
||||
offending = o,
|
||||
expected = x;
|
||||
|
||||
this.getBegin = function() {return begin;};
|
||||
this.getEnd = function() {return end;};
|
||||
this.getState = function() {return state;};
|
||||
this.getExpected = function() {return expected;};
|
||||
this.getOffending = function() {return offending;};
|
||||
|
||||
this.getMessage = function()
|
||||
{
|
||||
return offending < 0 ? "lexical analysis failed" : "syntax error";
|
||||
};
|
||||
};
|
||||
|
||||
function init(string, parsingEventHandler)
|
||||
{
|
||||
eventHandler = parsingEventHandler;
|
||||
input = string;
|
||||
size = string.length;
|
||||
reset(0, 0, 0);
|
||||
}
|
||||
|
||||
this.getInput = function()
|
||||
{
|
||||
return input;
|
||||
};
|
||||
|
||||
function reset(l, b, e)
|
||||
{
|
||||
b0 = b; e0 = b;
|
||||
l1 = l; b1 = b; e1 = e;
|
||||
end = e;
|
||||
eventHandler.reset(input);
|
||||
}
|
||||
|
||||
this.getOffendingToken = function(e)
|
||||
{
|
||||
var o = e.getOffending();
|
||||
return o >= 0 ? CommentParser.TOKEN[o] : null;
|
||||
};
|
||||
|
||||
this.getExpectedTokenSet = function(e)
|
||||
{
|
||||
var expected;
|
||||
if (e.getExpected() < 0)
|
||||
{
|
||||
expected = getExpectedTokenSet(e.getState());
|
||||
}
|
||||
else
|
||||
{
|
||||
expected = [CommentParser.TOKEN[e.getExpected()]];
|
||||
}
|
||||
return expected;
|
||||
};
|
||||
|
||||
this.getErrorMessage = function(e)
|
||||
{
|
||||
var tokenSet = this.getExpectedTokenSet(e);
|
||||
var found = this.getOffendingToken(e);
|
||||
var prefix = input.substring(0, e.getBegin());
|
||||
var lines = prefix.split("\n");
|
||||
var line = lines.length;
|
||||
var column = lines[line - 1].length + 1;
|
||||
var size = e.getEnd() - e.getBegin();
|
||||
return e.getMessage()
|
||||
+ (found == null ? "" : ", found " + found)
|
||||
+ "\nwhile expecting "
|
||||
+ (tokenSet.length == 1 ? tokenSet[0] : ("[" + tokenSet.join(", ") + "]"))
|
||||
+ "\n"
|
||||
+ (size == 0 ? "" : "after successfully scanning " + size + " characters beginning ")
|
||||
+ "at line " + line + ", column " + column + ":\n..."
|
||||
+ input.substring(e.getBegin(), Math.min(input.length, e.getBegin() + 64))
|
||||
+ "...";
|
||||
};
|
||||
|
||||
this.parse_Comments = function()
|
||||
{
|
||||
eventHandler.startNonterminal("Comments", e0);
|
||||
for (;;)
|
||||
{
|
||||
lookahead1(0); // S^WS | EOF | '(:'
|
||||
if (l1 == 3) // EOF
|
||||
{
|
||||
break;
|
||||
}
|
||||
switch (l1)
|
||||
{
|
||||
case 1: // S^WS
|
||||
shift(1); // S^WS
|
||||
break;
|
||||
default:
|
||||
parse_Comment();
|
||||
}
|
||||
}
|
||||
shift(3); // EOF
|
||||
eventHandler.endNonterminal("Comments", e0);
|
||||
};
|
||||
|
||||
function parse_Comment()
|
||||
{
|
||||
eventHandler.startNonterminal("Comment", e0);
|
||||
shift(4); // '(:'
|
||||
for (;;)
|
||||
{
|
||||
lookahead1(1); // CommentContents | '(:' | ':)'
|
||||
if (l1 == 5) // ':)'
|
||||
{
|
||||
break;
|
||||
}
|
||||
switch (l1)
|
||||
{
|
||||
case 2: // CommentContents
|
||||
shift(2); // CommentContents
|
||||
break;
|
||||
default:
|
||||
parse_Comment();
|
||||
}
|
||||
}
|
||||
shift(5); // ':)'
|
||||
eventHandler.endNonterminal("Comment", e0);
|
||||
}
|
||||
|
||||
var lk, b0, e0;
|
||||
var l1, b1, e1;
|
||||
var eventHandler;
|
||||
|
||||
function error(b, e, s, l, t)
|
||||
{
|
||||
throw new self.ParseException(b, e, s, l, t);
|
||||
}
|
||||
|
||||
function shift(t)
|
||||
{
|
||||
if (l1 == t)
|
||||
{
|
||||
eventHandler.terminal(CommentParser.TOKEN[l1], b1, e1 > size ? size : e1);
|
||||
b0 = b1; e0 = e1; l1 = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
error(b1, e1, 0, l1, t);
|
||||
}
|
||||
}
|
||||
|
||||
function lookahead1(set)
|
||||
{
|
||||
if (l1 == 0)
|
||||
{
|
||||
l1 = match(set);
|
||||
b1 = begin;
|
||||
e1 = end;
|
||||
}
|
||||
}
|
||||
|
||||
var input;
|
||||
var size;
|
||||
var begin;
|
||||
var end;
|
||||
var state;
|
||||
|
||||
function match(tokenset)
|
||||
{
|
||||
var nonbmp = false;
|
||||
begin = end;
|
||||
var current = end;
|
||||
var result = CommentParser.INITIAL[tokenset];
|
||||
|
||||
for (var code = result & 15; code != 0; )
|
||||
{
|
||||
var charclass;
|
||||
var c0 = current < size ? input.charCodeAt(current) : 0;
|
||||
++current;
|
||||
if (c0 < 0x80)
|
||||
{
|
||||
charclass = CommentParser.MAP0[c0];
|
||||
}
|
||||
else if (c0 < 0xd800)
|
||||
{
|
||||
var c1 = c0 >> 5;
|
||||
charclass = CommentParser.MAP1[(c0 & 31) + CommentParser.MAP1[(c1 & 31) + CommentParser.MAP1[c1 >> 5]]];
|
||||
}
|
||||
else
|
||||
{
|
||||
if (c0 < 0xdc00)
|
||||
{
|
||||
var c1 = current < size ? input.charCodeAt(current) : 0;
|
||||
if (c1 >= 0xdc00 && c1 < 0xe000)
|
||||
{
|
||||
++current;
|
||||
c0 = ((c0 & 0x3ff) << 10) + (c1 & 0x3ff) + 0x10000;
|
||||
nonbmp = true;
|
||||
}
|
||||
}
|
||||
var lo = 0, hi = 1;
|
||||
for (var m = 1; ; m = (hi + lo) >> 1)
|
||||
{
|
||||
if (CommentParser.MAP2[m] > c0) hi = m - 1;
|
||||
else if (CommentParser.MAP2[2 + m] < c0) lo = m + 1;
|
||||
else {charclass = CommentParser.MAP2[4 + m]; break;}
|
||||
if (lo > hi) {charclass = 0; break;}
|
||||
}
|
||||
}
|
||||
|
||||
state = code;
|
||||
var i0 = (charclass << 4) + code - 1;
|
||||
code = CommentParser.TRANSITION[(i0 & 3) + CommentParser.TRANSITION[i0 >> 2]];
|
||||
|
||||
if (code > 15)
|
||||
{
|
||||
result = code;
|
||||
code &= 15;
|
||||
end = current;
|
||||
}
|
||||
}
|
||||
|
||||
result >>= 4;
|
||||
if (result == 0)
|
||||
{
|
||||
end = current - 1;
|
||||
var c1 = end < size ? input.charCodeAt(end) : 0;
|
||||
if (c1 >= 0xdc00 && c1 < 0xe000) --end;
|
||||
error(begin, end, state, -1, -1);
|
||||
}
|
||||
|
||||
if (nonbmp)
|
||||
{
|
||||
for (var i = result >> 3; i > 0; --i)
|
||||
{
|
||||
--end;
|
||||
var c1 = end < size ? input.charCodeAt(end) : 0;
|
||||
if (c1 >= 0xdc00 && c1 < 0xe000) --end;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
end -= result >> 3;
|
||||
}
|
||||
|
||||
return (result & 7) - 1;
|
||||
}
|
||||
|
||||
// Builds the list of human-readable token names the parser would have
// accepted in state s; used when constructing parse-error messages.
function getExpectedTokenSet(s)
{
  var expected = [];
  if (s > 0)
  {
    // Expected-token bits are packed 32 per word; with only 6 tokens
    // the outer loop runs a single iteration.
    for (var base = 0; base < 6; base += 32)
    {
      var bits = ec(base >>> 5, s);
      for (var idx = base; bits != 0; bits >>>= 1, ++idx)
      {
        if ((bits & 1) != 0)
        {
          expected.push(CommentParser.TOKEN[idx]);
        }
      }
    }
  }
  return expected;
}

// Looks up the expected-token bit mask for 32-bit word t in state s.
function ec(t, s)
{
  var index = t * 9 + s - 1;
  return CommentParser.EXPECTED[index];
}
|
||||
}
|
||||
|
||||
// Generated lookup tables for the CommentParser scanner — presumably
// emitted by the REx parser generator from CommentParser.ebnf (see the
// trailing "line 54 CommentParser.ebnf" marker); TODO confirm. Do not
// edit by hand.

// Character-class map (low code points). NOTE(review): MAP0 is not
// referenced in the visible scanner code, which uses MAP1/MAP2 —
// presumably used elsewhere in the generated scanner; verify.
CommentParser.MAP0 =
[
  /* 0 */ 6, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2,
  /* 36 */ 2, 2, 2, 2, 3, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  /* 72 */ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  /* 108 */ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2
];

// Two-level character-class map: the scanner indexes it as
// MAP1[(c0 & 31) + MAP1[(c1 & 31) + MAP1[c1 >> 5]]] for BMP code points
// (see the charclass computation earlier in this file).
CommentParser.MAP1 =
[
  /* 0 */ 54, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56,
  /* 27 */ 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56,
  /* 54 */ 88, 120, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147,
  /* 76 */ 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 6, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0,
  /* 104 */ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2, 2, 2, 2, 3, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  /* 140 */ 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
  /* 176 */ 2, 2, 2
];

// Range table for supplementary code points, searched by binary search:
// MAP2[m] holds range starts, MAP2[2 + m] range ends, MAP2[4 + m] the
// character class for that range (see the lo/hi search loop above).
CommentParser.MAP2 =
[
  /* 0 */ 57344, 65536, 65533, 1114111, 2, 2
];

// Initial scanner state per entry point. NOTE(review): not referenced
// in the visible code — presumably consumed by the generated reset
// logic; verify.
CommentParser.INITIAL =
[
  /* 0 */ 1, 2
];

// Packed DFA transition table; indexed via
// i0 = (charclass << 4) + code - 1; TRANSITION[(i0 & 3) + TRANSITION[i0 >> 2]]
// (see the state-advance code earlier in this file).
CommentParser.TRANSITION =
[
  /* 0 */ 33, 33, 33, 33, 28, 37, 32, 33, 31, 37, 32, 33, 43, 50, 53, 33, 31, 39, 33, 33, 46, 57, 59, 33, 63, 33, 33,
  /* 27 */ 33, 35, 5, 35, 0, 5, 0, 0, 0, 0, 5, 5, 5, 5, 96, 5, 4, 6, 0, 0, 7, 0, 80, 184, 184, 184, 184, 0, 0, 0, 185,
  /* 58 */ 80, 185, 0, 0, 0, 64, 0, 0, 0
];

// Bit sets of acceptable tokens per parser state; decoded by
// getExpectedTokenSet()/ec() for error reporting.
CommentParser.EXPECTED =
[
  /* 0 */ 26, 52, 2, 16, 4, 20, 36, 4, 4
];

// Human-readable terminal names, indexed by token code.
CommentParser.TOKEN =
[
  "(0)",
  "S",
  "CommentContents",
  "EOF",
  "'(:'",
  "':)'"
];
|
||||
|
||||
// line 54 "CommentParser.ebnf"
|
||||
});
|
||||
// line 371 "CommentParser.js"
|
||||
// End
|
||||
|
|
@ -32,8 +32,14 @@ define(function(require, exports, module){
|
|||
|
||||
var JSONParseTreeHandler = exports.JSONParseTreeHandler = function(code) {
|
||||
|
||||
//List of nodes that are left untouched in the parse tree size optimization.
|
||||
var blacklist = ["VarDeclStatement"];
|
||||
//List of nodes that are not targeted by the parse tree size optimization.
|
||||
var list = [
|
||||
"OrExpr", "AndExpr", "ComparisonExpr", "StringConcatExpr", "RangeExpr"
|
||||
//, "AdditiveExpr", "MultiplicativeExpr"
|
||||
, "UnionExpr", "IntersectExceptExpr", "InstanceofExpr", "TreatExpr", "CastableExpr"
|
||||
, "CastExpr", "UnaryExpr", "ValueExpr", "FTContainsExpr", "SimpleMapExpr", "PathExpr", "RelativePathExpr"
|
||||
, "PostfixExpr", "StepExpr"
|
||||
];
|
||||
|
||||
var ast = null;
|
||||
var ptr = null;
|
||||
|
|
@ -63,13 +69,31 @@ define(function(require, exports, module){
|
|||
|
||||
if(ptr.children.length > 0) {
|
||||
var s = ptr.children[0];
|
||||
var e = ptr.children[ptr.children.length - 1];
|
||||
var e = null;
|
||||
//We want to skip empty non terminals. For instance PredicateList:
|
||||
// [108] AxisStep ::= (ReverseStep | ForwardStep) PredicateList
|
||||
// [120] PredicateList ::= Predicate*
|
||||
for(var i= ptr.children.length - 1; i >= 0;i--) {
|
||||
e = ptr.children[i];
|
||||
if(e.pos.el !== 0 || e.pos.ec !== 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
ptr.pos.sl = s.pos.sl;
|
||||
ptr.pos.sc = s.pos.sc;
|
||||
ptr.pos.el = e.pos.el;
|
||||
ptr.pos.ec = e.pos.ec;
|
||||
}
|
||||
|
||||
//Normalize EQName && FunctionName
|
||||
if(ptr.name === "FunctionName") {
|
||||
ptr.name = "EQName";
|
||||
}
|
||||
if(ptr.name === "EQName" && ptr.value === undefined) {
|
||||
ptr.value = ptr.children[0].value;
|
||||
ptr.children.pop();
|
||||
}
|
||||
|
||||
if(ptr.getParent !== null) {
|
||||
ptr = ptr.getParent;
|
||||
//for(var i in ptr.children) {
|
||||
|
|
@ -80,12 +104,12 @@ define(function(require, exports, module){
|
|||
}
|
||||
|
||||
//Parse tree size optimization
|
||||
//if(ptr.children.length > 0) {
|
||||
// var lastChild = ptr.children[ptr.children.length - 1];
|
||||
// if(lastChild.children.length === 1 && blacklist.indexOf(lastChild.name) !== -1) {
|
||||
// ptr.children[ptr.children.length - 1] = lastChild.children[0];
|
||||
// }
|
||||
//}
|
||||
if(ptr.children.length > 0) {
|
||||
var lastChild = ptr.children[ptr.children.length - 1];
|
||||
if(lastChild.children.length === 1 && list.indexOf(lastChild.name) !== -1) {
|
||||
ptr.children[ptr.children.length - 1] = lastChild.children[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.peek = function() {
|
||||
|
|
@ -121,17 +145,23 @@ define(function(require, exports, module){
|
|||
};
|
||||
|
||||
function setValue(node, begin, end) {
|
||||
|
||||
var e = end - cursor;
|
||||
ptr.value = remains.substring(0, e);
|
||||
var sl = line;
|
||||
var sc = line === 0 ? lineCursor : lineCursor - 1;
|
||||
var el = sl + ptr.value.split("\n").length - 1;
|
||||
var lastIdx = ptr.value.lastIndexOf("\n");
|
||||
var ec = lastIdx === -1 ? sc + ptr.value.length : ptr.value.substring(lastIdx).length;
|
||||
remains = remains.substring(e);
|
||||
cursor = end;
|
||||
lineCursor = lastIdx === -1 ? lineCursor + (ptr.value.length) : ec;
|
||||
line = el;
|
||||
|
||||
var sl = line;
|
||||
var sc = lineCursor;
|
||||
var el = sl + ptr.value.split("\n").length - 1;
|
||||
var lastIdx = ptr.value.lastIndexOf("\n");
|
||||
var ec = lastIdx === -1 ? sc + ptr.value.length : ptr.value.substring(lastIdx + 1).length;
|
||||
// ec = ec === 0 ? 0 : ec - 1;
|
||||
|
||||
line = el;
|
||||
//lineCursor = ec === 0 ? 0 : ec;
|
||||
lineCursor = ec;
|
||||
|
||||
ptr.pos.sl = sl;
|
||||
ptr.pos.sc = sc;
|
||||
ptr.pos.el = el;
|
||||
|
|
|
|||
302
lib/ace/mode/xquery/XQueryLexer.js
Normal file
302
lib/ace/mode/xquery/XQueryLexer.js
Normal file
|
|
@ -0,0 +1,302 @@
|
|||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
|
||||
var XQueryTokenizer = require("./XQueryTokenizer").XQueryTokenizer;
|
||||
|
||||
// Event handler fed to XQueryTokenizer: collects the terminals emitted
// while scanning a single line. Non-terminal (tree) events are ignored;
// terminals and whitespace are recorded as {name, value} pairs in
// `this.tokens`, with values sliced out of the current input string.
var TokenHandler = function(code) {

  var input = code;

  this.tokens = [];

  // Re-initialize the handler for a new piece of source text.
  this.reset = function(code) {
    // BUG FIX: was `input = input` (a no-op self-assignment), which left
    // the handler extracting token values from the *old* input after a
    // reset. Adopt the new source text instead.
    input = code;
    this.tokens = [];
  };

  // Parse-tree structure events are irrelevant for lexing — ignore.
  this.startNonterminal = function(name, begin) {};

  this.endNonterminal = function(name, end) {};

  // Record a terminal token together with its source text.
  this.terminal = function(name, begin, end) {
    this.tokens.push({
      name: name,
      value: input.substring(begin, end)
    });
  };

  // Whitespace runs get the fixed pseudo-token name "WS".
  this.whitespace = function(begin, end) {
    this.tokens.push({
      name: "WS",
      value: input.substring(begin, end)
    });
  };
};
|
||||
|
||||
// Pipe-separated master list of XQuery keywords recognised by the lexer.
// NOTE(review): entries such as "collectionreturn", "toswitch" and
// "containsbreak" look like fused pairs of keywords — reproduced
// verbatim to preserve behaviour; confirm against the grammar before
// changing them.
var keys = "after|ancestor|ancestor-or-self|and|as|ascending|attribute|before|case|cast|castable|child|collation|comment|copy|count|declare|default|delete|descendant|descendant-or-self|descending|div|document|document-node|element|else|empty|empty-sequence|end|eq|every|except|first|following|following-sibling|for|function|ge|group|gt|idiv|if|import|insert|instance|intersect|into|is|item|last|le|let|lt|mod|modify|module|namespace|namespace-node|ne|node|only|or|order|ordered|parent|preceding|preceding-sibling|processing-instruction|rename|replace|return|satisfies|schema-attribute|schema-element|self|some|stable|start|switch|text|to|treat|try|typeswitch|union|unordered|validate|where|with|xquery|contains|paragraphs|sentences|times|words|by|collectionreturn|variable|version|option|when|encoding|toswitch|catch|tumbling|sliding|window|at|using|stemming|collection|schema|while|on|nodes|index|external|then|in|updating|value|of|containsbreak|loop|continue|exit|returning|append|json|position".split("|");

// One highlighting rule per keyword: the tokenizer reports keyword
// terminals under their quoted spelling, e.g. "'let'".
var keywords = keys.map(function(word) {
  return {
    name: "'" + word + "'",
    token: "keyword"
  };
});

// The same terminals treated as plain names: rendered as text, and the
// current lexer state is popped (used by the Prefix/_EQName states).
var ncnames = keys.map(function(word) {
  return {
    name: "'" + word + "'",
    token: "text",
    next: function(stack) {
      stack.pop();
    }
  };
});
|
||||
|
||||
// Ace token type used for each broad lexical category below.
var cdata = "constant.language";
var number = "constant";
var xmlcomment = "comment";
var pi = "xml-pe";
// NOTE(review): "buildin" looks like a typo for "builtin" — kept as-is,
// since themes may key off the exact class name.
var pragma = "constant.buildin";

// Lexer state machine: one entry per tokenizer state. Each rule maps a
// terminal `name` (as reported by XQueryTokenizer) to an Ace `token`
// type (a string, or a function of the token's text), plus an optional
// `next` callback that mutates the state stack.
var Rules = {
  // Default XQuery expression context.
  start: [
    { name: "'(#'", token: pragma, next: function(stack){ stack.push("Pragma"); } },
    { name: "'(:'", token: "comment", next: function(stack){ stack.push("Comment"); } },
    { name: "'(:~'", token: "comment.doc", next: function(stack){ stack.push("CommentDoc"); } },
    { name: "'<!--'", token: xmlcomment, next: function(stack){ stack.push("XMLComment"); } },
    { name: "'<?'", token: pi, next: function(stack) { stack.push("PI"); } },
    { name: "''''", token: "string", next: function(stack){ stack.push("AposString"); } },
    { name: "'\"'", token: "string", next: function(stack){ stack.push("QuotString"); } },
    { name: "Annotation", token: "support.function" },
    { name: "ModuleDecl", token: "keyword", next: function(stack){ stack.push("Prefix"); } },
    { name: "OptionDecl", token: "keyword", next: function(stack){ stack.push("_EQName"); } },
    { name: "AttrTest", token: "support.type" },
    { name: "Variable", token: "variable" },
    { name: "'<![CDATA['", token: cdata, next: function(stack){ stack.push("CData"); } },
    { name: "IntegerLiteral", token: number },
    { name: "DecimalLiteral", token: number },
    { name: "DoubleLiteral", token: number },
    { name: "Operator", token: "keyword.operator" },
    // Names matching a keyword spelling are still highlighted as keywords.
    { name: "EQName", token: function(val) { return keys.indexOf(val) !== -1 ? "keyword" : "support.function"; } },
    { name: "'('", token:"lparen" },
    { name: "')'", token:"rparen" },
    { name: "Tag", token: "meta.tag", next: function(stack){ stack.push("StartTag"); } },
    // '}' leaves an enclosed-expression context, but never pops the
    // bottom "start" state.
    { name: "'}'", token: "text", next: function(stack){ if(stack.length > 1) { stack.pop(); } } },
    { name: "'{'", token: "text" } //, next: function(stack){ if(stack.length > 1) { stack.pop(); } } }
  ].concat(keywords),
  // Expecting an expanded QName (e.g. after an option declaration).
  _EQName: [
    { name: "EQName", token: "text", next: function(stack) { stack.pop(); } }
  ].concat(ncnames),
  // Expecting a namespace prefix (after a module declaration).
  Prefix: [
    { name: "NCName", token: "text", next: function(stack) { stack.pop(); } }
  ].concat(ncnames),
  // Inside an XML start tag, before '>' or '/>'.
  StartTag: [
    { name: "'>'", token: "meta.tag", next: function(stack){ stack.push("TagContent"); } },
    { name: "QName", token: "entity.other.attribute-name" },
    { name: "'='", token: "text" },
    { name: "''''", token: "string", next: function(stack){ stack.push("AposAttr"); } },
    { name: "'\"'", token: "string", next: function(stack){ stack.push("QuotAttr"); } },
    { name: "'/>'", token: "meta.tag.r", next: function(stack){ stack.pop(); } }
  ],
  // Between an element's start and end tags.
  TagContent: [
    { name: "ElementContentChar", token: "text" },
    { name: "'<![CDATA['", token: cdata, next: function(stack){ stack.push("CData"); } },
    { name: "'<!--'", token: xmlcomment, next: function(stack){ stack.push("XMLComment"); } },
    { name: "Tag", token: "meta.tag", next: function(stack){ stack.push("StartTag"); } },
    { name: "PredefinedEntityRef", token: "constant.language.escape" },
    { name: "CharRef", token: "constant.language.escape" },
    { name: "'{{'", token: "text" },
    { name: "'}}'", token: "text" },
    // '{' opens an enclosed XQuery expression inside element content.
    { name: "'{'", token: "text", next: function(stack){ stack.push("start"); } },
    // Closing tag pops both TagContent and the enclosing StartTag state.
    { name: "EndTag", token: "meta.tag", next: function(stack){ stack.pop(); stack.pop(); } }
  ],
  // Single-quoted attribute value.
  AposAttr: [
    { name: "''''", token: "string", next: function(stack){ stack.pop(); } },
    { name: "EscapeApos", token: "constant.language.escape" },
    { name: "AposAttrContentChar", token: "string" },
    { name: "PredefinedEntityRef", token: "constant.language.escape" },
    { name: "CharRef", token: "constant.language.escape" },
    { name: "'{{'", token: "string" },
    { name: "'}}'", token: "string" },
    { name: "'{'", token: "text", next: function(stack){ stack.push("start"); } }
  ],
  // Double-quoted attribute value.
  QuotAttr: [
    { name: "'\"'", token: "string", next: function(stack){ stack.pop(); } },
    { name: "EscapeQuot", token: "constant.language.escape" },
    { name: "QuotAttrContentChar", token: "string" },
    { name: "PredefinedEntityRef", token: "constant.language.escape" },
    { name: "CharRef", token: "constant.language.escape" },
    { name: "'{{'", token: "string" },
    { name: "'}}'", token: "string" },
    { name: "'{'", token: "text", next: function(stack){ stack.push("start"); } }
  ],
  // Inside an XQuery pragma '(# ... #)'.
  Pragma: [
    { name: "PragmaContents", token: pragma },
    { name: "'#)'", token: pragma, next: function(stack){ stack.pop(); } }
  ],
  // Inside a (possibly nested) '(: ... :)' comment.
  Comment: [
    { name: "CommentContents", token: "comment" },
    { name: "'(:'", token: "comment", next: function(stack){ stack.push("Comment"); } },
    { name: "':)'", token: "comment", next: function(stack){ stack.pop(); } }
  ],
  // Inside a '(:~ ... :)' documentation comment.
  CommentDoc: [
    { name: "DocCommentContents", token: "comment.doc" },
    { name: "DocTag", token: "comment.doc.tag" },
    { name: "'(:'", token: "comment.doc", next: function(stack){ stack.push("CommentDoc"); } },
    { name: "':)'", token: "comment.doc", next: function(stack){ stack.pop(); } }
  ],
  // Inside an XML '<!-- ... -->' comment.
  XMLComment: [
    { name: "DirCommentContents", token: xmlcomment },
    { name: "'-->'", token: xmlcomment, next: function(stack){ stack.pop(); } }
  ],
  // Inside a CDATA section.
  CData: [
    { name: "CDataSectionContents", token: cdata },
    { name: "']]>'", token: cdata, next: function(stack){ stack.pop(); } }
  ],
  // Inside a processing instruction '<? ... ?>'.
  PI: [
    { name: "DirPIContents", token: pi },
    { name: "PITarget", token: pi },
    { name: "S", token: pi },
    { name: "'?>'", token: pi, next: function(stack){ stack.pop(); } }
  ],
  // Single-quoted string literal.
  AposString: [
    { name: "''''", token: "string", next: function(stack){ stack.pop(); } },
    { name: "PredefinedEntityRef", token: "constant.language.escape" },
    { name: "CharRef", token: "constant.language.escape" },
    { name: "EscapeApos", token: "constant.language.escape" },
    { name: "AposChar", token: "string" }
  ],
  // Double-quoted string literal.
  QuotString: [
    { name: "'\"'", token: "string", next: function(stack){ stack.pop(); } },
    { name: "PredefinedEntityRef", token: "constant.language.escape" },
    { name: "CharRef", token: "constant.language.escape" },
    { name: "EscapeQuot", token: "constant.language.escape" },
    { name: "QuotChar", token: "string" }
  ]
};
|
||||
|
||||
exports.XQueryLexer = function() {
|
||||
|
||||
this.tokens = [];
|
||||
|
||||
// Tokenizes one line of XQuery for Ace.
//   line  — the line's text
//   state — the JSON-serialized lexer state stack left by the previous
//           line (Ace threads this value between lines)
//   row   — the line index, used to merge externally supplied semantic
//           tokens (see below)
// Returns { tokens, state } per Ace's tokenizer contract, where state
// is the serialized stack for the next line.
this.getLineTokens = function(line, state, row) {
  // Accept the legacy plain-string "start" (or a missing state) and
  // normalize it to a serialized one-element stack.
  state = (state === "start" || !state) ? '["start"]' : state;
  var stack = JSON.parse(state);
  var h = new TokenHandler(line);
  var tokenizer = new XQueryTokenizer(line, h);
  var tokens = [];

  while(true) {
    var currentState = stack[stack.length - 1];
    try {

      // Scan one terminal in the current state; the handler collects
      // the emitted terminal (and any leading whitespace).
      h.tokens = [];
      tokenizer["parse_" + currentState]();
      var info = null;

      // Detach a leading whitespace run as a plain text token so the
      // rule lookup below sees the real terminal in h.tokens[0].
      if(h.tokens.length > 1 && h.tokens[0].name === "WS") {
        tokens.push({
          type: "text",
          value: h.tokens[0].value
        });
        h.tokens.splice(0, 1);
      }

      // Find the first rule of the current state matching this terminal.
      // NOTE(review): the function-valued rule.name branch appears
      // unused — all rule names in Rules are strings; confirm.
      var token = h.tokens[0];
      var rules = Rules[currentState];
      for(var k = 0; k < rules.length; k++) {
        var rule = Rules[currentState][k];
        if((typeof(rule.name) === "function" && rule.name(token)) || rule.name === token.name) {
          info = rule;
          break;
        }
      }

      if(token.name === "EOF") { break; }

      // Unmatched terminals fall back to "text"; token types may be
      // computed from the token's text (e.g. keyword vs function name).
      tokens.push({
        type: info === null ? "text" : (typeof(info.token) === "function" ? info.token(token.value) : info.token),
        value: token.value
      });

      // Let the matched rule push/pop lexer states.
      if(info && info.next) {
        info.next(stack);
      }

    } catch(e) {
      if(e instanceof tokenizer.ParseException) {
        // Scan error: emit the unconsumed remainder of the line as
        // plain text and resynchronize the next line at "start".
        var index = 0;
        for(var i=0; i < tokens.length; i++) {
          index += tokens[i].value.length;
        }
        tokens.push({ type: "text", value: line.substring(index) });
        return {
          tokens: tokens,
          state: JSON.stringify(["start"])
        };
      } else {
        throw e;
      }
    }
  }


  // Overlay semantic token types supplied from outside. this.tokens and
  // this.lines are never written in this function — presumably a
  // semantic highlighter (worker) fills them in per row; TODO confirm
  // (this.lines is not even initialized here, so this.tokens[row] must
  // only ever be set together with this.lines[row]).
  if(this.tokens[row] !== undefined) {
    var cachedLine = this.lines[row];
    var begin = sharedStart([line, cachedLine]);
    var diff = cachedLine.length - line.length;
    var idx = 0;
    var col = 0;
    for(var i = 0; i < tokens.length; i++) {
      var token = tokens[i];
      for(var j = 0; j < this.tokens[row].length; j++) {
        var semanticToken = this.tokens[row][j];
        // A semantic token applies if it aligns with this lexical token
        // either in the unchanged prefix of the line, or at the same
        // position shifted by the line-length difference.
        if(
          ((col + token.value.length) <= begin.length && semanticToken.sc === col && semanticToken.ec === (col + token.value.length)) ||
          (semanticToken.sc === (col + diff) && semanticToken.ec === (col + token.value.length + diff))
        ) {
          // NOTE(review): idx is assigned but never read afterwards —
          // looks like leftover bookkeeping.
          idx = i;
          tokens[i].type = semanticToken.type;
        }
      }
      col += token.value.length;
    }
  }

  return {
    tokens: tokens,
    state: JSON.stringify(stack)
  };
};
|
||||
|
||||
// Longest leading fragment of the lexicographically smallest string
// that still occurs somewhere in the largest one. With two inputs this
// yields their common prefix; note the test is `indexOf(...) == -1`,
// not a strict starts-with, mirroring the original behaviour.
function sharedStart(strings) {
  var sorted = strings.slice(0).sort();
  var prefix = sorted[0];
  var length = prefix.length;
  var last = sorted.pop();
  while (length && last.indexOf(prefix) == -1) {
    --length;
    prefix = prefix.substring(0, length);
  }
  return prefix;
}
|
||||
};
|
||||
});
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
// This file was generated on Wed Dec 12, 2012 12:21 (UTC+01) by REx v5.20 which is Copyright (c) 1979-2012 by Gunther Rademacher <grd@gmx.net>
|
||||
// This file was generated on Sat Jan 5, 2013 17:36 (UTC+01) by REx v5.21 which is Copyright (c) 1979-2012 by Gunther Rademacher <grd@gmx.net>
|
||||
// REx command line: XQueryParser.ebnf -ll 2 -backtrack -tree -javascript -a xqlint
|
||||
|
||||
// line 2 "XQueryParser.ebnf"
|
||||
|
|
@ -84,6 +84,8 @@
|
|||
eventHandler.reset(input);
|
||||
}
|
||||
|
||||
this.reset = function(l, b, e) {reset(l, b, e);};
|
||||
|
||||
this.getOffendingToken = function(e)
|
||||
{
|
||||
var o = e.getOffending();
|
||||
|
|
@ -7896,16 +7898,17 @@
|
|||
// 'union' | 'unordered' | 'updating' | 'validate' | 'value' | 'variable' |
|
||||
// 'version' | 'where' | 'while' | 'with' | 'xquery' | '{' | '{|'
|
||||
try_StepExpr();
|
||||
lk = -1;
|
||||
memoize(2, e0A, -1);
|
||||
continue;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(2, e0A, -2);
|
||||
break;
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(2, e0, lk);
|
||||
}
|
||||
}
|
||||
if (lk != -1
|
||||
|
|
@ -9466,16 +9469,17 @@
|
|||
try
|
||||
{
|
||||
try_PostfixExpr();
|
||||
lk = -1;
|
||||
memoize(3, e0A, -1);
|
||||
lk = -3;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(3, e0A, -2);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(3, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -10081,6 +10085,8 @@
|
|||
case 141568: // 'unordered' '{'
|
||||
try_PostfixExpr();
|
||||
break;
|
||||
case -3:
|
||||
break;
|
||||
default:
|
||||
try_AxisStep();
|
||||
}
|
||||
|
|
@ -12937,16 +12943,17 @@
|
|||
try
|
||||
{
|
||||
try_KindTest();
|
||||
lk = -1;
|
||||
memoize(4, e0A, -1);
|
||||
lk = -8;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -6;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(4, e0A, -6);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(4, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -12984,6 +12991,8 @@
|
|||
case 242: // 'structured-item'
|
||||
try_StructuredItemTest();
|
||||
break;
|
||||
case -8:
|
||||
break;
|
||||
default:
|
||||
try_AtomicOrUnionType();
|
||||
}
|
||||
|
|
@ -14004,22 +14013,25 @@
|
|||
try
|
||||
{
|
||||
try_AnyFunctionTest();
|
||||
lk = -1;
|
||||
memoize(5, e0A, -1);
|
||||
lk = -3;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(5, e0A, -2);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(5, e0, lk);
|
||||
}
|
||||
switch (lk)
|
||||
{
|
||||
case -1:
|
||||
try_AnyFunctionTest();
|
||||
break;
|
||||
case -3:
|
||||
break;
|
||||
default:
|
||||
try_TypedFunctionTest();
|
||||
}
|
||||
|
|
@ -22135,16 +22147,17 @@
|
|||
try
|
||||
{
|
||||
try_Statement();
|
||||
lk = -1;
|
||||
memoize(6, e0A, -1);
|
||||
continue;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(6, e0A, -2);
|
||||
break;
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(6, e0, lk);
|
||||
}
|
||||
}
|
||||
if (lk != -1
|
||||
|
|
@ -23499,7 +23512,8 @@
|
|||
try
|
||||
{
|
||||
try_ApplyStatement();
|
||||
lk = -1;
|
||||
memoize(7, e0A, -1);
|
||||
lk = -14;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
|
|
@ -23509,7 +23523,8 @@
|
|||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
try_AssignStatement();
|
||||
lk = -2;
|
||||
memoize(7, e0A, -2);
|
||||
lk = -14;
|
||||
}
|
||||
catch (p2A)
|
||||
{
|
||||
|
|
@ -23519,7 +23534,8 @@
|
|||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
try_BlockStatement();
|
||||
lk = -3;
|
||||
memoize(7, e0A, -3);
|
||||
lk = -14;
|
||||
}
|
||||
catch (p3A)
|
||||
{
|
||||
|
|
@ -23529,19 +23545,20 @@
|
|||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
try_VarDeclStatement();
|
||||
lk = -12;
|
||||
memoize(7, e0A, -12);
|
||||
lk = -14;
|
||||
}
|
||||
catch (p12A)
|
||||
{
|
||||
lk = -13;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(7, e0A, -13);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(7, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -23587,6 +23604,8 @@
|
|||
case -13:
|
||||
try_WhileStatement();
|
||||
break;
|
||||
case -14:
|
||||
break;
|
||||
default:
|
||||
try_ApplyStatement();
|
||||
}
|
||||
|
|
@ -24728,11 +24747,9 @@
|
|||
shiftT(250); // 'try'
|
||||
lookahead1W(87); // S^WS | '(:' | '{'
|
||||
try_BlockStatement();
|
||||
for (;;)
|
||||
{
|
||||
lookahead1W(36); // S^WS | '(:' | 'catch'
|
||||
shiftT(91); // 'catch'
|
||||
lookahead1W(251); // Wildcard | EQName^Token | S^WS | '(:' | 'after' | 'allowing' | 'ancestor' |
|
||||
lookahead1W(36); // S^WS | '(:' | 'catch'
|
||||
shiftT(91); // 'catch'
|
||||
lookahead1W(251); // Wildcard | EQName^Token | S^WS | '(:' | 'after' | 'allowing' | 'ancestor' |
|
||||
// 'ancestor-or-self' | 'and' | 'as' | 'ascending' | 'at' | 'attribute' |
|
||||
// 'base-uri' | 'before' | 'boundary-space' | 'break' | 'case' | 'cast' |
|
||||
// 'castable' | 'catch' | 'child' | 'collation' | 'comment' | 'constraint' |
|
||||
|
|
@ -24753,8 +24770,10 @@
|
|||
// 'try' | 'tumbling' | 'type' | 'typeswitch' | 'union' | 'unordered' | 'updating' |
|
||||
// 'validate' | 'value' | 'variable' | 'version' | 'where' | 'while' | 'with' |
|
||||
// 'xquery'
|
||||
try_CatchErrorList();
|
||||
try_BlockStatement();
|
||||
try_CatchErrorList();
|
||||
try_BlockStatement();
|
||||
for (;;)
|
||||
{
|
||||
lookahead1W(278); // Wildcard | EQName^Token | IntegerLiteral | DecimalLiteral | DoubleLiteral |
|
||||
// StringLiteral | S^WS | EOF | '$' | '%' | '(' | '(#' | '(:' | '+' | '-' | '.' |
|
||||
// '..' | '/' | '//' | '<' | '<!--' | '<?' | '@' | '[' | 'after' | 'allowing' |
|
||||
|
|
@ -24863,16 +24882,17 @@
|
|||
// 'xquery'
|
||||
try_CatchErrorList();
|
||||
try_BlockStatement();
|
||||
lk = -1;
|
||||
memoize(8, e0A, -1);
|
||||
continue;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(8, e0A, -2);
|
||||
break;
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(8, e0, lk);
|
||||
}
|
||||
}
|
||||
if (lk != -1
|
||||
|
|
@ -24989,6 +25009,31 @@
|
|||
{
|
||||
break;
|
||||
}
|
||||
lookahead1W(36); // S^WS | '(:' | 'catch'
|
||||
shiftT(91); // 'catch'
|
||||
lookahead1W(251); // Wildcard | EQName^Token | S^WS | '(:' | 'after' | 'allowing' | 'ancestor' |
|
||||
// 'ancestor-or-self' | 'and' | 'as' | 'ascending' | 'at' | 'attribute' |
|
||||
// 'base-uri' | 'before' | 'boundary-space' | 'break' | 'case' | 'cast' |
|
||||
// 'castable' | 'catch' | 'child' | 'collation' | 'comment' | 'constraint' |
|
||||
// 'construction' | 'context' | 'continue' | 'copy' | 'copy-namespaces' | 'count' |
|
||||
// 'decimal-format' | 'declare' | 'default' | 'delete' | 'descendant' |
|
||||
// 'descendant-or-self' | 'descending' | 'div' | 'document' | 'document-node' |
|
||||
// 'element' | 'else' | 'empty' | 'empty-sequence' | 'encoding' | 'end' | 'eq' |
|
||||
// 'every' | 'except' | 'exit' | 'external' | 'first' | 'following' |
|
||||
// 'following-sibling' | 'for' | 'ft-option' | 'function' | 'ge' | 'group' | 'gt' |
|
||||
// 'idiv' | 'if' | 'import' | 'in' | 'index' | 'insert' | 'instance' | 'integrity' |
|
||||
// 'intersect' | 'into' | 'is' | 'item' | 'last' | 'lax' | 'le' | 'let' | 'loop' |
|
||||
// 'lt' | 'mod' | 'modify' | 'module' | 'namespace' | 'namespace-node' | 'ne' |
|
||||
// 'node' | 'nodes' | 'only' | 'option' | 'or' | 'order' | 'ordered' | 'ordering' |
|
||||
// 'parent' | 'preceding' | 'preceding-sibling' | 'processing-instruction' |
|
||||
// 'rename' | 'replace' | 'return' | 'returning' | 'revalidation' | 'satisfies' |
|
||||
// 'schema' | 'schema-attribute' | 'schema-element' | 'score' | 'self' | 'sliding' |
|
||||
// 'some' | 'stable' | 'start' | 'strict' | 'switch' | 'text' | 'to' | 'treat' |
|
||||
// 'try' | 'tumbling' | 'type' | 'typeswitch' | 'union' | 'unordered' | 'updating' |
|
||||
// 'validate' | 'value' | 'variable' | 'version' | 'where' | 'while' | 'with' |
|
||||
// 'xquery'
|
||||
try_CatchErrorList();
|
||||
try_BlockStatement();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -26497,16 +26542,16 @@
|
|||
// 'validate' | 'value' | 'variable' | 'version' | 'where' | 'while' | 'with' |
|
||||
// 'xquery' | '{' | '{|'
|
||||
try_ExprSingle();
|
||||
lk = -1;
|
||||
memoize(9, e0A, -1);
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(9, e0A, -2);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(9, e0, lk);
|
||||
lk = -2;
|
||||
}
|
||||
}
|
||||
if (lk == -1)
|
||||
|
|
@ -27214,16 +27259,17 @@
|
|||
shiftT(276); // '{'
|
||||
lookahead1W(88); // S^WS | '(:' | '}'
|
||||
shiftT(282); // '}'
|
||||
lk = -1;
|
||||
memoize(10, e0A, -1);
|
||||
lk = -3;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(10, e0A, -2);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(10, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -27233,6 +27279,8 @@
|
|||
lookahead1W(88); // S^WS | '(:' | '}'
|
||||
shiftT(282); // '}'
|
||||
break;
|
||||
case -3:
|
||||
break;
|
||||
default:
|
||||
try_BlockExpr();
|
||||
}
|
||||
|
|
@ -27473,16 +27521,17 @@
|
|||
shiftT(276); // '{'
|
||||
lookahead1W(88); // S^WS | '(:' | '}'
|
||||
shiftT(282); // '}'
|
||||
lk = -1;
|
||||
memoize(11, e0A, -1);
|
||||
lk = -3;
|
||||
}
|
||||
catch (p1A)
|
||||
{
|
||||
lk = -2;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(11, e0A, -2);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(11, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -27492,6 +27541,8 @@
|
|||
lookahead1W(88); // S^WS | '(:' | '}'
|
||||
shiftT(282); // '}'
|
||||
break;
|
||||
case -3:
|
||||
break;
|
||||
default:
|
||||
try_BlockExpr();
|
||||
}
|
||||
|
|
@ -28610,16 +28661,17 @@
|
|||
try
|
||||
{
|
||||
try_BlockExpr();
|
||||
lk = -10;
|
||||
memoize(12, e0A, -10);
|
||||
lk = -14;
|
||||
}
|
||||
catch (p10A)
|
||||
{
|
||||
lk = -11;
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(12, e0A, -11);
|
||||
}
|
||||
b0 = b0A; e0 = e0A; l1 = l1A; if (l1 == 0) {end = e0A;} else {
|
||||
b1 = b1A; e1 = e1A; l2 = l2A; if (l2 == 0) {end = e1A;} else {
|
||||
b2 = b2A; e2 = e2A; end = e2A; }}
|
||||
memoize(12, e0, lk);
|
||||
}
|
||||
}
|
||||
switch (lk)
|
||||
|
|
@ -28904,6 +28956,8 @@
|
|||
case 278: // '{|'
|
||||
try_JSONSimpleObjectUnion();
|
||||
break;
|
||||
case -14:
|
||||
break;
|
||||
default:
|
||||
try_Constructor();
|
||||
}
|
||||
|
|
@ -29427,8 +29481,8 @@
|
|||
var l1, b1, e1;
|
||||
var l2, b2, e2;
|
||||
var bx, ex, sx, lx, tx;
|
||||
var memo;
|
||||
var eventHandler;
|
||||
var memo;
|
||||
|
||||
function memoize(i, e, v)
|
||||
{
|
||||
|
|
@ -29633,7 +29687,7 @@
|
|||
end = current - 1;
|
||||
var c1 = end < size ? input.charCodeAt(end) : 0;
|
||||
if (c1 >= 0xdc00 && c1 < 0xe000) --end;
|
||||
error(begin, end, state, -1, -1);
|
||||
return error(begin, end, state, -1, -1);
|
||||
}
|
||||
|
||||
if (nonbmp)
|
||||
|
|
@ -33235,5 +33289,5 @@ XQueryParser.TOKEN =
|
|||
|
||||
// line 1174 "XQueryParser.ebnf"
|
||||
});
|
||||
// line 33239 "XQueryParser.js"
|
||||
// line 33293 "XQueryParser.js"
|
||||
// End
|
||||
|
|
|
|||
543
lib/ace/mode/xquery/XQueryTokenizer.ebnf
Normal file
543
lib/ace/mode/xquery/XQueryTokenizer.ebnf
Normal file
|
|
@ -0,0 +1,543 @@
|
|||
<?xqlint
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
var XQueryTokenizer = exports.XQueryTokenizer = function XQueryTokenizer(string, parsingEventHandler)
|
||||
{
|
||||
init(string, parsingEventHandler);
|
||||
?>
|
||||
|
||||
start ::= '<![CDATA['
|
||||
| '<!--'
|
||||
| '<?'
|
||||
| '(#'
|
||||
| '(:~'
|
||||
| '(:'
|
||||
| '"'
|
||||
| "'"
|
||||
| "}"
|
||||
| "{"
|
||||
| "("
|
||||
| ")"
|
||||
| "/"
|
||||
| "["
|
||||
| "]"
|
||||
| ","
|
||||
| "."
|
||||
| ";"
|
||||
| ":"
|
||||
| "!"
|
||||
| "|"
|
||||
| Annotation
|
||||
| ModuleDecl
|
||||
| OptionDecl
|
||||
| AttrTest
|
||||
| Wildcard
|
||||
| IntegerLiteral
|
||||
| DecimalLiteral
|
||||
| DoubleLiteral
|
||||
| Variable
|
||||
| EQName
|
||||
| Tag
|
||||
| Operator
|
||||
| EOF
|
||||
|
||||
StartTag ::= '>' | '/>' | QName | "=" | '"' | "'" | EOF
|
||||
|
||||
|
||||
TagContent
|
||||
::= ElementContentChar | Tag | EndTag | '<![CDATA[' | '<!--' | PredefinedEntityRef | CharRef | '{{' | '}}' | '{' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
AposAttr
|
||||
::= EscapeApos | AposAttrContentChar | PredefinedEntityRef | CharRef | '{{' | '}}' | '{' | "'" | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
QuotAttr
|
||||
::= EscapeQuot | QuotAttrContentChar | PredefinedEntityRef | CharRef | '{{' | '}}' | '{' | '"' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
CData ::= CDataSectionContents | ']]>' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
XMLComment
|
||||
::= DirCommentContents | '-->' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
PI ::= DirPIContents | '?>' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
Pragma ::= PragmaContents | '#)' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
Comment ::= ':)' | '(:' | CommentContents | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
CommentDoc
|
||||
::= DocTag | DocCommentContents | ':)' | '(:' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
QuotString
|
||||
::= PredefinedEntityRef | CharRef | EscapeQuot | QuotChar | '"' | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
AposString
|
||||
::= PredefinedEntityRef | CharRef | EscapeApos | AposChar | "'" | EOF
|
||||
/* ws: explicit */
|
||||
|
||||
Prefix ::= NCName
|
||||
|
||||
_EQName ::= EQName
|
||||
|
||||
Whitespace
|
||||
::= S^WS
|
||||
/* ws: definition */
|
||||
|
||||
EQName ::= FunctionName
|
||||
| 'attribute'
|
||||
| 'comment'
|
||||
| 'document-node'
|
||||
| 'element'
|
||||
| 'empty-sequence'
|
||||
| 'function'
|
||||
| 'if'
|
||||
| 'item'
|
||||
| 'namespace-node'
|
||||
| 'node'
|
||||
| 'processing-instruction'
|
||||
| 'schema-attribute'
|
||||
| 'schema-element'
|
||||
| 'switch'
|
||||
| 'text'
|
||||
| 'typeswitch'
|
||||
FunctionName
|
||||
::= EQName^Token
|
||||
| 'after'
|
||||
| 'ancestor'
|
||||
| 'ancestor-or-self'
|
||||
| 'and'
|
||||
| 'as'
|
||||
| 'ascending'
|
||||
| 'before'
|
||||
| 'case'
|
||||
| 'cast'
|
||||
| 'castable'
|
||||
| 'child'
|
||||
| 'collation'
|
||||
| 'copy'
|
||||
| 'count'
|
||||
| 'declare'
|
||||
| 'default'
|
||||
| 'delete'
|
||||
| 'descendant'
|
||||
| 'descendant-or-self'
|
||||
| 'descending'
|
||||
| 'div'
|
||||
| 'document'
|
||||
| 'else'
|
||||
| 'empty'
|
||||
| 'end'
|
||||
| 'eq'
|
||||
| 'every'
|
||||
| 'except'
|
||||
| 'first'
|
||||
| 'following'
|
||||
| 'following-sibling'
|
||||
| 'for'
|
||||
| 'ge'
|
||||
| 'group'
|
||||
| 'gt'
|
||||
| 'idiv'
|
||||
| 'import'
|
||||
| 'insert'
|
||||
| 'instance'
|
||||
| 'intersect'
|
||||
| 'into'
|
||||
| 'is'
|
||||
| 'last'
|
||||
| 'le'
|
||||
| 'let'
|
||||
| 'lt'
|
||||
| 'mod'
|
||||
| 'modify'
|
||||
| 'module'
|
||||
| 'namespace'
|
||||
| 'ne'
|
||||
| 'only'
|
||||
| 'or'
|
||||
| 'order'
|
||||
| 'ordered'
|
||||
| 'parent'
|
||||
| 'preceding'
|
||||
| 'preceding-sibling'
|
||||
| 'rename'
|
||||
| 'replace'
|
||||
| 'return'
|
||||
| 'satisfies'
|
||||
| 'self'
|
||||
| 'some'
|
||||
| 'stable'
|
||||
| 'start'
|
||||
| 'to'
|
||||
| 'treat'
|
||||
| 'try'
|
||||
| 'union'
|
||||
| 'unordered'
|
||||
| 'validate'
|
||||
| 'where'
|
||||
| 'with'
|
||||
| 'xquery'
|
||||
| 'allowing'
|
||||
| 'at'
|
||||
| 'base-uri'
|
||||
| 'boundary-space'
|
||||
| 'break'
|
||||
| 'catch'
|
||||
| 'construction'
|
||||
| 'context'
|
||||
| 'continue'
|
||||
| 'copy-namespaces'
|
||||
| 'decimal-format'
|
||||
| 'encoding'
|
||||
| 'exit'
|
||||
| 'external'
|
||||
| 'ft-option'
|
||||
| 'in'
|
||||
| 'index'
|
||||
| 'integrity'
|
||||
| 'lax'
|
||||
| 'nodes'
|
||||
| 'option'
|
||||
| 'ordering'
|
||||
| 'revalidation'
|
||||
| 'schema'
|
||||
| 'score'
|
||||
| 'sliding'
|
||||
| 'strict'
|
||||
| 'tumbling'
|
||||
| 'type'
|
||||
| 'updating'
|
||||
| 'value'
|
||||
| 'variable'
|
||||
| 'version'
|
||||
| 'while'
|
||||
| 'constraint'
|
||||
| 'loop'
|
||||
| 'returning'
|
||||
NCName ::= NCName^Token
|
||||
| 'after'
|
||||
| 'and'
|
||||
| 'as'
|
||||
| 'ascending'
|
||||
| 'before'
|
||||
| 'case'
|
||||
| 'cast'
|
||||
| 'castable'
|
||||
| 'collation'
|
||||
| 'count'
|
||||
| 'default'
|
||||
| 'descending'
|
||||
| 'div'
|
||||
| 'else'
|
||||
| 'empty'
|
||||
| 'end'
|
||||
| 'eq'
|
||||
| 'except'
|
||||
| 'for'
|
||||
| 'ge'
|
||||
| 'group'
|
||||
| 'gt'
|
||||
| 'idiv'
|
||||
| 'instance'
|
||||
| 'intersect'
|
||||
| 'into'
|
||||
| 'is'
|
||||
| 'le'
|
||||
| 'let'
|
||||
| 'lt'
|
||||
| 'mod'
|
||||
| 'modify'
|
||||
| 'ne'
|
||||
| 'only'
|
||||
| 'or'
|
||||
| 'order'
|
||||
| 'return'
|
||||
| 'satisfies'
|
||||
| 'stable'
|
||||
| 'start'
|
||||
| 'to'
|
||||
| 'treat'
|
||||
| 'union'
|
||||
| 'where'
|
||||
| 'with'
|
||||
| 'ancestor'
|
||||
| 'ancestor-or-self'
|
||||
| 'attribute'
|
||||
| 'child'
|
||||
| 'comment'
|
||||
| 'copy'
|
||||
| 'declare'
|
||||
| 'delete'
|
||||
| 'descendant'
|
||||
| 'descendant-or-self'
|
||||
| 'document'
|
||||
| 'document-node'
|
||||
| 'element'
|
||||
| 'empty-sequence'
|
||||
| 'every'
|
||||
| 'first'
|
||||
| 'following'
|
||||
| 'following-sibling'
|
||||
| 'function'
|
||||
| 'if'
|
||||
| 'import'
|
||||
| 'insert'
|
||||
| 'item'
|
||||
| 'last'
|
||||
| 'module'
|
||||
| 'namespace'
|
||||
| 'namespace-node'
|
||||
| 'node'
|
||||
| 'ordered'
|
||||
| 'parent'
|
||||
| 'preceding'
|
||||
| 'preceding-sibling'
|
||||
| 'processing-instruction'
|
||||
| 'rename'
|
||||
| 'replace'
|
||||
| 'schema-attribute'
|
||||
| 'schema-element'
|
||||
| 'self'
|
||||
| 'some'
|
||||
| 'switch'
|
||||
| 'text'
|
||||
| 'try'
|
||||
| 'typeswitch'
|
||||
| 'unordered'
|
||||
| 'validate'
|
||||
| 'variable'
|
||||
| 'xquery'
|
||||
| 'allowing'
|
||||
| 'at'
|
||||
| 'base-uri'
|
||||
| 'boundary-space'
|
||||
| 'break'
|
||||
| 'catch'
|
||||
| 'construction'
|
||||
| 'context'
|
||||
| 'continue'
|
||||
| 'copy-namespaces'
|
||||
| 'decimal-format'
|
||||
| 'encoding'
|
||||
| 'exit'
|
||||
| 'external'
|
||||
| 'ft-option'
|
||||
| 'in'
|
||||
| 'index'
|
||||
| 'integrity'
|
||||
| 'lax'
|
||||
| 'nodes'
|
||||
| 'option'
|
||||
| 'ordering'
|
||||
| 'revalidation'
|
||||
| 'schema'
|
||||
| 'score'
|
||||
| 'sliding'
|
||||
| 'strict'
|
||||
| 'tumbling'
|
||||
| 'type'
|
||||
| 'updating'
|
||||
| 'value'
|
||||
| 'version'
|
||||
| 'while'
|
||||
| 'constraint'
|
||||
| 'loop'
|
||||
| 'returning'
|
||||
<?TOKENS?>
|
||||
|
||||
ModuleDecl
|
||||
::= ('import' S)? ('module' | 'schema') S 'namespace'
|
||||
|
||||
Annotation
|
||||
::= '%' EQName ?
|
||||
|
||||
OptionDecl
|
||||
::= 'declare' S ( ( 'decimal-format' | 'option' )
|
||||
| ('default' S 'decimal-format') )
|
||||
|
||||
Operator ::= '!=' | ':=' | '>=' | '<=' | '=' | '<' | '>' | '-' | '+' | 'div' | '||' | '?'
|
||||
|
||||
Variable ::= '$' EQName
|
||||
|
||||
Tag ::= '<' QName
|
||||
|
||||
EndTag ::= '</' QName S? '>'
|
||||
|
||||
PragmaContents
|
||||
::= ( Char* - ( Char* '#' Char* ) )+
|
||||
DirCommentContents
|
||||
::= ( ( Char - '-' ) | '-' ( Char - '-' ) )+
|
||||
DirPIContents
|
||||
::= ( Char* - ( Char* '?' Char* ) )
|
||||
CDataSectionContents
|
||||
::= ( Char+ - ( Char* ']]>' Char* ) ) & ']]'
|
||||
| ( Char+ - ( Char* ']]>' Char* ) ) & $
|
||||
AttrTest ::= "@" ( Wildcard | QName )
|
||||
Wildcard ::= "*"
|
||||
| (NCName ":" "*")
|
||||
| ("*" ":" NCName)
|
||||
| (BracedURILiteral "*")
|
||||
EQName ::= QName
|
||||
| URIQualifiedName
|
||||
URIQualifiedName
|
||||
::= BracedURILiteral NCName
|
||||
BracedURILiteral
|
||||
::= 'Q' '{' (PredefinedEntityRef | CharRef | [^&{}] )* '}'
|
||||
URILiteral
|
||||
::= StringLiteral
|
||||
IntegerLiteral
|
||||
::= Digits
|
||||
DecimalLiteral
|
||||
::= '.' Digits
|
||||
| Digits '.' [0-9]*
|
||||
/* ws: explicit */
|
||||
DoubleLiteral
|
||||
::= ( '.' Digits | Digits ( '.' [0-9]* )? ) [Ee] [+#x002D]? Digits
|
||||
/* ws: explicit */
|
||||
PredefinedEntityRef
|
||||
::= '&' ( 'lt' | 'gt' | 'amp' | 'quot' | 'apos' ) ';'
|
||||
/* ws: explicit */
|
||||
EscapeQuot
|
||||
::= '""'
|
||||
EscapeApos
|
||||
::= "''"
|
||||
QuotChar ::= (Char - ["&])+
|
||||
AposChar ::= (Char - [&'])+
|
||||
ElementContentChar
|
||||
::= (Char - [&<{}])+
|
||||
QuotAttrContentChar
|
||||
::= (Char - ["&<{}])+
|
||||
AposAttrContentChar
|
||||
::= (Char - [&'<{}])+
|
||||
PITarget ::= NCName - ( ( 'X' | 'x' ) ( 'M' | 'm' ) ( 'L' | 'l' ) )
|
||||
Name ::= NameStartChar NameChar*
|
||||
NameStartChar
|
||||
::= [:A-Z_a-z#x00C0-#x00D6#x00D8-#x00F6#x00F8-#x02FF#x0370-#x037D#x037F-#x1FFF#x200C-#x200D#x2070-#x218F#x2C00-#x2FEF#x3001-#xD7FF#xF900-#xFDCF#xFDF0-#xFFFD#x10000-#xEFFFF]
|
||||
NameChar ::= NameStartChar
|
||||
| [-.0-9#x00B7#x0300-#x036F#x203F-#x2040]
|
||||
NCName ::= Name - ( Char* ':' Char* )
|
||||
Char ::= [#x0009#x000A#x000D#x0020-#xD7FF#xE000-#xFFFD#x10000-#x10FFFF]
|
||||
QName ::= PrefixedName
|
||||
| UnprefixedName
|
||||
PrefixedName
|
||||
::= Prefix ':' LocalPart
|
||||
UnprefixedName
|
||||
::= LocalPart
|
||||
Prefix ::= NCName
|
||||
LocalPart
|
||||
::= NCName
|
||||
S ::= [#x0009#x000A#x000D#x0020]+
|
||||
CharRef ::= '&#' [0-9]+ ';'
|
||||
| '&#x' [0-9A-Fa-f]+ ';'
|
||||
Digits ::= [0-9]+
|
||||
CommentContents
|
||||
::= ( Char+ - ( Char* ( '(:' | ':)' ) Char* ) ) & '('
|
||||
| ( Char+ - ( Char* ( '(:' | ':)' ) Char* ) ) & $
|
||||
| ( ( Char+ - ( Char* ( '(:' | ':)' | ' @' ) Char* ) ) - ( Char* '(' ) ) & ':'
|
||||
DocTag ::= ' @' NCName?
|
||||
DocCommentContents
|
||||
::= ( ( Char+ - ( Char* ( '(:' | ':)' | ' @' ) Char* ) ) - ( Char* '(' ) ) & ':'
|
||||
| ( Char+ - ( Char* ( '(:' | ':)' | ' @' ) Char* ) ) & '('
|
||||
| ( Char+ - ( Char* ( '(:' | ':)' | ' @' ) Char* ) ) & ' @'
|
||||
| ( Char+ - ( Char* ( '(:' | ':)' | ' @') Char* ) ) & $
|
||||
EOF ::= $
|
||||
NonNCNameChar
|
||||
::= $
|
||||
| ':'
|
||||
| ( Char - NameChar )
|
||||
DelimitingChar
|
||||
::= NonNCNameChar
|
||||
| '-'
|
||||
| '.'
|
||||
DelimitingChar
|
||||
\\ IntegerLiteral DecimalLiteral DoubleLiteral
|
||||
NonNCNameChar
|
||||
\\ EQName^Token QName NCName^Token 'NaN' 'after' 'all'
|
||||
'allowing' 'ancestor' 'ancestor-or-self' 'and' 'any'
|
||||
'append' 'array' 'as' 'ascending' 'at' 'attribute'
|
||||
'base-uri' 'before' 'boundary-space' 'break' 'by' 'case'
|
||||
'cast' 'castable' 'catch' 'check' 'child' 'collation'
|
||||
'collection' 'comment' 'constraint' 'construction'
|
||||
'contains' 'content' 'context' 'continue' 'copy'
|
||||
'copy-namespaces' 'count' 'decimal-format'
|
||||
'decimal-separator' 'declare' 'default' 'delete'
|
||||
'descendant' 'descendant-or-self' 'descending'
|
||||
'diacritics' 'different' 'digit' 'distance' 'div'
|
||||
'document' 'document-node' 'element' 'else' 'empty'
|
||||
'empty-sequence' 'encoding' 'end' 'entire' 'eq' 'every'
|
||||
'exactly' 'except' 'exit' 'external' 'first' 'following'
|
||||
'following-sibling' 'for' 'foreach' 'foreign' 'from'
|
||||
'ft-option' 'ftand' 'ftnot' 'ftor' 'function' 'ge'
|
||||
'greatest' 'group' 'grouping-separator' 'gt' 'idiv' 'if'
|
||||
'import' 'in' 'index' 'infinity' 'inherit' 'insensitive'
|
||||
'insert' 'instance' 'integrity' 'intersect' 'into' 'is'
|
||||
'item' 'json' 'json-item' 'key' 'language' 'last' 'lax'
|
||||
'le' 'least' 'let' 'levels' 'loop' 'lowercase' 'lt'
|
||||
'minus-sign' 'mod' 'modify' 'module' 'most' 'namespace'
|
||||
'namespace-node' 'ne' 'next' 'no' 'no-inherit'
|
||||
'no-preserve' 'node' 'nodes' 'not' 'object' 'occurs'
|
||||
'of' 'on' 'only' 'option' 'or' 'order' 'ordered'
|
||||
'ordering' 'paragraph' 'paragraphs' 'parent'
|
||||
'pattern-separator' 'per-mille' 'percent' 'phrase'
|
||||
'position' 'preceding' 'preceding-sibling' 'preserve'
|
||||
'previous' 'processing-instruction' 'relationship'
|
||||
'rename' 'replace' 'return' 'returning' 'revalidation'
|
||||
'same' 'satisfies' 'schema' 'schema-attribute'
|
||||
'schema-element' 'score' 'self' 'sensitive' 'sentence'
|
||||
'sentences' 'skip' 'sliding' 'some' 'stable' 'start'
|
||||
'stemming' 'stop' 'strict' 'strip' 'structured-item'
|
||||
'switch' 'text' 'then' 'thesaurus' 'times' 'to'
|
||||
'treat' 'try' 'tumbling' 'type' 'typeswitch' 'union'
|
||||
'unique' 'unordered' 'updating' 'uppercase' 'using'
|
||||
'validate' 'value' 'variable' 'version' 'weight'
|
||||
'when' 'where' 'while' 'wildcards' 'window' 'with'
|
||||
'without' 'word' 'words' 'xquery' 'zero-digit'
|
||||
'*' << Wildcard '*'^OccurrenceIndicator
|
||||
EQName^Token
|
||||
<< 'after' 'ancestor' 'ancestor-or-self' 'and' 'as' 'ascending' 'attribute' 'before' 'case' 'cast' 'castable' 'child' 'collation' 'comment' 'copy' 'count' 'declare' 'default' 'delete' 'descendant' 'descendant-or-self' 'descending' 'div' 'document' 'document-node' 'element' 'else' 'empty' 'empty-sequence' 'end' 'eq' 'every' 'except' 'first' 'following' 'following-sibling' 'for' 'function' 'ge' 'group' 'gt' 'idiv' 'if' 'import' 'insert' 'instance' 'intersect' 'into' 'is' 'item' 'last' 'le' 'let' 'lt' 'mod' 'modify' 'module' 'namespace' 'namespace-node' 'ne' 'node' 'only' 'or' 'order' 'ordered' 'parent' 'preceding' 'preceding-sibling' 'processing-instruction' 'rename' 'replace' 'return' 'satisfies' 'schema-attribute' 'schema-element' 'self' 'some' 'stable' 'start' 'switch' 'text' 'to' 'treat' 'try' 'typeswitch' 'union' 'unordered' 'validate' 'where' 'with' 'xquery' 'contains' 'paragraphs' 'sentences' 'times' 'words' 'by' 'collection' 'allowing' 'at' 'base-uri' 'boundary-space' 'break' 'catch' 'construction' 'context' 'continue' 'copy-namespaces' 'decimal-format' 'encoding' 'exit' 'external' 'ft-option' 'in' 'index' 'integrity' 'lax' 'nodes' 'option' 'ordering' 'revalidation' 'schema' 'score' 'sliding' 'strict' 'tumbling' 'type' 'updating' 'value' 'variable' 'version' 'while' 'constraint' 'loop' 'returning' 'append' 'array' 'json-item' 'object' 'structured-item'
|
||||
NCName^Token
|
||||
<< 'after' 'and' 'as' 'ascending' 'before' 'case' 'cast' 'castable' 'collation' 'count' 'default' 'descending' 'div' 'else' 'empty' 'end' 'eq' 'except' 'for' 'ge' 'group' 'gt' 'idiv' 'instance' 'intersect' 'into' 'is' 'le' 'let' 'lt' 'mod' 'modify' 'ne' 'only' 'or' 'order' 'return' 'satisfies' 'stable' 'start' 'to' 'treat' 'union' 'where' 'with' 'contains' 'paragraphs' 'sentences' 'times' 'words' 'by' 'ancestor' 'ancestor-or-self' 'attribute' 'child' 'comment' 'copy' 'declare' 'delete' 'descendant' 'descendant-or-self' 'document' 'document-node' 'element' 'empty-sequence' 'every' 'first' 'following' 'following-sibling' 'function' 'if' 'import' 'insert' 'item' 'last' 'module' 'namespace' 'namespace-node' 'node' 'ordered' 'parent' 'preceding' 'preceding-sibling' 'processing-instruction' 'rename' 'replace' 'schema-attribute' 'schema-element' 'self' 'some' 'switch' 'text' 'try' 'typeswitch' 'unordered' 'validate' 'variable' 'xquery' 'allowing' 'at' 'base-uri' 'boundary-space' 'break' 'catch' 'construction' 'context' 'continue' 'copy-namespaces' 'decimal-format' 'encoding' 'exit' 'external' 'ft-option' 'in' 'index' 'integrity' 'lax' 'nodes' 'option' 'ordering' 'revalidation' 'schema' 'score' 'sliding' 'strict' 'tumbling' 'type' 'updating' 'value' 'version' 'while' 'constraint' 'loop' 'returning'
|
||||
|
||||
<?ENCORE?>
|
||||
|
||||
<?xqlint
|
||||
});
|
||||
?>
|
||||
4202
lib/ace/mode/xquery/XQueryTokenizer.js
Normal file
4202
lib/ace/mode/xquery/XQueryTokenizer.js
Normal file
File diff suppressed because it is too large
Load diff
|
|
@ -1,10 +1,9 @@
|
|||
<?xqlint
|
||||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
|
|
@ -15,7 +14,7 @@
|
|||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
|
|
@ -29,27 +28,49 @@
|
|||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
var CommentParser = exports.CommentParser = function CommentParser(string, parsingEventHandler)
|
||||
{
|
||||
init(string, parsingEventHandler);
|
||||
?>
|
||||
define(function(require, exports, module) {
|
||||
|
||||
var SemanticHighlighter = exports.SemanticHighlighter = function(ast) {
|
||||
|
||||
this.tokens = {};
|
||||
|
||||
Comments ::= (S^WS | Comment)* EOF
|
||||
this.getTokens = function() {
|
||||
this.visit(ast);
|
||||
return this.tokens;
|
||||
};
|
||||
|
||||
Comment ::= '(:' ( CommentContents | Comment )* ':)'
|
||||
this.EQName = this.NCName = function(node)
|
||||
{
|
||||
var row = node.pos.sl;
|
||||
this.tokens[row] = this.tokens[row] === undefined ? [] : this.tokens[row];
|
||||
node.pos.type = "support.function";
|
||||
this.tokens[row].push(node.pos);
|
||||
return true;
|
||||
};
|
||||
|
||||
<?TOKENS?>
|
||||
S ::= [#x0009#x000A#x000D#x0020]+
|
||||
Char ::= [#x0009#x000A#x000D#x0020-#xD7FF#xE000-#xFFFD#x10000-#x10FFFF]
|
||||
CommentContents
|
||||
::= ( ( Char+ - ( Char* ( '(:' | ':)' ) Char* ) ) - ( Char* '(' ) ) &':'
|
||||
| ( Char+ - ( Char* ( '(:' | ':)' ) Char* ) ) &'('
|
||||
this.visit = function(node) {
|
||||
var name = node.name;
|
||||
var skip = false;
|
||||
|
||||
EOF ::= $
|
||||
<?ENCORE?>
|
||||
if (typeof this[name] === "function") skip = this[name](node) === true ? true : false;
|
||||
|
||||
<?xqlint
|
||||
if (!skip) {
|
||||
this.visitChildren(node);
|
||||
}
|
||||
};
|
||||
|
||||
this.visitChildren = function(node, handler) {
|
||||
for (var i = 0; i < node.children.length; i++) {
|
||||
var child = node.children[i];
|
||||
if (handler !== undefined && typeof handler[child.name] === "function") {
|
||||
handler[child.name](child);
|
||||
}
|
||||
else {
|
||||
this.visit(child);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
});
|
||||
?>
|
||||
|
|
@ -1,422 +0,0 @@
|
|||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
|
||||
define(function(require, exports, module){
|
||||
|
||||
var CommentParser = require("../CommentParser").CommentParser;
|
||||
var CommentHandler = require("../CommentHandler").CommentHandler;
|
||||
|
||||
var SyntaxHighlighter = exports.SyntaxHighlighter = function(tree)
|
||||
{
|
||||
var keywords = ['after', 'ancestor', 'ancestor-or-self', 'and', 'as', 'ascending', 'attribute', 'before', 'case', 'cast', 'castable', 'child', 'collation', 'comment', 'copy', 'count', 'declare', 'default', 'delete', 'descendant', 'descendant-or-self', 'descending', 'div', 'document', 'document-node', 'element', 'else', 'empty', 'empty-sequence', 'end', 'eq', 'every', 'except', 'first', 'following', 'following-sibling', 'for', 'function', 'ge', 'group', 'gt', 'idiv', 'if', 'then', 'import', 'insert', 'instance', 'intersect', 'into', 'is', 'item', 'last', 'le', 'let', 'lt', 'mod', 'modify', 'module', 'namespace', 'namespace-node', 'ne', 'node', 'only', 'or', 'order', 'ordered', 'parent', 'preceding', 'preceding-sibling', 'processing-instruction', 'rename', 'replace', 'return', 'satisfies', 'schema-attribute', 'schema-element', 'self', 'some', 'stable', 'start', 'switch', 'text', 'to', 'treat', 'try', 'typeswitch', 'union', 'unordered', 'validate', 'where', 'with', 'xquery', 'contains', 'paragraphs', 'sentences', 'times', 'words', 'by', 'collection', 'allowing', 'at', 'base-uri', 'boundary-space', 'break', 'catch', 'construction', 'context', 'continue', 'copy-namespaces', 'decimal-format', 'encoding', 'exit', 'external', 'ft-option', 'in', 'index', 'integrity', 'lax', 'nodes', 'option', 'ordering', 'revalidation', 'schema', 'score', 'sliding', 'strict', 'tumbling', 'type', 'updating', 'value', 'variable', 'version', 'while', 'constraint', 'loop', 'returning', 'append', 'array', 'json-item', 'object', 'structured-item', 'when', 'next', 'previous', 'window'];
|
||||
|
||||
var docTags = "([A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[a-zA-Z]{2,6})|(@[\\w\\d_]+)|(TODO)";
|
||||
|
||||
var states = ["cdata", "comment", "tag", "comment.doc"];
|
||||
var info = { lines: [ [] ], states: [] };
|
||||
|
||||
this.getTokens = function(recover) {
|
||||
this.visit(tree);
|
||||
if(recover === true) {
|
||||
var computed = "";
|
||||
for(var i in info.lines) {
|
||||
for(var j in info.lines[i]) {
|
||||
var token = info.lines[i][j];
|
||||
computed += token.value;
|
||||
}
|
||||
if(i < info.lines.length - 1)
|
||||
computed += "\n";
|
||||
}
|
||||
this.addTokens(source.substring(computed.length), "text");
|
||||
}
|
||||
return info;
|
||||
};
|
||||
|
||||
this.addTokens = function(value, type)
|
||||
{
|
||||
var tokens = value.split("\n");
|
||||
var lastState = "start";
|
||||
for(var i in tokens)
|
||||
{
|
||||
if(i > 0) {
|
||||
info.lines.push([]);
|
||||
info.states.push(lastState);
|
||||
}
|
||||
var value = tokens[i];
|
||||
var linesLength = info.lines.length - 1;
|
||||
var linesIdx = info.lines[linesLength];
|
||||
linesIdx.push({ value: value, type: type });
|
||||
lastState = states.indexOf(type) != -1 ? type : "start";
|
||||
}
|
||||
};
|
||||
|
||||
this.getNodeValue = function(node) {
|
||||
var value = "";
|
||||
if(node.value === undefined) {
|
||||
for(var i in node.children)
|
||||
{
|
||||
var child = node.children[i];
|
||||
value += this.getNodeValue(child);
|
||||
}
|
||||
} else {
|
||||
value += node.value;
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
|
||||
this.DirPIConstructor = function(node)
|
||||
{
|
||||
var value = this.getNodeValue(node);
|
||||
this.addTokens(value, "xml_pe");
|
||||
return true;
|
||||
};
|
||||
|
||||
this.DirElemConstructor = function(node)
|
||||
{
|
||||
for(var i in node.children)
|
||||
{
|
||||
var child = node.children[i];
|
||||
if(child.name === "TOKEN" || child.name === "QName") {
|
||||
var value = this.getNodeValue(child);
|
||||
this.addTokens(value, "meta.tag");
|
||||
} else {
|
||||
this.visit(child);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
this.DirAttributeList = function(node)
|
||||
{
|
||||
for(var i in node.children)
|
||||
{
|
||||
var child = node.children[i];
|
||||
if(child.name === "QName") {
|
||||
var value = this.getNodeValue(child);
|
||||
this.addTokens(value, "meta.tag");
|
||||
} else {
|
||||
this.visit(child);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
this.DirAttributeValue = function(node)
|
||||
{
|
||||
for(var i in node.children)
|
||||
{
|
||||
var child = node.children[i];
|
||||
if(child.name === "TOKEN") {
|
||||
var value = this.getNodeValue(child);
|
||||
this.addTokens(value, "string");
|
||||
} else {
|
||||
this.visit(child);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
this.QuotAttrContentChar = function(node)
|
||||
{
|
||||
var value = this.getNodeValue(node);
|
||||
this.addTokens(value, "string");
|
||||
return true;
|
||||
};
|
||||
|
||||
//this.EQName = function(node)
|
||||
//{
|
||||
// var value = source.substring(node.begin, node.end);
|
||||
// this.addTokens(value, "support.function");
|
||||
// return true;
|
||||
//};
|
||||
|
||||
//this.FunctionName = function(node)
|
||||
//{
|
||||
// for(var i in node.children) {
|
||||
// var child = node.children[i];
|
||||
// if(child.children[0] && (child.name === "EQName" || child.name === "TOKEN")) {
|
||||
// var value = this.getNodeValue(child.children[0]);
|
||||
// this.addTokens(value, "support.function");
|
||||
// } else {
|
||||
// this.visit(child);
|
||||
// }
|
||||
// }
|
||||
// return true;
|
||||
//};
|
||||
|
||||
// StringConcatExpr, AdditiveExpr, MultiplicativeExpr, UnaryExpr and GeneralComp
// previously carried five byte-identical bodies: operator TOKEN children are
// emitted as "keyword.operator", everything else is visited recursively.
// One shared implementation replaces the copies.
//
// `visit` dispatches via `this[node.name](node)`, so `this` is bound to the
// highlighter instance when any of these run.
var visitExprWithOperatorTokens = function(node) {
    for (var i = 0; i < node.children.length; i++) {
        var child = node.children[i];
        if (child.name === "TOKEN") {
            // The operator terminal itself (e.g. "||", "+", "*", "=").
            this.addTokens(this.getNodeValue(child), "keyword.operator");
        } else {
            this.visit(child);
        }
    }
    // true = subtree fully tokenized; `visit` must not descend again.
    return true;
};

this.StringConcatExpr = visitExprWithOperatorTokens;
this.AdditiveExpr = visitExprWithOperatorTokens;
this.MultiplicativeExpr = visitExprWithOperatorTokens;
this.UnaryExpr = visitExprWithOperatorTokens;
this.GeneralComp = visitExprWithOperatorTokens;
|
||||
|
||||
// Numeric literal node: every non-TEXT child is emitted as a "constant"
// token; TEXT children are visited normally so embedded whitespace is
// tokenized by its own handler.
this.NumericLiteral = function(node) {
    for (var i = 0; i < node.children.length; i++) {
        var child = node.children[i];
        if (child.name !== "TEXT") {
            this.addTokens(this.getNodeValue(child), "constant");
        } else {
            this.visit(child);
        }
    }
    // Subtree fully handled.
    return true;
};
|
||||
|
||||
// XML direct comment constructor (<!-- ... -->): every non-TEXT child is
// emitted as a "comment" token; TEXT children are visited normally.
this.DirCommentConstructor = function(node) {
    for (var i = 0; i < node.children.length; i++) {
        var child = node.children[i];
        if (child.name !== "TEXT") {
            this.addTokens(this.getNodeValue(child), "comment");
        } else {
            this.visit(child);
        }
    }
    // Subtree fully handled.
    return true;
};
|
||||
|
||||
// CDATA section (<![CDATA[ ... ]]>): the whole node is one "support.type" run.
this.CDataSection = function(node) {
    var text = this.getNodeValue(node);
    this.addTokens(text, "support.type");
    return true;
};
|
||||
|
||||
// XQuery Comment nodes: emit nothing and return true so `visit` does not
// descend into the children. Comment text appears to be tokenized instead by
// the WS handler's comment re-parse — NOTE(review): confirm.
this.Comment = function(node)
{
    return true;
};
|
||||
|
||||
// URI literal: rendered exactly like a string literal.
this.URILiteral = function(node) {
    var uri = this.getNodeValue(node);
    this.addTokens(uri, "string");
    return true;
};
|
||||
|
||||
// String literal: the full node value becomes a "string" token run.
this.StringLiteral = function(node) {
    var text = this.getNodeValue(node);
    this.addTokens(text, "string");
    return true;
};
|
||||
|
||||
// this.NCName = function(node)
|
||||
// {
|
||||
// inName = true;
|
||||
// for(var i in node.children)
|
||||
// {
|
||||
// var child = node.children[i];
|
||||
// this.visit(child);
|
||||
// }
|
||||
// inName = false;
|
||||
// return true;
|
||||
// };
|
||||
// Unprefixed name (NCName): rendered as a function reference.
this.NCName = function(node) {
    this.addTokens(this.getNodeValue(node), "support.function");
    return true;
};
|
||||
|
||||
// (Possibly qualified) EQName: rendered as a function reference.
// Note: `visit` special-cases an EQName preceded by a "$" TOKEN and emits
// "$name" as a single "variable" token without calling this handler, so
// this only sees non-variable names. The previously commented-out traversal
// variant has been removed as dead code.
this.EQName = function(node)
{
    var value = this.getNodeValue(node);
    this.addTokens(value, "support.function");
    return true;
};
|
||||
|
||||
// Terminal token: known keywords get the "keyword" scope, the "$" sigil is
// swallowed here (the `visit` method emits it fused with the following
// variable name), and anything else is plain text.
this.TOKEN = function(node)
{
    var value = this.getNodeValue(node);
    if (value === "$")
        return true;
    var scope = keywords.indexOf(value) > -1 ? "keyword" : "text";
    this.addTokens(value, scope);
    return true;
};
|
||||
|
||||
// Whitespace handler. XQuery "ignorable" whitespace may contain (: ... :)
// comments, so the raw text is re-parsed with the comment grammar and each
// resulting piece is tokenized individually.
this.WS = function(node) {
    var value = node.value;
    // CommentHandler / CommentParser come from the surrounding module scope.
    var h = new CommentHandler(value);
    var parser = new CommentParser(value, h);
    parser.parse_Comments();
    var ast = h.getParseTree();
    var children = ast.children;
    for(var i in children)
    {
        var child = children[i];
        // "(:~ ... :)" — documentation comment: split into doc text and tags.
        if(child.name === "Comment" && child.children[1] && child.children[1].value.substring(0, 1) === "~")
        {
            var remains = this.getNodeValue(child);
            while(remains.length > 0) {
                // docTags (module scope) locates the next doc tag, if any.
                var match = remains.match(docTags);
                if(match !== null) {
                    var str = match[0];
                    var index = match.index;
                    if(index > 0) {
                        // Plain doc text preceding the tag.
                        this.addTokens(remains.substring(0, index), "comment.doc");
                        remains = remains.substring(index);
                    }
                    // The tag itself.
                    this.addTokens(remains.substring(0, str.length), "comment.doc.tag");
                    remains = remains.substring(str.length);
                } else {
                    // No further tags: the rest is doc text.
                    this.addTokens(remains, "comment.doc");
                    break;
                }
            }
        }
        else if(child.name === "Comment")
        {
            // Ordinary "(: ... :)" comment.
            this.addTokens(this.getNodeValue(child), "comment");
        } else if(child.name === "S") {
            // Pure whitespace run.
            this.addTokens(child.value, "text");
        }
    }
    return true;
};
|
||||
|
||||
// Fallback for node kinds without a dedicated handler: leaf nodes are
// emitted as plain text; composite nodes report "not handled" (false) so
// `visit` walks their children instead.
this.EverythingElse = function(node) {
    if (node.children.length !== 0)
        return false;
    this.addTokens(this.getNodeValue(node), "text");
    return true;
};
|
||||
|
||||
// Generic traversal entry point. Dispatches to the handler named after the
// node kind when one exists; handlers return true when they have fully
// tokenized the subtree. Unknown kinds fall back to EverythingElse.
this.visit = function(node){
    var name = node.name;
    var skip = false;

    if(typeof this[name] === "function")
        skip = this[name](node) === true ? true : false ;
    else
        skip = this.EverythingElse(node) === true ? true : false;

    if(!skip && typeof node.children === "object")
    {
        // Special case: a "$" TOKEN followed by a name child is emitted as a
        // single "variable" token ("$name") instead of visiting the name node.
        var isVarEQName = false;
        for(var i = 0; i < node.children.length; i++)
        {
            var child = node.children[i];
            var value = this.getNodeValue(child);
            if(child.name === "TOKEN" && value === "$")
            {
                isVarEQName = true;
            } else if(isVarEQName) {
                this.addTokens("$" + value, "variable");
                isVarEQName = false;
            } else {
                this.visit(child);
            }
        }
    }
};
|
||||
};
|
||||
|
||||
});
|
||||
|
|
@ -1,249 +0,0 @@
|
|||
/* ***** BEGIN LICENSE BLOCK *****
|
||||
* Distributed under the BSD license:
|
||||
*
|
||||
* Copyright (c) 2010, Ajax.org B.V.
|
||||
* All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
* * Redistributions of source code must retain the above copyright
|
||||
* notice, this list of conditions and the following disclaimer.
|
||||
* * Redistributions in binary form must reproduce the above copyright
|
||||
* notice, this list of conditions and the following disclaimer in the
|
||||
* documentation and/or other materials provided with the distribution.
|
||||
* * Neither the name of Ajax.org B.V. nor the
|
||||
* names of its contributors may be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
|
||||
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* ***** END LICENSE BLOCK ***** */
|
||||
// Static (regex-driven) highlight rules for XQuery, built on Ace's
// TextHighlightRules state machine: each state is a list of {token, regex,
// next} rules tried in order; the first matching regex wins.
define(function(require, exports, module) {
"use strict";

var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;

var XQueryHighlightRules = function() {

    // XQuery keywords; names matching this list (case-insensitively) are
    // scoped "keyword", all other EQNames "support.function".
    var keywords = "after|ancestor|ancestor-or-self|and|as|ascending|attribute|before|case|cast|castable|child|collation|comment|copy|count|declare|default|delete|descendant|descendant-or-self|descending|div|document|document-node|element|else|empty|empty-sequence|end|eq|every|except|first|following|following-sibling|for|function|ge|group|gt|idiv|if|import|insert|instance|intersect|into|is|item|last|le|let|lt|mod|modify|module|namespace|namespace-node|ne|node|only|or|order|ordered|parent|preceding|preceding-sibling|processing-instruction|rename|replace|return|satisfies|schema-attribute|schema-element|self|some|stable|start|switch|text|to|treat|try|typeswitch|union|unordered|validate|where|with|xquery|contains|paragraphs|sentences|times|words|by|collectionreturn|variable|version|option|when|encoding|toswitch|catch|tumbling|sliding|window|at|using|stemming|collection|schema|while|on|nodes|index|external|then|in|updating|value|of|containsbreak|loop|continue|exit|returning|append|json|position".split("|");

    // Character classes for XML names (NCName start / continuation chars).
    var nameStartChar = "[_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02ff\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]";
    var nameChar = "[-._A-Za-z0-9\u00B7\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02ff\u0300-\u037D\u037F-\u1FFF\u200C\u200D\u203f\u2040\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]";
    // NCName = unprefixed name; QName = optional "prefix:" + NCName;
    // EQName = URI-qualified name Q{uri}local or a plain QName.
    var ncname = nameStartChar + nameChar + "*";
    var qname = "(?:" + ncname + ":)?" + ncname;
    var eqname = "(?:(?:Q{.*}" + ncname + ")|(?:" + qname + "))";

    // regexp must not have capturing parentheses
    // regexps are ordered -> the first match is used
    this.$rules = {
        // Main XQuery expression context.
        start: [{
            token: "support.type",
            regex: "<\\!\\[CDATA\\[",
            next: "cdata"
        }, {
            token: "xml-pe",
            regex: "<\\?",
            next: "pi"
        }, {
            token: "comment",
            regex: "<\\!--",
            next: "xmlcomment"
        }, {
            // "(:~" opens a documentation comment, plain "(:" a normal one;
            // the doc rule must come first to win the longer match.
            token: "comment.doc",
            regex: "\\(:~",
            next: "comment.doc"
        },
        {
            token: "comment",
            regex: "\\(:",
            next: "comment"
        },
        {
            token: ["text", "meta.tag"], // opening tag
            regex: "(<\\/?)(" + qname + ")",
            next: "tag"
        }, {
            token: "constant", // number
            regex: "[+-]?\\d+(?:(?:\\.\\d*)?(?:[eE][+-]?\\d+)?)?\\b"
        }, {
            token: "variable", // variable
            regex: "\\$" + eqname
        }, {
            token: "string",
            regex: "'",
            next: "apos-string"
        }, {
            token: "string",
            regex: '"',
            next: "quot-string"
        }, {
            token: "text",
            regex: "\\s+"
        }, {
            // Any other name: keyword if in the list above, function otherwise.
            token: function(match) {
                if(keywords.indexOf(match.toLowerCase()) !== -1) {
                    return "keyword"
                } else {
                    return "support.function"
                }
            },
            regex: eqname
        }, {
            token: "keyword.operator",
            regex: "\\*|:=|=|<|>|\\-|\\+"
        }, {
            token: "lparen",
            regex: "[[({]"
        }, {
            token: "rparen",
            regex: "[\\])}]"
        }],

        // Inside an XML tag (between "<name" and ">").
        tag: [{
            token: "text",
            regex: "\\/?>",
            next: "start"
        }, {
            token: ["text", "meta.tag"],
            regex: "(<\\/)(" + qname + ")",
            next: "start"
        }, {
            token: "meta.tag",
            regex: qname
        }, {
            token: "text",
            regex: "\\s+"
        }, {
            token: "string",
            regex: "'",
            next: "apos-attr"
        }, {
            token: "string",
            regex: '"',
            next: "quot-attr"
        }, {
            token: "string",
            regex: "'.*?'"
        }, {
            token: "text",
            regex: "="
        }],

        // XML processing instruction "<? ... ?>", possibly multi-line.
        pi: [{
            token: "xml-pe",
            regex: ".*\\?>",
            next: "start"
        },
        {
            token: "xml-pe",
            regex: ".*"
        }],

        // CDATA body, terminated by "]]>".
        cdata: [{
            token: "support.type",
            regex: "\\]\\]>",
            next: "start"
        }, {
            token: "support.type",
            regex: "\\s+"
        }, {
            token: "support.type",
            regex: "(?:[^\\]]|\\](?!\\]>))+"
        }],

        // Documentation comment "(:~ ... :)" with @tag / email / TODO markup.
        "comment.doc": [
            {
                token: "comment.doc",
                regex: ":\\)",
                next: "start"
            }, {
                token: "comment.doc.tag",
                regex: "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[a-zA-Z]{2,6}"
            }, {
                token : "comment.doc.tag",
                regex : "@[\\w\\d_]+"
            }, {
                token : "comment.doc",
                regex : "\\s+"
            }, {
                token : "comment.doc.tag",
                regex : "TODO"
            }, {
                token : "comment.doc",
                regex : "[^@:^\\s]+"
            }, {
                token : "comment.doc",
                regex : "."
            }
        ],

        // Ordinary "(: ... :)" comment, terminated by ":)".
        comment: [{
            token: "comment",
            regex: ".*:\\)",
            next: "start"
        }, {
            token: "comment",
            regex: ".+"
        }],

        // XML comment "<!-- ... -->".
        xmlcomment: [{
            token: "comment",
            regex: ".*?-->",
            next: "start"
        }, {
            token: "comment",
            regex: ".+"
        }],

        // Single-quoted string in expression context.
        "apos-string": [{
            token: "string",
            regex: ".*'",
            next: "start"
        }, {
            token: "string",
            regex: ".*"
        }],

        // Double-quoted string in expression context.
        "quot-string": [{
            token: "string",
            regex: '.*"',
            next: "start"
        }, {
            token: "string",
            regex: ".*"
        }],

        // Single-quoted attribute value — returns to the "tag" state.
        "apos-attr": [{
            token: "string",
            regex: ".*'",
            next: "tag"
        }, {
            token: "string",
            regex: ".*"
        }],

        // Double-quoted attribute value — returns to the "tag" state.
        "quot-attr": [{
            token: "string",
            regex: '.*"',
            next: "tag"
        }, {
            token: "string",
            regex: ".*"
        }]
    };
};

oop.inherits(XQueryHighlightRules, TextHighlightRules);

exports.XQueryHighlightRules = XQueryHighlightRules;
});
|
||||
|
|
@ -35,7 +35,7 @@ var oop = require("../lib/oop");
|
|||
var Mirror = require("../worker/mirror").Mirror;
|
||||
var JSONParseTreeHandler = require("./xquery/JSONParseTreeHandler").JSONParseTreeHandler;
|
||||
var XQueryParser = require("./xquery/XQueryParser").XQueryParser;
|
||||
var SyntaxHighlighter = require("./xquery/visitors/SyntaxHighlighter").SyntaxHighlighter;
|
||||
var SemanticHighlighter = require("./xquery/visitors/SemanticHighlighter").SemanticHighlighter;
|
||||
|
||||
var XQueryWorker = exports.XQueryWorker = function(sender) {
|
||||
Mirror.call(this, sender);
|
||||
|
|
@ -55,9 +55,9 @@ oop.inherits(XQueryWorker, Mirror);
|
|||
parser.parse_XQuery();
|
||||
this.sender.emit("ok");
|
||||
var ast = h.getParseTree();
|
||||
var highlighter = new SyntaxHighlighter(ast);
|
||||
var highlighter = new SemanticHighlighter(ast, value);
|
||||
var tokens = highlighter.getTokens();
|
||||
this.sender.emit("highlight", tokens);
|
||||
this.sender.emit("highlight", { tokens: tokens, lines: highlighter.lines });
|
||||
} catch(e) {
|
||||
if(e instanceof parser.ParseException) {
|
||||
var prefix = value.substring(0, e.getBegin());
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue