Update XQuery Semantic highlighter to work with the new background highlighter API.
parent 972a33a518
commit d6ebfae8b8
6 changed files with 50 additions and 35 deletions
@@ -130,7 +130,7 @@ oop.inherits(Mode, TextMode);
     worker.attachToDocument(session.getDocument());
 
     worker.on("start", function(e) {
-        console.log("start");
+        //console.log("start");
         that.$deltas = [];
     });
 
@@ -146,7 +146,8 @@ oop.inherits(Mode, TextMode);
     var firstRow = 0;
     var lastRow = session.getLength() - 1;
 
-    var lines = tokens.data;
+    var lines = tokens.data.lines;
+    var states = tokens.data.states;
 
     for(var i=0; i < that.$deltas.length; i++)
     {
@@ -157,6 +158,7 @@ oop.inherits(Mode, TextMode);
         var newLineCount = delta.lines.length;
         for (var i = 0; i < newLineCount; i++) {
             lines.splice(delta.range.start.row + i, 0, undefined);
+            states.splice(delta.range.start.row + i, 0, undefined);
         }
     }
     else if (delta.action === "insertText")
@@ -164,23 +166,30 @@ oop.inherits(Mode, TextMode);
         if (session.getDocument().isNewLine(delta.text))
         {
             lines.splice(delta.range.end.row, 0, undefined);
+            states.splice(delta.range.end.row, 0, undefined);
         } else {
             lines[delta.range.start.row] = undefined;
+            states[delta.range.start.row] = undefined;
         }
     } else if (delta.action === "removeLines") {
         var oldLineCount = delta.lines.length;
         lines.splice(delta.range.start.row, oldLineCount);
+        states.splice(delta.range.start.row, oldLineCount);
     } else if (delta.action === "removeText") {
         if (session.getDocument().isNewLine(delta.text))
         {
             lines[delta.range.start.row] = undefined;
             lines.splice(delta.range.end.row, 1);
+            states[delta.range.start.row] = undefined;
+            states.splice(delta.range.end.row, 1);
         } else {
             lines[delta.range.start.row] = undefined;
+            states[delta.range.start.row] = undefined;
         }
     }
 }
+session.bgTokenizer.lines = lines;
+session.bgTokenizer.states = states;
 session.bgTokenizer.fireUpdateEvent(firstRow, lastRow);
 });
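Aside, for readers tracing the change: the hunks above replay the document deltas queued by the worker against the cached per-row token lines and, now, a parallel states array, so only the rows touched by an edit lose their cache. A minimal self-contained sketch of that replay logic, assuming Ace's {action, range, lines, text} delta shape; the helper name applyDeltas and the injected isNewLine callback are illustrative, not part of the commit:

    // Replays edit deltas against cached per-row tokens and states, so only
    // the rows touched by an edit lose their cache (marked undefined).
    function applyDeltas(deltas, lines, states, isNewLine) {
        deltas.forEach(function(delta) {
            var start = delta.range.start.row;
            var end = delta.range.end.row;
            if (delta.action === "insertLines") {
                // New rows: open empty slots so later rows keep their cache.
                for (var i = 0; i < delta.lines.length; i++) {
                    lines.splice(start + i, 0, undefined);
                    states.splice(start + i, 0, undefined);
                }
            } else if (delta.action === "insertText") {
                if (isNewLine(delta.text)) {
                    // An inserted newline creates one fresh row.
                    lines.splice(end, 0, undefined);
                    states.splice(end, 0, undefined);
                } else {
                    // In-place edit: invalidate just this row.
                    lines[start] = undefined;
                    states[start] = undefined;
                }
            } else if (delta.action === "removeLines") {
                lines.splice(start, delta.lines.length);
                states.splice(start, delta.lines.length);
            } else if (delta.action === "removeText") {
                lines[start] = undefined;
                states[start] = undefined;
                if (isNewLine(delta.text)) {
                    // A removed newline merges two rows: drop the second slot.
                    lines.splice(end, 1);
                    states.splice(end, 1);
                }
            }
        });
    }

The commit inlines this logic in the worker handler and then assigns both arrays to session.bgTokenizer before firing a single update event, letting the background tokenizer lazily re-tokenize only the undefined rows.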

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/StringLexer.g 2012-05-24 17:09:24
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/StringLexer.g 2012-05-25 02:35:38
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XMLLexer.g 2012-05-24 17:09:26
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XMLLexer.g 2012-05-25 02:35:40
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryLexer.g 2012-05-24 17:09:22
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryLexer.g 2012-05-25 02:35:37
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryParser.g 2012-05-24 17:09:31
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryParser.g 2012-05-25 02:35:45
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
@@ -1539,7 +1539,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: p_NCName, p_StringLiteral
+// elements: p_StringLiteral, p_NCName
 // token labels:
 // rule labels: retval
 // token list labels:
@@ -1815,17 +1815,17 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: dnd, i, fto, nd, od, s
+// elements: nd, i, s, od, fto, dnd
 // token labels:
 // rule labels: retval
 // token list labels:
-// rule list labels: od, s, fto, nd, dnd, i
+// rule list labels: od, fto, s, nd, dnd, i
 if ( this.state.backtracking===0 ) {
 retval.tree = root_0;
 var stream_retval=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token retval",retval!=null?retval.tree:null);
 var stream_od=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token od",list_od);
-var stream_s=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token s",list_s);
 var stream_fto=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token fto",list_fto);
+var stream_s=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token s",list_s);
 var stream_nd=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token nd",list_nd);
 var stream_dnd=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token dnd",list_dnd);
 var stream_i=new org.antlr.runtime.tree.RewriteRuleSubtreeStream(this.adaptor,"token i",list_i);
@@ -3923,7 +3923,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: sp, ah, us
+// elements: ah, sp, us
 // token labels:
 // rule labels: retval, sp, us
 // token list labels:
@@ -4329,7 +4329,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: ah, nn, us
+// elements: us, nn, ah
 // token labels:
 // rule labels: retval, nn, us
 // token list labels:
@@ -5234,7 +5234,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: qn, vdv, td, vv
+// elements: vv, vdv, qn, td
 // token labels:
 // rule labels: qn, vv, retval, vdv, td
 // token list labels:
@@ -5848,7 +5848,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: qn, soe, st, pl
+// elements: st, pl, qn, soe
 // token labels:
 // rule labels: soe, qn, retval, pl, st
 // token list labels:
@@ -12879,7 +12879,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: PLUS, p_ValueExpr
+// elements: p_ValueExpr, PLUS
 // token labels:
 // rule labels: retval
 // token list labels:
@@ -17899,7 +17899,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: pm_DirElemContent, p_DirAttributeList
+// elements: p_DirAttributeList, pm_DirElemContent
 // token labels:
 // rule labels: retval
 // token list labels:
@@ -20812,7 +20812,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: r, k, l
+// elements: l, k, r
 // token labels: r, l, k
 // rule labels: retval
 // token list labels:
@@ -21111,7 +21111,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: RPAREN, BINARY, LPAREN
+// elements: LPAREN, RPAREN, BINARY
 // token labels:
 // rule labels: retval
 // token list labels:
@@ -21159,7 +21159,7 @@ org.antlr.lang.augmentObject(XQueryParser.prototype, {
 
 
 // AST REWRITE
-// elements: LPAREN, ITEM, RPAREN
+// elements: ITEM, RPAREN, LPAREN
 // token labels:
 // rule labels: retval
 // token list labels:
@@ -37547,7 +37547,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_33 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(MLS)))||(this.lc(XQS))||((this.lc(XQS))&&(this.lc(MLS)))||((this.lc(XQS))&&(this.lc(MLS)))||((this.lc(XQS))&&(this.lc(MLS))))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(MLS)))||((this.lc(XQS))&&(this.lc(MLS)))||((this.lc(XQS))&&(this.lc(MLS)))||((this.lc(XQS))&&(this.lc(MLS))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -37802,7 +37802,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_50 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(XQU)))||(this.lc(XQS)))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(XQU))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -37937,7 +37937,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_59 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(XQU)))||(this.lc(XQS)))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(XQU))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -37952,7 +37952,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_60 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(XQU)))||(this.lc(XQS)))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(XQU))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -37967,7 +37967,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_61 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(XQU)))||(this.lc(XQS)))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(XQU))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -37982,7 +37982,7 @@ org.antlr.lang.extend(XQueryParser.DFA44, org.antlr.runtime.DFA, {
 var index44_62 = input.index();
 input.rewind();
 s = -1;
-if ( ((((this.lc(XQS))&&(this.lc(XQU)))||(this.lc(XQS)))) ) {s = 67;}
+if ( (((this.lc(XQS))||((this.lc(XQS))&&(this.lc(XQU))))) ) {s = 67;}
 
 else if ( (true) ) {s = 71;}
 
@@ -39065,7 +39065,7 @@ org.antlr.lang.extend(XQueryParser.DFA119, org.antlr.runtime.DFA, {
 
 else if ( (this.synpred10_XQueryParser()) ) {s = 49;}
 
-else if ( (((this.synpred11_XQueryParser()&&(this.lc(MLS)))||this.synpred11_XQueryParser())) ) {s = 47;}
+else if ( ((this.synpred11_XQueryParser()||(this.synpred11_XQueryParser()&&(this.lc(MLS))))) ) {s = 47;}
 
 else if ( (((this.synpred12_XQueryParser()&&(this.lc(MLS)))||this.synpred12_XQueryParser())) ) {s = 48;}
 

@@ -43,7 +43,8 @@ define(function(require, exports, module){
 this.lines = [];
 
 this.getTokens = function() {
-    var result = [];
+    var resultLines = new Array(this.source.length);
+    var resultStates = new Array(this.source.length);
     var previousState = "start";
     for(i in this.source){
         var lineTokens = [];
@@ -91,20 +92,25 @@ define(function(require, exports, module){
     }
     //Check if the tokenized line is equal to the original one:
     if(sourceLine == tokenizedLine) {
-        result[i] = { tokens: lineTokens, state: nextState };
+        resultLines[i] = lineTokens;
+        resultStates[i] = nextState;
+        //result[i] = { line: sourceLine, startState: previousState, tokens: { tokens: lineTokens, state: nextState } };
     } else {
-        console.log("sourceLine: " + sourceLine);
-        console.log("tokenizedLine: " + tokenizedLine);
-        result[i] = { tokens: [ { type: "text", value: sourceLine } ], state: nextState };
+        //console.log("sourceLine: " + sourceLine);
+        //console.log("tokenizedLine: " + tokenizedLine);
+        resultLines[i] = [{ type: "text", value: sourceLine }];
+        resultStates[i] = nextState;
+        //result[i] = { tokens: [ { type: "text", value: sourceLine } ], state: nextState };
     }
 
-    if(result[i].tokens.length === 1 && result[i].tokens[0].type === "text" && this.tokenizer instanceof Object) {
-        var prev = result[i - 1] ? result[i - 1].state : "start";
-        result[i] = this.tokenizer.getLineTokens(result[i].tokens[0].value, prev);
+    if(resultLines[i].length === 1 && resultLines[i][0].type === "text" && this.tokenizer instanceof Object) {
+        var prev = resultStates[i - 1] ? resultStates[i - 1] : "start";
+        var result = this.tokenizer.getLineTokens(resultLines[i][0].value, prev);
+        resultLines[i] = result.tokens;
+        resultStates[i] = result.state;
     }
 }
-return result;
+return {states: resultStates, lines: resultLines};
 };
 
 this.addToken = function(start, stop, type) {
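The upshot of this last hunk: getTokens now returns parallel lines/states arrays (the shape consumed as tokens.data.lines and tokens.data.states in the first file) instead of a single array of {tokens, state} records, and any row the semantic highlighter could only emit as a plain "text" token is re-run through the regular tokenizer, threading the previous row's state forward. A small illustration of that fallback path, using a stand-in tokenizer; only the {tokens, state} contract of getLineTokens is assumed here, nothing else is from the commit:

    // Stand-in for the mode's regular tokenizer: any object exposing
    // getLineTokens(line, startState) -> { tokens, state } would do.
    var fallbackTokenizer = {
        getLineTokens: function(line, state) {
            return { tokens: [{ type: "text", value: line }], state: state };
        }
    };

    var resultLines = [[{ type: "text", value: "let $x := 1" }]]; // unmatched row
    var resultStates = [];

    for (var i = 0; i < resultLines.length; i++) {
        var row = resultLines[i];
        // Rows reduced to one plain "text" token get the regular tokenizer.
        if (row.length === 1 && row[0].type === "text") {
            var prev = resultStates[i - 1] ? resultStates[i - 1] : "start";
            var result = fallbackTokenizer.getLineTokens(row[0].value, prev);
            resultLines[i] = result.tokens;  // replace the placeholder tokens
            resultStates[i] = result.state;  // carry state to the next row
        }
    }
    // getTokens then returns both arrays together:
    // return { states: resultStates, lines: resultLines };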