Update Semantic highlighter.
parent b63d2c178b
commit b744549a7d
17 changed files with 29494 additions and 28353 deletions

@@ -359,7 +359,7 @@ define('ace/mode/xquery_tokenizer', ['require', 'exports', 'module' , 'ace/token
     for(var i in this.cache)
     {
         var c = this.cache[i];
-        if(c && c.line == line && c.state == startState) {
+        if(c && c.line == line && c.startState == startState) {
             return c.tokens;
         }
     }

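The same one-line fix recurs in four copies of the tokenizer (here and in three hunks below): cache entries are written with a `startState` field (see the `result[i] = { line, startState, tokens }` object built in the semantic highlighter hunk further down), so the old comparison against the nonexistent `c.state` could never match and every line was re-tokenized. A minimal standalone sketch of the lookup after the fix; the `getCachedLine` wrapper is hypothetical, added only for illustration:

    // Hypothetical wrapper around the fixed loop; the entry shape follows the
    // highlighter's output: { line: "...", startState: "start", tokens: {...} }.
    function getCachedLine(cache, line, startState) {
        for (var i in cache) {
            var c = cache[i];
            // The old code compared c.state, a field no entry has, so this
            // condition was always false and the cache never hit.
            if (c && c.line == line && c.startState == startState) {
                return c.tokens;
            }
        }
        return null; // miss: the caller falls back to running the tokenizer
    }
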
@@ -40,4 +40,4 @@ body {
 
 #controls td + td {
     text-align: left;
-}
+}

File diff suppressed because it is too large
File diff suppressed because one or more lines are too long

@@ -193,7 +193,7 @@ ace.define('ace/mode/xquery_tokenizer', ['require', 'exports', 'module' , 'ace/t
     for(var i in this.cache)
     {
         var c = this.cache[i];
-        if(c && c.line == line && c.state == startState) {
+        if(c && c.line == line && c.startState == startState) {
             return c.tokens;
         }
     }

@@ -193,7 +193,7 @@ define('ace/mode/xquery_tokenizer', ['require', 'exports', 'module' , 'ace/token
     for(var i in this.cache)
     {
         var c = this.cache[i];
-        if(c && c.line == line && c.state == startState) {
+        if(c && c.line == line && c.startState == startState) {
             return c.tokens;
         }
     }

File diff suppressed because one or more lines are too long
File diff suppressed because it is too large

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/StringLexer.g 2012-04-13 15:32:18
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/StringLexer.g 2012-04-18 21:05:08
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XMLLexer.g 2012-04-13 15:32:20
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XMLLexer.g 2012-04-18 21:05:09
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -44,6 +44,10 @@ var XQDTLexer = exports.XQDTLexer = function(input, state)
 
 org.antlr.lang.extend(XQDTLexer, org.antlr.runtime.Lexer, {
 
+    comments: [],
+
+    addComment: function(start, stop){ console.log("BOUH!"); },
+
     isWsExplicit: false,
 
     setIsWsExplicit: function (wsExplicit) {

@@ -1,5 +1,5 @@
 define(function(require, exports, module) {
-// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryLexer.g 2012-04-13 15:32:15
+// $ANTLR 3.3 Nov 30, 2010 12:50:56 xquery/XQueryLexer.g 2012-04-18 21:05:06
 
 /* ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1

@@ -6494,7 +6494,7 @@ org.antlr.lang.augmentObject(XQueryLexer.prototype, {
 
     this.match(":)");
 
-    _channel = HIDDEN;
+    _channel = HIDDEN; this.addComment(this.state.tokenStartCharIndex, (this.getCharIndex()-1));
 
 
 

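Together with the `comments` array and `addComment` stub added to XQDTLexer two hunks up, this wires a comment-reporting hook into the generated lexer: when the `(: ... :)` rule matches the closing `:)`, it passes the comment's absolute start and stop character indices to `addComment`. The committed stub only logs "BOUH!"; a sketch of what a collecting override might look like — the `{ start, stop }` record shape is an assumption, not part of the commit:

    // Sketch only: override the hook on a lexer instance to collect spans,
    // e.g. so the semantic highlighter can mark comments. The generated rule
    // passes tokenStartCharIndex and getCharIndex() - 1 as start/stop.
    function collectComments(lexer) {
        lexer.comments = [];
        lexer.addComment = function(start, stop) {
            this.comments.push({ start: start, stop: stop }); // assumed shape
        };
        return lexer;
    }
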
File diff suppressed because it is too large

@@ -37,6 +37,7 @@ define(function(require, exports, module){
 
 var Position = require("./Position").Position;
 var XQuerySemanticHighlighter = exports.XQuerySemanticHighlighter = function() {
+    this.plain = null;
     this.source = [];
     this.lines = [];
 
@@ -50,39 +51,85 @@ define(function(require, exports, module){
             tokens = this.lines[i].sort(function(a, b){ return a.position.getOffset() - b.position.getOffset(); });
         }
         var sourceLine = this.source[i];
+        var tokenizedLine = "";
         var cursor = 0;
         for(j in tokens)
         {
             var token = tokens[j];
             var position = token.position;
             if(position.getOffset() > cursor) {
+                var value = sourceLine.substring(cursor, position.getOffset());
+                tokenizedLine += value;
                 lineTokens.push({
                     type: "text",
-                    value: sourceLine.substring(cursor, position.getOffset())
+                    value: value
                 });
             }
             cursor = position.getOffset() + position.getLength();
+            value = sourceLine.substring(position.getOffset(), cursor);
+            tokenizedLine += value;
             lineTokens.push({
                 type: token.type,
-                value: sourceLine.substring(position.getOffset(), cursor)
+                value: value
             });
         }
-        if(cursor < (sourceLine.length - 1)) {
+        console.log(lineTokens);
+        var nextState = "start";
+        if(lineTokens.length > 0) {
+            lineTokens[lineTokens.length - 1].type;
+        }
+        nextState = nextState != "comment" ? "start" : nextState;
+
+        if(cursor < (sourceLine.length )) {
+            value = sourceLine.substring(cursor);
             lineTokens.push({
                 type: "text",
-                value: sourceLine.substring(cursor)
+                value: value
             });
+            tokenizedLine += value;
         }
+        //Check if the tokenized line is equal to the original one:
+        if(sourceLine == tokenizedLine)
+            result[i] = { line: sourceLine, startState: previousState, tokens: { tokens: lineTokens, state: nextState } };
+        else {
+            console.log(sourceLine);
+            console.log(tokenizedLine);
+        }
-        result[i] = { line: sourceLine, state: previousState, tokens: { tokens: lineTokens, state: "start" } };
     }
     return result;
 };
 
-    this.addToken = function(token, type) {
-        var line = token.getLine() - 1;
-        var offset = token.getCharPositionInLine();
-        var length = token.getStopIndex() - token.getStartIndex() + 1;
-        var position = new Position(line, offset, length);
+    this.addToken = function(start, stop, type) {
+        var before = this.plain.substring(0, start);
+        var startLine = this.plain.substring(0, start).split("\n").length;
+        startLine = startLine == 0 ? 0 : startLine - 1;
+
+        var offset = before.lastIndexOf("\n");
+        offset = offset == -1 ? start : start - before.lastIndexOf("\n") - 1;
+
+        var cursor = start;
+
+        var text = this.plain.substring(start, stop);
+
+        var currentLine = startLine;
+        for(var i in text)
+        {
+            var c = text[i];
+            if(c == "\n") {
+                var s = i;
+                s = s < stop ? s : stop;
+                this.addPosition(new Position(currentLine, offset, s), type);
+                currentLine++;
+                offset = 0;
+                cursor = i;
+            }
+        };
+        this.addPosition(new Position(currentLine, offset, stop - cursor + 1), type);
+    };
+
+    this.addPosition = function(position, type)
+    {
+        var line = position.getLine();
         if(!this.lines[line]) {
             this.lines[line] = [];
         }

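The rewritten `addToken` changes the contract: instead of an ANTLR token object it now takes absolute `start`/`stop` character offsets into `this.plain` and splits the span into one `Position` per physical line, so multi-line tokens (such as the comments reported via `addComment`) can highlight correctly. A standalone sketch of that line-splitting idea, using plain objects in place of the `Position` class; the helper name and record shape are assumptions:

    // Sketch of the splitting logic, not the committed code: break an
    // absolute [start, stop) span over `plain` into per-line positions.
    function splitSpan(plain, start, stop) {
        var positions = [];
        var before = plain.substring(0, start);
        var line = before.split("\n").length - 1;        // 0-based start line
        var nl = before.lastIndexOf("\n");
        var column = nl == -1 ? start : start - nl - 1;  // column on that line
        var text = plain.substring(start, stop);
        var runStart = 0;                                // run begin inside `text`
        for (var i = 0; i < text.length; i++) {
            if (text[i] == "\n") {
                positions.push({ line: line, offset: column, length: i - runStart });
                line++;
                column = 0;
                runStart = i + 1;
            }
        }
        positions.push({ line: line, offset: column, length: text.length - runStart });
        return positions;
    }

The committed loop computes the per-line lengths somewhat differently (via `s` and `stop - cursor + 1`), and the `nextState` computation still contains a leftover no-op (`lineTokens[lineTokens.length - 1].type;` with no assignment); the new `sourceLine == tokenizedLine` self-check above is there precisely to log lines where such bookkeeping drifts from the original text.
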
@@ -94,7 +141,8 @@ define(function(require, exports, module){
 
     this.setSource = function(source)
     {
-        this.source = source.data.split("\n");
+        this.plain = source.data;
+        this.source = this.plain.split("\n");
     };
     //console.log("Line: " + token.getLine());
     //console.log(token.getText());

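Keeping the raw text in `this.plain` is what the offset-based `addToken` above relies on: callers pass absolute character indices and the highlighter derives line and column itself. A usage sketch under the interfaces shown in these hunks; the input literal and offsets are illustrative only:

    var highlighter = new XQuerySemanticHighlighter();
    highlighter.setSource({ data: "(: doc :)\nfor $x in //item\nreturn $x" });

    // Report a span by absolute offsets, the way the lexer hook does with
    // tokenStartCharIndex and getCharIndex() - 1; here the (: doc :) comment.
    highlighter.addToken(0, 8, "comment");
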
@@ -23,7 +23,7 @@ var NewLazyTokenStream = exports.NewLazyTokenStream = function(tokenSource) {
     this.isWsExplicit = false;
     this.p = 0;
     this.channel = org.antlr.runtime.Token.DEFAULT_CHANNEL;
-
+
     this.LT = function(k) {
         if (k == 0)
             return null;

@@ -71,27 +71,6 @@ var NewLazyTokenStream = exports.NewLazyTokenStream = function(tokenSource) {
 
     this.done = false;
 
-    // public void consume() {
-    //     if (done && p >= tokens.size())
-    //         return;
-    //
-    //     Token t = null;
-    //     do {
-    //         p++;
-    //         t = LT(1);
-    //         if (t == Token.EOF_TOKEN) {
-    //             done = true;
-    //             return;
-    //         }
-    //         p = t.getTokenIndex();
-    //
-    //     } while (!isWsExplicit && t.getChannel() != channel);
-    //
-    //     if (LT(1) == Token.EOF_TOKEN) {
-    //         done = true;
-    //     }
-    // }
-
     this.consume = function() {
         if (this.done) {
             return;

@@ -49,10 +49,7 @@ define(function(require, exports, module) {
     var tstream = new NewLazyTokenStream(lexer);
     tstream.jumpToFirstValidToken();
     var parser = new XQueryParser(tstream);
-    //parser.source = cstream;
-    parser.stream = tstream;
     parser.setSource(cstream);
-    //parser.setTokenStream(tstream);
     return parser;
 };
 });

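With `setSource` now storing the raw text, the wrapper hands the character stream to the parser once via `parser.setSource(cstream)` and drops the dead `parser.stream` plumbing. A sketch of the construction order this leaves in place; the surrounding factory name and the lexer line are assumptions filled in from context:

    // Assumed factory shape; only the middle four lines mirror the hunk above.
    function createParser(cstream) {
        var lexer = new XQueryLexer(cstream);         // assumed: built earlier in the file
        var tstream = new NewLazyTokenStream(lexer);  // lazily pulled token stream
        tstream.jumpToFirstValidToken();              // skip leading hidden tokens
        var parser = new XQueryParser(tstream);
        parser.setSource(cstream);                    // parser keeps the source handle
        return parser;
    }
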
@@ -52,7 +52,7 @@ define(function(require, exports, module) {
     for(var i in this.cache)
     {
         var c = this.cache[i];
-        if(c && c.line == line && c.state == startState) {
+        if(c && c.line == line && c.startState == startState) {
             return c.tokens;
         }
     }