Move line-length checking to the tokenizer so it does not break long data URIs in CSS
This commit is contained in:
parent
94a6dac5a7
commit
e084dc1303
2 changed files with 8 additions and 10 deletions
|
|
@ -34,8 +34,6 @@ define(function(require, exports, module) {
|
|||
var oop = require("./lib/oop");
|
||||
var EventEmitter = require("./lib/event_emitter").EventEmitter;
|
||||
|
||||
// tokenizing lines longer than this makes editor very slow
|
||||
var MAX_LINE_LENGTH = 5000;
|
||||
|
||||
/**
|
||||
*
|
||||
|
|
@ -226,15 +224,7 @@ var BackgroundTokenizer = function(tokenizer, editor) {
|
|||
var line = this.doc.getLine(row);
|
||||
var state = this.states[row - 1];
|
||||
|
||||
if (line.length > MAX_LINE_LENGTH) {
|
||||
var overflow = {value: line.substr(MAX_LINE_LENGTH), type: "text"};
|
||||
line = line.slice(0, MAX_LINE_LENGTH);
|
||||
}
|
||||
var data = this.tokenizer.getLineTokens(line, state, row);
|
||||
if (overflow) {
|
||||
data.tokens.push(overflow);
|
||||
data.state = "start";
|
||||
}
|
||||
|
||||
if (this.states[row] + "" !== data.state + "") {
|
||||
this.states[row] = data.state;
|
||||
|
|
|
|||
|
|
@ -31,6 +31,8 @@
|
|||
define(function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
// tokenizing lines longer than this makes editor very slow
|
||||
var MAX_TOKEN_COUNT = 1000;
|
||||
/**
|
||||
*
|
||||
*
|
||||
|
|
@ -230,6 +232,12 @@ var Tokenizer = function(rules, flag) {
|
|||
break;
|
||||
|
||||
lastIndex = index;
|
||||
|
||||
if (tokens.length > MAX_TOKEN_COUNT) {
|
||||
token.value += line.substr(lastIndex);
|
||||
currentState = "start"
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (token.type)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue