mirror of
https://github.com/ether/etherpad-lite.git
synced 2025-04-21 16:06:16 -04:00
Moved more classes to ts. (#6179)
This commit is contained in:
parent
3ea6f1072d
commit
4bd27a1c79
22 changed files with 790 additions and 653 deletions
|
@ -1,4 +1,7 @@
|
|||
'use strict';
|
||||
import {AText, PadType} from "../types/PadType";
|
||||
import {MapArrayType} from "../types/MapType";
|
||||
|
||||
/**
|
||||
* Copyright 2009 Google Inc.
|
||||
*
|
||||
|
@ -26,7 +29,7 @@ const _analyzeLine = require('./ExportHelper')._analyzeLine;
|
|||
const _encodeWhitespace = require('./ExportHelper')._encodeWhitespace;
|
||||
const padutils = require('../../static/js/pad_utils').padutils;
|
||||
|
||||
const getPadHTML = async (pad, revNum) => {
|
||||
const getPadHTML = async (pad: PadType, revNum: string) => {
|
||||
let atext = pad.atext;
|
||||
|
||||
// fetch revision atext
|
||||
|
@ -38,7 +41,7 @@ const getPadHTML = async (pad, revNum) => {
|
|||
return await getHTMLFromAtext(pad, atext);
|
||||
};
|
||||
|
||||
const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
||||
const getHTMLFromAtext = async (pad:PadType, atext: AText, authorColors?: string[]) => {
|
||||
const apool = pad.apool();
|
||||
const textLines = atext.text.slice(0, -1).split('\n');
|
||||
const attribLines = Changeset.splitAttributionLines(atext.attribs, atext.text);
|
||||
|
@ -48,7 +51,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
|
||||
await Promise.all([
|
||||
// prepare tags stored as ['tag', true] to be exported
|
||||
hooks.aCallAll('exportHtmlAdditionalTags', pad).then((newProps) => {
|
||||
hooks.aCallAll('exportHtmlAdditionalTags', pad).then((newProps: string[]) => {
|
||||
newProps.forEach((prop) => {
|
||||
tags.push(prop);
|
||||
props.push(prop);
|
||||
|
@ -56,7 +59,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
}),
|
||||
// prepare tags stored as ['tag', 'value'] to be exported. This will generate HTML with tags
|
||||
// like <span data-tag="value">
|
||||
hooks.aCallAll('exportHtmlAdditionalTagsWithData', pad).then((newProps) => {
|
||||
hooks.aCallAll('exportHtmlAdditionalTagsWithData', pad).then((newProps: string[]) => {
|
||||
newProps.forEach((prop) => {
|
||||
tags.push(`span data-${prop[0]}="${prop[1]}"`);
|
||||
props.push(prop);
|
||||
|
@ -68,10 +71,10 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
// and maps them to an index in props
|
||||
// *3:2 -> the attribute *3 means strong
|
||||
// *2:5 -> the attribute *2 means s(trikethrough)
|
||||
const anumMap = {};
|
||||
const anumMap:MapArrayType<number> = {};
|
||||
let css = '';
|
||||
|
||||
const stripDotFromAuthorID = (id) => id.replace(/\./g, '_');
|
||||
const stripDotFromAuthorID = (id: string) => id.replace(/\./g, '_');
|
||||
|
||||
if (authorColors) {
|
||||
css += '<style>\n';
|
||||
|
@ -118,7 +121,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
}
|
||||
});
|
||||
|
||||
const getLineHTML = (text, attribs) => {
|
||||
const getLineHTML = (text: string, attribs: string[]) => {
|
||||
// Use order of tags (b/i/u) as order of nesting, for simplicity
|
||||
// and decent nesting. For example,
|
||||
// <b>Just bold<b> <b><i>Bold and italics</i></b> <i>Just italics</i>
|
||||
|
@ -126,12 +129,13 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
// <b>Just bold <i>Bold and italics</i></b> <i>Just italics</i>
|
||||
const taker = Changeset.stringIterator(text);
|
||||
const assem = Changeset.stringAssembler();
|
||||
const openTags = [];
|
||||
const openTags:string[] = [];
|
||||
|
||||
const getSpanClassFor = (i) => {
|
||||
const getSpanClassFor = (i: string) => {
|
||||
// return if author colors are disabled
|
||||
if (!authorColors) return false;
|
||||
|
||||
// @ts-ignore
|
||||
const property = props[i];
|
||||
|
||||
// we are not insterested on properties in the form of ['color', 'red'],
|
||||
|
@ -153,12 +157,13 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
|
||||
// tags added by exportHtmlAdditionalTagsWithData will be exported as <span> with
|
||||
// data attributes
|
||||
const isSpanWithData = (i) => {
|
||||
const isSpanWithData = (i: string) => {
|
||||
// @ts-ignore
|
||||
const property = props[i];
|
||||
return Array.isArray(property);
|
||||
};
|
||||
|
||||
const emitOpenTag = (i) => {
|
||||
const emitOpenTag = (i: string) => {
|
||||
openTags.unshift(i);
|
||||
const spanClass = getSpanClassFor(i);
|
||||
|
||||
|
@ -168,13 +173,14 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
assem.append('">');
|
||||
} else {
|
||||
assem.append('<');
|
||||
// @ts-ignore
|
||||
assem.append(tags[i]);
|
||||
assem.append('>');
|
||||
}
|
||||
};
|
||||
|
||||
// this closes an open tag and removes its reference from openTags
|
||||
const emitCloseTag = (i) => {
|
||||
const emitCloseTag = (i: string) => {
|
||||
openTags.shift();
|
||||
const spanClass = getSpanClassFor(i);
|
||||
const spanWithData = isSpanWithData(i);
|
||||
|
@ -183,6 +189,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
assem.append('</span>');
|
||||
} else {
|
||||
assem.append('</');
|
||||
// @ts-ignore
|
||||
assem.append(tags[i]);
|
||||
assem.append('>');
|
||||
}
|
||||
|
@ -192,7 +199,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
|
||||
let idx = 0;
|
||||
|
||||
const processNextChars = (numChars) => {
|
||||
const processNextChars = (numChars: number) => {
|
||||
if (numChars <= 0) {
|
||||
return;
|
||||
}
|
||||
|
@ -203,12 +210,12 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
// this iterates over every op string and decides which tags to open or to close
|
||||
// based on the attribs used
|
||||
for (const o of ops) {
|
||||
const usedAttribs = [];
|
||||
const usedAttribs:string[] = [];
|
||||
|
||||
// mark all attribs as used
|
||||
for (const a of attributes.decodeAttribString(o.attribs)) {
|
||||
if (a in anumMap) {
|
||||
usedAttribs.push(anumMap[a]); // i = 0 => bold, etc.
|
||||
usedAttribs.push(String(anumMap[a])); // i = 0 => bold, etc.
|
||||
}
|
||||
}
|
||||
let outermostTag = -1;
|
||||
|
@ -256,7 +263,9 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
};
|
||||
// end processNextChars
|
||||
if (urls) {
|
||||
urls.forEach((urlData) => {
|
||||
urls.forEach((urlData: [number, {
|
||||
length: number,
|
||||
}]) => {
|
||||
const startIndex = urlData[0];
|
||||
const url = urlData[1];
|
||||
const urlLength = url.length;
|
||||
|
@ -288,7 +297,13 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
// so we want to do something reasonable there. We also
|
||||
// want to deal gracefully with blank lines.
|
||||
// => keeps track of the parents level of indentation
|
||||
let openLists = [];
|
||||
|
||||
type openList = {
|
||||
level: number,
|
||||
type: string,
|
||||
}
|
||||
|
||||
let openLists: openList[] = [];
|
||||
for (let i = 0; i < textLines.length; i++) {
|
||||
let context;
|
||||
const line = _analyzeLine(textLines[i], attribLines[i], apool);
|
||||
|
@ -315,7 +330,7 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
// To create list parent elements
|
||||
if ((!prevLine || prevLine.listLevel !== line.listLevel) ||
|
||||
(line.listTypeName !== prevLine.listTypeName)) {
|
||||
const exists = _.find(openLists, (item) => (
|
||||
const exists = _.find(openLists, (item:openList) => (
|
||||
item.level === line.listLevel && item.type === line.listTypeName));
|
||||
if (!exists) {
|
||||
let prevLevel = 0;
|
||||
|
@ -456,12 +471,12 @@ const getHTMLFromAtext = async (pad, atext, authorColors) => {
|
|||
return pieces.join('');
|
||||
};
|
||||
|
||||
exports.getPadHTMLDocument = async (padId, revNum, readOnlyId) => {
|
||||
exports.getPadHTMLDocument = async (padId: string, revNum: string, readOnlyId: number) => {
|
||||
const pad = await padManager.getPad(padId);
|
||||
|
||||
// Include some Styles into the Head for Export
|
||||
let stylesForExportCSS = '';
|
||||
const stylesForExport = await hooks.aCallAll('stylesForExport', padId);
|
||||
const stylesForExport: string[] = await hooks.aCallAll('stylesForExport', padId);
|
||||
stylesForExport.forEach((css) => {
|
||||
stylesForExportCSS += css;
|
||||
});
|
||||
|
@ -480,7 +495,7 @@ exports.getPadHTMLDocument = async (padId, revNum, readOnlyId) => {
|
|||
};
|
||||
|
||||
// copied from ACE
|
||||
const _processSpaces = (s) => {
|
||||
const _processSpaces = (s: string) => {
|
||||
const doesWrap = true;
|
||||
if (s.indexOf('<') < 0 && !doesWrap) {
|
||||
// short-cut
|
||||
|
@ -489,6 +504,7 @@ const _processSpaces = (s) => {
|
|||
const parts = [];
|
||||
s.replace(/<[^>]*>?| |[^ <]+/g, (m) => {
|
||||
parts.push(m);
|
||||
return m
|
||||
});
|
||||
if (doesWrap) {
|
||||
let endOfLine = true;
|
|
@ -27,8 +27,14 @@ const settings = require('./Settings');
|
|||
|
||||
const logger = log4js.getLogger('LibreOffice');
|
||||
|
||||
const doConvertTask = async (task) => {
|
||||
const doConvertTask = async (task:{
|
||||
type: string,
|
||||
srcFile: string,
|
||||
fileExtension: string,
|
||||
destFile: string,
|
||||
}) => {
|
||||
const tmpDir = os.tmpdir();
|
||||
// @ts-ignore
|
||||
const p = runCmd([
|
||||
settings.soffice,
|
||||
'--headless',
|
||||
|
@ -43,8 +49,10 @@ const doConvertTask = async (task) => {
|
|||
tmpDir,
|
||||
], {stdio: [
|
||||
null,
|
||||
(line) => logger.info(`[${p.child.pid}] stdout: ${line}`),
|
||||
(line) => logger.error(`[${p.child.pid}] stderr: ${line}`),
|
||||
// @ts-ignore
|
||||
(line) => logger.info(`[${p.child.pid}] stdout: ${line}`),
|
||||
// @ts-ignore
|
||||
(line) => logger.error(`[${p.child.pid}] stderr: ${line}`),
|
||||
]});
|
||||
logger.info(`[${p.child.pid}] Converting ${task.srcFile} to ${task.type} in ${tmpDir}`);
|
||||
// Soffice/libreoffice is buggy and often hangs.
|
||||
|
@ -56,7 +64,7 @@ const doConvertTask = async (task) => {
|
|||
}, 120000);
|
||||
try {
|
||||
await p;
|
||||
} catch (err) {
|
||||
} catch (err:any) {
|
||||
logger.error(`[${p.child.pid}] Conversion failed: ${err.stack || err}`);
|
||||
throw err;
|
||||
} finally {
|
||||
|
@ -81,7 +89,7 @@ const queue = async.queue(doConvertTask, 1);
|
|||
* @param {String} type The type to convert into
|
||||
* @param {Function} callback Standard callback function
|
||||
*/
|
||||
exports.convertFile = async (srcFile, destFile, type) => {
|
||||
exports.convertFile = async (srcFile: string, destFile: string, type:string) => {
|
||||
// Used for the moving of the file, not the conversion
|
||||
const fileExtension = type;
|
||||
|
|
@ -26,7 +26,7 @@ const semver = require('semver');
|
|||
*
|
||||
* @param {String} minNodeVersion Minimum required Node version
|
||||
*/
|
||||
exports.enforceMinNodeVersion = (minNodeVersion) => {
|
||||
exports.enforceMinNodeVersion = (minNodeVersion: string) => {
|
||||
const currentNodeVersion = process.version;
|
||||
|
||||
// we cannot use template literals, since we still do not know if we are
|
||||
|
@ -49,7 +49,7 @@ exports.enforceMinNodeVersion = (minNodeVersion) => {
|
|||
* @param {Function} epRemovalVersion Etherpad version that will remove support for deprecated
|
||||
* Node releases
|
||||
*/
|
||||
exports.checkDeprecationStatus = (lowestNonDeprecatedNodeVersion, epRemovalVersion) => {
|
||||
exports.checkDeprecationStatus = (lowestNonDeprecatedNodeVersion: string, epRemovalVersion:Function) => {
|
||||
const currentNodeVersion = process.version;
|
||||
|
||||
if (semver.lt(currentNodeVersion, lowestNonDeprecatedNodeVersion)) {
|
|
@ -50,13 +50,13 @@ const nonSettings = [
|
|||
|
||||
// This is a function to make it easy to create a new instance. It is important to not reuse a
|
||||
// config object after passing it to log4js.configure() because that method mutates the object. :(
|
||||
const defaultLogConfig = (level) => ({appenders: {console: {type: 'console'}},
|
||||
const defaultLogConfig = (level:string) => ({appenders: {console: {type: 'console'}},
|
||||
categories: {
|
||||
default: {appenders: ['console'], level},
|
||||
}});
|
||||
const defaultLogLevel = 'INFO';
|
||||
|
||||
const initLogging = (logLevel, config) => {
|
||||
const initLogging = (config:any) => {
|
||||
// log4js.configure() modifies exports.logconfig so check for equality first.
|
||||
log4js.configure(config);
|
||||
log4js.getLogger('console');
|
||||
|
@ -70,7 +70,7 @@ const initLogging = (logLevel, config) => {
|
|||
|
||||
// Initialize logging as early as possible with reasonable defaults. Logging will be re-initialized
|
||||
// with the user's chosen log level and logger config after the settings have been loaded.
|
||||
initLogging(defaultLogLevel, defaultLogConfig(defaultLogLevel));
|
||||
initLogging(defaultLogConfig(defaultLogLevel));
|
||||
|
||||
/* Root path of the installation */
|
||||
exports.root = absolutePaths.findEtherpadRoot();
|
||||
|
@ -487,7 +487,7 @@ exports.getGitCommit = () => {
|
|||
version = ref;
|
||||
}
|
||||
version = version.substring(0, 7);
|
||||
} catch (e) {
|
||||
} catch (e:any) {
|
||||
logger.warn(`Can't get git version for server header\n${e.message}`);
|
||||
}
|
||||
return version;
|
||||
|
@ -503,7 +503,7 @@ exports.getEpVersion = () => require('../../package.json').version;
|
|||
* This code refactors a previous version that copied & pasted the same code for
|
||||
* both "settings.json" and "credentials.json".
|
||||
*/
|
||||
const storeSettings = (settingsObj) => {
|
||||
const storeSettings = (settingsObj:any) => {
|
||||
for (const i of Object.keys(settingsObj || {})) {
|
||||
if (nonSettings.includes(i)) {
|
||||
logger.warn(`Ignoring setting: '${i}'`);
|
||||
|
@ -542,8 +542,9 @@ const storeSettings = (settingsObj) => {
|
|||
* short syntax "${ABIWORD}", and not "${ABIWORD:null}": the latter would result
|
||||
* in the literal string "null", instead.
|
||||
*/
|
||||
const coerceValue = (stringValue) => {
|
||||
const coerceValue = (stringValue:string) => {
|
||||
// cooked from https://stackoverflow.com/questions/175739/built-in-way-in-javascript-to-check-if-a-string-is-a-valid-number
|
||||
// @ts-ignore
|
||||
const isNumeric = !isNaN(stringValue) && !isNaN(parseFloat(stringValue) && isFinite(stringValue));
|
||||
|
||||
if (isNumeric) {
|
||||
|
@ -597,7 +598,7 @@ const coerceValue = (stringValue) => {
|
|||
*
|
||||
* see: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify#The_replacer_parameter
|
||||
*/
|
||||
const lookupEnvironmentVariables = (obj) => {
|
||||
const lookupEnvironmentVariables = (obj: object) => {
|
||||
const stringifiedAndReplaced = JSON.stringify(obj, (key, value) => {
|
||||
/*
|
||||
* the first invocation of replacer() is with an empty key. Just go on, or
|
||||
|
@ -669,7 +670,7 @@ const lookupEnvironmentVariables = (obj) => {
|
|||
logger.debug(
|
||||
`Configuration key "${key}" will be read from environment variable "${envVarName}"`);
|
||||
|
||||
return coerceValue(envVarValue);
|
||||
return coerceValue(envVarValue!);
|
||||
});
|
||||
|
||||
const newSettings = JSON.parse(stringifiedAndReplaced);
|
||||
|
@ -685,7 +686,7 @@ const lookupEnvironmentVariables = (obj) => {
|
|||
*
|
||||
* The isSettings variable only controls the error logging.
|
||||
*/
|
||||
const parseSettings = (settingsFilename, isSettings) => {
|
||||
const parseSettings = (settingsFilename:string, isSettings:boolean) => {
|
||||
let settingsStr = '';
|
||||
|
||||
let settingsType, notFoundMessage, notFoundFunction;
|
||||
|
@ -720,7 +721,7 @@ const parseSettings = (settingsFilename, isSettings) => {
|
|||
const replacedSettings = lookupEnvironmentVariables(settings);
|
||||
|
||||
return replacedSettings;
|
||||
} catch (e) {
|
||||
} catch (e:any) {
|
||||
logger.error(`There was an error processing your ${settingsType} ` +
|
||||
`file from ${settingsFilename}: ${e.message}`);
|
||||
|
||||
|
@ -736,7 +737,7 @@ exports.reloadSettings = () => {
|
|||
|
||||
// Init logging config
|
||||
exports.logconfig = defaultLogConfig(exports.loglevel ? exports.loglevel : defaultLogLevel);
|
||||
initLogging(exports.loglevel, exports.logconfig);
|
||||
initLogging(exports.logconfig);
|
||||
|
||||
if (!exports.skinName) {
|
||||
logger.warn('No "skinName" parameter found. Please check out settings.json.template and ' +
|
||||
|
@ -780,7 +781,7 @@ exports.reloadSettings = () => {
|
|||
if (exports.abiword) {
|
||||
// Check abiword actually exists
|
||||
if (exports.abiword != null) {
|
||||
fs.exists(exports.abiword, (exists) => {
|
||||
fs.exists(exports.abiword, (exists: boolean) => {
|
||||
if (!exists) {
|
||||
const abiwordError = 'Abiword does not exist at this path, check your settings file.';
|
||||
if (!exports.suppressErrorsInPadText) {
|
||||
|
@ -794,7 +795,7 @@ exports.reloadSettings = () => {
|
|||
}
|
||||
|
||||
if (exports.soffice) {
|
||||
fs.exists(exports.soffice, (exists) => {
|
||||
fs.exists(exports.soffice, (exists: boolean) => {
|
||||
if (!exists) {
|
||||
const sofficeError =
|
||||
'soffice (libreoffice) does not exist at this path, check your settings file.';
|
|
@ -1,447 +0,0 @@
|
|||
'use strict';
|
||||
|
||||
const AttributeMap = require('../../static/js/AttributeMap');
|
||||
const Changeset = require('../../static/js/Changeset');
|
||||
const attributes = require('../../static/js/attributes');
|
||||
const exportHtml = require('./ExportHtml');
|
||||
|
||||
function PadDiff(pad, fromRev, toRev) {
|
||||
// check parameters
|
||||
if (!pad || !pad.id || !pad.atext || !pad.pool) {
|
||||
throw new Error('Invalid pad');
|
||||
}
|
||||
|
||||
const range = pad.getValidRevisionRange(fromRev, toRev);
|
||||
if (!range) throw new Error(`Invalid revision range. startRev: ${fromRev} endRev: ${toRev}`);
|
||||
|
||||
this._pad = pad;
|
||||
this._fromRev = range.startRev;
|
||||
this._toRev = range.endRev;
|
||||
this._html = null;
|
||||
this._authors = [];
|
||||
}
|
||||
|
||||
PadDiff.prototype._isClearAuthorship = function (changeset) {
|
||||
// unpack
|
||||
const unpacked = Changeset.unpack(changeset);
|
||||
|
||||
// check if there is nothing in the charBank
|
||||
if (unpacked.charBank !== '') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// check if oldLength == newLength
|
||||
if (unpacked.oldLen !== unpacked.newLen) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const [clearOperator, anotherOp] = Changeset.deserializeOps(unpacked.ops);
|
||||
|
||||
// check if there is only one operator
|
||||
if (anotherOp != null) return false;
|
||||
|
||||
// check if this operator doesn't change text
|
||||
if (clearOperator.opcode !== '=') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// check that this operator applys to the complete text
|
||||
// if the text ends with a new line, its exactly one character less, else it has the same length
|
||||
if (clearOperator.chars !== unpacked.oldLen - 1 && clearOperator.chars !== unpacked.oldLen) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const [appliedAttribute, anotherAttribute] =
|
||||
attributes.attribsFromString(clearOperator.attribs, this._pad.pool);
|
||||
|
||||
// Check that the operation has exactly one attribute.
|
||||
if (appliedAttribute == null || anotherAttribute != null) return false;
|
||||
|
||||
// check if the applied attribute is an anonymous author attribute
|
||||
if (appliedAttribute[0] !== 'author' || appliedAttribute[1] !== '') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
PadDiff.prototype._createClearAuthorship = async function (rev) {
|
||||
const atext = await this._pad.getInternalRevisionAText(rev);
|
||||
|
||||
// build clearAuthorship changeset
|
||||
const builder = Changeset.builder(atext.text.length);
|
||||
builder.keepText(atext.text, [['author', '']], this._pad.pool);
|
||||
const changeset = builder.toString();
|
||||
|
||||
return changeset;
|
||||
};
|
||||
|
||||
PadDiff.prototype._createClearStartAtext = async function (rev) {
|
||||
// get the atext of this revision
|
||||
const atext = await this._pad.getInternalRevisionAText(rev);
|
||||
|
||||
// create the clearAuthorship changeset
|
||||
const changeset = await this._createClearAuthorship(rev);
|
||||
|
||||
// apply the clearAuthorship changeset
|
||||
const newAText = Changeset.applyToAText(changeset, atext, this._pad.pool);
|
||||
|
||||
return newAText;
|
||||
};
|
||||
|
||||
PadDiff.prototype._getChangesetsInBulk = async function (startRev, count) {
|
||||
// find out which revisions we need
|
||||
const revisions = [];
|
||||
for (let i = startRev; i < (startRev + count) && i <= this._pad.head; i++) {
|
||||
revisions.push(i);
|
||||
}
|
||||
|
||||
// get all needed revisions (in parallel)
|
||||
const changesets = []; const
|
||||
authors = [];
|
||||
await Promise.all(revisions.map((rev) => this._pad.getRevision(rev).then((revision) => {
|
||||
const arrayNum = rev - startRev;
|
||||
changesets[arrayNum] = revision.changeset;
|
||||
authors[arrayNum] = revision.meta.author;
|
||||
})));
|
||||
|
||||
return {changesets, authors};
|
||||
};
|
||||
|
||||
PadDiff.prototype._addAuthors = function (authors) {
|
||||
const self = this;
|
||||
|
||||
// add to array if not in the array
|
||||
authors.forEach((author) => {
|
||||
if (self._authors.indexOf(author) === -1) {
|
||||
self._authors.push(author);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
PadDiff.prototype._createDiffAtext = async function () {
|
||||
const bulkSize = 100;
|
||||
|
||||
// get the cleaned startAText
|
||||
let atext = await this._createClearStartAtext(this._fromRev);
|
||||
|
||||
let superChangeset = null;
|
||||
|
||||
for (let rev = this._fromRev + 1; rev <= this._toRev; rev += bulkSize) {
|
||||
// get the bulk
|
||||
const {changesets, authors} = await this._getChangesetsInBulk(rev, bulkSize);
|
||||
|
||||
const addedAuthors = [];
|
||||
|
||||
// run through all changesets
|
||||
for (let i = 0; i < changesets.length && (rev + i) <= this._toRev; ++i) {
|
||||
let changeset = changesets[i];
|
||||
|
||||
// skip clearAuthorship Changesets
|
||||
if (this._isClearAuthorship(changeset)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
changeset = this._extendChangesetWithAuthor(changeset, authors[i], this._pad.pool);
|
||||
|
||||
// add this author to the authorarray
|
||||
addedAuthors.push(authors[i]);
|
||||
|
||||
// compose it with the superChangset
|
||||
if (superChangeset == null) {
|
||||
superChangeset = changeset;
|
||||
} else {
|
||||
superChangeset = Changeset.compose(superChangeset, changeset, this._pad.pool);
|
||||
}
|
||||
}
|
||||
|
||||
// add the authors to the PadDiff authorArray
|
||||
this._addAuthors(addedAuthors);
|
||||
}
|
||||
|
||||
// if there are only clearAuthorship changesets, we don't get a superChangeset,
|
||||
// so we can skip this step
|
||||
if (superChangeset) {
|
||||
const deletionChangeset = this._createDeletionChangeset(superChangeset, atext, this._pad.pool);
|
||||
|
||||
// apply the superChangeset, which includes all addings
|
||||
atext = Changeset.applyToAText(superChangeset, atext, this._pad.pool);
|
||||
|
||||
// apply the deletionChangeset, which adds a deletions
|
||||
atext = Changeset.applyToAText(deletionChangeset, atext, this._pad.pool);
|
||||
}
|
||||
|
||||
return atext;
|
||||
};
|
||||
|
||||
PadDiff.prototype.getHtml = async function () {
|
||||
// cache the html
|
||||
if (this._html != null) {
|
||||
return this._html;
|
||||
}
|
||||
|
||||
// get the diff atext
|
||||
const atext = await this._createDiffAtext();
|
||||
|
||||
// get the authorColor table
|
||||
const authorColors = await this._pad.getAllAuthorColors();
|
||||
|
||||
// convert the atext to html
|
||||
this._html = await exportHtml.getHTMLFromAtext(this._pad, atext, authorColors);
|
||||
|
||||
return this._html;
|
||||
};
|
||||
|
||||
PadDiff.prototype.getAuthors = async function () {
|
||||
// check if html was already produced, if not produce it, this generates
|
||||
// the author array at the same time
|
||||
if (this._html == null) {
|
||||
await this.getHtml();
|
||||
}
|
||||
|
||||
return self._authors;
|
||||
};
|
||||
|
||||
PadDiff.prototype._extendChangesetWithAuthor = (changeset, author, apool) => {
|
||||
// unpack
|
||||
const unpacked = Changeset.unpack(changeset);
|
||||
|
||||
const assem = Changeset.opAssembler();
|
||||
|
||||
// create deleted attribs
|
||||
const authorAttrib = apool.putAttrib(['author', author || '']);
|
||||
const deletedAttrib = apool.putAttrib(['removed', true]);
|
||||
const attribs = `*${Changeset.numToString(authorAttrib)}*${Changeset.numToString(deletedAttrib)}`;
|
||||
|
||||
for (const operator of Changeset.deserializeOps(unpacked.ops)) {
|
||||
if (operator.opcode === '-') {
|
||||
// this is a delete operator, extend it with the author
|
||||
operator.attribs = attribs;
|
||||
} else if (operator.opcode === '=' && operator.attribs) {
|
||||
// this is operator changes only attributes, let's mark which author did that
|
||||
operator.attribs += `*${Changeset.numToString(authorAttrib)}`;
|
||||
}
|
||||
|
||||
// append the new operator to our assembler
|
||||
assem.append(operator);
|
||||
}
|
||||
|
||||
// return the modified changeset
|
||||
return Changeset.pack(unpacked.oldLen, unpacked.newLen, assem.toString(), unpacked.charBank);
|
||||
};
|
||||
|
||||
// this method is 80% like Changeset.inverse. I just changed so instead of reverting,
|
||||
// it adds deletions and attribute changes to to the atext.
|
||||
PadDiff.prototype._createDeletionChangeset = function (cs, startAText, apool) {
|
||||
const lines = Changeset.splitTextLines(startAText.text);
|
||||
const alines = Changeset.splitAttributionLines(startAText.attribs, startAText.text);
|
||||
|
||||
// lines and alines are what the exports is meant to apply to.
|
||||
// They may be arrays or objects with .get(i) and .length methods.
|
||||
// They include final newlines on lines.
|
||||
|
||||
const linesGet = (idx) => {
|
||||
if (lines.get) {
|
||||
return lines.get(idx);
|
||||
} else {
|
||||
return lines[idx];
|
||||
}
|
||||
};
|
||||
|
||||
const aLinesGet = (idx) => {
|
||||
if (alines.get) {
|
||||
return alines.get(idx);
|
||||
} else {
|
||||
return alines[idx];
|
||||
}
|
||||
};
|
||||
|
||||
let curLine = 0;
|
||||
let curChar = 0;
|
||||
let curLineOps = null;
|
||||
let curLineOpsNext = null;
|
||||
let curLineOpsLine;
|
||||
let curLineNextOp = new Changeset.Op('+');
|
||||
|
||||
const unpacked = Changeset.unpack(cs);
|
||||
const builder = Changeset.builder(unpacked.newLen);
|
||||
|
||||
const consumeAttribRuns = (numChars, func /* (len, attribs, endsLine)*/) => {
|
||||
if (!curLineOps || curLineOpsLine !== curLine) {
|
||||
curLineOps = Changeset.deserializeOps(aLinesGet(curLine));
|
||||
curLineOpsNext = curLineOps.next();
|
||||
curLineOpsLine = curLine;
|
||||
let indexIntoLine = 0;
|
||||
while (!curLineOpsNext.done) {
|
||||
curLineNextOp = curLineOpsNext.value;
|
||||
curLineOpsNext = curLineOps.next();
|
||||
if (indexIntoLine + curLineNextOp.chars >= curChar) {
|
||||
curLineNextOp.chars -= (curChar - indexIntoLine);
|
||||
break;
|
||||
}
|
||||
indexIntoLine += curLineNextOp.chars;
|
||||
}
|
||||
}
|
||||
|
||||
while (numChars > 0) {
|
||||
if (!curLineNextOp.chars && curLineOpsNext.done) {
|
||||
curLine++;
|
||||
curChar = 0;
|
||||
curLineOpsLine = curLine;
|
||||
curLineNextOp.chars = 0;
|
||||
curLineOps = Changeset.deserializeOps(aLinesGet(curLine));
|
||||
curLineOpsNext = curLineOps.next();
|
||||
}
|
||||
|
||||
if (!curLineNextOp.chars) {
|
||||
if (curLineOpsNext.done) {
|
||||
curLineNextOp = new Changeset.Op();
|
||||
} else {
|
||||
curLineNextOp = curLineOpsNext.value;
|
||||
curLineOpsNext = curLineOps.next();
|
||||
}
|
||||
}
|
||||
|
||||
const charsToUse = Math.min(numChars, curLineNextOp.chars);
|
||||
|
||||
func(charsToUse, curLineNextOp.attribs,
|
||||
charsToUse === curLineNextOp.chars && curLineNextOp.lines > 0);
|
||||
numChars -= charsToUse;
|
||||
curLineNextOp.chars -= charsToUse;
|
||||
curChar += charsToUse;
|
||||
}
|
||||
|
||||
if (!curLineNextOp.chars && curLineOpsNext.done) {
|
||||
curLine++;
|
||||
curChar = 0;
|
||||
}
|
||||
};
|
||||
|
||||
const skip = (N, L) => {
|
||||
if (L) {
|
||||
curLine += L;
|
||||
curChar = 0;
|
||||
} else if (curLineOps && curLineOpsLine === curLine) {
|
||||
consumeAttribRuns(N, () => {});
|
||||
} else {
|
||||
curChar += N;
|
||||
}
|
||||
};
|
||||
|
||||
const nextText = (numChars) => {
|
||||
let len = 0;
|
||||
const assem = Changeset.stringAssembler();
|
||||
const firstString = linesGet(curLine).substring(curChar);
|
||||
len += firstString.length;
|
||||
assem.append(firstString);
|
||||
|
||||
let lineNum = curLine + 1;
|
||||
|
||||
while (len < numChars) {
|
||||
const nextString = linesGet(lineNum);
|
||||
len += nextString.length;
|
||||
assem.append(nextString);
|
||||
lineNum++;
|
||||
}
|
||||
|
||||
return assem.toString().substring(0, numChars);
|
||||
};
|
||||
|
||||
const cachedStrFunc = (func) => {
|
||||
const cache = {};
|
||||
|
||||
return (s) => {
|
||||
if (!cache[s]) {
|
||||
cache[s] = func(s);
|
||||
}
|
||||
return cache[s];
|
||||
};
|
||||
};
|
||||
|
||||
for (const csOp of Changeset.deserializeOps(unpacked.ops)) {
|
||||
if (csOp.opcode === '=') {
|
||||
const textBank = nextText(csOp.chars);
|
||||
|
||||
// decide if this equal operator is an attribution change or not.
|
||||
// We can see this by checkinf if attribs is set.
|
||||
// If the text this operator applies to is only a star,
|
||||
// than this is a false positive and should be ignored
|
||||
if (csOp.attribs && textBank !== '*') {
|
||||
const attribs = AttributeMap.fromString(csOp.attribs, apool);
|
||||
const undoBackToAttribs = cachedStrFunc((oldAttribsStr) => {
|
||||
const oldAttribs = AttributeMap.fromString(oldAttribsStr, apool);
|
||||
const backAttribs = new AttributeMap(apool)
|
||||
.set('author', '')
|
||||
.set('removed', 'true');
|
||||
for (const [key, value] of attribs) {
|
||||
const oldValue = oldAttribs.get(key);
|
||||
if (oldValue !== value) backAttribs.set(key, oldValue);
|
||||
}
|
||||
// TODO: backAttribs does not restore removed attributes (it is missing attributes that
|
||||
// are in oldAttribs but not in attribs). I don't know if that is intentional.
|
||||
return backAttribs.toString();
|
||||
});
|
||||
|
||||
let textLeftToProcess = textBank;
|
||||
|
||||
while (textLeftToProcess.length > 0) {
|
||||
// process till the next line break or process only one line break
|
||||
let lengthToProcess = textLeftToProcess.indexOf('\n');
|
||||
let lineBreak = false;
|
||||
switch (lengthToProcess) {
|
||||
case -1:
|
||||
lengthToProcess = textLeftToProcess.length;
|
||||
break;
|
||||
case 0:
|
||||
lineBreak = true;
|
||||
lengthToProcess = 1;
|
||||
break;
|
||||
}
|
||||
|
||||
// get the text we want to procceed in this step
|
||||
const processText = textLeftToProcess.substr(0, lengthToProcess);
|
||||
|
||||
textLeftToProcess = textLeftToProcess.substr(lengthToProcess);
|
||||
|
||||
if (lineBreak) {
|
||||
builder.keep(1, 1); // just skip linebreaks, don't do a insert + keep for a linebreak
|
||||
|
||||
// consume the attributes of this linebreak
|
||||
consumeAttribRuns(1, () => {});
|
||||
} else {
|
||||
// add the old text via an insert, but add a deletion attribute +
|
||||
// the author attribute of the author who deleted it
|
||||
let textBankIndex = 0;
|
||||
consumeAttribRuns(lengthToProcess, (len, attribs, endsLine) => {
|
||||
// get the old attributes back
|
||||
const oldAttribs = undoBackToAttribs(attribs);
|
||||
|
||||
builder.insert(processText.substr(textBankIndex, len), oldAttribs);
|
||||
textBankIndex += len;
|
||||
});
|
||||
|
||||
builder.keep(lengthToProcess, 0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
skip(csOp.chars, csOp.lines);
|
||||
builder.keep(csOp.chars, csOp.lines);
|
||||
}
|
||||
} else if (csOp.opcode === '+') {
|
||||
builder.keep(csOp.chars, csOp.lines);
|
||||
} else if (csOp.opcode === '-') {
|
||||
const textBank = nextText(csOp.chars);
|
||||
let textBankIndex = 0;
|
||||
|
||||
consumeAttribRuns(csOp.chars, (len, attribs, endsLine) => {
|
||||
builder.insert(textBank.substr(textBankIndex, len), attribs + csOp.attribs);
|
||||
textBankIndex += len;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Changeset.checkRep(builder.toString());
|
||||
};
|
||||
|
||||
// export the constructor
|
||||
module.exports = PadDiff;
|
458
src/node/utils/padDiff.ts
Normal file
458
src/node/utils/padDiff.ts
Normal file
|
@ -0,0 +1,458 @@
|
|||
'use strict';
|
||||
|
||||
import {PadAuthor, PadType} from "../types/PadType";
|
||||
import {MapArrayType} from "../types/MapType";
|
||||
|
||||
const AttributeMap = require('../../static/js/AttributeMap');
|
||||
const Changeset = require('../../static/js/Changeset');
|
||||
const attributes = require('../../static/js/attributes');
|
||||
const exportHtml = require('./ExportHtml');
|
||||
|
||||
|
||||
class PadDiff {
|
||||
private readonly _pad: PadType;
|
||||
private readonly _fromRev: string;
|
||||
private readonly _toRev: string;
|
||||
private _html: any;
|
||||
public _authors: any[];
|
||||
private self: PadDiff | undefined
|
||||
constructor(pad: PadType, fromRev:string, toRev:string) {
|
||||
// check parameters
|
||||
if (!pad || !pad.id || !pad.atext || !pad.pool) {
|
||||
throw new Error('Invalid pad');
|
||||
}
|
||||
|
||||
const range = pad.getValidRevisionRange(fromRev, toRev);
|
||||
if (!range) throw new Error(`Invalid revision range. startRev: ${fromRev} endRev: ${toRev}`);
|
||||
|
||||
this._pad = pad;
|
||||
this._fromRev = range.startRev;
|
||||
this._toRev = range.endRev;
|
||||
this._html = null;
|
||||
this._authors = [];
|
||||
}
|
||||
_isClearAuthorship(changeset: any){
|
||||
// unpack
|
||||
const unpacked = Changeset.unpack(changeset);
|
||||
|
||||
// check if there is nothing in the charBank
|
||||
if (unpacked.charBank !== '') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// check if oldLength == newLength
|
||||
if (unpacked.oldLen !== unpacked.newLen) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const [clearOperator, anotherOp] = Changeset.deserializeOps(unpacked.ops);
|
||||
|
||||
// check if there is only one operator
|
||||
if (anotherOp != null) return false;
|
||||
|
||||
// check if this operator doesn't change text
|
||||
if (clearOperator.opcode !== '=') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// check that this operator applys to the complete text
|
||||
// if the text ends with a new line, its exactly one character less, else it has the same length
|
||||
if (clearOperator.chars !== unpacked.oldLen - 1 && clearOperator.chars !== unpacked.oldLen) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const [appliedAttribute, anotherAttribute] =
|
||||
attributes.attribsFromString(clearOperator.attribs, this._pad.pool);
|
||||
|
||||
// Check that the operation has exactly one attribute.
|
||||
if (appliedAttribute == null || anotherAttribute != null) return false;
|
||||
|
||||
// check if the applied attribute is an anonymous author attribute
|
||||
if (appliedAttribute[0] !== 'author' || appliedAttribute[1] !== '') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
async _createClearAuthorship(rev: any){
|
||||
const atext = await this._pad.getInternalRevisionAText(rev);
|
||||
|
||||
// build clearAuthorship changeset
|
||||
const builder = Changeset.builder(atext.text.length);
|
||||
builder.keepText(atext.text, [['author', '']], this._pad.pool);
|
||||
const changeset = builder.toString();
|
||||
|
||||
return changeset;
|
||||
}
|
||||
|
||||
async _createClearStartAtext(rev: any){
|
||||
// get the atext of this revision
|
||||
const atext = await this._pad.getInternalRevisionAText(rev);
|
||||
|
||||
// create the clearAuthorship changeset
|
||||
const changeset = await this._createClearAuthorship(rev);
|
||||
|
||||
// apply the clearAuthorship changeset
|
||||
const newAText = Changeset.applyToAText(changeset, atext, this._pad.pool);
|
||||
|
||||
return newAText;
|
||||
}
|
||||
async _getChangesetsInBulk(startRev: any, count: any) {
|
||||
// find out which revisions we need
|
||||
const revisions = [];
|
||||
for (let i = startRev; i < (startRev + count) && i <= this._pad.head; i++) {
|
||||
revisions.push(i);
|
||||
}
|
||||
|
||||
// get all needed revisions (in parallel)
|
||||
const changesets:any[] = [];
|
||||
const authors: any[] = [];
|
||||
await Promise.all(revisions.map((rev) => this._pad.getRevision(rev).then((revision) => {
|
||||
const arrayNum = rev - startRev;
|
||||
changesets[arrayNum] = revision.changeset;
|
||||
authors[arrayNum] = revision.meta.author;
|
||||
})));
|
||||
|
||||
return {changesets, authors};
|
||||
}
|
||||
_addAuthors(authors: PadAuthor[]){
|
||||
this.self = this;
|
||||
|
||||
// add to array if not in the array
|
||||
authors.forEach((author) => {
|
||||
if (this.self!._authors.indexOf(author) === -1) {
|
||||
this.self!._authors.push(author);
|
||||
}
|
||||
});
|
||||
}
|
||||
async _createDiffAtext(){
|
||||
const bulkSize = 100;
|
||||
|
||||
// get the cleaned startAText
|
||||
let atext = await this._createClearStartAtext(this._fromRev);
|
||||
|
||||
let superChangeset = null;
|
||||
|
||||
for (let rev = this._fromRev + 1; rev <= this._toRev; rev += bulkSize) {
|
||||
// get the bulk
|
||||
const {changesets, authors} = await this._getChangesetsInBulk(rev, bulkSize);
|
||||
|
||||
const addedAuthors = [];
|
||||
|
||||
// run through all changesets
|
||||
for (let i = 0; i < changesets.length && (rev + i) <= this._toRev; ++i) {
|
||||
let changeset = changesets[i];
|
||||
|
||||
// skip clearAuthorship Changesets
|
||||
if (this._isClearAuthorship(changeset)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
changeset = this._extendChangesetWithAuthor(changeset, authors[i], this._pad.pool);
|
||||
|
||||
// add this author to the authorarray
|
||||
addedAuthors.push(authors[i]);
|
||||
|
||||
// compose it with the superChangset
|
||||
if (superChangeset == null) {
|
||||
superChangeset = changeset;
|
||||
} else {
|
||||
superChangeset = Changeset.compose(superChangeset, changeset, this._pad.pool);
|
||||
}
|
||||
}
|
||||
|
||||
// add the authors to the PadDiff authorArray
|
||||
this._addAuthors(addedAuthors);
|
||||
}
|
||||
|
||||
// if there are only clearAuthorship changesets, we don't get a superChangeset,
|
||||
// so we can skip this step
|
||||
if (superChangeset) {
|
||||
const deletionChangeset = this._createDeletionChangeset(superChangeset, atext, this._pad.pool);
|
||||
|
||||
// apply the superChangeset, which includes all addings
|
||||
atext = Changeset.applyToAText(superChangeset, atext, this._pad.pool);
|
||||
|
||||
// apply the deletionChangeset, which adds a deletions
|
||||
atext = Changeset.applyToAText(deletionChangeset, atext, this._pad.pool);
|
||||
}
|
||||
|
||||
return atext;
|
||||
}
|
||||
async getHtml(){
|
||||
// cache the html
|
||||
if (this._html != null) {
|
||||
return this._html;
|
||||
}
|
||||
|
||||
// get the diff atext
|
||||
const atext = await this._createDiffAtext();
|
||||
|
||||
// get the authorColor table
|
||||
const authorColors = await this._pad.getAllAuthorColors();
|
||||
|
||||
// convert the atext to html
|
||||
this._html = await exportHtml.getHTMLFromAtext(this._pad, atext, authorColors);
|
||||
|
||||
return this._html;
|
||||
}
|
||||
|
||||
async getAuthors() {
|
||||
// check if html was already produced, if not produce it, this generates
|
||||
// the author array at the same time
|
||||
if (this._html == null) {
|
||||
await this.getHtml();
|
||||
}
|
||||
|
||||
return this.self!._authors;
|
||||
}
|
||||
|
||||
_extendChangesetWithAuthor(changeset: any, author: any, apool: any){
|
||||
// unpack
|
||||
const unpacked = Changeset.unpack(changeset);
|
||||
|
||||
const assem = Changeset.opAssembler();
|
||||
|
||||
// create deleted attribs
|
||||
const authorAttrib = apool.putAttrib(['author', author || '']);
|
||||
const deletedAttrib = apool.putAttrib(['removed', true]);
|
||||
const attribs = `*${Changeset.numToString(authorAttrib)}*${Changeset.numToString(deletedAttrib)}`;
|
||||
|
||||
for (const operator of Changeset.deserializeOps(unpacked.ops)) {
|
||||
if (operator.opcode === '-') {
|
||||
// this is a delete operator, extend it with the author
|
||||
operator.attribs = attribs;
|
||||
} else if (operator.opcode === '=' && operator.attribs) {
|
||||
// this is operator changes only attributes, let's mark which author did that
|
||||
operator.attribs += `*${Changeset.numToString(authorAttrib)}`;
|
||||
}
|
||||
|
||||
// append the new operator to our assembler
|
||||
assem.append(operator);
|
||||
}
|
||||
|
||||
// return the modified changeset
|
||||
return Changeset.pack(unpacked.oldLen, unpacked.newLen, assem.toString(), unpacked.charBank);
|
||||
}
|
||||
_createDeletionChangeset(cs: any, startAText: any, apool: any){
|
||||
const lines = Changeset.splitTextLines(startAText.text);
|
||||
const alines = Changeset.splitAttributionLines(startAText.attribs, startAText.text);
|
||||
|
||||
// lines and alines are what the exports is meant to apply to.
|
||||
// They may be arrays or objects with .get(i) and .length methods.
|
||||
// They include final newlines on lines.
|
||||
|
||||
const linesGet = (idx: number) => {
|
||||
if (lines.get) {
|
||||
return lines.get(idx);
|
||||
} else {
|
||||
return lines[idx];
|
||||
}
|
||||
};
|
||||
|
||||
const aLinesGet = (idx: number) => {
|
||||
if (alines.get) {
|
||||
return alines.get(idx);
|
||||
} else {
|
||||
return alines[idx];
|
||||
}
|
||||
};
|
||||
|
||||
let curLine = 0;
|
||||
let curChar = 0;
|
||||
let curLineOps: { next: () => any; } | null = null;
|
||||
let curLineOpsNext: { done: any; value: any; } | null = null;
|
||||
let curLineOpsLine: number;
|
||||
let curLineNextOp = new Changeset.Op('+');
|
||||
|
||||
const unpacked = Changeset.unpack(cs);
|
||||
const builder = Changeset.builder(unpacked.newLen);
|
||||
|
||||
const consumeAttribRuns = (numChars: number, func: Function /* (len, attribs, endsLine)*/) => {
|
||||
if (!curLineOps || curLineOpsLine !== curLine) {
|
||||
curLineOps = Changeset.deserializeOps(aLinesGet(curLine));
|
||||
curLineOpsNext = curLineOps!.next();
|
||||
curLineOpsLine = curLine;
|
||||
let indexIntoLine = 0;
|
||||
while (!curLineOpsNext!.done) {
|
||||
curLineNextOp = curLineOpsNext!.value;
|
||||
curLineOpsNext = curLineOps!.next();
|
||||
if (indexIntoLine + curLineNextOp.chars >= curChar) {
|
||||
curLineNextOp.chars -= (curChar - indexIntoLine);
|
||||
break;
|
||||
}
|
||||
indexIntoLine += curLineNextOp.chars;
|
||||
}
|
||||
}
|
||||
|
||||
while (numChars > 0) {
|
||||
if (!curLineNextOp.chars && curLineOpsNext!.done) {
|
||||
curLine++;
|
||||
curChar = 0;
|
||||
curLineOpsLine = curLine;
|
||||
curLineNextOp.chars = 0;
|
||||
curLineOps = Changeset.deserializeOps(aLinesGet(curLine));
|
||||
curLineOpsNext = curLineOps!.next();
|
||||
}
|
||||
|
||||
if (!curLineNextOp.chars) {
|
||||
if (curLineOpsNext!.done) {
|
||||
curLineNextOp = new Changeset.Op();
|
||||
} else {
|
||||
curLineNextOp = curLineOpsNext!.value;
|
||||
curLineOpsNext = curLineOps!.next();
|
||||
}
|
||||
}
|
||||
|
||||
const charsToUse = Math.min(numChars, curLineNextOp.chars);
|
||||
|
||||
func(charsToUse, curLineNextOp.attribs,
|
||||
charsToUse === curLineNextOp.chars && curLineNextOp.lines > 0);
|
||||
numChars -= charsToUse;
|
||||
curLineNextOp.chars -= charsToUse;
|
||||
curChar += charsToUse;
|
||||
}
|
||||
|
||||
if (!curLineNextOp.chars && curLineOpsNext!.done) {
|
||||
curLine++;
|
||||
curChar = 0;
|
||||
}
|
||||
};
|
||||
|
||||
const skip = (N:number, L:number) => {
|
||||
if (L) {
|
||||
curLine += L;
|
||||
curChar = 0;
|
||||
} else if (curLineOps && curLineOpsLine === curLine) {
|
||||
consumeAttribRuns(N, () => {});
|
||||
} else {
|
||||
curChar += N;
|
||||
}
|
||||
};
|
||||
|
||||
const nextText = (numChars: number) => {
|
||||
let len = 0;
|
||||
const assem = Changeset.stringAssembler();
|
||||
const firstString = linesGet(curLine).substring(curChar);
|
||||
len += firstString.length;
|
||||
assem.append(firstString);
|
||||
|
||||
let lineNum = curLine + 1;
|
||||
|
||||
while (len < numChars) {
|
||||
const nextString = linesGet(lineNum);
|
||||
len += nextString.length;
|
||||
assem.append(nextString);
|
||||
lineNum++;
|
||||
}
|
||||
|
||||
return assem.toString().substring(0, numChars);
|
||||
};
|
||||
|
||||
const cachedStrFunc = (func:Function) => {
|
||||
const cache:MapArrayType<any> = {};
|
||||
|
||||
return (s:string) => {
|
||||
if (!cache[s]) {
|
||||
cache[s] = func(s);
|
||||
}
|
||||
return cache[s];
|
||||
};
|
||||
};
|
||||
|
||||
for (const csOp of Changeset.deserializeOps(unpacked.ops)) {
|
||||
if (csOp.opcode === '=') {
|
||||
const textBank = nextText(csOp.chars);
|
||||
|
||||
// decide if this equal operator is an attribution change or not.
|
||||
// We can see this by checkinf if attribs is set.
|
||||
// If the text this operator applies to is only a star,
|
||||
// than this is a false positive and should be ignored
|
||||
if (csOp.attribs && textBank !== '*') {
|
||||
const attribs = AttributeMap.fromString(csOp.attribs, apool);
|
||||
const undoBackToAttribs = cachedStrFunc((oldAttribsStr: string) => {
|
||||
const oldAttribs = AttributeMap.fromString(oldAttribsStr, apool);
|
||||
const backAttribs = new AttributeMap(apool)
|
||||
.set('author', '')
|
||||
.set('removed', 'true');
|
||||
for (const [key, value] of attribs) {
|
||||
const oldValue = oldAttribs.get(key);
|
||||
if (oldValue !== value) backAttribs.set(key, oldValue);
|
||||
}
|
||||
// TODO: backAttribs does not restore removed attributes (it is missing attributes that
|
||||
// are in oldAttribs but not in attribs). I don't know if that is intentional.
|
||||
return backAttribs.toString();
|
||||
});
|
||||
|
||||
let textLeftToProcess = textBank;
|
||||
|
||||
while (textLeftToProcess.length > 0) {
|
||||
// process till the next line break or process only one line break
|
||||
let lengthToProcess = textLeftToProcess.indexOf('\n');
|
||||
let lineBreak = false;
|
||||
switch (lengthToProcess) {
|
||||
case -1:
|
||||
lengthToProcess = textLeftToProcess.length;
|
||||
break;
|
||||
case 0:
|
||||
lineBreak = true;
|
||||
lengthToProcess = 1;
|
||||
break;
|
||||
}
|
||||
|
||||
// get the text we want to procceed in this step
|
||||
const processText = textLeftToProcess.substr(0, lengthToProcess);
|
||||
|
||||
textLeftToProcess = textLeftToProcess.substr(lengthToProcess);
|
||||
|
||||
if (lineBreak) {
|
||||
builder.keep(1, 1); // just skip linebreaks, don't do a insert + keep for a linebreak
|
||||
|
||||
// consume the attributes of this linebreak
|
||||
consumeAttribRuns(1, () => {});
|
||||
} else {
|
||||
// add the old text via an insert, but add a deletion attribute +
|
||||
// the author attribute of the author who deleted it
|
||||
let textBankIndex = 0;
|
||||
consumeAttribRuns(lengthToProcess, (len: number, attribs:string, endsLine: string) => {
|
||||
// get the old attributes back
|
||||
const oldAttribs = undoBackToAttribs(attribs);
|
||||
|
||||
builder.insert(processText.substr(textBankIndex, len), oldAttribs);
|
||||
textBankIndex += len;
|
||||
});
|
||||
|
||||
builder.keep(lengthToProcess, 0);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
skip(csOp.chars, csOp.lines);
|
||||
builder.keep(csOp.chars, csOp.lines);
|
||||
}
|
||||
} else if (csOp.opcode === '+') {
|
||||
builder.keep(csOp.chars, csOp.lines);
|
||||
} else if (csOp.opcode === '-') {
|
||||
const textBank = nextText(csOp.chars);
|
||||
let textBankIndex = 0;
|
||||
|
||||
consumeAttribRuns(csOp.chars, (len: number, attribs: string[], endsLine: string) => {
|
||||
builder.insert(textBank.substr(textBankIndex, len), attribs + csOp.attribs);
|
||||
textBankIndex += len;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Changeset.checkRep(builder.toString());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
// this method is 80% like Changeset.inverse. I just changed so instead of reverting,
|
||||
// it adds deletions and attribute changes to to the atext.
|
||||
PadDiff.prototype._createDeletionChangeset = function (cs, startAText, apool) {
|
||||
|
||||
};
|
||||
|
||||
// Export the PadDiff constructor (CommonJS style, matching the rest of the
// server-side modules) so callers can `require` and instantiate it.
module.exports = PadDiff;
|
Loading…
Add table
Add a link
Reference in a new issue