lint: Run eslint --fix on bin/ and tests/

Richard Hansen 2020-11-23 13:21:51 -05:00 committed by John McLear
parent 0625739cb8
commit b8d07a42eb
78 changed files with 4319 additions and 4599 deletions
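
The changes below are mechanical: eslint --fix rewrites var to const/let, double-quoted strings to single-quoted ones, string concatenation to template literals, and function callbacks to arrow functions, and normalizes comment and brace spacing. The repository's actual ESLint configuration is not part of this commit view; a minimal hypothetical .eslintrc.js producing the fixes visible below might look like:

// Hypothetical config, for orientation only -- the real one is not shown in this view.
module.exports = {
  rules: {
    'no-var': 'error',                      // var -> let/const
    'prefer-const': 'error',                // let -> const when never reassigned
    quotes: ['error', 'single'],            // "..." -> '...'
    'prefer-template': 'error',             // 'a' + b -> `a${b}`
    'prefer-arrow-callback': 'error',       // function () {} callbacks -> () => {}
    'spaced-comment': ['error', 'always'],  // //comment -> // comment
    'comma-dangle': ['error', 'always-multiline'],
  },
};

All of these are core ESLint rules, and all of them are auto-fixable.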

View file

@@ -3,88 +3,85 @@
*/
if (process.argv.length != 2) {
console.error("Use: node bin/checkAllPads.js");
console.error('Use: node bin/checkAllPads.js');
process.exit(1);
}
// load and initialize NPM
let npm = require('../src/node_modules/npm');
npm.load({}, async function() {
const npm = require('../src/node_modules/npm');
npm.load({}, async () => {
try {
// initialize the database
let settings = require('../src/node/utils/Settings');
let db = require('../src/node/db/DB');
const settings = require('../src/node/utils/Settings');
const db = require('../src/node/db/DB');
await db.init();
// load modules
let Changeset = require('../src/static/js/Changeset');
let padManager = require('../src/node/db/PadManager');
const Changeset = require('../src/static/js/Changeset');
const padManager = require('../src/node/db/PadManager');
// get all pads
let res = await padManager.listAllPads();
const res = await padManager.listAllPads();
for (let padId of res.padIDs) {
let pad = await padManager.getPad(padId);
for (const padId of res.padIDs) {
const pad = await padManager.getPad(padId);
// check if the pad has a pool
if (pad.pool === undefined) {
console.error("[" + pad.id + "] Missing attribute pool");
console.error(`[${pad.id}] Missing attribute pool`);
continue;
}
// create an array with key revisions
// key revisions always save the full pad atext
let head = pad.getHeadRevisionNumber();
let keyRevisions = [];
const head = pad.getHeadRevisionNumber();
const keyRevisions = [];
for (let rev = 0; rev < head; rev += 100) {
keyRevisions.push(rev);
}
// run through all key revisions
for (let keyRev of keyRevisions) {
for (const keyRev of keyRevisions) {
// create an array of revisions we need until the next keyRevision or the end
var revisionsNeeded = [];
for (let rev = keyRev ; rev <= keyRev + 100 && rev <= head; rev++) {
const revisionsNeeded = [];
for (let rev = keyRev; rev <= keyRev + 100 && rev <= head; rev++) {
revisionsNeeded.push(rev);
}
// this array will hold all revision changesets
var revisions = [];
const revisions = [];
// run through all needed revisions and get them from the database
for (let revNum of revisionsNeeded) {
let revision = await db.get("pad:" + pad.id + ":revs:" + revNum);
revisions[revNum] = revision;
for (const revNum of revisionsNeeded) {
const revision = await db.get(`pad:${pad.id}:revs:${revNum}`);
revisions[revNum] = revision;
}
// check if the revision exists
if (revisions[keyRev] == null) {
console.error("[" + pad.id + "] Missing revision " + keyRev);
console.error(`[${pad.id}] Missing revision ${keyRev}`);
continue;
}
// check if there is an atext in the keyRevisions
if (revisions[keyRev].meta === undefined || revisions[keyRev].meta.atext === undefined) {
console.error("[" + pad.id + "] Missing atext in revision " + keyRev);
console.error(`[${pad.id}] Missing atext in revision ${keyRev}`);
continue;
}
let apool = pad.pool;
const apool = pad.pool;
let atext = revisions[keyRev].meta.atext;
for (let rev = keyRev + 1; rev <= keyRev + 100 && rev <= head; rev++) {
try {
let cs = revisions[rev].changeset;
const cs = revisions[rev].changeset;
atext = Changeset.applyToAText(cs, atext, apool);
} catch (e) {
console.error("[" + pad.id + "] Bad changeset at revision " + i + " - " + e.message);
console.error(`[${pad.id}] Bad changeset at revision ${i} - ${e.message}`);
}
}
}
console.log("finished");
console.log('finished');
process.exit(0);
}
} catch (err) {
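
The heart of this checker is the key-revision replay: every 100th revision stores a full atext snapshot, and each changeset after it must apply cleanly on top. A condensed sketch of that loop, reusing the names from the script above:

// Sketch only; assumes db, Changeset, and pad are initialized as in the script above.
const verifyKeyRev = async (pad, keyRev, head) => {
  const revs = [];
  for (let rev = keyRev; rev <= keyRev + 100 && rev <= head; rev++) {
    revs[rev] = await db.get(`pad:${pad.id}:revs:${rev}`);
  }
  // key revisions store the full atext; the following revisions are deltas on top of it
  let atext = revs[keyRev].meta.atext;
  for (let rev = keyRev + 1; rev <= keyRev + 100 && rev <= head; rev++) {
    atext = Changeset.applyToAText(revs[rev].changeset, atext, pad.pool);
  }
  return atext; // applyToAText throws if a changeset is corrupt
};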

View file

@@ -3,7 +3,7 @@
*/
if (process.argv.length != 3) {
console.error("Use: node bin/checkPad.js $PADID");
console.error('Use: node bin/checkPad.js $PADID');
process.exit(1);
}
@@ -11,83 +11,80 @@ if (process.argv.length != 3) {
const padId = process.argv[2];
// load and initialize NPM;
let npm = require('../src/node_modules/npm');
npm.load({}, async function() {
const npm = require('../src/node_modules/npm');
npm.load({}, async () => {
try {
// initialize database
let settings = require('../src/node/utils/Settings');
let db = require('../src/node/db/DB');
const settings = require('../src/node/utils/Settings');
const db = require('../src/node/db/DB');
await db.init();
// load modules
let Changeset = require('ep_etherpad-lite/static/js/Changeset');
let padManager = require('../src/node/db/PadManager');
const Changeset = require('ep_etherpad-lite/static/js/Changeset');
const padManager = require('../src/node/db/PadManager');
let exists = await padManager.doesPadExists(padId);
const exists = await padManager.doesPadExists(padId);
if (!exists) {
console.error("Pad does not exist");
console.error('Pad does not exist');
process.exit(1);
}
// get the pad
let pad = await padManager.getPad(padId);
const pad = await padManager.getPad(padId);
// create an array with key revisions
// key revisions always save the full pad atext
let head = pad.getHeadRevisionNumber();
let keyRevisions = [];
const head = pad.getHeadRevisionNumber();
const keyRevisions = [];
for (let rev = 0; rev < head; rev += 100) {
keyRevisions.push(rev);
}
// run through all key revisions
for (let keyRev of keyRevisions) {
for (const keyRev of keyRevisions) {
// create an array of revisions we need until the next keyRevision or the end
let revisionsNeeded = [];
const revisionsNeeded = [];
for (let rev = keyRev; rev <= keyRev + 100 && rev <= head; rev++) {
revisionsNeeded.push(rev);
}
// this array will hold all revision changesets
var revisions = [];
const revisions = [];
// run through all needed revisions and get them from the database
for (let revNum of revisionsNeeded) {
let revision = await db.get("pad:" + padId + ":revs:" + revNum);
for (const revNum of revisionsNeeded) {
const revision = await db.get(`pad:${padId}:revs:${revNum}`);
revisions[revNum] = revision;
}
// check if the pad has a pool
if (pad.pool === undefined ) {
console.error("Attribute pool is missing");
if (pad.pool === undefined) {
console.error('Attribute pool is missing');
process.exit(1);
}
// check if there is an atext in the keyRevisions
if (revisions[keyRev] === undefined || revisions[keyRev].meta === undefined || revisions[keyRev].meta.atext === undefined) {
console.error("No atext in key revision " + keyRev);
console.error(`No atext in key revision ${keyRev}`);
continue;
}
let apool = pad.pool;
const apool = pad.pool;
let atext = revisions[keyRev].meta.atext;
for (let rev = keyRev + 1; rev <= keyRev + 100 && rev <= head; rev++) {
try {
// console.log("check revision " + rev);
let cs = revisions[rev].changeset;
const cs = revisions[rev].changeset;
atext = Changeset.applyToAText(cs, atext, apool);
} catch(e) {
console.error("Bad changeset at revision " + rev + " - " + e.message);
} catch (e) {
console.error(`Bad changeset at revision ${rev} - ${e.message}`);
continue;
}
}
console.log("finished");
console.log('finished');
process.exit(0);
}
} catch (e) {
console.trace(e);
process.exit(1);

View file

@@ -3,7 +3,7 @@
*/
if (process.argv.length != 3) {
console.error("Use: node bin/checkPadDeltas.js $PADID");
console.error('Use: node bin/checkPadDeltas.js $PADID');
process.exit(1);
}
@@ -11,108 +11,101 @@ if (process.argv.length != 3) {
const padId = process.argv[2];
// load and initialize NPM;
var expect = require('expect.js')
var diff = require('diff')
var async = require('async')
const expect = require('expect.js');
const diff = require('diff');
var async = require('async');
let npm = require('../src/node_modules/npm');
var async = require("ep_etherpad-lite/node_modules/async");
var Changeset = require("ep_etherpad-lite/static/js/Changeset");
npm.load({}, async function() {
const npm = require('../src/node_modules/npm');
var async = require('ep_etherpad-lite/node_modules/async');
const Changeset = require('ep_etherpad-lite/static/js/Changeset');
npm.load({}, async () => {
try {
// initialize database
let settings = require('../src/node/utils/Settings');
let db = require('../src/node/db/DB');
const settings = require('../src/node/utils/Settings');
const db = require('../src/node/db/DB');
await db.init();
// load modules
let Changeset = require('ep_etherpad-lite/static/js/Changeset');
let padManager = require('../src/node/db/PadManager');
const Changeset = require('ep_etherpad-lite/static/js/Changeset');
const padManager = require('../src/node/db/PadManager');
let exists = await padManager.doesPadExists(padId);
const exists = await padManager.doesPadExists(padId);
if (!exists) {
console.error("Pad does not exist");
console.error('Pad does not exist');
process.exit(1);
}
// get the pad
let pad = await padManager.getPad(padId);
const pad = await padManager.getPad(padId);
//create an array with key revisions
//key revisions always save the full pad atext
var head = pad.getHeadRevisionNumber();
var keyRevisions = [];
for(var i=0;i<head;i+=100)
{
// create an array with key revisions
// key revisions always save the full pad atext
const head = pad.getHeadRevisionNumber();
const keyRevisions = [];
for (var i = 0; i < head; i += 100) {
keyRevisions.push(i);
}
//create an array with all revisions
var revisions = [];
for(var i=0;i<=head;i++)
{
// create an array with all revisions
const revisions = [];
for (var i = 0; i <= head; i++) {
revisions.push(i);
}
var atext = Changeset.makeAText("\n")
let atext = Changeset.makeAText('\n');
//run through all revisions
async.forEachSeries(revisions, function(revNum, callback) {
//console.log('Fetching', revNum)
db.db.get("pad:"+padId+":revs:" + revNum, function(err, revision) {
if(err) return callback(err);
// run through all revisions
async.forEachSeries(revisions, (revNum, callback) => {
// console.log('Fetching', revNum)
db.db.get(`pad:${padId}:revs:${revNum}`, (err, revision) => {
if (err) return callback(err);
//check if there is an atext in the keyRevisions
if(~keyRevisions.indexOf(revNum) && (revision === undefined || revision.meta === undefined || revision.meta.atext === undefined)) {
console.error("No atext in key revision " + revNum);
// check if there is an atext in the keyRevisions
if (~keyRevisions.indexOf(revNum) && (revision === undefined || revision.meta === undefined || revision.meta.atext === undefined)) {
console.error(`No atext in key revision ${revNum}`);
callback();
return;
}
try {
//console.log("check revision ", revNum);
var cs = revision.changeset;
// console.log("check revision ", revNum);
const cs = revision.changeset;
atext = Changeset.applyToAText(cs, atext, pad.pool);
}
catch(e) {
console.error("Bad changeset at revision " + revNum + " - " + e.message);
} catch (e) {
console.error(`Bad changeset at revision ${revNum} - ${e.message}`);
callback();
return;
}
if(~keyRevisions.indexOf(revNum)) {
if (~keyRevisions.indexOf(revNum)) {
try {
expect(revision.meta.atext.text).to.eql(atext.text)
expect(revision.meta.atext.attribs).to.eql(atext.attribs)
}catch(e) {
console.error("Atext in key revision "+revNum+" doesn't match computed one.")
console.log(diff.diffChars(atext.text, revision.meta.atext.text).map(function(op) {if(!op.added && !op.removed) op.value = op.value.length; return op}))
//console.error(e)
//console.log('KeyRev. :', revision.meta.atext)
//console.log('Computed:', atext)
callback()
return
expect(revision.meta.atext.text).to.eql(atext.text);
expect(revision.meta.atext.attribs).to.eql(atext.attribs);
} catch (e) {
console.error(`Atext in key revision ${revNum} doesn't match computed one.`);
console.log(diff.diffChars(atext.text, revision.meta.atext.text).map((op) => { if (!op.added && !op.removed) op.value = op.value.length; return op; }));
// console.error(e)
// console.log('KeyRev. :', revision.meta.atext)
// console.log('Computed:', atext)
callback();
return;
}
}
setImmediate(callback)
setImmediate(callback);
});
}, function(er) {
if(pad.atext.text == atext.text) console.log('ok')
else {
console.error('Pad AText doesn\'t match computed one! (Computed ',atext.text.length, ', db', pad.atext.text.length,')')
console.log(diff.diffChars(atext.text, pad.atext.text).map(function(op) {if(!op.added && !op.removed) op.value = op.value.length; return op}))
}, (er) => {
if (pad.atext.text == atext.text) { console.log('ok'); } else {
console.error('Pad AText doesn\'t match computed one! (Computed ', atext.text.length, ', db', pad.atext.text.length, ')');
console.log(diff.diffChars(atext.text, pad.atext.text).map((op) => { if (!op.added && !op.removed) op.value = op.value.length; return op; }));
}
callback(er)
callback(er);
});
process.exit(0);
} catch (e) {
console.trace(e);
process.exit(1);
}
});
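
When a stored key-revision atext disagrees with the computed one, the script prints a character diff with unchanged runs collapsed to their length, so only actual insertions and deletions appear in full. A small illustration of that compacting map, using the same diff package required above:

// Sketch: what the map produces for a short pair of strings.
const diff = require('diff');
const ops = diff.diffChars('hello world', 'hello brave world').map((op) => {
  if (!op.added && !op.removed) op.value = op.value.length;
  return op;
});
// ops is roughly [{value: 6}, {added: true, value: 'brave '}, {value: 5}],
// i.e. 6 unchanged chars, one insertion, 5 unchanged chars.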

View file

@@ -1,120 +1,116 @@
var startTime = Date.now();
var fs = require("fs");
var ueberDB = require("../src/node_modules/ueberdb2");
var mysql = require("../src/node_modules/ueberdb2/node_modules/mysql");
var async = require("../src/node_modules/async");
var Changeset = require("ep_etherpad-lite/static/js/Changeset");
var randomString = require('ep_etherpad-lite/static/js/pad_utils').randomString;
var AttributePool = require("ep_etherpad-lite/static/js/AttributePool");
const startTime = Date.now();
const fs = require('fs');
const ueberDB = require('../src/node_modules/ueberdb2');
const mysql = require('../src/node_modules/ueberdb2/node_modules/mysql');
const async = require('../src/node_modules/async');
const Changeset = require('ep_etherpad-lite/static/js/Changeset');
const randomString = require('ep_etherpad-lite/static/js/pad_utils').randomString;
const AttributePool = require('ep_etherpad-lite/static/js/AttributePool');
var settingsFile = process.argv[2];
var sqlOutputFile = process.argv[3];
const settingsFile = process.argv[2];
const sqlOutputFile = process.argv[3];
//stop if the settings file or the sql output file is not set
if(!settingsFile || !sqlOutputFile)
{
console.error("Use: node convert.js $SETTINGSFILE $SQLOUTPUT");
// stop if the settings file or the sql output file is not set
if (!settingsFile || !sqlOutputFile) {
console.error('Use: node convert.js $SETTINGSFILE $SQLOUTPUT');
process.exit(1);
}
log("read settings file...");
//read the settings file and parse the json
var settings = JSON.parse(fs.readFileSync(settingsFile, "utf8"));
log("done");
log('read settings file...');
// read the settings file and parse the json
const settings = JSON.parse(fs.readFileSync(settingsFile, 'utf8'));
log('done');
log("open output file...");
var sqlOutput = fs.openSync(sqlOutputFile, "w");
var sql = "SET CHARACTER SET UTF8;\n" +
"CREATE TABLE IF NOT EXISTS `store` ( \n" +
"`key` VARCHAR( 100 ) NOT NULL , \n" +
"`value` LONGTEXT NOT NULL , \n" +
"PRIMARY KEY ( `key` ) \n" +
") ENGINE = INNODB;\n" +
"START TRANSACTION;\n\n";
log('open output file...');
const sqlOutput = fs.openSync(sqlOutputFile, 'w');
const sql = 'SET CHARACTER SET UTF8;\n' +
'CREATE TABLE IF NOT EXISTS `store` ( \n' +
'`key` VARCHAR( 100 ) NOT NULL , \n' +
'`value` LONGTEXT NOT NULL , \n' +
'PRIMARY KEY ( `key` ) \n' +
') ENGINE = INNODB;\n' +
'START TRANSACTION;\n\n';
fs.writeSync(sqlOutput, sql);
log("done");
log('done');
var etherpadDB = mysql.createConnection({
host : settings.etherpadDB.host,
user : settings.etherpadDB.user,
password : settings.etherpadDB.password,
database : settings.etherpadDB.database,
port : settings.etherpadDB.port
const etherpadDB = mysql.createConnection({
host: settings.etherpadDB.host,
user: settings.etherpadDB.user,
password: settings.etherpadDB.password,
database: settings.etherpadDB.database,
port: settings.etherpadDB.port,
});
//get the timestamp once
var timestamp = Date.now();
// get the timestamp once
const timestamp = Date.now();
var padIDs;
let padIDs;
async.series([
//get all padids out of the database...
function(callback) {
log("get all padIds out of the database...");
// get all padids out of the database...
function (callback) {
log('get all padIds out of the database...');
etherpadDB.query("SELECT ID FROM PAD_META", [], function(err, _padIDs) {
etherpadDB.query('SELECT ID FROM PAD_META', [], (err, _padIDs) => {
padIDs = _padIDs;
callback(err);
});
},
function(callback) {
log("done");
function (callback) {
log('done');
//create a queue with a concurrency of 100
var queue = async.queue(function (padId, callback) {
convertPad(padId, function(err) {
// create a queue with a concurrency of 100
const queue = async.queue((padId, callback) => {
convertPad(padId, (err) => {
incrementPadStats();
callback(err);
});
}, 100);
//set the step callback as the queue callback
// set the step callback as the queue callback
queue.drain = callback;
//add the padids to the worker queue
for(var i=0,length=padIDs.length;i<length;i++)
{
// add the padids to the worker queue
for (let i = 0, length = padIDs.length; i < length; i++) {
queue.push(padIDs[i].ID);
}
}
], function(err) {
if(err) throw err;
},
], (err) => {
if (err) throw err;
//write the groups
var sql = "";
for(var proID in proID2groupID)
{
var groupID = proID2groupID[proID];
var subdomain = proID2subdomain[proID];
// write the groups
let sql = '';
for (const proID in proID2groupID) {
const groupID = proID2groupID[proID];
const subdomain = proID2subdomain[proID];
sql+="REPLACE INTO store VALUES (" + etherpadDB.escape("group:" + groupID) + ", " + etherpadDB.escape(JSON.stringify(groups[groupID]))+ ");\n";
sql+="REPLACE INTO store VALUES (" + etherpadDB.escape("mapper2group:subdomain:" + subdomain) + ", " + etherpadDB.escape(groupID)+ ");\n";
sql += `REPLACE INTO store VALUES (${etherpadDB.escape(`group:${groupID}`)}, ${etherpadDB.escape(JSON.stringify(groups[groupID]))});\n`;
sql += `REPLACE INTO store VALUES (${etherpadDB.escape(`mapper2group:subdomain:${subdomain}`)}, ${etherpadDB.escape(groupID)});\n`;
}
//close transaction
sql+="COMMIT;";
// close transaction
sql += 'COMMIT;';
//end the sql file
fs.writeSync(sqlOutput, sql, undefined, "utf-8");
// end the sql file
fs.writeSync(sqlOutput, sql, undefined, 'utf-8');
fs.closeSync(sqlOutput);
log("finished.");
log('finished.');
process.exit(0);
});
function log(str) {
console.log((Date.now() - startTime)/1000 + "\t" + str);
console.log(`${(Date.now() - startTime) / 1000}\t${str}`);
}
var padsDone = 0;
let padsDone = 0;
function incrementPadStats() {
padsDone++;
if(padsDone%100 == 0)
{
var averageTime = Math.round(padsDone/((Date.now() - startTime)/1000));
log(padsDone + "/" + padIDs.length + "\t" + averageTime + " pad/s")
if (padsDone % 100 == 0) {
const averageTime = Math.round(padsDone / ((Date.now() - startTime) / 1000));
log(`${padsDone}/${padIDs.length}\t${averageTime} pad/s`);
}
}
@@ -123,275 +119,245 @@ var proID2subdomain = {};
var groups = {};
function convertPad(padId, callback) {
var changesets = [];
var changesetsMeta = [];
var chatMessages = [];
var authors = [];
var apool;
var subdomain;
var padmeta;
const changesets = [];
const changesetsMeta = [];
const chatMessages = [];
const authors = [];
let apool;
let subdomain;
let padmeta;
async.series([
//get all needed db values
function(callback) {
// get all needed db values
function (callback) {
async.parallel([
//get the chat entries
function(callback) {
var sql = "SELECT * FROM `PAD_CHAT_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_CHAT_META` WHERE ID=?)";
// get the chat entries
function (callback) {
const sql = 'SELECT * FROM `PAD_CHAT_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_CHAT_META` WHERE ID=?)';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
//parse the pages
for(var i=0,length=results.length;i<length;i++)
{
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
// parse the pages
for (let i = 0, length = results.length; i < length; i++) {
parsePage(chatMessages, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
}catch(e) {err = e}
} catch (e) { err = e; }
}
callback(err);
});
},
//get the pad revisions
function(callback) {
var sql = "SELECT * FROM `PAD_REVS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVS_META` WHERE ID=?)";
// get the pad revisions
function (callback) {
const sql = 'SELECT * FROM `PAD_REVS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVS_META` WHERE ID=?)';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
//parse the pages
for(var i=0,length=results.length;i<length;i++)
{
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
// parse the pages
for (let i = 0, length = results.length; i < length; i++) {
parsePage(changesets, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, false);
}
}catch(e) {err = e}
} catch (e) { err = e; }
}
callback(err);
});
},
//get the pad revisions meta data
function(callback) {
var sql = "SELECT * FROM `PAD_REVMETA_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVMETA_META` WHERE ID=?)";
// get the pad revisions meta data
function (callback) {
const sql = 'SELECT * FROM `PAD_REVMETA_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_REVMETA_META` WHERE ID=?)';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
//parse the pages
for(var i=0,length=results.length;i<length;i++)
{
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
// parse the pages
for (let i = 0, length = results.length; i < length; i++) {
parsePage(changesetsMeta, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
}catch(e) {err = e}
} catch (e) { err = e; }
}
callback(err);
});
},
//get the attribute pool of this pad
function(callback) {
var sql = "SELECT `JSON` FROM `PAD_APOOL` WHERE `ID` = ?";
// get the attribute pool of this pad
function (callback) {
const sql = 'SELECT `JSON` FROM `PAD_APOOL` WHERE `ID` = ?';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
apool=JSON.parse(results[0].JSON).x;
}catch(e) {err = e}
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
apool = JSON.parse(results[0].JSON).x;
} catch (e) { err = e; }
}
callback(err);
});
},
//get the author information
function(callback) {
var sql = "SELECT * FROM `PAD_AUTHORS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_AUTHORS_META` WHERE ID=?)";
// get the author information
function (callback) {
const sql = 'SELECT * FROM `PAD_AUTHORS_TEXT` WHERE NUMID = (SELECT `NUMID` FROM `PAD_AUTHORS_META` WHERE ID=?)';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
//parse the pages
for(var i=0, length=results.length;i<length;i++)
{
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
// parse the pages
for (let i = 0, length = results.length; i < length; i++) {
parsePage(authors, results[i].PAGESTART, results[i].OFFSETS, results[i].DATA, true);
}
}catch(e) {err = e}
} catch (e) { err = e; }
}
callback(err);
});
},
//get the pad information
function(callback) {
var sql = "SELECT JSON FROM `PAD_META` WHERE ID=?";
// get the pad information
function (callback) {
const sql = 'SELECT JSON FROM `PAD_META` WHERE ID=?';
etherpadDB.query(sql, [padId], function(err, results) {
if(!err)
{
try
{
etherpadDB.query(sql, [padId], (err, results) => {
if (!err) {
try {
padmeta = JSON.parse(results[0].JSON).x;
}catch(e) {err = e}
} catch (e) { err = e; }
}
callback(err);
});
},
//get the subdomain
function(callback) {
//skip if this is not a proPad
if(padId.indexOf("$") == -1)
{
// get the subdomain
function (callback) {
// skip if this is not a proPad
if (padId.indexOf('$') == -1) {
callback();
return;
}
//get the proID out of this padID
var proID = padId.split("$")[0];
// get the proID out of this padID
const proID = padId.split('$')[0];
var sql = "SELECT subDomain FROM pro_domains WHERE ID = ?";
const sql = 'SELECT subDomain FROM pro_domains WHERE ID = ?';
etherpadDB.query(sql, [proID], function(err, results) {
if(!err)
{
etherpadDB.query(sql, [proID], (err, results) => {
if (!err) {
subdomain = results[0].subDomain;
}
callback(err);
});
}
},
], callback);
},
function(callback) {
//saves all values that should be written to the database
var values = {};
function (callback) {
// saves all values that should be written to the database
const values = {};
//this is a pro pad, let's convert it to a group pad
if(padId.indexOf("$") != -1)
{
var padIdParts = padId.split("$");
var proID = padIdParts[0];
var padName = padIdParts[1];
// this is a pro pad, let's convert it to a group pad
if (padId.indexOf('$') != -1) {
const padIdParts = padId.split('$');
const proID = padIdParts[0];
const padName = padIdParts[1];
var groupID
let groupID;
//this proID has not been converted yet, do it now
if(proID2groupID[proID] == null)
{
groupID = "g." + randomString(16);
// this proID has not been converted yet, do it now
if (proID2groupID[proID] == null) {
groupID = `g.${randomString(16)}`;
//create the mappers for this new group
// create the mappers for this new group
proID2groupID[proID] = groupID;
proID2subdomain[proID] = subdomain;
groups[groupID] = {pads: {}};
}
//use the generated groupID;
// use the generated groupID;
groupID = proID2groupID[proID];
//rename the pad
padId = groupID + "$" + padName;
// rename the pad
padId = `${groupID}$${padName}`;
//set the value for this pad in the group
// set the value for this pad in the group
groups[groupID].pads[padId] = 1;
}
try
{
var newAuthorIDs = {};
var oldName2newName = {};
try {
const newAuthorIDs = {};
const oldName2newName = {};
//replace the authors with generated authors
// replace the authors with generated authors
// we need to do that because the original etherpad saves authors per pad, whereas the new (lite) etherpad uses them globally
for(var i in apool.numToAttrib)
{
for (var i in apool.numToAttrib) {
var key = apool.numToAttrib[i][0];
var value = apool.numToAttrib[i][1];
const value = apool.numToAttrib[i][1];
//skip non-author attributes and anonymous authors
if(key != "author" || value == "")
continue;
// skip non-author attributes and anonymous authors
if (key != 'author' || value == '') continue;
//generate new author values
var authorID = "a." + randomString(16);
var authorColorID = authors[i].colorId || Math.floor(Math.random()*(exports.getColorPalette().length));
var authorName = authors[i].name || null;
// generate new author values
const authorID = `a.${randomString(16)}`;
const authorColorID = authors[i].colorId || Math.floor(Math.random() * (exports.getColorPalette().length));
const authorName = authors[i].name || null;
//overwrite the authorID of the attribute pool
// overwrite the authorID of the attribute pool
apool.numToAttrib[i][1] = authorID;
//write the author to the database
values["globalAuthor:" + authorID] = {"colorId" : authorColorID, "name": authorName, "timestamp": timestamp};
// write the author to the database
values[`globalAuthor:${authorID}`] = {colorId: authorColorID, name: authorName, timestamp};
//save in mappers
// save in mappers
newAuthorIDs[i] = authorID;
oldName2newName[value] = authorID;
}
//save all revisions
for(var i=0;i<changesets.length;i++)
{
values["pad:" + padId + ":revs:" + i] = {changeset: changesets[i],
meta : {
author: newAuthorIDs[changesetsMeta[i].a],
timestamp: changesetsMeta[i].t,
atext: changesetsMeta[i].atext || undefined
}};
// save all revisions
for (var i = 0; i < changesets.length; i++) {
values[`pad:${padId}:revs:${i}`] = {changeset: changesets[i],
meta: {
author: newAuthorIDs[changesetsMeta[i].a],
timestamp: changesetsMeta[i].t,
atext: changesetsMeta[i].atext || undefined,
}};
}
//save all chat messages
for(var i=0;i<chatMessages.length;i++)
{
values["pad:" + padId + ":chat:" + i] = {"text": chatMessages[i].lineText,
"userId": oldName2newName[chatMessages[i].userId],
"time": chatMessages[i].time}
// save all chat messages
for (var i = 0; i < chatMessages.length; i++) {
values[`pad:${padId}:chat:${i}`] = {text: chatMessages[i].lineText,
userId: oldName2newName[chatMessages[i].userId],
time: chatMessages[i].time};
}
//generate the latest atext
var fullAPool = (new AttributePool()).fromJsonable(apool);
var keyRev = Math.floor(padmeta.head / padmeta.keyRevInterval) * padmeta.keyRevInterval;
var atext = changesetsMeta[keyRev].atext;
var curRev = keyRev;
while (curRev < padmeta.head)
{
// generate the latest atext
const fullAPool = (new AttributePool()).fromJsonable(apool);
const keyRev = Math.floor(padmeta.head / padmeta.keyRevInterval) * padmeta.keyRevInterval;
let atext = changesetsMeta[keyRev].atext;
let curRev = keyRev;
while (curRev < padmeta.head) {
curRev++;
var changeset = changesets[curRev];
const changeset = changesets[curRev];
atext = Changeset.applyToAText(changeset, atext, fullAPool);
}
values["pad:" + padId] = {atext: atext,
pool: apool,
head: padmeta.head,
chatHead: padmeta.numChatMessages }
}
catch(e)
{
console.error("Error while converting pad " + padId + ", pad skipped");
values[`pad:${padId}`] = {atext,
pool: apool,
head: padmeta.head,
chatHead: padmeta.numChatMessages};
} catch (e) {
console.error(`Error while converting pad ${padId}, pad skipped`);
console.error(e.stack ? e.stack : JSON.stringify(e));
callback();
return;
}
var sql = "";
for(var key in values)
{
sql+="REPLACE INTO store VALUES (" + etherpadDB.escape(key) + ", " + etherpadDB.escape(JSON.stringify(values[key]))+ ");\n";
let sql = '';
for (var key in values) {
sql += `REPLACE INTO store VALUES (${etherpadDB.escape(key)}, ${etherpadDB.escape(JSON.stringify(values[key]))});\n`;
}
fs.writeSync(sqlOutput, sql, undefined, "utf-8");
fs.writeSync(sqlOutput, sql, undefined, 'utf-8');
callback();
}
},
], callback);
}
@@ -401,27 +367,25 @@ function convertPad(padId, callback) {
* all values behind each other
*/
function parsePage(array, pageStart, offsets, data, json) {
var start = 0;
var lengths = offsets.split(",");
let start = 0;
const lengths = offsets.split(',');
for(var i=0;i<lengths.length;i++)
{
var unitLength = lengths[i];
for (let i = 0; i < lengths.length; i++) {
let unitLength = lengths[i];
//skip empty units
if(unitLength == "")
continue;
// skip empty units
if (unitLength == '') continue;
//parse the number
// parse the number
unitLength = Number(unitLength);
//cut the unit out of data
var unit = data.substr(start, unitLength);
// cut the unit out of data
const unit = data.substr(start, unitLength);
//put it into the array
// put it into the array
array[pageStart + i] = json ? JSON.parse(unit) : unit;
//update start
start+=unitLength;
// update start
start += unitLength;
}
}
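
parsePage above unpacks the old Etherpad page storage: data is one concatenated string and offsets is a comma-separated list of the length of each unit. A worked example under that reading:

// Sketch: two JSON units of lengths 7 and 9 packed into one data string.
const array = [];
parsePage(array, 0, '7,9', '"abcde""fghijkl"', true);
// array[0] === 'abcde'   (data.substr(0, 7), JSON-parsed)
// array[1] === 'fghijkl' (data.substr(7, 9), JSON-parsed)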

View file

@@ -2,7 +2,7 @@
* A tool for generating a test user session which can be used for debugging configs
* that require sessions.
*/
const m = (f) => __dirname + '/../' + f;
const m = (f) => `${__dirname}/../${f}`;
const fs = require('fs');
const path = require('path');
@@ -12,10 +12,10 @@ const settings = require(m('src/node/utils/Settings'));
const supertest = require(m('src/node_modules/supertest'));
(async () => {
const api = supertest('http://'+settings.ip+':'+settings.port);
const api = supertest(`http://${settings.ip}:${settings.port}`);
const filePath = path.join(__dirname, '../APIKEY.txt');
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
let res;
@@ -43,5 +43,5 @@ const supertest = require(m('src/node_modules/supertest'));
res = await api.post(uri('createSession', {apikey, groupID, authorID, validUntil}));
if (res.body.code === 1) throw new Error(`Error creating session: ${res.body}`);
console.log('Session made: ====> create a cookie named sessionID and set the value to',
res.body.data.sessionID);
res.body.data.sessionID);
})();

View file

@@ -4,48 +4,48 @@
*/
const request = require('../src/node_modules/request');
const settings = require(__dirname+'/../tests/container/loadSettings').loadSettings();
const supertest = require(__dirname+'/../src/node_modules/supertest');
const api = supertest('http://'+settings.ip+":"+settings.port);
const settings = require(`${__dirname}/../tests/container/loadSettings`).loadSettings();
const supertest = require(`${__dirname}/../src/node_modules/supertest`);
const api = supertest(`http://${settings.ip}:${settings.port}`);
const path = require('path');
const fs = require('fs');
// get the API Key
var filePath = path.join(__dirname, '../APIKEY.txt');
var apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
const filePath = path.join(__dirname, '../APIKEY.txt');
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
// Set apiVersion to base value, we change this later.
var apiVersion = 1;
var guids;
let apiVersion = 1;
let guids;
// Update the apiVersion
api.get('/api/')
.expect(function(res){
apiVersion = res.body.currentVersion;
if (!res.body.currentVersion) throw new Error("No version set in API");
return;
})
.then(function(){
let guri = '/api/'+apiVersion+'/listAllGroups?apikey='+apikey;
api.get(guri)
.then(function(res){
guids = res.body.data.groupIDs;
guids.forEach(function(groupID){
let luri = '/api/'+apiVersion+'/listSessionsOfGroup?apikey='+apikey + "&groupID="+groupID;
api.get(luri)
.then(function(res){
if(res.body.data){
Object.keys(res.body.data).forEach(function(sessionID){
if(sessionID){
console.log("Deleting", sessionID);
let duri = '/api/'+apiVersion+'/deleteSession?apikey='+apikey + "&sessionID="+sessionID;
api.post(duri); // deletes
}
})
}else{
// no session in this group.
}
})
.expect((res) => {
apiVersion = res.body.currentVersion;
if (!res.body.currentVersion) throw new Error('No version set in API');
return;
})
})
})
.then(() => {
const guri = `/api/${apiVersion}/listAllGroups?apikey=${apikey}`;
api.get(guri)
.then((res) => {
guids = res.body.data.groupIDs;
guids.forEach((groupID) => {
const luri = `/api/${apiVersion}/listSessionsOfGroup?apikey=${apikey}&groupID=${groupID}`;
api.get(luri)
.then((res) => {
if (res.body.data) {
Object.keys(res.body.data).forEach((sessionID) => {
if (sessionID) {
console.log('Deleting', sessionID);
const duri = `/api/${apiVersion}/deleteSession?apikey=${apikey}&sessionID=${sessionID}`;
api.post(duri); // deletes
}
});
} else {
// no session in this group.
}
});
});
});
});
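
This script and the ones below share one pattern: fetch /api/ once to learn the current API version, then build versioned, key-authenticated URIs from it. In sketch form (written with await for brevity; the version string is illustrative):

// Inside an async function; '1.2.14' is just an example value.
const api = supertest(`http://${settings.ip}:${settings.port}`);
const res = await api.get('/api/'); // e.g. res.body.currentVersion === '1.2.14'
const uri = `/api/${res.body.currentVersion}/listAllGroups?apikey=${apikey}`;
// => '/api/1.2.14/listAllGroups?apikey=...'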

View file

@@ -4,47 +4,45 @@
*/
const request = require('../src/node_modules/request');
const settings = require(__dirname+'/../tests/container/loadSettings').loadSettings();
const supertest = require(__dirname+'/../src/node_modules/supertest');
const api = supertest('http://'+settings.ip+":"+settings.port);
const settings = require(`${__dirname}/../tests/container/loadSettings`).loadSettings();
const supertest = require(`${__dirname}/../src/node_modules/supertest`);
const api = supertest(`http://${settings.ip}:${settings.port}`);
const path = require('path');
const fs = require('fs');
if (process.argv.length != 3) {
console.error("Use: node deletePad.js $PADID");
console.error('Use: node deletePad.js $PADID');
process.exit(1);
}
// get the padID
let padId = process.argv[2];
const padId = process.argv[2];
// get the API Key
var filePath = path.join(__dirname, '../APIKEY.txt');
var apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
const filePath = path.join(__dirname, '../APIKEY.txt');
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
// Set apiVersion to base value, we change this later.
var apiVersion = 1;
let apiVersion = 1;
// Update the apiVersion
api.get('/api/')
.expect(function(res){
apiVersion = res.body.currentVersion;
if (!res.body.currentVersion) throw new Error("No version set in API");
return;
})
.end(function(err, res){
.expect((res) => {
apiVersion = res.body.currentVersion;
if (!res.body.currentVersion) throw new Error('No version set in API');
return;
})
.end((err, res) => {
// Now we know the latest API version, let's delete pad
var uri = '/api/'+apiVersion+'/deletePad?apikey='+apikey+'&padID='+padId;
api.post(uri)
.expect(function(res){
if (res.body.code === 1){
console.error("Error deleting pad", res.body);
}else{
console.log("Deleted pad", res.body);
}
return;
})
.end(function(){})
});
const uri = `/api/${apiVersion}/deletePad?apikey=${apikey}&padID=${padId}`;
api.post(uri)
.expect((res) => {
if (res.body.code === 1) {
console.error('Error deleting pad', res.body);
} else {
console.log('Deleted pad', res.body);
}
return;
})
.end(() => {});
});
// end

View file

@@ -20,19 +20,19 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var marked = require('marked');
var fs = require('fs');
var path = require('path');
const marked = require('marked');
const fs = require('fs');
const path = require('path');
// parse the args.
// Don't use nopt or whatever for this. It's simple enough.
var args = process.argv.slice(2);
var format = 'json';
var template = null;
var inputFile = null;
const args = process.argv.slice(2);
let format = 'json';
let template = null;
let inputFile = null;
args.forEach(function (arg) {
args.forEach((arg) => {
if (!arg.match(/^\-\-/)) {
inputFile = arg;
} else if (arg.match(/^\-\-format=/)) {
@@ -40,7 +40,7 @@ args.forEach(function (arg) {
} else if (arg.match(/^\-\-template=/)) {
template = arg.replace(/^\-\-template=/, '');
}
})
});
if (!inputFile) {
@@ -49,25 +49,25 @@ if (!inputFile) {
console.error('Input file = %s', inputFile);
fs.readFile(inputFile, 'utf8', function(er, input) {
fs.readFile(inputFile, 'utf8', (er, input) => {
if (er) throw er;
// process the input for @include lines
processIncludes(inputFile, input, next);
});
var includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
var includeData = {};
const includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
const includeData = {};
function processIncludes(inputFile, input, cb) {
var includes = input.match(includeExpr);
const includes = input.match(includeExpr);
if (includes === null) return cb(null, input);
var errState = null;
let errState = null;
console.error(includes);
var incCount = includes.length;
let incCount = includes.length;
if (incCount === 0) cb(null, input);
includes.forEach(function(include) {
var fname = include.replace(/^@include\s+/, '');
includes.forEach((include) => {
let fname = include.replace(/^@include\s+/, '');
if (!fname.match(/\.md$/)) fname += '.md';
if (includeData.hasOwnProperty(fname)) {
@@ -78,11 +78,11 @@ function processIncludes(inputFile, input, cb) {
}
}
var fullFname = path.resolve(path.dirname(inputFile), fname);
fs.readFile(fullFname, 'utf8', function(er, inc) {
const fullFname = path.resolve(path.dirname(inputFile), fname);
fs.readFile(fullFname, 'utf8', (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
processIncludes(fullFname, inc, function(er, inc) {
processIncludes(fullFname, inc, (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
incCount--;
@@ -101,20 +101,20 @@ function next(er, input) {
if (er) throw er;
switch (format) {
case 'json':
require('./json.js')(input, inputFile, function(er, obj) {
require('./json.js')(input, inputFile, (er, obj) => {
console.log(JSON.stringify(obj, null, 2));
if (er) throw er;
});
break;
case 'html':
require('./html.js')(input, inputFile, template, function(er, html) {
require('./html.js')(input, inputFile, template, (er, html) => {
if (er) throw er;
console.log(html);
});
break;
default:
throw new Error('Invalid format: ' + format);
throw new Error(`Invalid format: ${format}`);
}
}
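
processIncludes above recursively inlines @include directives before the markdown is rendered, appending .md when the extension is missing. For instance:

// Sketch: what the include expression matches.
const includeExpr = /^@include\s+([A-Za-z0-9-_\/]+)(?:\.)?([a-zA-Z]*)$/gmi;
'intro text\n@include api/pad\nmore text'.match(includeExpr);
// => ['@include api/pad'], later resolved to api/pad.md next to the input file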

View file

@@ -19,15 +19,15 @@
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var fs = require('fs');
var marked = require('marked');
var path = require('path');
const fs = require('fs');
const marked = require('marked');
const path = require('path');
module.exports = toHTML;
function toHTML(input, filename, template, cb) {
var lexed = marked.lexer(input);
fs.readFile(template, 'utf8', function(er, template) {
const lexed = marked.lexer(input);
fs.readFile(template, 'utf8', (er, template) => {
if (er) return cb(er);
render(lexed, filename, template, cb);
});
@@ -35,7 +35,7 @@ function toHTML(input, filename, template, cb) {
function render(lexed, filename, template, cb) {
// get the section
var section = getSection(lexed);
const section = getSection(lexed);
filename = path.basename(filename, '.md');
@@ -43,7 +43,7 @@ function render(lexed, filename, template, cb) {
// generate the table of contents.
// this mutates the lexed contents in-place.
buildToc(lexed, filename, function(er, toc) {
buildToc(lexed, filename, (er, toc) => {
if (er) return cb(er);
template = template.replace(/__FILENAME__/g, filename);
@@ -63,11 +63,11 @@ function render(lexed, filename, template, cb) {
// just update the list item text in-place.
// lists that come right after a heading are what we're after.
function parseLists(input) {
var state = null;
var depth = 0;
var output = [];
let state = null;
let depth = 0;
const output = [];
output.links = input.links;
input.forEach(function(tok) {
input.forEach((tok) => {
if (state === null) {
if (tok.type === 'heading') {
state = 'AFTERHEADING';
@@ -79,7 +79,7 @@ function parseLists(input) {
if (tok.type === 'list_start') {
state = 'LIST';
if (depth === 0) {
output.push({ type:'html', text: '<div class="signature">' });
output.push({type: 'html', text: '<div class="signature">'});
}
depth++;
output.push(tok);
@@ -99,7 +99,7 @@ function parseLists(input) {
depth--;
if (depth === 0) {
state = null;
output.push({ type:'html', text: '</div>' });
output.push({type: 'html', text: '</div>'});
}
output.push(tok);
return;
@@ -117,16 +117,16 @@ function parseLists(input) {
function parseListItem(text) {
text = text.replace(/\{([^\}]+)\}/, '<span class="type">$1</span>');
//XXX maybe put more stuff here?
// XXX maybe put more stuff here?
return text;
}
// section is just the first heading
function getSection(lexed) {
var section = '';
for (var i = 0, l = lexed.length; i < l; i++) {
var tok = lexed[i];
const section = '';
for (let i = 0, l = lexed.length; i < l; i++) {
const tok = lexed[i];
if (tok.type === 'heading') return tok.text;
}
return '';
@@ -134,40 +134,39 @@ function getSection(lexed) {
function buildToc(lexed, filename, cb) {
var indent = 0;
var toc = [];
var depth = 0;
lexed.forEach(function(tok) {
const indent = 0;
let toc = [];
let depth = 0;
lexed.forEach((tok) => {
if (tok.type !== 'heading') return;
if (tok.depth - depth > 1) {
return cb(new Error('Inappropriate heading level\n' +
JSON.stringify(tok)));
return cb(new Error(`Inappropriate heading level\n${
JSON.stringify(tok)}`));
}
depth = tok.depth;
var id = getId(filename + '_' + tok.text.trim());
toc.push(new Array((depth - 1) * 2 + 1).join(' ') +
'* <a href="#' + id + '">' +
tok.text + '</a>');
tok.text += '<span><a class="mark" href="#' + id + '" ' +
'id="' + id + '">#</a></span>';
const id = getId(`${filename}_${tok.text.trim()}`);
toc.push(`${new Array((depth - 1) * 2 + 1).join(' ')
}* <a href="#${id}">${
tok.text}</a>`);
tok.text += `<span><a class="mark" href="#${id}" ` +
`id="${id}">#</a></span>`;
});
toc = marked.parse(toc.join('\n'));
cb(null, toc);
}
var idCounters = {};
const idCounters = {};
function getId(text) {
text = text.toLowerCase();
text = text.replace(/[^a-z0-9]+/g, '_');
text = text.replace(/^_+|_+$/, '');
text = text.replace(/^([^a-z])/, '_$1');
if (idCounters.hasOwnProperty(text)) {
text += '_' + (++idCounters[text]);
text += `_${++idCounters[text]}`;
} else {
idCounters[text] = 0;
}
return text;
}
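
getId above slugifies heading text and appends a counter when the same id comes up again, keeping anchors unique within a page. For example:

// Sketch of the dedup behaviour.
getId('Pad.getText()'); // => 'pad_gettext'
getId('Pad.getText()'); // => 'pad_gettext_1' (the counter kicks in on the repeat)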

View file

@@ -24,24 +24,24 @@ module.exports = doJSON;
// Take the lexed input, and return a JSON-encoded object
// A module looks like this: https://gist.github.com/1777387
var marked = require('marked');
const marked = require('marked');
function doJSON(input, filename, cb) {
var root = {source: filename};
var stack = [root];
var depth = 0;
var current = root;
var state = null;
var lexed = marked.lexer(input);
lexed.forEach(function (tok) {
var type = tok.type;
var text = tok.text;
const root = {source: filename};
const stack = [root];
let depth = 0;
let current = root;
let state = null;
const lexed = marked.lexer(input);
lexed.forEach((tok) => {
const type = tok.type;
let text = tok.text;
// <!-- type = module -->
// This is for cases where the markdown semantic structure is lacking.
if (type === 'paragraph' || type === 'html') {
var metaExpr = /<!--([^=]+)=([^\-]+)-->\n*/g;
text = text.replace(metaExpr, function(_0, k, v) {
const metaExpr = /<!--([^=]+)=([^\-]+)-->\n*/g;
text = text.replace(metaExpr, (_0, k, v) => {
current[k.trim()] = v.trim();
return '';
});
@@ -52,8 +52,8 @@ function doJSON(input, filename, cb) {
if (type === 'heading' &&
!text.trim().match(/^example/i)) {
if (tok.depth - depth > 1) {
return cb(new Error('Inappropriate heading level\n'+
JSON.stringify(tok)));
return cb(new Error(`Inappropriate heading level\n${
JSON.stringify(tok)}`));
}
// Sometimes we have two headings with a single
@@ -61,7 +61,7 @@ function doJSON(input, filename, cb) {
if (current &&
state === 'AFTERHEADING' &&
depth === tok.depth) {
var clone = current;
const clone = current;
current = newSection(tok);
current.clone = clone;
// don't keep it around on the stack.
@@ -75,7 +75,7 @@ function doJSON(input, filename, cb) {
// root is always considered the level=0 section,
// and the lowest heading is 1, so this should always
// result in having a valid parent node.
var d = tok.depth;
let d = tok.depth;
while (d <= depth) {
finishSection(stack.pop(), stack[stack.length - 1]);
d++;
@@ -98,7 +98,7 @@ function doJSON(input, filename, cb) {
//
// If one of these isn't found, then anything that comes between
// here and the next heading should be parsed as the desc.
var stability
let stability;
if (state === 'AFTERHEADING') {
if (type === 'code' &&
(stability = text.match(/^Stability: ([0-5])(?:\s*-\s*)?(.*)$/))) {
@@ -138,7 +138,6 @@ function doJSON(input, filename, cb) {
current.desc = current.desc || [];
current.desc.push(tok);
});
// finish any sections left open
@@ -146,7 +145,7 @@ function doJSON(input, filename, cb) {
finishSection(current, stack[stack.length - 1]);
}
return cb(null, root)
return cb(null, root);
}
@@ -193,14 +192,14 @@ function doJSON(input, filename, cb) {
// default: 'false' } ] } ]
function processList(section) {
var list = section.list;
var values = [];
var current;
var stack = [];
const list = section.list;
const values = [];
let current;
const stack = [];
// for now, *just* build the hierarchical list
list.forEach(function(tok) {
var type = tok.type;
list.forEach((tok) => {
const type = tok.type;
if (type === 'space') return;
if (type === 'list_item_start') {
if (!current) {
@@ -217,26 +216,26 @@ function processList(section) {
return;
} else if (type === 'list_item_end') {
if (!current) {
throw new Error('invalid list - end without current item\n' +
JSON.stringify(tok) + '\n' +
JSON.stringify(list));
throw new Error(`invalid list - end without current item\n${
JSON.stringify(tok)}\n${
JSON.stringify(list)}`);
}
current = stack.pop();
} else if (type === 'text') {
if (!current) {
throw new Error('invalid list - text without current item\n' +
JSON.stringify(tok) + '\n' +
JSON.stringify(list));
throw new Error(`invalid list - text without current item\n${
JSON.stringify(tok)}\n${
JSON.stringify(list)}`);
}
current.textRaw = current.textRaw || '';
current.textRaw += tok.text + ' ';
current.textRaw += `${tok.text} `;
}
});
// shove the name in there for properties, since they are always
// just going to be the value etc.
if (section.type === 'property' && values[0]) {
values[0].textRaw = '`' + section.name + '` ' + values[0].textRaw;
values[0].textRaw = `\`${section.name}\` ${values[0].textRaw}`;
}
// now pull the actual values out of the text bits.
@@ -252,9 +251,9 @@
// each item is an argument, unless the name is 'return',
// in which case it's the return value.
section.signatures = section.signatures || [];
var sig = {}
var sig = {};
section.signatures.push(sig);
sig.params = values.filter(function(v) {
sig.params = values.filter((v) => {
if (v.name === 'return') {
sig.return = v;
return false;
@@ -271,7 +270,7 @@
delete value.name;
section.typeof = value.type;
delete value.type;
Object.keys(value).forEach(function(k) {
Object.keys(value).forEach((k) => {
section[k] = value[k];
});
break;
@@ -289,36 +288,36 @@
// textRaw = "someobject.someMethod(a, [b=100], [c])"
function parseSignature(text, sig) {
var params = text.match(paramExpr);
let params = text.match(paramExpr);
if (!params) return;
params = params[1];
// the ] is irrelevant. [ indicates optionalness.
params = params.replace(/\]/g, '');
params = params.split(/,/)
params.forEach(function(p, i, _) {
params = params.split(/,/);
params.forEach((p, i, _) => {
p = p.trim();
if (!p) return;
var param = sig.params[i];
var optional = false;
var def;
let param = sig.params[i];
let optional = false;
let def;
// [foo] -> optional
if (p.charAt(0) === '[') {
optional = true;
p = p.substr(1);
}
var eq = p.indexOf('=');
const eq = p.indexOf('=');
if (eq !== -1) {
def = p.substr(eq + 1);
p = p.substr(0, eq);
}
if (!param) {
param = sig.params[i] = { name: p };
param = sig.params[i] = {name: p};
}
// at this point, the name should match.
if (p !== param.name) {
console.error('Warning: invalid param "%s"', p);
console.error(' > ' + JSON.stringify(param));
console.error(' > ' + text);
console.error(` > ${JSON.stringify(param)}`);
console.error(` > ${text}`);
}
if (optional) param.optional = true;
if (def !== undefined) param.default = def;
@@ -332,18 +331,18 @@ function parseListItem(item) {
// the goal here is to find the name, type, default, and optional.
// anything left over is 'desc'
var text = item.textRaw.trim();
let text = item.textRaw.trim();
// text = text.replace(/^(Argument|Param)s?\s*:?\s*/i, '');
text = text.replace(/^, /, '').trim();
var retExpr = /^returns?\s*:?\s*/i;
var ret = text.match(retExpr);
const retExpr = /^returns?\s*:?\s*/i;
const ret = text.match(retExpr);
if (ret) {
item.name = 'return';
text = text.replace(retExpr, '');
} else {
var nameExpr = /^['`"]?([^'`": \{]+)['`"]?\s*:?\s*/;
var name = text.match(nameExpr);
const nameExpr = /^['`"]?([^'`": \{]+)['`"]?\s*:?\s*/;
const name = text.match(nameExpr);
if (name) {
item.name = name[1];
text = text.replace(nameExpr, '');
@@ -351,24 +350,24 @@ function parseListItem(item) {
}
text = text.trim();
var defaultExpr = /\(default\s*[:=]?\s*['"`]?([^, '"`]*)['"`]?\)/i;
var def = text.match(defaultExpr);
const defaultExpr = /\(default\s*[:=]?\s*['"`]?([^, '"`]*)['"`]?\)/i;
const def = text.match(defaultExpr);
if (def) {
item.default = def[1];
text = text.replace(defaultExpr, '');
}
text = text.trim();
var typeExpr = /^\{([^\}]+)\}/;
var type = text.match(typeExpr);
const typeExpr = /^\{([^\}]+)\}/;
const type = text.match(typeExpr);
if (type) {
item.type = type[1];
text = text.replace(typeExpr, '');
}
text = text.trim();
var optExpr = /^Optional\.|(?:, )?Optional$/;
var optional = text.match(optExpr);
const optExpr = /^Optional\.|(?:, )?Optional$/;
const optional = text.match(optExpr);
if (optional) {
item.optional = true;
text = text.replace(optExpr, '');
@@ -382,9 +381,9 @@
function finishSection(section, parent) {
if (!section || !parent) {
throw new Error('Invalid finishSection call\n'+
JSON.stringify(section) + '\n' +
JSON.stringify(parent));
throw new Error(`Invalid finishSection call\n${
JSON.stringify(section)}\n${
JSON.stringify(parent)}`);
}
if (!section.type) {
@@ -394,7 +393,7 @@ function finishSection(section, parent) {
}
section.displayName = section.name;
section.name = section.name.toLowerCase()
.trim().replace(/\s+/g, '_');
.trim().replace(/\s+/g, '_');
}
if (section.desc && Array.isArray(section.desc)) {
@@ -411,10 +410,10 @@ function finishSection(section, parent) {
// Merge them into the parent.
if (section.type === 'class' && section.ctors) {
section.signatures = section.signatures || [];
var sigs = section.signatures;
section.ctors.forEach(function(ctor) {
const sigs = section.signatures;
section.ctors.forEach((ctor) => {
ctor.signatures = ctor.signatures || [{}];
ctor.signatures.forEach(function(sig) {
ctor.signatures.forEach((sig) => {
sig.desc = ctor.desc;
});
sigs.push.apply(sigs, ctor.signatures);
@@ -425,7 +424,7 @@ function finishSection(section, parent) {
// properties are a bit special.
// their "type" is the type of object, not "property"
if (section.properties) {
section.properties.forEach(function (p) {
section.properties.forEach((p) => {
if (p.typeof) p.type = p.typeof;
else delete p.type;
delete p.typeof;
@@ -434,27 +433,27 @@ function finishSection(section, parent) {
// handle clones
if (section.clone) {
var clone = section.clone;
const clone = section.clone;
delete section.clone;
delete clone.clone;
deepCopy(section, clone);
finishSection(clone, parent);
}
var plur;
let plur;
if (section.type.slice(-1) === 's') {
plur = section.type + 'es';
plur = `${section.type}es`;
} else if (section.type.slice(-1) === 'y') {
plur = section.type.replace(/y$/, 'ies');
} else {
plur = section.type + 's';
plur = `${section.type}s`;
}
// if the parent's type is 'misc', then it's just a random
// collection of stuff, like the "globals" section.
// Make the children top-level items.
if (section.type === 'misc') {
Object.keys(section).forEach(function(k) {
Object.keys(section).forEach((k) => {
switch (k) {
case 'textRaw':
case 'name':
@@ -486,9 +485,7 @@ function finishSection(section, parent) {
// Not a general purpose deep copy.
// But sufficient for these basic things.
function deepCopy(src, dest) {
Object.keys(src).filter(function(k) {
return !dest.hasOwnProperty(k);
}).forEach(function(k) {
Object.keys(src).filter((k) => !dest.hasOwnProperty(k)).forEach((k) => {
dest[k] = deepCopy_(src[k]);
});
}
@@ -497,14 +494,14 @@ function deepCopy_(src) {
if (!src) return src;
if (Array.isArray(src)) {
var c = new Array(src.length);
src.forEach(function(v, i) {
src.forEach((v, i) => {
c[i] = deepCopy_(v);
});
return c;
}
if (typeof src === 'object') {
var c = {};
Object.keys(src).forEach(function(k) {
Object.keys(src).forEach((k) => {
c[k] = deepCopy_(src[k]);
});
return c;
@@ -514,21 +511,21 @@ function deepCopy_(src) {
// these parse out the contents of an H# tag
var eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
var classExpr = /^Class:\s*([^ ]+).*?$/i;
var propExpr = /^(?:property:?\s*)?[^\.]+\.([^ \.\(\)]+)\s*?$/i;
var braceExpr = /^(?:property:?\s*)?[^\.\[]+(\[[^\]]+\])\s*?$/i;
var classMethExpr =
const eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
const classExpr = /^Class:\s*([^ ]+).*?$/i;
const propExpr = /^(?:property:?\s*)?[^\.]+\.([^ \.\(\)]+)\s*?$/i;
const braceExpr = /^(?:property:?\s*)?[^\.\[]+(\[[^\]]+\])\s*?$/i;
const classMethExpr =
/^class\s*method\s*:?[^\.]+\.([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
var methExpr =
const methExpr =
/^(?:method:?\s*)?(?:[^\.]+\.)?([^ \.\(\)]+)\([^\)]*\)\s*?$/i;
var newExpr = /^new ([A-Z][a-z]+)\([^\)]*\)\s*?$/;
const newExpr = /^new ([A-Z][a-z]+)\([^\)]*\)\s*?$/;
var paramExpr = /\((.*)\);?$/;
function newSection(tok) {
var section = {};
const section = {};
// infer the type from the text.
var text = section.textRaw = tok.text;
const text = section.textRaw = tok.text;
if (text.match(eventExpr)) {
section.type = 'event';
section.name = text.replace(eventExpr, '$1');
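
parseSignature above reconciles the parameter names found in a heading's signature with the params already collected from the following list, marking optional parameters and recording defaults. A worked example with a hypothetical signature:

// Sketch; sig.params as processList would have built them.
const sig = {params: [{name: 'text'}, {name: 'author'}]};
parseSignature('pad.setText(text, [author=null])', sig);
// sig.params[1] is now {name: 'author', optional: true, default: 'null'}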

View file

@@ -5,60 +5,60 @@
*/
if (process.argv.length != 3) {
console.error("Use: node extractPadData.js $PADID");
console.error('Use: node extractPadData.js $PADID');
process.exit(1);
}
// get the padID
let padId = process.argv[2];
const padId = process.argv[2];
let npm = require('../src/node_modules/npm');
const npm = require('../src/node_modules/npm');
npm.load({}, async function(er) {
npm.load({}, async (er) => {
if (er) {
console.error("Could not load NPM: " + er)
console.error(`Could not load NPM: ${er}`);
process.exit(1);
}
try {
// initialize database
let settings = require('../src/node/utils/Settings');
let db = require('../src/node/db/DB');
const settings = require('../src/node/utils/Settings');
const db = require('../src/node/db/DB');
await db.init();
// load extra modules
let dirtyDB = require('../src/node_modules/dirty');
let padManager = require('../src/node/db/PadManager');
let util = require('util');
const dirtyDB = require('../src/node_modules/dirty');
const padManager = require('../src/node/db/PadManager');
const util = require('util');
// initialize output database
let dirty = dirtyDB(padId + '.db');
const dirty = dirtyDB(`${padId}.db`);
// Promise wrapped get and set function
let wrapped = db.db.db.wrappedDB;
let get = util.promisify(wrapped.get.bind(wrapped));
let set = util.promisify(dirty.set.bind(dirty));
const wrapped = db.db.db.wrappedDB;
const get = util.promisify(wrapped.get.bind(wrapped));
const set = util.promisify(dirty.set.bind(dirty));
// array in which required key values will be accumulated
let neededDBValues = ['pad:' + padId];
const neededDBValues = [`pad:${padId}`];
// get the actual pad object
let pad = await padManager.getPad(padId);
const pad = await padManager.getPad(padId);
// add all authors
neededDBValues.push(...pad.getAllAuthors().map(author => 'globalAuthor:' + author));
neededDBValues.push(...pad.getAllAuthors().map((author) => `globalAuthor:${author}`));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
neededDBValues.push('pad:' + padId + ':revs:' + rev);
neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
neededDBValues.push('pad:' + padId + ':chat:' + chat);
neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
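// For a hypothetical pad "foo" with one author, two revisions and one chat
// message, neededDBValues would now hold (sketch, author id assumed):
//   ['pad:foo', 'globalAuthor:a.abc123',
//    'pad:foo:revs:0', 'pad:foo:revs:1', 'pad:foo:chat:0']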
for (let dbkey of neededDBValues) {
for (const dbkey of neededDBValues) {
let dbvalue = await get(dbkey);
if (dbvalue && typeof dbvalue !== 'object') {
dbvalue = JSON.parse(dbvalue);


@@ -1,69 +1,64 @@
var startTime = Date.now();
const startTime = Date.now();
require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
require('ep_etherpad-lite/node_modules/npm').load({}, (er, npm) => {
const fs = require('fs');
var fs = require("fs");
const ueberDB = require('ep_etherpad-lite/node_modules/ueberdb2');
const settings = require('ep_etherpad-lite/node/utils/Settings');
const log4js = require('ep_etherpad-lite/node_modules/log4js');
var ueberDB = require("ep_etherpad-lite/node_modules/ueberdb2");
var settings = require("ep_etherpad-lite/node/utils/Settings");
var log4js = require('ep_etherpad-lite/node_modules/log4js');
var dbWrapperSettings = {
const dbWrapperSettings = {
cache: 0,
writeInterval: 100,
json: false // data is already json encoded
json: false, // data is already json encoded
};
var db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger("ueberDB"));
const db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger('ueberDB'));
var sqlFile = process.argv[2];
const sqlFile = process.argv[2];
//stop if the settings file is not set
if(!sqlFile)
{
console.error("Use: node importSqlFile.js $SQLFILE");
// stop if the settings file is not set
if (!sqlFile) {
console.error('Use: node importSqlFile.js $SQLFILE');
process.exit(1);
}
log("initializing db");
db.init(function(err) {
//there was an error while initializing the database, output it and stop
if(err)
{
console.error("ERROR: Problem while initializing the database");
log('initializing db');
db.init((err) => {
// there was an error while initializing the database, output it and stop
if (err) {
console.error('ERROR: Problem while initializing the database');
console.error(err.stack ? err.stack : err);
process.exit(1);
}
else
{
log("done");
} else {
log('done');
log("open output file...");
var lines = fs.readFileSync(sqlFile, 'utf8').split("\n");
log('open output file...');
const lines = fs.readFileSync(sqlFile, 'utf8').split('\n');
var count = lines.length;
var keyNo = 0;
const count = lines.length;
let keyNo = 0;
process.stdout.write("Start importing " + count + " keys...\n");
lines.forEach(function(l) {
if (l.substr(0, 27) == "REPLACE INTO store VALUES (") {
var pos = l.indexOf("', '");
var key = l.substr(28, pos - 28);
var value = l.substr(pos + 3);
process.stdout.write(`Start importing ${count} keys...\n`);
lines.forEach((l) => {
if (l.substr(0, 27) == 'REPLACE INTO store VALUES (') {
const pos = l.indexOf("', '");
const key = l.substr(28, pos - 28);
let value = l.substr(pos + 3);
value = value.substr(0, value.length - 2);
console.log("key: " + key + " val: " + value);
console.log("unval: " + unescape(value));
console.log(`key: ${key} val: ${value}`);
console.log(`unval: ${unescape(value)}`);
db.set(key, unescape(value), null);
keyNo++;
if (keyNo % 1000 == 0) {
process.stdout.write(" " + keyNo + "/" + count + "\n");
process.stdout.write(` ${keyNo}/${count}\n`);
}
}
});
process.stdout.write("\n");
process.stdout.write("done. waiting for db to finish transaction. depended on dbms this may take some time...\n");
process.stdout.write('\n');
process.stdout.write('done. waiting for db to finish transaction. depended on dbms this may take some time...\n');
db.doShutdown(function() {
log("finished, imported " + keyNo + " keys.");
db.doShutdown(() => {
log(`finished, imported ${keyNo} keys.`);
process.exit(0);
});
}
@@ -71,22 +66,22 @@ require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
});
function log(str) {
console.log((Date.now() - startTime)/1000 + "\t" + str);
console.log(`${(Date.now() - startTime) / 1000}\t${str}`);
}
unescape = function(val) {
unescape = function (val) {
// value is a string
if (val.substr(0, 1) == "'") {
val = val.substr(0, val.length - 1).substr(1);
return val.replace(/\\[0nrbtZ\\'"]/g, function(s) {
switch(s) {
case "\\0": return "\0";
case "\\n": return "\n";
case "\\r": return "\r";
case "\\b": return "\b";
case "\\t": return "\t";
case "\\Z": return "\x1a";
return val.replace(/\\[0nrbtZ\\'"]/g, (s) => {
switch (s) {
case '\\0': return '\0';
case '\\n': return '\n';
case '\\r': return '\r';
case '\\b': return '\b';
case '\\t': return '\t';
case '\\Z': return '\x1a';
default: return s.substr(1);
}
});
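// Usage sketch (assumed input): unescape strips the surrounding quotes and
// decodes MySQL-style escapes from the dump, e.g.
//   unescape("'line1\\nline2'")  // -> 'line1' + newline + 'line2'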


@@ -1,6 +1,5 @@
require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
process.chdir(npm.root+'/..')
require('ep_etherpad-lite/node_modules/npm').load({}, (er, npm) => {
process.chdir(`${npm.root}/..`);
// This script requires that you have modified your settings.json file
// to work with a real database. Please make a backup of your dirty.db
@@ -10,40 +9,40 @@ require("ep_etherpad-lite/node_modules/npm").load({}, function(er,npm) {
// `node --max-old-space-size=4096 bin/migrateDirtyDBtoRealDB.js`
var settings = require("ep_etherpad-lite/node/utils/Settings");
var dirty = require("../src/node_modules/dirty");
var ueberDB = require("../src/node_modules/ueberdb2");
var log4js = require("../src/node_modules/log4js");
var dbWrapperSettings = {
"cache": "0", // The cache slows things down when you're mostly writing.
"writeInterval": 0 // Write directly to the database, don't buffer
const settings = require('ep_etherpad-lite/node/utils/Settings');
let dirty = require('../src/node_modules/dirty');
const ueberDB = require('../src/node_modules/ueberdb2');
const log4js = require('../src/node_modules/log4js');
const dbWrapperSettings = {
cache: '0', // The cache slows things down when you're mostly writing.
writeInterval: 0, // Write directly to the database, don't buffer
};
var db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger("ueberDB"));
var i = 0;
var length = 0;
const db = new ueberDB.database(settings.dbType, settings.dbSettings, dbWrapperSettings, log4js.getLogger('ueberDB'));
let i = 0;
let length = 0;
db.init(function() {
console.log("Waiting for dirtyDB to parse its file.");
dirty = dirty('var/dirty.db').on("load", function() {
dirty.forEach(function(){
db.init(() => {
console.log('Waiting for dirtyDB to parse its file.');
dirty = dirty('var/dirty.db').on('load', () => {
dirty.forEach(() => {
length++;
});
console.log(`Found ${length} records, processing now.`);
dirty.forEach(async function(key, value) {
let error = await db.set(key, value);
dirty.forEach(async (key, value) => {
const error = await db.set(key, value);
console.log(`Wrote record ${i}`);
i++;
if (i === length) {
console.log("finished, just clearing up for a bit...");
setTimeout(function() {
console.log('finished, just clearing up for a bit...');
setTimeout(() => {
process.exit(0);
}, 5000);
}
});
console.log("Please wait for all records to flush to database, then kill this process.");
console.log('Please wait for all records to flush to database, then kill this process.');
});
console.log("done?")
console.log('done?');
});
});


@@ -9,96 +9,96 @@ node bin/plugins/checkPlugins.js ep_whatever autofix autocommit
*/
const fs = require("fs");
const { exec } = require("child_process");
const fs = require('fs');
const {exec} = require('child_process');
// get plugin name & path from user input
const pluginName = process.argv[2];
if(!pluginName){
console.error("no plugin name specified");
if (!pluginName) {
console.error('no plugin name specified');
process.exit(1);
}
const pluginPath = "node_modules/"+pluginName;
const pluginPath = `node_modules/${pluginName}`;
console.log("Checking the plugin: "+ pluginName)
console.log(`Checking the plugin: ${pluginName}`);
// Should we autofix?
if (process.argv[3] && process.argv[3] === "autofix") var autoFix = true;
if (process.argv[3] && process.argv[3] === 'autofix') var autoFix = true;
// Should we update files where possible?
if (process.argv[5] && process.argv[5] === "autoupdate") var autoUpdate = true;
if (process.argv[5] && process.argv[5] === 'autoupdate') var autoUpdate = true;
// Should we automcommit and npm publish?!
if (process.argv[4] && process.argv[4] === "autocommit") var autoCommit = true;
if (process.argv[4] && process.argv[4] === 'autocommit') var autoCommit = true;
if(autoCommit){
console.warn("Auto commit is enabled, I hope you know what you are doing...")
if (autoCommit) {
console.warn('Auto commit is enabled, I hope you know what you are doing...');
}
fs.readdir(pluginPath, function (err, rootFiles) {
//handling error
fs.readdir(pluginPath, (err, rootFiles) => {
// handling error
if (err) {
return console.log('Unable to scan directory: ' + err);
return console.log(`Unable to scan directory: ${err}`);
}
// rewriting files to lower case
var files = [];
const files = [];
// some files we need to know the actual file name. Not compulsory but might help in the future.
var readMeFileName;
var repository;
var hasAutoFixed = false;
let readMeFileName;
let repository;
let hasAutoFixed = false;
for (var i = 0; i < rootFiles.length; i++) {
if(rootFiles[i].toLowerCase().indexOf("readme") !== -1) readMeFileName = rootFiles[i];
for (let i = 0; i < rootFiles.length; i++) {
if (rootFiles[i].toLowerCase().indexOf('readme') !== -1) readMeFileName = rootFiles[i];
files.push(rootFiles[i].toLowerCase());
}
if(files.indexOf(".git") === -1){
console.error("No .git folder, aborting");
if (files.indexOf('.git') === -1) {
console.error('No .git folder, aborting');
process.exit(1);
}
// do a git pull...
var child_process = require('child_process');
try{
child_process.execSync('git pull ',{"cwd":pluginPath+"/"});
}catch(e){
console.error("Error git pull", e);
};
try {
child_process.execSync('git pull ', {cwd: `${pluginPath}/`});
} catch (e) {
console.error('Error git pull', e);
}
try {
const path = pluginPath + '/.github/workflows/npmpublish.yml';
const path = `${pluginPath}/.github/workflows/npmpublish.yml`;
if (!fs.existsSync(path)) {
console.log('no .github/workflows/npmpublish.yml, create one and set npm secret to auto publish to npm on commit');
if (autoFix) {
const npmpublish =
fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
fs.mkdirSync(pluginPath + '/.github/workflows', {recursive: true});
fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
fs.writeFileSync(path, npmpublish);
hasAutoFixed = true;
console.log("If you haven't already, setup autopublish for this plugin https://github.com/ether/etherpad-lite/wiki/Plugins:-Automatically-publishing-to-npm-on-commit-to-Github-Repo");
} else {
console.log('Setup autopublish for this plugin https://github.com/ether/etherpad-lite/wiki/Plugins:-Automatically-publishing-to-npm-on-commit-to-Github-Repo');
}
}else{
} else {
// autopublish exists, we should check the version..
// checkVersion takes two file paths and checks for a version string in them.
const currVersionFile = fs.readFileSync(path, {encoding: 'utf8', flag: 'r'});
const existingConfigLocation = currVersionFile.indexOf("##ETHERPAD_NPM_V=");
const existingValue = parseInt(currVersionFile.substr(existingConfigLocation+17, existingConfigLocation.length));
const existingConfigLocation = currVersionFile.indexOf('##ETHERPAD_NPM_V=');
const existingValue = parseInt(currVersionFile.substr(existingConfigLocation + 17, existingConfigLocation.length));
const reqVersionFile = fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
const reqConfigLocation = reqVersionFile.indexOf("##ETHERPAD_NPM_V=");
const reqValue = parseInt(reqVersionFile.substr(reqConfigLocation+17, reqConfigLocation.length));
const reqConfigLocation = reqVersionFile.indexOf('##ETHERPAD_NPM_V=');
const reqValue = parseInt(reqVersionFile.substr(reqConfigLocation + 17, reqConfigLocation.length));
if(!existingValue || (reqValue > existingValue)){
if (!existingValue || (reqValue > existingValue)) {
const npmpublish =
fs.readFileSync('bin/plugins/lib/npmpublish.yml', {encoding: 'utf8', flag: 'r'});
fs.mkdirSync(pluginPath + '/.github/workflows', {recursive: true});
fs.mkdirSync(`${pluginPath}/.github/workflows`, {recursive: true});
fs.writeFileSync(path, npmpublish);
hasAutoFixed = true;
}
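// Sketch of the version check above (marker text per the code, file contents
// assumed): both YAML files carry a line such as
//   ##ETHERPAD_NPM_V=2
// and parseInt() on the text after '##ETHERPAD_NPM_V=' (17 characters in)
// yields the version; the template is rewritten when reqValue > existingValue.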
@@ -107,152 +107,151 @@ fs.readdir(pluginPath, function (err, rootFiles) {
console.error(err);
}
if(files.indexOf("package.json") === -1){
console.warn("no package.json, please create");
if (files.indexOf('package.json') === -1) {
console.warn('no package.json, please create');
}
if(files.indexOf("package.json") !== -1){
let packageJSON = fs.readFileSync(pluginPath+"/package.json", {encoding:'utf8', flag:'r'});
let parsedPackageJSON = JSON.parse(packageJSON);
if(autoFix){
var updatedPackageJSON = false;
if(!parsedPackageJSON.funding){
if (files.indexOf('package.json') !== -1) {
const packageJSON = fs.readFileSync(`${pluginPath}/package.json`, {encoding: 'utf8', flag: 'r'});
const parsedPackageJSON = JSON.parse(packageJSON);
if (autoFix) {
let updatedPackageJSON = false;
if (!parsedPackageJSON.funding) {
updatedPackageJSON = true;
parsedPackageJSON.funding = {
"type": "individual",
"url": "http://etherpad.org/"
}
type: 'individual',
url: 'http://etherpad.org/',
};
}
if(updatedPackageJSON){
if (updatedPackageJSON) {
hasAutoFixed = true;
fs.writeFileSync(pluginPath+"/package.json", JSON.stringify(parsedPackageJSON, null, 2));
fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
}
}
if(packageJSON.toLowerCase().indexOf("repository") === -1){
console.warn("No repository in package.json");
if(autoFix){
console.warn("Repository not detected in package.json. Please add repository section manually.")
if (packageJSON.toLowerCase().indexOf('repository') === -1) {
console.warn('No repository in package.json');
if (autoFix) {
console.warn('Repository not detected in package.json. Please add repository section manually.');
}
}else{
} else {
// useful for creating README later.
repository = parsedPackageJSON.repository.url;
}
// include lint config
if(packageJSON.toLowerCase().indexOf("devdependencies") === -1 || !parsedPackageJSON.devDependencies.eslint){
console.warn("Missing eslint reference in devDependencies");
if(autoFix){
let devDependencies = {
"eslint": "^7.14.0",
"eslint-config-etherpad": "^1.0.10",
"eslint-plugin-mocha": "^8.0.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prefer-arrow": "^1.2.2",
"eslint-plugin-promise": "^4.2.1"
}
if (packageJSON.toLowerCase().indexOf('devdependencies') === -1 || !parsedPackageJSON.devDependencies.eslint) {
console.warn('Missing eslint reference in devDependencies');
if (autoFix) {
const devDependencies = {
'eslint': '^7.14.0',
'eslint-config-etherpad': '^1.0.10',
'eslint-plugin-mocha': '^8.0.0',
'eslint-plugin-node': '^11.1.0',
'eslint-plugin-prefer-arrow': '^1.2.2',
'eslint-plugin-promise': '^4.2.1',
};
hasAutoFixed = true;
parsedPackageJSON.devDependencies = devDependencies;
fs.writeFileSync(pluginPath+"/package.json", JSON.stringify(parsedPackageJSON, null, 2));
fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
let child_process = require('child_process');
try{
child_process.execSync('npm install',{"cwd":pluginPath+"/"});
const child_process = require('child_process');
try {
child_process.execSync('npm install', {cwd: `${pluginPath}/`});
hasAutoFixed = true;
}catch(e){
console.error("Failed to create package-lock.json");
} catch (e) {
console.error('Failed to create package-lock.json');
}
}
}
if(packageJSON.toLowerCase().indexOf("eslintconfig") === -1){
console.warn("No esLintConfig in package.json");
if(autoFix){
let eslintConfig = {
"root": true,
"extends": "etherpad/plugin"
}
if (packageJSON.toLowerCase().indexOf('eslintconfig') === -1) {
console.warn('No esLintConfig in package.json');
if (autoFix) {
const eslintConfig = {
root: true,
extends: 'etherpad/plugin',
};
hasAutoFixed = true;
parsedPackageJSON.eslintConfig = eslintConfig;
fs.writeFileSync(pluginPath+"/package.json", JSON.stringify(parsedPackageJSON, null, 2));
fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
}
}
if(packageJSON.toLowerCase().indexOf("scripts") === -1){
console.warn("No scripts in package.json");
if(autoFix){
let scripts = {
"lint": "eslint .",
"lint:fix": "eslint --fix ."
}
if (packageJSON.toLowerCase().indexOf('scripts') === -1) {
console.warn('No scripts in package.json');
if (autoFix) {
const scripts = {
'lint': 'eslint .',
'lint:fix': 'eslint --fix .',
};
hasAutoFixed = true;
parsedPackageJSON.scripts = scripts;
fs.writeFileSync(pluginPath+"/package.json", JSON.stringify(parsedPackageJSON, null, 2));
fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
}
}
if(packageJSON.toLowerCase().indexOf("engines") === -1){
console.warn("No engines in package.json");
if(autoFix){
let engines = {
"lint": "eslint ."
}
if (packageJSON.toLowerCase().indexOf('engines') === -1) {
console.warn('No engines in package.json');
if (autoFix) {
const engines = {
lint: 'eslint .',
};
hasAutoFixed = true;
parsedPackageJSON.engines = engines;
fs.writeFileSync(pluginPath+"/package.json", JSON.stringify(parsedPackageJSON, null, 2));
fs.writeFileSync(`${pluginPath}/package.json`, JSON.stringify(parsedPackageJSON, null, 2));
}
}
}
if(files.indexOf("package-lock.json") === -1){
console.warn("package-lock.json file not found. Please run npm install in the plugin folder and commit the package-lock.json file.")
if(autoFix){
if (files.indexOf('package-lock.json') === -1) {
console.warn('package-lock.json file not found. Please run npm install in the plugin folder and commit the package-lock.json file.');
if (autoFix) {
var child_process = require('child_process');
try{
child_process.execSync('npm install',{"cwd":pluginPath+"/"});
console.log("Making package-lock.json");
try {
child_process.execSync('npm install', {cwd: `${pluginPath}/`});
console.log('Making package-lock.json');
hasAutoFixed = true;
}catch(e){
console.error("Failed to create package-lock.json");
} catch (e) {
console.error('Failed to create package-lock.json');
}
}
}
if(files.indexOf("readme") === -1 && files.indexOf("readme.md") === -1){
console.warn("README.md file not found, please create");
if(autoFix){
console.log("Autofixing missing README.md file, please edit the README.md file further to include plugin specific details.");
let readme = fs.readFileSync("bin/plugins/lib/README.md", {encoding:'utf8', flag:'r'})
if (files.indexOf('readme') === -1 && files.indexOf('readme.md') === -1) {
console.warn('README.md file not found, please create');
if (autoFix) {
console.log('Autofixing missing README.md file, please edit the README.md file further to include plugin specific details.');
let readme = fs.readFileSync('bin/plugins/lib/README.md', {encoding: 'utf8', flag: 'r'});
readme = readme.replace(/\[plugin_name\]/g, pluginName);
if(repository){
let org = repository.split("/")[3];
let name = repository.split("/")[4];
if (repository) {
const org = repository.split('/')[3];
const name = repository.split('/')[4];
readme = readme.replace(/\[org_name\]/g, org);
readme = readme.replace(/\[repo_url\]/g, name);
fs.writeFileSync(pluginPath+"/README.md", readme);
}else{
console.warn("Unable to find repository in package.json, aborting.")
fs.writeFileSync(`${pluginPath}/README.md`, readme);
} else {
console.warn('Unable to find repository in package.json, aborting.');
}
}
}
if(files.indexOf("readme") !== -1 && files.indexOf("readme.md") !== -1){
let readme = fs.readFileSync(pluginPath+"/"+readMeFileName, {encoding:'utf8', flag:'r'});
if(readme.toLowerCase().indexOf("license") === -1){
console.warn("No license section in README");
if(autoFix){
console.warn("Please add License section to README manually.")
if (files.indexOf('readme') !== -1 && files.indexOf('readme.md') !== -1) {
const readme = fs.readFileSync(`${pluginPath}/${readMeFileName}`, {encoding: 'utf8', flag: 'r'});
if (readme.toLowerCase().indexOf('license') === -1) {
console.warn('No license section in README');
if (autoFix) {
console.warn('Please add License section to README manually.');
}
}
}
if(files.indexOf("license") === -1 && files.indexOf("license.md") === -1){
console.warn("LICENSE.md file not found, please create");
if(autoFix){
if (files.indexOf('license') === -1 && files.indexOf('license.md') === -1) {
console.warn('LICENSE.md file not found, please create');
if (autoFix) {
hasAutoFixed = true;
console.log("Autofixing missing LICENSE.md file, including Apache 2 license.");
exec("git config user.name", (error, name, stderr) => {
console.log('Autofixing missing LICENSE.md file, including Apache 2 license.');
exec('git config user.name', (error, name, stderr) => {
if (error) {
console.log(`error: ${error.message}`);
return;
@@ -261,120 +260,118 @@ fs.readdir(pluginPath, function (err, rootFiles) {
console.log(`stderr: ${stderr}`);
return;
}
let license = fs.readFileSync("bin/plugins/lib/LICENSE.md", {encoding:'utf8', flag:'r'});
license = license.replace("[yyyy]", new Date().getFullYear());
license = license.replace("[name of copyright owner]", name)
fs.writeFileSync(pluginPath+"/LICENSE.md", license);
let license = fs.readFileSync('bin/plugins/lib/LICENSE.md', {encoding: 'utf8', flag: 'r'});
license = license.replace('[yyyy]', new Date().getFullYear());
license = license.replace('[name of copyright owner]', name);
fs.writeFileSync(`${pluginPath}/LICENSE.md`, license);
});
}
}
var travisConfig = fs.readFileSync("bin/plugins/lib/travis.yml", {encoding:'utf8', flag:'r'});
let travisConfig = fs.readFileSync('bin/plugins/lib/travis.yml', {encoding: 'utf8', flag: 'r'});
travisConfig = travisConfig.replace(/\[plugin_name\]/g, pluginName);
if(files.indexOf(".travis.yml") === -1){
console.warn(".travis.yml file not found, please create. .travis.yml is used for automatically CI testing Etherpad. It is useful to know if your plugin breaks another feature for example.")
if (files.indexOf('.travis.yml') === -1) {
console.warn('.travis.yml file not found, please create. .travis.yml is used for automatically CI testing Etherpad. It is useful to know if your plugin breaks another feature for example.');
// TODO: Make it check version of the .travis file to see if it needs an update.
if(autoFix){
if (autoFix) {
hasAutoFixed = true;
console.log("Autofixing missing .travis.yml file");
fs.writeFileSync(pluginPath+"/.travis.yml", travisConfig);
console.log("Travis file created, please sign into travis and enable this repository")
console.log('Autofixing missing .travis.yml file');
fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
console.log('Travis file created, please sign into travis and enable this repository');
}
}
if(autoFix && autoUpdate){
if (autoFix && autoUpdate) {
// checks the file versioning of .travis and updates it to the latest.
let existingConfig = fs.readFileSync(pluginPath + "/.travis.yml", {encoding:'utf8', flag:'r'});
let existingConfigLocation = existingConfig.indexOf("##ETHERPAD_TRAVIS_V=");
let existingValue = parseInt(existingConfig.substr(existingConfigLocation+20, existingConfig.length));
const existingConfig = fs.readFileSync(`${pluginPath}/.travis.yml`, {encoding: 'utf8', flag: 'r'});
const existingConfigLocation = existingConfig.indexOf('##ETHERPAD_TRAVIS_V=');
const existingValue = parseInt(existingConfig.substr(existingConfigLocation + 20, existingConfig.length));
let newConfigLocation = travisConfig.indexOf("##ETHERPAD_TRAVIS_V=");
let newValue = parseInt(travisConfig.substr(newConfigLocation+20, travisConfig.length));
if(existingConfigLocation === -1){
console.warn("no previous .travis.yml version found so writing new.")
const newConfigLocation = travisConfig.indexOf('##ETHERPAD_TRAVIS_V=');
const newValue = parseInt(travisConfig.substr(newConfigLocation + 20, travisConfig.length));
if (existingConfigLocation === -1) {
console.warn('no previous .travis.yml version found so writing new.');
// we will write the newTravisConfig to the location.
fs.writeFileSync(pluginPath + "/.travis.yml", travisConfig);
}else{
if(newValue > existingValue){
console.log("updating .travis.yml");
fs.writeFileSync(pluginPath + "/.travis.yml", travisConfig);
hasAutoFixed = true;
}
fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
} else if (newValue > existingValue) {
console.log('updating .travis.yml');
fs.writeFileSync(`${pluginPath}/.travis.yml`, travisConfig);
hasAutoFixed = true;
}
}
if(files.indexOf(".gitignore") === -1){
console.warn(".gitignore file not found, please create. .gitignore files are useful to ensure files aren't incorrectly commited to a repository.")
if(autoFix){
if (files.indexOf('.gitignore') === -1) {
console.warn(".gitignore file not found, please create. .gitignore files are useful to ensure files aren't incorrectly commited to a repository.");
if (autoFix) {
hasAutoFixed = true;
console.log("Autofixing missing .gitignore file");
let gitignore = fs.readFileSync("bin/plugins/lib/gitignore", {encoding:'utf8', flag:'r'});
fs.writeFileSync(pluginPath+"/.gitignore", gitignore);
console.log('Autofixing missing .gitignore file');
const gitignore = fs.readFileSync('bin/plugins/lib/gitignore', {encoding: 'utf8', flag: 'r'});
fs.writeFileSync(`${pluginPath}/.gitignore`, gitignore);
}
}
// if we include templates but don't have translations...
if(files.indexOf("templates") !== -1 && files.indexOf("locales") === -1){
console.warn("Translations not found, please create. Translation files help with Etherpad accessibility.");
if (files.indexOf('templates') !== -1 && files.indexOf('locales') === -1) {
console.warn('Translations not found, please create. Translation files help with Etherpad accessibility.');
}
if(files.indexOf(".ep_initialized") !== -1){
console.warn(".ep_initialized found, please remove. .ep_initialized should never be commited to git and should only exist once the plugin has been executed one time.")
if(autoFix){
if (files.indexOf('.ep_initialized') !== -1) {
console.warn('.ep_initialized found, please remove. .ep_initialized should never be commited to git and should only exist once the plugin has been executed one time.');
if (autoFix) {
hasAutoFixed = true;
console.log("Autofixing incorrectly existing .ep_initialized file");
fs.unlinkSync(pluginPath+"/.ep_initialized");
console.log('Autofixing incorrectly existing .ep_initialized file');
fs.unlinkSync(`${pluginPath}/.ep_initialized`);
}
}
if(files.indexOf("npm-debug.log") !== -1){
console.warn("npm-debug.log found, please remove. npm-debug.log should never be commited to your repository.")
if(autoFix){
if (files.indexOf('npm-debug.log') !== -1) {
console.warn('npm-debug.log found, please remove. npm-debug.log should never be commited to your repository.');
if (autoFix) {
hasAutoFixed = true;
console.log("Autofixing incorrectly existing npm-debug.log file");
fs.unlinkSync(pluginPath+"/npm-debug.log");
console.log('Autofixing incorrectly existing npm-debug.log file');
fs.unlinkSync(`${pluginPath}/npm-debug.log`);
}
}
if(files.indexOf("static") !== -1){
fs.readdir(pluginPath+"/static", function (errRead, staticFiles) {
if(staticFiles.indexOf("tests") === -1){
console.warn("Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin")
if (files.indexOf('static') !== -1) {
fs.readdir(`${pluginPath}/static`, (errRead, staticFiles) => {
if (staticFiles.indexOf('tests') === -1) {
console.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
}
})
}else{
console.warn("Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin")
});
} else {
console.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
}
// linting begins
if(autoFix){
if (autoFix) {
var lintCmd = 'npm run lint:fix';
}else{
} else {
var lintCmd = 'npm run lint';
}
try{
child_process.execSync(lintCmd,{"cwd":pluginPath+"/"});
console.log("Linting...");
if(autoFix){
try {
child_process.execSync(lintCmd, {cwd: `${pluginPath}/`});
console.log('Linting...');
if (autoFix) {
// todo: if npm run lint doesn't do anything no need for...
hasAutoFixed = true;
}
}catch(e){
} catch (e) {
// it is gonna throw an error anyway
console.log("Manual linting probably required, check with: npm run lint");
console.log('Manual linting probably required, check with: npm run lint');
}
// linting ends.
if(hasAutoFixed){
console.log("Fixes applied, please check git diff then run the following command:\n\n")
if (hasAutoFixed) {
console.log('Fixes applied, please check git diff then run the following command:\n\n');
// bump npm Version
if(autoCommit){
if (autoCommit) {
// holy shit you brave.
console.log("Attempting autocommit and auto publish to npm")
console.log('Attempting autocommit and auto publish to npm');
// github should push to npm for us :)
exec("cd node_modules/"+ pluginName + " && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && git push && cd ../..", (error, name, stderr) => {
exec(`cd node_modules/${pluginName} && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && git push && cd ../..`, (error, name, stderr) => {
if (error) {
console.log(`error: ${error.message}`);
return;
@@ -383,14 +380,13 @@ fs.readdir(pluginPath, function (err, rootFiles) {
console.log(`stderr: ${stderr}`);
return;
}
console.log("I think she's got it! By George she's got it!")
process.exit(0)
console.log("I think she's got it! By George she's got it!");
process.exit(0);
});
}else{
console.log("cd node_modules/"+ pluginName + " && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && npm version patch && git add package.json && git commit --allow-empty -m 'bump version' && git push && npm publish && cd ../..")
} else {
console.log(`cd node_modules/${pluginName} && git add -A && git commit --allow-empty -m 'autofixes from Etherpad checkPlugins.js' && npm version patch && git add package.json && git commit --allow-empty -m 'bump version' && git push && npm publish && cd ../..`);
}
}
console.log("Finished");
console.log('Finished');
});


@@ -3,120 +3,124 @@
known "good" revision.
*/
if(process.argv.length != 4 && process.argv.length != 5) {
console.error("Use: node bin/repairPad.js $PADID $REV [$NEWPADID]");
if (process.argv.length != 4 && process.argv.length != 5) {
console.error('Use: node bin/repairPad.js $PADID $REV [$NEWPADID]');
process.exit(1);
}
var npm = require("../src/node_modules/npm");
var async = require("../src/node_modules/async");
var ueberDB = require("../src/node_modules/ueberdb2");
const npm = require('../src/node_modules/npm');
const async = require('../src/node_modules/async');
const ueberDB = require('../src/node_modules/ueberdb2');
var padId = process.argv[2];
var newRevHead = process.argv[3];
var newPadId = process.argv[4] || padId + "-rebuilt";
const padId = process.argv[2];
const newRevHead = process.argv[3];
const newPadId = process.argv[4] || `${padId}-rebuilt`;
var db, oldPad, newPad, settings;
var AuthorManager, ChangeSet, Pad, PadManager;
let db, oldPad, newPad, settings;
let AuthorManager, ChangeSet, Pad, PadManager;
async.series([
function(callback) {
npm.load({}, function(err) {
if(err) {
console.error("Could not load NPM: " + err)
function (callback) {
npm.load({}, (err) => {
if (err) {
console.error(`Could not load NPM: ${err}`);
process.exit(1);
} else {
callback();
}
})
});
},
function(callback) {
function (callback) {
// Get a handle into the database
db = require('../src/node/db/DB');
db.init(callback);
}, function(callback) {
PadManager = require('../src/node/db/PadManager');
Pad = require('../src/node/db/Pad').Pad;
// Get references to the original pad and to a newly created pad
// HACK: This is a standalone script, so we want to write everything
// out to the database immediately. The only problem with this is
// that a driver (like the mysql driver) can hardcode these values.
db.db.db.settings = {cache: 0, writeInterval: 0, json: true};
// Validate the newPadId if specified and that a pad with that ID does
// not already exist to avoid overwriting it.
if (!PadManager.isValidPadId(newPadId)) {
console.error("Cannot create a pad with that id as it is invalid");
process.exit(1);
}
PadManager.doesPadExists(newPadId, function(err, exists) {
if (exists) {
console.error("Cannot create a pad with that id as it already exists");
process.exit(1);
}
});
PadManager.getPad(padId, function(err, pad) {
oldPad = pad;
newPad = new Pad(newPadId);
callback();
});
}, function(callback) {
},
function (callback) {
PadManager = require('../src/node/db/PadManager');
Pad = require('../src/node/db/Pad').Pad;
// Get references to the original pad and to a newly created pad
// HACK: This is a standalone script, so we want to write everything
// out to the database immediately. The only problem with this is
// that a driver (like the mysql driver) can hardcode these values.
db.db.db.settings = {cache: 0, writeInterval: 0, json: true};
// Validate the newPadId if specified and that a pad with that ID does
// not already exist to avoid overwriting it.
if (!PadManager.isValidPadId(newPadId)) {
console.error('Cannot create a pad with that id as it is invalid');
process.exit(1);
}
PadManager.doesPadExists(newPadId, (err, exists) => {
if (exists) {
console.error('Cannot create a pad with that id as it already exists');
process.exit(1);
}
});
PadManager.getPad(padId, (err, pad) => {
oldPad = pad;
newPad = new Pad(newPadId);
callback();
});
},
function (callback) {
// Clone all Chat revisions
var chatHead = oldPad.chatHead;
for(var i = 0, curHeadNum = 0; i <= chatHead; i++) {
db.db.get("pad:" + padId + ":chat:" + i, function (err, chat) {
db.db.set("pad:" + newPadId + ":chat:" + curHeadNum++, chat);
console.log("Created: Chat Revision: pad:" + newPadId + ":chat:" + curHeadNum);
const chatHead = oldPad.chatHead;
for (var i = 0, curHeadNum = 0; i <= chatHead; i++) {
db.db.get(`pad:${padId}:chat:${i}`, (err, chat) => {
db.db.set(`pad:${newPadId}:chat:${curHeadNum++}`, chat);
console.log(`Created: Chat Revision: pad:${newPadId}:chat:${curHeadNum}`);
});
}
callback();
}, function(callback) {
},
function (callback) {
// Rebuild Pad from revisions up to and including the new revision head
AuthorManager = require("../src/node/db/AuthorManager");
Changeset = require("ep_etherpad-lite/static/js/Changeset");
AuthorManager = require('../src/node/db/AuthorManager');
Changeset = require('ep_etherpad-lite/static/js/Changeset');
// Author attributes are derived from changesets, but there can also be
// non-author attributes with specific mappings that changesets depend on
// and, AFAICT, cannot be recreated any other way
newPad.pool.numToAttrib = oldPad.pool.numToAttrib;
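// Hypothetical pool contents for illustration: numToAttrib maps attribute
// numbers used in changesets to [key, value] pairs, e.g.
//   {0: ['author', 'a.abc123'], 1: ['bold', 'true']}
// so copying it keeps the cloned revisions' changesets decodable.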
for(var curRevNum = 0; curRevNum <= newRevHead; curRevNum++) {
db.db.get("pad:" + padId + ":revs:" + curRevNum, function(err, rev) {
for (let curRevNum = 0; curRevNum <= newRevHead; curRevNum++) {
db.db.get(`pad:${padId}:revs:${curRevNum}`, (err, rev) => {
if (rev.meta) {
throw "The specified revision number could not be found.";
throw 'The specified revision number could not be found.';
}
var newRevNum = ++newPad.head;
var newRevId = "pad:" + newPad.id + ":revs:" + newRevNum;
const newRevNum = ++newPad.head;
const newRevId = `pad:${newPad.id}:revs:${newRevNum}`;
db.db.set(newRevId, rev);
AuthorManager.addPad(rev.meta.author, newPad.id);
newPad.atext = Changeset.applyToAText(rev.changeset, newPad.atext, newPad.pool);
console.log("Created: Revision: pad:" + newPad.id + ":revs:" + newRevNum);
console.log(`Created: Revision: pad:${newPad.id}:revs:${newRevNum}`);
if (newRevNum == newRevHead) {
callback();
}
});
}
}, function(callback) {
},
function (callback) {
// Add saved revisions up to the new revision head
console.log(newPad.head);
var newSavedRevisions = [];
for(var i in oldPad.savedRevisions) {
savedRev = oldPad.savedRevisions[i]
const newSavedRevisions = [];
for (const i in oldPad.savedRevisions) {
savedRev = oldPad.savedRevisions[i];
if (savedRev.revNum <= newRevHead) {
newSavedRevisions.push(savedRev);
console.log("Added: Saved Revision: " + savedRev.revNum);
console.log(`Added: Saved Revision: ${savedRev.revNum}`);
}
}
newPad.savedRevisions = newSavedRevisions;
callback();
}, function(callback) {
},
function (callback) {
// Save the source pad
db.db.set("pad:"+newPadId, newPad, function(err) {
console.log("Created: Source Pad: pad:" + newPadId);
db.db.set(`pad:${newPadId}`, newPad, (err) => {
console.log(`Created: Source Pad: pad:${newPadId}`);
newPad.saveToDatabase().then(() => callback(), callback);
});
}
], function (err) {
if(err) throw err;
else {
console.info("finished");
},
], (err) => {
if (err) { throw err; } else {
console.info('finished');
process.exit(0);
}
});


@ -2,47 +2,47 @@
* This is a repair tool. It extracts all datas of a pad, removes and inserts them again.
*/
console.warn("WARNING: This script must not be used while etherpad is running!");
console.warn('WARNING: This script must not be used while etherpad is running!');
if (process.argv.length != 3) {
console.error("Use: node bin/repairPad.js $PADID");
console.error('Use: node bin/repairPad.js $PADID');
process.exit(1);
}
// get the padID
var padId = process.argv[2];
const padId = process.argv[2];
let npm = require("../src/node_modules/npm");
npm.load({}, async function(er) {
const npm = require('../src/node_modules/npm');
npm.load({}, async (er) => {
if (er) {
console.error("Could not load NPM: " + er)
console.error(`Could not load NPM: ${er}`);
process.exit(1);
}
try {
// intialize database
let settings = require('../src/node/utils/Settings');
let db = require('../src/node/db/DB');
const settings = require('../src/node/utils/Settings');
const db = require('../src/node/db/DB');
await db.init();
// get the pad
let padManager = require('../src/node/db/PadManager');
let pad = await padManager.getPad(padId);
const padManager = require('../src/node/db/PadManager');
const pad = await padManager.getPad(padId);
// accumulate the required keys
let neededDBValues = ["pad:" + padId];
const neededDBValues = [`pad:${padId}`];
// add all authors
neededDBValues.push(...pad.getAllAuthors().map(author => "globalAuthor:"));
neededDBValues.push(...pad.getAllAuthors().map((author) => 'globalAuthor:'));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
neededDBValues.push("pad:" + padId + ":revs:" + rev);
neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
neededDBValues.push("pad:" + padId + ":chat:" + chat);
neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
//
@@ -55,21 +55,20 @@ npm.load({}, async function(er) {
//
// See gitlab issue #3545
//
console.info("aborting [gitlab #3545]");
console.info('aborting [gitlab #3545]');
process.exit(1);
// now fetch and reinsert every key
neededDBValues.forEach(function(key, value) {
console.log("Key: " + key+ ", value: " + value);
neededDBValues.forEach((key, value) => {
console.log(`Key: ${key}, value: ${value}`);
db.remove(key);
db.set(key, value);
});
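// Note (sketch, not part of this commit): Array.prototype.forEach passes
// (element, index), so above `key` is the key string and `value` is its
// numeric index rather than the stored value. A working variant would fetch
// each value first, e.g.:
//   for (const key of neededDBValues) {
//     const value = await db.get(key);
//     db.remove(key);
//     db.set(key, value);
//   }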
console.info("finished");
console.info('finished');
process.exit(0);
} catch (er) {
if (er.name === "apierror") {
if (er.name === 'apierror') {
console.error(er);
} else {
console.trace(er);