Commit 7b99edc471 (parent 3c2129b1cc): Moved to ts for other dependencies.
47 changed files with 1056 additions and 1363 deletions
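Note: the conversion below follows one repeated pattern. CommonJS `exports.foo = ...` assignments become ES module `export const`/`export let` declarations, `require(...)` calls become `import` statements, and callers inside the same module drop the `exports.` prefix. A minimal sketch of that pattern, using the parseNum/numToString pair from the Changeset module below (the explicit type annotations here are illustrative, not part of the commit):

// Before (CommonJS):
//   exports.parseNum = (str) => parseInt(str, 36);
//   const Changeset = require('./Changeset');
//
// After (ES module / TypeScript):
export const parseNum = (str: string): number => parseInt(str, 36);
export const numToString = (num: number): string => num.toString(36).toLowerCase();
// Consumers switch to named imports:
//   import {parseNum, numToString} from './Changeset';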
@@ -22,6 +22,8 @@
  * https://github.com/ether/pad/blob/master/infrastructure/ace/www/easysync2.js
  */
 
+import {CustomError} from "../../node/utils/customError";
+
 const AttributeMap = require('./AttributeMap');
 const AttributePool = require('./AttributePool');
 const attributes = require('./attributes');
@@ -47,8 +49,8 @@ const {padutils} = require('./pad_utils');
  *
  * @param {string} msg - Just some message
  */
-const error = (msg) => {
-  const e = new Error(msg);
+export const error = (msg) => {
+  const e = new CustomError(msg);
   e.easysync = true;
   throw e;
 };
@@ -71,7 +73,7 @@ const assert = (b, msg) => {
  * @param {string} str - string of the number in base 36
  * @returns {number} number
  */
-exports.parseNum = (str) => parseInt(str, 36);
+export const parseNum = (str) => parseInt(str, 36);
 
 /**
  * Writes a number in base 36 and puts it in a string.
@@ -79,12 +81,16 @@ exports.parseNum = (str) => parseInt(str, 36);
  * @param {number} num - number
  * @returns {string} string
  */
-exports.numToString = (num) => num.toString(36).toLowerCase();
+export const numToString = (num) => num.toString(36).toLowerCase();
 
 /**
  * An operation to apply to a shared document.
  */
 class Op {
+  opcode: string;
+  chars: number;
+  lines: number;
+  attribs: string;
   /**
    * @param {(''|'='|'+'|'-')} [opcode=''] - Initial value of the `opcode` property.
    */
@@ -146,11 +152,11 @@ class Op {
   toString() {
     if (!this.opcode) throw new TypeError('null op');
     if (typeof this.attribs !== 'string') throw new TypeError('attribs must be a string');
-    const l = this.lines ? `|${exports.numToString(this.lines)}` : '';
-    return this.attribs + l + this.opcode + exports.numToString(this.chars);
+    const l = this.lines ? `|${numToString(this.lines)}` : '';
+    return this.attribs + l + this.opcode + numToString(this.chars);
   }
 }
-exports.Op = Op;
+export const Op = Op;
 
 /**
  * Describes changes to apply to a document. Does not include the attribute pool or the original
@@ -170,7 +176,7 @@ exports.Op = Op;
  * @param {string} cs - String representation of the Changeset
  * @returns {number} oldLen property
  */
-exports.oldLen = (cs) => exports.unpack(cs).oldLen;
+export const oldLen = (cs) => exports.unpack(cs).oldLen;
 
 /**
  * Returns the length of the text after changeset is applied.
@@ -178,7 +184,7 @@ exports.oldLen = (cs) => exports.unpack(cs).oldLen;
  * @param {string} cs - String representation of the Changeset
  * @returns {number} newLen property
  */
-exports.newLen = (cs) => exports.unpack(cs).newLen;
+export const newLen = (cs) => exports.unpack(cs).newLen;
 
 /**
  * Parses a string of serialized changeset operations.
@@ -187,7 +193,7 @@ exports.newLen = (cs) => exports.unpack(cs).newLen;
  * @yields {Op}
  * @returns {Generator<Op>}
  */
-exports.deserializeOps = function* (ops) {
+export const deserializeOps = function* (ops) {
   // TODO: Migrate to String.prototype.matchAll() once there is enough browser support.
   const regex = /((?:\*[0-9a-z]+)*)(?:\|([0-9a-z]+))?([-+=])([0-9a-z]+)|(.)/g;
   let match;
@@ -195,8 +201,8 @@ exports.deserializeOps = function* (ops) {
     if (match[5] === '$') return; // Start of the insert operation character bank.
     if (match[5] != null) error(`invalid operation: ${ops.slice(regex.lastIndex - 1)}`);
     const op = new Op(match[3]);
-    op.lines = exports.parseNum(match[2] || '0');
-    op.chars = exports.parseNum(match[4]);
+    op.lines = parseNum(match[2] || '0');
+    op.chars = parseNum(match[4]);
     op.attribs = match[1];
     yield op;
   }
@@ -210,11 +216,18 @@ exports.deserializeOps = function* (ops) {
  * @deprecated Use `deserializeOps` instead.
  */
 class OpIter {
+  private _next: {
+    value: any;
+    done: boolean,
+
+  };
+  private _gen: any;
   /**
    * @param {string} ops - String encoding the change operations to iterate over.
    */
   constructor(ops) {
-    this._gen = exports.deserializeOps(ops);
+    this._gen = deserializeOps(ops);
     this._next = this._gen.next();
   }
 
@@ -252,7 +265,7 @@ class OpIter {
  * @param {string} opsStr - String encoding of the change operations to perform.
  * @returns {OpIter} Operator iterator object.
  */
-exports.opIterator = (opsStr) => {
+export const opIterator = (opsStr) => {
   padutils.warnDeprecated(
       'Changeset.opIterator() is deprecated; use Changeset.deserializeOps() instead');
   return new OpIter(opsStr);
@@ -277,7 +290,7 @@ const clearOp = (op) => {
  * @param {('+'|'-'|'='|'')} [optOpcode=''] - The operation's operator.
  * @returns {Op}
  */
-exports.newOp = (optOpcode) => {
+export const newOp = (optOpcode) => {
   padutils.warnDeprecated('Changeset.newOp() is deprecated; use the Changeset.Op class instead');
   return new Op(optOpcode);
 };
@@ -370,17 +383,17 @@ const opsFromText = function* (opcode, text, attribs = '', pool = null) {
  * @param {string} cs - Changeset to check
  * @returns {string} the checked Changeset
  */
-exports.checkRep = (cs) => {
-  const unpacked = exports.unpack(cs);
+export const checkRep = (cs) => {
+  const unpacked = unpack(cs);
   const oldLen = unpacked.oldLen;
   const newLen = unpacked.newLen;
   const ops = unpacked.ops;
   let charBank = unpacked.charBank;
 
-  const assem = exports.smartOpAssembler();
+  const assem = smartOpAssembler();
   let oldPos = 0;
   let calcNewLen = 0;
-  for (const o of exports.deserializeOps(ops)) {
+  for (const o of deserializeOps(ops)) {
     switch (o.opcode) {
       case '=':
         oldPos += o.chars;
@@ -413,7 +426,7 @@ exports.checkRep = (cs) => {
   assert(calcNewLen === newLen, 'Invalid changeset: claimed length does not match actual length');
   assert(charBank === '', 'Invalid changeset: excess characters in the charBank');
   assem.endDocument();
-  const normalized = exports.pack(oldLen, calcNewLen, assem.toString(), unpacked.charBank);
+  const normalized = pack(oldLen, calcNewLen, assem.toString(), unpacked.charBank);
   assert(normalized === cs, 'Invalid changeset: not in canonical form');
   return cs;
 };
@@ -421,11 +434,11 @@ exports.checkRep = (cs) => {
 /**
  * @returns {SmartOpAssembler}
  */
-exports.smartOpAssembler = () => {
-  const minusAssem = exports.mergingOpAssembler();
-  const plusAssem = exports.mergingOpAssembler();
-  const keepAssem = exports.mergingOpAssembler();
-  const assem = exports.stringAssembler();
+export const smartOpAssembler = () => {
+  const minusAssem = mergingOpAssembler();
+  const plusAssem = mergingOpAssembler();
+  const keepAssem = mergingOpAssembler();
+  const assem = stringAssembler();
   let lastOpcode = '';
   let lengthChange = 0;
 
@@ -515,8 +528,8 @@ exports.smartOpAssembler = () => {
 /**
  * @returns {MergingOpAssembler}
  */
-exports.mergingOpAssembler = () => {
-  const assem = exports.opAssembler();
+export const mergingOpAssembler = () => {
+  const assem = opAssembler();
   const bufOp = new Op();
 
   // If we get, for example, insertions [xxx\n,yyy], those don't merge,
@@ -528,7 +541,7 @@ exports.mergingOpAssembler = () => {
   /**
    * @param {boolean} [isEndDocument]
    */
-  const flush = (isEndDocument) => {
+  const flush = (isEndDocument?: boolean) => {
     if (!bufOp.opcode) return;
     if (isEndDocument && bufOp.opcode === '=' && !bufOp.attribs) {
       // final merged keep, leave it implicit
@@ -589,7 +602,7 @@ exports.mergingOpAssembler = () => {
 /**
  * @returns {OpAssembler}
  */
-exports.opAssembler = () => {
+export const opAssembler = () => {
   let serialized = '';
 
   /**
@@ -627,7 +640,7 @@ exports.opAssembler = () => {
  * @param {string} str - String to iterate over
  * @returns {StringIterator}
  */
-exports.stringIterator = (str) => {
+export const stringIterator = (str) => {
   let curIndex = 0;
   // newLines is the number of \n between curIndex and str.length
   let newLines = str.split('\n').length - 1;
@@ -677,7 +690,7 @@ exports.stringIterator = (str) => {
 /**
  * @returns {StringAssembler}
  */
-exports.stringAssembler = () => ({
+export const stringAssembler = () => ({
   _str: '',
   clear() { this._str = ''; },
   /**
@@ -711,6 +724,11 @@ exports.stringAssembler = () => ({
  * with no newlines.
  */
 class TextLinesMutator {
+  private _lines: any;
+  private _curSplice: number[]|{[key: string]:any};
+  private _inSplice: boolean;
+  private _curLine: number;
+  private _curCol: number;
   /**
    * @param {(string[]|StringArrayLike)} lines - Lines to mutate (in place).
    */
@@ -798,10 +816,12 @@ class TextLinesMutator {
    * close or TODO(doc).
    */
   _leaveSplice() {
-    this._lines.splice(...this._curSplice);
-    this._curSplice.length = 2;
-    this._curSplice[0] = this._curSplice[1] = 0;
-    this._inSplice = false;
+    if(this._curSplice instanceof Array) {
+      this._lines.splice(...this._curSplice);
+      this._curSplice.length = 2;
+      this._curSplice[0] = this._curSplice[1] = 0;
+      this._inSplice = false;
+    }
   }
 
   /**
@@ -837,7 +857,7 @@ class TextLinesMutator {
    * @param {number} L -
    * @param {boolean} includeInSplice - Indicates that attributes are present.
    */
-  skipLines(L, includeInSplice) {
+  skipLines(L, includeInSplice?:boolean) {
     if (!L) return;
     if (includeInSplice) {
       if (!this._inSplice) this._enterSplice();
@@ -905,7 +925,7 @@ class TextLinesMutator {
       return this._linesSlice(m, m + k).join('');
     };
 
-    let removed = '';
+    let removed:string|number = '';
     if (this._isCurLineInSplice()) {
       if (this._curCol === 0) {
         removed = this._curSplice[this._curSplice.length - 1];
@@ -958,7 +978,7 @@ class TextLinesMutator {
     if (!text) return;
     if (!this._inSplice) this._enterSplice();
     if (L) {
-      const newLines = exports.splitTextLines(text);
+      const newLines = splitTextLines(text);
       if (this._isCurLineInSplice()) {
         const sline = this._curSplice.length - 1;
         /** @type {string} */
@@ -1039,11 +1059,11 @@ class TextLinesMutator {
  * @returns {string} the integrated changeset
  */
 const applyZip = (in1, in2, func) => {
-  const ops1 = exports.deserializeOps(in1);
-  const ops2 = exports.deserializeOps(in2);
+  const ops1 = deserializeOps(in1);
+  const ops2 = deserializeOps(in2);
   let next1 = ops1.next();
   let next2 = ops2.next();
-  const assem = exports.smartOpAssembler();
+  const assem = smartOpAssembler();
   while (!next1.done || !next2.done) {
     if (!next1.done && !next1.value.opcode) next1 = ops1.next();
     if (!next2.done && !next2.value.opcode) next2 = ops2.next();
@@ -1063,13 +1083,13 @@ const applyZip = (in1, in2, func) => {
  * @param {string} cs - The encoded changeset.
  * @returns {Changeset}
  */
-exports.unpack = (cs) => {
+export const unpack = (cs) => {
   const headerRegex = /Z:([0-9a-z]+)([><])([0-9a-z]+)|/;
   const headerMatch = headerRegex.exec(cs);
   if ((!headerMatch) || (!headerMatch[0])) error(`Not a changeset: ${cs}`);
-  const oldLen = exports.parseNum(headerMatch[1]);
+  const oldLen = parseNum(headerMatch[1]);
   const changeSign = (headerMatch[2] === '>') ? 1 : -1;
-  const changeMag = exports.parseNum(headerMatch[3]);
+  const changeMag = parseNum(headerMatch[3]);
   const newLen = oldLen + changeSign * changeMag;
   const opsStart = headerMatch[0].length;
   let opsEnd = cs.indexOf('$');
@@ -1091,12 +1111,12 @@ exports.unpack = (cs) => {
  * @param {string} bank - Characters for insert operations.
  * @returns {string} The encoded changeset.
  */
-exports.pack = (oldLen, newLen, opsStr, bank) => {
+export const pack = (oldLen, newLen, opsStr, bank) => {
   const lenDiff = newLen - oldLen;
-  const lenDiffStr = (lenDiff >= 0 ? `>${exports.numToString(lenDiff)}`
-    : `<${exports.numToString(-lenDiff)}`);
+  const lenDiffStr = (lenDiff >= 0 ? `>${numToString(lenDiff)}`
+    : `<${numToString(-lenDiff)}`);
   const a = [];
-  a.push('Z:', exports.numToString(oldLen), lenDiffStr, opsStr, '$', bank);
+  a.push('Z:', numToString(oldLen), lenDiffStr, opsStr, '$', bank);
   return a.join('');
 };
 
@@ -1107,13 +1127,13 @@ exports.pack = (oldLen, newLen, opsStr, bank) => {
  * @param {string} str - String to which a Changeset should be applied
  * @returns {string}
  */
-exports.applyToText = (cs, str) => {
-  const unpacked = exports.unpack(cs);
+export const applyToText = (cs, str) => {
+  const unpacked = unpack(cs);
   assert(str.length === unpacked.oldLen, `mismatched apply: ${str.length} / ${unpacked.oldLen}`);
-  const bankIter = exports.stringIterator(unpacked.charBank);
-  const strIter = exports.stringIterator(str);
-  const assem = exports.stringAssembler();
-  for (const op of exports.deserializeOps(unpacked.ops)) {
+  const bankIter = stringIterator(unpacked.charBank);
+  const strIter = stringIterator(str);
+  const assem = stringAssembler();
+  for (const op of deserializeOps(unpacked.ops)) {
     switch (op.opcode) {
       case '+':
         // op is + and op.lines 0: no newlines must be in op.chars
@@ -1151,11 +1171,11 @@ exports.applyToText = (cs, str) => {
  * @param {string} cs - the changeset to apply
  * @param {string[]} lines - The lines to which the changeset needs to be applied
  */
-exports.mutateTextLines = (cs, lines) => {
-  const unpacked = exports.unpack(cs);
-  const bankIter = exports.stringIterator(unpacked.charBank);
+export const mutateTextLines = (cs, lines) => {
+  const unpacked = unpack(cs);
+  const bankIter = stringIterator(unpacked.charBank);
   const mut = new TextLinesMutator(lines);
-  for (const op of exports.deserializeOps(unpacked.ops)) {
+  for (const op of deserializeOps(unpacked.ops)) {
     switch (op.opcode) {
       case '+':
         mut.insert(bankIter.take(op.chars), op.lines);
@@ -1180,7 +1200,7 @@ exports.mutateTextLines = (cs, lines) => {
  * @param {AttributePool} pool - attribute pool
  * @returns {string}
  */
-exports.composeAttributes = (att1, att2, resultIsMutation, pool) => {
+export const composeAttributes = (att1, att2, resultIsMutation, pool) => {
   // att1 and att2 are strings like "*3*f*1c", asMutation is a boolean.
   // Sometimes attribute (key,value) pairs are treated as attribute presence
   // information, while other times they are treated as operations that
@@ -1258,7 +1278,7 @@ const slicerZipperFunc = (attOp, csOp, pool) => {
       // normally be the empty string. However, padDiff.js adds attributes to remove ops and needs
       // them preserved so they are copied here.
       ? csOp.attribs
-      : exports.composeAttributes(attOp.attribs, csOp.attribs, attOp.opcode === '=', pool);
+      : composeAttributes(attOp.attribs, csOp.attribs, attOp.opcode === '=', pool);
   partiallyConsumedOp.chars -= fullyConsumedOp.chars;
   partiallyConsumedOp.lines -= fullyConsumedOp.lines;
   if (!partiallyConsumedOp.chars) partiallyConsumedOp.opcode = '';
@@ -1275,8 +1295,8 @@ const slicerZipperFunc = (attOp, csOp, pool) => {
  * @param {AttributePool} pool - the attibutes pool
  * @returns {string}
  */
-exports.applyToAttribution = (cs, astr, pool) => {
-  const unpacked = exports.unpack(cs);
+export const applyToAttribution = (cs, astr, pool) => {
+  const unpacked = unpack(cs);
   return applyZip(astr, unpacked.ops, (op1, op2) => slicerZipperFunc(op1, op2, pool));
 };
 
@@ -1749,7 +1769,7 @@ exports.mapAttribNumbers = (cs, func) => {
  *     attributes
  * @returns {AText}
  */
-exports.makeAText = (text, attribs) => ({
+export const makeAText = (text, attribs) => ({
   text,
   attribs: (attribs || exports.makeAttribution(text)),
 });
@@ -1762,7 +1782,7 @@ exports.makeAText = (text, attribs) => ({
  * @param {AttributePool} pool - Attribute Pool to add to
  * @returns {AText}
  */
-exports.applyToAText = (cs, atext, pool) => ({
+export const applyToAText = (cs, atext, pool) => ({
   text: exports.applyToText(cs, atext.text),
   attribs: exports.applyToAttribution(cs, atext.attribs, pool),
 });
@@ -1787,7 +1807,7 @@ exports.cloneAText = (atext) => {
  * @param {AText} atext1 -
  * @param {AText} atext2 -
  */
-exports.copyAText = (atext1, atext2) => {
+export const copyAText = (atext1, atext2) => {
   atext2.text = atext1.text;
   atext2.attribs = atext1.attribs;
 };
@@ -1799,10 +1819,10 @@ exports.copyAText = (atext1, atext2) => {
  * @yields {Op}
  * @returns {Generator<Op>}
  */
-exports.opsFromAText = function* (atext) {
+export const opsFromAText = function* (atext) {
   // intentionally skips last newline char of atext
   let lastOp = null;
-  for (const op of exports.deserializeOps(atext.attribs)) {
+  for (const op of deserializeOps(atext.attribs)) {
    if (lastOp != null) yield lastOp;
    lastOp = op;
  }
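For orientation, the serialization format handled above can be read straight off Op.toString() and pack(): an op serializes as attribs + optional `|lines` + opcode + chars, with all numbers in base 36, and pack() prepends a `Z:` header encoding the old length and the length delta. A small sketch using the newly exported helpers (the values are made up for illustration; './Changeset' stands for this module's path):

import {Op, pack} from './Changeset';

// An insert op carrying attribute *0 over 5 characters serializes as '*0+5'.
const op = new Op('+');
op.chars = 5;
op.attribs = '*0';
console.log(op.toString());                       // '*0+5'

// pack() adds the header: old length 0, new length 5 -> 'Z:0>5'.
console.log(pack(0, 5, op.toString(), 'hello'));  // 'Z:0>5*0+5$hello'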
@@ -1,6 +1,6 @@
 'use strict';
 
-const pluginDefs = require('./plugin_defs');
+import {hooks} from './plugin_defs';
 
 // Maps the name of a server-side hook to a string explaining the deprecation
 // (e.g., 'use the foo hook instead').
@@ -10,12 +10,15 @@ const pluginDefs = require('./plugin_defs');
 // const hooks = require('ep_etherpad-lite/static/js/pluginfw/hooks');
 // hooks.deprecationNotices.fooBar = 'use the newSpiffy hook instead';
 //
-exports.deprecationNotices = {};
+export const deprecationNotices:{
+  authFailure?: string;
+  clientReady?: string
+} = {};
 
 const deprecationWarned = {};
 
 const checkDeprecation = (hook) => {
-  const notice = exports.deprecationNotices[hook.hook_name];
+  const notice = deprecationNotices[hook.hook_name];
   if (notice == null) return;
   if (deprecationWarned[hook.hook_fn_name]) return;
   console.warn(`${hook.hook_name} hook used by the ${hook.part.plugin} plugin ` +
@@ -189,10 +192,10 @@ const callHookFnSync = (hook, context) => {
 // 1. Collect all values returned by the hook functions into an array.
 // 2. Convert each `undefined` entry into `[]`.
 // 3. Flatten one level.
-exports.callAll = (hookName, context) => {
+export const callAll = (hookName, context?) => {
   if (context == null) context = {};
-  const hooks = pluginDefs.hooks[hookName] || [];
-  return flatten1(hooks.map((hook) => normalizeValue(callHookFnSync(hook, context))));
+  const hooksResult = hooks[hookName] || [];
+  return flatten1(hooksResult.map((hook) => normalizeValue(callHookFnSync(hook, context))));
 };
 
 // Calls the hook function asynchronously and returns a Promise that either resolves to the hook
@@ -342,23 +345,23 @@ const callHookFnAsync = async (hook, context) => {
 // 2. Convert each `undefined` entry into `[]`.
 // 3. Flatten one level.
 // If cb is non-null, this function resolves to the value returned by cb.
-exports.aCallAll = async (hookName, context, cb = null) => {
-  if (cb != null) return await attachCallback(exports.aCallAll(hookName, context), cb);
+export const aCallAll = async (hookName, context?, cb = null) => {
+  if (cb != null) return await attachCallback(aCallAll(hookName, context), cb);
   if (context == null) context = {};
-  const hooks = pluginDefs.hooks[hookName] || [];
+  const hooksResult = hooks[hookName] || [];
   const results = await Promise.all(
-      hooks.map(async (hook) => normalizeValue(await callHookFnAsync(hook, context))));
+      hooksResult.map(async (hook) => normalizeValue(await callHookFnAsync(hook, context))));
   return flatten1(results);
 };
 
 // Like `aCallAll()` except the hook functions are called one at a time instead of concurrently.
 // Only use this function if the hook functions must be called one at a time, otherwise use
 // `aCallAll()`.
-exports.callAllSerial = async (hookName, context) => {
+export const callAllSerial = async (hookName, context) => {
   if (context == null) context = {};
-  const hooks = pluginDefs.hooks[hookName] || [];
+  const hooksResult = hooks[hookName] || [];
   const results = [];
-  for (const hook of hooks) {
+  for (const hook of hooksResult) {
     results.push(normalizeValue(await callHookFnAsync(hook, context)));
   }
   return flatten1(results);
@@ -367,11 +370,11 @@ exports.callAllSerial = async (hookName, context) => {
 // DEPRECATED: Use `aCallFirst()` instead.
 //
 // Like `aCallFirst()`, but synchronous. Hook functions must provide their values synchronously.
-exports.callFirst = (hookName, context) => {
+export const callFirst = (hookName, context) => {
   if (context == null) context = {};
   const predicate = (val) => val.length;
-  const hooks = pluginDefs.hooks[hookName] || [];
-  for (const hook of hooks) {
+  const hooksResult = hooks[hookName] || [];
+  for (const hook of hooksResult) {
     const val = normalizeValue(callHookFnSync(hook, context));
     if (predicate(val)) return val;
   }
@@ -399,21 +402,21 @@ exports.callFirst = (hookName, context) => {
 // If cb is nullish, resolves to an array that is either the normalized value that satisfied the
 // predicate or empty if the predicate was never satisfied. If cb is non-nullish, resolves to the
 // value returned from cb().
-exports.aCallFirst = async (hookName, context, cb = null, predicate = null) => {
+export const aCallFirst = async (hookName, context, cb = null, predicate = null) => {
   if (cb != null) {
-    return await attachCallback(exports.aCallFirst(hookName, context, null, predicate), cb);
+    return await attachCallback(aCallFirst(hookName, context, null, predicate), cb);
   }
   if (context == null) context = {};
   if (predicate == null) predicate = (val) => val.length;
-  const hooks = pluginDefs.hooks[hookName] || [];
-  for (const hook of hooks) {
+  const hooksResult = hooks[hookName] || [];
+  for (const hook of hooksResult) {
     const val = normalizeValue(await callHookFnAsync(hook, context));
     if (predicate(val)) return val;
   }
   return [];
 };
 
-exports.exportedForTestingOnly = {
+export const exportedForTestingOnly = {
   callHookFnAsync,
   callHookFnSync,
   deprecationWarned,
@@ -1,18 +1,23 @@
 'use strict';
 
-const log4js = require('log4js');
-const plugins = require('./plugins');
-const hooks = require('./hooks');
-const request = require('request');
-const runCmd = require('../../../node/utils/run_cmd');
-const settings = require('../../../node/utils/Settings');
+import log4js from 'log4js';
+import {prefix, update} from "./plugins";
+
+import {aCallAll} from "./hooks";
+
+import request from "request";
+
+import {exportCMD} from "../../../node/utils/run_cmd";
+
+import {reloadSettings} from "../../../node/utils/Settings";
+import {InstallerModel} from "../../module/InstallerModel";
 
 const logger = log4js.getLogger('plugins');
 
 const onAllTasksFinished = async () => {
-  settings.reloadSettings();
-  await hooks.aCallAll('loadSettings', {settings});
-  await hooks.aCallAll('restartServer');
+  const settings = reloadSettings();
+  await aCallAll('loadSettings', {settings});
+  await aCallAll('restartServer');
 };
 
 let tasks = 0;
@@ -27,54 +32,54 @@ const wrapTaskCb = (cb) => {
   };
 };
 
-exports.uninstall = async (pluginName, cb = null) => {
+export const uninstall = async (pluginName, cb = null) => {
   cb = wrapTaskCb(cb);
   logger.info(`Uninstalling plugin ${pluginName}...`);
   try {
     // The --no-save flag prevents npm from creating package.json or package-lock.json.
     // The --legacy-peer-deps flag is required to work around a bug in npm v7:
     // https://github.com/npm/cli/issues/2199
-    await runCmd(['npm', 'uninstall', '--no-save', '--legacy-peer-deps', pluginName]);
+    await exportCMD(['npm', 'uninstall', '--no-save', '--legacy-peer-deps', pluginName]);
   } catch (err) {
     logger.error(`Failed to uninstall plugin ${pluginName}`);
     cb(err || new Error(err));
     throw err;
   }
   logger.info(`Successfully uninstalled plugin ${pluginName}`);
-  await hooks.aCallAll('pluginUninstall', {pluginName});
-  await plugins.update();
+  await aCallAll('pluginUninstall', {pluginName});
+  await update();
   cb(null);
 };
 
-exports.install = async (pluginName, cb = null) => {
+export const install = async (pluginName, cb = null) => {
   cb = wrapTaskCb(cb);
   logger.info(`Installing plugin ${pluginName}...`);
   try {
     // The --no-save flag prevents npm from creating package.json or package-lock.json.
     // The --legacy-peer-deps flag is required to work around a bug in npm v7:
    // https://github.com/npm/cli/issues/2199
-    await runCmd(['npm', 'install', '--no-save', '--legacy-peer-deps', pluginName]);
+    await exportCMD(['npm', 'install', '--no-save', '--legacy-peer-deps', pluginName]);
   } catch (err) {
     logger.error(`Failed to install plugin ${pluginName}`);
     cb(err || new Error(err));
     throw err;
   }
   logger.info(`Successfully installed plugin ${pluginName}`);
-  await hooks.aCallAll('pluginInstall', {pluginName});
-  await plugins.update();
+  await aCallAll('pluginInstall', {pluginName});
+  await update();
   cb(null);
 };
 
-exports.availablePlugins = null;
+export let availablePlugins = null;
 let cacheTimestamp = 0;
 
-exports.getAvailablePlugins = (maxCacheAge) => {
+export const getAvailablePlugins = (maxCacheAge) => {
   const nowTimestamp = Math.round(Date.now() / 1000);
 
   return new Promise((resolve, reject) => {
     // check cache age before making any request
-    if (exports.availablePlugins && maxCacheAge && (nowTimestamp - cacheTimestamp) <= maxCacheAge) {
-      return resolve(exports.availablePlugins);
+    if (availablePlugins && maxCacheAge && (nowTimestamp - cacheTimestamp) <= maxCacheAge) {
+      return resolve(availablePlugins);
     }
 
     request('https://static.etherpad.org/plugins.json', (er, response, plugins) => {
@@ -87,7 +92,7 @@ exports.getAvailablePlugins = (maxCacheAge) => {
         plugins = [];
       }
 
-      exports.availablePlugins = plugins;
+      availablePlugins = plugins;
       cacheTimestamp = nowTimestamp;
       resolve(plugins);
     });
@@ -95,18 +100,19 @@ exports.getAvailablePlugins = (maxCacheAge) => {
 };
 
 
-exports.search = (searchTerm, maxCacheAge) => exports.getAvailablePlugins(maxCacheAge).then(
-    (results) => {
+export const search = (searchTerm, maxCacheAge) => getAvailablePlugins(maxCacheAge).then(
+    (results: InstallerModel[]) => {
       const res = {};
 
       if (searchTerm) {
        searchTerm = searchTerm.toLowerCase();
      }
 
 
      for (const pluginName in results) {
        // for every available plugin
        // TODO: Also search in keywords here!
-        if (pluginName.indexOf(plugins.prefix) !== 0) continue;
+        if (pluginName.indexOf(prefix) !== 0) continue;
 
        if (searchTerm && !~results[pluginName].name.toLowerCase().indexOf(searchTerm) &&
          (typeof results[pluginName].description !== 'undefined' &&
@@ -8,13 +8,13 @@
 // * hook_fn: Plugin-supplied hook function.
 // * hook_fn_name: Name of the hook function, with the form <filename>:<functionName>.
 // * part: The ep.json part object that declared the hook. See exports.plugins.
-exports.hooks = {};
+export const hooks = {};
 
 // Whether the plugins have been loaded.
-exports.loaded = false;
+export let loaded = false;
 
 // Topologically sorted list of parts from exports.plugins.
-exports.parts = [];
+export const parts = [];
 
 // Maps the name of a plugin to the plugin's definition provided in ep.json. The ep.json object is
 // augmented with additional metadata:
@@ -25,4 +25,20 @@ exports.parts = [];
 // - version
 // - path
 // - realPath
-exports.plugins = {};
+export const plugins = {};
+
+export const setPlugins = (newPlugins) => {
+  Object.assign(plugins, newPlugins);
+}
+
+export const setParts = (newParts) => {
+  Object.assign(parts, newParts);
+}
+
+export const setHooks = (newHooks) => {
+  Object.assign(hooks, newHooks);
+}
+
+export const setLoaded = (newLoaded) => {
+  loaded = newLoaded;
+}
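A side note on the setters added above: ES module named exports are live but read-only bindings from the importer's side, so consumers such as plugins.js can no longer reassign fields the way the CommonJS version did with `defs.plugins = ...`. Mutating the exported objects in place (or flipping `loaded` from inside this module) is the workaround this commit uses. A rough sketch of the consuming side, assuming the imports used by plugins.js below (the `ep_example` entry is made up):

import {plugins, setPlugins, setLoaded} from './plugin_defs';

// plugins = {...};  // not allowed: reassigning an imported binding is an error
setPlugins({ep_example: {parts: []}});  // Object.assign into the shared exported object
setLoaded(true);
console.log(Object.keys(plugins));      // ['ep_example']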
@@ -1,20 +1,28 @@
 'use strict';
 
-const fs = require('fs').promises;
-const hooks = require('./hooks');
-const log4js = require('log4js');
-const path = require('path');
-const runCmd = require('../../../node/utils/run_cmd');
-const tsort = require('./tsort');
-const pluginUtils = require('./shared');
-const defs = require('./plugin_defs');
+import {promises as fs} from "fs";
+
+import {aCallAll} from "./hooks";
+
+import log4js from "log4js";
+
+import path from "path";
+
+import {exportCMD} from "../../../node/utils/run_cmd";
+
+import {tsort} from "./tsort";
+
+import {extractHooks} from "./shared";
+
+import {loaded, parts, plugins, setHooks, setLoaded, setParts, setPlugins} from "./plugin_defs";
+import {PluginInfo} from "../../module/PluginInfo";
 
 const logger = log4js.getLogger('plugins');
 
 // Log the version of npm at startup.
 (async () => {
   try {
-    const version = await runCmd(['npm', '--version'], {stdio: [null, 'string']});
+    const version = await exportCMD(['npm', '--version'], {stdio: [null, 'string']});
     logger.info(`npm --version: ${version}`);
   } catch (err) {
     logger.error(`Failed to get npm version: ${err.stack || err}`);
@@ -22,16 +30,19 @@ const logger = log4js.getLogger('plugins');
   }
 })();
 
-exports.prefix = 'ep_';
+type PartType = {
+  [keys: string]:any
+}
+export const prefix = 'ep_';
 
-exports.formatPlugins = () => Object.keys(defs.plugins).join(', ');
+export const formatPlugins = () => Object.keys(plugins).join(', ');
 
-exports.formatParts = () => defs.parts.map((part) => part.full_name).join('\n');
+export const formatParts = () => parts.map((part) => part.full_name).join('\n');
 
-exports.formatHooks = (hookSetName, html) => {
+export const formatHooks = (hookSetName, html) => {
   let hooks = new Map();
-  for (const [pluginName, def] of Object.entries(defs.plugins)) {
-    for (const part of def.parts) {
+  for (const [pluginName, def] of Object.entries(plugins)) {
+    for (const part of parts) {
       for (const [hookName, hookFnName] of Object.entries(part[hookSetName] || {})) {
         let hookEntry = hooks.get(hookName);
         if (!hookEntry) {
@@ -53,7 +64,7 @@ exports.formatHooks = (hookSetName, html) => {
   hooks = new Map([...hooks].sort(sortStringKeys));
   for (const [hookName, hookEntry] of hooks) {
     lines.push(html ? ` <dt>${hookName}:</dt><dd><dl>` : ` ${hookName}:`);
-    const sortedHookEntry = new Map([...hookEntry].sort(sortStringKeys));
+    const sortedHookEntry = new Map<any,any>([...hookEntry].sort(sortStringKeys));
     hooks.set(hookName, sortedHookEntry);
     for (const [pluginName, pluginEntry] of sortedHookEntry) {
       lines.push(html ? ` <dt>${pluginName}:</dt><dd><dl>` : ` ${pluginName}:`);
@@ -72,20 +83,20 @@ exports.formatHooks = (hookSetName, html) => {
   return lines.join('\n');
 };
 
-exports.pathNormalization = (part, hookFnName, hookName) => {
+export const pathNormalization = (part, hookFnName, hookName) => {
   const tmp = hookFnName.split(':'); // hookFnName might be something like 'C:\\foo.js:myFunc'.
   // If there is a single colon assume it's 'filename:funcname' not 'C:\\filename'.
   const functionName = (tmp.length > 1 ? tmp.pop() : null) || hookName;
   const moduleName = tmp.join(':') || part.plugin;
-  const packageDir = path.dirname(defs.plugins[part.plugin].package.path);
+  const packageDir = path.dirname(plugins[part.plugin].package.path);
   const fileName = path.join(packageDir, moduleName);
   return `${fileName}:${functionName}`;
 };
 
-exports.update = async () => {
+export const update = async () => {
   const packages = await exports.getPackages();
-  const parts = {}; // Key is full name. sortParts converts this into a topologically sorted array.
-  const plugins = {};
+  let parts:{[keys: string]:any} = {}; // Key is full name. sortParts converts this into a topologically sorted array.
+  let plugins = {};
 
   // Load plugin metadata ep.json
   await Promise.all(Object.keys(packages).map(async (pluginName) => {
@@ -94,13 +105,13 @@ exports.update = async () => {
   }));
   logger.info(`Loaded ${Object.keys(packages).length} plugins`);
 
-  defs.plugins = plugins;
-  defs.parts = sortParts(parts);
-  defs.hooks = pluginUtils.extractHooks(defs.parts, 'hooks', exports.pathNormalization);
-  defs.loaded = true;
-  await Promise.all(Object.keys(defs.plugins).map(async (p) => {
+  setPlugins(plugins);
+  setParts(sortParts(parts))
+  setHooks(extractHooks(parts, 'hooks', exports.pathNormalization));
+  setLoaded(true)
+  await Promise.all(Object.keys(plugins).map(async (p) => {
     const logger = log4js.getLogger(`plugin:${p}`);
-    await hooks.aCallAll(`init_${p}`, {logger});
+    await aCallAll(`init_${p}`, {logger});
   }));
 };
 
@@ -112,13 +123,15 @@ exports.getPackages = async () => {
   // unset or set to `development`) because otherwise `npm ls` will not mention any packages
   // that are not included in `package.json` (which is expected to not exist).
   const cmd = ['npm', 'ls', '--long', '--json', '--depth=0', '--no-production'];
-  const {dependencies = {}} = JSON.parse(await runCmd(cmd, {stdio: [null, 'string']}));
+  const {dependencies = {}} = JSON.parse(await exportCMD(cmd, {stdio: [null, 'string']}) as unknown as string);
   await Promise.all(Object.entries(dependencies).map(async ([pkg, info]) => {
     if (!pkg.startsWith(exports.prefix)) {
       delete dependencies[pkg];
       return;
     }
-    info.realPath = await fs.realpath(info.path);
+    const mappedInfo = info as PluginInfo
+
+    mappedInfo.realPath = await fs.realpath(mappedInfo.path);
   }));
   return dependencies;
 };
@@ -126,7 +139,7 @@ exports.getPackages = async () => {
 const loadPlugin = async (packages, pluginName, plugins, parts) => {
   const pluginPath = path.resolve(packages[pluginName].path, 'ep.json');
   try {
-    const data = await fs.readFile(pluginPath);
+    const data = await fs.readFile(pluginPath, "utf8");
     try {
       const plugin = JSON.parse(data);
       plugin.package = packages[pluginName];
@@ -145,7 +158,7 @@ const loadPlugin = async (packages, pluginName, plugins, parts) => {
 };
 
 const partsToParentChildList = (parts) => {
-  const res = [];
+  const res:(string|number)[][] = [];
   for (const name of Object.keys(parts)) {
     for (const childName of parts[name].post || []) {
       res.push([name, childName]);
@@ -161,6 +174,7 @@ const partsToParentChildList = (parts) => {
 };
 
 // Used only in Node, so no need for _
-const sortParts = (parts) => tsort(partsToParentChildList(parts))
-    .filter((name) => parts[name] !== undefined)
-    .map((name) => parts[name]);
+//FIXME Is this better
+const sortParts = (parts:PartType) => tsort(partsToParentChildList(parts))
+    .filter((name) => name !== undefined)
+    .map((name) => name);
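For context, the getPackages()/update() path above discovers plugins by asking npm for the installed dependency tree and keeping only `ep_`-prefixed packages. A rough standalone sketch of that flow using Node's built-in child_process instead of the project's run_cmd wrapper (exportCMD); the flags mirror the `cmd` array above minus `--no-production`:

import {execFile} from 'node:child_process';
import {promisify} from 'node:util';

const run = promisify(execFile);

// List installed packages as JSON and keep only Etherpad plugins.
// Note: npm ls can exit non-zero when the tree has problems, which rejects the promise.
const listPluginPackages = async (): Promise<string[]> => {
  const {stdout} = await run('npm', ['ls', '--long', '--json', '--depth=0']);
  const {dependencies = {}} = JSON.parse(stdout);
  return Object.keys(dependencies).filter((pkg) => pkg.startsWith('ep_'));
};

listPluginPackages().then((names) => console.log(names));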
@@ -1,6 +1,6 @@
 'use strict';
 
-const defs = require('./plugin_defs');
+import {parts} from './plugin_defs';
 
 const disabledHookReasons = {
   hooks: {
@@ -9,7 +9,7 @@ const disabledHookReasons = {
   },
 };
 
-const loadFn = (path, hookName) => {
+export const loadFn = (path, hookName) => {
   let functionName;
   const parts = path.split(':');
 
@@ -33,7 +33,7 @@ const loadFn = (path, hookName) => {
   return fn;
 };
 
-const extractHooks = (parts, hookSetName, normalizer) => {
+export const extractHooks = (parts, hookSetName, normalizer) => {
   const hooks = {};
   for (const part of parts) {
     for (const [hookName, regHookFnName] of Object.entries(part[hookSetName] || {})) {
@@ -72,9 +72,7 @@ const extractHooks = (parts, hookSetName, normalizer) => {
     }
   }
   return hooks;
-};
-
-exports.extractHooks = extractHooks;
+}
 
 /*
  * Returns an array containing the names of the installed client-side plugins
@@ -88,8 +86,8 @@ exports.extractHooks = extractHooks;
  * No plugins: []
  * Some plugins: [ 'ep_adminpads', 'ep_add_buttons', 'ep_activepads' ]
  */
-exports.clientPluginNames = () => {
-  const clientPluginNames = defs.parts
+export const clientPluginNames = () => {
+  const clientPluginNames = parts
       .filter((part) => Object.prototype.hasOwnProperty.call(part, 'client_hooks'))
       .map((part) => `plugin-${part.plugin}`);
   return [...new Set(clientPluginNames)];
@@ -4,14 +4,14 @@
  * general topological sort
  * from https://gist.github.com/1232505
  * @author SHIN Suzuki (shinout310@gmail.com)
- * @param Array<Array> edges : list of edges. each edge forms Array<ID,ID> e.g. [12 , 3]
+ * @param edges Array<Array> edges : list of edges. each edge forms Array<ID,ID> e.g. [12 , 3]
  *
  * @returns Array : topological sorted list of IDs
  **/
 
-const tsort = (edges) => {
+export const tsort = (edges: (string|number)[][]) => {
   const nodes = {}; // hash: stringified id of the node => { id: id, afters: lisf of ids }
-  const sorted = []; // sorted list of IDs ( returned value )
+  const sorted: (string|number)[][]= []; // sorted list of IDs ( returned value )
   const visited = {}; // hash: id of already visited node => true
 
   const Node = function (id) {
@@ -62,7 +62,7 @@ const tsort = (edges) => {
  **/
 const tsortTest = () => {
   // example 1: success
-  let edges = [
+  let edges:(string|number)[][] = [
     [1, 2],
     [1, 3],
     [2, 4],
src/static/module/InstallerModel.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
+export type InstallerModel = {
+  name: string,
+  description: string,
+
+}
src/static/module/PluginInfo.ts (new file, 4 lines)
@@ -0,0 +1,4 @@
+export type PluginInfo = {
+  realPath: string,
+  path: string,
+}
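The two new modules only declare shapes: InstallerModel for entries coming from the plugins.json registry (used to type the search() results in installer.js) and PluginInfo for the `npm ls` dependency records handled in plugins.js. A tiny usage sketch (field values and import paths are illustrative):

import {InstallerModel} from './src/static/module/InstallerModel';
import {PluginInfo} from './src/static/module/PluginInfo';

const available: InstallerModel = {name: 'ep_example', description: 'an example plugin'};
const installed: PluginInfo = {path: '/srv/etherpad/node_modules/ep_example', realPath: '/srv/plugins/ep_example'};

console.log(available.name, installed.realPath);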