cleanup after workspace refactoring (#6174)

* fix bin folder and workflows as far as possible

clean up Dockerfile

change script paths

add lock file

fix working directory for workflows

fix Windows bin

fix Travis (is Travis used anyway?)

fix package refs

remove pnpm-lock file in the root, as it conflicts with the Docker volume setup

improve comments

use install again

refactor prod image call to run

fix "--workspace can only be used inside a workspace" error

correct comment

try to fix pipeline

try to fix pipeline for upgrade-from-latest-release

install all deps

smaller adjustments

save work in progress

update Dockerfile

remove workspace command

fix run test command

start repairing latest release workflow

start repairing latest release workflow

start repairing latest release workflow

further repairs

* remove test plugin from docker compose
Author: JannikStreek
Date: 2024-02-21 21:50:11 +01:00 (committed by GitHub)
Parent: 4f53142d7f
Commit: 04063d664b
75 changed files with 158 additions and 192 deletions
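In practical terms, the refactoring moves script invocation from paths under `src/` to pnpm-driven commands at the workspace root. A rough before/after sketch (the package filter name follows the `ep_etherpad-lite` symlink used by the Windows install script below; the exact script names are assumptions):

```sh
# Before: helper tools were addressed by their path under src/
node src/bin/checkPad.js mypadid

# After: package scripts run from the repository root through pnpm,
# filtered to the Etherpad package
pnpm --filter ep_etherpad-lite run test
```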


@@ -1,44 +0,0 @@
#!/usr/bin/env bash
# IMPORTANT
# Protect against misspelling a var and rm -rf /
set -u
set -e
SRC=/tmp/etherpad-deb-src
DIST=/tmp/etherpad-deb-dist
SYSROOT=${SRC}/sysroot
DEBIAN=${SRC}/DEBIAN
rm -rf ${DIST}
mkdir -p ${DIST}/
rm -rf ${SRC}
rsync -a src/bin/deb-src/ ${SRC}/
mkdir -p ${SYSROOT}/opt/
rsync --exclude '.git' -a . ${SYSROOT}/opt/etherpad/ --delete
mkdir -p ${SYSROOT}/usr/share/doc
cp README.md ${SYSROOT}/usr/share/doc/etherpad
find ${SRC}/ -type d -exec chmod 0755 {} \;
find ${SRC}/ -type f -exec chmod go-w {} \;
chown -R root:root ${SRC}/
let SIZE=$(du -s ${SYSROOT} | sed s'/\s\+.*//')+8
pushd ${SYSROOT}/
tar czf ${DIST}/data.tar.gz [a-z]*
popd
sed s"/SIZE/${SIZE}/" -i ${DEBIAN}/control
pushd ${DEBIAN}
tar czf ${DIST}/control.tar.gz *
popd
pushd ${DIST}/
echo 2.0 > ./debian-binary
find ${DIST}/ -type d -exec chmod 0755 {} \;
find ${DIST}/ -type f -exec chmod go-w {} \;
chown -R root:root ${DIST}/
ar r ${DIST}/etherpad-1.deb debian-binary control.tar.gz data.tar.gz
popd
rsync -a ${DIST}/etherpad-1.deb ./
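The script above assembles the `.deb` by hand with `tar` and `ar`; a quick way to sanity-check the result, assuming `dpkg` is available:

```sh
dpkg --info etherpad-1.deb      # show the control metadata (name, version, installed size)
dpkg --contents etherpad-1.deb  # list the files that would land under /opt/etherpad
```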


@@ -1,66 +0,0 @@
#!/bin/sh
set -e
pecho() { printf %s\\n "$*"; }
log() { pecho "$@"; }
error() { log "ERROR: $@" >&2; }
fatal() { error "$@"; exit 1; }
try() { "$@" || fatal "'$@' failed"; }
is_cmd() { command -v "$@" >/dev/null 2>&1; }
for x in git unzip wget zip; do
is_cmd "${x}" || fatal "Please install ${x}"
done
# Move to the folder where Etherpad is checked out
try cd "${0%/*}"
workdir=$(try git rev-parse --show-toplevel) || exit 1
try cd "${workdir}"
[ -f src/package.json ] || fatal "failed to cd to etherpad root directory"
# See https://github.com/msys2/MSYS2-packages/issues/1216
export MSYSTEM=winsymlinks:lnk
OUTPUT=${workdir}/etherpad-win.zip
TMP_FOLDER=$(try mktemp -d) || exit 1
trap 'exit 1' HUP INT TERM
trap 'log "cleaning up..."; try cd / && try rm -rf "${TMP_FOLDER}"' EXIT
log "create a clean environment in $TMP_FOLDER..."
try export GIT_WORK_TREE=${TMP_FOLDER}; git checkout HEAD -f \
|| fatal "failed to copy etherpad to temporary folder"
try mkdir "${TMP_FOLDER}"/.git
try git rev-parse HEAD >${TMP_FOLDER}/.git/HEAD
# Disable symlinks to avoid problems with Windows
#try pnpm i "${TMP_FOLDER}"/src/node_modules
try cd "${TMP_FOLDER}"
[ -f src/package.json ] || fatal "failed to copy etherpad to temporary folder"
# setting NODE_ENV=production ensures that dev dependencies are not installed,
# making the windows package smaller
export NODE_ENV=production
rm -rf node_modules || true
rm -rf src/node_modules || true
#log "do a normal unix install first..."
#$(try cd src && ./bin/installDeps.sh)
log "copy the windows settings template..."
try cp settings.json.template settings.json
#log "resolve symbolic links..."
#try cp -rL node_modules node_modules_resolved
#try rm -rf node_modules
#try mv node_modules_resolved node_modules
log "download windows node..."
try wget "https://nodejs.org/dist/latest-v20.x/win-x64/node.exe" -O node.exe
log "create the zip..."
try zip -9 -r "${OUTPUT}" ./*
log "Finished. You can find the zip at ${OUTPUT}"


@@ -1,27 +0,0 @@
'use strict';
/*
* This is a debug tool. It checks all revisions for data corruption
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
if (process.argv.length !== 2) throw new Error('Use: node src/bin/checkAllPads.js');
(async () => {
const db = require('../node/db/DB');
await db.init();
const padManager = require('../node/db/PadManager');
await Promise.all((await padManager.listAllPads()).padIDs.map(async (padId) => {
const pad = await padManager.getPad(padId);
try {
await pad.check();
} catch (err) {
console.error(`Error in pad ${padId}: ${err.stack || err}`);
return;
}
console.log(`Pad ${padId}: OK`);
}));
console.log('Finished.');
})();


@@ -1,20 +0,0 @@
'use strict';
/*
* This is a debug tool. It checks all revisions for data corruption
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
if (process.argv.length !== 3) throw new Error('Use: node src/bin/checkPad.js $PADID');
const padId = process.argv[2];
(async () => {
const db = require('../node/db/DB');
await db.init();
const padManager = require('../node/db/PadManager');
if (!await padManager.doesPadExists(padId)) throw new Error('Pad does not exist');
const pad = await padManager.getPad(padId);
await pad.check();
console.log('Finished.');
})();
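Both debug tools are invoked as stated in their usage errors; the pad ID is an example:

```sh
node src/bin/checkAllPads.js       # check every pad in the database
node src/bin/checkPad.js mypadid   # check a single pad
```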


@@ -1,39 +0,0 @@
#!/bin/sh
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Source constants and useful functions
. src/bin/functions.sh
ignoreRoot=0
for ARG in "$@"
do
if [ "$ARG" = "--root" ]; then
ignoreRoot=1
fi
done
#Stop the script if it's started as root
if [ "$(id -u)" -eq 0 ] && [ $ignoreRoot -eq 0 ]; then
echo "You shouldn't start Etherpad as root!"
echo "Please type 'Etherpad rocks my socks' or supply the '--root' argument if you still want to start it as root"
read rocks
if [ "$rocks" != "Etherpad rocks my socks" ]
then
echo "Your input was incorrect"
exit 1
fi
fi
#Clean the current environment
rm -rf src/node_modules
#Prepare the environment
src/bin/installDeps.sh "$@" || exit 1
#Move to the node folder and start
echo "Starting Etherpad..."
cd src
exec node --import tsx ./node/server.ts "$@"
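Typical starts via this wrapper (assuming it lives at `src/bin/run.sh`, consistent with the paths it sources); extra flags are forwarded to both `installDeps.sh` and the server:

```sh
src/bin/run.sh                            # normal start; refuses to run as root
src/bin/run.sh --root                     # explicitly allow running as root
src/bin/run.sh --settings other.json      # e.g. point at a different settings file
```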


@@ -1,10 +0,0 @@
{
"etherpadDB":
{
"host": "localhost",
"port": 3306,
"database": "etherpad",
"user": "etherpaduser",
"password": "yourpassword"
}
}


@@ -1,203 +0,0 @@
#!/bin/bash
#
# WARNING: since Etherpad 1.7.0 (2018-08-17), this script is DEPRECATED, and
# will be removed/modified in a future version.
# It's left here just for documentation.
# The branching policies for releases have been changed.
#
# This script is used to publish a new release/version of etherpad on github
#
# Work that is done by this script:
# ETHER_REPO:
# - Add text to CHANGELOG.md
# - Replace version of etherpad in src/package.json
# - Create a release branch and push it to github
# - Merges this release branch into master branch
# - Creating the windows build and the docs
# ETHER_WEB_REPO:
# - Creating a new branch with the docs and the windows build
# - Replacing the version numbers in the index.html
# - Push this branch and merge it to master
# ETHER_REPO:
# - Create a new release on github
printf "WARNING: since Etherpad 1.7.0 this script is DEPRECATED, and will be removed/modified in a future version.\n\n"
while true; do
read -p "Do you want to continue? This is discouraged. [y/N]" yn
case $yn in
[Yy]* ) break;;
[Nn]* ) exit;;
* ) printf "Please answer yes or no.\n\n";;
esac
done
ETHER_REPO="https://github.com/ether/etherpad-lite.git"
ETHER_WEB_REPO="https://github.com/ether/ether.github.com.git"
TMP_DIR="/tmp/"
echo "WARNING: You can only run this script if your github api token is allowed to create and merge branches on $ETHER_REPO and $ETHER_WEB_REPO."
echo "This script automatically changes the version number in package.json and adds a text to CHANGELOG.md."
echo "When you use this script you should be in the branch that you want to release (develop probably) on latest version. Any changes that are currently not committed will be committed."
echo "-----"
# Get the latest version
LATEST_GIT_TAG=$(git tag | tail -n 1)
# Current environment
echo "Current environment: "
echo "- branch: $(git branch | grep '* ')"
echo "- last commit date: $(git show --quiet --pretty=format:%ad)"
echo "- current version: $LATEST_GIT_TAG"
echo "- temp dir: $TMP_DIR"
# Get new version number
# format: x.x.x
echo -n "Enter new version (x.x.x): "
read VERSION
# Get the message for the changelogs
read -p "Enter new changelog entries (press enter): "
tmp=$(mktemp)
"${EDITOR:-vi}" $tmp
changelogText=$(<$tmp)
echo "$changelogText"
rm $tmp
if [ "$changelogText" != "" ]; then
changelogText="# $VERSION\n$changelogText"
fi
# get the token for the github api
echo -n "Enter your github api token: "
read API_TOKEN
function check_api_token {
echo "Checking if github api token is valid..."
CURL_RESPONSE=$(curl --silent -i https://api.github.com/user?access_token=$API_TOKEN | iconv -f utf8)
HTTP_STATUS=$(echo $CURL_RESPONSE | head -1 | sed -r 's/.* ([0-9]{3}) .*/\1/')
[[ $HTTP_STATUS != "200" ]] && echo "Aborting: Invalid github api token" && exit 1
}
function modify_files {
# Add changelog text to first line of CHANGELOG.md
msg=""
# source: https://unix.stackexchange.com/questions/9784/how-can-i-read-line-by-line-from-a-variable-in-bash#9789
while IFS= read -r line
do
# replace newlines with literal "\n" for using with sed
msg+="$line\n"
done < <(printf '%s\n' "${changelogText}")
sed -i "1s/^/${msg}\n/" CHANGELOG.md
[[ $? != 0 ]] && echo "Aborting: Error modifying CHANGELOG.md" && exit 1
# Replace version number of etherpad in package.json
sed -i -r "s/(\"version\"[ ]*: \").*(\")/\1$VERSION\2/" src/package.json
[[ $? != 0 ]] && echo "Aborting: Error modifying package.json" && exit 1
}
function create_release_branch {
echo "Creating new release branch..."
git rev-parse --verify release/$VERSION 2>/dev/null
if [ $? == 0 ]; then
echo "Aborting: Release branch already present"
exit 1
fi
git checkout -b release/$VERSION
[[ $? != 0 ]] && echo "Aborting: Error creating release branch" && exit 1
echo "Committing CHANGELOG.md and package.json"
git add CHANGELOG.md
git add src/package.json
git commit -m "Release version $VERSION"
echo "Pushing release branch to github..."
git push -u $ETHER_REPO release/$VERSION
[[ $? != 0 ]] && echo "Aborting: Error pushing release branch to github" && exit 1
}
function merge_release_branch {
echo "Merging release to master branch on github..."
API_JSON=$(printf '{"base": "master","head": "release/%s","commit_message": "Merge new release into master branch!"}' $VERSION)
CURL_RESPONSE=$(curl --silent -i -N --data "$API_JSON" https://api.github.com/repos/ether/etherpad-lite/merges?access_token=$API_TOKEN | iconv -f utf8)
echo $CURL_RESPONSE
HTTP_STATUS=$(echo $CURL_RESPONSE | head -1 | sed -r 's/.* ([0-9]{3}) .*/\1/')
[[ $HTTP_STATUS != "200" ]] && echo "Aborting: Error merging release branch on github" && exit 1
}
function create_builds {
echo "Cloning etherpad-lite repo and ether.github.com repo..."
cd $TMP_DIR
rm -rf etherpad-lite ether.github.com
git clone $ETHER_REPO --branch master
git clone $ETHER_WEB_REPO
echo "Creating windows build..."
cd etherpad-lite
src/bin/buildForWindows.sh
[[ $? != 0 ]] && echo "Aborting: Error creating build for windows" && exit 1
echo "Creating docs..."
make docs
[[ $? != 0 ]] && echo "Aborting: Error generating docs" && exit 1
}
function push_builds {
cd $TMP_DIR/etherpad-lite/
echo "Copying windows build and docs to website repo..."
GIT_SHA=$(git rev-parse HEAD | cut -c1-10)
mv etherpad-win.zip $TMP_DIR/ether.github.com/downloads/etherpad-win-$VERSION-$GIT_SHA.zip
mv out/doc $TMP_DIR/ether.github.com/doc/v$VERSION
cd $TMP_DIR/ether.github.com/
sed -i "s/etherpad-win.*\.zip/etherpad-win-$VERSION-$GIT_SHA.zip/" index.html
sed -i "s/$LATEST_GIT_TAG/$VERSION/g" index.html
git checkout -b release_$VERSION
[[ $? != 0 ]] && echo "Aborting: Error creating new release branch" && exit 1
git add doc/
git add downloads/
git commit -a -m "Release version $VERSION"
git push -u $ETHER_WEB_REPO release_$VERSION
[[ $? != 0 ]] && echo "Aborting: Error pushing release branch to github" && exit 1
}
function merge_web_branch {
echo "Merging release to master branch on github..."
API_JSON=$(printf '{"base": "master","head": "release_%s","commit_message": "Release version %s"}' $VERSION $VERSION)
CURL_RESPONSE=$(curl --silent -i -N --data "$API_JSON" https://api.github.com/repos/ether/ether.github.com/merges?access_token=$API_TOKEN | iconv -f utf8)
echo $CURL_RESPONSE
HTTP_STATUS=$(echo $CURL_RESPONSE | head -1 | sed -r 's/.* ([0-9]{3}) .*/\1/')
[[ $HTTP_STATUS != "200" ]] && echo "Aborting: Error merging release branch" && exit 1
}
function publish_release {
echo -n "Do you want to publish a new release on github (y/n)? "
read PUBLISH_RELEASE
if [ $PUBLISH_RELEASE = "y" ]; then
# create a new release on github
API_JSON=$(printf '{"tag_name": "%s","target_commitish": "master","name": "Release %s","body": "%s","draft": false,"prerelease": false}' $VERSION $VERSION $changelogText)
CURL_RESPONSE=$(curl --silent -i -N --data "$API_JSON" https://api.github.com/repos/ether/etherpad-lite/releases?access_token=$API_TOKEN | iconv -f utf8)
HTTP_STATUS=$(echo $CURL_RESPONSE | head -1 | sed -r 's/.* ([0-9]{3}) .*/\1/')
[[ $HTTP_STATUS != "201" ]] && echo "Aborting: Error publishing release on github" && exit 1
else
echo "No release published on github!"
fi
}
function todo_notification {
echo "Release procedure was successful, but you have to do some steps manually:"
echo "- Update the wiki at https://github.com/ether/etherpad-lite/wiki"
echo "- Create a pull request on github to merge the master branch back to develop"
echo "- Announce the new release on the mailing list, blog.etherpad.org and Twitter"
}
# Call functions
check_api_token
modify_files
create_release_branch
merge_release_branch
create_builds
push_builds
merge_web_branch
publish_release
todo_notification


@@ -1,51 +0,0 @@
'use strict';
/*
* A tool for generating a test user session which can be used for debugging configs
* that require sessions.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const fs = require('fs');
const path = require('path');
const querystring = require('querystring');
const settings = require('../node/utils/Settings');
const supertest = require('supertest');
(async () => {
const api = supertest(`http://${settings.ip}:${settings.port}`);
const filePath = path.join(__dirname, '../../APIKEY.txt');
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
let res;
res = await api.get('/api/');
const apiVersion = res.body.currentVersion;
if (!apiVersion) throw new Error('No version set in API');
const uri = (cmd, args) => `/api/${apiVersion}/${cmd}?${querystring.stringify(args)}`;
res = await api.post(uri('createGroup', {apikey}));
if (res.body.code === 1) throw new Error(`Error creating group: ${res.body}`);
const groupID = res.body.data.groupID;
console.log('groupID', groupID);
res = await api.post(uri('createGroupPad', {apikey, groupID}));
if (res.body.code === 1) throw new Error(`Error creating group pad: ${res.body}`);
console.log('Test Pad ID ====> ', res.body.data.padID);
res = await api.post(uri('createAuthor', {apikey}));
if (res.body.code === 1) throw new Error(`Error creating author: ${res.body}`);
const authorID = res.body.data.authorID;
console.log('authorID', authorID);
const validUntil = Math.floor(new Date() / 1000) + 60000;
console.log('validUntil', validUntil);
res = await api.post(uri('createSession', {apikey, groupID, authorID, validUntil}));
if (res.body.code === 1) throw new Error(`Error creating session: ${res.body}`);
console.log('Session made: ====> create a cookie named sessionID and set the value to',
res.body.data.sessionID);
})();
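A minimal run, assuming Etherpad is up and an `APIKEY.txt` exists in the repository root (the script path under `src/bin/` is an assumption):

```sh
node src/bin/createUserSession.js
# prints groupID, test pad ID, authorID and the sessionID to set as a cookie
```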


@@ -1,9 +0,0 @@
Package: etherpad
Version: 1.3
Section: base
Priority: optional
Architecture: i386
Installed-Size: SIZE
Depends:
Maintainer: John McLear <john@mclear.co.uk>
Description: Etherpad is a collaborative editor.


@@ -1,7 +0,0 @@
#!/bin/bash
# Start the services!
service etherpad start
echo "Give Etherpad about 3 minutes to install dependencies then visit http://localhost:9001 in your web browser"
echo "To stop etherpad type 'service etherpad stop', To restart type 'service etherpad restart'".
rm -f /tmp/etherpad.log /tmp/etherpad.err


@@ -1,26 +0,0 @@
#!/bin/bash
# Installs node if it isn't already installed
#
# Don't steamroll over a previously installed node version
# TODO provide a local version of node?
VER="0.10.4"
ARCH="x86"
if [ `arch | grep 64` ]
then
ARCH="x64"
fi
# TODO test version
if [ ! -f /usr/local/bin/node ]
then
pushd /tmp
wget -c "http://nodejs.org/dist/v${VER}/node-v${VER}-linux-${ARCH}.tar.gz"
rm -rf /tmp/node-v${VER}-linux-${ARCH}
tar xf node-v${VER}-linux-${ARCH}.tar.gz -C /tmp/
cp -a /tmp/node-v${VER}-linux-${ARCH}/* /usr/local/
fi
# Create Etherpad user
adduser --system etherpad


@@ -1,4 +0,0 @@
#!/bin/bash
# Stop the appserver:
service etherpad stop || true


@@ -1,28 +0,0 @@
description "etherpad"
start on started networking
stop on runlevel [!2345]
env EPHOME=/opt/etherpad
env EPLOGS=/var/log/etherpad
env EPUSER=etherpad
respawn
pre-start script
cd $EPHOME
mkdir $EPLOGS ||true
chown $EPUSER $EPLOGS ||true
chmod 0755 $EPLOGS ||true
chown -R $EPUSER $EPHOME/var ||true
$EPHOME/src/bin/installDeps.sh >> $EPLOGS/error.log || { stop; exit 1; }
end script
script
cd $EPHOME/
exec su -s /bin/sh -c 'exec "$0" "$@"' $EPUSER -- node --import tsx src/node/server.ts \
>> $EPLOGS/access.log \
2>> $EPLOGS/error.log
echo "Etherpad is running on http://localhost:9001 - To change settings edit /opt/etherpad/settings.json"
end script


@@ -1,20 +0,0 @@
#!/bin/sh
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Source constants and useful functions
. src/bin/functions.sh
# Prepare the environment
src/bin/installDeps.sh || exit 1
echo "If you are new to debugging Node.js with Chrome DevTools, take a look at this page:"
echo "https://medium.com/@paul_irish/debugging-node-js-nightlies-with-chrome-devtools-7c4a1b95ae27"
echo "Open 'chrome://inspect' on Chrome to start debugging."
cd src
# Use 0.0.0.0 to allow external connections to the debugger
# (ex: running Etherpad on a docker container). Use default port # (9229)
exec node --import tsx --inspect=0.0.0.0:9229 ./node/server.ts "$@"


@@ -1,47 +0,0 @@
'use strict';
/*
* A tool for deleting ALL GROUP sessions Etherpad user sessions from the CLI,
* because sometimes a brick is required to fix a face.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const path = require('path');
const fs = require('fs');
const supertest = require('supertest');
// Set a delete counter which will increment on each delete attempt
// TODO: Check delete is successful before incrementing
let deleteCount = 0;
// get the API Key
const filePath = path.join(__dirname, '../../APIKEY.txt');
console.log('Deleting all group sessions, please be patient.');
(async () => {
const settings = require('../tests/container/loadSettings').loadSettings();
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
const api = supertest(`http://${settings.ip}:${settings.port}`);
const apiVersionResponse = await api.get('/api/');
const apiVersion = apiVersionResponse.body.currentVersion; // 1.12.5
const groupsResponse = await api.get(`/api/${apiVersion}/listAllGroups?apikey=${apikey}`);
const groups = groupsResponse.body.data.groupIDs; // ['whateverGroupID']
for (const groupID of groups) {
const sessionURI = `/api/${apiVersion}/listSessionsOfGroup?apikey=${apikey}&groupID=${groupID}`;
const sessionsResponse = await api.get(sessionURI);
const sessions = sessionsResponse.body.data;
for (const sessionID of Object.keys(sessions)) {
const deleteURI = `/api/${apiVersion}/deleteSession?apikey=${apikey}&sessionID=${sessionID}`;
await api.post(deleteURI); // delete
deleteCount++;
}
}
console.log(`Deleted ${deleteCount} sessions`);
})();
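The server must be running for the HTTP API to be reachable; the path under `src/bin/` is an assumption:

```sh
node src/bin/deleteAllGroupSessions.js
```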


@@ -1,38 +0,0 @@
'use strict';
/*
* A tool for deleting pads from the CLI, because sometimes a brick is required
* to fix a window.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const settings = require('../tests/container/loadSettings').loadSettings();
const path = require('path');
const fs = require('fs');
const supertest = require('supertest');
const api = supertest(`http://${settings.ip}:${settings.port}`);
if (process.argv.length !== 3) throw new Error('Use: node deletePad.js $PADID');
// get the padID
const padId = process.argv[2];
// get the API Key
const filePath = path.join(__dirname, '../../APIKEY.txt');
const apikey = fs.readFileSync(filePath, {encoding: 'utf-8'});
(async () => {
let apiVersion = await api.get('/api/');
apiVersion = apiVersion.body.currentVersion;
if (!apiVersion) throw new Error('No version set in API');
// Now we know the latest API version, let's delete pad
const uri = `/api/${apiVersion}/deletePad?apikey=${apikey}&padID=${padId}`;
const deleteAttempt = await api.post(uri);
if (deleteAttempt.body.code === 1) throw new Error(`Error deleting pad ${deleteAttempt.body}`);
console.log('Deleted pad', deleteAttempt.body);
})();
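Usage per the script's own check (path under `src/bin/` assumed; the pad ID is an example):

```sh
node src/bin/deletePad.js mypadid
```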


@@ -1,18 +0,0 @@
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.


@@ -1,76 +0,0 @@
Here's how the node docs work.
Each type of heading has a description block.
## module
Stability: 3 - Stable
description and examples.
### module.property
* Type
description of the property.
### module.someFunction(x, y, [z=100])
* `x` {String} the description of the string
* `y` {Boolean} Should I stay or should I go?
* `z` {Number} How many zebras to bring.
A description of the function.
### Event: 'blerg'
* Argument: SomeClass object.
Modules don't usually raise events on themselves. `cluster` is the
only exception.
## Class: SomeClass
description of the class.
### Class Method: SomeClass.classMethod(anArg)
* `anArg` {Object} Just an argument
* `field` {String} anArg can have this field.
* `field2` {Boolean} Another field. Default: `false`.
* Return: {Boolean} `true` if it worked.
Description of the method for humans.
### someClass.nextSibling()
* Return: {SomeClass object | null} The next someClass in line.
### someClass.someProperty
* String
The indication of what someProperty is.
### Event: 'grelb'
* `isBlerg` {Boolean}
This event is emitted on instances of SomeClass, not on the module itself.
* Modules have (description, Properties, Functions, Classes, Examples)
* Properties have (type, description)
* Functions have (list of arguments, description)
* Classes have (description, Properties, Methods, Events)
* Events have (list of arguments, description)
* Methods have (list of arguments, description)
* Properties have (type, description)
# CLI usage
Run the following from the etherpad-lite root directory:
```sh
$ node src/bin/doc/generate doc/index.md --format=html --template=doc/template.html > out.html
```


@@ -1,122 +0,0 @@
#!/usr/bin/env node
'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
const fs = require('fs');
const path = require('path');
// parse the args.
// Don't use nopt or whatever for this. It's simple enough.
const args = process.argv.slice(2);
let format = 'json';
let template = null;
let inputFile = null;
args.forEach((arg) => {
if (!arg.match(/^--/)) {
inputFile = arg;
} else if (arg.match(/^--format=/)) {
format = arg.replace(/^--format=/, '');
} else if (arg.match(/^--template=/)) {
template = arg.replace(/^--template=/, '');
}
});
if (!inputFile) {
throw new Error('No input file specified');
}
console.error('Input file = %s', inputFile);
fs.readFile(inputFile, 'utf8', (er, input) => {
if (er) throw er;
// process the input for @include lines
processIncludes(inputFile, input, next);
});
const includeExpr = /^@include\s+([A-Za-z0-9-_/]+)(?:\.)?([a-zA-Z]*)$/gmi;
const includeData = {};
const processIncludes = (inputFile, input, cb) => {
const includes = input.match(includeExpr);
if (includes == null) return cb(null, input);
let errState = null;
console.error(includes);
let incCount = includes.length;
if (incCount === 0) cb(null, input);
includes.forEach((include) => {
let fname = include.replace(/^@include\s+/, '');
if (!fname.match(/\.md$/)) fname += '.md';
if (Object.prototype.hasOwnProperty.call(includeData, fname)) {
input = input.split(include).join(includeData[fname]);
incCount--;
if (incCount === 0) {
return cb(null, input);
}
}
const fullFname = path.resolve(path.dirname(inputFile), fname);
fs.readFile(fullFname, 'utf8', (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
processIncludes(fullFname, inc, (er, inc) => {
if (errState) return;
if (er) return cb(errState = er);
incCount--;
includeData[fname] = inc;
input = input.split(include).join(includeData[fname]);
if (incCount === 0) {
return cb(null, input);
}
});
});
});
};
const next = (er, input) => {
if (er) throw er;
switch (format) {
case 'json':
require('./json.js')(input, inputFile, (er, obj) => {
console.log(JSON.stringify(obj, null, 2));
if (er) throw er;
});
break;
case 'html':
require('./html.js')(input, inputFile, template, (er, html) => {
if (er) throw er;
console.log(html);
});
break;
default:
throw new Error(`Invalid format: ${format}`);
}
};


@@ -1,165 +0,0 @@
'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
const fs = require('fs');
const marked = require('marked');
const path = require('path');
const toHTML = (input, filename, template, cb) => {
const lexed = marked.lexer(input);
fs.readFile(template, 'utf8', (er, template) => {
if (er) return cb(er);
render(lexed, filename, template, cb);
});
};
module.exports = toHTML;
const render = (lexed, filename, template, cb) => {
// get the section
const section = getSection(lexed);
filename = path.basename(filename, '.md');
lexed = parseLists(lexed);
// generate the table of contents.
// this mutates the lexed contents in-place.
buildToc(lexed, filename, (er, toc) => {
if (er) return cb(er);
template = template.replace(/__FILENAME__/g, filename);
template = template.replace(/__SECTION__/g, section);
template = template.replace(/__TOC__/g, toc);
// content has to be the last thing we do with
// the lexed tokens, because it's destructive.
const content = marked.parser(lexed);
template = template.replace(/__CONTENT__/g, content);
cb(null, template);
});
};
// just update the list item text in-place.
// lists that come right after a heading are what we're after.
const parseLists = (input) => {
let state = null;
let depth = 0;
const output = [];
output.links = input.links;
input.forEach((tok) => {
if (state == null) {
if (tok.type === 'heading') {
state = 'AFTERHEADING';
}
output.push(tok);
return;
}
if (state === 'AFTERHEADING') {
if (tok.type === 'list_start') {
state = 'LIST';
if (depth === 0) {
output.push({type: 'html', text: '<div class="signature">'});
}
depth++;
output.push(tok);
return;
}
state = null;
output.push(tok);
return;
}
if (state === 'LIST') {
if (tok.type === 'list_start') {
depth++;
output.push(tok);
return;
}
if (tok.type === 'list_end') {
depth--;
if (depth === 0) {
state = null;
output.push({type: 'html', text: '</div>'});
}
output.push(tok);
return;
}
if (tok.text) {
tok.text = parseListItem(tok.text);
}
}
output.push(tok);
});
return output;
};
const parseListItem = (text) => {
text = text.replace(/\{([^}]+)\}/, '<span class="type">$1</span>');
// XXX maybe put more stuff here?
return text;
};
// section is just the first heading
const getSection = (lexed) => {
for (let i = 0, l = lexed.length; i < l; i++) {
const tok = lexed[i];
if (tok.type === 'heading') return tok.text;
}
return '';
};
const buildToc = (lexed, filename, cb) => {
let toc = [];
let depth = 0;
marked.setOptions({
headerIds: true,
headerPrefix: `${filename}_`,
});
lexed.forEach((tok) => {
if (tok.type !== 'heading') return;
if (tok.depth - depth > 1) {
return cb(new Error(`Inappropriate heading level\n${JSON.stringify(tok)}`));
}
depth = tok.depth;
const slugger = new marked.Slugger();
const id = slugger.slug(`${filename}_${tok.text.trim()}`);
toc.push(`${new Array((depth - 1) * 2 + 1).join(' ')}* <a href="#${id}">${tok.text}</a>`);
tok.text += `<span><a class="mark" href="#${id}" ` +
`id="${id}">#</a></span>`;
});
toc = marked.parse(toc.join('\n'));
cb(null, toc);
};


@@ -1,556 +0,0 @@
'use strict';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Take the lexed input, and return a JSON-encoded object
// A module looks like this: https://gist.github.com/1777387
const marked = require('marked');
const doJSON = (input, filename, cb) => {
const root = {source: filename};
const stack = [root];
let depth = 0;
let current = root;
let state = null;
const lexed = marked.lexer(input);
lexed.forEach((tok) => {
const type = tok.type;
let text = tok.text;
// <!-- type = module -->
// This is for cases where the markdown semantic structure is lacking.
if (type === 'paragraph' || type === 'html') {
const metaExpr = /<!--([^=]+)=([^-]+)-->\n*/g;
text = text.replace(metaExpr, (_0, k, v) => {
current[k.trim()] = v.trim();
return '';
});
text = text.trim();
if (!text) return;
}
if (type === 'heading' &&
!text.trim().match(/^example/i)) {
if (tok.depth - depth > 1) {
return cb(new Error(`Inappropriate heading level\n${
JSON.stringify(tok)}`));
}
// Sometimes we have two headings with a single
// blob of description. Treat as a clone.
if (current &&
state === 'AFTERHEADING' &&
depth === tok.depth) {
const clone = current;
current = newSection(tok);
current.clone = clone;
// don't keep it around on the stack.
stack.pop();
} else {
// if the level is greater than the current depth,
// then it's a child, so we should just leave the stack
// as it is.
// However, if it's a sibling or higher, then it implies
// the closure of the other sections that came before.
// root is always considered the level=0 section,
// and the lowest heading is 1, so this should always
// result in having a valid parent node.
let d = tok.depth;
while (d <= depth) {
finishSection(stack.pop(), stack[stack.length - 1]);
d++;
}
current = newSection(tok);
}
depth = tok.depth;
stack.push(current);
state = 'AFTERHEADING';
return;
} // heading
// Immediately after a heading, we can expect the following
//
// { type: 'code', text: 'Stability: ...' },
//
// a list: starting with list_start, ending with list_end,
// maybe containing other nested lists in each item.
//
// If one of these isn't found, then anything that comes between
// here and the next heading should be parsed as the desc.
let stability;
if (state === 'AFTERHEADING') {
if (type === 'code' &&
(stability = text.match(/^Stability: ([0-5])(?:\s*-\s*)?(.*)$/))) {
current.stability = parseInt(stability[1], 10);
current.stabilityText = stability[2].trim();
return;
} else if (type === 'list_start' && !tok.ordered) {
state = 'AFTERHEADING_LIST';
current.list = current.list || [];
current.list.push(tok);
current.list.level = 1;
} else {
current.desc = current.desc || [];
if (!Array.isArray(current.desc)) {
current.shortDesc = current.desc;
current.desc = [];
}
current.desc.push(tok);
state = 'DESC';
}
return;
}
if (state === 'AFTERHEADING_LIST') {
current.list.push(tok);
if (type === 'list_start') {
current.list.level++;
} else if (type === 'list_end') {
current.list.level--;
}
if (current.list.level === 0) {
state = 'AFTERHEADING';
processList(current);
}
return;
}
current.desc = current.desc || [];
current.desc.push(tok);
});
// finish any sections left open
while (root !== (current = stack.pop())) {
finishSection(current, stack[stack.length - 1]);
}
return cb(null, root);
};
module.exports = doJSON;
// go from something like this:
// [ { type: 'list_item_start' },
// { type: 'text',
// text: '`settings` Object, Optional' },
// { type: 'list_start', ordered: false },
// { type: 'list_item_start' },
// { type: 'text',
// text: 'exec: String, file path to worker file. Default: `__filename`' },
// { type: 'list_item_end' },
// { type: 'list_item_start' },
// { type: 'text',
// text: 'args: Array, string arguments passed to worker.' },
// { type: 'text',
// text: 'Default: `process.argv.slice(2)`' },
// { type: 'list_item_end' },
// { type: 'list_item_start' },
// { type: 'text',
// text: 'silent: Boolean, whether or not to send output to parent\'s stdio.' },
// { type: 'text', text: 'Default: `false`' },
// { type: 'space' },
// { type: 'list_item_end' },
// { type: 'list_end' },
// { type: 'list_item_end' },
// { type: 'list_end' } ]
// to something like:
// [ { name: 'settings',
// type: 'object',
// optional: true,
// settings:
// [ { name: 'exec',
// type: 'string',
// desc: 'file path to worker file',
// default: '__filename' },
// { name: 'args',
// type: 'array',
// default: 'process.argv.slice(2)',
// desc: 'string arguments passed to worker.' },
// { name: 'silent',
// type: 'boolean',
// desc: 'whether or not to send output to parent\'s stdio.',
// default: 'false' } ] } ]
const processList = (section) => {
const list = section.list;
const values = [];
let current;
const stack = [];
// for now, *just* build the hierarchical list
list.forEach((tok) => {
const type = tok.type;
if (type === 'space') return;
if (type === 'list_item_start') {
if (!current) {
const n = {};
values.push(n);
current = n;
} else {
current.options = current.options || [];
stack.push(current);
const n = {};
current.options.push(n);
current = n;
}
return;
} else if (type === 'list_item_end') {
if (!current) {
throw new Error(`invalid list - end without current item\n${
JSON.stringify(tok)}\n${
JSON.stringify(list)}`);
}
current = stack.pop();
} else if (type === 'text') {
if (!current) {
throw new Error(`invalid list - text without current item\n${
JSON.stringify(tok)}\n${
JSON.stringify(list)}`);
}
current.textRaw = current.textRaw || '';
current.textRaw += `${tok.text} `;
}
});
// shove the name in there for properties, since they are always
// just going to be the value etc.
if (section.type === 'property' && values[0]) {
values[0].textRaw = `\`${section.name}\` ${values[0].textRaw}`;
}
// now pull the actual values out of the text bits.
values.forEach(parseListItem);
// Now figure out what this list actually means.
// depending on the section type, the list could be different things.
switch (section.type) {
case 'ctor':
case 'classMethod':
case 'method': {
// each item is an argument, unless the name is 'return',
// in which case it's the return value.
section.signatures = section.signatures || [];
const sig = {};
section.signatures.push(sig);
sig.params = values.filter((v) => {
if (v.name === 'return') {
sig.return = v;
return false;
}
return true;
});
parseSignature(section.textRaw, sig);
break;
}
case 'property': {
// there should be only one item, which is the value.
// copy the data up to the section.
const value = values[0] || {};
delete value.name;
section.typeof = value.type;
delete value.type;
Object.keys(value).forEach((k) => {
section[k] = value[k];
});
break;
}
case 'event': {
// event: each item is an argument.
section.params = values;
break;
}
}
delete section.list;
};
// textRaw = "someobject.someMethod(a, [b=100], [c])"
const parseSignature = (text, sig) => {
let params = text.match(paramExpr);
if (!params) return;
params = params[1];
// the ] is irrelevant. [ indicates optionalness.
params = params.replace(/\]/g, '');
params = params.split(/,/);
params.forEach((p, i, _) => {
p = p.trim();
if (!p) return;
let param = sig.params[i];
let optional = false;
let def;
// [foo] -> optional
if (p.charAt(0) === '[') {
optional = true;
p = p.substr(1);
}
const eq = p.indexOf('=');
if (eq !== -1) {
def = p.substr(eq + 1);
p = p.substr(0, eq);
}
if (!param) {
param = sig.params[i] = {name: p};
}
// at this point, the name should match.
if (p !== param.name) {
console.error('Warning: invalid param "%s"', p);
console.error(` > ${JSON.stringify(param)}`);
console.error(` > ${text}`);
}
if (optional) param.optional = true;
if (def !== undefined) param.default = def;
});
};
const parseListItem = (item) => {
if (item.options) item.options.forEach(parseListItem);
if (!item.textRaw) return;
// the goal here is to find the name, type, default, and optional.
// anything left over is 'desc'
let text = item.textRaw.trim();
// text = text.replace(/^(Argument|Param)s?\s*:?\s*/i, '');
text = text.replace(/^, /, '').trim();
const retExpr = /^returns?\s*:?\s*/i;
const ret = text.match(retExpr);
if (ret) {
item.name = 'return';
text = text.replace(retExpr, '');
} else {
const nameExpr = /^['`"]?([^'`": {]+)['`"]?\s*:?\s*/;
const name = text.match(nameExpr);
if (name) {
item.name = name[1];
text = text.replace(nameExpr, '');
}
}
text = text.trim();
const defaultExpr = /\(default\s*[:=]?\s*['"`]?([^, '"`]*)['"`]?\)/i;
const def = text.match(defaultExpr);
if (def) {
item.default = def[1];
text = text.replace(defaultExpr, '');
}
text = text.trim();
const typeExpr = /^\{([^}]+)\}/;
const type = text.match(typeExpr);
if (type) {
item.type = type[1];
text = text.replace(typeExpr, '');
}
text = text.trim();
const optExpr = /^Optional\.|(?:, )?Optional$/;
const optional = text.match(optExpr);
if (optional) {
item.optional = true;
text = text.replace(optExpr, '');
}
text = text.replace(/^\s*-\s*/, '');
text = text.trim();
if (text) item.desc = text;
};
const finishSection = (section, parent) => {
if (!section || !parent) {
throw new Error(`Invalid finishSection call\n${
JSON.stringify(section)}\n${
JSON.stringify(parent)}`);
}
if (!section.type) {
section.type = 'module';
if (parent && (parent.type === 'misc')) {
section.type = 'misc';
}
section.displayName = section.name;
section.name = section.name.toLowerCase()
.trim().replace(/\s+/g, '_');
}
if (section.desc && Array.isArray(section.desc)) {
section.desc.links = section.desc.links || [];
section.desc = marked.parser(section.desc);
}
if (!section.list) section.list = [];
processList(section);
// classes sometimes have various 'ctor' children
// which are actually just descriptions of a constructor
// class signature.
// Merge them into the parent.
if (section.type === 'class' && section.ctors) {
section.signatures = section.signatures || [];
const sigs = section.signatures;
section.ctors.forEach((ctor) => {
ctor.signatures = ctor.signatures || [{}];
ctor.signatures.forEach((sig) => {
sig.desc = ctor.desc;
});
sigs.push(...ctor.signatures);
});
delete section.ctors;
}
// properties are a bit special.
// their "type" is the type of object, not "property"
if (section.properties) {
section.properties.forEach((p) => {
if (p.typeof) p.type = p.typeof;
else delete p.type;
delete p.typeof;
});
}
// handle clones
if (section.clone) {
const clone = section.clone;
delete section.clone;
delete clone.clone;
deepCopy(section, clone);
finishSection(clone, parent);
}
let plur;
if (section.type.slice(-1) === 's') {
plur = `${section.type}es`;
} else if (section.type.slice(-1) === 'y') {
plur = section.type.replace(/y$/, 'ies');
} else {
plur = `${section.type}s`;
}
// if the parent's type is 'misc', then it's just a random
// collection of stuff, like the "globals" section.
// Make the children top-level items.
if (section.type === 'misc') {
Object.keys(section).forEach((k) => {
switch (k) {
case 'textRaw':
case 'name':
case 'type':
case 'desc':
case 'miscs':
return;
default:
if (parent.type === 'misc') {
return;
}
if (Array.isArray(section[k]) && parent[k]) {
parent[k] = parent[k].concat(section[k]);
} else if (!parent[k]) {
parent[k] = section[k];
} else {
// parent already has, and it's not an array.
return;
}
}
});
}
parent[plur] = parent[plur] || [];
parent[plur].push(section);
};
// Not a general purpose deep copy.
// But sufficient for these basic things.
const deepCopy = (src, dest) => {
Object.keys(src).filter((k) => !Object.prototype.hasOwnProperty.call(dest, k)).forEach((k) => {
dest[k] = deepCopy_(src[k]);
});
};
const deepCopy_ = (src) => {
if (!src) return src;
if (Array.isArray(src)) {
const c = new Array(src.length);
src.forEach((v, i) => {
c[i] = deepCopy_(v);
});
return c;
}
if (typeof src === 'object') {
const c = {};
Object.keys(src).forEach((k) => {
c[k] = deepCopy_(src[k]);
});
return c;
}
return src;
};
// these parse out the contents of an H# tag
const eventExpr = /^Event(?::|\s)+['"]?([^"']+).*$/i;
const classExpr = /^Class:\s*([^ ]+).*?$/i;
const propExpr = /^(?:property:?\s*)?[^.]+\.([^ .()]+)\s*?$/i;
const braceExpr = /^(?:property:?\s*)?[^.[]+(\[[^\]]+\])\s*?$/i;
const classMethExpr =
/^class\s*method\s*:?[^.]+\.([^ .()]+)\([^)]*\)\s*?$/i;
const methExpr =
/^(?:method:?\s*)?(?:[^.]+\.)?([^ .()]+)\([^)]*\)\s*?$/i;
const newExpr = /^new ([A-Z][a-z]+)\([^)]*\)\s*?$/;
const paramExpr = /\((.*)\);?$/;
const newSection = (tok) => {
const section = {};
// infer the type from the text.
const text = section.textRaw = tok.text;
if (text.match(eventExpr)) {
section.type = 'event';
section.name = text.replace(eventExpr, '$1');
} else if (text.match(classExpr)) {
section.type = 'class';
section.name = text.replace(classExpr, '$1');
} else if (text.match(braceExpr)) {
section.type = 'property';
section.name = text.replace(braceExpr, '$1');
} else if (text.match(propExpr)) {
section.type = 'property';
section.name = text.replace(propExpr, '$1');
} else if (text.match(classMethExpr)) {
section.type = 'classMethod';
section.name = text.replace(classMethExpr, '$1');
} else if (text.match(methExpr)) {
section.type = 'method';
section.name = text.replace(methExpr, '$1');
} else if (text.match(newExpr)) {
section.type = 'ctor';
section.name = text.replace(newExpr, '$1');
} else {
section.name = text;
}
return section;
};


@@ -1,26 +0,0 @@
#!/usr/bin/env node
// Checks the health of Etherpad by visiting http://localhost:9001/health. Returns 0 on success, 1
// on error as required by the Dockerfile HEALTHCHECK instruction.
'use strict';
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const assert = require('assert').strict;
const superagent = require('superagent');
(async () => {
const res = await superagent.get('http://localhost:9001/health')
.accept('application/health+json')
.buffer(true)
.parse(superagent.parse['application/json']);
assert(res.ok, `Unexpected HTTP status: ${res.status}`);
assert.equal(res.type, 'application/health+json');
const {body: {status} = {}} = res;
assert(status != null);
assert.equal(typeof status, 'string');
assert(['pass', 'ok', 'up'].includes(status.toLowerCase()), `Unexpected status: ${status}`);
})();
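The same endpoint can be probed by hand when debugging a container, mirroring what the script asserts:

```sh
curl -i -H 'Accept: application/health+json' http://localhost:9001/health
# expect HTTP 200, an application/health+json body, and a status of pass/ok/up
```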


@@ -1,64 +0,0 @@
'use strict';
/*
* This is a debug tool. It helps to extract all data of a pad from a
* production environment and move it to a development environment in order to
* reproduce bugs there. It outputs a dirty.db file.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const util = require('util');
if (process.argv.length !== 3) throw new Error('Use: node extractPadData.js $PADID');
// get the padID
const padId = process.argv[2];
(async () => {
// initialize database
require('../node/utils/Settings');
const db = require('../node/db/DB');
await db.init();
// load extra modules
const dirtyDB = require('dirty');
const padManager = require('../node/db/PadManager');
// initialize output database
const dirty = dirtyDB(`${padId}.db`);
// Promise set function
const set = util.promisify(dirty.set.bind(dirty));
// array in which required key values will be accumulated
const neededDBValues = [`pad:${padId}`];
// get the actual pad object
const pad = await padManager.getPad(padId);
// add all authors
neededDBValues.push(...pad.getAllAuthors().map((author) => `globalAuthor:${author}`));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
for (const dbkey of neededDBValues) {
let dbvalue = await db.get(dbkey);
if (dbvalue && typeof dbvalue !== 'object') {
dbvalue = JSON.parse(dbvalue);
}
await set(dbkey, dbvalue);
}
console.log('finished');
})();
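Usage per the script's own check; the output is a dirty-db file named after the pad, written to the current directory (path under `src/bin/` assumed):

```sh
node src/bin/extractPadData.js mypadid   # writes mypadid.db
```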


@@ -1,22 +0,0 @@
#!/bin/bash
#
# Run Etherpad directly, assuming all the dependencies are already installed.
#
# Useful for developers, or users that know what they are doing. If you just
# upgraded Etherpad version, installed a new dependency, or are simply unsure
# of what to do, please execute bin/installDeps.sh once before running this
# script.
set -eu
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Source constants and useful functions
. src/bin/functions.sh
echo "Running directly, without checking/installing dependencies"
# run Etherpad main class
exec node --import tsx src/node/server.ts "$@"


@@ -1,64 +0,0 @@
# minimum required node version
REQUIRED_NODE_MAJOR=12
REQUIRED_NODE_MINOR=13
# minimum required npm version
REQUIRED_NPM_MAJOR=5
REQUIRED_NPM_MINOR=5
pecho() { printf %s\\n "$*"; }
log() { pecho "$@"; }
error() { log "ERROR: $@" >&2; }
fatal() { error "$@"; exit 1; }
is_cmd() { command -v "$@" >/dev/null 2>&1; }
get_program_version() {
PROGRAM="$1"
KIND="${2:-full}"
PROGRAM_VERSION_STRING=$($PROGRAM --version)
PROGRAM_VERSION_STRING=${PROGRAM_VERSION_STRING#"v"}
DETECTED_MAJOR=$(pecho "$PROGRAM_VERSION_STRING" | cut -s -d "." -f 1)
[ -n "$DETECTED_MAJOR" ] || fatal "Cannot extract $PROGRAM major version from version string \"$PROGRAM_VERSION_STRING\""
case "$DETECTED_MAJOR" in
''|*[!0-9]*)
fatal "$PROGRAM_LABEL major version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MAJOR\""
;;
esac
DETECTED_MINOR=$(pecho "$PROGRAM_VERSION_STRING" | cut -s -d "." -f 2)
[ -n "$DETECTED_MINOR" ] || fatal "Cannot extract $PROGRAM minor version from version string \"$PROGRAM_VERSION_STRING\""
case "$DETECTED_MINOR" in
''|*[!0-9]*)
fatal "$PROGRAM_LABEL minor version from \"$VERSION_STRING\" is not a number. Detected: \"$DETECTED_MINOR\""
esac
case $KIND in
major)
echo $DETECTED_MAJOR
exit;;
minor)
echo $DETECTED_MINOR
exit;;
*)
echo $DETECTED_MAJOR.$DETECTED_MINOR
exit;;
esac
echo $VERSION
}
require_minimal_version() {
PROGRAM_LABEL="$1"
VERSION="$2"
REQUIRED_MAJOR="$3"
REQUIRED_MINOR="$4"
VERSION_MAJOR=$(pecho "$VERSION" | cut -s -d "." -f 1)
VERSION_MINOR=$(pecho "$VERSION" | cut -s -d "." -f 2)
[ "$VERSION_MAJOR" -gt "$REQUIRED_MAJOR" ] || ([ "$VERSION_MAJOR" -eq "$REQUIRED_MAJOR" ] && [ "$VERSION_MINOR" -ge "$REQUIRED_MINOR" ]) \
|| fatal "Your $PROGRAM_LABEL version \"$VERSION_MAJOR.$VERSION_MINOR\" is too old. $PROGRAM_LABEL $REQUIRED_MAJOR.$REQUIRED_MINOR.x or higher is required."
}
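These helpers are meant to be sourced rather than executed; the consuming pattern, as used by the install script below:

```sh
. src/bin/functions.sh
require_minimal_version "nodejs" "$(get_program_version "node")" \
    "$REQUIRED_NODE_MAJOR" "$REQUIRED_NODE_MINOR"
```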


@@ -1,99 +0,0 @@
'use strict';
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const util = require('util');
const startTime = Date.now();
const log = (str) => {
console.log(`${(Date.now() - startTime) / 1000}\t${str}`);
};
const unescape = (val) => {
// value is a string
if (val.substr(0, 1) === "'") {
val = val.substr(0, val.length - 1).substr(1);
return val.replace(/\\[0nrbtZ\\'"]/g, (s) => {
switch (s) {
case '\\0': return '\0';
case '\\n': return '\n';
case '\\r': return '\r';
case '\\b': return '\b';
case '\\t': return '\t';
case '\\Z': return '\x1a';
default: return s.substr(1);
}
});
}
// value is a boolean or NULL
if (val === 'NULL') {
return null;
}
if (val === 'true') {
return true;
}
if (val === 'false') {
return false;
}
// value is a number
return val;
};
(async () => {
const fs = require('fs');
const log4js = require('log4js');
const readline = require('readline');
const settings = require('../node/utils/Settings');
const ueberDB = require('ueberdb2');
const dbWrapperSettings = {
cache: 0,
writeInterval: 100,
json: false, // data is already json encoded
};
const db = new ueberDB.database( // eslint-disable-line new-cap
settings.dbType,
settings.dbSettings,
dbWrapperSettings,
log4js.getLogger('ueberDB'));
const sqlFile = process.argv[2];
// stop if the settings file is not set
if (!sqlFile) throw new Error('Use: node importSqlFile.js $SQLFILE');
log('initializing db');
await util.promisify(db.init.bind(db))();
log('done');
log(`Opening ${sqlFile}...`);
const stream = fs.createReadStream(sqlFile, {encoding: 'utf8'});
log(`Reading ${sqlFile}...`);
let keyNo = 0;
for await (const l of readline.createInterface({input: stream, crlfDelay: Infinity})) {
if (l.substr(0, 27) === 'REPLACE INTO store VALUES (') {
const pos = l.indexOf("', '");
const key = l.substr(28, pos - 28);
let value = l.substr(pos + 3);
value = value.substr(0, value.length - 2);
console.log(`key: ${key} val: ${value}`);
console.log(`unval: ${unescape(value)}`);
db.set(key, unescape(value), null);
keyNo++;
if (keyNo % 1000 === 0) log(` ${keyNo}`);
}
}
process.stdout.write('\n');
process.stdout.write('done. waiting for db to finish transaction. ' +
'depending on the DBMS this may take some time...\n');
await util.promisify(db.close.bind(db))();
log(`finished, imported ${keyNo} keys.`);
})();
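Usage per the script's own check; the SQL dump name is an example (path under `src/bin/` assumed):

```sh
node src/bin/importSqlFile.js backup.sql
```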


@@ -1,52 +0,0 @@
#!/bin/sh
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Source constants and useful functions
. src/bin/functions.sh
is_cmd pnpm || npm install pnpm -g
# Is node installed?
# Not checking io.js, default installation creates a symbolic link to node
is_cmd node || fatal "Please install node.js ( https://nodejs.org )"
# Check node version
require_minimal_version "nodejs" "$(get_program_version "node")" \
"$REQUIRED_NODE_MAJOR" "$REQUIRED_NODE_MINOR"
# Get the name of the settings file
settings="settings.json"
a='';
for arg in "$@"; do
if [ "$a" = "--settings" ] || [ "$a" = "-s" ]; then settings=$arg; fi
a=$arg
done
# Does a $settings exist? if not copy the template
if [ ! -f "$settings" ]; then
log "Copy the settings template to $settings..."
cp settings.json.template "$settings" || exit 1
fi
log "Installing dependencies..."
cd src
if [ -z "${ETHERPAD_PRODUCTION}" ]; then
log "Installing dev dependencies"
pnpm i || exit 1
else
log "Installing production dependencies"
pnpm i --production || exit 1
fi
# Remove all minified data to force node creating it new
log "Clearing minified cache..."
rm -f var/minified*
exit 0
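The dev/production split is driven entirely by the `ETHERPAD_PRODUCTION` environment variable tested above:

```sh
src/bin/installDeps.sh                        # development install (includes dev dependencies)
ETHERPAD_PRODUCTION=1 src/bin/installDeps.sh  # production install (pnpm i --production)
```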


@@ -1,34 +0,0 @@
@echo off
:: Change directory to etherpad-lite root
cd /D "%~dp0\..\.."
:: Is node installed?
cmd /C node -e "" || ( echo "Please install node.js ( https://nodejs.org )" && exit /B 1 )
echo _
echo Ensure that all dependencies are up to date... If this is the first time you have run Etherpad please be patient.
mkdir node_modules
cd /D node_modules
mklink /D "ep_etherpad-lite" "..\src"
cd /D ..\src
cmd /C pnpm i || exit /B 1
cd /D "%~dp0\..\.."
echo _
echo Clearing cache...
del /S var\minified*
echo _
echo Setting up settings.json...
IF NOT EXIST settings.json (
echo Can't find settings.json.
echo Copying settings.json.template...
cmd /C copy settings.json.template settings.json || exit /B 1
)
echo _
echo Installed Etherpad! To run Etherpad type start.bat


@@ -1,56 +0,0 @@
'use strict';
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
(async () => {
// This script requires that you have modified your settings.json file
// to work with a real database. Please make a backup of your dirty.db
// file before using this script, just to be safe.
// It might be necessary to run the script using more memory:
// `node --max-old-space-size=4096 src/bin/migrateDirtyDBtoRealDB.js`
const dirtyDb = require('dirty');
const log4js = require('log4js');
const settings = require('../node/utils/Settings');
const ueberDB = require('ueberdb2');
const util = require('util');
const dbWrapperSettings = {
cache: '0', // The cache slows things down when you're mostly writing.
writeInterval: 0, // Write directly to the database, don't buffer
};
const db = new ueberDB.database( // eslint-disable-line new-cap
settings.dbType,
settings.dbSettings,
dbWrapperSettings,
log4js.getLogger('ueberDB'));
await db.init();
console.log('Waiting for dirtyDB to parse its file.');
const dirty = dirtyDb(`${__dirname}/../../var/dirty.db`);
const length = await new Promise((resolve) => { dirty.once('load', resolve); });
console.log(`Found ${length} records, processing now.`);
const p = [];
let numWritten = 0;
dirty.forEach((key, value) => {
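// ueberdb2's set() takes two callbacks here: a buffer callback (bcb) that
// fires once the write has been queued, and a write callback (wcb) that
// fires once the record has actually been written to the database.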
let bcb, wcb;
p.push(new Promise((resolve, reject) => {
bcb = (err) => { if (err != null) return reject(err); };
wcb = (err) => {
if (err != null) return reject(err);
if (++numWritten % 100 === 0) console.log(`Wrote record ${numWritten} of ${length}`);
resolve();
};
}));
db.set(key, value, bcb, wcb);
});
await Promise.all(p);
console.log(`Wrote all ${numWritten} records`);
await util.promisify(db.close.bind(db))();
console.log('Finished.');
})();

View file

@ -1,9 +0,0 @@
A simple NSIS script to install Etherpad (Server) on Windows and start it.
# TODO
1. i18n
1. Run as Service
1. Display messages during install
# License
Apache 2

Binary file not shown.


View file

@ -1,55 +0,0 @@
;Include Modern UI
!include "MUI2.nsh"
!include x64.nsh
;--------------------------------
;Styling
!define MUI_ICON "brand.ico"
Icon "brand.ico"
BrandingText "Etherpad Foundation"
Name "Etherpad Server"
OutFile "..\..\..\etherpad-win.exe"
!insertmacro MUI_LANGUAGE "English"
Page directory
Page instfiles
; The default installation directory
InstallDir "$PROGRAMFILES64\Etherpad Foundation\Etherpad Server"
Section
SectionIn RO
${If} ${RunningX64}
DetailPrint "Installer running on x64 host"
${Else}
Abort "Unsupported CPU architecture (only x64 is supported)"
${Endif}
; Set output path to the installation directory.
SetOutPath $INSTDIR
; Put files there
File /r "..\..\..\..\etherpad-zip\*"
SectionEnd
Section
CreateDirectory "$SMPROGRAMS\Etherpad Foundation"
CreateShortCut "$SMPROGRAMS\Etherpad Foundation\Etherpad Server.lnk" "$INSTDIR\start.bat" "brand.ico" "Etherpad Server"
CreateShortCut "$SMPROGRAMS\Etherpad Foundation\Etherpad.lnk" "http://127.0.0.1:9001" "brand.ico" "Etherpad"
CreateShortCut "$SMPROGRAMS\Etherpad Foundation\Etherpad Admin.lnk" "http://127.0.0.1:9001/admin" "brand.ico" "Etherpad Admin"
CreateShortCut "$SMPROGRAMS\Etherpad Foundation\Uninstall Etherpad Server.lnk" "$INSTDIR\uninstall.exe"
WriteUninstaller "$INSTDIR\uninstall.exe"
Exec '$INSTDIR\start.bat'
SectionEnd
UninstPage instfiles
Section Uninstall
Delete "$INSTDIR\*"
Delete "$INSTDIR\uninstall.exe"
RMDir "$INSTDIR"
SetAutoClose false
SectionEnd

View file

@ -1,59 +0,0 @@
The files in this folder are for Plugin developers.
# Get suggestions to improve your Plugin
This code checks your plugin for common issues and suggests improvements.
No changes will be made to your project.
```
node src/bin/plugins/checkPlugin.js $PLUGIN_NAME$
```
# Basic Example:
```
node src/bin/plugins/checkPlugin.js ep_webrtc
```
## Autofixing - will autofix any issues it can
```
node src/bin/plugins/checkPlugin.js ep_whatever autofix
```
## Autocommitting - fix issues and commit
```
node src/bin/plugins/checkPlugin.js ep_whatever autocommit
```
## Autopush - fix issues, commit, push, and publish (highly dangerous)
```
node src/bin/plugins/checkPlugin.js ep_whatever autopush
```
# All the plugins
Replace `johnmclear` with your GitHub username
```
# Clones
cd node_modules
GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
cd ..
# autofixes and autocommits each plugin (use autopush to also push & publish to npm)
for dir in node_modules/ep_*; do
dir=${dir#node_modules/}
[ "$dir" != ep_etherpad-lite ] || continue
node src/bin/plugins/checkPlugin.js "$dir" autocommit
done
```
# Automating update of ether organization plugins
```
getCorePlugins.sh
updateCorePlugins.sh
```

View file

@ -1,420 +0,0 @@
'use strict';
/*
* Usage -- see README.md
*
* Normal usage: node src/bin/plugins/checkPlugin.js ep_whatever
* Auto fix the things it can: node src/bin/plugins/checkPlugin.js ep_whatever autofix
* Auto fix and commit: node src/bin/plugins/checkPlugin.js ep_whatever autocommit
* Auto fix, commit, push and publish to npm (highly dangerous):
* node src/bin/plugins/checkPlugin.js ep_whatever autopush
*/
const process = require('process');
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const assert = require('assert').strict;
const fs = require('fs');
const fsp = fs.promises;
const childProcess = require('child_process');
const log4js = require('log4js');
const path = require('path');
const logger = log4js.getLogger('checkPlugin');
(async () => {
// get plugin name & path from user input
const pluginName = process.argv[2];
if (!pluginName) throw new Error('no plugin name specified');
logger.info(`Checking the plugin: ${pluginName}`);
const epRootDir = await fsp.realpath(path.join(await fsp.realpath(__dirname), '../../..'));
logger.info(`Etherpad root directory: ${epRootDir}`);
process.chdir(epRootDir);
const pluginPath = await fsp.realpath(`node_modules/${pluginName}`);
logger.info(`Plugin directory: ${pluginPath}`);
const epSrcDir = await fsp.realpath(path.join(epRootDir, 'src'));
const optArgs = process.argv.slice(3);
const autoPush = optArgs.includes('autopush');
const autoCommit = autoPush || optArgs.includes('autocommit');
const autoFix = autoCommit || optArgs.includes('autofix');
const execSync = (cmd, opts = {}) => (childProcess.execSync(cmd, {
cwd: `${pluginPath}/`,
...opts,
}) || '').toString().replace(/\n+$/, '');
const writePackageJson = async (obj) => {
let s = JSON.stringify(obj, null, 2);
if (s.length && s.slice(s.length - 1) !== '\n') s += '\n';
return await fsp.writeFile(`${pluginPath}/package.json`, s);
};
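// Compares the expected entries in `want` against `got`, warning about
// mismatches. With autofix enabled, `got` is updated in place; returns
// whether anything was changed.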
const checkEntries = (got, want) => {
let changed = false;
for (const [key, val] of Object.entries(want)) {
try {
assert.deepEqual(got[key], val);
} catch (err) {
logger.warn(`${key} possibly outdated.`);
logger.warn(err.message);
if (autoFix) {
got[key] = val;
changed = true;
}
}
}
return changed;
};
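// Ensures the dependencies listed in `wantDeps` appear in the package.json
// section named by `key`. A value of null means the dependency should be
// removed; `overwrite: false` leaves an existing version specifier alone.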
const updateDeps = async (parsedPackageJson, key, wantDeps) => {
const {[key]: deps = {}} = parsedPackageJson;
let changed = false;
for (const [pkg, verInfo] of Object.entries(wantDeps)) {
const {ver, overwrite = true} =
typeof verInfo === 'string' || verInfo == null ? {ver: verInfo} : verInfo;
if (deps[pkg] === ver || (deps[pkg] == null && ver == null)) continue;
if (deps[pkg] == null) {
logger.warn(`Missing dependency in ${key}: '${pkg}': '${ver}'`);
} else {
if (!overwrite) continue;
logger.warn(`Dependency mismatch in ${key}: '${pkg}': '${ver}' (current: ${deps[pkg]})`);
}
if (autoFix) {
if (ver == null) delete deps[pkg];
else deps[pkg] = ver;
changed = true;
}
}
if (changed) {
parsedPackageJson[key] = deps;
await writePackageJson(parsedPackageJson);
}
};
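// Sanity-checks the plugin's git repository: refuses to continue if the
// working directory is dirty, and (for autocommit/autopush) verifies that a
// usable branch is checked out and that there are no unpushed commits.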
const prepareRepo = () => {
const modified = execSync('git diff-files --name-status');
if (modified !== '') throw new Error(`working directory has modifications:\n${modified}`);
const untracked = execSync('git ls-files -o --exclude-standard');
if (untracked !== '') throw new Error(`working directory has untracked files:\n${untracked}`);
const indexStatus = execSync('git diff-index --cached --name-status HEAD');
if (indexStatus !== '') throw new Error(`uncommitted staged changes to files:\n${indexStatus}`);
let br;
if (autoCommit) {
br = execSync('git symbolic-ref HEAD');
if (!br.startsWith('refs/heads/')) throw new Error('detached HEAD');
br = br.replace(/^refs\/heads\//, '');
execSync('git rev-parse --verify -q HEAD^0 || ' +
`{ echo "Error: no commits on ${br}" >&2; exit 1; }`);
execSync('git config --get user.name');
execSync('git config --get user.email');
}
if (autoPush) {
if (!['master', 'main'].includes(br)) throw new Error('master/main not checked out');
execSync('git rev-parse --verify @{u}');
execSync('git pull --ff-only', {stdio: 'inherit'});
if (execSync('git rev-list @{u}...') !== '') throw new Error('repo contains unpushed commits');
}
};
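// Compares the plugin's copy of dstFn against the reference copy at srcFn.
// With autofix enabled the file is (re)created from the reference; with
// overwrite disabled, existing contents are left alone.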
const checkFile = async (srcFn, dstFn, overwrite = true) => {
const outFn = path.join(pluginPath, dstFn);
const wantContents = await fsp.readFile(srcFn, {encoding: 'utf8'});
let gotContents = null;
try {
gotContents = await fsp.readFile(outFn, {encoding: 'utf8'});
} catch (err) { /* treat as if the file doesn't exist */ }
try {
assert.equal(gotContents, wantContents);
} catch (err) {
logger.warn(`File ${dstFn} does not match the default`);
logger.warn(err.message);
if (!overwrite && gotContents != null) {
logger.warn('Leaving existing contents alone.');
return;
}
if (autoFix) {
await fsp.mkdir(path.dirname(outFn), {recursive: true});
await fsp.writeFile(outFn, wantContents);
}
}
};
if (autoPush) {
logger.warn('Auto push is enabled, I hope you know what you are doing...');
}
const files = await fsp.readdir(pluginPath);
// For some files we need to know the actual file name. Not compulsory, but it might help in the future.
const readMeFileName = files.filter((f) => f === 'README' || f === 'README.md')[0];
if (!files.includes('.git')) throw new Error('No .git folder, aborting');
prepareRepo();
const workflows = ['backend-tests.yml', 'frontend-tests.yml', 'npmpublish.yml'];
await Promise.all(workflows.map(async (fn) => {
await checkFile(`src/bin/plugins/lib/${fn}`, `.github/workflows/${fn}`);
}));
await checkFile('src/bin/plugins/lib/dependabot.yml', '.github/dependabot.yml');
if (!files.includes('package.json')) {
logger.warn('no package.json, please create');
} else {
const packageJSON =
await fsp.readFile(`${pluginPath}/package.json`, {encoding: 'utf8', flag: 'r'});
const parsedPackageJSON = JSON.parse(packageJSON);
await updateDeps(parsedPackageJSON, 'devDependencies', {
'eslint': '^8.14.0',
'eslint-config-etherpad': '^3.0.13',
// Changing the TypeScript version can break plugin code, so leave it alone if present.
'typescript': {ver: '^4.6.4', overwrite: false},
// These were moved to eslint-config-etherpad's dependencies so they can be removed:
'@typescript-eslint/eslint-plugin': null,
'@typescript-eslint/parser': null,
'eslint-import-resolver-typescript': null,
'eslint-plugin-cypress': null,
'eslint-plugin-eslint-comments': null,
'eslint-plugin-import': null,
'eslint-plugin-mocha': null,
'eslint-plugin-node': null,
'eslint-plugin-prefer-arrow': null,
'eslint-plugin-promise': null,
'eslint-plugin-you-dont-need-lodash-underscore': null,
});
await updateDeps(parsedPackageJSON, 'peerDependencies', {
// Some plugins require a newer version of Etherpad so don't overwrite if already set.
'ep_etherpad-lite': {ver: '>=1.8.6', overwrite: false},
});
await updateDeps(parsedPackageJSON, 'engines', {
node: '>=12.17.0',
});
if (parsedPackageJSON.eslintConfig != null && autoFix) {
delete parsedPackageJSON.eslintConfig;
await writePackageJson(parsedPackageJSON);
}
if (files.includes('.eslintrc.js')) {
const [from, to] = [`${pluginPath}/.eslintrc.js`, `${pluginPath}/.eslintrc.cjs`];
if (!files.includes('.eslintrc.cjs')) {
if (autoFix) {
await fsp.rename(from, to);
} else {
logger.warn(`please rename ${from} to ${to}`);
}
} else {
logger.error(`both ${from} and ${to} exist; delete ${from}`);
}
} else {
await checkFile('src/bin/plugins/lib/eslintrc.cjs', '.eslintrc.cjs', false);
}
if (checkEntries(parsedPackageJSON, {
funding: {
type: 'individual',
url: 'https://etherpad.org/',
},
})) await writePackageJson(parsedPackageJSON);
if (parsedPackageJSON.scripts == null) parsedPackageJSON.scripts = {};
if (checkEntries(parsedPackageJSON.scripts, {
'lint': 'eslint .',
'lint:fix': 'eslint --fix .',
})) await writePackageJson(parsedPackageJSON);
}
if (!files.includes('package-lock.json')) {
logger.warn('package-lock.json not found');
if (!autoFix) {
logger.warn('Run npm install in the plugin folder and commit the package-lock.json file.');
}
}
const fillTemplate = async (templateFilename, outputFilename) => {
const contents = (await fsp.readFile(templateFilename, 'utf8'))
.replace(/\[name of copyright owner\]/g, execSync('git config user.name'))
.replace(/\[plugin_name\]/g, pluginName)
.replace(/\[yyyy\]/g, new Date().getFullYear());
await fsp.writeFile(outputFilename, contents);
};
if (!readMeFileName) {
logger.warn('README.md file not found, please create');
if (autoFix) {
logger.info('Autofixing missing README.md file');
logger.info('Please edit the README.md file further to include plugin-specific details.');
await fillTemplate('src/bin/plugins/lib/README.md', `${pluginPath}/README.md`);
}
}
if (!files.includes('CONTRIBUTING') && !files.includes('CONTRIBUTING.md')) {
logger.warn('CONTRIBUTING.md file not found, please create');
if (autoFix) {
logger.info('Autofixing missing CONTRIBUTING.md file, please edit the CONTRIBUTING.md ' +
'file further to include plugin specific details.');
await fillTemplate('src/bin/plugins/lib/CONTRIBUTING.md', `${pluginPath}/CONTRIBUTING.md`);
}
}
if (readMeFileName) {
let readme =
await fsp.readFile(`${pluginPath}/${readMeFileName}`, {encoding: 'utf8', flag: 'r'});
if (!readme.toLowerCase().includes('license')) {
logger.warn('No license section in README');
if (autoFix) {
logger.warn('Please add License section to README manually.');
}
}
// eslint-disable-next-line max-len
const publishBadge = `![Publish Status](https://github.com/ether/${pluginName}/workflows/Node.js%20Package/badge.svg)`;
// eslint-disable-next-line max-len
const testBadge = `![Backend Tests Status](https://github.com/ether/${pluginName}/workflows/Backend%20tests/badge.svg)`;
if (readme.toLowerCase().includes('travis')) {
logger.warn('Remove Travis badges');
}
if (!readme.includes('workflows/Node.js%20Package/badge.svg')) {
logger.warn('No Github workflow badge detected');
if (autoFix) {
readme = `${publishBadge} ${testBadge}\n\n${readme}`;
// write readme to file system
await fsp.writeFile(`${pluginPath}/${readMeFileName}`, readme);
logger.info('Wrote Github workflow badges to README');
}
}
}
if (!files.includes('LICENSE') && !files.includes('LICENSE.md')) {
logger.warn('LICENSE file not found, please create');
if (autoFix) {
logger.info('Autofixing missing LICENSE file (Apache 2.0).');
await fsp.copyFile('src/bin/plugins/lib/LICENSE', `${pluginPath}/LICENSE`);
}
}
if (!files.includes('.gitignore')) {
logger.warn('.gitignore file not found, please create. .gitignore files are useful to ' +
"ensure files aren't incorrectly commited to a repository.");
if (autoFix) {
logger.info('Autofixing missing .gitignore file');
const gitignore =
await fsp.readFile('src/bin/plugins/lib/gitignore', {encoding: 'utf8', flag: 'r'});
await fsp.writeFile(`${pluginPath}/.gitignore`, gitignore);
}
} else {
let gitignore =
await fsp.readFile(`${pluginPath}/.gitignore`, {encoding: 'utf8', flag: 'r'});
if (!gitignore.includes('node_modules/')) {
logger.warn('node_modules/ missing from .gitignore');
if (autoFix) {
gitignore += 'node_modules/';
await fsp.writeFile(`${pluginPath}/.gitignore`, gitignore);
}
}
}
// if we include templates but don't have translations...
if (files.includes('templates') && !files.includes('locales')) {
logger.warn('Translations not found, please create. ' +
'Translation files help with Etherpad accessibility.');
}
if (files.includes('.ep_initialized')) {
logger.warn(
'.ep_initialized found, please remove. .ep_initialized should never be committed to git ' +
'and should only exist once the plugin has been executed one time.');
if (autoFix) {
logger.info('Autofixing incorrectly existing .ep_initialized file');
await fsp.unlink(`${pluginPath}/.ep_initialized`);
}
}
if (files.includes('npm-debug.log')) {
logger.warn('npm-debug.log found, please remove. npm-debug.log should never be committed to ' +
'your repository.');
if (autoFix) {
logger.info('Autofixing incorrectly existing npm-debug.log file');
await fsp.unlink(`${pluginPath}/npm-debug.log`);
}
}
if (files.includes('static')) {
const staticFiles = await fsp.readdir(`${pluginPath}/static`);
if (!staticFiles.includes('tests')) {
logger.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
}
} else {
logger.warn('Test files not found, please create tests. https://github.com/ether/etherpad-lite/wiki/Creating-a-plugin#writing-and-running-front-end-tests-for-your-plugin');
}
// Install dependencies so we can run ESLint. This should also create or update package-lock.json
// if autoFix is enabled.
const npmInstall = `npm install${autoFix ? '' : ' --no-package-lock'}`;
execSync(npmInstall, {stdio: 'inherit'});
// Create the ep_etherpad-lite symlink if necessary. This must be done after running `npm install`
// because that command nukes the symlink.
try {
const d = await fsp.realpath(path.join(pluginPath, 'node_modules/ep_etherpad-lite'));
assert.equal(d, epSrcDir);
} catch (err) {
execSync(`${npmInstall} --no-save ep_etherpad-lite@file:${epSrcDir}`, {stdio: 'inherit'});
}
// linting begins
try {
logger.info('Linting...');
const lintCmd = autoFix ? 'npx eslint --fix .' : 'npx eslint .';
execSync(lintCmd, {stdio: 'inherit'});
} catch (e) {
// eslint exits with a non-zero status when it finds problems; that's expected here
logger.info('Manual linting probably required, check with: npm run lint');
}
// linting ends.
if (autoFix) {
const unchanged = JSON.parse(execSync(
'untracked=$(git ls-files -o --exclude-standard) || exit 1; ' +
'git diff-files --quiet && [ -z "$untracked" ] && echo true || echo false'));
if (!unchanged) {
// Display a diff of changes. Git doesn't diff untracked files, so they must be added to the
// index. Use a temporary index file to avoid modifying Git's default index file.
execSync('git read-tree HEAD; git add -A && git diff-index -p --cached HEAD && echo ""', {
env: {...process.env, GIT_INDEX_FILE: '.git/checkPlugin.index'},
stdio: 'inherit',
});
await fsp.unlink(`${pluginPath}/.git/checkPlugin.index`);
const commitCmd = [
'git add -A',
'git commit -m "autofixes from Etherpad checkPlugin.js"',
].join(' && ');
if (autoCommit) {
logger.info('Committing changes...');
execSync(commitCmd, {stdio: 'inherit'});
} else {
logger.info('Fixes applied. Check the above git diff then run the following command:');
logger.info(`(cd node_modules/${pluginName} && ${commitCmd})`);
}
const pushCmd = 'git push';
if (autoPush) {
logger.info('Pushing new commit...');
execSync(pushCmd, {stdio: 'inherit'});
} else {
logger.info('Changes committed. To push, run the following command:');
logger.info(`(cd node_modules/${pluginName} && ${pushCmd})`);
}
} else {
logger.info('No changes.');
}
}
logger.info('Finished');
})();

View file

@ -1,39 +0,0 @@
#!/bin/sh
set -e
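# Clones (or updates) every official ether/ep_* plugin into the directory
# above the Etherpad checkout and symlinks each one into node_modules.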
newline='
'
pecho () { printf %s\\n "$*"; }
log () { pecho "$@"; }
error () { log "ERROR: $@" >&2; }
fatal () { error "$@"; exit 1; }
mydir=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${mydir}/../../.."
pdir=$(cd .. && pwd -P) || exit 1
plugins=$("${mydir}/listOfficialPlugins") || exit 1
for d in ${plugins}; do
log "============================================================"
log "${d}"
log "============================================================"
fd=${pdir}/${d}
repo=git@github.com:ether/${d}.git
[ -d "${fd}" ] || {
log "Cloning ${repo} to ${fd}..."
(cd "${pdir}" && git clone "${repo}" "${d}") || exit 1
} || exit 1
log "Fetching latest commits..."
(cd "${fd}" && git pull --ff-only) || exit 1
log "Getting plugin name..."
pn=$(cd "${fd}" && npx -c 'printf %s\\n "${npm_package_name}"') || exit 1
[ -n "${pn}" ] || fatal "Unable to determine plugin name for ${d}"
md=node_modules/${pn}
[ -d "${md}" ] || {
log "Installing plugin to ${md}..."
ln -s ../../"${d}" "${md}"
} || exit 1
[ "${md}" -ef "${fd}" ] || fatal "${md} is not a symlink to ${fd}"
done

View file

@ -1,135 +0,0 @@
# Contributor Guidelines
(Please talk to people on the mailing list before you change this page, see our section on [how to get in touch](https://github.com/ether/etherpad-lite#get-in-touch))
## Pull requests
* the commit series in the PR should be _linear_ (it **should not contain merge commits**). This is necessary because we want to be able to [bisect](https://en.wikipedia.org/wiki/Bisection_(software_engineering)) bugs easily. Rewrite history/perform a rebase if necessary
* PRs should be issued against the **develop** branch: we never pull directly into **master**
* PRs **should not have conflicts** with develop. If there are, please resolve them by rebasing and force-pushing
* when preparing your PR, please make sure that you have included the relevant **changes to the documentation** (preferably with usage examples)
* write meaningful and detailed **commit messages** in the form:
```
submodule: description
longer description of the change you have made, optionally mentioning the
number of the issue that is being fixed, in the form: Fixes #someIssueNumber
```
* if the PR is a **bug fix**:
* the first commit in the series must be a test that shows the failure
* subsequent commits will fix the bug and make the test pass
* the final commit message should include the text `Fixes: #xxx` to link it to its bug report
* think about stability: code has to be backwards compatible as much as possible. Always **assume your code will be run with an older version of the DB/config file**
* if you want to remove a feature, **deprecate it instead**:
* write an issue with your deprecation plan
* output a `WARN` in the log informing that the feature is going to be removed
* remove the feature in the next version
* if you want to add a new feature, put it under a **feature flag**:
* once the new feature has reached a minimal level of stability, do a PR for it, so it can be integrated early
* expose a mechanism for enabling/disabling the feature
* the new feature should be **disabled** by default. With the feature disabled, the code path should be exactly the same as before your contribution. This is a __necessary condition__ for early integration (a minimal sketch follows this list)
* think of the PR not as something that __you wrote__, but as something that __someone else is going to read__. The commit series in the PR should tell a novice developer the story of your thoughts when developing it
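A minimal sketch of the feature-flag idea above (assumptions: `myNewFeature` is a hypothetical settings.json key invented for illustration, and the require path is the Settings module plugins normally use):
```
'use strict';
// Minimal feature-flag sketch. `myNewFeature` is a hypothetical settings.json
// key; the flag is off by default, so the old code path stays exactly the
// same unless an admin explicitly enables the feature.
const settings = require('ep_etherpad-lite/node/utils/Settings');

const legacyBehavior = () => 'old code path';
const newBehavior = () => 'new, experimental code path';

exports.doSomething = () =>
  settings.myNewFeature === true ? newBehavior() : legacyBehavior();
```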
## How to write a bug report
* Please be polite, we all are humans and problems can occur.
* Please add as much information as possible, for example
* client os(s) and version(s)
* browser(s) and version(s), is the problem reproducible on different clients
* special environments like firewalls or antivirus
* host os and version
* npm and nodejs version
* Logfiles if available
* steps to reproduce
* what you expected to happen
* what actually happened
* Please format logfiles and code examples with Markdown; see GitHub's Markdown help below the issue textarea for more information.
If you send logfiles, please set the loglevel to DEBUG in your settings.json file:
```
/* The log level we are using, can be: DEBUG, INFO, WARN, ERROR */
"loglevel": "DEBUG",
```
The logfile location is defined in the startup script, or the log is shown directly on the command line after you have started Etherpad.
## General goals of Etherpad
To make sure everybody is going in the same direction:
* easy to install for admins and easy to use for people
* easy to integrate into other apps, but also usable as standalone
* lightweight and scalable
* extensible: as much functionality as possible should be implemented as plugins, so changes don't have to be made in core.
Also, keep it maintainable. We don't want to end up as the monster Etherpad once was!
## How to work with git?
* Don't work in your master branch.
* Make a new branch for every feature you're working on. (This ensures that you can do lots of small, independent pull requests instead of one big one with completely different features)
* Don't use GitHub's online edit function (it only creates ugly, broken commits!)
* Try to make clean commits that are easily readable (including descriptive commit messages!)
* Test before you push. Sounds easy, it isn't!
* Don't check in stuff that gets generated during build or runtime
* Make small pull requests that are easy to review, but make sure each one adds value on its own
## Coding style
* Do write comments. (You don't have to comment every line, but if you come up with something that's a bit complex/weird, just leave a comment. Bear in mind that you will probably leave the project at some point and that other people will read your code. Huge amounts of undocumented code are worthless!)
* Never ever use tabs
* Indentation: JS/CSS: 2 spaces; HTML: 4 spaces
* Don't overengineer. Don't try to solve every possible problem in one step; solve problems as simply as possible and improve the solution over time!
* Do generalize sooner or later! (if an old solution, quickly hacked together, poses more problems than it solves today, refactor it!)
* Keep it compatible. Do not introduce changes to the public API, db schema or configurations too lightly. Don't make incompatible changes without good reasons!
* If you do make changes, document them! (see below)
* Use protocol-independent URLs ("//")
## Branching model / git workflow
See git flow: http://nvie.com/posts/a-successful-git-branching-model/
### `master` branch
* the stable branch
* This is the branch everyone should use for production stuff
### `develop` branch
* everything that is READY to go into master at some point in time
* This stuff is tested and ready to go out
### release branches
* stuff that should go into master very soon
* only bugfixes go into these (see http://nvie.com/posts/a-successful-git-branching-model/ for why)
* we should not block new features from landing in develop just because we feel we should be releasing to master soon; this is the situation that release branches solve
### hotfix branches
* fixes for bugs in master
### feature branches (in your own repos)
* these are the branches where you develop your features
* If it's ready to go out, it will be merged into develop
Over time, we pull features from feature branches into the develop branch. Every month we pull from develop into master. Bugs in master get fixed in hotfix branches. These branches get merged into master AND develop. There should never be commits in master that aren't in develop
## Documentation
The docs are in the `doc/` folder in the git repository, so people can easily find the suitable docs for the current git revision.
Documentation should be kept up-to-date. This means: whenever you add a new API method, add a new hook, or change the database model, include the relevant documentation changes in the same pull request.
You can build the docs (e.g. produce HTML) using `make docs`. At some point in the future we will provide online documentation. The current documentation in the GitHub wiki should always reflect the state of `master` (!), since there are no docs in master yet.
## Testing
Front-end tests are found in the `src/tests/frontend/` folder in the repository.
Run them by pointing your browser to `<yourdomainhere>/tests/frontend`.
Back-end tests can be run from the `src` directory, via `npm test`.
## Things you can help with
Etherpad is much more than software. So if you aren't a developer, worry not: there is still a LOT you can do! A big part of what we do is community engagement. You can help in the following ways:
* Triage bugs (applying labels) and confirming their existence
* Testing fixes (simply applying them and seeing if it fixes your issue or not) - Some git experience required
* Notifying large site admins of new releases
* Writing Changelogs for releases
* Creating Windows packages
* Creating releases
* Bumping dependencies periodically and checking they don't break anything
* Write proposals for grants
* Co-Author and Publish CVEs
* Work with the SFC to maintain the legal side of the project
* Maintain TODO page - https://github.com/ether/etherpad-lite/wiki/TODO#IMPORTANT_TODOS

View file

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,47 +0,0 @@
# [plugin_name]
TODO: Describe the plugin.
## Example animated gif of usage if appropriate
![screenshot](https://user-images.githubusercontent.com/220864/99979953-97841d80-2d9f-11eb-9782-5f65817c58f4.PNG)
## Installation
From the Etherpad working directory, run:
```shell
npm install --no-save --legacy-peer-deps [plugin_name]
```
Or, install from Etherpad's `/admin/plugins` page.
## Configuration
TODO
## Testing
To run the backend tests, run the following from the Etherpad working directory:
```shell
(cd src && pnpm test)
```
To run the frontend tests, visit: http://localhost:9001/tests/frontend/
## Copyright and License
Copyright © [yyyy] [name of copyright owner]
and the [plugin_name] authors and contributors
Licensed under the [Apache License, Version 2.0](LICENSE) (the "License"); you
may not use this file except in compliance with the License. You may obtain a
copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

View file

@ -1,75 +0,0 @@
name: "Backend tests"
# any branch is useful for testing before a PR is submitted
on: [push, pull_request]
jobs:
withplugins:
# run on pushes to any branch
# run on PRs from external forks
if: |
(github.event_name != 'pull_request')
|| (github.event.pull_request.head.repo.id != github.event.pull_request.base.repo.id)
name: with Plugins
runs-on: ubuntu-latest
steps:
-
name: Install libreoffice
run: |
sudo add-apt-repository -y ppa:libreoffice/ppa
sudo apt update
sudo apt install -y --no-install-recommends libreoffice libreoffice-pdfimport
-
name: Install etherpad core
uses: actions/checkout@v3
with:
repository: ether/etherpad-lite
-
name: Checkout plugin repository
uses: actions/checkout@v3
with:
path: ./node_modules/__tmp
-
name: Determine plugin name
id: plugin_name
run: |
cd ./node_modules/__tmp
npx -c 'printf %s\\n "::set-output name=plugin_name::${npm_package_name}"'
-
name: Rename plugin directory
run: |
mv ./node_modules/__tmp ./node_modules/"${PLUGIN_NAME}"
env:
PLUGIN_NAME: ${{ steps.plugin_name.outputs.plugin_name }}
-
uses: actions/setup-node@v3
with:
node-version: 12
cache: 'npm'
cache-dependency-path: |
src/package-lock.json
src/bin/doc/package-lock.json
node_modules/${{ steps.plugin_name.outputs.plugin_name }}/package-lock.json
-
name: Install plugin dependencies
run: |
cd ./node_modules/"${PLUGIN_NAME}"
npm ci
env:
PLUGIN_NAME: ${{ steps.plugin_name.outputs.plugin_name }}
# Etherpad core dependencies must be installed after installing the
# plugin's dependencies, otherwise npm will try to hoist common
# dependencies by removing them from src/node_modules and installing them
# in the top-level node_modules. As of v6.14.10, npm's hoist logic appears
# to be buggy, because it sometimes removes dependencies from
# src/node_modules but fails to add them to the top-level node_modules.
# Even if npm correctly hoists the dependencies, the hoisting seems to
# confuse tools such as `npm outdated`, `npm update`, and some ESLint
# rules.
-
name: Install Etherpad core dependencies
run: src/bin/installDeps.sh
-
name: Run the backend tests
run: cd src && pnpm test

View file

@ -1,11 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
versioning-strategy: "increase"

View file

@ -1,9 +0,0 @@
'use strict';
// This is a workaround for https://github.com/eslint/eslint/issues/3458
require('eslint-config-etherpad/patch/modern-module-resolution');
module.exports = {
root: true,
extends: 'etherpad/plugin',
};

View file

@ -1,110 +0,0 @@
# Publicly credit Sauce Labs because they generously support open source
# projects.
name: "frontend tests powered by Sauce Labs"
on: [push]
jobs:
test:
runs-on: ubuntu-latest
steps:
-
name: Fail if Dependabot
if: github.actor == 'dependabot[bot]'
run: |
cat <<EOF >&2
Frontend tests skipped because Dependabot can't access secrets.
Manually re-run the jobs to run the frontend tests.
For more information, see:
https://github.blog/changelog/2021-02-19-github-actions-workflows-triggered-by-dependabot-prs-will-run-with-read-only-permissions/
EOF
exit 1
-
name: Generate Sauce Labs strings
id: sauce_strings
run: |
printf %s\\n '::set-output name=name::${{github.event.repository.name}} ${{ github.workflow }} - ${{ github.job }}'
printf %s\\n '::set-output name=tunnel_id::${{ github.run_id }}-${{ github.run_number }}-${{ github.job }}'
-
name: Check out Etherpad core
uses: actions/checkout@v3
with:
repository: ether/etherpad-lite
-
uses: actions/setup-node@v3
with:
node-version: 12
cache: 'npm'
cache-dependency-path: |
src/package-lock.json
src/bin/doc/package-lock.json
-
name: Check out the plugin
uses: actions/checkout@v3
with:
path: ./node_modules/__tmp
-
name: export GIT_HASH to env
id: environment
run: |
cd ./node_modules/__tmp
echo "::set-output name=sha_short::$(git rev-parse --short ${{ github.sha }})"
-
name: Determine plugin name
id: plugin_name
run: |
cd ./node_modules/__tmp
npx -c 'printf %s\\n "::set-output name=plugin_name::${npm_package_name}"'
-
name: Rename plugin directory
env:
PLUGIN_NAME: ${{ steps.plugin_name.outputs.plugin_name }}
run: |
mv ./node_modules/__tmp ./node_modules/"${PLUGIN_NAME}"
-
name: Install plugin dependencies
env:
PLUGIN_NAME: ${{ steps.plugin_name.outputs.plugin_name }}
run: |
cd ./node_modules/"${PLUGIN_NAME}"
npm ci
# Etherpad core dependencies must be installed after installing the
# plugin's dependencies, otherwise npm will try to hoist common
# dependencies by removing them from src/node_modules and installing them
# in the top-level node_modules. As of v6.14.10, npm's hoist logic appears
# to be buggy, because it sometimes removes dependencies from
# src/node_modules but fails to add them to the top-level node_modules.
# Even if npm correctly hoists the dependencies, the hoisting seems to
# confuse tools such as `npm outdated`, `npm update`, and some ESLint
# rules.
-
name: Install Etherpad core dependencies
run: src/bin/installDeps.sh
-
name: Create settings.json
run: cp settings.json.template settings.json
-
name: Disable import/export rate limiting
run: |
sed -e '/^ *"importExportRateLimiting":/,/^ *\}/ s/"max":.*/"max": 0/' -i settings.json
-
name: Remove standard frontend test files
run: rm -rf src/tests/frontend/specs
-
uses: saucelabs/sauce-connect-action@v2.1.1
with:
username: ${{ secrets.SAUCE_USERNAME }}
accessKey: ${{ secrets.SAUCE_ACCESS_KEY }}
tunnelIdentifier: ${{ steps.sauce_strings.outputs.tunnel_id }}
-
name: Run the frontend tests
shell: bash
env:
SAUCE_USERNAME: ${{ secrets.SAUCE_USERNAME }}
SAUCE_ACCESS_KEY: ${{ secrets.SAUCE_ACCESS_KEY }}
SAUCE_NAME: ${{ steps.sauce_strings.outputs.name }}
TRAVIS_JOB_NUMBER: ${{ steps.sauce_strings.outputs.tunnel_id }}
GIT_HASH: ${{ steps.environment.outputs.sha_short }}
run: |
src/tests/frontend/travis/runner.sh

View file

@ -1,3 +0,0 @@
.DS_Store
node_modules/
npm-debug.log

View file

@ -1,122 +0,0 @@
# This workflow will run tests using node and then publish a package to the npm registry when a release is created
# For more information see: https://help.github.com/actions/language-and-framework-guides/publishing-nodejs-packages
name: Node.js Package
on:
pull_request:
push:
branches:
- main
- master
jobs:
test:
runs-on: ubuntu-latest
steps:
# Clone ether/etherpad-lite to ../etherpad-lite so that ep_etherpad-lite
# can be "installed" in this plugin's node_modules. The checkout v2 action
# doesn't support cloning outside of $GITHUB_WORKSPACE (see
# https://github.com/actions/checkout/issues/197), so the repo is first
# cloned to etherpad-lite then moved to ../etherpad-lite. To avoid
# conflicts with this plugin's clone, etherpad-lite must be cloned and
# moved out before this plugin's repo is cloned to $GITHUB_WORKSPACE.
-
uses: actions/checkout@v3
with:
repository: ether/etherpad-lite
path: etherpad-lite
-
run: mv etherpad-lite ..
# etherpad-lite has been moved outside of $GITHUB_WORKSPACE, so it is now
# safe to clone this plugin's repo to $GITHUB_WORKSPACE.
-
uses: actions/checkout@v3
# This is necessary for actions/setup-node because '..' can't be used in
# cache-dependency-path.
-
name: Create ep_etherpad-lite symlink
run: |
mkdir -p node_modules
ln -s ../../etherpad-lite/src node_modules/ep_etherpad-lite
-
uses: actions/setup-node@v3
with:
node-version: 12
cache: 'npm'
cache-dependency-path: |
node_modules/ep_etherpad-lite/package-lock.json
node_modules/ep_etherpad-lite/bin/doc/package-lock.json
package-lock.json
# All of ep_etherpad-lite's devDependencies are installed because the
# plugin might do `require('ep_etherpad-lite/node_modules/${devDep}')`.
# Eventually it would be nice to create an ESLint plugin that prohibits
# Etherpad plugins from piggybacking off of ep_etherpad-lite's
# devDependencies. If we had that, we could change this line to only
# install production dependencies.
-
run: cd ../etherpad-lite/src && npm ci
-
run: npm ci
# This runs some sanity checks and creates a symlink at
# node_modules/ep_etherpad-lite that points to ../../etherpad-lite/src.
# This step must be done after `npm ci` installs the plugin's dependencies
# because npm "helpfully" cleans up such symlinks. :( Installing
# ep_etherpad-lite in the plugin's node_modules prevents lint errors and
# unit test failures if the plugin does `require('ep_etherpad-lite/foo')`.
-
run: npm install --no-save ep_etherpad-lite@file:../etherpad-lite/src
-
run: npm test
-
run: npm run lint
publish-npm:
if: github.event_name == 'push'
needs: test
runs-on: ubuntu-latest
steps:
-
uses: actions/checkout@v3
with:
fetch-depth: 0
-
uses: actions/setup-node@v3
with:
node-version: 12
registry-url: https://registry.npmjs.org/
cache: 'npm'
-
name: Bump version (patch)
run: |
LATEST_TAG=$(git describe --tags --abbrev=0) || exit 1
NEW_COMMITS=$(git rev-list --count "${LATEST_TAG}"..) || exit 1
[ "${NEW_COMMITS}" -gt 0 ] || exit 0
git config user.name 'github-actions[bot]'
git config user.email '41898282+github-actions[bot]@users.noreply.github.com'
npm ci
npm version patch
git push --follow-tags
# This is required if the package has a prepare script that uses something
# in dependencies or devDependencies.
-
run: npm ci
# `npm publish` must come after `git push` otherwise there is a race
# condition: If two PRs are merged back-to-back then master/main will be
# updated with the commits from the second PR before the first PR's
# workflow has a chance to push the commit generated by `npm version
# patch`. This causes the first PR's `git push` step to fail after the
# package has already been published, which in turn will cause all future
# workflow runs to fail because they will all attempt to use the same
# already-used version number. By running `npm publish` after `git push`,
# back-to-back merges will cause the first merge's workflow to fail but
# the second's will succeed.
-
run: npm publish
env:
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
-
name: Add package to etherpad organization
run: npm access grant read-write etherpad:developers
env:
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}

View file

@ -1,14 +0,0 @@
#!/bin/sh
set -e
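# Prints the names of all official ether/ep_* plugin repositories, one per
# line, by paging through the GitHub API.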
newline='
'
mydir=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${mydir}/../../.."
pdir=$(cd .. && pwd -P) || exit 1
plugins=
for p in "" "&page=2" "&page=3"; do
curlOut=$(curl "https://api.github.com/users/ether/repos?per_page=100${p}") || exit 1
plugins=${plugins}${newline}$(printf %s\\n "${curlOut}" \
| sed -n -e 's;.*git@github.com:ether/\(ep_[^"]*\)\.git.*;\1;p');
done
printf %s\\n "${plugins}" | sort -u | grep -v '^[[:space:]]*$'

View file

@ -1,14 +0,0 @@
echo "herp";
for dir in `ls node_modules`;
do
echo $dir
if [[ $dir == *"ep_"* ]]; then
if [[ $dir != "ep_etherpad-lite" ]]; then
# node src/bin/plugins/checkPlugin.js $dir autopush
cd node_modules/$dir
git commit -m "Automatic update: bump update to re-run latest Etherpad tests" --allow-empty
git push origin master
cd ../..
fi
fi
done

View file

@ -1,20 +0,0 @@
'use strict';
// Returns a list of stale plugins and their authors' email addresses
const superagent = require('superagent');
const currentTime = new Date();
(async () => {
const res = await superagent.get('https://static.etherpad.org/plugins.full.json');
const plugins = JSON.parse(res.text);
for (const plugin of Object.keys(plugins)) {
const name = plugins[plugin].data.name;
const date = new Date(plugins[plugin].time);
const diffTime = Math.abs(currentTime - date);
const diffDays = Math.ceil(diffTime / (1000 * 60 * 60 * 24));
if (diffDays > (365 * 2)) {
console.log(`${name}, ${plugins[plugin].data.maintainers[0].email}`);
}
}
})();

View file

@ -1,17 +0,0 @@
#!/bin/bash
cd node_modules
GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000&page=2" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000&page=3" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
GHUSER=johnmclear; curl "https://api.github.com/users/$GHUSER/repos?per_page=1000&page=4" | grep -o 'git@[^"]*' | grep /ep_ | xargs -L1 git clone
cd ..
for dir in `ls node_modules`;
do
# echo $0
if [[ $dir == *"ep_"* ]]; then
if [[ $dir != "ep_etherpad-lite" ]]; then
node src/bin/plugins/checkPlugin.js $dir autopush
fi
fi
# echo $dir
done

View file

@ -1,9 +0,0 @@
#!/bin/sh
set -e
for dir in node_modules/ep_*; do
dir=${dir#node_modules/}
[ "$dir" != ep_etherpad-lite ] || continue
node src/bin/plugins/checkPlugin.js "$dir" autopush
done

View file

@ -1,16 +0,0 @@
#!/bin/bash
# Specify the path to your package.json file
PACKAGE_JSON_PATH="./src/package.json"
# Check if the file exists
if [ ! -f "$PACKAGE_JSON_PATH" ]; then
echo "Error: package.json not found in the specified path."
exit 1
fi
# Read the version from package.json into a variable
VERSION=$(jq -r '.version' "$PACKAGE_JSON_PATH")
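# Push the release branches and tag, then publish the updated documentation
# repository checked out next to this one.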
git push origin master develop $VERSION
git push --tags
(cd ../ether.github.com && git push)

View file

@ -1,84 +0,0 @@
'use strict';
/*
This is a repair tool. It rebuilds an old pad at a new pad location up to a
known "good" revision.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
if (process.argv.length !== 4 && process.argv.length !== 5) {
throw new Error('Use: node src/bin/repairPad.js $PADID $REV [$NEWPADID]');
}
const padId = process.argv[2];
const newRevHead = process.argv[3];
const newPadId = process.argv[4] || `${padId}-rebuilt`;
(async () => {
const db = require('../node/db/DB');
await db.init();
const PadManager = require('../node/db/PadManager');
const Pad = require('../node/db/Pad').Pad;
// Validate the newPadId if specified and that a pad with that ID does
// not already exist to avoid overwriting it.
if (!PadManager.isValidPadId(newPadId)) {
throw new Error('Cannot create a pad with that id as it is invalid');
}
const exists = await PadManager.doesPadExist(newPadId);
if (exists) throw new Error('Cannot create a pad with that id as it already exists');
const oldPad = await PadManager.getPad(padId);
const newPad = new Pad(newPadId);
// Clone all Chat revisions
const chatHead = oldPad.chatHead;
await Promise.all([...Array(chatHead + 1).keys()].map(async (i) => {
const chat = await db.get(`pad:${padId}:chat:${i}`);
await db.set(`pad:${newPadId}:chat:${i}`, chat);
console.log(`Created: Chat Revision: pad:${newPadId}:chat:${i}`);
}));
// Rebuild Pad from revisions up to and including the new revision head
const AuthorManager = require('../node/db/AuthorManager');
const Changeset = require('../static/js/Changeset');
// Author attributes are derived from changesets, but there can also be
// non-author attributes with specific mappings that changesets depend on
// and, AFAICT, cannot be recreated any other way
newPad.pool.numToAttrib = oldPad.pool.numToAttrib;
for (let curRevNum = 0; curRevNum <= newRevHead; curRevNum++) {
const rev = await db.get(`pad:${padId}:revs:${curRevNum}`);
if (!rev || !rev.meta) throw new Error('The specified revision number could not be found.');
const newRevNum = ++newPad.head;
const newRevId = `pad:${newPad.id}:revs:${newRevNum}`;
await Promise.all([
db.set(newRevId, rev),
AuthorManager.addPad(rev.meta.author, newPad.id),
]);
newPad.atext = Changeset.applyToAText(rev.changeset, newPad.atext, newPad.pool);
console.log(`Created: Revision: pad:${newPad.id}:revs:${newRevNum}`);
}
// Add saved revisions up to the new revision head
console.log(`New revision head: ${newPad.head}`);
const newSavedRevisions = [];
for (const savedRev of oldPad.savedRevisions) {
if (savedRev.revNum <= newRevHead) {
newSavedRevisions.push(savedRev);
console.log(`Added: Saved Revision: ${savedRev.revNum}`);
}
}
newPad.savedRevisions = newSavedRevisions;
// Save the new pad's record
await db.set(`pad:${newPadId}`, newPad);
console.log(`Created: Pad record: pad:${newPadId}`);
await newPad.saveToDatabase();
await db.shutdown();
console.info('finished');
})();

View file

@ -1,216 +0,0 @@
'use strict';
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
const fs = require('fs');
const childProcess = require('child_process');
const log4js = require('log4js');
const path = require('path');
const semver = require('semver');
log4js.configure({appenders: {console: {type: 'console'}},
categories: {
default: {appenders: ['console'], level: 'info'},
}});
/*
Usage
node src/bin/release.js patch
*/
const usage =
'node src/bin/release.js [patch/minor/major] -- example: "node src/bin/release.js patch"';
const release = process.argv[2];
if (!release) {
console.log(usage);
throw new Error('No release type included');
}
const cwd = path.join(fs.realpathSync(__dirname), '../../');
process.chdir(cwd);
// Run command capturing stdout. Trailing newlines are stripped (like the shell does).
const runc =
(cmd, opts = {}) => childProcess.execSync(cmd, {encoding: 'utf8', ...opts}).replace(/\n+$/, '');
// Run command without capturing stdout.
const run = (cmd, opts = {}) => childProcess.execSync(cmd, {stdio: 'inherit', ...opts});
const readJson = (filename) => JSON.parse(fs.readFileSync(filename, {encoding: 'utf8', flag: 'r'}));
const writeJson = (filename, obj) => {
let json = JSON.stringify(obj, null, 2);
if (json !== '' && !json.endsWith('\n')) json += '\n';
fs.writeFileSync(filename, json);
};
const assertWorkDirClean = (opts = {}) => {
opts.cwd = runc('git rev-parse --show-cdup', opts) || cwd;
const m = runc('git diff-files --name-status', opts);
if (m !== '') throw new Error(`modifications in working directory ${opts.cwd}:\n${m}`);
const u = runc('git ls-files -o --exclude-standard', opts);
if (u !== '') throw new Error(`untracked files in working directory ${opts.cwd}:\n${u}`);
const s = runc('git diff-index --cached --name-status HEAD', opts);
if (s !== '') throw new Error(`uncommitted changes in working directory ${opts.cwd}:\n${s}`);
};
const assertBranchCheckedOut = (branch, opts = {}) => {
const b = runc('git symbolic-ref HEAD', opts);
if (b !== `refs/heads/${branch}`) {
const d = opts.cwd ? path.resolve(cwd, opts.cwd) : cwd;
throw new Error(`${branch} must be checked out (cwd: ${d})`);
}
};
const assertUpstreamOk = (branch, opts = {}) => {
const upstream = runc(`git rev-parse --symbolic-full-name ${branch}@{u}`, opts);
if (!(new RegExp(`^refs/remotes/[^/]+/${branch}`)).test(upstream)) {
throw new Error(`${branch} should track origin/${branch}; see git branch --set-upstream-to`);
}
try {
run(`git merge-base --is-ancestor ${branch} ${branch}@{u}`);
} catch (err) {
if (err.status !== 1) throw err;
throw new Error(`${branch} is ahead of origin/${branch}; do you need to push?`);
}
};
// Check that asciidoctor is installed. Use a synchronous check so the
// script fails fast, before any changes are made.
try {
childProcess.execSync('asciidoctor -v', {stdio: 'ignore'});
} catch (err) {
console.log('Please install asciidoctor');
console.log('https://asciidoctor.org/docs/install-toolchain/');
process.exit(1);
}
const dirExists = (dir) => {
try {
return fs.statSync(dir).isDirectory();
} catch (err) {
if (err.code !== 'ENOENT') throw err;
return false;
}
};
// Sanity checks for Etherpad repo.
assertWorkDirClean();
assertBranchCheckedOut('develop');
assertUpstreamOk('develop');
assertUpstreamOk('master');
// Sanity checks for documentation repo.
if (!dirExists('../ether.github.com')) {
throw new Error('please clone documentation repo: ' +
'(cd .. && git clone git@github.com:ether/ether.github.com.git)');
}
assertWorkDirClean({cwd: '../ether.github.com/'});
assertBranchCheckedOut('master', {cwd: '../ether.github.com/'});
assertUpstreamOk('master', {cwd: '../ether.github.com/'});
const changelog = fs.readFileSync('CHANGELOG.md', {encoding: 'utf8', flag: 'r'});
const pkg = readJson('./src/package.json');
const currentVersion = pkg.version;
const newVersion = semver.inc(currentVersion, release);
if (!newVersion) {
console.log(usage);
throw new Error('Unable to generate new version from input');
}
if (!changelog.startsWith(`# ${newVersion}\n`)) {
throw new Error(`No changelog record for ${newVersion}, please create changelog record`);
}
// ////////////////////////////////////////////////////////////////////////////////////////////////
// Done with sanity checks, now it's time to make changes.
try {
console.log('Updating develop branch...');
run('git pull --ff-only');
console.log(`Bumping ${release} version (to ${newVersion})...`);
pkg.version = newVersion;
writeJson('./src/package.json', pkg);
// Regenerate the lockfile so it matches the bumped version.
run('npm install --package-lock-only', {cwd: 'src/'});
// Many users will be using the latest LTS version of npm, and the latest LTS version of npm uses
// lockfileVersion 1. Enforce v1 so that users don't see a (benign) compatibility warning.
const pkglock = readJson('./src/package-lock.json');
pkglock.lockfileVersion = 1;
writeJson('./src/package-lock.json', pkglock);
run('git add src/package.json');
run('git add src/package-lock.json');
run('git commit -m "bump version"');
console.log('Switching to master...');
run('git checkout master');
console.log('Updating master branch...');
run('git pull --ff-only');
console.log('Merging develop into master...');
run('git merge --no-ff --no-edit develop');
console.log(`Creating ${newVersion} tag...`);
run(`git tag -s '${newVersion}' -m '${newVersion}'`);
run(`git tag -s 'v${newVersion}' -m 'v${newVersion}'`);
console.log('Switching back to develop...');
run('git checkout develop');
console.log('Merging master into develop...');
run('git merge --no-ff --no-edit master');
} catch (err) {
console.error(err.toString());
console.warn('Resetting repository...');
console.warn('Resetting master...');
run('git checkout -f master');
run('git reset --hard @{u}');
console.warn('Resetting develop...');
run('git checkout -f develop');
run('git reset --hard @{u}');
console.warn(`Deleting ${newVersion} tag...`);
run(`git rev-parse -q --verify refs/tags/'${newVersion}' >/dev/null || exit 0; ` +
`git tag -d '${newVersion}'`);
run(`git rev-parse -q --verify refs/tags/'v${newVersion}' >/dev/null || exit 0; ` +
`git tag -d 'v${newVersion}'`);
throw err;
}
try {
console.log('Building documentation...');
run('node ./make_docs.js');
console.log('Updating ether.github.com master branch...');
run('git pull --ff-only', {cwd: '../ether.github.com/'});
console.log('Committing documentation...');
run(`cp -R out/doc/ ../ether.github.com/public/doc/v'${newVersion}'`);
run(`npm version ${newVersion}`, {cwd: '../ether.github.com'});
run('git add .', {cwd: '../ether.github.com/'});
run(`git commit -m '${newVersion} docs'`, {cwd: '../ether.github.com/'});
} catch (err) {
console.error(err.toString());
console.warn('Resetting repository...');
console.warn('Resetting master...');
run('git checkout -f master', {cwd: '../ether.github.com/'});
run('git reset --hard @{u}', {cwd: '../ether.github.com/'});
throw err;
}
console.log('Done.');
console.log('Review the new commits and the new tag:');
console.log(' git log --graph --date-order --boundary --oneline --decorate develop@{u}..develop');
console.log(` git show '${newVersion}'`);
console.log(' (cd ../ether.github.com && git show)');
console.log('If everything looks good then push:');
console.log('Run ./bin/push-after-release.sh');
console.log('Creating a Windows build manually is no longer necessary; it is built by a GitHub action.');
console.log('After the Windows binary is created, a release with the given version is published' +
' automatically. Just paste the release notes into it.');
console.log('The docs are updated automatically with the new version. While the Windows build' +
' is being generated, people can still download the older versions.');
console.log('Finally go public with an announcement via our comms channels :)');
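
For illustration, how the new version is derived via semver.inc (versions are hypothetical):

  $ node -e "console.log(require('semver').inc('1.9.7', 'patch'))"   # -> 1.9.8
  $ node -e "console.log(require('semver').inc('1.9.7', 'minor'))"   # -> 1.10.0
  $ node src/bin/release.js patch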

View file

@ -1,58 +0,0 @@
'use strict';
/*
* This is a repair tool. It extracts all data of a pad, removes it, and inserts it again.
*/
// As of v14, Node.js does not exit when there is an unhandled Promise rejection. Convert an
// unhandled rejection into an uncaught exception, which does cause Node.js to exit.
process.on('unhandledRejection', (err) => { throw err; });
console.warn('WARNING: This script must not be used while Etherpad is running!');
if (process.argv.length !== 3) throw new Error('Use: node src/bin/repairPad.js $PADID');
// get the padID
const padId = process.argv[2];
let valueCount = 0;
(async () => {
// initialize database
require('../node/utils/Settings');
const db = require('../node/db/DB');
await db.init();
// get the pad
const padManager = require('../node/db/PadManager');
const pad = await padManager.getPad(padId);
// accumulate the required keys
const neededDBValues = [`pad:${padId}`];
// add all authors
neededDBValues.push(...pad.getAllAuthors().map((author) => `globalAuthor:${author}`));
// add all revisions
for (let rev = 0; rev <= pad.head; ++rev) {
neededDBValues.push(`pad:${padId}:revs:${rev}`);
}
// add all chat values
for (let chat = 0; chat <= pad.chatHead; ++chat) {
neededDBValues.push(`pad:${padId}:chat:${chat}`);
}
// now fetch and reinsert every key
console.log('Fetch and reinsert every key');
for (const key of neededDBValues) {
if (valueCount % 100 === 0) console.log(`${valueCount}/${neededDBValues.length}`);
const value = await db.get(key);
// Uncomment to inspect each key/value pair as it is processed:
// console.log(`Key: ${key}, value: ${JSON.stringify(value)}`);
await db.remove(key);
await db.set(key, value);
valueCount++;
}
console.info(`Finished: Replaced ${valueCount} values in the database`);
})();
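
A minimal invocation sketch (pad ID and key counts are hypothetical); as the warning above says, stop Etherpad first:

  $ node src/bin/repairPad.js mypad
  Fetch and reinsert every key
  0/123
  100/123
  Finished: Replaced 123 values in the database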

View file

@ -1,35 +0,0 @@
#!/bin/sh
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Source constants and useful functions
. src/bin/functions.sh
ignoreRoot=0
for ARG in "$@"; do
if [ "$ARG" = "--root" ]; then
ignoreRoot=1
fi
done
# Stop the script if it's started as root
if [ "$(id -u)" -eq 0 ] && [ "$ignoreRoot" -eq 0 ]; then
cat <<EOF >&2
You shouldn't start Etherpad as root!
Please type 'Etherpad rocks my socks' (or restart with the '--root'
argument) if you still want to start it as root:
EOF
printf "> " >&2
read -r rocks
[ "$rocks" = "Etherpad rocks my socks" ] || fatal "Your input was incorrect"
fi
# Prepare the environment
src/bin/installDeps.sh "$@" || exit 1
# Move to the node folder and start
log "Starting Etherpad..."
exec pnpm run dev "$@"
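
A usage sketch of this start script (flags as handled above):

  $ src/bin/run.sh           # installs dependencies, then execs 'pnpm run dev'
  $ src/bin/run.sh --root    # skips the root-user confirmation prompt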

View file

@ -1,70 +0,0 @@
#!/bin/sh
# This script ensures that Etherpad (ep-lite) automatically restarts after
# an error occurs
# Handling Errors
# 0 silent
# 1 email
ERROR_HANDLING=0
# Your email address which should receive the error messages
EMAIL_ADDRESS="no-reply@example.com"
# Sets the minimum amount of time, in seconds, between error emails.
# This ensures you do not get spammed during an endless restart loop.
TIME_BETWEEN_EMAILS=600 # 10 minutes
# DON'T EDIT AFTER THIS LINE
pecho() { printf %s\\n "$*"; }
log() { pecho "$@"; }
error() { log "ERROR: $@" >&2; }
fatal() { error "$@"; exit 1; }
LAST_EMAIL_SEND=0
# Move to the Etherpad base directory.
MY_DIR=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${MY_DIR}/../.." || exit 1
# Check if a logfile parameter is set
LOG="$1"
[ -n "${LOG}" ] || fatal "Set a logfile as the first parameter"
shift
while true; do
# Create the logfile if it doesn't exist yet
[ -f "${LOG}" ] || touch "${LOG}" || fatal "Logfile '${LOG}' could not be created"
# Check that the file is writable
[ -w "${LOG}" ] || fatal "Logfile '${LOG}' is not writable"
# Start the application
src/bin/run.sh "$@" >>${LOG} 2>>${LOG}
TIME_FMT=$(date +%Y-%m-%dT%H:%M:%S%z)
# Send email
if [ "$ERROR_HANDLING" = 1 ]; then
TIME_NOW=$(date +%s)
TIME_SINCE_LAST_SEND=$(($TIME_NOW - $LAST_EMAIL_SEND))
if [ "$TIME_SINCE_LAST_SEND" -gt "$TIME_BETWEEN_EMAILS" ]; then
{
cat <<EOF
Server was restarted at: ${TIME_FMT}
The last 50 lines of the log before the server exited:
EOF
tail -n 50 "${LOG}"
} | mail -s "Etherpad restarted" "$EMAIL_ADDRESS"
LAST_EMAIL_SEND=$TIME_NOW
fi
fi
pecho "RESTART! ${TIME_FMT}" >>${LOG}
# Sleep 10 seconds before restart
sleep 10
done
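
To enable restart emails, set ERROR_HANDLING=1 and EMAIL_ADDRESS at the top of this script. A usage sketch (the wrapper's filename is not shown in this diff, so the path is hypothetical; the logfile is the required first argument):

  $ src/bin/safeRun.sh /var/log/etherpad.log
  # any further arguments are passed through to src/bin/run.sh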

View file

@ -1,11 +0,0 @@
#!/bin/sh
set -e
mydir=$(cd "${0%/*}" && pwd -P) || exit 1
cd "${mydir}"/../..
OUTDATED=$(npm outdated --depth=0 | awk '{print $1}' | grep '^ep_') || {
echo "All plugins are up-to-date"
exit 0
}
set -- ${OUTDATED}
echo "Updating plugins: $*"
exec npm install --no-save "$@"
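
For illustration, the intermediate output this pipeline parses (plugin names and versions are hypothetical):

  $ npm outdated --depth=0
  Package      Current  Wanted  Latest  Location
  ep_headings  0.0.17   0.0.19  0.0.19  node_modules/ep_headings
  # awk '{print $1}' keeps the first column; grep '^ep_' drops the header row and any non-plugin rows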

View file

@ -75,7 +75,7 @@
"wtfnode": "^0.9.1"
},
"bin": {
"etherpad-healthcheck": "bin/etherpad-healthcheck",
"etherpad-healthcheck": "../bin/etherpad-healthcheck",
"etherpad-lite": "node/server.ts"
},
"devDependencies": {

View file

@ -10,7 +10,7 @@ try() { "$@" || fatal "'$@' failed"; }
MY_DIR=$(try cd "${0%/*}" && try pwd -P) || exit 1
try cd "${MY_DIR}/../../../.."
log "Assuming src/bin/installDeps.sh has already been run"
log "Assuming bin/installDeps.sh has already been run"
(cd src && npm run dev --experimental-worker "${@}" &
ep_pid=$!)