Fixing client-side require; the minify code is completely unused now, very ugly solution

Peter 'Pita' Martischka 2012-03-04 23:45:33 +01:00
parent 8b5925440a
commit 7b518eeb2d
4 changed files with 249 additions and 786 deletions

View file

@@ -1,15 +1,60 @@
var path = require('path');
var minify = require('../../utils/Minify');
var plugins = require("ep_etherpad-lite/static/js/pluginfw/plugins");
var CachingMiddleware = require('../../utils/caching_middleware');
var settings = require("../../utils/Settings");
var Yajsml = require('yajsml');
var fs = require("fs");
var ERR = require("async-stacktrace");
exports.expressCreateServer = function (hook_name, args, cb) {
//serve static files
args.app.get('/static/js/require-kernel.js', function (req, res, next) {
res.header("Content-Type","application/javascript; charset: utf-8");
res.write(minify.requireDefinition()); // + "\n require.setLibraryURI('/plugins'); ");
res.end();
/* Handle static files for plugins:
paths like "/static/plugins/ep_myplugin/js/test.js"
are rewritten into ROOT_PATH_OF_MYPLUGIN/static/js/test.js,
commonly ETHERPAD_ROOT/node_modules/ep_myplugin/static/js/test.js
*/
args.app.get(/^\/minified\/plugins\/([^\/]+)\/static\/(.*)/, function(req, res, next) {
var plugin_name = req.params[0];
var modulePath = req.url.split("?")[0].substr("/minified/plugins/".length);
var fullPath = require.resolve(modulePath);
if (plugins.plugins[plugin_name] == undefined) {
return next();
}
fs.readFile(fullPath, "utf8", function(err, data){
if(ERR(err)) return;
res.send("require.define('" + modulePath + "', function (require, exports, module) {" + data + "})");
})
//require.define("/plugins.js", function (require, exports, module) {
//res.sendfile(fullPath);
});
// Cache both minified and static.
var assetCache = new CachingMiddleware;
args.app.all('/(minified|static)/*', assetCache.handle);
// Minify will serve static files compressed (minify enabled). It also has
// file-specific hacks for ace/require-kernel/etc.
args.app.all('/static/:filename(*)', minify.minify);
// Set up middleware that will package JavaScript files served by minify for
// the CommonJS loader on the client side.
var jsServer = new (Yajsml.Server)({
rootPath: 'minified/'
, rootURI: 'http://localhost:' + settings.port + '/static/js/'
});
var StaticAssociator = Yajsml.associators.StaticAssociator;
var associations =
Yajsml.associators.associationsForSimpleMapping(minify.tar);
var associator = new StaticAssociator(associations);
jsServer.setAssociator(associator);
args.app.use(jsServer);
// serve plugin definitions
// not very static, but served here so that the client can do require("pluginfw/static/js/plugin-definitions.js");
args.app.get('/pluginfw/plugin-definitions.json', function (req, res, next) {
@@ -17,30 +62,4 @@ exports.expressCreateServer = function (hook_name, args, cb) {
res.write(JSON.stringify({"plugins": plugins.plugins, "parts": plugins.parts}));
res.end();
});
/* Handle static files for plugins:
paths like "/static/plugins/ep_myplugin/js/test.js"
are rewritten into ROOT_PATH_OF_MYPLUGIN/static/js/test.js,
commonly ETHERPAD_ROOT/node_modules/ep_myplugin/static/js/test.js
*/
args.app.get(/^\/plugins\/([^\/]+)\/static\/(.*)/, function(req, res, next) {
var plugin_name = req.params[0];
var url = req.params[1].replace(/\.\./g, '').split("?")[0];
if (plugins.plugins[plugin_name] == undefined) {
return next();
}
var filePath = path.normalize(path.join(plugins.plugins[plugin_name].package.path, "static", url));
res.sendfile(filePath, { maxAge: exports.maxAge });
});
// Handle normal static files
args.app.get('/static/*', function(req, res) {
var filePath = path.normalize(__dirname + "/../../.." +
req.url.replace(/\.\./g, '').split("?")[0]);
res.sendfile(filePath, { maxAge: exports.maxAge });
});
}
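For orientation, a minimal sketch of the round trip this hook now sets up: CachingMiddleware runs first, the minify middleware serves the individual static files, and the Yajsml server packages them for the client-side CommonJS loader. The URL and module names below are illustrative only; the real grouping comes from tar.json.
// Hypothetical request issued by the browser's require kernel:
//   GET /minified/pad.js
// Yajsml resolves the package members against its rootURI
// ('http://localhost:' + settings.port + '/static/js/'), fetches each member
// through the minify middleware, and CachingMiddleware stores the result
// (plus a gzipped copy) under var/. The response is a require.define() bundle,
// roughly:
require.define({
  "/pad.js": (function (require, exports, module) { /* contents of static/js/pad.js */ })
, "/pad": null
});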

View file

@@ -27,316 +27,260 @@ var cleanCSS = require('clean-css');
var jsp = require("uglify-js").parser;
var pro = require("uglify-js").uglify;
var path = require('path');
var Buffer = require('buffer').Buffer;
var zlib = require('zlib');
var RequireKernel = require('require-kernel');
var server = require('../server');
var os = require('os');
var ROOT_DIR = path.normalize(__dirname + "/../" );
var JS_DIR = ROOT_DIR + '../static/js/';
var CSS_DIR = ROOT_DIR + '../static/css/';
var CACHE_DIR = path.join(settings.root, 'var');
var ROOT_DIR = path.normalize(__dirname + "/../../static/");
var TAR_PATH = path.join(__dirname, 'tar.json');
var tar = JSON.parse(fs.readFileSync(TAR_PATH, 'utf8'));
// Rewrite tar to include modules with no extensions and proper rooted paths.
exports.tar = {};
for (var key in tar) {
exports.tar['/' + key] =
tar[key].map(function (p) {return '/' + p}).concat(
tar[key].map(function (p) {return '/' + p.replace(/\.js$/, '')})
);
}
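To make the rewrite above concrete, a small example; the entry name and its members are hypothetical, not necessarily what tar.json actually contains:
// Given a tar.json entry such as:
//   { "pad.js": ["pad.js", "pad_utils.js"] }
// the loop produces a rooted key plus both .js and extension-less member paths:
//   exports.tar["/pad.js"] == ["/pad.js", "/pad_utils.js", "/pad", "/pad_utils"]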
/**
* creates the minified JavaScript for the given minified name
* @param req the Express request
* @param res the Express response
*/
exports.minifyJS = function(req, res, next)
exports.minify = function(req, res, next)
{
var jsFilename = req.params[0];
//choose the js files we need
var jsFiles = undefined;
if (Object.prototype.hasOwnProperty.call(tar, jsFilename)) {
jsFiles = tar[jsFilename];
var filename = req.params['filename'];
// No relative paths, especially if they may go up the file hierarchy.
filename = path.normalize(path.join(ROOT_DIR, filename));
if (filename.indexOf(ROOT_DIR) == 0) {
filename = filename.slice(ROOT_DIR.length);
filename = filename.replace(/\\/g, '/'); // Windows (safe generally?)
} else {
/* Not in the tar list, but try anyway; if it fails, pass to `next`.
We actually try rather than checking the filesystem here because
we don't want to duplicate the require.resolve() handling.
*/
jsFiles = [jsFilename];
res.writeHead(404, {});
res.end();
return;
}
_handle(req, res, jsFilename, jsFiles, function (err) {
console.log("Unable to load minified file " + jsFilename + ": " + err.toString());
/* Throw away error and generate a 404, not 500 */
next();
});
}
function _handle(req, res, jsFilename, jsFiles, next) {
res.header("Content-Type","text/javascript");
var cacheName = CACHE_DIR + "/minified_" + jsFilename.replace(/\//g, "_");
//minifying is enabled
if(settings.minify)
{
var result = undefined;
var latestModification = 0;
async.series([
//find out the highest modification date
function(callback)
{
var folders2check = [CSS_DIR, JS_DIR];
//go through these two folders
async.forEach(folders2check, function(path, callback)
{
//read the files in the folder
fs.readdir(path, function(err, files)
{
if(ERR(err, callback)) return;
//we want to check the directory itself for changes too
files.push(".");
//go through all files in this folder
async.forEach(files, function(filename, callback)
{
//get the stat data of this file
fs.stat(path + "/" + filename, function(err, stats)
{
if(ERR(err, callback)) return;
//get the modification time
var modificationTime = stats.mtime.getTime();
//compare the modification time to the highest found
if(modificationTime > latestModification)
{
latestModification = modificationTime;
}
callback();
});
}, callback);
});
}, callback);
},
function(callback)
{
//check the modification time of the minified js
fs.stat(cacheName, function(err, stats)
{
if(err && err.code != "ENOENT")
{
ERR(err, callback);
return;
}
//there is no minified file or there are new changes since this file was generated, so continue generating this file
if((err && err.code == "ENOENT") || stats.mtime.getTime() < latestModification)
{
callback();
}
//the minified file is still up to date, stop minifying
else
{
callback("stop");
}
});
},
//load all js files
function (callback)
{
var values = [];
tarCode(
jsFiles
, function (content) {values.push(content)}
, function (err) {
if(ERR(err, next)) return;
result = values.join('');
callback();
});
},
//put all together and write it into a file
function(callback)
{
async.parallel([
//write the results plain in a file
function(callback)
{
fs.writeFile(cacheName, result, "utf8", callback);
},
//write the results compressed in a file
function(callback)
{
zlib.gzip(result, function(err, compressedResult){
//weird gzip bug that returns 0 instead of null if everything is ok
err = err === 0 ? null : err;
if(ERR(err, callback)) return;
fs.writeFile(cacheName + ".gz", compressedResult, callback);
});
}
],callback);
}
], function(err)
{
if(err && err != "stop")
{
if(ERR(err)) return;
}
//check if gzip is supported by this browser
var gzipSupport = req.header('Accept-Encoding', '').indexOf('gzip') != -1;
var pathStr;
if(gzipSupport && os.type().indexOf("Windows") == -1)
{
pathStr = path.normalize(cacheName + ".gz");
res.header('Content-Encoding', 'gzip');
}
else
{
pathStr = path.normalize(cacheName);
}
res.sendfile(pathStr, { maxAge: server.maxAge });
})
// What content type should this be?
// TODO: This should use a MIME module.
var contentType;
if (filename.match(/\.js$/)) {
contentType = "text/javascript";
} else if (filename.match(/\.css$/)) {
contentType = "text/css";
} else if (filename.match(/\.html$/)) {
contentType = "text/html";
} else if (filename.match(/\.txt$/)) {
contentType = "text/plain";
} else if (filename.match(/\.png$/)) {
contentType = "image/png";
} else if (filename.match(/\.gif$/)) {
contentType = "image/gif";
} else if (filename.match(/\.ico$/)) {
contentType = "image/x-icon";
} else {
contentType = "application/octet-stream";
}
//minifying is disabled, so put the files together in one file
else
{
tarCode(
jsFiles
, function (content) {res.write(content)}
, function (err) {
if(ERR(err, next)) return;
statFile(filename, function (error, date, exists) {
if (date) {
date = new Date(date);
res.setHeader('last-modified', date.toUTCString());
res.setHeader('date', (new Date()).toUTCString());
if (server.maxAge) {
var expiresDate = new Date((new Date()).getTime()+server.maxAge*1000);
res.setHeader('expires', expiresDate.toUTCString());
res.setHeader('cache-control', 'max-age=' + server.maxAge);
}
}
if (error) {
res.writeHead(500, {});
res.end();
});
}
} else if (!exists) {
res.writeHead(404, {});
res.end();
} else if (new Date(req.headers['if-modified-since']) >= date) {
res.writeHead(304, {});
res.end();
} else {
if (req.method == 'HEAD') {
res.header("Content-Type", contentType);
res.writeHead(200, {});
res.end();
} else if (req.method == 'GET') {
getFileCompressed(filename, contentType, function (error, content) {
if(ERR(error)) return;
res.header("Content-Type", contentType);
res.writeHead(200, {});
res.write(content);
res.end();
});
} else {
res.writeHead(405, {'allow': 'HEAD, GET'});
res.end();
}
}
});
}
// find all includes in ace.js and embed them.
function getAceFile(callback) {
fs.readFile(JS_DIR + 'ace.js', "utf8", function(err, data) {
fs.readFile(ROOT_DIR + 'js/ace.js', "utf8", function(err, data) {
if(ERR(err, callback)) return;
// Find all includes in ace.js and embed them
var founds = data.match(/\$\$INCLUDE_[a-zA-Z_]+\([a-zA-Z0-9.\/_"-]+\)/gi);
var founds = data.match(/\$\$INCLUDE_[a-zA-Z_]+\("[^"]*"\)/gi);
if (!settings.minify) {
founds = [];
}
// Always include the require kernel.
founds.push('$$INCLUDE_JS("../static/js/require-kernel.js")');
data += ';\n';
data += 'Ace2Editor.EMBEDED = Ace2Editor.EMBEDED || {};\n';
//go through all includes
// Request the contents of the included file on the server-side and write
// them into the file.
async.forEach(founds, function (item, callback) {
var filename = item.match(/"([^"]*)"/)[1];
var type = item.match(/INCLUDE_([A-Z]+)/)[1];
var shortFilename = (filename.match(/^..\/static\/js\/(.*)$/, '')||[])[1];
var request = require('request');
//read the included files
if (shortFilename) {
if (shortFilename == 'require-kernel.js') {
// the kernel isn't actually on the file system.
handleEmbed(null, requireDefinition());
var baseURI = 'http://localhost:' + settings.port
request(baseURI + path.normalize(path.join('/static/', filename)), function (error, response, body) {
if (!error && response.statusCode == 200) {
data += 'Ace2Editor.EMBEDED[' + JSON.stringify(filename) + '] = '
+ JSON.stringify(body || '') + ';\n';
} else {
var contents = '';
tarCode(tar[shortFilename] || shortFilename
, function (content) {
contents += content;
}
, function () {
handleEmbed(null, contents);
}
);
// Silence?
}
} else {
fs.readFile(ROOT_DIR + filename, "utf8", handleEmbed);
}
function handleEmbed(error, data_) {
if (error) {
return; // Don't bother to include it.
}
if (settings.minify) {
if (type == "JS") {
try {
data_ = compressJS([data_]);
} catch (e) {
// Ignore; include uncompressed, which will break in the browser.
}
} else {
data_ = compressCSS([data_]);
}
}
data += 'Ace2Editor.EMBEDED[' + JSON.stringify(filename) + '] = '
+ JSON.stringify(data_) + ';\n';
callback();
}
});
}, function(error) {
callback(error, data);
});
});
}
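As an illustration of the embedding step above (the ace2_inner.js path is only an example of an include, not necessarily one that ace.js contains):
// A directive found inside ace.js such as:
//   $$INCLUDE_JS("../static/js/ace2_inner.js")
// is read from disk (or, in the new code path, fetched over HTTP from the
// local server), optionally compressed, and appended to the served file as:
//   Ace2Editor.EMBEDED["../static/js/ace2_inner.js"] = "<file contents as a JSON string>";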
exports.requireDefinition = requireDefinition;
// Check for the existence of the file and get the last modification date.
function statFile(filename, callback) {
if (filename == 'js/ace.js') {
// Sometimes static assets are inlined into this file, so we have to stat
// everything.
lastModifiedDateOfEverything(function (error, date) {
callback(error, date, !error);
});
} else if (filename == 'js/require-kernel.js') {
callback(null, requireLastModified(), true);
} else {
fs.stat(ROOT_DIR + filename, function (error, stats) {
if (error) {
if (error.code == "ENOENT") {
// Stat the directory instead.
fs.stat(path.dirname(ROOT_DIR + filename), function (error, stats) {
if (error) {
if (error.code == "ENOENT") {
callback(null, null, false);
} else {
callback(error);
}
} else {
callback(null, stats.mtime.getTime(), false);
}
});
} else {
callback(error);
}
} else {
callback(null, stats.mtime.getTime(), true);
}
});
}
}
function lastModifiedDateOfEverything(callback) {
var folders2check = [ROOT_DIR + 'js/', ROOT_DIR + 'css/'];
var latestModification = 0;
//go through these two folders
async.forEach(folders2check, function(path, callback)
{
//read the files in the folder
fs.readdir(path, function(err, files)
{
if(ERR(err, callback)) return;
//we want to check the directory itself for changes too
files.push(".");
//go through all files in this folder
async.forEach(files, function(filename, callback)
{
//get the stat data of this file
fs.stat(path + "/" + filename, function(err, stats)
{
if(ERR(err, callback)) return;
//get the modification time
var modificationTime = stats.mtime.getTime();
//compare the modification time to the highest found
if(modificationTime > latestModification)
{
latestModification = modificationTime;
}
callback();
});
}, callback);
});
}, function () {
callback(null, latestModification);
});
}
// This should be provided by the module, but until then, just use startup
// time.
var _requireLastModified = new Date();
function requireLastModified() {
return _requireLastModified.toUTCString();
}
function requireDefinition() {
return 'var require = ' + RequireKernel.kernelSource + ';\n';
}
function tarCode(jsFiles, write, callback) {
write('require.define({');
var initialEntry = true;
async.forEach(jsFiles, function (filename, callback){
var path;
var srcPath;
if (filename.indexOf('plugins/') == 0) {
srcPath = filename.substring('plugins/'.length);
path = require.resolve(srcPath);
function getFileCompressed(filename, contentType, callback) {
getFile(filename, function (error, content) {
if (error || !content) {
callback(error, content);
} else {
srcPath = '/' + filename;
path = JS_DIR + filename;
}
srcPath = JSON.stringify(srcPath);
var srcPathAbbv = JSON.stringify(srcPath.replace(/\.js$/, ''));
if (filename == 'ace.js') {
getAceFile(handleFile);
} else {
fs.readFile(path, "utf8", handleFile);
}
function handleFile(err, data) {
if(ERR(err, callback)) return;
if (!initialEntry) {
write('\n,');
} else {
initialEntry = false;
}
write(srcPath + ': ')
data = '(function (require, exports, module) {' + data + '})';
if (settings.minify) {
write(compressJS([data]));
} else {
write(data);
if (contentType == 'text/javascript') {
try {
content = compressJS([content]);
} catch (error) {
// silence
}
} else if (contentType == 'text/css') {
content = compressCSS([content]);
}
}
if (srcPath != srcPathAbbv) {
write('\n,' + srcPathAbbv + ': null');
}
callback();
callback(null, content);
}
}, function (err) {
if(ERR(err, callback)) return;
write('});\n');
callback();
});
}
function getFile(filename, callback) {
if (filename == 'js/ace.js') {
getAceFile(callback);
} else if (filename == 'js/require-kernel.js') {
callback(undefined, requireDefinition());
} else {
fs.readFile(ROOT_DIR + filename, callback);
}
}
function compressJS(values)
{
var complete = values.join("\n");

View file

@@ -0,0 +1,177 @@
/*
* 2011 Peter 'Pita' Martischka (Primary Technology Ltd)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var async = require('async');
var Buffer = require('buffer').Buffer;
var fs = require('fs');
var path = require('path');
var server = require('../server');
var zlib = require('zlib');
var util = require('util');
var ROOT_DIR = path.normalize(__dirname + "/../");
var CACHE_DIR = ROOT_DIR + '../var/';
var responseCache = {};
/*
This caches and compresses 200 and 404 responses to GET and HEAD requests.
TODO: Caching and compressing are solved problems, a middleware configuration
should replace this.
*/
function CachingMiddleware() {
}
CachingMiddleware.prototype = new function () {
function handle(req, res, next) {
if (!(req.method == "GET" || req.method == "HEAD")) {
return next(undefined, req, res);
}
var old_req = {};
var old_res = {};
var supportsGzip =
req.header('Accept-Encoding', '').indexOf('gzip') != -1;
var path = require('url').parse(req.url).path;
var cacheKey = (new Buffer(path)).toString('base64').replace(/[\/\+=]/g, '');
fs.stat(CACHE_DIR + 'minified_' + cacheKey, function (error, stats) {
var modifiedSince = (req.headers['if-modified-since']
&& new Date(req.headers['if-modified-since']));
var lastModifiedCache = !error && stats.mtime;
if (lastModifiedCache) {
req.headers['if-modified-since'] = lastModifiedCache.toUTCString();
} else {
delete req.headers['if-modified-since'];
}
// Always issue GET to downstream.
old_req.method = req.method;
req.method = 'GET';
var expirationDate = new Date(((responseCache[cacheKey] || {}).headers || {})['expires']);
if (expirationDate > new Date()) {
// Our cached version is still valid.
return respond();
}
var _headers = {};
old_res.setHeader = res.setHeader;
res.setHeader = function (key, value) {
_headers[key.toLowerCase()] = value;
old_res.setHeader.call(res, key, value);
};
old_res.writeHead = res.writeHead;
res.writeHead = function (status, headers) {
var lastModified = (res.getHeader('last-modified')
&& new Date(res.getHeader('last-modified')));
res.writeHead = old_res.writeHead;
if (status == 200 || status == 404) {
// Update cache
var buffer = '';
Object.keys(headers || {}).forEach(function (key) {
res.setHeader(key, headers[key]);
});
headers = _headers;
old_res.write = res.write;
old_res.end = res.end;
res.write = function(data, encoding) {
buffer += data.toString(encoding);
};
res.end = function(data, encoding) {
async.parallel([
function (callback) {
var path = CACHE_DIR + 'minified_' + cacheKey;
fs.writeFile(path, buffer, function (error, stats) {
callback();
});
}
, function (callback) {
var path = CACHE_DIR + 'minified_' + cacheKey + '.gz';
zlib.gzip(buffer, function(error, content) {
if (error) {
callback();
} else {
fs.writeFile(path, content, function (error, stats) {
callback();
});
}
});
}
], function () {
responseCache[cacheKey] = {statusCode: status, headers: headers};
respond();
});
};
} else if (status == 304) {
// Nothing new changed from the cached version.
old_res.write = res.write;
old_res.end = res.end;
res.write = function(data, encoding) {};
res.end = function(data, encoding) { respond() };
} else {
res.writeHead(status, headers);
}
};
next(undefined, req, res);
// This handles read/write synchronization as well as its predecessor,
// which is to say, not at all.
// TODO: Implement locking on write or ditch caching of gzip and use
// existing middlewares.
function respond() {
req.method = old_req.method || req.method;
res.write = old_res.write || res.write;
res.end = old_res.end || res.end;
var headers = responseCache[cacheKey].headers;
var statusCode = responseCache[cacheKey].statusCode;
var pathStr = CACHE_DIR + 'minified_' + cacheKey;
if (supportsGzip && (headers['content-type'] || '').match(/^text\//)) {
pathStr = pathStr + '.gz';
headers['content-encoding'] = 'gzip';
}
var lastModified = (headers['last-modified']
&& new Date(headers['last-modified']));
if (statusCode == 200 && lastModified <= modifiedSince) {
res.writeHead(304, headers);
res.end();
} else if (req.method == 'GET') {
var readStream = fs.createReadStream(pathStr);
res.writeHead(statusCode, headers);
util.pump(readStream, res);
} else {
res.writeHead(statusCode, headers);
res.end();
}
}
});
}
this.handle = handle;
}();
module.exports = CachingMiddleware;
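A minimal usage sketch, mirroring how the express hook above mounts this middleware; the app variable is illustrative:
var CachingMiddleware = require('./caching_middleware');
var assetCache = new CachingMiddleware;
// Cache (and gzip) every GET/HEAD response served under /minified/ and /static/.
app.all('/(minified|static)/*', assetCache.handle);
// A request such as GET /static/js/pad.js is cached on disk as
// var/minified_<base64 of the URL path, with '/', '+' and '=' stripped>,
// plus a .gz sibling that is served to clients sending Accept-Encoding: gzip
// (for text/* responses).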