Mirror of https://github.com/ether/etherpad-lite.git, synced 2025-04-28 11:26:16 -04:00
Fixed transition algo not incrementing start
Added HUGE (1000 revs) bucket. There are still some libchangeset errors which need fixing.
parent 805e492e74
commit b4baba6155
1 changed file with 35 additions and 16 deletions

@@ -75,7 +75,7 @@ $.Class("Revision",
 {//statics
   // we rely on the fact that granularities are always traversed biggest to
   // smallest. Changing this will break lots of stuff.
-  granularities: {big: 100, medium: 10, small: 1}
+  granularities: {huge: 1000, big: 100, medium: 10, small: 1}
 },
 {//instance
   /**
@@ -311,7 +311,6 @@ $.Class("RevisionCache",
       // build a path from the current revision (the start of the range
       // in the response) to the target.
       res = _this.findPath(_this.getRevision(current_revnum), target_revision);
-      console.log(res);
       console.log(print_path(res.path));
       // we can now request changesets from the end of that partial path
       // to the target:
@@ -360,17 +359,24 @@ $.Class("RevisionCache",
     //At the moment if you request changesets from 2 -> 12, it will request at granularity 10.
     //Not sure if we shouldn't only request granularities > 1 when we have a strict multiple of 10,100 etc.
     //This is compounded by the fact that revisions are 1 based!
+    //TODO: we need to deal with the case where we need MORE THAN 100 of a particular granularity
     //console.log("[requestChangesets] start: %d, end: %d, delta: %d, adelta: %d", start, end, delta, adelta);
     for (var g in Revision.granularities) {
       var granularity = Revision.granularities[g];
       var remainder = Math.floor(adelta / granularity);
       //console.log("\t[requestChangesets] start: %d, granularity: %d, adelta: %d, //: %d", start, granularity, adelta, remainder);
       //console.log("\t rounddown delta: %d, start: %d", rounddown(adelta, granularity), rounddown(start, granularity));
+      //console.log("\t new start:", newstart);
       if (remainder) {
         //this.loader.enqueue(start, granularity, process_received_changesets);
-        //console.log("\t[requestChangesets] REQUEST start: %d, end: %d, granularity: %d", rounddown(start, granularity), roundup(adelta, granularity), granularity);
+        console.log("\t[requestChangesets] REQUEST start: %d, end: %d, granularity: %d", rounddown(start, granularity), roundup(adelta, granularity), granularity);
         this.loader.enqueue(rounddown(start, granularity), granularity, process_received_changesets);
       }
+      // for the next granularity, we assume that we have now successfully navigated
+      // as far as required for this granularity. We should also make sure that only
+      // the significant part of the adelta is used in the next granularity.
+      start = rounddown(start, granularity) + rounddown(adelta, granularity);
+      adelta = adelta - rounddown(adelta, granularity);
     }
   },
 }
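
Note: the fix named in the commit message is the pair of bookkeeping lines added at the end of the loop body above; without them, every granularity pass kept requesting from the original start. A rough standalone sketch of the same walk (the rounddown helper and the planRequests wrapper are illustrative stand-ins, not the real loader API) shows how the window is meant to advance:

// Minimal sketch of the granularity walk in requestChangesets (names are
// illustrative). Buckets are traversed biggest to smallest; after each pass
// the start is moved forward and only the remainder of the delta is carried
// into the next, smaller granularity.
var granularities = {huge: 1000, big: 100, medium: 10, small: 1};

function rounddown(value, granularity) {
  return Math.floor(value / granularity) * granularity;
}

function planRequests(start, adelta) {
  var requests = [];
  for (var g in granularities) {
    var granularity = granularities[g];
    if (Math.floor(adelta / granularity)) {
      // one request per granularity that still fits into the remaining delta
      requests.push({start: rounddown(start, granularity), granularity: granularity});
    }
    // the part that was missing before this commit: advance the window and
    // carry only the remainder of the delta into the smaller buckets
    start = rounddown(start, granularity) + rounddown(adelta, granularity);
    adelta = adelta - rounddown(adelta, granularity);
  }
  return requests;
}

// e.g. planRequests(2, 1234) yields one request per bucket, each starting
// where the previous, coarser bucket left off
console.log(planRequests(2, 1234));
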
@@ -452,11 +458,10 @@ Thread("ChangesetLoader",
   init: function (connection) {
     this._super(200);
     this.connection = connection;
-    this.queues = {
-      small: [],
-      medium: [],
-      large: [],
-    };
+    this.queues = {};
+    for (var granularity in Revision.granularities) {
+      this.queues[granularity] = [];
+    }
     this.pending = {};
     var _this = this;
     this.connection.on("CHANGESET_REQ", function () {
@@ -492,15 +497,15 @@ Thread("ChangesetLoader",
   enqueue: function (start, granularity, callback) {
     console.log("[changeset_loader] enqueue: %d, %d", start, granularity);
     //TODO: check cache to see if we really need to fetch this
-    // maybe even to splices if we just need a smaller range
+    // maybe even do splices if we just need a smaller range
     // in the middle
     var queue = null;
-    if (granularity == 1)
-      queue = this.queues.small;
-    else if (granularity == 10)
-      queue = this.queues.medium;
-    else
-      queue = this.queues.large;
+    for (var g in Revision.granularities) {
+      if (granularity == Revision.granularities[g]) {
+        queue = this.queues[g];
+        break;
+      }
+    }

     var request = new ChangesetRequest(start, granularity, callback);
     if (! (request.getRequestID() in this.pending))
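
Note: with the queues now derived from Revision.granularities, the new huge bucket (and any future bucket) is picked up automatically by both init and enqueue. A small sketch of that lookup, with plain objects standing in for ChangesetLoader and its requests:

// Sketch of the queue handling after this change (simplified; the real code
// lives on the ChangesetLoader thread). Queue names come straight from the
// granularities map, so no loader code hard-codes small/medium/large anymore.
var granularities = {huge: 1000, big: 100, medium: 10, small: 1};

function makeQueues() {
  var queues = {};
  for (var name in granularities) {
    queues[name] = [];
  }
  return queues;
}

function enqueue(queues, request) {
  // map the numeric granularity back to its named queue
  for (var name in granularities) {
    if (request.granularity == granularities[name]) {
      queues[name].push(request);
      return name;
    }
  }
  return null; // unknown granularity
}

var queues = makeQueues();
console.log(enqueue(queues, {start: 0, granularity: 1000})); // "huge"
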
@@ -518,6 +523,12 @@ Thread("ChangesetLoader",
       if (queue.length > 0) {
         // TODO: pop and handle
         var request = queue.pop();
+        if (request.getRequestID() in this.pending) {
+          //this request is already pending!
+          var id = request.getRequestID();
+          console.log("ALREADY PENDING REQUEST: %d, start: %d, granularity: %d", id, id & 0xffff, id >> 16);
+          continue;
+        }
         //TODO: test AGAIN to make sure that it hasn't been retrieved and cached by
         //a previous request. This should handle the case when two requests for the
         //same changesets are enqueued (which would be fine, as at enqueue time, we
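
Note: the new duplicate check logs id & 0xffff as the start and id >> 16 as the granularity, which suggests getRequestID packs both values into one integer. That packing is not shown in this diff, so the scheme below is only an assumption for illustration:

// Assumed packing behind getRequestID (not part of this commit): granularity
// in the high bits, start revision in the low 16 bits (so start must fit in
// 16 bits for the round trip to be lossless).
function makeRequestID(start, granularity) {
  return (granularity << 16) | (start & 0xffff);
}

var id = makeRequestID(300, 100);
console.log(id & 0xffff); // 300  (start)
console.log(id >> 16);    // 100  (granularity)
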
@@ -678,7 +689,15 @@ $.Class("PadClient",
       for (p in path) {
         changeset = path[p];
         time += changeset.deltatime * 1000;
+        try {
           changeset.apply(_this);
+        } catch (err) {
+          console.log("Error applying changeset: ");
+          console.log("\t", changeset.value);
+          console.log("\t %d -> %d ", changeset.from_revision.revnum, changeset.to_revision.revnum);
+          console.log(err);
+          console.log("--------------");
+        }
       }
     }