Protect multiple RSA requesters from each other (#866)
If two ReactServerAgent requests are made to a given endpoint, only one
upstream HTTP request is actually issued, and the result is provided to both
requesters.  Previously this result was passed by reference, so mutations
by one requester interfered with the data for others.
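
As an illustrative sketch (not code from this repository), the hazard with a
shared reference looks like this:

	// Hypothetical example of the old behavior: both requesters get the same object.
	const cached = { items: [1, 2, 3] };
	const resA = cached;            // requester A
	const resB = cached;            // requester B
	resA.items.push(4);             // A mutates what it believes is its own data
	console.log(resB.items.length); // 4 -- B sees A's mutation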

This patch provides a fresh deep copy to each requester.

This has the unfortunate side effect of introducing a deep copy in the browser
where we previously thought we could get away without one.  It's a minor perf
hit, but it's important for data integrity.
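
The mechanism, as a minimal sketch (class and member names here are simplified
stand-ins, not the exact Cache.js API):

	// Sketch: serialize once when the response arrives, parse per requester.
	class CacheEntrySketch {
		constructor(deferred) {
			this.dfd = deferred; // assumed: an object exposing .resolve() and .promise
		}
		setResponseData(res) {
			// Resolving with a string leaves the cached original untouched.
			this.dfd.resolve(JSON.stringify(res));
		}
		whenDataReady() {
			// Each caller parses its own fresh copy, so mutations can't leak
			// between requesters.
			return this.dfd.promise.then(val => JSON.parse(val));
		}
	}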
gigabo authored Mar 10, 2017
1 parent 9f68b0f commit 35bb392
Showing 2 changed files with 26 additions and 22 deletions.
38 changes: 18 additions & 20 deletions packages/react-server/core/ReactServerAgent/Cache.js
@@ -177,22 +177,16 @@ class CacheEntry {
 		this.res = res;
 		this.loaded = true;
 
-		if (SERVER_SIDE){
-
-			// Deep copy.
-			//
-			// Leave ourselves with a clean copy of the original
-			// response regardless of what mutation might happen
-			// once stores get ahold of it.
-			//
-			// This is important to ensure that we provide the same
-			// data from the cache when we wake up in the browser
-			// as we initially provide on the server.
-			//
-			res = JSON.parse(JSON.stringify(res));
-		}
-
-		this.dfd.resolve(res);
+		// Resolve with a serialized copy. We'll unserialize for each
+		// requester. This way we provide a fresh copy each time so mutations
+		// don't leak.
+		//
+		// This also leaves _us_ with a clean copy of the original response.
+		// This is important to ensure that we provide the same data from the
+		// cache when we wake up in the browser as we initially provide on the
+		// server.
+		//
+		this.dfd.resolve(JSON.stringify(res));
 	}
 
 	setError (err) {
@@ -218,17 +212,21 @@ class CacheEntry {
 		this.dfd.reject(err);
 	}
 
+	_parsePromise(dfd) {
+		return dfd.promise.then(val => JSON.parse(val));
+	}
+
 	whenDataReady () {
 		if (SERVER_SIDE) {
 			// server-side, we increment the number of requesters
 			// we expect to retrieve the data on the frontend
 			this.requesters += 1;
-			return this.dfd.promise;
+			return this._parsePromise(this.dfd);
 		} else {
 			// client-side, whenever someone retrieves data from the cache,
 			// we decrement the number of retrievals expected, and when we
 			// hit zero, remove the cache entry.
-			return this._requesterDecrementingPromise(this.dfd.promise);
+			return this._requesterDecrementingPromise(this.dfd);
 		}
 	}
 
@@ -250,11 +248,11 @@ class CacheEntry {
 	 * Chain a promise with another promise that decrements
 	 * the number of expected requesters.
 	 */
-	_requesterDecrementingPromise (promise) {
+	_requesterDecrementingPromise (dfd) {
 		// regardless of whether we're resolved with a 'res' or 'err',
 		// we want to decrement requests. the appropriate 'success' or 'error'
 		// callback will be executed on whatever is chained after this method
-		return promise.fin( resOrErr => {
+		return this._parsePromise(dfd).fin( resOrErr => {
 			this.decrementRequesters();
 			return resOrErr;
 		});
@@ -537,7 +537,10 @@ describe("ReactServerAgent", () => {
 			]).then(results => {
 				var [res1, res2] = results;
 
-				expect(res1).toBe(res2);
+				expect(res1).toEqual(res2);
+
+				// Must be a deep copy, not a reference.
+				expect(res1).not.toBe(res2);
 
 				var cache = ReactServerAgent.cache();
 				var dehydrated = cache.dehydrate();
@@ -566,7 +569,10 @@ describe("ReactServerAgent", () => {
 			]).then(results => {
 				var [res1, res2] = results;
 
-				expect(res1).toBe(res2);
+				expect(res1).toEqual(res2);
+
+				// Must be a deep copy, not a reference.
+				expect(res1).not.toBe(res2);
 
 				var cache = ReactServerAgent.cache();
 				var dehydrated = cache.dehydrate();