"use strict";
Object.defineProperty(exports, "__esModule", {
    value: true
});
Object.defineProperty(exports, "useRouterBFCache", {
    enumerable: true,
    get: function() {
        return useRouterBFCache;
    }
});
const _react = require("react");
// When the flag is disabled, only track the currently active tree.
const MAX_BF_CACHE_ENTRIES = process.env.__NEXT_ROUTER_BF_CACHE ? 3 : 1;
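
/**
 * Maintains a small "back/forward cache" of recently active router trees.
 * Entries form a singly linked list ordered from most to least recently
 * active; the head always corresponds to the currently active tree, and the
 * list is capped at MAX_BF_CACHE_ENTRIES.
 *
 * Illustrative (hypothetical) consumption pattern, not the actual call site:
 *
 *   const head = useRouterBFCache(activeTree, activeStateKey);
 *   for (let entry = head; entry !== null; entry = entry.next) {
 *     // keep the segment for entry.stateKey mounted; only the head is visible
 *   }
 */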
function useRouterBFCache(activeTree, activeStateKey) {
    // The currently active entry. The entries form a linked list, sorted in
    // order of most recently active. This allows us to reuse parts of the list
    // without cloning, unless there's a reordering or removal.
    // TODO: Once we start tracking back/forward history at each route level,
    // we should use the history order instead. In other words, when traversing
    // to an existing entry as a result of a popstate event, we should maintain
    // the existing order instead of moving it to the front of the list. I think
    // an initial implementation of this could be to pass an incrementing id
    // to history.pushState/replaceState, then use that here for ordering.
    const [prevActiveEntry, setPrevActiveEntry] = (0, _react.useState)(()=>{
        const initialEntry = {
            tree: activeTree,
            stateKey: activeStateKey,
            next: null
        };
        return initialEntry;
    });
    if (prevActiveEntry.tree === activeTree) {
        // Fast path. The active tree hasn't changed, so we can reuse the
        // existing state.
        return prevActiveEntry;
    }
    // The route tree changed. Note that this doesn't mean that the tree changed
    // *at this level*; the change may be due to a child route. Either way, we
    // need to either add or update the router tree in the bfcache.
    //
    // The rest of the code looks more complicated than it actually is because we
    // can't mutate the state in place; we have to copy-on-write.
    //
    // Create a new entry for the active cache key. This is the head of the new
    // linked list.
    const newActiveEntry = {
        tree: activeTree,
        stateKey: activeStateKey,
        next: null
    };
    // We need to append the old list onto the new list. If the head of the new
    // list was already present in the cache, then we'll need to clone everything
    // that came before it. Then we can reuse the rest.
    let n = 1;
    let oldEntry = prevActiveEntry;
    let clonedEntry = newActiveEntry;
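    // Loop invariant: `clonedEntry` is the tail of the new list and `n` is its
    // length (the new head counts as 1); `oldEntry` walks the previous list.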
    while(oldEntry !== null && n < MAX_BF_CACHE_ENTRIES){
        if (oldEntry.stateKey === activeStateKey) {
            // Fast path. This is the entry in the old list that corresponds to the
            // key that is now active. We've already placed a clone of this entry at
            // the front of the new list, so we can reuse the rest of the old list
            // without cloning.
            // NOTE: We don't need to worry about eviction in this case because we
            // haven't increased the size of the cache, and we assume the max size
            // is constant across renders. If we were to change it to a dynamic limit,
            // then the implementation would need to account for that.
            clonedEntry.next = oldEntry.next;
            break;
        } else {
            // Clone the entry and append it to the new list.
            n++;
            const entry = {
                tree: oldEntry.tree,
                stateKey: oldEntry.stateKey,
                next: null
            };
            clonedEntry.next = entry;
            clonedEntry = entry;
        }
        oldEntry = oldEntry.next;
    }
    setPrevActiveEntry(newActiveEntry);
    return newActiveEntry;
}
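
// CommonJS interop shim appended by the build output (an assumption about its
// origin): if `exports.default` were a function or a non-null object without
// its own `__esModule` marker, the named exports would be copied onto it and
// it would replace `module.exports`. This module has no default export, so
// the branch below is effectively a no-op here.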
if ((typeof exports.default === 'function' || (typeof exports.default === 'object' && exports.default !== null)) && typeof exports.default.__esModule === 'undefined') {
    Object.defineProperty(exports.default, '__esModule', { value: true });
    Object.assign(exports.default, exports);
    module.exports = exports.default;
}
//# sourceMappingURL=bfcache.js.map