gh-10649: Implement JSON store length hints (gh-13260)

This commit is contained in:
mr. m
2026-04-15 18:59:40 +02:00
committed by GitHub
parent c128b79723
commit b988f23a14
7 changed files with 667 additions and 20 deletions

View File

@@ -24,6 +24,9 @@ def download_phab_patch(phab_id, output_file):
print(f"Downloading patch from {patch_url}")
response = requests.get(patch_url)
response.raise_for_status() # Raise an error for bad responses
folder = os.path.dirname(output_file)
if not os.path.exists(folder):
os.makedirs(folder)
with open(output_file, 'wb') as f:
f.write(response.content)
print(f"Patch saved to {output_file}")
@@ -51,12 +54,17 @@ def main():
expected_files = set()
for patch in manifest:
if patch.get("type") == "phabricator":
phab_id = patch.get("id")
phab_ids = [patch.get("id")] if patch.get("id") else patch.get("ids", [])
name = patch.get("name")
if not phab_id or not name:
if not phab_ids or not name:
die(f"Patch entry missing 'id' or 'name': {patch}")
name = name.replace(" ", "_").replace(".", "_").lower()
output_file = os.path.join(OUTPUT_DIR, "firefox", f"{name}.patch")
for phab_id in phab_ids:
output_file = (
os.path.join(OUTPUT_DIR, "firefox", f"{name}.patch")
if len(phab_ids) == 1 else
os.path.join(OUTPUT_DIR, "firefox", name, f"{phab_id}.patch")
)
print(f"Processing Phabricator patch: {phab_id} -> {output_file}")
download_phab_patch(phab_id, output_file)
replaces = patch.get("replaces", {})
@@ -64,8 +72,6 @@ def main():
value = replaces[replace]
with open(output_file, 'r') as f:
content = f.read()
if replace not in content:
die(f"Replace string '{replace}' not found in {output_file}")
with open(output_file, 'w') as f:
f.write(content.replace(replace, value))
expected_files.add(output_file)

View File

@@ -0,0 +1,61 @@
diff --git a/js/public/JSON.h b/js/public/JSON.h
--- a/js/public/JSON.h
+++ b/js/public/JSON.h
@@ -24,16 +24,23 @@
* writing stringified data by exactly one call of |callback|, passing |data| as
* argument.
*
* In cases where JSON.stringify would return undefined, this function calls
* |callback| with the string "null".
+ *
+ * If a length hint is passed, space will be reserved for at least that many
+ * characters.
*/
extern JS_PUBLIC_API bool JS_Stringify(JSContext* cx,
JS::MutableHandle<JS::Value> value,
JS::Handle<JSObject*> replacer,
JS::Handle<JS::Value> space,
JSONWriteCallback callback, void* data);
+extern JS_PUBLIC_API bool JS_StringifyWithLengthHint(
+ JSContext* cx, JS::MutableHandle<JS::Value> value,
+ JS::Handle<JSObject*> replacer, JS::Handle<JS::Value> space,
+ JSONWriteCallback callback, void* data, size_t lengthHint);
namespace JS {
/**
* An API akin to JS_Stringify but with the goal of not having observable
diff --git a/js/src/jsapi.cpp b/js/src/jsapi.cpp
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -3663,17 +3663,29 @@
}
JS_PUBLIC_API bool JS_Stringify(JSContext* cx, MutableHandleValue vp,
HandleObject replacer, HandleValue space,
JSONWriteCallback callback, void* data) {
+ return JS_StringifyWithLengthHint(cx, vp, replacer, space, callback, data, 0);
+}
+
+JS_PUBLIC_API bool JS_StringifyWithLengthHint(JSContext* cx,
+ MutableHandleValue vp,
+ HandleObject replacer,
+ HandleValue space,
+ JSONWriteCallback callback,
+ void* data, size_t lengthHint) {
AssertHeapIsIdle();
CHECK_THREAD(cx);
cx->check(replacer, space);
StringBuilder sb(cx);
if (!sb.ensureTwoByteChars()) {
return false;
}
+ if (lengthHint && !sb.reserve(lengthHint)) {
+ return false;
+ }
if (!Stringify(cx, vp, replacer, space, sb, StringifyBehavior::Normal)) {
return false;
}
if (sb.empty() && !sb.append(cx->names().null)) {
return false;

View File

@@ -0,0 +1,251 @@
diff --git a/dom/chrome-webidl/IOUtils.webidl b/dom/chrome-webidl/IOUtils.webidl
--- a/dom/chrome-webidl/IOUtils.webidl
+++ b/dom/chrome-webidl/IOUtils.webidl
@@ -94,23 +94,23 @@
* otherwise rejects with a DOMException.
*/
[NewObject]
Promise<unsigned long long> writeUTF8(DOMString path, UTF8String string, optional WriteOptions options = {});
/**
- * Attempts to serialize |value| into a JSON string and encode it as into a
- * UTF-8 string, then safely write the result to a file at |path|. Works
- * exactly like |write|.
+ * Attempts to serialize |value| into a JSON string and encode it as a UTF-8
+ * string, then safely write the result to a file at |path|. Works exactly
+ * like |write|.
*
* @param path An absolute file path
* @param value The value to be serialized.
* @param options Options for writing the file. The "append" mode is not supported.
*
* @return Resolves with the number of bytes successfully written to the file,
* otherwise rejects with a DOMException.
*/
[NewObject]
- Promise<unsigned long long> writeJSON(DOMString path, any value, optional WriteOptions options = {});
+ Promise<WriteJSONResult> writeJSON(DOMString path, any value, optional WriteJSONOptions options = {});
/**
* Moves the file from |sourcePath| to |destPath|, creating necessary parents.
* If |destPath| is a directory, then the source file will be moved into the
* destination directory.
*
@@ -567,10 +567,39 @@
* If true, compress the data with LZ4-encoding before writing to the file.
*/
boolean compress = false;
};
+/**
+ * Options to be passed to the |IOUtils.writeJSON| method.
+ */
+dictionary WriteJSONOptions: WriteOptions {
+ /**
+ * An optional length hint that will be used to pre-allocate the buffer that
+ * will hold the stringified JSON.
+ *
+ * This is the *length* and not the size (i.e., it is the number of UTF-16
+ * code units and not the number of bytes).
+ */
+ unsigned long long lengthHint = 0;
+};
+
+/**
+ * Information about a WriteJSON operation.
+ */
+dictionary WriteJSONResult {
+ /**
+ * The number of bytes written.
+ */
+ required unsigned long long size;
+
+ /**
+ * The length of the stringified JSON (in UTF-16 code units).
+ */
+ required unsigned long long jsonLength;
+};
+
/**
* Options to be passed to the |IOUtils.move| method.
*/
dictionary MoveOptions {
/**
diff --git a/xpcom/ioutils/IOUtils.h b/xpcom/ioutils/IOUtils.h
--- a/xpcom/ioutils/IOUtils.h
+++ b/xpcom/ioutils/IOUtils.h
@@ -94,11 +94,11 @@
const nsACString& aString, const dom::WriteOptions& aOptions,
ErrorResult& aError);
static already_AddRefed<dom::Promise> WriteJSON(
dom::GlobalObject& aGlobal, const nsAString& aPath,
- JS::Handle<JS::Value> aValue, const dom::WriteOptions& aOptions,
+ JS::Handle<JS::Value> aValue, const dom::WriteJSONOptions& aOptions,
ErrorResult& aError);
static already_AddRefed<dom::Promise> Move(dom::GlobalObject& aGlobal,
const nsAString& aSourcePath,
const nsAString& aDestPath,
@@ -736,13 +736,16 @@
RefPtr<nsIFile> mBackupFile;
RefPtr<nsIFile> mTmpFile;
dom::WriteMode mMode;
bool mFlush = false;
bool mCompress = false;
+ size_t mLengthHint = 0;
static Result<InternalWriteOpts, IOUtils::IOError> FromBinding(
const dom::WriteOptions& aOptions);
+ static Result<InternalWriteOpts, IOUtils::IOError> FromBinding(
+ const dom::WriteJSONOptions& aOptions);
};
/**
* Re-implements the file compression and decompression utilities found
* in toolkit/components/lz4/lz4.js
diff --git a/xpcom/ioutils/IOUtils.cpp b/xpcom/ioutils/IOUtils.cpp
--- a/xpcom/ioutils/IOUtils.cpp
+++ b/xpcom/ioutils/IOUtils.cpp
@@ -589,15 +589,21 @@
return WriteSync(file, AsBytes(Span(str)), opts);
});
});
}
+static bool AppendJSON(const char16_t* aBuf, uint32_t aLen, void* aStr) {
+ nsAString* str = static_cast<nsAString*>(aStr);
+
+ return str->Append(aBuf, aLen, fallible);
+}
+
/* static */
already_AddRefed<Promise> IOUtils::WriteJSON(GlobalObject& aGlobal,
const nsAString& aPath,
JS::Handle<JS::Value> aValue,
- const WriteOptions& aOptions,
+ const WriteJSONOptions& aOptions,
ErrorResult& aError) {
return WithPromiseAndState(
aGlobal, aError, [&](Promise* promise, auto& state) {
nsCOMPtr<nsIFile> file = new nsLocalFile();
REJECT_IF_INIT_PATH_FAILED(file, aPath, promise,
@@ -623,14 +629,15 @@
file->HumanReadablePath().get()));
return;
}
JSContext* cx = aGlobal.Context();
- JS::Rooted<JS::Value> rootedValue(cx, aValue);
+ JS::Rooted<JS::Value> value(cx, aValue);
nsString string;
- if (!nsContentUtils::StringifyJSON(cx, aValue, string,
- UndefinedIsNullStringLiteral)) {
+ if (!JS_StringifyWithLengthHint(cx, &value, nullptr,
+ JS::NullHandleValue, AppendJSON,
+ &string, opts.mLengthHint)) {
JS::Rooted<JS::Value> exn(cx, JS::UndefinedValue());
if (JS_GetPendingException(cx, &exn)) {
JS_ClearPendingException(cx);
promise->MaybeReject(exn);
} else {
@@ -639,22 +646,29 @@
"Could not serialize object to JSON"_ns));
}
return;
}
- DispatchAndResolve<uint32_t>(
+ DispatchAndResolve<dom::WriteJSONResult>(
state->mEventQueue, promise,
[file = std::move(file), string = std::move(string),
- opts = std::move(opts)]() -> Result<uint32_t, IOError> {
+ opts = std::move(opts)]() -> Result<WriteJSONResult, IOError> {
nsAutoCString utf8Str;
if (!CopyUTF16toUTF8(string, utf8Str, fallible)) {
return Err(IOError(
NS_ERROR_OUT_OF_MEMORY,
"Failed to write to `%s': could not allocate buffer",
file->HumanReadablePath().get()));
}
- return WriteSync(file, AsBytes(Span(utf8Str)), opts);
+
+ uint32_t size =
+ MOZ_TRY(WriteSync(file, AsBytes(Span(utf8Str)), opts));
+
+ dom::WriteJSONResult result;
+ result.mSize = size;
+ result.mJsonLength = static_cast<uint32_t>(string.Length());
+ return result;
});
});
}
/* static */
@@ -2840,10 +2854,20 @@
opts.mCompress = aOptions.mCompress;
return opts;
}
+Result<IOUtils::InternalWriteOpts, IOUtils::IOError>
+IOUtils::InternalWriteOpts::FromBinding(const WriteJSONOptions& aOptions) {
+ InternalWriteOpts opts =
+ MOZ_TRY(FromBinding(static_cast<const WriteOptions&>(aOptions)));
+
+ opts.mLengthHint = aOptions.mLengthHint;
+
+ return opts;
+}
+
/* static */
Result<IOUtils::JsBuffer, IOUtils::IOError> IOUtils::JsBuffer::Create(
IOUtils::BufferKind aBufferKind, size_t aCapacity) {
JsBuffer buffer(aBufferKind, aCapacity);
if (aCapacity != 0 && !buffer.mBuffer) {
diff --git a/xpcom/ioutils/tests/test_ioutils_read_write_json.html b/xpcom/ioutils/tests/test_ioutils_read_write_json.html
--- a/xpcom/ioutils/tests/test_ioutils_read_write_json.html
+++ b/xpcom/ioutils/tests/test_ioutils_read_write_json.html
@@ -140,10 +140,43 @@
);
await cleanup(filename);
});
+ add_task(async function test_writeJSON_return() {
+ const filename = PathUtils.join(PathUtils.tempDir, "test_ioutils_writeJSON_return.tmp");
+
+ const obj = { emoji: "☕️ ⚧️ 😀 🖖🏿 🤠 🏳️‍🌈 🥠 🏴‍☠️ 🪐" };
+
+ const expectedJson = JSON.stringify(obj);
+ const size = new TextEncoder().encode(expectedJson).byteLength;
+
+ {
+ const result = await IOUtils.writeJSON(filename, obj, { lengthHint: 0 });
+
+ is(await IOUtils.readUTF8(filename), expectedJson, "should have written expected JSON");
+
+ is(typeof result, "object", "writeJSON returns an object");
+ ok(result !== null, "writeJSON returns non-null");
+
+ ok(Object.hasOwn(result, "size"), "result has size property");
+ ok(Object.hasOwn(result, "jsonLength"), "result has jsonLength property");
+
+ is(result.size, size, "Should have written the expected number of bytes");
+ is(result.jsonLength, expectedJson.length, "Should have written the expected number of UTF-16 code units");
+ }
+
+ {
+ const result = await IOUtils.writeJSON(filename, obj, { lengthHint: expectedJson.length, compress: true });
+
+ isnot(result.size, size, "Should have written a different number of bytes due to compression");
+ is(result.jsonLength, expectedJson.length, "Should have written the same number of UTF-16 code units");
+ }
+
+ await cleanup(filename);
+ });
+
add_task(async function test_append_json() {
const filename = PathUtils.join(PathUtils.tempDir, "test_ioutils_append_json.tmp");
await IOUtils.writeJSON(filename, OBJECT);

View File

@@ -0,0 +1,279 @@
diff --git a/browser/components/sessionstore/SessionFile.sys.mjs b/browser/components/sessionstore/SessionFile.sys.mjs
--- a/browser/components/sessionstore/SessionFile.sys.mjs
+++ b/browser/components/sessionstore/SessionFile.sys.mjs
@@ -503,10 +503,12 @@
if (isFinalWrite) {
Services.obs.notifyObservers(
null,
"sessionstore-final-state-write-complete"
);
+
+ lazy.SessionWriter.deinit();
}
});
},
async wipe() {
diff --git a/browser/components/sessionstore/SessionWriter.sys.mjs b/browser/components/sessionstore/SessionWriter.sys.mjs
--- a/browser/components/sessionstore/SessionWriter.sys.mjs
+++ b/browser/components/sessionstore/SessionWriter.sys.mjs
@@ -6,10 +6,12 @@
ChromeUtils.defineESModuleGetters(lazy, {
sessionStoreLogger: "resource:///modules/sessionstore/SessionLogger.sys.mjs",
});
+const BROWSER_PURGE_SESSION_HISTORY = "browser:purge-session-history";
+
/**
* We just started (we haven't written anything to disk yet) from
* `Paths.clean`. The backup directory may not exist.
*/
const STATE_CLEAN = "clean";
@@ -58,10 +60,14 @@
export const SessionWriter = {
init(origin, useOldExtension, paths, prefs = {}) {
return SessionWriterInternal.init(origin, useOldExtension, paths, prefs);
},
+ deinit() {
+ return SessionWriterInternal.deinit();
+ },
+
/**
* Write the contents of the session file.
*
* @param state - May get changed on shutdown.
*/
@@ -80,10 +86,17 @@
return await SessionWriterInternal.wipe();
} finally {
unlock();
}
},
+
+ /**
+ * *Test Only* Return the SessionWriter's length hint for writing JSON.
+ */
+ get _jsonLengthHint() {
+ return SessionWriterInternal.jsonLengthHint;
+ },
};
const SessionWriterInternal = {
// Path to the files used by the SessionWriter
Paths: null,
@@ -104,10 +117,19 @@
/**
* Number of old upgrade backups that are being kept
*/
maxUpgradeBackups: null,
+ /**
+ * The length (in UTF-16 code units) of the JSON produced by the last
+ * write with IOUtils.writeJSON, used as a length hint for the next write.
+ *
+ * Because SessionWriter writes such a large object graph we will otherwise
+ * spend a large portion of `write()` doing memory allocations and memcpy
+ * when serializing the session file to disk.
+ */
+ jsonLengthHint: 0,
+
/**
* Initialize (or reinitialize) the writer.
*
* @param {string} origin Which of sessionstore.js or its backups
* was used. One of the `STATE_*` constants defined above.
@@ -136,13 +158,20 @@
this.Paths = paths;
this.maxUpgradeBackups = prefs.maxUpgradeBackups;
this.maxSerializeBack = prefs.maxSerializeBack;
this.maxSerializeForward = prefs.maxSerializeForward;
this.upgradeBackupNeeded = paths.nextUpgradeBackup != paths.upgradeBackup;
+
+ Services.obs.addObserver(this, BROWSER_PURGE_SESSION_HISTORY);
+
return { result: true };
},
+ deinit() {
+ Services.obs.removeObserver(this, BROWSER_PURGE_SESSION_HISTORY);
+ },
+
/**
* Write the session to disk.
* Write the session to disk, performing any necessary backup
* along the way.
*
@@ -208,36 +237,42 @@
// We are shutting down. At this stage, we know that
// $Paths.clean is either absent or corrupted. If it was
// originally present and valid, it has been moved to
// $Paths.cleanBackup a long time ago. We can therefore write
// with the guarantees that we erase no important data.
- await IOUtils.writeJSON(this.Paths.clean, state, {
+ const result = await IOUtils.writeJSON(this.Paths.clean, state, {
tmpPath: this.Paths.clean + ".tmp",
compress: true,
+ lengthHint: this.jsonLengthHint,
});
+ this.jsonLengthHint = result.jsonLength;
fileStat = await IOUtils.stat(this.Paths.clean);
} else if (this.state == STATE_RECOVERY) {
// At this stage, either $Paths.recovery was written >= 15
// seconds ago during this session or we have just started
// from $Paths.recovery left from the previous session. Either
// way, $Paths.recovery is good. We can move $Path.backup to
// $Path.recoveryBackup without erasing a good file with a bad
// file.
- await IOUtils.writeJSON(this.Paths.recovery, state, {
+ const result = await IOUtils.writeJSON(this.Paths.recovery, state, {
tmpPath: this.Paths.recovery + ".tmp",
backupFile: this.Paths.recoveryBackup,
compress: true,
+ lengthHint: this.jsonLengthHint,
});
+ this.jsonLengthHint = result.jsonLength;
fileStat = await IOUtils.stat(this.Paths.recovery);
} else {
// In other cases, either $Path.recovery is not necessary, or
// it doesn't exist or it has been corrupted. Regardless,
// don't backup $Path.recovery.
- await IOUtils.writeJSON(this.Paths.recovery, state, {
+ const result = await IOUtils.writeJSON(this.Paths.recovery, state, {
tmpPath: this.Paths.recovery + ".tmp",
compress: true,
+ lengthHint: this.jsonLengthHint,
});
+ this.jsonLengthHint = result.jsonLength;
fileStat = await IOUtils.stat(this.Paths.recovery);
}
telemetry.writeFileMs = Date.now() - startWriteMs;
telemetry.fileSizeBytes = fileStat.size;
@@ -420,6 +455,18 @@
if (exn) {
throw exn;
}
},
+
+ observe(_subject, topic, _data) {
+ switch (topic) {
+ case BROWSER_PURGE_SESSION_HISTORY:
+ this._onPurgeSessionHistory();
+ break;
+ }
+ },
+
+ _onPurgeSessionHistory() {
+ this.jsonLengthHint = 0;
+ },
};
diff --git a/browser/components/sessionstore/test/unit/test_write_json_length_hint.js b/browser/components/sessionstore/test/unit/test_write_json_length_hint.js
new file mode 100644
--- /dev/null
+++ b/browser/components/sessionstore/test/unit/test_write_json_length_hint.js
@@ -0,0 +1,91 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+
+const profile = do_get_profile();
+
+updateAppInfo({
+ name: "SessionRestoreTest",
+ ID: "{230de50e-4cd1-11dc-8314-0800200c9a66}",
+ version: "1",
+ platformVersion: "",
+});
+
+const { SessionFile } = ChromeUtils.importESModule(
+ "resource:///modules/sessionstore/SessionFile.sys.mjs"
+);
+const { SessionWriter } = ChromeUtils.importESModule(
+ "resource:///modules/sessionstore/SessionWriter.sys.mjs"
+);
+
+add_setup(async function setup() {
+ const source = do_get_file("data/sessionstore_valid.js");
+ source.copyTo(profile, "sessionstore.js");
+
+ await writeCompressedFile(
+ SessionFile.Paths.clean.replace("jsonlz4", "js"),
+ SessionFile.Paths.clean
+ );
+
+ await SessionFile.read();
+});
+
+add_task(async function test_json_length_hint() {
+ await IOUtils.writeJSON(PathUtils.join(PathUtils.profileDir, "dingus"), {
+ gunk: true,
+ });
+
+ Assert.equal(
+ SessionWriter._jsonLengthHint,
+ 0,
+ "SessionWriter length hint starts at 0"
+ );
+
+ await SessionFile.write({});
+
+ const lengthHint = SessionWriter._jsonLengthHint;
+
+ Assert.equal(
+ SessionWriter._jsonLengthHint,
+ JSON.stringify({}).length,
+ "SessionWriter should cache length hint"
+ );
+
+ const contents = await IOUtils.readJSON(
+ PathUtils.join(do_get_cwd().path, "data", "sessionstore_complete.json")
+ );
+ await SessionFile.write(contents);
+
+ Assert.notEqual(
+ SessionWriter._jsonLengthHint,
+ lengthHint,
+ "SessionWriter length hint updated"
+ );
+
+ Assert.greater(
+ SessionWriter._jsonLengthHint,
+ lengthHint,
+ "SessionWriter length hint is now larger"
+ );
+
+ Services.obs.notifyObservers(null, "browser:purge-session-history");
+
+ Assert.equal(
+ SessionWriter._jsonLengthHint,
+ 0,
+ "browser:purge-session-history notification cleans length hint"
+ );
+
+ await SessionFile.write(contents);
+
+ Assert.notEqual(
+ SessionWriter._jsonLengthHint,
+ lengthHint,
+ "SessionWriter length hint updated"
+ );
+});
diff --git a/browser/components/sessionstore/test/unit/xpcshell.toml b/browser/components/sessionstore/test/unit/xpcshell.toml
--- a/browser/components/sessionstore/test/unit/xpcshell.toml
+++ b/browser/components/sessionstore/test/unit/xpcshell.toml
@@ -39,5 +39,7 @@
skip-if = [
"condprof", # Bug 1769154
]
["test_startup_session_async.js"]
+
+["test_write_json_length_hint.js"]

View File

@@ -55,5 +55,14 @@
"type": "phabricator",
"id": "D281762",
"name": "FF150 3 PGO patch for bug-2014422"
},
{
"type": "phabricator",
"ids": [
"D247145",
"D247215",
"D247217"
],
"name": "Session store use size hint"
}
]

View File

@@ -0,0 +1,29 @@
diff --git a/toolkit/modules/JSONFile.sys.mjs b/toolkit/modules/JSONFile.sys.mjs
index 397991e4af8f49b6365d729fc11267b5c1113400..1955b7ff1d428e891f5ef066e7a4ac25aa5ec9b4 100644
--- a/toolkit/modules/JSONFile.sys.mjs
+++ b/toolkit/modules/JSONFile.sys.mjs
@@ -132,6 +132,7 @@ export function JSONFile(config) {
this._finalizeInternalBound,
() => ({ sanitizedBasename: this.sanitizedBasename })
);
+ this._useSizeHints = config.useSizeHints ?? false;
}
JSONFile.prototype = {
@@ -423,11 +424,15 @@ JSONFile.prototype = {
}
try {
- await IOUtils.writeJSON(
+ if (this._useSizeHints && this._lastSavedSize) {
+ this._options.lengthHint = this._lastSavedSize;
+ }
+ const result = await IOUtils.writeJSON(
this.path,
this._data,
Object.assign({ tmpPath: this.path + ".tmp" }, this._options)
);
+ this._lastSavedSize = this._useSizeHints ? result.jsonLength : null;
} catch (ex) {
if (typeof this._data.toJSONSafe == "function") {
// If serialization fails, try fallback safe JSON converter.

View File

@@ -64,6 +64,10 @@ class nsZenSidebarObject {
return Cu.cloneInto(this.#sidebar, {});
}
get dataWithoutCloning() {
return this.#sidebar;
}
set data(data) {
if (typeof data !== "object") {
throw new Error("Sidebar data must be an object");
@@ -100,6 +104,10 @@ export class nsZenSessionManager {
path: this.#storeFilePath,
compression: "lz4",
backupTo,
useSizeHints: Services.prefs.getBoolPref(
"zen.session-store.use-size-hints",
true
),
});
this.log("Session file path:", this.#file.path);
this.#deferredBackupTask = new lazy.DeferredTask(async () => {
@@ -412,10 +420,10 @@ export class nsZenSessionManager {
if (
this.#shouldRestoreOnlyPinned &&
!this.#shouldRestoreFromCrash &&
this.#sidebar?.tabs
this.#sidebarWithoutCloning?.tabs
) {
this.log("Restoring only pinned tabs into windows");
const sidebar = this.#sidebar;
const sidebar = this.#sidebarWithoutCloning;
sidebar.tabs = (sidebar.tabs || []).filter(tab => tab.pinned);
this.#sidebar = sidebar;
}
@@ -449,6 +457,10 @@ export class nsZenSessionManager {
return this.#sidebarObject.data;
}
get #sidebarWithoutCloning() {
return this.#sidebarObject.dataWithoutCloning;
}
set #sidebar(data) {
this.#sidebarObject.data = data;
}
@@ -590,7 +602,7 @@ export class nsZenSessionManager {
);
this.#collectWindowData(windows);
// This would save the data to disk asynchronously or when quitting the app.
let sidebar = this.#sidebar;
let sidebar = this.#sidebarWithoutCloning;
this.#file.data = sidebar;
if (soon) {
this.#file.saveSoon();
@@ -897,7 +909,7 @@ export class nsZenSessionManager {
onNewEmptySession(aWindow) {
this.log("Restoring empty session with Zen session data");
aWindow.gZenWorkspaces.restoreWorkspacesFromSessionStore({
spaces: this.#sidebar.spaces || [],
spaces: this.#sidebarWithoutCloning.spaces || [],
});
}
@@ -909,7 +921,7 @@ export class nsZenSessionManager {
* @returns {Array} The cloned spaces data.
*/
getClonedSpaces() {
const sidebar = this.#sidebar;
const sidebar = this.#sidebarWithoutCloning;
if (!sidebar || !sidebar.spaces) {
return [];
}