diff --git a/accessible/generic/DocAccessible.cpp b/accessible/generic/DocAccessible.cpp index 31c578e7e179912b0c760ac030bcc5048a0597bf..02451b1c46819b581e6a5767273b1fd99d8459e6 100644 --- a/accessible/generic/DocAccessible.cpp +++ b/accessible/generic/DocAccessible.cpp @@ -2144,6 +2144,11 @@ DocAccessible::DoARIAOwnsRelocation(Accessible* aOwner) // A new child is found, check for loops. if (child->Parent() != aOwner) { + // Child is aria-owned by another container, skip. + if (child->IsRelocated()) { + continue; + } + Accessible* parent = aOwner; while (parent && parent != child && !parent->IsDoc()) { parent = parent->Parent(); @@ -2155,8 +2160,10 @@ DocAccessible::DoARIAOwnsRelocation(Accessible* aOwner) } if (MoveChild(child, aOwner, insertIdx)) { + nsTArray<RefPtr<Accessible> >* relocated = mARIAOwnsHash.LookupOrAdd(aOwner); + MOZ_ASSERT(relocated == owned); child->SetRelocated(true); - owned->InsertElementAt(idx, child); + relocated->InsertElementAt(idx, child); idx++; } } @@ -2222,7 +2229,8 @@ DocAccessible::PutChildrenBack(nsTArray<RefPtr<Accessible> >* aChildren, // after load: $("list").setAttribute("aria-owns", "a b"); // later: $("list").setAttribute("aria-owns", ""); if (origContainer != owner || child->IndexInParent() != idxInParent) { - MoveChild(child, origContainer, idxInParent); + DebugOnly<bool> moved = MoveChild(child, origContainer, idxInParent); + MOZ_ASSERT(moved, "Failed to put child back."); } else { MOZ_ASSERT(!child->PrevSibling() || !child->PrevSibling()->IsRelocated(), "No relocated child should appear before this one"); @@ -2245,6 +2253,10 @@ DocAccessible::MoveChild(Accessible* aChild, Accessible* aNewParent, Accessible* curParent = aChild->Parent(); + if (!aNewParent->IsAcceptableChild(aChild->GetContent())) { + return false; + } + #ifdef A11Y_LOG logging::TreeInfo("move child", 0, "old parent", curParent, "new parent", aNewParent, @@ -2276,10 +2288,6 @@ DocAccessible::MoveChild(Accessible* aChild, Accessible* aNewParent, return 
true; } - if (!aNewParent->IsAcceptableChild(aChild->GetContent())) { - return false; - } - MOZ_ASSERT(aIdxInParent <= static_cast<int32_t>(aNewParent->ChildCount()), "Wrong insertion point for a moving child"); diff --git a/accessible/tests/browser/e10s/browser_treeupdate_ariaowns.js b/accessible/tests/browser/e10s/browser_treeupdate_ariaowns.js index dbcf69599398adc207a18a42881e8dfb18fffa75..a186c8df12d2ec56acd095b1f7aa47356a7a56ff 100644 --- a/accessible/tests/browser/e10s/browser_treeupdate_ariaowns.js +++ b/accessible/tests/browser/e10s/browser_treeupdate_ariaowns.js @@ -189,25 +189,9 @@ async function stealAndRecacheChildren(browser, accDoc) { const acc1 = findAccessibleChildByID(accDoc, id1); const acc2 = findAccessibleChildByID(accDoc, id2); - /* ================ Steal from other ARIA owns ============================ */ + /* ================ Attempt to steal from other ARIA owns ================= */ let onReorder = waitForEvent(EVENT_REORDER, id2); await invokeSetAttribute(browser, id2, "aria-owns", "t3_child"); - await onReorder; - - let tree = { - SECTION: [ ] - }; - testAccessibleTree(acc1, tree); - - tree = { - SECTION: [ - { CHECKBUTTON: [ ] } - ] - }; - testAccessibleTree(acc2, tree); - - /* ================ Append element to recache children ==================== */ - onReorder = waitForEvent(EVENT_REORDER, id2); await ContentTask.spawn(browser, id2, id => { let div = content.document.createElement("div"); div.setAttribute("role", "radio"); @@ -215,15 +199,16 @@ async function stealAndRecacheChildren(browser, accDoc) { }); await onReorder; - tree = { - SECTION: [ ] + let tree = { + SECTION: [ + { CHECKBUTTON: [ ] } // ARIA owned + ] }; testAccessibleTree(acc1, tree); tree = { SECTION: [ - { RADIOBUTTON: [ ] }, - { CHECKBUTTON: [ ] } // ARIA owned + { RADIOBUTTON: [ ] } ] }; testAccessibleTree(acc2, tree); diff --git a/accessible/tests/browser/tree/browser_test_aria_owns.js b/accessible/tests/browser/tree/browser_test_aria_owns.js index 
2194189a398ca94f295e32a6764db2a206d47024..0c3c24020212ef3af2064117fa308456b30c644c 100644 --- a/accessible/tests/browser/tree/browser_test_aria_owns.js +++ b/accessible/tests/browser/tree/browser_test_aria_owns.js @@ -55,25 +55,34 @@ async function runTests(browser, accDoc) { await onReorders; + // aria-owned child should be after ordinal children. testChildrenIds(one, ["aa", "a"]); - onReorders = waitForEvents([ - [EVENT_REORDER, "two"], // "b" will go to "three" - [EVENT_REORDER, "three"], // some children will be reclaimed and acquired - [EVENT_REORDER, "one"]]); // removing aria-owns will reorder native children + onReorders = waitForEvent(EVENT_REORDER, "one"); await ContentTask.spawn(browser, null, async function() { // removing aria-owns should reorder the children document.getElementById("one").removeAttribute("aria-owns"); - // child order will be overridden by aria-owns - document.getElementById("three").setAttribute("aria-owns", "b d"); }); await onReorders; + // with no aria-owns, layout order should prevail. 
testChildrenIds(one, ["a", "aa"]); - testChildrenIds(two, ["c"]); - testChildrenIds(three, ["b", "d"]); + + onReorders = waitForEvents([ + [EVENT_REORDER, "four"], // "b" will go to "four" + [EVENT_REORDER, "two"]]); // some children will be reclaimed and acquired + + await ContentTask.spawn(browser, null, async function() { + // child order will be overridden by aria-owns + document.getElementById("four").setAttribute("aria-owns", "b e"); + }); + + await onReorders; + + testChildrenIds(four, ["b", "e"]); + testChildrenIds(two, ["d", "c"]); } /** diff --git a/accessible/tests/mochitest/treeupdate/test_ariaowns.html b/accessible/tests/mochitest/treeupdate/test_ariaowns.html index d9cf911f48150cb6c5a28efe446e81d683badff1..7d88b9a0698b3dc3f64be8a8204547e741c68396 100644 --- a/accessible/tests/mochitest/treeupdate/test_ariaowns.html +++ b/accessible/tests/mochitest/treeupdate/test_ariaowns.html @@ -306,30 +306,38 @@ } /** - * Steal an element from other ARIA owns element. This use case guarantees - * that result of setAttribute/removeAttribute doesn't depend on their order. + * Attempt to steal an element from other ARIA owns element. This should + * not be possible. The only child that will get owned into this + * container is a previously not aria-owned one. 
*/ function stealFromOtherARIAOwns() { this.eventSeq = [ - new invokerChecker(EVENT_REORDER, getNode("t3_container2")) + new invokerChecker(EVENT_REORDER, getNode("t3_container3")) ]; this.invoke = function stealFromOtherARIAOwns_invoke() { - getNode("t3_container2").setAttribute("aria-owns", "t3_child"); + getNode("t3_container3").setAttribute("aria-owns", "t3_child t3_child2"); } this.finalCheck = function stealFromOtherARIAOwns_finalCheck() { var tree = { SECTION: [ + { CHECKBUTTON: [ + ] } ] }; testAccessibleTree("t3_container1", tree); + tree = + { SECTION: [ + ] }; + testAccessibleTree("t3_container2", tree); + tree = { SECTION: [ { CHECKBUTTON: [ ] } ] }; - testAccessibleTree("t3_container2", tree); + testAccessibleTree("t3_container3", tree); } this.getID = function stealFromOtherARIAOwns_getID() { @@ -339,27 +347,27 @@ function appendElToRecacheChildren() { this.eventSeq = [ - new invokerChecker(EVENT_REORDER, getNode("t3_container2")) + new invokerChecker(EVENT_REORDER, getNode("t3_container3")) ]; this.invoke = function appendElToRecacheChildren_invoke() { var div = document.createElement("div"); div.setAttribute("role", "radio") - getNode("t3_container2").appendChild(div); + getNode("t3_container3").appendChild(div); } this.finalCheck = function appendElToRecacheChildren_finalCheck() { var tree = { SECTION: [ ] }; - testAccessibleTree("t3_container1", tree); + testAccessibleTree("t3_container2", tree); tree = { SECTION: [ { RADIOBUTTON: [ ] }, { CHECKBUTTON: [ ] } // ARIA owned ] }; - testAccessibleTree("t3_container2", tree); + testAccessibleTree("t3_container3", tree); } this.getID = function appendElToRecacheChildren_getID() { @@ -749,7 +757,10 @@ <div id="t3_container1" aria-owns="t3_child"></div> <div id="t3_child" role="checkbox"></div> - <div id="t3_container2"></div> + <div id="t3_container2"> + <div id="t3_child2" role="checkbox"></div> + </div> + <div id="t3_container3"></div> <div id="t4_container1" aria-owns="t4_child1 t4_child2"></div> <div 
id="t4_container2"> diff --git a/browser/base/content/browser-places.js b/browser/base/content/browser-places.js index 24d95fbd7affa500ced0b7ccc97320460040f267..2ef6c2240da699b66864975ac831acc7f3340627 100644 --- a/browser/base/content/browser-places.js +++ b/browser/base/content/browser-places.js @@ -1931,8 +1931,8 @@ var BookmarkingUI = { }, onStarCommand(aEvent) { - // Ignore clicks on the star if we are updating its state. - if (!this._pendingUpdate) { + // Ignore non-left clicks on the star, or if we are updating its state. + if (!this._pendingUpdate && (aEvent.type != "click" || aEvent.button == 0)) { let isBookmarked = this._itemGuids.size > 0; // Disable the old animation in photon if (!isBookmarked && !AppConstants.MOZ_PHOTON_THEME) diff --git a/browser/components/downloads/DownloadsCommon.jsm b/browser/components/downloads/DownloadsCommon.jsm index a95f63586f387cdc390cb7188fe44e2fb8a24d47..4ce0b43750bcc468c96e5e2d241b6ab2123f1bd5 100644 --- a/browser/components/downloads/DownloadsCommon.jsm +++ b/browser/components/downloads/DownloadsCommon.jsm @@ -196,16 +196,22 @@ this.DownloadsCommon = { }, /** - * Get access to one of the DownloadsData or PrivateDownloadsData objects, - * depending on the privacy status of the window in question. + * Get access to one of the DownloadsData, PrivateDownloadsData, or + * HistoryDownloadsData objects, depending on the privacy status of the + * specified window and on whether history downloads should be included. * - * @param aWindow + * @param window * The browser window which owns the download button. + * @param [optional] history + * True to include history downloads when the window is public. 
*/ - getData(aWindow) { - if (PrivateBrowsingUtils.isContentWindowPrivate(aWindow)) { + getData(window, history = false) { + if (PrivateBrowsingUtils.isContentWindowPrivate(window)) { return PrivateDownloadsData; } + if (history) { + return HistoryDownloadsData; + } return DownloadsData; }, @@ -284,17 +290,6 @@ this.DownloadsCommon = { return DownloadsCommon.DOWNLOAD_NOTSTARTED; }, - /** - * Helper function required because the Downloads Panel and the Downloads View - * don't share the controller yet. - */ - removeAndFinalizeDownload(download) { - Downloads.getList(Downloads.ALL) - .then(list => list.remove(download)) - .then(() => download.finalize(true)) - .catch(Cu.reportError); - }, - /** * Given an iterable collection of Download objects, generates and returns * statistics about that collection. @@ -649,24 +644,34 @@ XPCOMUtils.defineLazyGetter(DownloadsCommon, "isWinVistaOrHigher", function() { * downloads. This is useful to display a neutral progress indicator in * the main browser window until the autostart timeout elapses. * - * Note that DownloadsData and PrivateDownloadsData are two equivalent singleton - * objects, one accessing non-private downloads, and the other accessing private - * ones. + * This powers the DownloadsData, PrivateDownloadsData, and HistoryDownloadsData + * singleton objects. */ -function DownloadsDataCtor(aPrivate) { - this._isPrivate = aPrivate; +function DownloadsDataCtor({ isPrivate, isHistory } = {}) { + this._isPrivate = !!isPrivate; // Contains all the available Download objects and their integer state. this.oldDownloadStates = new Map(); + // For the history downloads list we don't need to register this as a view, + // but we have to ensure that the DownloadsData object is initialized before + // we register more views. This ensures that the view methods of DownloadsData + // are invoked before those of views registered on HistoryDownloadsData, + // allowing the endTime property to be set correctly. 
+ if (isHistory) { + DownloadsData.initializeDataLink(); + this._promiseList = DownloadsData._promiseList + .then(() => DownloadHistory.getList()); + return; + } + // This defines "initializeDataLink" and "_promiseList" synchronously, then // continues execution only when "initializeDataLink" is called, allowing the // underlying data to be loaded only when actually needed. this._promiseList = (async () => { await new Promise(resolve => this.initializeDataLink = resolve); - - let list = await Downloads.getList(this._isPrivate ? Downloads.PRIVATE - : Downloads.PUBLIC); + let list = await Downloads.getList(isPrivate ? Downloads.PRIVATE + : Downloads.PUBLIC); await list.addView(this); return list; })(); @@ -710,7 +715,9 @@ DownloadsDataCtor.prototype = { * is only called after the data link has been initialized. */ removeFinished() { - this._promiseList.then(list => list.removeFinished()).catch(Cu.reportError); + Downloads.getList(this._isPrivate ? Downloads.PRIVATE : Downloads.PUBLIC) + .then(list => list.removeFinished()) + .catch(Cu.reportError); let indicatorData = this._isPrivate ? 
PrivateDownloadsIndicatorData : DownloadsIndicatorData; indicatorData.attention = DownloadsCommon.ATTENTION_NONE; @@ -835,12 +842,16 @@ DownloadsDataCtor.prototype = { } }; +XPCOMUtils.defineLazyGetter(this, "HistoryDownloadsData", function() { + return new DownloadsDataCtor({ isHistory: true }); +}); + XPCOMUtils.defineLazyGetter(this, "PrivateDownloadsData", function() { - return new DownloadsDataCtor(true); + return new DownloadsDataCtor({ isPrivate: true }); }); XPCOMUtils.defineLazyGetter(this, "DownloadsData", function() { - return new DownloadsDataCtor(false); + return new DownloadsDataCtor(); }); // DownloadsViewPrototype diff --git a/browser/components/downloads/DownloadsViewUI.jsm b/browser/components/downloads/DownloadsViewUI.jsm index ef366e0b94c038aacda1cb62f62116bda63ae0c0..0c977a19873879b1ea2ded4a1f33267c24324eb5 100644 --- a/browser/components/downloads/DownloadsViewUI.jsm +++ b/browser/components/downloads/DownloadsViewUI.jsm @@ -25,6 +25,8 @@ XPCOMUtils.defineLazyModuleGetter(this, "DownloadsCommon", "resource:///modules/DownloadsCommon.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "OS", "resource://gre/modules/osfile.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "PlacesUtils", + "resource://gre/modules/PlacesUtils.jsm"); this.DownloadsViewUI = { /** @@ -364,6 +366,8 @@ this.DownloadsViewUI.DownloadElementShell.prototype = { case "downloadsCmd_unblock": case "downloadsCmd_unblockAndOpen": return this.download.hasBlockedData; + case "downloadsCmd_cancel": + return this.download.hasPartialData || !this.download.stopped; } return false; }, @@ -390,4 +394,20 @@ this.DownloadsViewUI.DownloadElementShell.prototype = { downloadsCmd_confirmBlock() { this.download.confirmBlock().catch(Cu.reportError); }, + + cmd_delete() { + (async () => { + // Remove the associated history element first, if any, so that the views + // that combine history and session downloads won't resurrect the history + // download into the view just before it is deleted 
permanently. + try { + await PlacesUtils.history.remove(this.download.source.url); + } catch (ex) { + Cu.reportError(ex); + } + let list = await Downloads.getList(Downloads.ALL); + await list.remove(this.download); + await this.download.finalize(true); + })().catch(Cu.reportError); + }, }; diff --git a/browser/components/downloads/content/allDownloadsViewOverlay.js b/browser/components/downloads/content/allDownloadsViewOverlay.js index 64052c0259f113db327b527a33b3c46dc072544c..0a3f6c58c63735157995c19aba936c9d642bec65 100644 --- a/browser/components/downloads/content/allDownloadsViewOverlay.js +++ b/browser/components/downloads/content/allDownloadsViewOverlay.js @@ -7,8 +7,8 @@ var { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components; Cu.import("resource://gre/modules/XPCOMUtils.jsm"); -XPCOMUtils.defineLazyModuleGetter(this, "DownloadUtils", - "resource://gre/modules/DownloadUtils.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "Downloads", + "resource://gre/modules/Downloads.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "DownloadsCommon", "resource:///modules/DownloadsCommon.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "DownloadsViewUI", @@ -19,152 +19,11 @@ XPCOMUtils.defineLazyModuleGetter(this, "NetUtil", "resource://gre/modules/NetUtil.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "OS", "resource://gre/modules/osfile.jsm"); -XPCOMUtils.defineLazyModuleGetter(this, "PlacesUtils", - "resource://gre/modules/PlacesUtils.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "RecentWindow", "resource:///modules/RecentWindow.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "Services", "resource://gre/modules/Services.jsm"); -const DESTINATION_FILE_URI_ANNO = "downloads/destinationFileURI"; -const DOWNLOAD_META_DATA_ANNO = "downloads/metaData"; - -/** - * Represents a download from the browser history. It implements part of the - * interface of the Download object. 
- * - * @param aPlacesNode - * The Places node from which the history download should be initialized. - */ -function HistoryDownload(aPlacesNode) { - // TODO (bug 829201): history downloads should get the referrer from Places. - this.source = { - url: aPlacesNode.uri, - }; - this.target = { - path: undefined, - exists: false, - size: undefined, - }; - - // In case this download cannot obtain its end time from the Places metadata, - // use the time from the Places node, that is the start time of the download. - this.endTime = aPlacesNode.time / 1000; -} - -HistoryDownload.prototype = { - /** - * Pushes information from Places metadata into this object. - */ - updateFromMetaData(metaData) { - try { - this.target.path = Cc["@mozilla.org/network/protocol;1?name=file"] - .getService(Ci.nsIFileProtocolHandler) - .getFileFromURLSpec(metaData.targetFileSpec).path; - } catch (ex) { - this.target.path = undefined; - } - - if ("state" in metaData) { - this.succeeded = metaData.state == DownloadsCommon.DOWNLOAD_FINISHED; - this.canceled = metaData.state == DownloadsCommon.DOWNLOAD_CANCELED || - metaData.state == DownloadsCommon.DOWNLOAD_PAUSED; - this.endTime = metaData.endTime; - - // Recreate partial error information from the state saved in history. - if (metaData.state == DownloadsCommon.DOWNLOAD_FAILED) { - this.error = { message: "History download failed." }; - } else if (metaData.state == DownloadsCommon.DOWNLOAD_BLOCKED_PARENTAL) { - this.error = { becauseBlockedByParentalControls: true }; - } else if (metaData.state == DownloadsCommon.DOWNLOAD_DIRTY) { - this.error = { - becauseBlockedByReputationCheck: true, - reputationCheckVerdict: metaData.reputationCheckVerdict || "", - }; - } else { - this.error = null; - } - - // Normal history downloads are assumed to exist until the user interface - // is refreshed, at which point these values may be updated. 
- this.target.exists = true; - this.target.size = metaData.fileSize; - } else { - // Metadata might be missing from a download that has started but hasn't - // stopped already. Normally, this state is overridden with the one from - // the corresponding in-progress session download. But if the browser is - // terminated abruptly and additionally the file with information about - // in-progress downloads is lost, we may end up using this state. We use - // the failed state to allow the download to be restarted. - // - // On the other hand, if the download is missing the target file - // annotation as well, it is just a very old one, and we can assume it - // succeeded. - this.succeeded = !this.target.path; - this.error = this.target.path ? { message: "Unstarted download." } : null; - this.canceled = false; - - // These properties may be updated if the user interface is refreshed. - this.target.exists = false; - this.target.size = undefined; - } - }, - - /** - * History downloads are never in progress. - */ - stopped: true, - - /** - * No percentage indication is shown for history downloads. - */ - hasProgress: false, - - /** - * History downloads cannot be restarted using their partial data, even if - * they are indicated as paused in their Places metadata. The only way is to - * use the information from a persisted session download, that will be shown - * instead of the history download. In case this session download is not - * available, we show the history download as canceled, not paused. - */ - hasPartialData: false, - - /** - * This method mimicks the "start" method of session downloads, and is called - * when the user retries a history download. - * - * At present, we always ask the user for a new target path when retrying a - * history download. 
In the future we may consider reusing the known target - * path if the folder still exists and the file name is not already used, - * except when the user preferences indicate that the target path should be - * requested every time a new download is started. - */ - start() { - let browserWin = RecentWindow.getMostRecentBrowserWindow(); - let initiatingDoc = browserWin ? browserWin.document : document; - - // Do not suggest a file name if we don't know the original target. - let leafName = this.target.path ? OS.Path.basename(this.target.path) : null; - DownloadURL(this.source.url, leafName, initiatingDoc); - - return Promise.resolve(); - }, - - /** - * This method mimicks the "refresh" method of session downloads, except that - * it cannot notify that the data changed to the Downloads View. - */ - async refresh() { - try { - this.target.size = (await OS.File.stat(this.target.path)).size; - this.target.exists = true; - } catch (ex) { - // We keep the known file size from the metadata, if any. - this.target.exists = false; - } - }, -}; - /** * A download element shell is responsible for handling the commands and the * displayed data for a single download view element. @@ -177,43 +36,35 @@ HistoryDownload.prototype = { * the |element| getter. The shell doesn't insert the item in a richlistbox, the * caller must do it and remove the element when it's no longer needed. * - * The caller is also responsible for forwarding status notifications for - * session downloads, calling the onSessionDownloadChanged method. + * The caller is also responsible for forwarding status notifications, calling + * the onChanged method. * - * @param [optional] aSessionDownload - * The session download, required if aHistoryDownload is not set. - * @param [optional] aHistoryDownload - * The history download, required if aSessionDownload is not set. + * @param download + * The Download object from the DownloadHistoryList. 
*/ -function HistoryDownloadElementShell(aSessionDownload, aHistoryDownload) { +function HistoryDownloadElementShell(download) { + this._download = download; + this.element = document.createElement("richlistitem"); this.element._shell = this; this.element.classList.add("download"); this.element.classList.add("download-state"); - - if (aSessionDownload) { - this.sessionDownload = aSessionDownload; - } - if (aHistoryDownload) { - this.historyDownload = aHistoryDownload; - } } HistoryDownloadElementShell.prototype = { __proto__: DownloadsViewUI.DownloadElementShell.prototype, /** - * Manages the "active" state of the shell. By default all the shells without - * a session download are inactive, thus their UI is not updated. They must - * be activated when entering the visible area. Session downloads are always - * active. + * Manages the "active" state of the shell. By default all the shells are + * inactive, thus their UI is not updated. They must be activated when + * entering the visible area. */ ensureActive() { if (!this._active) { this._active = true; this.element.setAttribute("active", true); - this._updateUI(); + this.onChanged(); } }, get active() { @@ -225,65 +76,14 @@ HistoryDownloadElementShell.prototype = { * for displaying information and executing commands in the user interface. 
*/ get download() { - return this._sessionDownload || this._historyDownload; + return this._download; }, - _sessionDownload: null, - get sessionDownload() { - return this._sessionDownload; - }, - set sessionDownload(aValue) { - if (this._sessionDownload != aValue) { - if (!aValue && !this._historyDownload) { - throw new Error("Should always have either a Download or a HistoryDownload"); - } - - this._sessionDownload = aValue; - if (aValue) { - this.sessionDownloadState = DownloadsCommon.stateOfDownload(aValue); - } - - this.ensureActive(); - this._updateUI(); - } - return aValue; - }, - - _historyDownload: null, - get historyDownload() { - return this._historyDownload; - }, - set historyDownload(aValue) { - if (this._historyDownload != aValue) { - if (!aValue && !this._sessionDownload) { - throw new Error("Should always have either a Download or a HistoryDownload"); - } - - this._historyDownload = aValue; - - // We don't need to update the UI if we had a session data item, because - // the places information isn't used in this case. - if (!this._sessionDownload) { - this._updateUI(); - } - } - return aValue; - }, - - _updateUI() { - // There is nothing to do if the item has always been invisible. - if (!this.active) { - return; - } - + onStateChanged() { // Since the state changed, we may need to check the target file again. this._targetFileChecked = false; this._updateState(); - }, - - onStateChanged() { - this._updateState(); if (this.element.selected) { goUpdateDownloadCommands(); @@ -294,10 +94,15 @@ HistoryDownloadElementShell.prototype = { } }, - onSessionDownloadChanged() { - let newState = DownloadsCommon.stateOfDownload(this.sessionDownload); - if (this.sessionDownloadState != newState) { - this.sessionDownloadState = newState; + onChanged() { + // There is nothing to do if the item has always been invisible. 
+ if (!this.active) { + return; + } + + let newState = DownloadsCommon.stateOfDownload(this.download); + if (this._downloadState !== newState) { + this._downloadState = newState; this.onStateChanged(); } @@ -308,6 +113,7 @@ HistoryDownloadElementShell.prototype = { !!this.download.hasBlockedData); this._updateProgress(); }, + _downloadState: null, isCommandEnabled(aCommand) { // The only valid command for inactive elements is cmd_delete. @@ -320,7 +126,7 @@ HistoryDownloadElementShell.prototype = { return this.download.target.exists; case "downloadsCmd_show": // TODO: Bug 827010 - Handle part-file asynchronously. - if (this._sessionDownload && this.download.target.partFilePath) { + if (this.download.target.partFilePath) { let partFile = new FileUtils.File(this.download.target.partFilePath); if (partFile.exists()) { return true; @@ -332,8 +138,6 @@ HistoryDownloadElementShell.prototype = { case "cmd_delete": // We don't want in-progress downloads to be removed accidentally. return this.download.stopped; - case "downloadsCmd_cancel": - return !!this._sessionDownload; } return DownloadsViewUI.DownloadElementShell.prototype .isCommandEnabled.call(this, aCommand); @@ -345,6 +149,22 @@ HistoryDownloadElementShell.prototype = { } }, + downloadsCmd_retry() { + if (this.download.start) { + DownloadsViewUI.DownloadElementShell.prototype + .downloadsCmd_retry.call(this); + return; + } + + let browserWin = RecentWindow.getMostRecentBrowserWindow(); + let initiatingDoc = browserWin ? browserWin.document : document; + + // Do not suggest a file name if we don't know the original target. + let targetPath = this.download.target.path ? 
+ OS.Path.basename(this.download.target.path) : null; + DownloadURL(this.download.source.url, targetPath, initiatingDoc); + }, + downloadsCmd_open() { let file = new FileUtils.File(this.download.target.path); DownloadsCommon.openDownloadedFile(file, null, window); @@ -359,15 +179,6 @@ HistoryDownloadElementShell.prototype = { openURL(this.download.source.referrer); }, - cmd_delete() { - if (this._sessionDownload) { - DownloadsCommon.removeAndFinalizeDownload(this.download); - } - if (this._historyDownload) { - PlacesUtils.history.remove(this.download.source.url); - } - }, - downloadsCmd_unblock() { this.confirmUnblock(window, "unblock"); }, @@ -421,27 +232,12 @@ HistoryDownloadElementShell.prototype = { // Start checking for existence. This may be done twice if onSelect is // called again before the information is collected. if (!this._targetFileChecked) { - this._checkTargetFileOnSelect().catch(Cu.reportError); + this.download.refresh().catch(Cu.reportError).then(() => { + // Do not try to check for existence again even if this failed. + this._targetFileChecked = true; + }); } }, - - async _checkTargetFileOnSelect() { - try { - await this.download.refresh(); - } finally { - // Do not try to check for existence again if this failed once. - this._targetFileChecked = true; - } - - // Update the commands only if the element is still selected. - if (this.element.selected) { - goUpdateDownloadCommands(); - } - - // Ensure the interface has been updated based on the new values. We need to - // do this because history downloads can't trigger update notifications. - this._updateProgress(); - }, }; /** @@ -472,16 +268,9 @@ function DownloadsPlacesView(aRichListBox, aActive = true) { this._richlistbox._placesView = this; window.controllers.insertControllerAt(0, this); - // Map download URLs to download element shells regardless of their type - this._downloadElementsShellsForURI = new Map(); - - // Map download data items to their element shells. 
+ // Map downloads to their element shells. this._viewItemsForDownloads = new WeakMap(); - // Points to the last session download element. We keep track of this - // in order to keep all session downloads above past downloads. - this._lastSessionDownloadElement = null; - this._searchTerm = ""; this._active = aActive; @@ -489,7 +278,7 @@ function DownloadsPlacesView(aRichListBox, aActive = true) { // Register as a downloads view. The places data will be initialized by // the places setter. this._initiallySelectedElement = null; - this._downloadsData = DownloadsCommon.getData(window.opener || window); + this._downloadsData = DownloadsCommon.getData(window.opener || window, true); this._downloadsData.addView(this); // Get the Download button out of the attention state since we're about to @@ -523,325 +312,6 @@ DownloadsPlacesView.prototype = { return this._active; }, - /** - * This cache exists in order to optimize the load of the Downloads View, when - * Places annotations for history downloads must be read. In fact, annotations - * are stored in a single table, and reading all of them at once is much more - * efficient than an individual query. - * - * When this property is first requested, it reads the annotations for all the - * history downloads and stores them indefinitely. - * - * The historical annotations are not expected to change for the duration of - * the session, except in the case where a session download is running for the - * same URI as a history download. To ensure we don't use stale data, URIs - * corresponding to session downloads are permanently removed from the cache. - * This is a very small mumber compared to history downloads. - * - * This property returns a Map from each download source URI found in Places - * annotations to an object with the format: - * - * { targetFileSpec, state, endTime, fileSize, ... 
} - * - * The targetFileSpec property is the value of "downloads/destinationFileURI", - * while the other properties are taken from "downloads/metaData". Any of the - * properties may be missing from the object. - */ - get _cachedPlacesMetaData() { - if (!this.__cachedPlacesMetaData) { - this.__cachedPlacesMetaData = new Map(); - - // Read the metadata annotations first, but ignore invalid JSON. - for (let result of PlacesUtils.annotations.getAnnotationsWithName( - DOWNLOAD_META_DATA_ANNO)) { - try { - this.__cachedPlacesMetaData.set(result.uri.spec, - JSON.parse(result.annotationValue)); - } catch (ex) {} - } - - // Add the target file annotations to the metadata. - for (let result of PlacesUtils.annotations.getAnnotationsWithName( - DESTINATION_FILE_URI_ANNO)) { - let metaData = this.__cachedPlacesMetaData.get(result.uri.spec); - if (!metaData) { - metaData = {}; - this.__cachedPlacesMetaData.set(result.uri.spec, metaData); - } - metaData.targetFileSpec = result.annotationValue; - } - } - - return this.__cachedPlacesMetaData; - }, - __cachedPlacesMetaData: null, - - /** - * Reads current metadata from Places annotations for the specified URI, and - * returns an object with the format: - * - * { targetFileSpec, state, endTime, fileSize, ... } - * - * The targetFileSpec property is the value of "downloads/destinationFileURI", - * while the other properties are taken from "downloads/metaData". Any of the - * properties may be missing from the object. - */ - _getPlacesMetaDataFor(spec) { - let metaData = {}; - - try { - let uri = NetUtil.newURI(spec); - try { - metaData = JSON.parse(PlacesUtils.annotations.getPageAnnotation( - uri, DOWNLOAD_META_DATA_ANNO)); - } catch (ex) {} - metaData.targetFileSpec = PlacesUtils.annotations.getPageAnnotation( - uri, DESTINATION_FILE_URI_ANNO); - } catch (ex) {} - - return metaData; - }, - - /** - * Given a data item for a session download, or a places node for a past - * download, updates the view as necessary. - * 1. 
If the given data is a places node, we check whether there are any - * elements for the same download url. If there are, then we just reset - * their places node. Otherwise we add a new download element. - * 2. If the given data is a data item, we first check if there's a history - * download in the list that is not associated with a data item. If we - * found one, we use it for the data item as well and reposition it - * alongside the other session downloads. If we don't, then we go ahead - * and create a new element for the download. - * - * @param [optional] sessionDownload - * A Download object, or null for history downloads. - * @param [optional] aPlacesNode - * The Places node for a history download, or null for session downloads. - * @param [optional] aNewest - * Whether the download should be added at the top of the list. - * @param [optional] aDocumentFragment - * To speed up the appending of multiple elements to the end of the - * list which are coming in a single batch (i.e. invalidateContainer), - * a document fragment may be passed to which the new elements would - * be appended. It's the caller's job to ensure the fragment is merged - * to the richlistbox at the end. - */ - _addDownloadData(sessionDownload, aPlacesNode, aNewest = false, - aDocumentFragment = null) { - let downloadURI = aPlacesNode ? aPlacesNode.uri - : sessionDownload.source.url; - let shellsForURI = this._downloadElementsShellsForURI.get(downloadURI); - if (!shellsForURI) { - shellsForURI = new Set(); - this._downloadElementsShellsForURI.set(downloadURI, shellsForURI); - } - - // When a session download is attached to a shell, we ensure not to keep - // stale metadata around for the corresponding history download. This - // prevents stale state from being used if the view is rebuilt. - // - // Note that we will eagerly load the data in the cache at this point, even - // if we have seen no history download. 
The case where no history download - // will appear at all is rare enough in normal usage, so we can apply this - // simpler solution rather than keeping a list of cache items to ignore. - if (sessionDownload) { - this._cachedPlacesMetaData.delete(sessionDownload.source.url); - } - - let newOrUpdatedShell = null; - - // Trivial: if there are no shells for this download URI, we always - // need to create one. - let shouldCreateShell = shellsForURI.size == 0; - - // However, if we do have shells for this download uri, there are - // few options: - // 1) There's only one shell and it's for a history download (it has - // no data item). In this case, we update this shell and move it - // if necessary - // 2) There are multiple shells, indicating multiple downloads for - // the same download uri are running. In this case we create - // another shell for the download (so we have one shell for each data - // item). - // - // Note: If a cancelled session download is already in the list, and the - // download is retried, onDownloadAdded is called again for the same - // data item. Thus, we also check that we make sure we don't have a view item - // already. - if (!shouldCreateShell && - sessionDownload && !this._viewItemsForDownloads.has(sessionDownload)) { - // If there's a past-download-only shell for this download-uri with no - // associated data item, use it for the new data item. Otherwise, go ahead - // and create another shell. - shouldCreateShell = true; - for (let shell of shellsForURI) { - if (!shell.sessionDownload) { - shouldCreateShell = false; - shell.sessionDownload = sessionDownload; - newOrUpdatedShell = shell; - this._viewItemsForDownloads.set(sessionDownload, shell); - break; - } - } - } - - if (shouldCreateShell) { - // If we are adding a new history download here, it means there is no - // associated session download, thus we must read the Places metadata, - // because it will not be obscured by the session download. 
- let historyDownload = null; - if (aPlacesNode) { - let metaData = this._cachedPlacesMetaData.get(aPlacesNode.uri) || - this._getPlacesMetaDataFor(aPlacesNode.uri); - historyDownload = new HistoryDownload(aPlacesNode); - historyDownload.updateFromMetaData(metaData); - } - let shell = new HistoryDownloadElementShell(sessionDownload, - historyDownload); - shell.element._placesNode = aPlacesNode; - newOrUpdatedShell = shell; - shellsForURI.add(shell); - if (sessionDownload) { - this._viewItemsForDownloads.set(sessionDownload, shell); - } - } else if (aPlacesNode) { - // We are updating information for a history download for which we have - // at least one download element shell already. There are two cases: - // 1) There are one or more download element shells for this source URI, - // each with an associated session download. We update the Places node - // because we may need it later, but we don't need to read the Places - // metadata until the last session download is removed. - // 2) Occasionally, we may receive a duplicate notification for a history - // download with no associated session download. We have exactly one - // download element shell in this case, but the metdata cannot have - // changed, just the reference to the Places node object is different. - // So, we update all the node references and keep the metadata intact. - for (let shell of shellsForURI) { - if (!shell.historyDownload) { - // Create the element to host the metadata when needed. - shell.historyDownload = new HistoryDownload(aPlacesNode); - } - shell.element._placesNode = aPlacesNode; - } - } - - if (newOrUpdatedShell) { - if (aNewest) { - this._richlistbox.insertBefore(newOrUpdatedShell.element, - this._richlistbox.firstChild); - if (!this._lastSessionDownloadElement) { - this._lastSessionDownloadElement = newOrUpdatedShell.element; - } - // Some operations like retrying an history download move an element to - // the top of the richlistbox, along with other session downloads. 
- // More generally, if a new download is added, should be made visible. - this._richlistbox.ensureElementIsVisible(newOrUpdatedShell.element); - } else if (sessionDownload) { - let before = this._lastSessionDownloadElement ? - this._lastSessionDownloadElement.nextSibling : this._richlistbox.firstChild; - this._richlistbox.insertBefore(newOrUpdatedShell.element, before); - this._lastSessionDownloadElement = newOrUpdatedShell.element; - } else { - let appendTo = aDocumentFragment || this._richlistbox; - appendTo.appendChild(newOrUpdatedShell.element); - } - - if (this.searchTerm) { - newOrUpdatedShell.element.hidden = - !newOrUpdatedShell.element._shell.matchesSearchTerm(this.searchTerm); - } - } - - // If aDocumentFragment is defined this is a batch change, so it's up to - // the caller to append the fragment and activate the visible shells. - if (!aDocumentFragment) { - this._ensureVisibleElementsAreActive(); - goUpdateCommand("downloadsCmd_clearDownloads"); - } - }, - - _removeElement(aElement) { - // If the element was selected exclusively, select its next - // sibling first, if not, try for previous sibling, if any. 
- if ((aElement.nextSibling || aElement.previousSibling) && - this._richlistbox.selectedItems && - this._richlistbox.selectedItems.length == 1 && - this._richlistbox.selectedItems[0] == aElement) { - this._richlistbox.selectItem(aElement.nextSibling || - aElement.previousSibling); - } - - if (this._lastSessionDownloadElement == aElement) { - this._lastSessionDownloadElement = aElement.previousSibling; - } - - this._richlistbox.removeItemFromSelection(aElement); - this._richlistbox.removeChild(aElement); - this._ensureVisibleElementsAreActive(); - goUpdateCommand("downloadsCmd_clearDownloads"); - }, - - _removeHistoryDownloadFromView(aPlacesNode) { - let downloadURI = aPlacesNode.uri; - let shellsForURI = this._downloadElementsShellsForURI.get(downloadURI); - if (shellsForURI) { - for (let shell of shellsForURI) { - if (shell.sessionDownload) { - shell.historyDownload = null; - } else { - this._removeElement(shell.element); - shellsForURI.delete(shell); - if (shellsForURI.size == 0) - this._downloadElementsShellsForURI.delete(downloadURI); - } - } - } - }, - - _removeSessionDownloadFromView(download) { - let shells = this._downloadElementsShellsForURI - .get(download.source.url); - if (shells.size == 0) { - throw new Error("Should have had at leaat one shell for this uri"); - } - - let shell = this._viewItemsForDownloads.get(download); - if (!shells.has(shell)) { - throw new Error("Missing download element shell in shells list for url"); - } - - // If there's more than one item for this download uri, we can let the - // view item for this this particular data item go away. - // If there's only one item for this download uri, we should only - // keep it if it is associated with a history download. 
- if (shells.size > 1 || !shell.historyDownload) { - this._removeElement(shell.element); - shells.delete(shell); - if (shells.size == 0) { - this._downloadElementsShellsForURI.delete(download.source.url); - } - } else { - // We have one download element shell containing both a session download - // and a history download, and we are now removing the session download. - // Previously, we did not use the Places metadata because it was obscured - // by the session download. Since this is no longer the case, we have to - // read the latest metadata before removing the session download. - let url = shell.historyDownload.source.url; - let metaData = this._getPlacesMetaDataFor(url); - shell.historyDownload.updateFromMetaData(metaData); - shell.sessionDownload = null; - // Move it below the session-download items; - if (this._lastSessionDownloadElement == shell.element) { - this._lastSessionDownloadElement = shell.element.previousSibling; - } else { - let before = this._lastSessionDownloadElement ? - this._lastSessionDownloadElement.nextSibling : this._richlistbox.firstChild; - this._richlistbox.insertBefore(shell.element, before); - } - } - }, - _ensureVisibleElementsAreActive() { if (!this.active || this._ensureVisibleTimer || !this._richlistbox.firstChild) { @@ -897,58 +367,17 @@ DownloadsPlacesView.prototype = { return this._place; }, set place(val) { - // Don't reload everything if we don't have to. if (this._place == val) { // XXXmano: places.js relies on this behavior (see Bug 822203). 
this.searchTerm = ""; - return val; - } - - this._place = val; - - let history = PlacesUtils.history; - let queries = { }, options = { }; - history.queryStringToQueries(val, queries, { }, options); - if (!queries.value.length) { - queries.value = [history.getNewQuery()]; - } - - let result = history.executeQueries(queries.value, queries.value.length, - options.value); - result.addObserver(this); - return val; - }, - - _result: null, - get result() { - return this._result; - }, - set result(val) { - if (this._result == val) { - return val; - } - - if (this._result) { - this._result.removeObserver(this); - this._resultNode.containerOpen = false; - } - - if (val) { - this._result = val; - this._resultNode = val.root; - this._resultNode.containerOpen = true; - this._ensureInitialSelection(); } else { - delete this._resultNode; - delete this._result; + this._place = val; } - - return val; }, get selectedNodes() { return Array.filter(this._richlistbox.selectedItems, - element => element._placesNode); + element => element._shell.download.placesNode); }, get selectedNode() { @@ -960,100 +389,6 @@ DownloadsPlacesView.prototype = { return this.selectedNodes.length > 0; }, - containerStateChanged(aNode, aOldState, aNewState) { - this.invalidateContainer(aNode) - }, - - invalidateContainer(aContainer) { - if (aContainer != this._resultNode) { - throw new Error("Unexpected container node"); - } - if (!aContainer.containerOpen) { - throw new Error("Root container for the downloads query cannot be closed"); - } - - let suppressOnSelect = this._richlistbox.suppressOnSelect; - this._richlistbox.suppressOnSelect = true; - try { - // Remove the invalidated history downloads from the list and unset the - // places node for data downloads. - // Loop backwards since _removeHistoryDownloadFromView may removeChild(). 
- for (let i = this._richlistbox.childNodes.length - 1; i >= 0; --i) { - let element = this._richlistbox.childNodes[i]; - if (element._placesNode) { - this._removeHistoryDownloadFromView(element._placesNode); - } - } - } finally { - this._richlistbox.suppressOnSelect = suppressOnSelect; - } - - if (aContainer.childCount > 0) { - let elementsToAppendFragment = document.createDocumentFragment(); - for (let i = 0; i < aContainer.childCount; i++) { - try { - this._addDownloadData(null, aContainer.getChild(i), false, - elementsToAppendFragment); - } catch (ex) { - Cu.reportError(ex); - } - } - - // _addDownloadData may not add new elements if there were already - // data items in place. - if (elementsToAppendFragment.firstChild) { - this._appendDownloadsFragment(elementsToAppendFragment); - this._ensureVisibleElementsAreActive(); - } - } - - goUpdateDownloadCommands(); - }, - - _appendDownloadsFragment(aDOMFragment) { - // Workaround multiple reflows hang by removing the richlistbox - // and adding it back when we're done. 
- - // Hack for bug 836283: reset xbl fields to their old values after the - // binding is reattached to avoid breaking the selection state - let xblFields = new Map(); - for (let key of Object.getOwnPropertyNames(this._richlistbox)) { - let value = this._richlistbox[key]; - xblFields.set(key, value); - } - - let parentNode = this._richlistbox.parentNode; - let nextSibling = this._richlistbox.nextSibling; - parentNode.removeChild(this._richlistbox); - this._richlistbox.appendChild(aDOMFragment); - parentNode.insertBefore(this._richlistbox, nextSibling); - - for (let [key, value] of xblFields) { - this._richlistbox[key] = value; - } - }, - - nodeInserted(aParent, aPlacesNode) { - this._addDownloadData(null, aPlacesNode); - }, - - nodeRemoved(aParent, aPlacesNode, aOldIndex) { - this._removeHistoryDownloadFromView(aPlacesNode); - }, - - nodeAnnotationChanged() {}, - nodeIconChanged() {}, - nodeTitleChanged() {}, - nodeKeywordChanged() {}, - nodeDateAddedChanged() {}, - nodeLastModifiedChanged() {}, - nodeHistoryDetailsChanged() {}, - nodeTagsChanged() {}, - sortingChanged() {}, - nodeMoved() {}, - nodeURIChanged() {}, - batching() {}, - get controller() { return this._richlistbox.controller; }, @@ -1105,20 +440,108 @@ DownloadsPlacesView.prototype = { } }, + /** + * DocumentFragment object that contains all the new elements added during a + * batch operation, or null if no batch is in progress. + * + * Since newest downloads are displayed at the top, elements are normally + * prepended to the fragment, and then the fragment is prepended to the list. 
+ */ + batchFragment: null, + + onDownloadBatchStarting() { + this.batchFragment = document.createDocumentFragment(); + + this.oldSuppressOnSelect = this._richlistbox.suppressOnSelect; + this._richlistbox.suppressOnSelect = true; + }, + onDownloadBatchEnded() { + this._richlistbox.suppressOnSelect = this.oldSuppressOnSelect; + delete this.oldSuppressOnSelect; + + if (this.batchFragment.childElementCount) { + this._prependBatchFragment(); + } + this.batchFragment = null; + this._ensureInitialSelection(); + this._ensureVisibleElementsAreActive(); + goUpdateDownloadCommands(); + }, + + _prependBatchFragment() { + // Workaround multiple reflows hang by removing the richlistbox + // and adding it back when we're done. + + // Hack for bug 836283: reset xbl fields to their old values after the + // binding is reattached to avoid breaking the selection state + let xblFields = new Map(); + for (let key of Object.getOwnPropertyNames(this._richlistbox)) { + let value = this._richlistbox[key]; + xblFields.set(key, value); + } + + let parentNode = this._richlistbox.parentNode; + let nextSibling = this._richlistbox.nextSibling; + parentNode.removeChild(this._richlistbox); + this._richlistbox.prepend(this.batchFragment); + parentNode.insertBefore(this._richlistbox, nextSibling); + + for (let [key, value] of xblFields) { + this._richlistbox[key] = value; + } }, - onDownloadAdded(download) { - this._addDownloadData(download, null, true); + onDownloadAdded(download, { insertBefore } = {}) { + let shell = new HistoryDownloadElementShell(download); + this._viewItemsForDownloads.set(download, shell); + + // Since newest downloads are displayed at the top, either prepend the new + // element or insert it after the one indicated by the insertBefore option. 
+ if (insertBefore) { + this._viewItemsForDownloads.get(insertBefore) + .element.insertAdjacentElement("afterend", shell.element); + } else { + (this.batchFragment || this._richlistbox).prepend(shell.element); + } + + if (this.searchTerm) { + shell.element.hidden = !shell.matchesSearchTerm(this.searchTerm); + } + + // Don't update commands and visible elements during a batch change. + if (!this.batchFragment) { + this._ensureVisibleElementsAreActive(); + goUpdateCommand("downloadsCmd_clearDownloads"); + } }, onDownloadChanged(download) { - this._viewItemsForDownloads.get(download).onSessionDownloadChanged(); + this._viewItemsForDownloads.get(download).onChanged(); }, onDownloadRemoved(download) { - this._removeSessionDownloadFromView(download); + let element = this._viewItemsForDownloads.get(download).element; + + // If the element was selected exclusively, select its next + // sibling first, if not, try for previous sibling, if any. + if ((element.nextSibling || element.previousSibling) && + this._richlistbox.selectedItems && + this._richlistbox.selectedItems.length == 1 && + this._richlistbox.selectedItems[0] == element) { + this._richlistbox.selectItem(element.nextSibling || + element.previousSibling); + } + + this._richlistbox.removeItemFromSelection(element); + element.remove(); + + // Don't update commands and visible elements during a batch change. 
+ if (!this.batchFragment) { + this._ensureVisibleElementsAreActive(); + goUpdateCommand("downloadsCmd_clearDownloads"); + } }, // nsIController @@ -1260,7 +683,7 @@ DownloadsPlacesView.prototype = { downloadsCmd_clearDownloads() { this._downloadsData.removeFinished(); - if (this.result) { + if (this._place) { Cc["@mozilla.org/browser/download-history;1"] .getService(Ci.nsIDownloadHistory) .removeAllDownloads(); diff --git a/browser/components/downloads/content/downloads.js b/browser/components/downloads/content/downloads.js index 8d2f2d89f8a55dc6fd6f03f7f48d8036de51341c..f46af5d4dd10734455dbeab803282b22c64e540d 100644 --- a/browser/components/downloads/content/downloads.js +++ b/browser/components/downloads/content/downloads.js @@ -1095,7 +1095,6 @@ DownloadsViewItem.prototype = { return partFile.exists(); } case "cmd_delete": - case "downloadsCmd_cancel": case "downloadsCmd_copyLocation": case "downloadsCmd_doDefault": return true; @@ -1114,11 +1113,6 @@ DownloadsViewItem.prototype = { // Item commands - cmd_delete() { - DownloadsCommon.removeAndFinalizeDownload(this.download); - PlacesUtils.history.remove(this.download.source.url).catch(Cu.reportError); - }, - downloadsCmd_unblock() { DownloadsPanel.hidePanel(); this.confirmUnblock(window, "unblock"); diff --git a/browser/components/places/content/places.js b/browser/components/places/content/places.js index 70c73ffb14d35e66e93ca1cc7ce4d7a456658bf3..21d3abe75106fc6eb7ddc0c88463a38be806959b 100644 --- a/browser/components/places/content/places.js +++ b/browser/components/places/content/places.js @@ -784,7 +784,6 @@ var PlacesSearchBox = { } let currentView = ContentArea.currentView; - let currentOptions = PO.getCurrentOptions(); // Search according to the current scope, which was set by // PQB_setScope() @@ -793,6 +792,7 @@ var PlacesSearchBox = { currentView.applyFilter(filterString, this.folders); break; case "history": { + let currentOptions = PO.getCurrentOptions(); if (currentOptions.queryType != 
Ci.nsINavHistoryQueryOptions.QUERY_TYPE_HISTORY) { let query = PlacesUtils.history.getNewQuery(); query.searchTerms = filterString; @@ -810,20 +810,8 @@ var PlacesSearchBox = { break; } case "downloads": { - if (currentView == ContentTree.view) { - let query = PlacesUtils.history.getNewQuery(); - query.searchTerms = filterString; - query.setTransitions([Ci.nsINavHistoryService.TRANSITION_DOWNLOAD], 1); - let options = currentOptions.clone(); - // Make sure we're getting uri results. - options.resultType = currentOptions.RESULTS_AS_URI; - options.queryType = Ci.nsINavHistoryQueryOptions.QUERY_TYPE_HISTORY; - options.includeHidden = true; - currentView.load([query], options); - } else { - // The new downloads view doesn't use places for searching downloads. - currentView.searchTerm = filterString; - } + // The new downloads view doesn't use places for searching downloads. + currentView.searchTerm = filterString; break; } default: diff --git a/browser/components/places/tests/browser/browser_library_downloads.js b/browser/components/places/tests/browser/browser_library_downloads.js index ff920817ee1f0253994d19f03f73c562e0c8c97f..9f26861df6929a2402aca17f5a056badd4fb911c 100644 --- a/browser/components/places/tests/browser/browser_library_downloads.js +++ b/browser/components/places/tests/browser/browser_library_downloads.js @@ -43,12 +43,11 @@ function test() { // Check results. 
- let contentRoot = win.ContentArea.currentView.result.root; - let len = contentRoot.childCount; - const TEST_URIS = ["http://ubuntu.org/", "http://google.com/"]; - for (let i = 0; i < len; i++) { - is(contentRoot.getChild(i).uri, TEST_URIS[i], - "Comparing downloads shown at index " + i); + let testURIs = ["http://ubuntu.org/", "http://google.com/"]; + for (let element of win.ContentArea.currentView + .associatedElement.children) { + is(element._shell.download.source.url, testURIs.shift(), + "URI matches"); } win.close(); diff --git a/browser/installer/windows/nsis/defines.nsi.in b/browser/installer/windows/nsis/defines.nsi.in index b8325e1b96fc8c676de68602e6b39a9d380e959c..988c0dd066c590d757db9800ed8056ab45b30b17 100644 --- a/browser/installer/windows/nsis/defines.nsi.in +++ b/browser/installer/windows/nsis/defines.nsi.in @@ -121,10 +121,7 @@ VIAddVersionKey "ProductVersion" "${AppVersion}" # Control positions in Dialog Units so they are placed correctly with # non-default DPI settings -!define OPTIONS_ITEM_EDGE_DU 90u -!define OPTIONS_ITEM_WIDTH_DU 356u -!define OPTIONS_SUBITEM_EDGE_DU 119u -!define OPTIONS_SUBITEM_WIDTH_DU 327u +!define PROFILE_CLEANUP_LABEL_TOP_DU 39u !define NOW_INSTALLING_TOP_DU 70u !define INSTALL_BLURB_TOP_DU 137u !define INSTALL_FOOTER_TOP_DU -48u diff --git a/browser/installer/windows/nsis/stub.nsi b/browser/installer/windows/nsis/stub.nsi index 398a0f10d33c17a6ea0a31f0c7c9ca570b3c2f6d..d5bed0ed2e84ced1a3c646918bf0e6c652ac983e 100644 --- a/browser/installer/windows/nsis/stub.nsi +++ b/browser/installer/windows/nsis/stub.nsi @@ -36,10 +36,12 @@ Var LabelBlurb Var BgBitmapImage Var HwndBgBitmapControl Var CurrentBlurbIdx +Var CheckboxCleanupProfile Var FontInstalling Var FontBlurb Var FontFooter +Var FontCheckbox Var CanWriteToInstallDir Var HasRequiredSpaceAvailable @@ -89,12 +91,15 @@ Var DownloadServerIP Var PostSigningData Var PreviousInstallDir Var PreviousInstallArch +Var ProfileCleanupPromptType +Var ProfileCleanupHeaderString +Var 
ProfileCleanupButtonString ; Uncomment the following to prevent pinging the metrics server when testing ; the stub installer ;!define STUB_DEBUG -!define StubURLVersion "v7" +!define StubURLVersion "v8" ; Successful install exit code !define ERR_SUCCESS 0 @@ -284,6 +289,7 @@ ChangeUI all "nsisui.exe" Caption "$(INSTALLER_WIN_CAPTION)" +Page custom createProfileCleanup Page custom createInstall ; Download / Installation page Function .onInit @@ -428,6 +434,7 @@ Function .onInit StrCpy $FirefoxLaunchCode "0" StrCpy $CheckboxShortcuts "1" StrCpy $CheckboxSendPing "1" + StrCpy $CheckboxCleanupProfile "0" !ifdef MOZ_MAINTENANCE_SERVICE ; We can only install the maintenance service if the user is an admin. Call IsUserAdmin @@ -462,9 +469,63 @@ Function .onInit CreateFont $FontInstalling "$0" "28" "400" CreateFont $FontBlurb "$0" "15" "400" CreateFont $FontFooter "$0" "13" "400" + CreateFont $FontCheckbox "$0" "10" "400" InitPluginsDir File /oname=$PLUGINSDIR\bgstub.bmp "bgstub.bmp" + + SetShellVarContext all ; Set SHCTX to All Users + ; If the user doesn't have write access to the installation directory set + ; the installation directory to a subdirectory of the All Users application + ; directory and if the user can't write to that location set the installation + ; directory to a subdirectory of the users local application directory + ; (e.g. non-roaming). + Call CanWrite + ${If} "$CanWriteToInstallDir" == "false" + StrCpy $INSTDIR "$APPDATA\${BrandFullName}\" + Call CanWrite + ${If} "$CanWriteToInstallDir" == "false" + ; This should never happen but just in case. + StrCpy $CanWriteToInstallDir "false" + ${Else} + StrCpy $INSTDIR "$LOCALAPPDATA\${BrandFullName}\" + Call CanWrite + ${EndIf} + ${EndIf} + + Call CheckSpace + + ${If} ${FileExists} "$INSTDIR" + ; Always display the long path if the path exists. + ${GetLongPath} "$INSTDIR" $INSTDIR + ${EndIf} + + ; Check whether the install requirements are satisfied using the default + ; values for metrics. 
+ ${If} "$InitialInstallRequirementsCode" == "" + ${If} "$CanWriteToInstallDir" != "true" + ${AndIf} "$HasRequiredSpaceAvailable" != "true" + StrCpy $InitialInstallRequirementsCode "1" + ${ElseIf} "$CanWriteToInstallDir" != "true" + StrCpy $InitialInstallRequirementsCode "2" + ${ElseIf} "$HasRequiredSpaceAvailable" != "true" + StrCpy $InitialInstallRequirementsCode "3" + ${Else} + StrCpy $InitialInstallRequirementsCode "0" + ${EndIf} + ${EndIf} + + Call CanWrite + ${If} "$CanWriteToInstallDir" == "false" + MessageBox MB_OK|MB_ICONEXCLAMATION "$(WARN_WRITE_ACCESS_QUIT)\n\n$INSTDIR" + Quit + ${EndIf} + + Call CheckSpace + ${If} "$HasRequiredSpaceAvailable" == "false" + MessageBox MB_OK|MB_ICONEXCLAMATION "$(WARN_DISK_SPACE_QUIT)" + Quit + ${EndIf} FunctionEnd ; .onGUIInit isn't needed except for RTL locales @@ -498,310 +559,183 @@ Function .onUserAbort ${If} "$IsDownloadFinished" != "" Call DisplayDownloadError - ; Aborting the abort will allow SendPing which is called by - ; DisplayDownloadError to hide the installer window and close the installer - ; after it sends the metrics ping. - Abort + ${Else} + Call SendPing ${EndIf} -FunctionEnd - -Function SendPing - HideWindow - ; Try to send a ping if a download was attempted - ${If} $CheckboxSendPing == 1 - ${AndIf} $IsDownloadFinished != "" - ; Get the tick count for the completion of all phases. - System::Call "kernel32::GetTickCount()l .s" - Pop $EndFinishPhaseTickCount - - ; When the value of $IsDownloadFinished is false the download was started - ; but didn't finish. In this case the tick count stored in - ; $EndFinishPhaseTickCount is used to determine how long the download was - ; in progress. 
- ${If} "$IsDownloadFinished" == "false" - ${OrIf} "$EndDownloadPhaseTickCount" == "" - StrCpy $EndDownloadPhaseTickCount "$EndFinishPhaseTickCount" - ; Cancel the download in progress - InetBgDL::Get /RESET /END - ${EndIf} - - - ; When $DownloadFirstTransferSeconds equals an empty string the download - ; never successfully started so set the value to 0. It will be possible to - ; determine that the download didn't successfully start from the seconds for - ; the last download. - ${If} "$DownloadFirstTransferSeconds" == "" - StrCpy $DownloadFirstTransferSeconds "0" - ${EndIf} - - ; When $StartLastDownloadTickCount equals an empty string the download never - ; successfully started so set the value to $EndDownloadPhaseTickCount to - ; compute the correct value. - ${If} $StartLastDownloadTickCount == "" - ; This could happen if the download never successfully starts - StrCpy $StartLastDownloadTickCount "$EndDownloadPhaseTickCount" - ${EndIf} - - ; When $EndPreInstallPhaseTickCount equals 0 the installation phase was - ; never completed so set its value to $EndFinishPhaseTickCount to compute - ; the correct value. - ${If} "$EndPreInstallPhaseTickCount" == "0" - StrCpy $EndPreInstallPhaseTickCount "$EndFinishPhaseTickCount" - ${EndIf} - ; When $EndInstallPhaseTickCount equals 0 the installation phase was never - ; completed so set its value to $EndFinishPhaseTickCount to compute the - ; correct value. - ${If} "$EndInstallPhaseTickCount" == "0" - StrCpy $EndInstallPhaseTickCount "$EndFinishPhaseTickCount" - ${EndIf} - - ; Get the seconds elapsed from the start of the download phase to the end of - ; the download phase. - ${GetSecondsElapsed} "$StartDownloadPhaseTickCount" "$EndDownloadPhaseTickCount" $0 + ; Aborting the abort will allow SendPing which is called by + ; DisplayDownloadError to hide the installer window and close the installer + ; after it sends the metrics ping. 
+ Abort +FunctionEnd - ; Get the seconds elapsed from the start of the last download to the end of - ; the last download. - ${GetSecondsElapsed} "$StartLastDownloadTickCount" "$EndDownloadPhaseTickCount" $1 +Function createProfileCleanup + Call ShouldPromptForProfileCleanup + ${Select} $ProfileCleanupPromptType + ${Case} 0 + StrCpy $CheckboxCleanupProfile 0 + Abort ; Skip this page + ${Case} 1 + StrCpy $ProfileCleanupHeaderString $(STUB_CLEANUP_REINSTALL_HEADER) + StrCpy $ProfileCleanupButtonString $(STUB_CLEANUP_REINSTALL_BUTTON) + ${Case} 2 + StrCpy $ProfileCleanupHeaderString $(STUB_CLEANUP_PAVEOVER_HEADER) + StrCpy $ProfileCleanupButtonString $(STUB_CLEANUP_PAVEOVER_BUTTON) + ${EndSelect} - ; Get the seconds elapsed from the end of the download phase to the - ; completion of the pre-installation check phase. - ${GetSecondsElapsed} "$EndDownloadPhaseTickCount" "$EndPreInstallPhaseTickCount" $2 + nsDialogs::Create /NOUNLOAD 1018 + Pop $Dialog - ; Get the seconds elapsed from the end of the pre-installation check phase - ; to the completion of the installation phase. - ${GetSecondsElapsed} "$EndPreInstallPhaseTickCount" "$EndInstallPhaseTickCount" $3 + SetCtlColors $HWNDPARENT ${FOOTER_CONTROL_TEXT_COLOR_NORMAL} ${FOOTER_BKGRD_COLOR} - ; Get the seconds elapsed from the end of the installation phase to the - ; completion of all phases. - ${GetSecondsElapsed} "$EndInstallPhaseTickCount" "$EndFinishPhaseTickCount" $4 + ; Since the text color for controls is set in this Dialog the foreground and + ; background colors of the Dialog must also be hardcoded. 
+ SetCtlColors $Dialog ${COMMON_TEXT_COLOR_NORMAL} ${COMMON_BKGRD_COLOR} - ${If} $DroplistArch == "$(VERSION_64BIT)" - StrCpy $R0 "1" - ${Else} - StrCpy $R0 "0" - ${EndIf} + FindWindow $7 "#32770" "" $HWNDPARENT + ${GetDlgItemWidthHeight} $HWNDPARENT $8 $9 - ${If} ${RunningX64} - StrCpy $R1 "1" - ${Else} - StrCpy $R1 "0" - ${EndIf} + ; Resize the Dialog to fill the entire window + System::Call 'user32::MoveWindow(i$Dialog,i0,i0,i $8,i $9,i0)' - ; Though these values are sometimes incorrect due to bug 444664 it happens - ; so rarely it isn't worth working around it by reading the registry values. - ${WinVerGetMajor} $5 - ${WinVerGetMinor} $6 - ${WinVerGetBuild} $7 - ${WinVerGetServicePackLevel} $8 - ${If} ${IsServerOS} - StrCpy $9 "1" - ${Else} - StrCpy $9 "0" - ${EndIf} + GetDlgItem $0 $HWNDPARENT 1 ; Install button + ShowWindow $0 ${SW_HIDE} + EnableWindow $0 0 - ${If} "$ExitCode" == "${ERR_SUCCESS}" - ReadINIStr $R5 "$INSTDIR\application.ini" "App" "Version" - ReadINIStr $R6 "$INSTDIR\application.ini" "App" "BuildID" - ${Else} - StrCpy $R5 "0" - StrCpy $R6 "0" - ${EndIf} + GetDlgItem $0 $HWNDPARENT 3 ; Back button + ShowWindow $0 ${SW_HIDE} + EnableWindow $0 0 - ; Whether installed into the default installation directory - ${GetLongPath} "$INSTDIR" $R7 - ${GetLongPath} "$InitialInstallDir" $R8 - ${If} "$R7" == "$R8" - StrCpy $R7 "1" - ${Else} - StrCpy $R7 "0" - ${EndIf} + GetDlgItem $0 $HWNDPARENT 2 ; Cancel button + ; Hide the Cancel button, but don't disable it (or else it won't be possible + ; to close the window) + ShowWindow $0 ${SW_HIDE} - ClearErrors - WriteRegStr HKLM "Software\Mozilla" "${BrandShortName}InstallerTest" \ - "Write Test" - ${If} ${Errors} - StrCpy $R8 "0" - ${Else} - DeleteRegValue HKLM "Software\Mozilla" "${BrandShortName}InstallerTest" - StrCpy $R8 "1" - ${EndIf} + GetDlgItem $0 $HWNDPARENT 10 ; Default browser checkbox + ; Hiding and then disabling allows Esc to still exit the installer + ShowWindow $0 ${SW_HIDE} + EnableWindow $0 0 - 
${If} "$DownloadServerIP" == "" - StrCpy $DownloadServerIP "Unknown" - ${EndIf} + GetDlgItem $0 $HWNDPARENT 11 ; Footer text + ShowWindow $0 ${SW_HIDE} + EnableWindow $0 0 - StrCpy $R2 "" - SetShellVarContext current ; Set SHCTX to the current user - ReadRegStr $R2 HKCU "Software\Classes\http\shell\open\command" "" - ${If} $R2 != "" - ${GetPathFromString} "$R2" $R2 - ${GetParent} "$R2" $R3 - ${GetLongPath} "$R3" $R3 - ${If} $R3 == $INSTDIR - StrCpy $R2 "1" ; This Firefox install is set as default. - ${Else} - StrCpy $R2 "$R2" "" -11 # length of firefox.exe - ${If} "$R2" == "${FileMainEXE}" - StrCpy $R2 "2" ; Another Firefox install is set as default. - ${Else} - StrCpy $R2 "0" - ${EndIf} - ${EndIf} - ${Else} - StrCpy $R2 "0" ; Firefox is not set as default. - ${EndIf} + ${GetDlgItemWidthHeight} $HWNDPARENT $R1 $R2 + ${GetTextWidthHeight} $ProfileCleanupHeaderString $FontInstalling $R1 $R1 $R2 + ${NSD_CreateLabelCenter} 0 ${PROFILE_CLEANUP_LABEL_TOP_DU} 100% $R2 \ + $ProfileCleanupHeaderString + Pop $0 + SendMessage $0 ${WM_SETFONT} $FontInstalling 0 + SetCtlColors $0 ${INSTALL_BLURB_TEXT_COLOR} transparent - ${If} "$R2" == "0" - StrCpy $R3 "" - ReadRegStr $R2 HKLM "Software\Classes\http\shell\open\command" "" - ${If} $R2 != "" - ${GetPathFromString} "$R2" $R2 - ${GetParent} "$R2" $R3 - ${GetLongPath} "$R3" $R3 - ${If} $R3 == $INSTDIR - StrCpy $R2 "1" ; This Firefox install is set as default. - ${Else} - StrCpy $R2 "$R2" "" -11 # length of firefox.exe - ${If} "$R2" == "${FileMainEXE}" - StrCpy $R2 "2" ; Another Firefox install is set as default. - ${Else} - StrCpy $R2 "0" - ${EndIf} - ${EndIf} - ${Else} - StrCpy $R2 "0" ; Firefox is not set as default. - ${EndIf} - ${EndIf} + ${GetDlgItemBottomDU} $Dialog $0 $1 + IntOp $1 $1 + 10 ; add a bit of padding between the header and the button + ${GetTextExtent} $ProfileCleanupButtonString $FontFooter $R1 $R2 + ; Add some padding to both dimensions of the button. 
+ IntOp $R1 $R1 + 100 + IntOp $R2 $R2 + 10 + ; Now that we know the size and the Y coordinate for the button, we can find + ; the correct X coordinate to get it properly centered. + ${GetDlgItemWidthHeight} $HWNDPARENT $R3 $R4 + IntOp $R5 $R1 / 2 + IntOp $R3 $R3 / 2 + IntOp $R3 $R3 - $R5 + ; We need a custom button because the default ones get drawn underneath the + ; background image we're about to insert. + ${NSD_CreateButton} $R3 $1 $R1 $R2 $ProfileCleanupButtonString + Pop $0 + SendMessage $0 ${WM_SETFONT} $FontFooter 0 + ${NSD_OnClick} $0 gotoInstallPage + ${NSD_SetFocus} $0 - ${If} $CanSetAsDefault == "true" - ${If} $CheckboxSetAsDefault == "1" - StrCpy $R3 "2" - ${Else} - StrCpy $R3 "3" - ${EndIf} - ${Else} - ${If} ${AtLeastWin8} - StrCpy $R3 "1" - ${Else} - StrCpy $R3 "0" - ${EndIf} - ${EndIf} + ; For the checkbox, first we need to know the width of the checkbox itself, + ; since it can vary with the display scaling and the theme. + System::Call 'User32::GetSystemMetrics(i 71) i .r1' ; 71 == SM_CXMENUCHECK + ; Now get the width of the label test, if it were all on one line. + ${GetTextExtent} $(STUB_CLEANUP_CHECKBOX_LABEL) $FontCheckbox $R1 $R2 + ${GetDlgItemWidthHeight} $HWNDPARENT $R3 $R4 + ; Add the checkbox width to the text width, then figure out how many lines + ; we're going to need in order to display that text in our dialog. + IntOp $R1 $R1 + $1 + IntOp $R1 $R1 + 5 + StrCpy $R5 $R1 + StrCpy $R6 $R2 + IntOp $R3 $R3 - 150 ; leave some padding on the sides of the dialog + ${While} $R1 > $R3 + StrCpy $R5 $R3 + IntOp $R2 $R2 + $R6 + IntOp $R1 $R1 - $R3 + ${EndWhile} + ${GetDlgItemBottomDU} $Dialog $0 $1 + ; Now that we know the size for the checkbox, center it in the dialog. 
+ ${GetDlgItemWidthHeight} $HWNDPARENT $R3 $R4 + IntOp $R6 $R5 / 2 + IntOp $R3 $R3 / 2 + IntOp $R3 $R3 - $R6 + IntOp $1 $1 + 20 ; add a bit of padding between the button and the checkbox + ${NSD_CreateCheckbox} $R3 $1 $R5 $R2 $(STUB_CLEANUP_CHECKBOX_LABEL) + Pop $CheckboxCleanupProfile + SendMessage $CheckboxCleanupProfile ${WM_SETFONT} $FontCheckbox 0 + ; The uxtheme must be disabled on checkboxes in order to override the system + ; colors and have a transparent background. + System::Call 'uxtheme::SetWindowTheme(i $CheckboxCleanupProfile, w " ", w " ")' + SetCtlColors $CheckboxCleanupProfile ${INSTALL_BLURB_TEXT_COLOR} transparent + ; Setting the background color to transparent isn't enough to actually make a + ; checkbox background transparent, you also have to set the right style. + ${NSD_AddExStyle} $CheckboxCleanupProfile ${WS_EX_TRANSPARENT} + ; For some reason, clicking on the checkbox causes its text to be redrawn + ; one pixel to the side of where it was, but without clearing away the + ; existing text first, so it looks like the weight increases when you click. + ; Hack around this by manually hiding and then re-showing the textbox when + ; it gets clicked on. 
+ ${NSD_OnClick} $CheckboxCleanupProfile RedrawWindow + ${NSD_Check} $CheckboxCleanupProfile -!ifdef STUB_DEBUG - MessageBox MB_OK "${BaseURLStubPing} \ - $\nStub URL Version = ${StubURLVersion}${StubURLVersionAppend} \ - $\nBuild Channel = ${Channel} \ - $\nUpdate Channel = ${UpdateChannel} \ - $\nLocale = ${AB_CD} \ - $\nFirefox x64 = $R0 \ - $\nRunning x64 Windows = $R1 \ - $\nMajor = $5 \ - $\nMinor = $6 \ - $\nBuild = $7 \ - $\nServicePack = $8 \ - $\nIsServer = $9 \ - $\nExit Code = $ExitCode \ - $\nFirefox Launch Code = $FirefoxLaunchCode \ - $\nDownload Retry Count = $DownloadRetryCount \ - $\nDownloaded Bytes = $DownloadedBytes \ - $\nDownload Size Bytes = $DownloadSizeBytes \ - $\nIntroduction Phase Seconds = $IntroPhaseSeconds \ - $\nOptions Phase Seconds = $OptionsPhaseSeconds \ - $\nDownload Phase Seconds = $0 \ - $\nLast Download Seconds = $1 \ - $\nDownload First Transfer Seconds = $DownloadFirstTransferSeconds \ - $\nPreinstall Phase Seconds = $2 \ - $\nInstall Phase Seconds = $3 \ - $\nFinish Phase Seconds = $4 \ - $\nInitial Install Requirements Code = $InitialInstallRequirementsCode \ - $\nOpened Download Page = $OpenedDownloadPage \ - $\nExisting Profile = $ExistingProfile \ - $\nExisting Version = $ExistingVersion \ - $\nExisting Build ID = $ExistingBuildID \ - $\nNew Version = $R5 \ - $\nNew Build ID = $R6 \ - $\nDefault Install Dir = $R7 \ - $\nHas Admin = $R8 \ - $\nDefault Status = $R2 \ - $\nSet As Sefault Status = $R3 \ - $\nDownload Server IP = $DownloadServerIP \ - $\nPost-Signing Data = $PostSigningData" - ; The following will exit the installer - SetAutoClose true - StrCpy $R9 "2" - Call RelativeGotoPage -!else - ${NSD_CreateTimer} OnPing ${DownloadIntervalMS} - InetBgDL::Get 
"${BaseURLStubPing}/${StubURLVersion}${StubURLVersionAppend}/${Channel}/${UpdateChannel}/${AB_CD}/$R0/$R1/$5/$6/$7/$8/$9/$ExitCode/$FirefoxLaunchCode/$DownloadRetryCount/$DownloadedBytes/$DownloadSizeBytes/$IntroPhaseSeconds/$OptionsPhaseSeconds/$0/$1/$DownloadFirstTransferSeconds/$2/$3/$4/$InitialInstallRequirementsCode/$OpenedDownloadPage/$ExistingProfile/$ExistingVersion/$ExistingBuildID/$R5/$R6/$R7/$R8/$R2/$R3/$DownloadServerIP/$PostSigningData" \ - "$PLUGINSDIR\_temp" /END -!endif - ${Else} - ${If} "$IsDownloadFinished" == "false" - ; Cancel the download in progress - InetBgDL::Get /RESET /END - ${EndIf} - ; The following will exit the installer - SetAutoClose true - StrCpy $R9 "2" - Call RelativeGotoPage - ${EndIf} -FunctionEnd + ${GetTextWidthHeight} "$(STUB_BLURB_FOOTER2)" $FontFooter \ + ${INSTALL_FOOTER_WIDTH_DU} $R1 $R2 + !ifdef ${AB_CD}_rtl + nsDialogs::CreateControl STATIC ${DEFAULT_STYLES}|${SS_NOTIFY} \ + ${WS_EX_TRANSPARENT} 30u ${INSTALL_FOOTER_TOP_DU} ${INSTALL_FOOTER_WIDTH_DU} \ + "$R2u" "$(STUB_BLURB_FOOTER2)" + !else + nsDialogs::CreateControl STATIC ${DEFAULT_STYLES}|${SS_NOTIFY}|${SS_RIGHT} \ + ${WS_EX_TRANSPARENT} 175u ${INSTALL_FOOTER_TOP_DU} ${INSTALL_FOOTER_WIDTH_DU} \ + "$R2u" "$(STUB_BLURB_FOOTER2)" + !endif + Pop $0 + SendMessage $0 ${WM_SETFONT} $FontFooter 0 + SetCtlColors $0 ${INSTALL_BLURB_TEXT_COLOR} transparent -Function createInstall - SetShellVarContext all ; Set SHCTX to All Users - ; If the user doesn't have write access to the installation directory set - ; the installation directory to a subdirectory of the All Users application - ; directory and if the user can't write to that location set the installation - ; directory to a subdirectory of the users local application directory - ; (e.g. non-roaming). - Call CanWrite - ${If} "$CanWriteToInstallDir" == "false" - StrCpy $INSTDIR "$APPDATA\${BrandFullName}\" - Call CanWrite - ${If} "$CanWriteToInstallDir" == "false" - ; This should never happen but just in case. 
- StrCpy $CanWriteToInstallDir "false" - ${Else} - StrCpy $INSTDIR "$LOCALAPPDATA\${BrandFullName}\" - Call CanWrite - ${EndIf} - ${EndIf} + ${NSD_CreateBitmap} 0 0 100% 100% "" + Pop $HwndBgBitmapControl + ${NSD_SetStretchedImage} $HwndBgBitmapControl $PLUGINSDIR\bgstub.bmp $BgBitmapImage + ; transparent bg on control prevents flicker on redraw + SetCtlColors $HwndBgBitmapControl ${INSTALL_BLURB_TEXT_COLOR} transparent - Call CheckSpace + LockWindow off + nsDialogs::Show - ${If} ${FileExists} "$INSTDIR" - ; Always display the long path if the path exists. - ${GetLongPath} "$INSTDIR" $INSTDIR - ${EndIf} + ${NSD_FreeImage} $BgBitmapImage +FunctionEnd - ; Check whether the install requirements are satisfied using the default - ; values for metrics. - ${If} "$InitialInstallRequirementsCode" == "" - ${If} "$CanWriteToInstallDir" != "true" - ${AndIf} "$HasRequiredSpaceAvailable" != "true" - StrCpy $InitialInstallRequirementsCode "1" - ${ElseIf} "$CanWriteToInstallDir" != "true" - StrCpy $InitialInstallRequirementsCode "2" - ${ElseIf} "$HasRequiredSpaceAvailable" != "true" - StrCpy $InitialInstallRequirementsCode "3" - ${Else} - StrCpy $InitialInstallRequirementsCode "0" - ${EndIf} - ${EndIf} +Function RedrawWindow + Pop $0 + ShowWindow $0 ${SW_HIDE} + ShowWindow $0 ${SW_SHOW} +FunctionEnd - Call CanWrite - ${If} "$CanWriteToInstallDir" == "false" - MessageBox MB_OK|MB_ICONEXCLAMATION "$(WARN_WRITE_ACCESS_QUIT)\n\n$INSTDIR" - Quit - ${EndIf} +Function gotoInstallPage + ; Eat the parameter that NSD_OnClick always passes but that we don't need. + Pop $0 - Call CheckSpace - ${If} "$HasRequiredSpaceAvailable" == "false" - MessageBox MB_OK|MB_ICONEXCLAMATION "$(WARN_DISK_SPACE_QUIT)" - Quit - ${EndIf} + ; Save the state of the checkbox before it's destroyed. 
+ ${NSD_GetState} $CheckboxCleanupProfile $CheckboxCleanupProfile + + StrCpy $R9 1 + Call RelativeGotoPage +FunctionEnd +Function createInstall ; Begin setting up the download/install window nsDialogs::Create /NOUNLOAD 1018 @@ -833,8 +767,6 @@ Function createInstall StrCpy $CurrentBlurbIdx "0" - ; In some locales, the footer message may be too long to fit on one line. - ; Figure out how much height it needs and give it that much. ${GetTextWidthHeight} "$(STUB_BLURB_FOOTER2)" $FontFooter \ ${INSTALL_FOOTER_WIDTH_DU} $R1 $R2 !ifdef ${AB_CD}_rtl @@ -866,7 +798,7 @@ Function createInstall EnableWindow $0 0 ShowWindow $0 ${SW_HIDE} - GetDlgItem $0 $HWNDPARENT 3 ; Back button used for Options + GetDlgItem $0 $HWNDPARENT 3 ; Back button EnableWindow $0 0 ShowWindow $0 ${SW_HIDE} @@ -1153,107 +1085,350 @@ Function OnDownload GetDlgItem $5 $HWNDPARENT 2 EnableWindow $5 0 - ; Open a handle to prevent modification of the full installer - StrCpy $R9 "${INVALID_HANDLE_VALUE}" - System::Call 'kernel32::CreateFileW(w "$PLUGINSDIR\download.exe", \ - i ${GENERIC_READ}, \ - i ${FILE_SHARE_READ}, i 0, \ - i ${OPEN_EXISTING}, i 0, i 0) i .R9' - StrCpy $HandleDownload "$R9" + ; Open a handle to prevent modification of the full installer + StrCpy $R9 "${INVALID_HANDLE_VALUE}" + System::Call 'kernel32::CreateFileW(w "$PLUGINSDIR\download.exe", \ + i ${GENERIC_READ}, \ + i ${FILE_SHARE_READ}, i 0, \ + i ${OPEN_EXISTING}, i 0, i 0) i .R9' + StrCpy $HandleDownload "$R9" + + ${If} $HandleDownload == ${INVALID_HANDLE_VALUE} + StrCpy $ExitCode "${ERR_PREINSTALL_INVALID_HANDLE}" + StrCpy $0 "0" + StrCpy $1 "0" + ${Else} + CertCheck::VerifyCertTrust "$PLUGINSDIR\download.exe" + Pop $0 + CertCheck::VerifyCertNameIssuer "$PLUGINSDIR\download.exe" \ + "${CertNameDownload}" "${CertIssuerDownload}" + Pop $1 + ${If} $0 == 0 + ${AndIf} $1 == 0 + StrCpy $ExitCode "${ERR_PREINSTALL_CERT_UNTRUSTED_AND_ATTRIBUTES}" + ${ElseIf} $0 == 0 + StrCpy $ExitCode "${ERR_PREINSTALL_CERT_UNTRUSTED}" + ${ElseIf} $1 == 
0 + StrCpy $ExitCode "${ERR_PREINSTALL_CERT_ATTRIBUTES}" + ${EndIf} + ${EndIf} + + System::Call "kernel32::GetTickCount()l .s" + Pop $EndPreInstallPhaseTickCount + + ${If} $0 == 0 + ${OrIf} $1 == 0 + ; Use a timer so the UI has a chance to update + ${NSD_CreateTimer} DisplayDownloadError ${InstallIntervalMS} + Return + ${EndIf} + + ; Instead of extracting the files we use the downloaded installer to + ; install in case it needs to perform operations that the stub doesn't + ; know about. + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "InstallDirectoryPath" "$INSTDIR" + ; Don't create the QuickLaunch or Taskbar shortcut from the launched installer + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "QuickLaunchShortcut" "false" + + ; Always create a start menu shortcut, so the user always has some way + ; to access the application. + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "StartMenuShortcuts" "true" + + ; Either avoid or force adding a taskbar pin and desktop shortcut + ; based on the checkbox value. + ${If} $CheckboxShortcuts == 0 + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "TaskbarShortcut" "false" + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "DesktopShortcut" "false" + ${Else} + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "TaskbarShortcut" "true" + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "DesktopShortcut" "true" + ${EndIf} + +!ifdef MOZ_MAINTENANCE_SERVICE + ${If} $CheckboxInstallMaintSvc == 1 + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "true" + ${Else} + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "false" + ${EndIf} +!else + WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "false" +!endif + + ; Delete the taskbar shortcut history to ensure we do the right thing based on + ; the config file above. 
+ ${GetShortcutsLogPath} $0 + Delete "$0" + + GetFunctionAddress $0 RemoveFileProgressCallback + ${RemovePrecompleteEntries} $0 + + ; Delete the install.log and let the full installer create it. When the + ; installer closes it we can detect that it has completed. + Delete "$INSTDIR\install.log" + + ; Delete firefox.exe.moz-upgrade and firefox.exe.moz-delete if it exists + ; since it being present will require an OS restart for the full + ; installer. + Delete "$INSTDIR\${FileMainEXE}.moz-upgrade" + Delete "$INSTDIR\${FileMainEXE}.moz-delete" + + System::Call "kernel32::GetTickCount()l .s" + Pop $EndPreInstallPhaseTickCount + + Exec "$\"$PLUGINSDIR\download.exe$\" /INI=$PLUGINSDIR\${CONFIG_INI}" + ${NSD_CreateTimer} CheckInstall ${InstallIntervalMS} + ${Else} + ${If} $HalfOfDownload != "true" + ${AndIf} $3 > $HalfOfDownload + StrCpy $HalfOfDownload "true" + ${EndIf} + StrCpy $DownloadedBytes "$3" + StrCpy $ProgressCompleted "$DownloadedBytes" + Call SetProgressBars + ${EndIf} + ${EndIf} +FunctionEnd + +Function SendPing + HideWindow + ${If} $CheckboxSendPing == 1 + ; Get the tick count for the completion of all phases. + System::Call "kernel32::GetTickCount()l .s" + Pop $EndFinishPhaseTickCount + + ; When the value of $IsDownloadFinished is false the download was started + ; but didn't finish. In this case the tick count stored in + ; $EndFinishPhaseTickCount is used to determine how long the download was + ; in progress. + ${If} "$IsDownloadFinished" == "false" + ${OrIf} "$EndDownloadPhaseTickCount" == "" + StrCpy $EndDownloadPhaseTickCount "$EndFinishPhaseTickCount" + ; Cancel the download in progress + InetBgDL::Get /RESET /END + ${EndIf} + + + ; When $DownloadFirstTransferSeconds equals an empty string the download + ; never successfully started so set the value to 0. It will be possible to + ; determine that the download didn't successfully start from the seconds for + ; the last download. 
+ ${If} "$DownloadFirstTransferSeconds" == "" + StrCpy $DownloadFirstTransferSeconds "0" + ${EndIf} + + ; When $StartLastDownloadTickCount equals an empty string the download never + ; successfully started so set the value to $EndDownloadPhaseTickCount to + ; compute the correct value. + ${If} $StartLastDownloadTickCount == "" + ; This could happen if the download never successfully starts + StrCpy $StartLastDownloadTickCount "$EndDownloadPhaseTickCount" + ${EndIf} + + ; When $EndPreInstallPhaseTickCount equals 0 the installation phase was + ; never completed so set its value to $EndFinishPhaseTickCount to compute + ; the correct value. + ${If} "$EndPreInstallPhaseTickCount" == "0" + StrCpy $EndPreInstallPhaseTickCount "$EndFinishPhaseTickCount" + ${EndIf} + + ; When $EndInstallPhaseTickCount equals 0 the installation phase was never + ; completed so set its value to $EndFinishPhaseTickCount to compute the + ; correct value. + ${If} "$EndInstallPhaseTickCount" == "0" + StrCpy $EndInstallPhaseTickCount "$EndFinishPhaseTickCount" + ${EndIf} + + ; Get the seconds elapsed from the start of the download phase to the end of + ; the download phase. + ${GetSecondsElapsed} "$StartDownloadPhaseTickCount" "$EndDownloadPhaseTickCount" $0 + + ; Get the seconds elapsed from the start of the last download to the end of + ; the last download. + ${GetSecondsElapsed} "$StartLastDownloadTickCount" "$EndDownloadPhaseTickCount" $1 + + ; Get the seconds elapsed from the end of the download phase to the + ; completion of the pre-installation check phase. + ${GetSecondsElapsed} "$EndDownloadPhaseTickCount" "$EndPreInstallPhaseTickCount" $2 + + ; Get the seconds elapsed from the end of the pre-installation check phase + ; to the completion of the installation phase. + ${GetSecondsElapsed} "$EndPreInstallPhaseTickCount" "$EndInstallPhaseTickCount" $3 + + ; Get the seconds elapsed from the end of the installation phase to the + ; completion of all phases. 
+ ${GetSecondsElapsed} "$EndInstallPhaseTickCount" "$EndFinishPhaseTickCount" $4 + + ${If} $DroplistArch == "$(VERSION_64BIT)" + StrCpy $R0 "1" + ${Else} + StrCpy $R0 "0" + ${EndIf} + + ${If} ${RunningX64} + StrCpy $R1 "1" + ${Else} + StrCpy $R1 "0" + ${EndIf} + + ; Though these values are sometimes incorrect due to bug 444664 it happens + ; so rarely it isn't worth working around it by reading the registry values. + ${WinVerGetMajor} $5 + ${WinVerGetMinor} $6 + ${WinVerGetBuild} $7 + ${WinVerGetServicePackLevel} $8 + ${If} ${IsServerOS} + StrCpy $9 "1" + ${Else} + StrCpy $9 "0" + ${EndIf} + + ${If} "$ExitCode" == "${ERR_SUCCESS}" + ReadINIStr $R5 "$INSTDIR\application.ini" "App" "Version" + ReadINIStr $R6 "$INSTDIR\application.ini" "App" "BuildID" + ${Else} + StrCpy $R5 "0" + StrCpy $R6 "0" + ${EndIf} + + ; Whether installed into the default installation directory + ${GetLongPath} "$INSTDIR" $R7 + ${GetLongPath} "$InitialInstallDir" $R8 + ${If} "$R7" == "$R8" + StrCpy $R7 "1" + ${Else} + StrCpy $R7 "0" + ${EndIf} + + ClearErrors + WriteRegStr HKLM "Software\Mozilla" "${BrandShortName}InstallerTest" \ + "Write Test" + ${If} ${Errors} + StrCpy $R8 "0" + ${Else} + DeleteRegValue HKLM "Software\Mozilla" "${BrandShortName}InstallerTest" + StrCpy $R8 "1" + ${EndIf} - ${If} $HandleDownload == ${INVALID_HANDLE_VALUE} - StrCpy $ExitCode "${ERR_PREINSTALL_INVALID_HANDLE}" - StrCpy $0 "0" - StrCpy $1 "0" + ${If} "$DownloadServerIP" == "" + StrCpy $DownloadServerIP "Unknown" + ${EndIf} + + StrCpy $R2 "" + SetShellVarContext current ; Set SHCTX to the current user + ReadRegStr $R2 HKCU "Software\Classes\http\shell\open\command" "" + ${If} $R2 != "" + ${GetPathFromString} "$R2" $R2 + ${GetParent} "$R2" $R3 + ${GetLongPath} "$R3" $R3 + ${If} $R3 == $INSTDIR + StrCpy $R2 "1" ; This Firefox install is set as default. 
${Else} - CertCheck::VerifyCertTrust "$PLUGINSDIR\download.exe" - Pop $0 - CertCheck::VerifyCertNameIssuer "$PLUGINSDIR\download.exe" \ - "${CertNameDownload}" "${CertIssuerDownload}" - Pop $1 - ${If} $0 == 0 - ${AndIf} $1 == 0 - StrCpy $ExitCode "${ERR_PREINSTALL_CERT_UNTRUSTED_AND_ATTRIBUTES}" - ${ElseIf} $0 == 0 - StrCpy $ExitCode "${ERR_PREINSTALL_CERT_UNTRUSTED}" - ${ElseIf} $1 == 0 - StrCpy $ExitCode "${ERR_PREINSTALL_CERT_ATTRIBUTES}" + StrCpy $R2 "$R2" "" -11 # length of firefox.exe + ${If} "$R2" == "${FileMainEXE}" + StrCpy $R2 "2" ; Another Firefox install is set as default. + ${Else} + StrCpy $R2 "0" ${EndIf} ${EndIf} + ${Else} + StrCpy $R2 "0" ; Firefox is not set as default. + ${EndIf} - System::Call "kernel32::GetTickCount()l .s" - Pop $EndPreInstallPhaseTickCount - - ${If} $0 == 0 - ${OrIf} $1 == 0 - ; Use a timer so the UI has a chance to update - ${NSD_CreateTimer} DisplayDownloadError ${InstallIntervalMS} - Return + ${If} "$R2" == "0" + StrCpy $R3 "" + ReadRegStr $R2 HKLM "Software\Classes\http\shell\open\command" "" + ${If} $R2 != "" + ${GetPathFromString} "$R2" $R2 + ${GetParent} "$R2" $R3 + ${GetLongPath} "$R3" $R3 + ${If} $R3 == $INSTDIR + StrCpy $R2 "1" ; This Firefox install is set as default. + ${Else} + StrCpy $R2 "$R2" "" -11 # length of firefox.exe + ${If} "$R2" == "${FileMainEXE}" + StrCpy $R2 "2" ; Another Firefox install is set as default. + ${Else} + StrCpy $R2 "0" + ${EndIf} + ${EndIf} + ${Else} + StrCpy $R2 "0" ; Firefox is not set as default. ${EndIf} + ${EndIf} - ; Instead of extracting the files we use the downloaded installer to - ; install in case it needs to perform operations that the stub doesn't - ; know about. 
- WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "InstallDirectoryPath" "$INSTDIR" - ; Don't create the QuickLaunch or Taskbar shortcut from the launched installer - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "QuickLaunchShortcut" "false" - - ; Always create a start menu shortcut, so the user always has some way - ; to access the application. - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "StartMenuShortcuts" "true" - - ; Either avoid or force adding a taskbar pin and desktop shortcut - ; based on the checkbox value. - ${If} $CheckboxShortcuts == 0 - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "TaskbarShortcut" "false" - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "DesktopShortcut" "false" + ${If} $CanSetAsDefault == "true" + ${If} $CheckboxSetAsDefault == "1" + StrCpy $R3 "2" ${Else} - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "TaskbarShortcut" "true" - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "DesktopShortcut" "true" + StrCpy $R3 "3" ${EndIf} - -!ifdef MOZ_MAINTENANCE_SERVICE - ${If} $CheckboxInstallMaintSvc == 1 - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "true" + ${Else} + ${If} ${AtLeastWin8} + StrCpy $R3 "1" ${Else} - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "false" + StrCpy $R3 "0" ${EndIf} + ${EndIf} + +!ifdef STUB_DEBUG + MessageBox MB_OK "${BaseURLStubPing} \ + $\nStub URL Version = ${StubURLVersion}${StubURLVersionAppend} \ + $\nBuild Channel = ${Channel} \ + $\nUpdate Channel = ${UpdateChannel} \ + $\nLocale = ${AB_CD} \ + $\nFirefox x64 = $R0 \ + $\nRunning x64 Windows = $R1 \ + $\nMajor = $5 \ + $\nMinor = $6 \ + $\nBuild = $7 \ + $\nServicePack = $8 \ + $\nIsServer = $9 \ + $\nExit Code = $ExitCode \ + $\nFirefox Launch Code = $FirefoxLaunchCode \ + $\nDownload Retry Count = $DownloadRetryCount \ + $\nDownloaded Bytes = $DownloadedBytes \ + $\nDownload Size Bytes = $DownloadSizeBytes \ + $\nIntroduction Phase Seconds = $IntroPhaseSeconds \ + 
$\nOptions Phase Seconds = $OptionsPhaseSeconds \ + $\nDownload Phase Seconds = $0 \ + $\nLast Download Seconds = $1 \ + $\nDownload First Transfer Seconds = $DownloadFirstTransferSeconds \ + $\nPreinstall Phase Seconds = $2 \ + $\nInstall Phase Seconds = $3 \ + $\nFinish Phase Seconds = $4 \ + $\nInitial Install Requirements Code = $InitialInstallRequirementsCode \ + $\nOpened Download Page = $OpenedDownloadPage \ + $\nExisting Profile = $ExistingProfile \ + $\nExisting Version = $ExistingVersion \ + $\nExisting Build ID = $ExistingBuildID \ + $\nNew Version = $R5 \ + $\nNew Build ID = $R6 \ + $\nDefault Install Dir = $R7 \ + $\nHas Admin = $R8 \ + $\nDefault Status = $R2 \ + $\nSet As Sefault Status = $R3 \ + $\nDownload Server IP = $DownloadServerIP \ + $\nPost-Signing Data = $PostSigningData \ + $\nProfile cleanup prompt shown = $ProfileCleanupPromptType \ + $\nDid profile cleanup = $CheckboxCleanupProfile" + ; The following will exit the installer + SetAutoClose true + StrCpy $R9 "2" + Call RelativeGotoPage !else - WriteINIStr "$PLUGINSDIR\${CONFIG_INI}" "Install" "MaintenanceService" "false" + ${NSD_CreateTimer} OnPing ${DownloadIntervalMS} + InetBgDL::Get "${BaseURLStubPing}/${StubURLVersion}${StubURLVersionAppend}/${Channel}/${UpdateChannel}/${AB_CD}/$R0/$R1/$5/$6/$7/$8/$9/$ExitCode/$FirefoxLaunchCode/$DownloadRetryCount/$DownloadedBytes/$DownloadSizeBytes/$IntroPhaseSeconds/$OptionsPhaseSeconds/$0/$1/$DownloadFirstTransferSeconds/$2/$3/$4/$InitialInstallRequirementsCode/$OpenedDownloadPage/$ExistingProfile/$ExistingVersion/$ExistingBuildID/$R5/$R6/$R7/$R8/$R2/$R3/$DownloadServerIP/$PostSigningData/$ProfileCleanupPromptType/$CheckboxCleanupProfile" \ + "$PLUGINSDIR\_temp" /END !endif - - ; Delete the taskbar shortcut history to ensure we do the right thing based on - ; the config file above. 
- ${GetShortcutsLogPath} $0 - Delete "$0" - - GetFunctionAddress $0 RemoveFileProgressCallback - ${RemovePrecompleteEntries} $0 - - ; Delete the install.log and let the full installer create it. When the - ; installer closes it we can detect that it has completed. - Delete "$INSTDIR\install.log" - - ; Delete firefox.exe.moz-upgrade and firefox.exe.moz-delete if it exists - ; since it being present will require an OS restart for the full - ; installer. - Delete "$INSTDIR\${FileMainEXE}.moz-upgrade" - Delete "$INSTDIR\${FileMainEXE}.moz-delete" - - System::Call "kernel32::GetTickCount()l .s" - Pop $EndPreInstallPhaseTickCount - - Exec "$\"$PLUGINSDIR\download.exe$\" /INI=$PLUGINSDIR\${CONFIG_INI}" - ${NSD_CreateTimer} CheckInstall ${InstallIntervalMS} - ${Else} - ${If} $HalfOfDownload != "true" - ${AndIf} $3 > $HalfOfDownload - StrCpy $HalfOfDownload "true" - ${EndIf} - StrCpy $DownloadedBytes "$3" - StrCpy $ProgressCompleted "$DownloadedBytes" - Call SetProgressBars + ${Else} + ${If} "$IsDownloadFinished" == "false" + ; Cancel the download in progress + InetBgDL::Get /RESET /END ${EndIf} + ; The following will exit the installer + SetAutoClose true + StrCpy $R9 "2" + Call RelativeGotoPage ${EndIf} FunctionEnd @@ -1475,8 +1650,13 @@ Function LaunchApp ${GetParameters} $0 ${GetOptions} "$0" "/UAC:" $1 ${If} ${Errors} - Exec "$\"$INSTDIR\${FileMainEXE}$\"" + ${If} $CheckboxCleanupProfile == 1 + Exec "$\"$INSTDIR\${FileMainEXE}$\" -reset-profile -migration" + ${Else} + Exec "$\"$INSTDIR\${FileMainEXE}$\"" + ${EndIf} ${Else} + StrCpy $R1 $CheckboxCleanupProfile GetFunctionAddress $0 LaunchAppFromElevatedProcess UAC::ExecCodeSegment $0 ${EndIf} @@ -1485,7 +1665,11 @@ FunctionEnd Function LaunchAppFromElevatedProcess ; Set the current working directory to the installation directory SetOutPath "$INSTDIR" - Exec "$\"$INSTDIR\${FileMainEXE}$\"" + ${If} $R1 == 1 + Exec "$\"$INSTDIR\${FileMainEXE}$\" -reset-profile -migration" + ${Else} + Exec "$\"$INSTDIR\${FileMainEXE}$\"" + 
${EndIf} FunctionEnd Function CopyPostSigningData @@ -1529,5 +1713,163 @@ Function OpenManualDownloadURL ExecShell "open" "${URLManualDownload}${URLManualDownloadAppend}" FunctionEnd +Function ShouldPromptForProfileCleanup + Call GetLatestReleasedVersion + + ; This will be our return value. + StrCpy $ProfileCleanupPromptType 0 + + ; Only consider installations of the same architecture we're installing. + ${If} $DroplistArch == "$(VERSION_64BIT)" + SetRegView 64 + ${Else} + SetRegView 32 + ${EndIf} + + ; Make sure $APPDATA is the user's AppData and not ProgramData. + ; We'll set this back to all at the end of the function. + SetShellVarContext current + + ; Check each Profile section in profiles.ini until we find the default profile. + StrCpy $R0 "" + ${If} ${FileExists} "$APPDATA\Mozilla\Firefox\profiles.ini" + StrCpy $0 0 + ${Do} + ClearErrors + ; Check if the section exists by reading a value that must be present. + ReadINIStr $1 "$APPDATA\Mozilla\Firefox\profiles.ini" "Profile$0" "Path" + ${If} ${Errors} + ; We've run out of profile sections. + ${Break} + ${EndIf} + + ClearErrors + ReadINIStr $1 "$APPDATA\Mozilla\Firefox\profiles.ini" "Profile$0" "Default" + ${IfNot} ${Errors} + ${AndIf} $1 == "1" + ; We've found the default profile + ReadINIStr $1 "$APPDATA\Mozilla\Firefox\profiles.ini" "Profile$0" "Path" + ReadINIStr $2 "$APPDATA\Mozilla\Firefox\profiles.ini" "Profile$0" "IsRelative" + ${If} $2 == "1" + StrCpy $R0 "$APPDATA\Mozilla\Firefox\$1" + ${Else} + StrCpy $R0 "$1" + ${EndIf} + GetFullPathName $R0 $R0 + ${Break} + ${EndIf} + + IntOp $0 $0 + 1 + ${Loop} + ${EndIf} + + ${If} $R0 == "" + ; No profile to clean up, so don't show the cleanup prompt. + GoTo end + ${EndIf} + + ; We have at least one profile present. If we don't have any installations, + ; then we need to show the re-install prompt. We'll say there's an + ; installation present if HKCR\FirefoxURL* exists and points to a real path. 
+ StrCpy $0 0 + StrCpy $R9 "" + ${Do} + ClearErrors + EnumRegKey $1 HKCR "" $0 + ${If} ${Errors} + ${OrIf} $1 == "" + ${Break} + ${EndIf} + ${WordFind} "$1" "-" "+1{" $2 + ${If} $2 == "FirefoxURL" + ClearErrors + ReadRegStr $2 HKCR "$1\DefaultIcon" "" + ${IfNot} ${Errors} + ${GetPathFromString} $2 $1 + ${If} ${FileExists} $1 + StrCpy $R9 $1 + ${Break} + ${EndIf} + ${EndIf} + ${EndIf} + IntOp $0 $0 + 1 + ${Loop} + ${If} $R9 == "" + StrCpy $ProfileCleanupPromptType 1 + GoTo end + ${EndIf} + + ; Okay, there's at least one install, let's see if it's for this channel. + SetShellVarContext all + ${GetSingleInstallPath} "Software\Mozilla\${BrandFullNameInternal}" $0 + ${If} $0 == "false" + SetShellVarContext current + ${GetSingleInstallPath} "Software\Mozilla\${BrandFullNameInternal}" $0 + ${If} $0 == "false" + ; Existing installs are not for this channel. Don't show any prompt. + GoTo end + ${EndIf} + ${EndIf} + + ; Find out what version the default profile was last used on. + ${If} ${FileExists} "$R0\compatibility.ini" + ClearErrors + ReadINIStr $0 "$R0\compatibility.ini" "Compatibility" "LastVersion" + ${If} ${Errors} + GoTo end + ${EndIf} + ${WordFind} $0 "." "+1{" $0 + + ; We don't know what version we're about to install because we haven't + ; downloaded it yet. Find out what the latest version released on this + ; channel is and assume we'll be installing that one. + Call GetLatestReleasedVersion + ${If} ${Errors} + ; Use this stub installer's version as a fallback when we can't get the + ; real current version; this may be behind, but it's better than nothing. + StrCpy $1 ${AppVersion} + ${EndIf} + + ${WordFind} $1 "." "+1{" $1 + IntOp $1 $1 - 2 + + ${If} $1 > $0 + ; Default profile was last used more than two versions ago, so we need + ; to show the paveover version of the profile cleanup prompt. 
+ StrCpy $ProfileCleanupPromptType 2 + ${EndIf} + ${EndIf} + + end: + SetRegView lastused + SetShellVarContext all +FunctionEnd + +Function GetLatestReleasedVersion + ClearErrors + nsJSON::Set /tree requestConfig /value \ + `{"Url": "https://product-details.mozilla.org/1.0/firefox_versions.json", "Async": false}` + IfErrors end + nsJSON::Set /http requestConfig + IfErrors end + ${Select} ${Channel} + ${Case} "unofficial" + StrCpy $1 "FIREFOX_NIGHTLY" + ${Case} "nightly" + StrCpy $1 "FIREFOX_NIGHTLY" + ${Case} "aurora" + StrCpy $1 "FIREFOX_AURORA" + ${Case} "beta" + StrCpy $1 "LATEST_FIREFOX_RELEASED_DEVEL_VERSION" + ${Case} "release" + StrCpy $1 "LATEST_FIREFOX_VERSION" + ${EndSelect} + nsJSON::Get "Output" $1 /end + IfErrors end + Pop $1 + + end: +FunctionEnd + Section SectionEnd diff --git a/browser/locales/en-US/installer/nsisstrings.properties b/browser/locales/en-US/installer/nsisstrings.properties index 60773b100a5f7ffd68047fea8e3a928e5bdd0b3e..94c971c3833354aa4b8242217b824bb6b3ccf0e1 100644 --- a/browser/locales/en-US/installer/nsisstrings.properties +++ b/browser/locales/en-US/installer/nsisstrings.properties @@ -20,6 +20,14 @@ INSTALLER_WIN_CAPTION=$BrandShortName Installer +# The \n in the next two strings can be moved or deleted as needed to make +# the string fit in the 3 lines of space available. +STUB_CLEANUP_PAVEOVER_HEADER=$BrandShortName is already installed.\nLet's update it. +STUB_CLEANUP_REINSTALL_HEADER=$BrandShortName has been installed before.\nLet's get you a new copy. 
+STUB_CLEANUP_PAVEOVER_BUTTON=&Update +STUB_CLEANUP_REINSTALL_BUTTON=Re-&install +STUB_CLEANUP_CHECKBOX_LABEL=&Restore default settings and remove old add-ons for optimal performance + STUB_INSTALLING_LABEL=Now installing STUB_BLURB1=Fast, responsive online experiences STUB_BLURB2=Compatibility with more of your favorite sites diff --git a/browser/modules/test/browser/browser_UsageTelemetry_content.js b/browser/modules/test/browser/browser_UsageTelemetry_content.js index 845040ab632cb5b34e3e6bedcc7f874c85c67d01..5cf39c6a98e528839501a54517c7298653d980e9 100644 --- a/browser/modules/test/browser/browser_UsageTelemetry_content.js +++ b/browser/modules/test/browser/browser_UsageTelemetry_content.js @@ -80,7 +80,7 @@ add_task(async function test_context_menu() { checkKeyedHistogram(search_hist, "other-MozSearch.contextmenu", 1); // Also check events. - let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "contextmenu", null, {engine: "other-MozSearch"}]]); @@ -116,7 +116,7 @@ add_task(async function test_about_newtab() { checkKeyedHistogram(search_hist, "other-MozSearch.newtab", 1); // Also check events. 
- let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "about_newtab", "enter", {engine: "other-MozSearch"}]]); diff --git a/browser/modules/test/browser/browser_UsageTelemetry_content_aboutHome.js b/browser/modules/test/browser/browser_UsageTelemetry_content_aboutHome.js index eef3b6f5078cff51e8673073d2f02c767a3e2ed8..b8fa5845355987828df5dc8595b2d2a5091a73ed 100644 --- a/browser/modules/test/browser/browser_UsageTelemetry_content_aboutHome.js +++ b/browser/modules/test/browser/browser_UsageTelemetry_content_aboutHome.js @@ -80,7 +80,7 @@ add_task(async function test_abouthome_simpleQuery() { checkKeyedHistogram(search_hist, "other-MozSearch.abouthome", 1); // Also check events. - let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "about_home", "enter", {engine: "other-MozSearch"}]]); diff --git a/browser/modules/test/browser/browser_UsageTelemetry_searchbar.js b/browser/modules/test/browser/browser_UsageTelemetry_searchbar.js index 2f79c96f16b67ca73528884b35effd5dfaa0355a..7f991bd54adf39df40e2833861de66edafd6bbac 100644 --- a/browser/modules/test/browser/browser_UsageTelemetry_searchbar.js +++ b/browser/modules/test/browser/browser_UsageTelemetry_searchbar.js @@ -128,7 +128,7 @@ add_task(async function test_plainQuery() { checkKeyedHistogram(search_hist, "other-MozSearch.searchbar", 1); // Also check events. 
- let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "searchbar", "enter", {engine: "other-MozSearch"}]]); @@ -172,7 +172,7 @@ add_task(async function test_oneOff_enter() { checkKeyedHistogram(search_hist, "other-MozSearch2.searchbar", 1); // Also check events. - let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "searchbar", "oneoff", {engine: "other-MozSearch2"}]]); @@ -297,7 +297,7 @@ add_task(async function test_suggestion_click() { checkKeyedHistogram(search_hist, searchEngineId + ".searchbar", 1); // Also check events. 
- let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "searchbar", "suggestion", {engine: searchEngineId}]]); diff --git a/browser/modules/test/browser/browser_UsageTelemetry_urlbar.js b/browser/modules/test/browser/browser_UsageTelemetry_urlbar.js index 20b70c978c4bdcf1a39b563b7ee8bc62acd56aff..ae86f92fca4f80cd8e7269ebda7cd2b9732e8883 100644 --- a/browser/modules/test/browser/browser_UsageTelemetry_urlbar.js +++ b/browser/modules/test/browser/browser_UsageTelemetry_urlbar.js @@ -137,7 +137,7 @@ add_task(async function test_simpleQuery() { checkKeyedHistogram(search_hist, "other-MozSearch.urlbar", 1); // Also check events. - let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "urlbar", "enter", {engine: "other-MozSearch"}]]); @@ -196,7 +196,7 @@ add_task(async function test_searchAlias() { checkKeyedHistogram(search_hist, "other-MozSearch.urlbar", 1); // Also check events. 
- let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "urlbar", "alias", {engine: "other-MozSearch"}]]); @@ -260,7 +260,7 @@ add_task(async function test_oneOff_enter() { checkKeyedHistogram(search_hist, "other-MozSearch.urlbar", 1); // Also check events. - let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "urlbar", "oneoff", {engine: "other-MozSearch"}]]); @@ -407,7 +407,7 @@ add_task(async function test_suggestion_click() { checkKeyedHistogram(search_hist, searchEngineId + ".urlbar", 1); // Also check events. 
- let events = Services.telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + let events = Services.telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); events = (events.parent || []).filter(e => e[1] == "navigation" && e[2] == "search"); checkEvents(events, [["navigation", "search", "urlbar", "suggestion", {engine: searchEngineId}]]); diff --git a/build/moz.configure/old.configure b/build/moz.configure/old.configure index 9966a59d3f3c423382e0141420a716ce96446097..550e93d697f6576c94626c657ab8a80ec70df522 100644 --- a/build/moz.configure/old.configure +++ b/build/moz.configure/old.configure @@ -159,9 +159,6 @@ def old_configure_options(*options): '--enable-accessibility', '--enable-address-sanitizer', '--enable-alsa', - '--enable-b2g-bt', - '--enable-b2g-camera', - '--enable-b2g-ril', '--enable-bundled-fonts', '--enable-clang-plugin', '--enable-content-sandbox', @@ -193,7 +190,6 @@ def old_configure_options(*options): '--enable-maintenance-service', '--enable-memory-sanitizer', '--enable-mobile-optimize', - '--enable-mozril-geoloc', '--enable-necko-wifi', '--enable-negotiateauth', '--enable-nfc', @@ -203,14 +199,12 @@ def old_configure_options(*options): '--enable-optimize', '--enable-parental-controls', '--enable-pie', - '--enable-png-arm-neon-support', '--enable-posix-nspr-emulation', '--enable-pref-extensions', '--enable-pulseaudio', '--enable-raw', '--enable-readline', '--enable-reflow-perf', - '--enable-safe-browsing', '--enable-sandbox', '--enable-signmar', '--enable-simulator', @@ -231,7 +225,6 @@ def old_configure_options(*options): '--enable-ui-locale', '--enable-universalchardet', '--enable-updater', - '--enable-url-classifier', '--enable-valgrind', '--enable-verify-mar', '--enable-webrtc', diff --git a/build/moz.configure/toolchain.configure b/build/moz.configure/toolchain.configure index bdf1f139594831ffc749c3a2f79f6ae8e21c6bfc..638180c50f11cf568d5282c108a3398d16e29f05 100755 --- 
a/build/moz.configure/toolchain.configure +++ b/build/moz.configure/toolchain.configure @@ -1039,6 +1039,44 @@ set_define('HAVE_VISIBILITY_ATTRIBUTE', set_config('WRAP_SYSTEM_INCLUDES', wrap_system_includes) set_config('VISIBILITY_FLAGS', visibility_flags) +@depends(c_compiler) +@imports('multiprocessing') +@imports(_from='__builtin__', _import='min') +def pgo_flags(compiler): + if compiler.type in ('gcc', 'clang'): + return namespace( + gen_cflags=['-fprofile-generate'], + gen_ldflags=['-fprofile-generate'], + use_cflags=['-fprofile-use', '-fprofile-correction', + '-Wcoverage-mismatch'], + use_ldflags=['-fprofile-use'], + ) + + if compiler.type == 'msvc': + num_cores = min(8, multiprocessing.cpu_count()) + cgthreads = '-CGTHREADS:%s' % num_cores + + return namespace( + gen_cflags=['-GL'], + gen_ldflags=['-LTCG:PGINSTRUMENT', '-PogoSafeMode', cgthreads], + # XXX: PGO builds can fail with warnings treated as errors, + # specifically "no profile data available" appears to be + # treated as an error sometimes. This might be a consequence + # of using WARNINGS_AS_ERRORS in some modules, combined + # with the linker doing most of the work in the whole-program + # optimization/PGO case. I think it's probably a compiler bug, + # but we work around it here. + use_cflags=['-GL', '-wd4624', '-wd4952'], + # XXX: should be -LTCG:PGOPTIMIZE, but that fails on libxul. + # Probably also a compiler bug, but what can you do? + use_ldflags=['-LTCG:PGUPDATE', cgthreads], + ) + +set_config('PROFILE_GEN_CFLAGS', pgo_flags.gen_cflags) +set_config('PROFILE_GEN_LDFLAGS', pgo_flags.gen_ldflags) +set_config('PROFILE_USE_CFLAGS', pgo_flags.use_cflags) +set_config('PROFILE_USE_LDFLAGS', pgo_flags.use_ldflags) + # We only want to include windows.configure when we are compiling on # Windows, for Windows. 
@depends(target, host) diff --git a/devtools/client/inspector/rules/models/rule.js b/devtools/client/inspector/rules/models/rule.js index f8ee8292076371a1e72903bda40b73319a64a4aa..3edd99a0804ed802bbca087d37553d364baf476b 100644 --- a/devtools/client/inspector/rules/models/rule.js +++ b/devtools/client/inspector/rules/models/rule.js @@ -136,15 +136,21 @@ Rule.prototype = { * both the full and short version of the source string. */ getOriginalSourceStrings: function () { - return this.domRule.getOriginalLocation().then(({href, - line, mediaText}) => { + return this.domRule.getOriginalLocation().then(({href, line, mediaText}) => { let mediaString = mediaText ? " @" + mediaText : ""; let linePart = line > 0 ? (":" + line) : ""; + let decodedHref = href; + + if (decodedHref) { + try { + decodedHref = decodeURIComponent(href); + } catch (e) {} + } let sourceStrings = { - full: (href || CssLogic.l10n("rule.sourceInline")) + linePart + + full: (decodedHref || CssLogic.l10n("rule.sourceInline")) + linePart + mediaString, - short: CssLogic.shortSource({href: href}) + linePart + mediaString + short: CssLogic.shortSource({href: decodedHref}) + linePart + mediaString }; return sourceStrings; diff --git a/devtools/client/inspector/rules/test/browser_rules_style-editor-link.js b/devtools/client/inspector/rules/test/browser_rules_style-editor-link.js index 59f2a2cdc93d28e6334b1ef7bb6cae5480f04e3a..0294fbc617e94d15535be726fdbe7304880fc3ee 100644 --- a/devtools/client/inspector/rules/test/browser_rules_style-editor-link.js +++ b/devtools/client/inspector/rules/test/browser_rules_style-editor-link.js @@ -6,10 +6,12 @@ // Test the links from the rule-view to the styleeditor -const STYLESHEET_URL = "data:text/css," + encodeURIComponent( - ["#first {", - "color: blue", - "}"].join("\n")); +const STYLESHEET_DATA_URL_CONTENTS = ["#first {", + "color: blue", + "}"].join("\n"); +const STYLESHEET_DATA_URL = + `data:text/css,${encodeURIComponent(STYLESHEET_DATA_URL_CONTENTS)}`; +const 
STYLESHEET_DECODED_DATA_URL = `data:text/css,${STYLESHEET_DATA_URL_CONTENTS}`; const EXTERNAL_STYLESHEET_FILE_NAME = "doc_style_editor_link.css"; const EXTERNAL_STYLESHEET_URL = URL_ROOT + EXTERNAL_STYLESHEET_FILE_NAME; @@ -27,7 +29,7 @@ const DOCUMENT_URL = "data:text/html;charset=utf-8," + encodeURIComponent(` <style> div { font-weight: bold; } </style> - <link rel="stylesheet" type="text/css" href="${STYLESHEET_URL}"> + <link rel="stylesheet" type="text/css" href="${STYLESHEET_DATA_URL}"> <link rel="stylesheet" type="text/css" href="${EXTERNAL_STYLESHEET_URL}"> </head> <body> @@ -174,15 +176,28 @@ function* testDisabledStyleEditor(view, toolbox) { } function testRuleViewLinkLabel(view) { - let link = getRuleViewLinkByIndex(view, 2); + info("Checking the data URL link label"); + + let link = getRuleViewLinkByIndex(view, 1); let labelElem = link.querySelector(".ruleview-rule-source-label"); let value = labelElem.textContent; let tooltipText = labelElem.getAttribute("title"); - is(value, EXTERNAL_STYLESHEET_FILE_NAME + ":1", - "rule view stylesheet display value matches filename and line number"); - is(tooltipText, EXTERNAL_STYLESHEET_URL + ":1", - "rule view stylesheet tooltip text matches the full URI path"); + is(value, `${STYLESHEET_DATA_URL_CONTENTS}:1`, + "Rule view data URL stylesheet display value matches contents"); + is(tooltipText, `${STYLESHEET_DECODED_DATA_URL}:1`, + "Rule view data URL stylesheet tooltip text matches the full URI path"); + + info("Checking the external link label"); + link = getRuleViewLinkByIndex(view, 2); + labelElem = link.querySelector(".ruleview-rule-source-label"); + value = labelElem.textContent; + tooltipText = labelElem.getAttribute("title"); + + is(value, `${EXTERNAL_STYLESHEET_FILE_NAME}:1`, + "Rule view external stylesheet display value matches filename and line number"); + is(tooltipText, `${EXTERNAL_STYLESHEET_URL}:1`, + "Rule view external stylesheet tooltip text matches the full URI path"); } function 
testUnselectableRuleViewLink(view, index) { diff --git a/devtools/shared/inspector/css-logic.js b/devtools/shared/inspector/css-logic.js index f4ec561fac24c30639472f3155c73657ce8b8c87..59771b879a87c3382108c2fbd13077937a44eb32 100644 --- a/devtools/shared/inspector/css-logic.js +++ b/devtools/shared/inspector/css-logic.js @@ -9,6 +9,8 @@ const { getRootBindingParent } = require("devtools/shared/layout/utils"); const { getTabPrefs } = require("devtools/shared/indentation"); +const MAX_DATA_URL_LENGTH = 40; + /* * About the objects defined in this file: * - CssLogic contains style information about a view context. It provides @@ -109,6 +111,13 @@ exports.shortSource = function (sheet) { return exports.l10n("rule.sourceInline"); } + // If the sheet is a data URL, return a trimmed version of it. + let dataUrl = sheet.href.trim().match(/^data:.*?,((?:.|\r|\n)*)$/); + if (dataUrl) { + return dataUrl[1].length > MAX_DATA_URL_LENGTH ? + `${dataUrl[1].substr(0, MAX_DATA_URL_LENGTH - 1)}…` : dataUrl[1]; + } + // We try, in turn, the filename, filePath, query string, whole thing let url = {}; try { @@ -129,8 +138,7 @@ exports.shortSource = function (sheet) { return url.query; } - let dataUrl = sheet.href.match(/^(data:[^,]*),/); - return dataUrl ? 
dataUrl[1] : sheet.href; + return sheet.href; }; const TAB_CHARS = "\t"; diff --git a/dom/events/EventListenerManager.cpp b/dom/events/EventListenerManager.cpp index 58915b6ee852a741ce4070b8c9dfee67931ab4aa..cd5afd5dd1356ce91669c8952a7bdadbe6138dfb 100644 --- a/dom/events/EventListenerManager.cpp +++ b/dom/events/EventListenerManager.cpp @@ -1770,7 +1770,7 @@ EventListenerManager::TraceListeners(JSTracer* aTrc) } bool -EventListenerManager::HasUntrustedOrNonSystemGroupKeyEventListeners() +EventListenerManager::HasNonSystemGroupListenersForUntrustedKeyEvents() { uint32_t count = mListeners.Length(); for (uint32_t i = 0; i < count; ++i) { @@ -1786,6 +1786,24 @@ EventListenerManager::HasUntrustedOrNonSystemGroupKeyEventListeners() return false; } +bool +EventListenerManager::HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents() +{ + uint32_t count = mListeners.Length(); + for (uint32_t i = 0; i < count; ++i) { + Listener* listener = &mListeners.ElementAt(i); + if (!listener->mFlags.mPassive && + !listener->mFlags.mInSystemGroup && + listener->mFlags.mAllowUntrustedEvents && + (listener->mTypeAtom == nsGkAtoms::onkeydown || + listener->mTypeAtom == nsGkAtoms::onkeypress || + listener->mTypeAtom == nsGkAtoms::onkeyup)) { + return true; + } + } + return false; +} + bool EventListenerManager::HasApzAwareListeners() { diff --git a/dom/events/EventListenerManager.h b/dom/events/EventListenerManager.h index d7e851c404824ba98fd20259765921f29f81ad8e..0fae474a4fbc6e3d87e382c045ec23a15729b875 100644 --- a/dom/events/EventListenerManager.h +++ b/dom/events/EventListenerManager.h @@ -470,7 +470,8 @@ public: dom::EventTarget* GetTarget() { return mTarget; } - bool HasUntrustedOrNonSystemGroupKeyEventListeners(); + bool HasNonSystemGroupListenersForUntrustedKeyEvents(); + bool HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents(); bool HasApzAwareListeners(); bool IsApzAwareListener(Listener* aListener); diff --git a/dom/events/EventTarget.cpp 
b/dom/events/EventTarget.cpp index 0b022318c6d0c61191ba3e0f7bfbaa32bdd1c2ec..2762e1b6e526ebfa09fcef33e4cd440ceb2ccfe4 100644 --- a/dom/events/EventTarget.cpp +++ b/dom/events/EventTarget.cpp @@ -58,10 +58,17 @@ EventTarget::SetEventHandler(nsIAtom* aType, const nsAString& aTypeString, } bool -EventTarget::HasUntrustedOrNonSystemGroupKeyEventListeners() const +EventTarget::HasNonSystemGroupListenersForUntrustedKeyEvents() const { EventListenerManager* elm = GetExistingListenerManager(); - return elm && elm->HasUntrustedOrNonSystemGroupKeyEventListeners(); + return elm && elm->HasNonSystemGroupListenersForUntrustedKeyEvents(); +} + +bool +EventTarget::HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents() const +{ + EventListenerManager* elm = GetExistingListenerManager(); + return elm && elm->HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents(); } bool diff --git a/dom/events/EventTarget.h b/dom/events/EventTarget.h index 3655ffe01d6164ae407dd1594e4c66486c5c18f9..e96d5431646311a3b85413e694a3a010281c970e 100644 --- a/dom/events/EventTarget.h +++ b/dom/events/EventTarget.h @@ -99,9 +99,13 @@ public: // Called from AsyncEventDispatcher to notify it is running. virtual void AsyncEventRunning(AsyncEventDispatcher* aEvent) {} - // Used by FocusTarget to determine whether this event target has listeners - // for untrusted or non system group key events. - bool HasUntrustedOrNonSystemGroupKeyEventListeners() const; + // Used by APZ to determine whether this event target has non-chrome event + // listeners for untrusted key events. + bool HasNonSystemGroupListenersForUntrustedKeyEvents() const; + + // Used by APZ to determine whether this event target has non-chrome and + // non-passive event listeners for untrusted key events. 
+ bool HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents() const; virtual bool IsApzAware() const; diff --git a/dom/workers/test/serviceworkers/browser_download.js b/dom/workers/test/serviceworkers/browser_download.js index 24cbb5b8e31b1d9152c0a8e857e2d4fd2e1d9dd8..c9adfb604d1246fadb63ee42cff0f55e776745fd 100644 --- a/dom/workers/test/serviceworkers/browser_download.js +++ b/dom/workers/test/serviceworkers/browser_download.js @@ -3,7 +3,6 @@ Cu.import('resource://gre/modules/Services.jsm'); var Downloads = Cu.import("resource://gre/modules/Downloads.jsm", {}).Downloads; -var DownloadsCommon = Cu.import("resource:///modules/DownloadsCommon.jsm", {}).DownloadsCommon; Cu.import('resource://gre/modules/NetUtil.jsm'); var gTestRoot = getRootDirectory(gTestPath).replace("chrome://mochitests/content/", @@ -59,9 +58,8 @@ function test() { ok(file.exists(), 'download completed'); is(file.fileSize, 33, 'downloaded file has correct size'); file.remove(false); - DownloadsCommon.removeAndFinalizeDownload(aDownload); - - downloadList.removeView(downloadListener); + downloadList.remove(aDownload).catch(Cu.reportError); + downloadList.removeView(downloadListener).catch(Cu.reportError); gBrowser.removeTab(tab); Services.ww.unregisterNotification(windowObserver); diff --git a/dom/xbl/nsXBLWindowKeyHandler.cpp b/dom/xbl/nsXBLWindowKeyHandler.cpp index 56c4d4f8d23c6b5341631c94c35a24f0a5f6297a..d057820c57879230915d743898789c41f119b65f 100644 --- a/dom/xbl/nsXBLWindowKeyHandler.cpp +++ b/dom/xbl/nsXBLWindowKeyHandler.cpp @@ -504,6 +504,13 @@ nsXBLWindowKeyHandler::HandleEvent(nsIDOMEvent* aEvent) } } + // If this event was handled by APZ then don't do the default action, and + // preventDefault to prevent any other listeners from handling the event. 
+ if (widgetKeyboardEvent->mFlags.mHandledByAPZ) { + aEvent->PreventDefault(); + return NS_OK; + } + nsCOMPtr<nsIAtom> eventTypeAtom = ConvertEventToDOMEventType(*widgetKeyboardEvent); return WalkHandlers(keyEvent, eventTypeAtom); diff --git a/gfx/layers/PaintThread.cpp b/gfx/layers/PaintThread.cpp index 0301c61740e1eaa2f384388b2ce9f3fc1ab21dc4..e8053704c9c93c1a28eee616ad16082dca2819a3 100644 --- a/gfx/layers/PaintThread.cpp +++ b/gfx/layers/PaintThread.cpp @@ -22,6 +22,48 @@ StaticAutoPtr<PaintThread> PaintThread::sSingleton; StaticRefPtr<nsIThread> PaintThread::sThread; PlatformThreadId PaintThread::sThreadId; +// RAII make sure we clean up and restore our draw targets +// when we paint async. +struct AutoCapturedPaintSetup { + AutoCapturedPaintSetup(DrawTarget* aTarget, + DrawTargetCapture* aCapture, + CompositorBridgeChild* aBridge) + : mTarget(aTarget) + , mRestorePermitsSubpixelAA(aTarget->GetPermitSubpixelAA()) + , mOldTransform(aTarget->GetTransform()) + , mBridge(aBridge) + { + MOZ_ASSERT(mTarget); + MOZ_ASSERT(aCapture); + + mTarget->SetTransform(aCapture->GetTransform()); + mTarget->SetPermitSubpixelAA(aCapture->GetPermitSubpixelAA()); + } + + ~AutoCapturedPaintSetup() + { + mTarget->SetTransform(mOldTransform); + mTarget->SetPermitSubpixelAA(mRestorePermitsSubpixelAA); + + // Textureclient forces a flush once we "end paint", so + // users of this texture expect all the drawing to be complete. + // Force a flush now. + // TODO: This might be a performance bottleneck because + // main thread painting only does one flush at the end of all paints + // whereas we force a flush after each draw target paint. 
+ mTarget->Flush(); + + if (mBridge) { + mBridge->NotifyFinishedAsyncPaint(); + } + } + + DrawTarget* mTarget; + bool mRestorePermitsSubpixelAA; + Matrix mOldTransform; + RefPtr<CompositorBridgeChild> mBridge; +}; + void PaintThread::Release() { @@ -112,48 +154,30 @@ PaintThread::IsOnPaintThread() void PaintThread::PaintContentsAsync(CompositorBridgeChild* aBridge, - gfx::DrawTargetCapture* aCapture, CapturedPaintState* aState, PrepDrawTargetForPaintingCallback aCallback) { MOZ_ASSERT(IsOnPaintThread()); - MOZ_ASSERT(aCapture); MOZ_ASSERT(aState); DrawTarget* target = aState->mTarget; + DrawTargetCapture* capture = aState->mCapture; - Matrix oldTransform = target->GetTransform(); - target->SetTransform(aState->mTargetTransform); - target->SetPermitSubpixelAA(aCapture->GetPermitSubpixelAA()); + AutoCapturedPaintSetup setup(target, capture, aBridge); if (!aCallback(aState)) { return; } // Draw all the things into the actual dest target. - target->DrawCapturedDT(aCapture, Matrix()); - target->SetTransform(oldTransform); - - // Textureclient forces a flush once we "end paint", so - // users of this texture expect all the drawing to be complete. - // Force a flush now. - // TODO: This might be a performance bottleneck because - // main thread painting only does one flush at the end of all paints - // whereas we force a flush after each draw target paint. 
- target->Flush(); - - if (aBridge) { - aBridge->NotifyFinishedAsyncPaint(); - } + target->DrawCapturedDT(capture, Matrix()); } void -PaintThread::PaintContents(DrawTargetCapture* aCapture, - CapturedPaintState* aState, +PaintThread::PaintContents(CapturedPaintState* aState, PrepDrawTargetForPaintingCallback aCallback) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(aCapture); MOZ_ASSERT(aState); // If painting asynchronously, we need to acquire the compositor bridge which @@ -164,14 +188,14 @@ PaintThread::PaintContents(DrawTargetCapture* aCapture, cbc = CompositorBridgeChild::Get(); cbc->NotifyBeginAsyncPaint(); } - RefPtr<DrawTargetCapture> capture(aCapture); RefPtr<CapturedPaintState> state(aState); + RefPtr<DrawTargetCapture> capture(aState->mCapture); RefPtr<PaintThread> self = this; RefPtr<Runnable> task = NS_NewRunnableFunction("PaintThread::PaintContents", [self, cbc, capture, state, aCallback]() -> void { - self->PaintContentsAsync(cbc, capture, + self->PaintContentsAsync(cbc, state, aCallback); }); diff --git a/gfx/layers/PaintThread.h b/gfx/layers/PaintThread.h index 9af2e44b88e6169dfc46f1cfc260abd491e472b6..1e4297279ed0c98daa2fc56b9b17e0138990e25c 100644 --- a/gfx/layers/PaintThread.h +++ b/gfx/layers/PaintThread.h @@ -27,12 +27,14 @@ class CapturedPaintState { NS_INLINE_DECL_THREADSAFE_REFCOUNTING(CapturedPaintState) public: CapturedPaintState(nsIntRegion& aRegionToDraw, + gfx::DrawTargetCapture* aCapture, gfx::DrawTarget* aTarget, gfx::DrawTarget* aTargetOnWhite, gfx::Matrix aTargetTransform, SurfaceMode aSurfaceMode, gfxContentType aContentType) : mRegionToDraw(aRegionToDraw) + , mCapture(aCapture) , mTarget(aTarget) , mTargetOnWhite(aTargetOnWhite) , mTargetTransform(aTargetTransform) @@ -41,6 +43,7 @@ public: {} nsIntRegion mRegionToDraw; + RefPtr<gfx::DrawTargetCapture> mCapture; RefPtr<gfx::DrawTarget> mTarget; RefPtr<gfx::DrawTarget> mTargetOnWhite; gfx::Matrix mTargetTransform; @@ -63,8 +66,7 @@ public: static void Start(); static void 
Shutdown(); static PaintThread* Get(); - void PaintContents(gfx::DrawTargetCapture* aCapture, - CapturedPaintState* aState, + void PaintContents(CapturedPaintState* aState, PrepDrawTargetForPaintingCallback aCallback); // Sync Runnables need threads to be ref counted, @@ -82,7 +84,6 @@ private: void ShutdownOnPaintThread(); void InitOnPaintThread(); void PaintContentsAsync(CompositorBridgeChild* aBridge, - gfx::DrawTargetCapture* aCapture, CapturedPaintState* aState, PrepDrawTargetForPaintingCallback aCallback); diff --git a/gfx/layers/RotatedBuffer.cpp b/gfx/layers/RotatedBuffer.cpp index 4dc0040d10aaceaf4d91f6077739f169f952d214..0e71c5fc6d71e72aed47f8e2118d38e53e8769ea 100644 --- a/gfx/layers/RotatedBuffer.cpp +++ b/gfx/layers/RotatedBuffer.cpp @@ -836,6 +836,7 @@ RotatedContentBuffer::BorrowDrawTargetForPainting(PaintState& aPaintState, // Can't stack allocate refcounted objects. RefPtr<CapturedPaintState> capturedPaintState = MakeAndAddRef<CapturedPaintState>(regionToDraw, + nullptr, mDTBuffer, mDTBufferOnWhite, Matrix(), diff --git a/gfx/layers/apz/src/APZCTreeManager.cpp b/gfx/layers/apz/src/APZCTreeManager.cpp index 8c99b1b71249acace8b2830b2d175da52da62953..bea4cee3d68742e56009bdde81166aeae832acae 100644 --- a/gfx/layers/apz/src/APZCTreeManager.cpp +++ b/gfx/layers/apz/src/APZCTreeManager.cpp @@ -1359,7 +1359,8 @@ APZCTreeManager::ReceiveInputEvent(InputData& aEvent, // Any keyboard event that is dispatched to the input queue at this point // should have been consumed - MOZ_ASSERT(result == nsEventStatus_eConsumeNoDefault); + MOZ_ASSERT(result == nsEventStatus_eConsumeDoDefault || + result == nsEventStatus_eConsumeNoDefault); keyInput.mHandledByAPZ = true; focusSetter.MarkAsNonFocusChanging(); diff --git a/gfx/layers/apz/src/AsyncPanZoomController.cpp b/gfx/layers/apz/src/AsyncPanZoomController.cpp index b07e118f78dc49836be6c5a9e838153832bf9031..0f6c807133f70d57e6638f9d73e8035b5b09672c 100644 --- a/gfx/layers/apz/src/AsyncPanZoomController.cpp +++ 
b/gfx/layers/apz/src/AsyncPanZoomController.cpp @@ -285,6 +285,16 @@ typedef GenericFlingAnimation FlingAnimation; * reflect the value of the async scroll offset and async zoom at the last time * SampleCompositedAsyncTransform() was called. * + * \li\b apz.keyboard.enabled + * Determines whether scrolling with the keyboard will be allowed to be handled + * by APZ. + * + * \li\b apz.keyboard.passive-listeners + * When enabled, APZ will interpret the passive event listener flag to mean + * that the event listener won't change the focused element or selection of + * the page. With this, web content can use passive key listeners and not have + * keyboard APZ disabled. + * * \li\b apz.max_velocity_inches_per_ms * Maximum velocity. Velocity will be capped at this value if a faster fling * occurs. Negative values indicate unlimited velocity.\n @@ -1750,7 +1760,7 @@ AsyncPanZoomController::OnKeyboard(const KeyboardInput& aEvent) // existing smooth scroll animation if there is one. APZC_LOG("%p keyboard scrolling to snap point %s\n", this, Stringify(*snapPoint).c_str()); SmoothScrollTo(*snapPoint); - return nsEventStatus_eConsumeNoDefault; + return nsEventStatus_eConsumeDoDefault; } // Use a keyboard scroll animation to scroll, reusing an existing one if it exists @@ -1775,7 +1785,7 @@ AsyncPanZoomController::OnKeyboard(const KeyboardInput& aEvent) CSSPixel::ToAppUnits(destination), nsSize(velocity.x, velocity.y)); - return nsEventStatus_eConsumeNoDefault; + return nsEventStatus_eConsumeDoDefault; } CSSPoint diff --git a/gfx/layers/apz/src/FocusTarget.cpp b/gfx/layers/apz/src/FocusTarget.cpp index e1338de538c3a5eaa6140aae55f8a971be89971d..e20b4595c1cd18b77791e5ee026797ed2b3ae1aa 100644 --- a/gfx/layers/apz/src/FocusTarget.cpp +++ b/gfx/layers/apz/src/FocusTarget.cpp @@ -64,7 +64,27 @@ HasListenersForKeyEvents(nsIContent* aContent) nullptr, nullptr, &targets); NS_ENSURE_SUCCESS(rv, false); for (size_t i = 0; i < targets.Length(); i++) { - if 
(targets[i]->HasUntrustedOrNonSystemGroupKeyEventListeners()) { + if (targets[i]->HasNonSystemGroupListenersForUntrustedKeyEvents()) { + return true; + } + } + return false; +} + +static bool +HasListenersForNonPassiveKeyEvents(nsIContent* aContent) +{ + if (!aContent) { + return false; + } + + WidgetEvent event(true, eVoidEvent); + nsTArray<EventTarget*> targets; + nsresult rv = EventDispatcher::Dispatch(aContent, nullptr, &event, nullptr, + nullptr, nullptr, &targets); + NS_ENSURE_SUCCESS(rv, false); + for (size_t i = 0; i < targets.Length(); i++) { + if (targets[i]->HasNonPassiveNonSystemGroupListenersForUntrustedKeyEvents()) { return true; } } @@ -116,16 +136,25 @@ FocusTarget::FocusTarget(nsIPresShell* aRootPresShell, // listeners or whether key events will be targeted at a different process // through a remote browser. nsCOMPtr<nsIContent> focusedContent = presShell->GetFocusedContentInOurWindow(); + nsCOMPtr<nsIContent> keyEventTarget = focusedContent; + + // If there is no focused element then event dispatch goes to the body of + // the page if it exists or the root element. + if (!keyEventTarget) { + keyEventTarget = document->GetUnfocusedKeyEventTarget(); + } // Check if there are key event listeners that could prevent default or change // the focus or selection of the page. - mFocusHasKeyEventListeners = - HasListenersForKeyEvents(focusedContent ? focusedContent.get() - : document->GetUnfocusedKeyEventTarget()); + if (gfxPrefs::APZKeyboardPassiveListeners()) { + mFocusHasKeyEventListeners = HasListenersForNonPassiveKeyEvents(keyEventTarget.get()); + } else { + mFocusHasKeyEventListeners = HasListenersForKeyEvents(keyEventTarget.get()); + } - // Check if the focused element is content editable or if the document + // Check if the key event target is content editable or if the document // is in design mode. 
- if (IsEditableNode(focusedContent) || + if (IsEditableNode(keyEventTarget) || IsEditableNode(document)) { FT_LOG("Creating nil target with seq=%" PRIu64 ", kl=%d (disabling for editable node)\n", aFocusSequenceNumber, @@ -135,8 +164,8 @@ FocusTarget::FocusTarget(nsIPresShell* aRootPresShell, return; } - // Check if the focused element is a remote browser - if (TabParent* browserParent = TabParent::GetFrom(focusedContent)) { + // Check if the key event target is a remote browser + if (TabParent* browserParent = TabParent::GetFrom(keyEventTarget)) { RenderFrameParent* rfp = browserParent->GetRenderFrame(); // The globally focused element for scrolling is in a remote layer tree diff --git a/gfx/layers/apz/src/InputQueue.cpp b/gfx/layers/apz/src/InputQueue.cpp index ce035f33b0146b0479f3ee84a376c27afd6026b7..a2516a0a4f7fc50e31dd4aabfec4c3b22a9fce0c 100644 --- a/gfx/layers/apz/src/InputQueue.cpp +++ b/gfx/layers/apz/src/InputQueue.cpp @@ -306,7 +306,10 @@ InputQueue::ReceiveKeyboardInput(const RefPtr<AsyncPanZoomController>& aTarget, ProcessQueue(); - return nsEventStatus_eConsumeNoDefault; + // If APZ is allowing passive listeners then we must dispatch the event to + // content, otherwise we can consume the event. + return gfxPrefs::APZKeyboardPassiveListeners() ? 
nsEventStatus_eConsumeDoDefault + : nsEventStatus_eConsumeNoDefault; } static bool diff --git a/gfx/layers/client/ClientPaintedLayer.cpp b/gfx/layers/client/ClientPaintedLayer.cpp index 8bb136e8704addc7d0a8c1050f16cc5691863a92..a1a4846122a983a2469eeb720ec95decfcd36975 100644 --- a/gfx/layers/client/ClientPaintedLayer.cpp +++ b/gfx/layers/client/ClientPaintedLayer.cpp @@ -262,13 +262,13 @@ ClientPaintedLayer::PaintOffMainThread() DrawTarget* targetOnWhite = nullptr; RefPtr<CapturedPaintState> capturedState = MakeAndAddRef<CapturedPaintState>(state.mRegionToDraw, + captureDT, target, targetOnWhite, capturedTransform, state.mMode, state.mContentType); - PaintThread::Get()->PaintContents(captureDT, - capturedState, + PaintThread::Get()->PaintContents(capturedState, RotatedContentBuffer::PrepareDrawTargetForPainting); mContentClient->ReturnDrawTargetToBuffer(target); diff --git a/gfx/thebes/gfxPrefs.h b/gfx/thebes/gfxPrefs.h index 4396f727cb01da40260a6bbb16b4a6a2f2fd9472..9a9add323deecbde0c4bb4cf101bc14c6dd86d5c 100644 --- a/gfx/thebes/gfxPrefs.h +++ b/gfx/thebes/gfxPrefs.h @@ -314,6 +314,7 @@ private: DECL_GFX_PREF(Live, "apz.fling_stopped_threshold", APZFlingStoppedThreshold, float, 0.01f); DECL_GFX_PREF(Live, "apz.frame_delay.enabled", APZFrameDelayEnabled, bool, false); DECL_GFX_PREF(Once, "apz.keyboard.enabled", APZKeyboardEnabled, bool, false); + DECL_GFX_PREF(Live, "apz.keyboard.passive-listeners", APZKeyboardPassiveListeners, bool, false); DECL_GFX_PREF(Live, "apz.max_velocity_inches_per_ms", APZMaxVelocity, float, -1.0f); DECL_GFX_PREF(Once, "apz.max_velocity_queue_size", APZMaxVelocityQueueSize, uint32_t, 5); DECL_GFX_PREF(Live, "apz.min_skate_speed", APZMinSkateSpeed, float, 1.0f); diff --git a/ipc/chromium/src/base/pickle.cc b/ipc/chromium/src/base/pickle.cc index d7fdad4fb0505803aacf11a0d967fd54ee9f0218..ef072f0b85d8ef8be87e64669e9e7892cceaa283 100644 --- a/ipc/chromium/src/base/pickle.cc +++ b/ipc/chromium/src/base/pickle.cc @@ -524,6 +524,109 @@ void 
Pickle::EndWrite(uint32_t length) { } } +bool Pickle::WriteBool(bool value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzBool(&value); +#endif + return WriteInt(value ? 1 : 0); +} + +bool Pickle::WriteInt16(int16_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzInt16(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteUInt16(uint16_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt16(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteInt(int value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzInt(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteLong(long value) { + // Always written as a 64-bit value since the size for this type can + // differ between architectures. +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzLong(&value); +#endif + return WriteInt64(int64_t(value)); +} + +bool Pickle::WriteULong(unsigned long value) { + // Always written as a 64-bit value since the size for this type can + // differ between architectures. +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzULong(&value); +#endif + return WriteUInt64(uint64_t(value)); +} + +bool Pickle::WriteSize(size_t value) { + // Always written as a 64-bit value since the size for this type can + // differ between architectures. 
+#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzSize(&value); +#endif + return WriteUInt64(uint64_t(value)); +} + +bool Pickle::WriteInt32(int32_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzInt(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteUInt32(uint32_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt32(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteInt64(int64_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzInt64(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteUInt64(uint64_t value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt64(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteDouble(double value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzDouble(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + +bool Pickle::WriteIntPtr(intptr_t value) { + // Always written as a 64-bit value since the size for this type can + // differ between architectures. + return WriteInt64(int64_t(value)); +} + +bool Pickle::WriteUnsignedChar(unsigned char value) { +#ifdef FUZZING + Singleton<mozilla::ipc::Faulty>::get()->FuzzUChar(&value); +#endif + return WriteBytes(&value, sizeof(value)); +} + bool Pickle::WriteBytes(const void* data, uint32_t data_len, uint32_t alignment) { DCHECK(alignment == 4 || alignment == 8); DCHECK(intptr_t(header_) % alignment == 0); diff --git a/ipc/chromium/src/base/pickle.h b/ipc/chromium/src/base/pickle.h index 5224657ba19ca8dd608d4318f94389204f3bbe1d..f9784e10e1b2e5e28676b3750ff4cf1f35802f4c 100644 --- a/ipc/chromium/src/base/pickle.h +++ b/ipc/chromium/src/base/pickle.h @@ -143,95 +143,20 @@ class Pickle { // appended to the end of the Pickle's payload. 
When reading values from a // Pickle, it is important to read them in the order in which they were added // to the Pickle. - bool WriteBool(bool value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzBool(&value); -#endif - return WriteInt(value ? 1 : 0); - } - bool WriteInt16(int16_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzInt16(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteUInt16(uint16_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt16(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteInt(int value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzInt(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteLong(long value) { - // Always written as a 64-bit value since the size for this type can - // differ between architectures. -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzLong(&value); -#endif - return WriteInt64(int64_t(value)); - } - bool WriteULong(unsigned long value) { - // Always written as a 64-bit value since the size for this type can - // differ between architectures. -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzULong(&value); -#endif - return WriteUInt64(uint64_t(value)); - } - bool WriteSize(size_t value) { - // Always written as a 64-bit value since the size for this type can - // differ between architectures. 
-#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzSize(&value); -#endif - return WriteUInt64(uint64_t(value)); - } - bool WriteInt32(int32_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzInt(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteUInt32(uint32_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt32(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteInt64(int64_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzInt64(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteUInt64(uint64_t value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzUInt64(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteDouble(double value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzDouble(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } - bool WriteIntPtr(intptr_t value) { - // Always written as a 64-bit value since the size for this type can - // differ between architectures. 
- return WriteInt64(int64_t(value)); - } - bool WriteUnsignedChar(unsigned char value) { -#ifdef FUZZING - Singleton<mozilla::ipc::Faulty>::get()->FuzzUChar(&value); -#endif - return WriteBytes(&value, sizeof(value)); - } + bool WriteBool(bool value); + bool WriteInt16(int16_t value); + bool WriteUInt16(uint16_t value); + bool WriteInt(int value); + bool WriteLong(long value); + bool WriteULong(unsigned long value); + bool WriteSize(size_t value); + bool WriteInt32(int32_t value); + bool WriteUInt32(uint32_t value); + bool WriteInt64(int64_t value); + bool WriteUInt64(uint64_t value); + bool WriteDouble(double value); + bool WriteIntPtr(intptr_t value); + bool WriteUnsignedChar(unsigned char value); bool WriteString(const std::string& value); bool WriteWString(const std::wstring& value); bool WriteData(const char* data, uint32_t length); diff --git a/js/src/builtin/Promise.cpp b/js/src/builtin/Promise.cpp index 8197645e75876d6e22f42809e59778d9aa13751c..117485cdee90c58c3892c248f9cb0945ef1c2036 100644 --- a/js/src/builtin/Promise.cpp +++ b/js/src/builtin/Promise.cpp @@ -36,18 +36,30 @@ MillisecondsSinceStartup() enum PromiseHandler { PromiseHandlerIdentity = 0, PromiseHandlerThrower, - PromiseHandlerAsyncFunctionAwaitFulfilled, - PromiseHandlerAsyncFunctionAwaitRejected, - PromiseHandlerAsyncGeneratorAwaitFulfilled, - PromiseHandlerAsyncGeneratorAwaitRejected, - - // Async Iteration proposal 6.1.1.2.1. - // Async iterator handlers take the resolved value and create new iterator - // objects. To do so it needs to forward whether the iterator is done. In - // spec, this is achieved via the [[Done]] internal slot. We enumerate both - // true and false cases here. - PromiseHandlerAsyncIteratorValueUnwrapDone, - PromiseHandlerAsyncIteratorValueUnwrapNotDone, + + // ES 2018 draft 25.5.5.4-5. + PromiseHandlerAsyncFunctionAwaitedFulfilled, + PromiseHandlerAsyncFunctionAwaitedRejected, + + // Async Iteration proposal 4.1. 
+ PromiseHandlerAsyncGeneratorAwaitedFulfilled, + PromiseHandlerAsyncGeneratorAwaitedRejected, + + // Async Iteration proposal 11.4.3.5.1-2. + PromiseHandlerAsyncGeneratorResumeNextReturnFulfilled, + PromiseHandlerAsyncGeneratorResumeNextReturnRejected, + + // Async Iteration proposal 11.4.3.7 steps 8.c-e. + PromiseHandlerAsyncGeneratorYieldReturnAwaitedFulfilled, + PromiseHandlerAsyncGeneratorYieldReturnAwaitedRejected, + + // Async Iteration proposal 11.1.3.2.5. + // Async-from-Sync iterator handlers take the resolved value and create new + // iterator objects. To do so it needs to forward whether the iterator is + // done. In spec, this is achieved via the [[Done]] internal slot. We + // enumerate both true and false cases here. + PromiseHandlerAsyncFromSyncIteratorValueUnwrapDone, + PromiseHandlerAsyncFromSyncIteratorValueUnwrapNotDone, }; enum ResolutionMode { @@ -197,8 +209,8 @@ enum ReactionRecordSlots { #define REACTION_FLAG_RESOLVED 0x1 #define REACTION_FLAG_FULFILLED 0x2 #define REACTION_FLAG_IGNORE_DEFAULT_RESOLUTION 0x4 -#define REACTION_FLAG_ASYNC_FUNCTION_AWAIT 0x8 -#define REACTION_FLAG_ASYNC_GENERATOR_AWAIT 0x10 +#define REACTION_FLAG_ASYNC_FUNCTION 0x8 +#define REACTION_FLAG_ASYNC_GENERATOR 0x10 // ES2016, 25.4.1.2. 
class PromiseReactionRecord : public NativeObject @@ -225,28 +237,28 @@ class PromiseReactionRecord : public NativeObject flags |= REACTION_FLAG_FULFILLED; setFixedSlot(ReactionRecordSlot_Flags, Int32Value(flags)); } - void setIsAsyncFunctionAwait() { + void setIsAsyncFunction() { int32_t flags = this->flags(); - flags |= REACTION_FLAG_ASYNC_FUNCTION_AWAIT; + flags |= REACTION_FLAG_ASYNC_FUNCTION; setFixedSlot(ReactionRecordSlot_Flags, Int32Value(flags)); } - bool isAsyncFunctionAwait() { + bool isAsyncFunction() { int32_t flags = this->flags(); - return flags & REACTION_FLAG_ASYNC_FUNCTION_AWAIT; + return flags & REACTION_FLAG_ASYNC_FUNCTION; } - void setIsAsyncGeneratorAwait(Handle<AsyncGeneratorObject*> asyncGenObj) { + void setIsAsyncGenerator(Handle<AsyncGeneratorObject*> asyncGenObj) { int32_t flags = this->flags(); - flags |= REACTION_FLAG_ASYNC_GENERATOR_AWAIT; + flags |= REACTION_FLAG_ASYNC_GENERATOR; setFixedSlot(ReactionRecordSlot_Flags, Int32Value(flags)); setFixedSlot(ReactionRecordSlot_Generator, ObjectValue(*asyncGenObj)); } - bool isAsyncGeneratorAwait() { + bool isAsyncGenerator() { int32_t flags = this->flags(); - return flags & REACTION_FLAG_ASYNC_GENERATOR_AWAIT; + return flags & REACTION_FLAG_ASYNC_GENERATOR; } AsyncGeneratorObject* asyncGenerator() { - MOZ_ASSERT(isAsyncGeneratorAwait()); + MOZ_ASSERT(isAsyncGenerator()); return &getFixedSlot(ReactionRecordSlot_Generator).toObject() .as<AsyncGeneratorObject>(); } @@ -859,10 +871,10 @@ TriggerPromiseReactions(JSContext* cx, HandleValue reactionsVal, JS::PromiseStat } static MOZ_MUST_USE bool -AsyncFunctionAwaitPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord*> reaction, - MutableHandleValue rval) +AsyncFunctionPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord*> reaction, + MutableHandleValue rval) { - MOZ_ASSERT(reaction->isAsyncFunctionAwait()); + MOZ_ASSERT(reaction->isAsyncFunction()); RootedValue handlerVal(cx, reaction->handler()); RootedValue argument(cx, 
reaction->handlerArg()); @@ -870,15 +882,14 @@ AsyncFunctionAwaitPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord RootedValue generatorVal(cx, resultPromise->getFixedSlot(PromiseSlot_AwaitGenerator)); int32_t handlerNum = int32_t(handlerVal.toNumber()); - MOZ_ASSERT(handlerNum == PromiseHandlerAsyncFunctionAwaitFulfilled || - handlerNum == PromiseHandlerAsyncFunctionAwaitRejected); // Await's handlers don't return a value, nor throw exception. // They fail only on OOM. - if (handlerNum == PromiseHandlerAsyncFunctionAwaitFulfilled) { + if (handlerNum == PromiseHandlerAsyncFunctionAwaitedFulfilled) { if (!AsyncFunctionAwaitedFulfilled(cx, resultPromise, generatorVal, argument)) return false; } else { + MOZ_ASSERT(handlerNum == PromiseHandlerAsyncFunctionAwaitedRejected); if (!AsyncFunctionAwaitedRejected(cx, resultPromise, generatorVal, argument)) return false; } @@ -888,27 +899,48 @@ AsyncFunctionAwaitPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord } static MOZ_MUST_USE bool -AsyncGeneratorAwaitPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord*> reaction, - MutableHandleValue rval) +AsyncGeneratorPromiseReactionJob(JSContext* cx, Handle<PromiseReactionRecord*> reaction, + MutableHandleValue rval) { - MOZ_ASSERT(reaction->isAsyncGeneratorAwait()); + MOZ_ASSERT(reaction->isAsyncGenerator()); RootedValue handlerVal(cx, reaction->handler()); RootedValue argument(cx, reaction->handlerArg()); Rooted<AsyncGeneratorObject*> asyncGenObj(cx, reaction->asyncGenerator()); int32_t handlerNum = int32_t(handlerVal.toNumber()); - MOZ_ASSERT(handlerNum == PromiseHandlerAsyncGeneratorAwaitFulfilled || - handlerNum == PromiseHandlerAsyncGeneratorAwaitRejected); // Await's handlers don't return a value, nor throw exception. // They fail only on OOM. - if (handlerNum == PromiseHandlerAsyncGeneratorAwaitFulfilled) { + if (handlerNum == PromiseHandlerAsyncGeneratorAwaitedFulfilled) { + // 4.1.1. 
if (!AsyncGeneratorAwaitedFulfilled(cx, asyncGenObj, argument)) return false; - } else { + } else if (handlerNum == PromiseHandlerAsyncGeneratorAwaitedRejected) { + // 4.1.2. if (!AsyncGeneratorAwaitedRejected(cx, asyncGenObj, argument)) return false; + } else if (handlerNum == PromiseHandlerAsyncGeneratorResumeNextReturnFulfilled) { + asyncGenObj->setCompleted(); + // 11.4.3.5.1 step 1. + if (!AsyncGeneratorResolve(cx, asyncGenObj, argument, true)) + return false; + } else if (handlerNum == PromiseHandlerAsyncGeneratorResumeNextReturnRejected) { + asyncGenObj->setCompleted(); + // 11.4.3.5.2 step 1. + if (!AsyncGeneratorReject(cx, asyncGenObj, argument)) + return false; + } else if (handlerNum == PromiseHandlerAsyncGeneratorYieldReturnAwaitedFulfilled) { + asyncGenObj->setExecuting(); + // 11.4.3.7 steps 8.d-e. + if (!AsyncGeneratorYieldReturnAwaitedFulfilled(cx, asyncGenObj, argument)) + return false; + } else { + MOZ_ASSERT(handlerNum == PromiseHandlerAsyncGeneratorYieldReturnAwaitedRejected); + asyncGenObj->setExecuting(); + // 11.4.3.7 step 8.c. + if (!AsyncGeneratorYieldReturnAwaitedRejected(cx, asyncGenObj, argument)) + return false; } rval.setUndefined(); @@ -958,10 +990,10 @@ PromiseReactionJob(JSContext* cx, unsigned argc, Value* vp) // Steps 1-2. Rooted<PromiseReactionRecord*> reaction(cx, &reactionObj->as<PromiseReactionRecord>()); - if (reaction->isAsyncFunctionAwait()) - return AsyncFunctionAwaitPromiseReactionJob(cx, reaction, args.rval()); - if (reaction->isAsyncGeneratorAwait()) - return AsyncGeneratorAwaitPromiseReactionJob(cx, reaction, args.rval()); + if (reaction->isAsyncFunction()) + return AsyncFunctionPromiseReactionJob(cx, reaction, args.rval()); + if (reaction->isAsyncGenerator()) + return AsyncGeneratorPromiseReactionJob(cx, reaction, args.rval()); // Step 3. 
RootedValue handlerVal(cx, reaction->handler()); @@ -983,11 +1015,11 @@ PromiseReactionJob(JSContext* cx, unsigned argc, Value* vp) resolutionMode = RejectMode; handlerResult = argument; } else { - MOZ_ASSERT(handlerNum == PromiseHandlerAsyncIteratorValueUnwrapDone || - handlerNum == PromiseHandlerAsyncIteratorValueUnwrapNotDone); + MOZ_ASSERT(handlerNum == PromiseHandlerAsyncFromSyncIteratorValueUnwrapDone || + handlerNum == PromiseHandlerAsyncFromSyncIteratorValueUnwrapNotDone); - bool done = handlerNum == PromiseHandlerAsyncIteratorValueUnwrapDone; - // Async Iteration proposal 6.1.1.2.1 step 1. + bool done = handlerNum == PromiseHandlerAsyncFromSyncIteratorValueUnwrapDone; + // Async Iteration proposal 11.1.3.2.5 step 1. RootedObject resultObj(cx, CreateIterResultObject(cx, argument, done)); if (!resultObj) return false; @@ -2218,7 +2250,7 @@ static MOZ_MUST_USE bool PerformPromiseThenWithReaction(JSContext* cx, // Some async/await functions are implemented here instead of // js/src/builtin/AsyncFunction.cpp, to call Promise internal functions. -// Async Functions proposal 1.1.8 and 1.2.14 step 1. +// ES 2018 draft 14.6.11 and 14.7.14 step 1. MOZ_MUST_USE PromiseObject* js::CreatePromiseObjectForAsync(JSContext* cx, HandleValue generatorVal) { @@ -2232,7 +2264,7 @@ js::CreatePromiseObjectForAsync(JSContext* cx, HandleValue generatorVal) return promise; } -// Async Functions proposal 2.2 steps 3.f, 3.g. +// ES 2018 draft 25.5.5.2 steps 3.f, 3.g. MOZ_MUST_USE bool js::AsyncFunctionThrown(JSContext* cx, Handle<PromiseObject*> resultPromise) { @@ -2248,7 +2280,7 @@ js::AsyncFunctionThrown(JSContext* cx, Handle<PromiseObject*> resultPromise) return true; } -// Async Functions proposal 2.2 steps 3.d-e, 3.g. +// ES 2018 draft 25.5.5.2 steps 3.d-e, 3.g. 
MOZ_MUST_USE bool js::AsyncFunctionReturned(JSContext* cx, Handle<PromiseObject*> resultPromise, HandleValue value) { @@ -2260,28 +2292,30 @@ js::AsyncFunctionReturned(JSContext* cx, Handle<PromiseObject*> resultPromise, H return true; } -// Async Functions proposal 2.3 steps 2-8. -MOZ_MUST_USE bool -js::AsyncFunctionAwait(JSContext* cx, Handle<PromiseObject*> resultPromise, HandleValue value) +// Helper function that performs the equivalent steps as +// Async Iteration proposal 4.1 Await steps 2-3, 6-9 or similar. +template <typename T> +static MOZ_MUST_USE bool +InternalAwait(JSContext* cx, HandleValue value, HandleObject resultPromise, + HandleValue onFulfilled, HandleValue onRejected, T extraStep) { + MOZ_ASSERT(onFulfilled.isNumber() || onFulfilled.isObject()); + MOZ_ASSERT(onRejected.isNumber() || onRejected.isObject()); + // Step 2. Rooted<PromiseObject*> promise(cx, CreatePromiseObjectWithoutResolutionFunctions(cx)); if (!promise) return false; - // Steps 3. + // Step 3. if (!ResolvePromiseInternal(cx, promise, value)) return false; - // Steps 4-5. - RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncFunctionAwaitFulfilled)); - RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncFunctionAwaitRejected)); - RootedObject incumbentGlobal(cx); if (!GetObjectFromIncumbentGlobal(cx, &incumbentGlobal)) return false; - // Steps 6-7. + // Step 7-8. Rooted<PromiseReactionRecord*> reaction(cx, NewReactionRecord(cx, resultPromise, onFulfilled, onRejected, nullptr, nullptr, @@ -2289,53 +2323,47 @@ js::AsyncFunctionAwait(JSContext* cx, Handle<PromiseObject*> resultPromise, Hand if (!reaction) return false; - reaction->setIsAsyncFunctionAwait(); + // Step 6. + extraStep(reaction); - // Step 8. + // Step 9. return PerformPromiseThenWithReaction(cx, promise, reaction); } -// Async Iteration proposal 5.1 steps 2-9. +// ES 2018 draft 25.5.5.3 steps 2-10. 
MOZ_MUST_USE bool -js::AsyncGeneratorAwait(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - HandleValue value) +js::AsyncFunctionAwait(JSContext* cx, Handle<PromiseObject*> resultPromise, HandleValue value) { - // Step 2. - Rooted<PromiseObject*> promise(cx, CreatePromiseObjectWithoutResolutionFunctions(cx)); - if (!promise) - return false; - - // Steps 3. - if (!ResolvePromiseInternal(cx, promise, value)) - return false; - // Steps 4-5. - RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncGeneratorAwaitFulfilled)); - RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncGeneratorAwaitRejected)); - - RootedObject incumbentGlobal(cx); - if (!GetObjectFromIncumbentGlobal(cx, &incumbentGlobal)) - return false; + RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncFunctionAwaitedFulfilled)); + RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncFunctionAwaitedRejected)); - // Step 6 (skipped). - - // Steps 7-8. - Rooted<PromiseReactionRecord*> reaction(cx, NewReactionRecord(cx, nullptr, - onFulfilled, onRejected, - nullptr, nullptr, - incumbentGlobal)); - if (!reaction) - return false; + // Steps 2-3, 6-10. + auto extra = [](Handle<PromiseReactionRecord*> reaction) { + reaction->setIsAsyncFunction(); + }; + return InternalAwait(cx, value, resultPromise, onFulfilled, onRejected, extra); +} - reaction->setIsAsyncGeneratorAwait(asyncGenObj); +// Async Iteration proposal 4.1 Await steps 2-9. +MOZ_MUST_USE bool +js::AsyncGeneratorAwait(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, + HandleValue value) +{ + // Steps 4-5. + RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncGeneratorAwaitedFulfilled)); + RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncGeneratorAwaitedRejected)); - // Step 9. - return PerformPromiseThenWithReaction(cx, promise, reaction); + // Steps 2-3, 6-9. 
+ auto extra = [&](Handle<PromiseReactionRecord*> reaction) { + reaction->setIsAsyncGenerator(asyncGenObj); + }; + return InternalAwait(cx, value, nullptr, onFulfilled, onRejected, extra); } -// Async Iteration proposal 6.1.3.2.1 %AsyncFromSyncIteratorPrototype%.next -// Async Iteration proposal 6.1.3.2.2 %AsyncFromSyncIteratorPrototype%.return -// Async Iteration proposal 6.1.3.2.3 %AsyncFromSyncIteratorPrototype%.throw +// Async Iteration proposal 11.1.3.2.1 %AsyncFromSyncIteratorPrototype%.next +// Async Iteration proposal 11.1.3.2.2 %AsyncFromSyncIteratorPrototype%.return +// Async Iteration proposal 11.1.3.2.3 %AsyncFromSyncIteratorPrototype%.throw bool js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind completionKind) { @@ -2372,11 +2400,11 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co RootedValue resultVal(cx); RootedValue func(cx); if (completionKind == CompletionKind::Normal) { - // 6.1.3.2.1 steps 5-6 (partially). + // 11.1.3.2.1 steps 5-6 (partially). if (!GetProperty(cx, iter, iter, cx->names().next, &func)) return AbruptRejectPromise(cx, args, resultPromise, nullptr); } else if (completionKind == CompletionKind::Return) { - // 6.1.3.2.2 steps 5-6. + // 11.1.3.2.2 steps 5-6. if (!GetProperty(cx, iter, iter, cx->names().return_, &func)) return AbruptRejectPromise(cx, args, resultPromise, nullptr); @@ -2398,7 +2426,7 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co return true; } } else { - // 6.1.3.2.3 steps 5-6. + // 11.1.3.2.3 steps 5-6. MOZ_ASSERT(completionKind == CompletionKind::Throw); if (!GetProperty(cx, iter, iter, cx->names().throw_, &func)) return AbruptRejectPromise(cx, args, resultPromise, nullptr); @@ -2415,16 +2443,16 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co } } - // 6.1.3.2.1 steps 5-6 (partially). - // 6.1.3.2.2, 6.1.3.2.3 steps 8-9. + // 11.1.3.2.1 steps 5-6 (partially). 
+ // 11.1.3.2.2, 11.1.3.2.3 steps 8-9. RootedValue iterVal(cx, ObjectValue(*iter)); FixedInvokeArgs<1> args2(cx); args2[0].set(args.get(0)); if (!js::Call(cx, func, iterVal, args2, &resultVal)) return AbruptRejectPromise(cx, args, resultPromise, nullptr); - // 6.1.3.2.1 steps 5-6 (partially). - // 6.1.3.2.2, 6.1.3.2.3 steps 10. + // 11.1.3.2.1 steps 5-6 (partially). + // 11.1.3.2.2, 11.1.3.2.3 steps 10. if (!resultVal.isObject()) { CheckIsObjectKind kind; switch (completionKind) { @@ -2444,8 +2472,8 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co RootedObject resultObj(cx, &resultVal.toObject()); - // Following step numbers are for 6.1.3.2.1. - // For 6.1.3.2.2 and 6.1.3.2.3, steps 7-16 corresponds to steps 11-20. + // Following step numbers are for 11.1.3.2.1. + // For 11.1.3.2.2 and 11.1.3.2.3, steps 7-16 corresponds to steps 11-20. // Steps 7-8. RootedValue doneVal(cx); @@ -2458,33 +2486,16 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co if (!GetProperty(cx, resultObj, resultObj, cx->names().value, &value)) return AbruptRejectPromise(cx, args, resultPromise, nullptr); - // Step 11. - Rooted<PromiseObject*> promise(cx, CreatePromiseObjectWithoutResolutionFunctions(cx)); - if (!promise) - return false; - - // Step 12. - if (!ResolvePromiseInternal(cx, promise, value)) - return false; - // Steps 13-14. RootedValue onFulfilled(cx, Int32Value(done - ? PromiseHandlerAsyncIteratorValueUnwrapDone - : PromiseHandlerAsyncIteratorValueUnwrapNotDone)); + ? PromiseHandlerAsyncFromSyncIteratorValueUnwrapDone + : PromiseHandlerAsyncFromSyncIteratorValueUnwrapNotDone)); + RootedValue onRejected(cx, Int32Value(PromiseHandlerThrower)); - RootedObject incumbentGlobal(cx); - if (!GetObjectFromIncumbentGlobal(cx, &incumbentGlobal)) - return false; - - // Step 15. 
- Rooted<PromiseReactionRecord*> reaction(cx, NewReactionRecord(cx, resultPromise, onFulfilled, - UndefinedHandleValue, - nullptr, nullptr, - incumbentGlobal)); - if (!reaction) - return false; - - if (!PerformPromiseThenWithReaction(cx, promise, reaction)) + // Steps 11-12, 15. + auto extra = [](Handle<PromiseReactionRecord*> reaction) { + }; + if (!InternalAwait(cx, value, resultPromise, onFulfilled, onRejected, extra)) return false; // Step 16. @@ -2492,7 +2503,10 @@ js::AsyncFromSyncIteratorMethod(JSContext* cx, CallArgs& args, CompletionKind co return true; } -// Async Iteration proposal 6.4.3.3. +static MOZ_MUST_USE bool +AsyncGeneratorResumeNext(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj); + +// Async Iteration proposal 11.4.3.3. MOZ_MUST_USE bool js::AsyncGeneratorResolve(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue value, bool done) @@ -2512,43 +2526,25 @@ js::AsyncGeneratorResolve(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenO RootedObject resultPromise(cx, request->promise()); // Step 6. - Rooted<PromiseObject*> promise(cx, CreatePromiseObjectWithoutResolutionFunctions(cx)); - if (!promise) - return false; - - // Step 7. - if (!ResolvePromiseInternal(cx, promise, value)) - return false; - - // Steps 8-9. - RootedValue onFulfilled(cx, Int32Value(done - ? PromiseHandlerAsyncIteratorValueUnwrapDone - : PromiseHandlerAsyncIteratorValueUnwrapNotDone)); - - RootedObject incumbentGlobal(cx); - if (!GetObjectFromIncumbentGlobal(cx, &incumbentGlobal)) + RootedObject resultObj(cx, CreateIterResultObject(cx, value, done)); + if (!resultObj) return false; - // Step 10. - Rooted<PromiseReactionRecord*> reaction(cx, NewReactionRecord(cx, resultPromise, onFulfilled, - UndefinedHandleValue, - nullptr, nullptr, - incumbentGlobal)); - if (!reaction) - return false; + RootedValue resultValue(cx, ObjectValue(*resultObj)); - if (!PerformPromiseThenWithReaction(cx, promise, reaction)) + // Step 7. 
+ if (!ResolvePromiseInternal(cx, resultPromise, resultValue)) return false; - // Step 11. + // Step 8. if (!AsyncGeneratorResumeNext(cx, asyncGenObj)) return false; - // Step 12. + // Step 9. return true; } -// Async Iteration proposal 6.4.3.4. +// Async Iteration proposal 11.4.3.4. MOZ_MUST_USE bool js::AsyncGeneratorReject(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue exception) @@ -2579,7 +2575,98 @@ js::AsyncGeneratorReject(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenOb return true; } -// Async Iteration proposal 6.4.3.6. +// Async Iteration proposal 11.4.3.5. +static MOZ_MUST_USE bool +AsyncGeneratorResumeNext(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj) +{ + // Step 1 (implicit). + + // Steps 2-3. + MOZ_ASSERT(!asyncGenObj->isExecuting()); + + // Step 4. + if (asyncGenObj->isAwaitingYieldReturn() || asyncGenObj->isAwaitingReturn()) + return true; + + // Steps 5-6. + if (asyncGenObj->isQueueEmpty()) + return true; + + // Steps 7-8. + Rooted<AsyncGeneratorRequest*> request( + cx, AsyncGeneratorObject::peekRequest(cx, asyncGenObj)); + if (!request) + return false; + + // Step 9. + CompletionKind completionKind = request->completionKind(); + + // Step 10. + if (completionKind != CompletionKind::Normal) { + // Step 10.a. + if (asyncGenObj->isSuspendedStart()) + asyncGenObj->setCompleted(); + + // Step 10.b. + if (asyncGenObj->isCompleted()) { + RootedValue value(cx, request->completionValue()); + + // Step 10.b.i. + if (completionKind == CompletionKind::Return) { + // Steps 10.b.i.1. + asyncGenObj->setAwaitingReturn(); + + // Steps 10.b.i.4-6 (reordered). + RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncGeneratorResumeNextReturnFulfilled)); + RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncGeneratorResumeNextReturnRejected)); + + // Steps 10.b.i.2-3, 7-10. 
+ auto extra = [&](Handle<PromiseReactionRecord*> reaction) { + reaction->setIsAsyncGenerator(asyncGenObj); + }; + return InternalAwait(cx, value, nullptr, onFulfilled, onRejected, extra); + } + + // Step 10.b.ii.1. + MOZ_ASSERT(completionKind == CompletionKind::Throw); + + // Steps 10.b.ii.2-3. + return AsyncGeneratorReject(cx, asyncGenObj, value); + } + } else if (asyncGenObj->isCompleted()) { + // Step 11. + return AsyncGeneratorResolve(cx, asyncGenObj, UndefinedHandleValue, true); + } + + // Step 12. + MOZ_ASSERT(asyncGenObj->isSuspendedStart() || asyncGenObj->isSuspendedYield()); + + // Step 16 (reordered). + asyncGenObj->setExecuting(); + + RootedValue argument(cx, request->completionValue()); + + if (completionKind == CompletionKind::Return) { + // 11.4.3.7 AsyncGeneratorYield step 8.b-e. + // Since we don't have the place that handles return from yield + // inside the generator, handle the case here, with extra state + // State_AwaitingYieldReturn. + asyncGenObj->setAwaitingYieldReturn(); + + RootedValue onFulfilled(cx, Int32Value(PromiseHandlerAsyncGeneratorYieldReturnAwaitedFulfilled)); + RootedValue onRejected(cx, Int32Value(PromiseHandlerAsyncGeneratorYieldReturnAwaitedRejected)); + + auto extra = [&](Handle<PromiseReactionRecord*> reaction) { + reaction->setIsAsyncGenerator(asyncGenObj); + }; + return InternalAwait(cx, argument, nullptr, onFulfilled, onRejected, extra); + } + + // Steps 13-15, 17-21. + return AsyncGeneratorResume(cx, asyncGenObj, completionKind, argument); +} + +// Async Iteration proposal 11.4.3.6. 
MOZ_MUST_USE bool js::AsyncGeneratorEnqueue(JSContext* cx, HandleValue asyncGenVal, CompletionKind completionKind, HandleValue completionValue, diff --git a/js/src/frontend/BytecodeEmitter.cpp b/js/src/frontend/BytecodeEmitter.cpp index 11bec8e7198ed4401513dd19e346afcf60a62ce8..b318f5188408da6487a9bd83fad9b55e0f012492 100644 --- a/js/src/frontend/BytecodeEmitter.cpp +++ b/js/src/frontend/BytecodeEmitter.cpp @@ -8614,6 +8614,13 @@ BytecodeEmitter::emitReturn(ParseNode* pn) if (ParseNode* pn2 = pn->pn_kid) { if (!emitTree(pn2)) return false; + + bool isAsyncGenerator = sc->asFunctionBox()->isAsync() && + sc->asFunctionBox()->isStarGenerator(); + if (isAsyncGenerator) { + if (!emitAwait()) + return false; + } } else { /* No explicit return value provided */ if (!emit1(JSOP_UNDEFINED)) @@ -8726,6 +8733,14 @@ BytecodeEmitter::emitYield(ParseNode* pn) if (!emit1(JSOP_UNDEFINED)) return false; } + + // 11.4.3.7 AsyncGeneratorYield step 5. + bool isAsyncGenerator = sc->asFunctionBox()->isAsync(); + if (isAsyncGenerator) { + if (!emitAwait()) // RESULT + return false; + } + if (needsIteratorResult) { if (!emitFinishIteratorResult(false)) return false; @@ -8798,6 +8813,12 @@ BytecodeEmitter::emitYieldStar(ParseNode* iter) MOZ_ASSERT(this->stackDepth == startDepth); + // 11.4.3.7 AsyncGeneratorYield step 5. + if (isAsyncGenerator) { + if (!emitAwait()) // ITER RESULT + return false; + } + // Load the generator object. 
if (!emitGetDotGenerator()) // ITER RESULT GENOBJ return false; @@ -8947,11 +8968,6 @@ BytecodeEmitter::emitYieldStar(ParseNode* iter) if (!emitAtomOp(cx->names().value, JSOP_GETPROP)) // ITER OLDRESULT FTYPE FVALUE VALUE return false; - if (isAsyncGenerator) { - if (!emitAwait()) // ITER OLDRESULT FTYPE FVALUE VALUE - return false; - } - if (!emitPrepareIteratorResult()) // ITER OLDRESULT FTYPE FVALUE VALUE RESULT return false; if (!emit1(JSOP_SWAP)) // ITER OLDRESULT FTYPE FVALUE RESULT VALUE @@ -9042,11 +9058,6 @@ BytecodeEmitter::emitYieldStar(ParseNode* iter) if (!emitAtomOp(cx->names().value, JSOP_GETPROP)) // VALUE return false; - if (isAsyncGenerator) { - if (!emitAwait()) // VALUE - return false; - } - MOZ_ASSERT(this->stackDepth == startDepth - 1); return true; diff --git a/js/src/jit-test/tests/basic/testBug756919.js b/js/src/jit-test/tests/basic/testBug756919.js index 739f8e7d839e06b2bf3958225e6e24fb94b6ee50..e332ff2c315cf819d9bcf89c5a0d687f0db9bf14 100644 --- a/js/src/jit-test/tests/basic/testBug756919.js +++ b/js/src/jit-test/tests/basic/testBug756919.js @@ -1,10 +1,12 @@ -// |jit-test| allow-oom; allow-unhandlable-oom; allow-overrecursed +if (!('oomTest' in this)) + quit(); -gcparam("maxBytes", gcparam("gcBytes") + 1024); -test(); -function test() { - var upvar = ""; - function f() { upvar += ""; } - test(); - eval(''); +function test(x) { + var upvar = ""; + function f() { upvar += ""; } + if (x > 0) + test(x - 1); + eval(''); } + +oomTest(() => test(10)); diff --git a/js/src/old-configure.in b/js/src/old-configure.in index f8b6e2b8280ac02bc03032fb0fbba5ff875b66fc..2aef42ccfa27b29c71881cd8f934690ff7ad4e58 100644 --- a/js/src/old-configure.in +++ b/js/src/old-configure.in @@ -771,20 +771,6 @@ case "$target" in if test -z "$DEVELOPER_OPTIONS"; then LDFLAGS="$LDFLAGS -RELEASE" fi - dnl For profile-guided optimization - PROFILE_GEN_CFLAGS="-GL" - PROFILE_GEN_LDFLAGS="-LTCG:PGINSTRUMENT" - dnl XXX: PGO builds can fail with warnings treated as errors, - 
dnl specifically "no profile data available" appears to be - dnl treated as an error sometimes. This might be a consequence - dnl of using WARNINGS_AS_ERRORS in some modules, combined - dnl with the linker doing most of the work in the whole-program - dnl optimization/PGO case. I think it's probably a compiler bug, - dnl but we work around it here. - PROFILE_USE_CFLAGS="-GL -wd4624 -wd4952" - dnl XXX: should be -LTCG:PGOPTIMIZE, but that fails on libxul. - dnl Probably also a compiler bug, but what can you do? - PROFILE_USE_LDFLAGS="-LTCG:PGUPDATE" LDFLAGS="$LDFLAGS -DYNAMICBASE" RCFLAGS="-nologo" fi @@ -1817,34 +1803,6 @@ else AC_MSG_RESULT([no]) fi -dnl ======================================================== -dnl Profile guided optimization (gcc checks) -dnl ======================================================== -dnl Test for profiling options -dnl Under gcc 3.4+, use -fprofile-generate/-fprofile-use - -_SAVE_CFLAGS="$CFLAGS" -CFLAGS="$CFLAGS -fprofile-generate -fprofile-correction" - -AC_MSG_CHECKING([whether C compiler supports -fprofile-generate]) -AC_TRY_COMPILE([], [return 0;], - [ PROFILE_GEN_CFLAGS="-fprofile-generate" - result="yes" ], result="no") -AC_MSG_RESULT([$result]) - -if test $result = "yes"; then - PROFILE_GEN_LDFLAGS="-fprofile-generate" - PROFILE_USE_CFLAGS="-fprofile-use -fprofile-correction -Wcoverage-mismatch" - PROFILE_USE_LDFLAGS="-fprofile-use" -fi - -CFLAGS="$_SAVE_CFLAGS" - -AC_SUBST(PROFILE_GEN_CFLAGS) -AC_SUBST(PROFILE_GEN_LDFLAGS) -AC_SUBST(PROFILE_USE_CFLAGS) -AC_SUBST(PROFILE_USE_LDFLAGS) - AC_LANG_CPLUSPLUS dnl ======================================================== diff --git a/js/src/vm/AsyncIteration.cpp b/js/src/vm/AsyncIteration.cpp index 19d0617f33d70458ab8419f764cafdcd84758a03..0cdb7b97a75bdb801ac26c58e61008405e56c312 100644 --- a/js/src/vm/AsyncIteration.cpp +++ b/js/src/vm/AsyncIteration.cpp @@ -26,7 +26,7 @@ using namespace js::gc; #define UNWRAPPED_ASYNC_WRAPPED_SLOT 1 #define WRAPPED_ASYNC_UNWRAPPED_SLOT 0 -// 
Async Iteration proposal 2.3.10 Runtime Semantics: EvaluateBody. +// Async Iteration proposal 8.3.10 Runtime Semantics: EvaluateBody. static bool WrappedAsyncGenerator(JSContext* cx, unsigned argc, Value* vp) { @@ -128,11 +128,7 @@ js::GetUnwrappedAsyncGenerator(JSFunction* wrapped) return unwrapped; } -static MOZ_MUST_USE bool -AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - CompletionKind completionKind, HandleValue argument); - -// Async Iteration proposal 5.1.1 Await Fulfilled Functions. +// Async Iteration proposal 4.1.1 Await Fulfilled Functions. MOZ_MUST_USE bool js::AsyncGeneratorAwaitedFulfilled(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue value) @@ -140,7 +136,7 @@ js::AsyncGeneratorAwaitedFulfilled(JSContext* cx, Handle<AsyncGeneratorObject*> return AsyncGeneratorResume(cx, asyncGenObj, CompletionKind::Normal, value); } -// Async Iteration proposal 5.1.2 Await Rejected Functions. +// Async Iteration proposal 4.1.2 Await Rejected Functions. MOZ_MUST_USE bool js::AsyncGeneratorAwaitedRejected(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue reason) @@ -148,12 +144,30 @@ js::AsyncGeneratorAwaitedRejected(JSContext* cx, Handle<AsyncGeneratorObject*> a return AsyncGeneratorResume(cx, asyncGenObj, CompletionKind::Throw, reason); } +// Async Iteration proposal 11.4.3.7 step 8.d-e. +MOZ_MUST_USE bool +js::AsyncGeneratorYieldReturnAwaitedFulfilled(JSContext* cx, + Handle<AsyncGeneratorObject*> asyncGenObj, + HandleValue value) +{ + return AsyncGeneratorResume(cx, asyncGenObj, CompletionKind::Return, value); +} + +// Async Iteration proposal 11.4.3.7 step 8.d-e. 
+MOZ_MUST_USE bool +js::AsyncGeneratorYieldReturnAwaitedRejected(JSContext* cx, + Handle<AsyncGeneratorObject*> asyncGenObj, + HandleValue reason) +{ + return AsyncGeneratorResume(cx, asyncGenObj, CompletionKind::Throw, reason); +} + const Class AsyncFromSyncIteratorObject::class_ = { "AsyncFromSyncIteratorObject", JSCLASS_HAS_RESERVED_SLOTS(AsyncFromSyncIteratorObject::Slots) }; -// Async Iteration proposal 6.1.3.1. +// Async Iteration proposal 11.1.3.1. JSObject* js::CreateAsyncFromSyncIterator(JSContext* cx, HandleObject iter) { @@ -164,7 +178,7 @@ js::CreateAsyncFromSyncIterator(JSContext* cx, HandleObject iter) return AsyncFromSyncIteratorObject::create(cx, iter); } -// Async Iteration proposal 6.1.3.1 steps 2-4. +// Async Iteration proposal 11.1.3.1 steps 2-4. /* static */ JSObject* AsyncFromSyncIteratorObject::create(JSContext* cx, HandleObject iter) { @@ -187,7 +201,7 @@ AsyncFromSyncIteratorObject::create(JSContext* cx, HandleObject iter) return asyncIter; } -// Async Iteration proposal 6.1.3.2.1 %AsyncFromSyncIteratorPrototype%.next. +// Async Iteration proposal 11.1.3.2.1 %AsyncFromSyncIteratorPrototype%.next. static bool AsyncFromSyncIteratorNext(JSContext* cx, unsigned argc, Value* vp) { @@ -195,7 +209,7 @@ AsyncFromSyncIteratorNext(JSContext* cx, unsigned argc, Value* vp) return AsyncFromSyncIteratorMethod(cx, args, CompletionKind::Normal); } -// Async Iteration proposal 6.1.3.2.2 %AsyncFromSyncIteratorPrototype%.return. +// Async Iteration proposal 11.1.3.2.2 %AsyncFromSyncIteratorPrototype%.return. static bool AsyncFromSyncIteratorReturn(JSContext* cx, unsigned argc, Value* vp) { @@ -203,7 +217,7 @@ AsyncFromSyncIteratorReturn(JSContext* cx, unsigned argc, Value* vp) return AsyncFromSyncIteratorMethod(cx, args, CompletionKind::Return); } -// Async Iteration proposal 6.1.3.2.3 %AsyncFromSyncIteratorPrototype%.throw. +// Async Iteration proposal 11.1.3.2.3 %AsyncFromSyncIteratorPrototype%.throw. 
static bool AsyncFromSyncIteratorThrow(JSContext* cx, unsigned argc, Value* vp) { @@ -211,7 +225,7 @@ AsyncFromSyncIteratorThrow(JSContext* cx, unsigned argc, Value* vp) return AsyncFromSyncIteratorMethod(cx, args, CompletionKind::Throw); } -// Async Iteration proposal 6.4.1.2 AsyncGenerator.prototype.next. +// Async Iteration proposal 11.4.1.2 AsyncGenerator.prototype.next. static bool AsyncGeneratorNext(JSContext* cx, unsigned argc, Value* vp) { @@ -222,7 +236,7 @@ AsyncGeneratorNext(JSContext* cx, unsigned argc, Value* vp) args.rval()); } -// Async Iteration proposal 6.4.1.3 AsyncGenerator.prototype.return. +// Async Iteration proposal 11.4.1.3 AsyncGenerator.prototype.return. static bool AsyncGeneratorReturn(JSContext* cx, unsigned argc, Value* vp) { @@ -233,7 +247,7 @@ AsyncGeneratorReturn(JSContext* cx, unsigned argc, Value* vp) args.rval()); } -// Async Iteration proposal 6.4.1.4 AsyncGenerator.prototype.throw. +// Async Iteration proposal 11.4.1.4 AsyncGenerator.prototype.throw. static bool AsyncGeneratorThrow(JSContext* cx, unsigned argc, Value* vp) { @@ -371,7 +385,7 @@ const Class AsyncGeneratorRequest::class_ = { JSCLASS_HAS_RESERVED_SLOTS(AsyncGeneratorRequest::Slots) }; -// Async Iteration proposal 6.4.3.1. +// Async Iteration proposal 11.4.3.1. /* static */ AsyncGeneratorRequest* AsyncGeneratorRequest::create(JSContext* cx, CompletionKind completionKind_, HandleValue completionValue_, HandleObject promise_) @@ -387,7 +401,7 @@ AsyncGeneratorRequest::create(JSContext* cx, CompletionKind completionKind_, return request; } -// Async Iteration proposal 6.4.3.2 steps 5.d-g. +// Async Iteration proposal 11.4.3.2 AsyncGeneratorStart steps 5.d-g. 
static MOZ_MUST_USE bool AsyncGeneratorReturned(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue value) @@ -402,7 +416,7 @@ AsyncGeneratorReturned(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, return AsyncGeneratorResolve(cx, asyncGenObj, value, true); } -// Async Iteration proposal 6.4.3.2 steps 5.d, f. +// Async Iteration proposal 11.4.3.2 AsyncGeneratorStart steps 5.d, f. static MOZ_MUST_USE bool AsyncGeneratorThrown(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj) { @@ -422,85 +436,33 @@ AsyncGeneratorThrown(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj) return AsyncGeneratorReject(cx, asyncGenObj, value); } -// Async Iteration proposal 6.4.3.5. -MOZ_MUST_USE bool -js::AsyncGeneratorResumeNext(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj) -{ - // Step 1 (implicit). - - // Steps 2-3. - MOZ_ASSERT(!asyncGenObj->isExecuting()); - - // Steps 4-5. - if (asyncGenObj->isQueueEmpty()) - return true; - - // Steps 6-7. - Rooted<AsyncGeneratorRequest*> request( - cx, AsyncGeneratorObject::peekRequest(cx, asyncGenObj)); - if (!request) - return false; - - // Step 8. - CompletionKind completionKind = request->completionKind(); - - // Step 9. - if (completionKind != CompletionKind::Normal) { - // Step 9.a. - if (asyncGenObj->isSuspendedStart()) - asyncGenObj->setCompleted(); - - // Step 9.b. - if (asyncGenObj->isCompleted()) { - // Step 9.b.i. - RootedValue value(cx, request->completionValue()); - if (completionKind == CompletionKind::Return) - return AsyncGeneratorResolve(cx, asyncGenObj, value, true); - // Step 9.b.ii. - return AsyncGeneratorReject(cx, asyncGenObj, value); - } - } else if (asyncGenObj->isCompleted()) { - // Step 10. - return AsyncGeneratorResolve(cx, asyncGenObj, UndefinedHandleValue, true); - } - - // Step 11. - MOZ_ASSERT(asyncGenObj->isSuspendedStart() || asyncGenObj->isSuspendedYield()); - - // Step 15 (reordered). 
- asyncGenObj->setExecuting(); - - RootedValue argument(cx, request->completionValue()); - - // Steps 12-14, 16-20. - return AsyncGeneratorResume(cx, asyncGenObj, completionKind, argument); -} - -// Async Iteration proposal 6.2.1.3 (partially). +// Async Iteration proposal 11.4.3.7 (partially). // Most steps are done in generator. static MOZ_MUST_USE bool -AsyncGeneratorYield(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - HandleValue value) +AsyncGeneratorYield(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, HandleValue value) { - // Step 5. + // Step 5 is done in bytecode. + + // Step 6. asyncGenObj->setSuspendedYield(); - // Step 8. + // Step 9. return AsyncGeneratorResolve(cx, asyncGenObj, value, false); } -// Async Iteration proposal 6.4.3.5 steps 12-14, 16-20. -// Async Iteration proposal 6.2.1.2 step 10. -// Async Iteration proposal 6.4.3.2 step 5.f-g. -// Async Iteration proposal 5.1 steps 2-9. +// Async Iteration proposal 4.1 Await steps 2-9. +// Async Iteration proposal 8.2.1 yield* steps 6.a.vii, 6.b.ii.7, 6.c.ix. +// Async Iteration proposal 11.4.3.2 AsyncGeneratorStart step 5.f-g. +// Async Iteration proposal 11.4.3.5 AsyncGeneratorResumeNext +// steps 12-14, 16-20. // Execution context switching is handled in generator. -static MOZ_MUST_USE bool -AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - CompletionKind completionKind, HandleValue argument) +MOZ_MUST_USE bool +js::AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, + CompletionKind completionKind, HandleValue argument) { RootedValue generatorVal(cx, asyncGenObj->generatorVal()); - // 6.4.3.5 steps 12-14, 16-20. + // 11.4.3.5 steps 12-14, 16-20. HandlePropertyName funName = completionKind == CompletionKind::Normal ? 
cx->names().StarGeneratorNext : completionKind == CompletionKind::Throw @@ -510,10 +472,11 @@ AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, args[0].set(argument); RootedValue result(cx); if (!CallSelfHostedFunction(cx, funName, generatorVal, args, &result)) { - // 6.4.3.2 step 5.d, f. + // 11.4.3.2 step 5.d, f. return AsyncGeneratorThrown(cx, asyncGenObj); } + // 4.1 steps 2-9. if (asyncGenObj->generatorObj()->isAfterAwait()) return AsyncGeneratorAwait(cx, asyncGenObj, result); @@ -525,8 +488,10 @@ AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, // object and it's not observable. // For yield*, it's done on a possibly user-provided result object, and // it's observable. + // + // Note that IteratorComplete steps in 8.2.1 are done in bytecode. - // 2.2.1 yield* steps 6.a.vii, 6.b.ii.7, 6.c.ix. + // 8.2.1 yield* steps 6.a.vii, 6.b.ii.7, 6.c.ix. RootedObject resultObj(cx, &result.toObject()); RootedValue value(cx); if (!GetProperty(cx, resultObj, resultObj, cx->names().value, &value)) @@ -535,7 +500,7 @@ AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, if (asyncGenObj->generatorObj()->isAfterYield()) return AsyncGeneratorYield(cx, asyncGenObj, value); - // 6.4.3.2 step 5.d-g. + // 11.4.3.2 step 5.d-g. return AsyncGeneratorReturned(cx, asyncGenObj, value); } @@ -564,14 +529,14 @@ GlobalObject::initAsyncGenerators(JSContext* cx, Handle<GlobalObject*> global) if (global->getReservedSlot(ASYNC_ITERATOR_PROTO).isObject()) return true; - // Async Iteration proposal 6.1.2 %AsyncIteratorPrototype%. + // Async Iteration proposal 11.1.2 %AsyncIteratorPrototype%. RootedObject asyncIterProto(cx, GlobalObject::createBlankPrototype<PlainObject>(cx, global)); if (!asyncIterProto) return false; if (!DefinePropertiesAndFunctions(cx, asyncIterProto, nullptr, async_iterator_proto_methods)) return false; - // Async Iteration proposal 6.1.3.2 %AsyncFromSyncIteratorPrototype%. 
+ // Async Iteration proposal 11.1.3.2 %AsyncFromSyncIteratorPrototype%. RootedObject asyncFromSyncIterProto( cx, GlobalObject::createBlankPrototypeInheriting(cx, global, &PlainObject::class_, asyncIterProto)); @@ -584,7 +549,7 @@ GlobalObject::initAsyncGenerators(JSContext* cx, Handle<GlobalObject*> global) return false; } - // Async Iteration proposal 6.4.1 %AsyncGeneratorPrototype%. + // Async Iteration proposal 11.4.1 %AsyncGeneratorPrototype%. RootedObject asyncGenProto( cx, GlobalObject::createBlankPrototypeInheriting(cx, global, &PlainObject::class_, asyncIterProto)); @@ -596,7 +561,7 @@ GlobalObject::initAsyncGenerators(JSContext* cx, Handle<GlobalObject*> global) return false; } - // Async Iteration proposal 6.3.3 %AsyncGenerator%. + // Async Iteration proposal 11.3.3 %AsyncGenerator%. RootedObject asyncGenerator(cx, NewSingletonObjectWithFunctionPrototype(cx, global)); if (!asyncGenerator) return false; @@ -615,7 +580,7 @@ GlobalObject::initAsyncGenerators(JSContext* cx, Handle<GlobalObject*> global) RootedObject proto(cx, &function.toObject()); RootedAtom name(cx, cx->names().AsyncGeneratorFunction); - // Async Iteration proposal 6.3.2 %AsyncGeneratorFunction%. + // Async Iteration proposal 11.3.2 %AsyncGeneratorFunction%. 
RootedObject asyncGenFunction( cx, NewFunctionWithProto(cx, AsyncGeneratorConstructor, 1, JSFunction::NATIVE_CTOR, nullptr, name, proto, gc::AllocKind::FUNCTION, SingletonObject)); diff --git a/js/src/vm/AsyncIteration.h b/js/src/vm/AsyncIteration.h index 974c209a01cfcc3ab1b356ee8c0820688c697000..58c43131b44bfef272b5680368042ae2840781d5 100644 --- a/js/src/vm/AsyncIteration.h +++ b/js/src/vm/AsyncIteration.h @@ -38,11 +38,18 @@ GetUnwrappedAsyncGenerator(JSFunction* wrapped); MOZ_MUST_USE bool AsyncGeneratorAwaitedFulfilled(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - HandleValue value); - + HandleValue value); MOZ_MUST_USE bool AsyncGeneratorAwaitedRejected(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, - HandleValue reason); + HandleValue reason); +MOZ_MUST_USE bool +AsyncGeneratorYieldReturnAwaitedFulfilled(JSContext* cx, + Handle<AsyncGeneratorObject*> asyncGenObj, + HandleValue value); +MOZ_MUST_USE bool +AsyncGeneratorYieldReturnAwaitedRejected(JSContext* cx, + Handle<AsyncGeneratorObject*> asyncGenObj, + HandleValue reason); class AsyncGeneratorRequest : public NativeObject { @@ -97,6 +104,12 @@ class AsyncGeneratorObject : public NativeObject State_SuspendedStart, State_SuspendedYield, State_Executing, + // State_AwaitingYieldReturn corresponds to the case that + // AsyncGenerator#return is called while State_Executing, + // just like the case that AsyncGenerator#return is called + // while State_Completed. 
+ State_AwaitingYieldReturn, + State_AwaitingReturn, State_Completed }; @@ -155,6 +168,12 @@ class AsyncGeneratorObject : public NativeObject bool isExecuting() const { return state() == State_Executing; } + bool isAwaitingYieldReturn() const { + return state() == State_AwaitingYieldReturn; + } + bool isAwaitingReturn() const { + return state() == State_AwaitingReturn; + } bool isCompleted() const { return state() == State_Completed; } @@ -168,6 +187,12 @@ class AsyncGeneratorObject : public NativeObject void setExecuting() { setState(State_Executing); } + void setAwaitingYieldReturn() { + setState(State_AwaitingYieldReturn); + } + void setAwaitingReturn() { + setState(State_AwaitingReturn); + } void setCompleted() { setState(State_Completed); } @@ -223,7 +248,8 @@ class AsyncFromSyncIteratorObject : public NativeObject }; MOZ_MUST_USE bool -AsyncGeneratorResumeNext(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj); +AsyncGeneratorResume(JSContext* cx, Handle<AsyncGeneratorObject*> asyncGenObj, + CompletionKind completionKind, HandleValue argument); } // namespace js diff --git a/layout/reftests/writing-mode/1089388-1-ref.html b/layout/reftests/writing-mode/1089388-1-ref.html index ead9d0edaab7f08010b1e930f58545b75791ed99..38ccba0e1671dcdad9687713a03265f0d2d9c4c5 100644 --- a/layout/reftests/writing-mode/1089388-1-ref.html +++ b/layout/reftests/writing-mode/1089388-1-ref.html @@ -11,6 +11,7 @@ div { height: 200px; background: #ddd; margin: 50px; + text-rendering: optimizeLegibility; } </style> </head> diff --git a/layout/reftests/writing-mode/1089388-1.html b/layout/reftests/writing-mode/1089388-1.html index e5277bae5c6fc76d53d2279e338818f724e790a6..fc1209b42c708dcda6b469f672f599c678be250d 100644 --- a/layout/reftests/writing-mode/1089388-1.html +++ b/layout/reftests/writing-mode/1089388-1.html @@ -11,6 +11,7 @@ div { height: 200px; background: #ddd; margin: 50px; + text-rendering: optimizeLegibility; } </style> diff --git 
a/layout/reftests/writing-mode/1089388-2-ref.html b/layout/reftests/writing-mode/1089388-2-ref.html index b5b24768101ea9248989b2c2f0f8b86404781a50..34fc1d548fb99458326d9ac1026f1a589b015525 100644 --- a/layout/reftests/writing-mode/1089388-2-ref.html +++ b/layout/reftests/writing-mode/1089388-2-ref.html @@ -11,6 +11,7 @@ div { height: 200px; background: #ddd; margin: 50px; + text-rendering: optimizeLegibility; } </style> </head> diff --git a/layout/reftests/writing-mode/1089388-2.html b/layout/reftests/writing-mode/1089388-2.html index 69e3ae9fc743f4451e8621a1895a6f00d7dfe18b..8214828917c0bbb0820fb1a3bd5523a0977ba0aa 100644 --- a/layout/reftests/writing-mode/1089388-2.html +++ b/layout/reftests/writing-mode/1089388-2.html @@ -11,6 +11,7 @@ div { height: 200px; background: #ddd; margin: 50px; + text-rendering: optimizeLegibility; } </style> diff --git a/modules/libpref/init/all.js b/modules/libpref/init/all.js index da7e41511e521634dde908b66baaa762c9bb542f..e4b8eedac121e28bfb04935f6441a6996b60adfe 100644 --- a/modules/libpref/init/all.js +++ b/modules/libpref/init/all.js @@ -714,6 +714,7 @@ pref("apz.keyboard.enabled", true); #else pref("apz.keyboard.enabled", false); #endif +pref("apz.keyboard.passive-listeners", false); pref("apz.max_velocity_inches_per_ms", "-1.0"); pref("apz.max_velocity_queue_size", 5); pref("apz.min_skate_speed", "1.0"); diff --git a/old-configure.in b/old-configure.in index c98707e43fb54d8bd46074956007730df7e781f2..6e512ac893f5135acb602fac804b1f4e6274e5ac 100644 --- a/old-configure.in +++ b/old-configure.in @@ -1020,22 +1020,6 @@ case "$target" in if test -z "$DEVELOPER_OPTIONS"; then LDFLAGS="$LDFLAGS -RELEASE" fi - dnl For profile-guided optimization - PROFILE_GEN_CFLAGS="-GL" - num_cores=$($PYTHON -c 'import multiprocessing; print(min(8,multiprocessing.cpu_count()))') - cgthreads="-CGTHREADS:${num_cores}" - PROFILE_GEN_LDFLAGS="-LTCG:PGINSTRUMENT -PogoSafeMode $cgthreads" - dnl XXX: PGO builds can fail with warnings treated as errors, - dnl 
specifically "no profile data available" appears to be - dnl treated as an error sometimes. This might be a consequence - dnl of using WARNINGS_AS_ERRORS in some modules, combined - dnl with the linker doing most of the work in the whole-program - dnl optimization/PGO case. I think it's probably a compiler bug, - dnl but we work around it here. - PROFILE_USE_CFLAGS="-GL -wd4624 -wd4952" - dnl XXX: should be -LTCG:PGOPTIMIZE, but that fails on libxul. - dnl Probably also a compiler bug, but what can you do? - PROFILE_USE_LDFLAGS="-LTCG:PGUPDATE $cgthreads" LDFLAGS="$LDFLAGS -DYNAMICBASE" RCFLAGS="-nologo" dnl Minimum reqiurement of Gecko is VS2015 or later which supports @@ -1919,7 +1903,7 @@ MOZ_ARG_WITH_BOOL(system-nss, _USE_SYSTEM_NSS=1 ) if test -n "$_USE_SYSTEM_NSS"; then - AM_PATH_NSS(3.32, [MOZ_SYSTEM_NSS=1], [AC_MSG_ERROR([you don't have NSS installed or your version is too old])]) + AM_PATH_NSS(3.33, [MOZ_SYSTEM_NSS=1], [AC_MSG_ERROR([you don't have NSS installed or your version is too old])]) fi if test -n "$MOZ_SYSTEM_NSS"; then @@ -2103,7 +2087,6 @@ MOZ_SPELLCHECK=1 MOZ_TOOLKIT_SEARCH=1 MOZ_UI_LOCALE=en-US MOZ_UNIVERSALCHARDET=1 -MOZ_URL_CLASSIFIER= MOZ_XUL=1 MOZ_ZIPWRITER=1 MOZ_NO_SMART_CARDS= @@ -3727,18 +3710,6 @@ if test -n "$MOZ_SYSTEM_SQLITE"; then fi AC_SUBST(MOZ_SYSTEM_SQLITE) -dnl ======================================================== -dnl = Enable url-classifier -dnl ======================================================== -MOZ_ARG_ENABLE_BOOL(url-classifier, -[ --enable-url-classifier Enable url classifier module], - MOZ_URL_CLASSIFIER=1, - MOZ_URL_CLASSIFIER= ) -if test -n "$MOZ_URL_CLASSIFIER"; then - AC_DEFINE(MOZ_URL_CLASSIFIER) -fi -AC_SUBST(MOZ_URL_CLASSIFIER) - dnl ======================================================== dnl = Disable zipwriter dnl ======================================================== @@ -4137,18 +4108,6 @@ MOZ_ARG_WITH_STRING(jitreport-granularity, AC_DEFINE_UNQUOTED(JS_DEFAULT_JITREPORT_GRANULARITY, 
$JITREPORT_GRANULARITY) -dnl ======================================================== -dnl = Disable Mozilla's versions of RIL and Geolocation -dnl ======================================================== -MOZ_ARG_DISABLE_BOOL(mozril-geoloc, -[ --disable-mozril-geoloc Disable Mozilla's RIL and geolocation], - DISABLE_MOZ_RIL_GEOLOC=1, - DISABLE_MOZ_RIL_GEOLOC= ) -if test -n "$DISABLE_MOZ_RIL_GEOLOC"; then - AC_DEFINE(DISABLE_MOZ_RIL_GEOLOC) -fi -AC_SUBST(DISABLE_MOZ_RIL_GEOLOC) - dnl ======================================================== dnl = dnl = Misc. Options @@ -4226,34 +4185,6 @@ else AC_MSG_RESULT([no]) fi -dnl ======================================================== -dnl Profile guided optimization (gcc checks) -dnl ======================================================== -dnl Test for profiling options -dnl Under gcc 3.4+, use -fprofile-generate/-fprofile-use - -_SAVE_CFLAGS="$CFLAGS" -CFLAGS="$CFLAGS -fprofile-generate -fprofile-correction" - -AC_MSG_CHECKING([whether C compiler supports -fprofile-generate]) -AC_TRY_COMPILE([], [return 0;], - [ PROFILE_GEN_CFLAGS="-fprofile-generate" - result="yes" ], result="no") -AC_MSG_RESULT([$result]) - -if test $result = "yes"; then - PROFILE_GEN_LDFLAGS="-fprofile-generate" - PROFILE_USE_CFLAGS="-fprofile-use -fprofile-correction -Wcoverage-mismatch" - PROFILE_USE_LDFLAGS="-fprofile-use" -fi - -CFLAGS="$_SAVE_CFLAGS" - -AC_SUBST(PROFILE_GEN_CFLAGS) -AC_SUBST(PROFILE_GEN_LDFLAGS) -AC_SUBST(PROFILE_USE_CFLAGS) -AC_SUBST(PROFILE_USE_LDFLAGS) - fi # ! 
SKIP_COMPILER_CHECKS AC_DEFINE(CPP_THROW_NEW, [throw()]) diff --git a/other-licenses/nsis/Plugins/nsJSON.dll b/other-licenses/nsis/Plugins/nsJSON.dll new file mode 100644 index 0000000000000000000000000000000000000000..36e5263d92f712a3047aa379f32c4d9063da9e8a Binary files /dev/null and b/other-licenses/nsis/Plugins/nsJSON.dll differ diff --git a/security/nss/TAG-INFO b/security/nss/TAG-INFO index 6c2d2b7cca1aeac0f8ea6d0e68071596849d5005..bc4662b8816b2ccb9d391944f01e22779fa5594e 100644 --- a/security/nss/TAG-INFO +++ b/security/nss/TAG-INFO @@ -1 +1 @@ -NSS_3_32_RTM +a0a4e05dcdd5 diff --git a/security/nss/automation/abi-check/previous-nss-release b/security/nss/automation/abi-check/previous-nss-release index b8d28cde031c75fabe62ae323be62835857d2a51..0b2cd988f3b9b94a22f09c4eda3a4e27bf32f358 100644 --- a/security/nss/automation/abi-check/previous-nss-release +++ b/security/nss/automation/abi-check/previous-nss-release @@ -1 +1 @@ -NSS_3_31_BRANCH +NSS_3_32_BRANCH diff --git a/security/nss/automation/clang-format/run_clang_format.sh b/security/nss/automation/clang-format/run_clang_format.sh index 2ba5ebeb1a0b1761caf950c5e29d6fe8dff81ba8..378b00ff091a30f089b24ab97664d77a79f05dc7 100755 --- a/security/nss/automation/clang-format/run_clang_format.sh +++ b/security/nss/automation/clang-format/run_clang_format.sh @@ -6,6 +6,8 @@ if [[ $(id -u) -eq 0 ]]; then exec su worker -c "$0 $*" fi +set -e + # Apply clang-format on the provided folder and verify that this doesn't change any file. # If any file differs after formatting, the script eventually exits with 1. # Any differences between formatted and unformatted files is printed to stdout to give a hint what's wrong. @@ -21,17 +23,16 @@ blacklist=( "./lib/zlib" \ "./lib/sqlite" \ "./gtests/google_test" \ - "./.hg" \ "./out" \ ) -top="$(dirname $0)/../.." -cd "$top" +top=$(cd "$(dirname $0)/../.."; pwd -P) if [ $# -gt 0 ]; then dirs=("$@") else - dirs=($(find . -maxdepth 2 -mindepth 1 -type d ! -path . \( ! 
-regex '.*/' \))) + cd "$top" + dirs=($(find . -maxdepth 2 -mindepth 1 -type d ! -path '*/.*' -print)) fi format_folder() @@ -46,20 +47,20 @@ format_folder() } for dir in "${dirs[@]}"; do - if format_folder "$dir" ; then + if format_folder "$dir"; then c="${dir//[^\/]}" echo "formatting $dir ..." - depth="" + depth=() if [ "${#c}" == "1" ]; then - depth="-maxdepth 1" + depth+=(-maxdepth 1) fi - find "$dir" $depth -type f \( -name '*.[ch]' -o -name '*.cc' \) -exec clang-format -i {} \+ + find "$dir" "${depth[@]}" -type f \( -name '*.[ch]' -o -name '*.cc' \) -exec clang-format -i {} \+ fi done TMPFILE=$(mktemp /tmp/$(basename $0).XXXXXX) -trap 'rm $TMPFILE' exit -if (cd $(dirname $0); hg root >/dev/null 2>&1); then +trap 'rm -f $TMPFILE' exit +if [[ -d "$top/.hg" ]]; then hg diff --git "$top" | tee $TMPFILE else git -C "$top" diff | tee $TMPFILE diff --git a/security/nss/automation/release/nspr-version.txt b/security/nss/automation/release/nspr-version.txt index 98783a615299f1445f0eaf51b0de7c00c26ec0f7..9a4d1308957ed3b6e0652fbeb7d1ce6fc2682c7b 100644 --- a/security/nss/automation/release/nspr-version.txt +++ b/security/nss/automation/release/nspr-version.txt @@ -1,4 +1,4 @@ -4.16 +4.15 # The first line of this file must contain the human readable NSPR # version number, which is the minimum required version of NSPR diff --git a/security/nss/cmd/lib/secutil.c b/security/nss/cmd/lib/secutil.c index cb4752df9ca7e1d94ed5ce62e7dc6bb89ad03f1e..25a58e10c528ebc9052656422ecef81517339ab3 100644 --- a/security/nss/cmd/lib/secutil.c +++ b/security/nss/cmd/lib/secutil.c @@ -991,7 +991,7 @@ secu_PrintUniversalString(FILE *out, const SECItem *i, const char *m, int level) for (s = my.data, d = tmp.data; len > 0; len--) { PRUint32 bmpChar = (s[0] << 24) | (s[1] << 16) | (s[2] << 8) | s[3]; s += 4; - if (!isprint(bmpChar)) + if (!isprint(bmpChar & 0xFF)) goto loser; *d++ = (unsigned char)bmpChar; } diff --git a/security/nss/cmd/modutil/error.h b/security/nss/cmd/modutil/error.h index 
b328afebc6f17205f983085e489292c51acc3596..a75314f62a8f4c649e7f104d57fccd4ab7b3c99c 100644 --- a/security/nss/cmd/modutil/error.h +++ b/security/nss/cmd/modutil/error.h @@ -109,7 +109,7 @@ static char *errStrings[] = { "ERROR: Failed to change default.\n", "ERROR: Unable to read from standard input.\n", "ERROR: Unknown error occurred.\n", - "ERROR: -nocertdb option can only be used with the -jar command.\n" + "ERROR: -nocertdb option can only be used with the -jar command.\n", "ERROR: NSS_Initialize() failed.\n" }; diff --git a/security/nss/cmd/pp/pp.c b/security/nss/cmd/pp/pp.c index 9f33d10a471e02cb922f03f72bbec9ef700e5a8c..d6e276834c3782c3838ce09031fe19baeb10d325 100644 --- a/security/nss/cmd/pp/pp.c +++ b/security/nss/cmd/pp/pp.c @@ -84,6 +84,8 @@ main(int argc, char **argv) if (!inFile) { fprintf(stderr, "%s: unable to open \"%s\" for reading\n", progName, optstate->value); + PORT_Free(typeTag); + PL_DestroyOptState(optstate); return -1; } break; @@ -93,6 +95,8 @@ main(int argc, char **argv) if (!outFile) { fprintf(stderr, "%s: unable to open \"%s\" for writing\n", progName, optstate->value); + PORT_Free(typeTag); + PL_DestroyOptState(optstate); return -1; } break; diff --git a/security/nss/cmd/tstclnt/tstclnt.c b/security/nss/cmd/tstclnt/tstclnt.c index 959afec597babce958099b13f930b672e8902d7d..31cd030e3ef79d95222ac55f967b3e0f11df448c 100644 --- a/security/nss/cmd/tstclnt/tstclnt.c +++ b/security/nss/cmd/tstclnt/tstclnt.c @@ -31,6 +31,7 @@ #include "ocsp.h" #include "ssl.h" #include "sslproto.h" +#include "sslexp.h" #include "pk11func.h" #include "secmod.h" #include "plgetopt.h" @@ -251,6 +252,7 @@ PrintParameterUsage(void) "%-20s The following values are valid:\n" "%-20s P256, P384, P521, x25519, FF2048, FF3072, FF4096, FF6144, FF8192\n", "-I", "", ""); + fprintf(stderr, "%-20s Enable alternate content type for TLS 1.3 ServerHello\n", "-X alt-server-hello"); } static void @@ -914,6 +916,7 @@ char *requestString = NULL; PRInt32 requestStringLen = 0; PRBool 
requestSent = PR_FALSE; PRBool enableZeroRtt = PR_FALSE; +PRBool enableAltServerHello = PR_FALSE; static int writeBytesToServer(PRFileDesc *s, const char *buf, int nb) @@ -1178,6 +1181,16 @@ run_client(void) } } + /* Alternate ServerHello content type (TLS 1.3 only) */ + if (enableAltServerHello) { + rv = SSL_UseAltServerHelloType(s, PR_TRUE); + if (rv != SECSuccess) { + SECU_PrintError(progName, "error enabling alternate ServerHello type"); + error = 1; + goto done; + } + } + /* require the use of fixed finite-field DH groups */ if (requireDHNamedGroups) { rv = SSL_OptionSet(s, SSL_REQUIRE_DH_NAMED_GROUPS, PR_TRUE); @@ -1512,7 +1525,7 @@ main(int argc, char **argv) /* XXX: 'B' was used in the past but removed in 3.28, * please leave some time before resuing it. */ optstate = PL_CreateOptState(argc, argv, - "46A:CDFGHI:KL:M:OR:STUV:W:YZa:bc:d:fgh:m:n:op:qr:st:uvw:z"); + "46A:CDFGHI:KL:M:OR:STUV:W:X:YZa:bc:d:fgh:m:n:op:qr:st:uvw:z"); while ((optstatus = PL_GetNextOpt(optstate)) == PL_OPT_OK) { switch (optstate->option) { case '?': @@ -1618,6 +1631,13 @@ main(int argc, char **argv) } break; + case 'X': + if (!strcmp(optstate->value, "alt-server-hello")) { + enableAltServerHello = PR_TRUE; + } else { + Usage(progName); + } + break; case 'Y': PrintCipherUsage(progName); exit(0); diff --git a/security/nss/coreconf/coreconf.dep b/security/nss/coreconf/coreconf.dep index 5182f75552c81540c315e8eb17ce933d5f2039b8..590d1bfaeee3f134b616ff41d59c05c9917afa3c 100644 --- a/security/nss/coreconf/coreconf.dep +++ b/security/nss/coreconf/coreconf.dep @@ -10,3 +10,4 @@ */ #error "Do not include this header file." 
+ diff --git a/security/nss/cpputil/tls_parser.h b/security/nss/cpputil/tls_parser.h index 15ba3b175b7506676a1e2a84aa3fa6242ed0cfd9..7a08091656a4ae68924157c716ba5ca33271fb5a 100644 --- a/security/nss/cpputil/tls_parser.h +++ b/security/nss/cpputil/tls_parser.h @@ -24,6 +24,7 @@ const uint8_t kTlsChangeCipherSpecType = 20; const uint8_t kTlsAlertType = 21; const uint8_t kTlsHandshakeType = 22; const uint8_t kTlsApplicationDataType = 23; +const uint8_t kTlsAltHandshakeType = 24; const uint8_t kTlsHandshakeClientHello = 1; const uint8_t kTlsHandshakeServerHello = 2; diff --git a/security/nss/fuzz/config/clone_libfuzzer.sh b/security/nss/fuzz/config/clone_libfuzzer.sh index f1dc2e14bbf78c7969e8b881a6a32a9d33f3b65c..c516057d78740cf94300d0150139ad47165637c5 100755 --- a/security/nss/fuzz/config/clone_libfuzzer.sh +++ b/security/nss/fuzz/config/clone_libfuzzer.sh @@ -1,6 +1,6 @@ #!/bin/sh -LIBFUZZER_REVISION=56bd1d43451cca4b6a11d3be316bb77ab159b09d +LIBFUZZER_REVISION=6937e68f927b6aefe526fcb9db8953f497e6e74d d=$(dirname $0) $d/git-copy.sh https://chromium.googlesource.com/chromium/llvm-project/llvm/lib/Fuzzer $LIBFUZZER_REVISION $d/../libFuzzer diff --git a/security/nss/gtests/certdb_gtest/alg1485_unittest.cc b/security/nss/gtests/certdb_gtest/alg1485_unittest.cc index b7c65941496f64faa4d163c952ee84494fa99812..ef6733092785de63611595eab0e4f0f298d88bfb 100644 --- a/security/nss/gtests/certdb_gtest/alg1485_unittest.cc +++ b/security/nss/gtests/certdb_gtest/alg1485_unittest.cc @@ -10,6 +10,7 @@ #include "nss.h" #include "scoped_ptrs.h" +#include "prprf.h" namespace nss_test { @@ -89,4 +90,23 @@ INSTANTIATE_TEST_CASE_P(ParseAVAStrings, Alg1485ParseTest, ::testing::ValuesIn(kAVATestStrings)); INSTANTIATE_TEST_CASE_P(CompareAVAStrings, Alg1485CompareTest, ::testing::ValuesIn(kAVACompareStrings)); + +TEST_F(Alg1485Test, ShortOIDTest) { + // This is not a valid OID (too short). CERT_GetOidString should return 0. 
+ unsigned char data[] = {0x05}; + const SECItem oid = {siBuffer, data, sizeof(data)}; + char* result = CERT_GetOidString(&oid); + EXPECT_EQ(result, nullptr); +} + +TEST_F(Alg1485Test, BrokenOIDTest) { + // This is not a valid OID (first bit of last byte is not set). + // CERT_GetOidString should return 0. + unsigned char data[] = {0x81, 0x82, 0x83, 0x84}; + const SECItem oid = {siBuffer, data, sizeof(data)}; + char* result = CERT_GetOidString(&oid); + EXPECT_EQ(15U, strlen(result)); + EXPECT_EQ(0, strncmp("OID.UNSUPPORTED", result, 15)); + PR_smprintf_free(result); +} } diff --git a/security/nss/gtests/manifest.mn b/security/nss/gtests/manifest.mn index 1ae4cab776ccd29a156e5d3de73aa3d39bad7c56..3bc3664329bc18aaf5e3cca9a18e2796b496becf 100644 --- a/security/nss/gtests/manifest.mn +++ b/security/nss/gtests/manifest.mn @@ -23,8 +23,9 @@ NSS_SRCDIRS = \ certdb_gtest \ certhigh_gtest \ pk11_gtest \ + softoken_gtest \ ssl_gtest \ - nss_bogo_shim \ + nss_bogo_shim \ $(NULL) endif endif diff --git a/security/nss/gtests/softoken_gtest/Makefile b/security/nss/gtests/softoken_gtest/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..996669782d9eaf3ca4cf4ccbed1abed0a53c2d8a --- /dev/null +++ b/security/nss/gtests/softoken_gtest/Makefile @@ -0,0 +1,45 @@ +#! gmake +# +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +####################################################################### +# (1) Include initial platform-independent assignments (MANDATORY). # +####################################################################### + +include manifest.mn + +####################################################################### +# (2) Include "global" configuration information. 
(OPTIONAL) # +####################################################################### + +include $(CORE_DEPTH)/coreconf/config.mk + +####################################################################### +# (3) Include "component" configuration information. (OPTIONAL) # +####################################################################### + + +####################################################################### +# (4) Include "local" platform-dependent assignments (OPTIONAL). # +####################################################################### + +include ../common/gtest.mk + +CFLAGS += -I$(CORE_DEPTH)/lib/util + +####################################################################### +# (5) Execute "global" rules. (OPTIONAL) # +####################################################################### + +include $(CORE_DEPTH)/coreconf/rules.mk + +####################################################################### +# (6) Execute "component" rules. (OPTIONAL) # +####################################################################### + + +####################################################################### +# (7) Execute "local" rules. (OPTIONAL). # +####################################################################### diff --git a/security/nss/gtests/softoken_gtest/manifest.mn b/security/nss/gtests/softoken_gtest/manifest.mn new file mode 100644 index 0000000000000000000000000000000000000000..4b34c099f576178636ee97e6b58545ea8a7e4993 --- /dev/null +++ b/security/nss/gtests/softoken_gtest/manifest.mn @@ -0,0 +1,25 @@ +# -*- makefile -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +CORE_DEPTH = ../.. +DEPTH = ../.. 
+MODULE = nss + +CPPSRCS = \ + softoken_gtest.cc \ + $(NULL) + +INCLUDES += \ + -I$(CORE_DEPTH)/gtests/google_test/gtest/include \ + -I$(CORE_DEPTH)/cpputil \ + $(NULL) + +REQUIRES = nspr gtest + +PROGRAM = softoken_gtest + +EXTRA_LIBS = \ + $(DIST)/lib/$(LIB_PREFIX)gtest.$(LIB_SUFFIX) \ + $(DIST)/lib/$(LIB_PREFIX)gtestutil.$(LIB_SUFFIX) \ + $(NULL) diff --git a/security/nss/gtests/softoken_gtest/softoken_gtest.cc b/security/nss/gtests/softoken_gtest/softoken_gtest.cc new file mode 100644 index 0000000000000000000000000000000000000000..2cddaed50040c13086d10191f378c5b1b0360e56 --- /dev/null +++ b/security/nss/gtests/softoken_gtest/softoken_gtest.cc @@ -0,0 +1,125 @@ +#include <cstdlib> + +#include "nspr.h" +#include "nss.h" +#include "pk11pub.h" + +#include "scoped_ptrs.h" + +#define GTEST_HAS_RTTI 0 +#include "gtest/gtest.h" + +namespace nss_test { + +// Given a prefix, attempts to create a unique directory that the user can do +// work in without impacting other tests. For example, if given the prefix +// "scratch", a directory like "scratch05c17b25" will be created in the current +// working directory (or the location specified by NSS_GTEST_WORKDIR, if +// defined). +// Upon destruction, the implementation will attempt to delete the directory. +// However, no attempt is made to first remove files in the directory - the +// user is responsible for this. If the directory is not empty, deleting it will +// fail. +// Statistically, it is technically possible to fail to create a unique +// directory name, but this is extremely unlikely given the expected workload of +// this implementation. 
+class ScopedUniqueDirectory { + public: + explicit ScopedUniqueDirectory(const std::string& prefix); + + // NB: the directory must be empty upon destruction + ~ScopedUniqueDirectory() { assert(rmdir(mPath.c_str()) == 0); } + + const std::string& GetPath() { return mPath; } + + private: + static const int RETRY_LIMIT = 5; + static void GenerateRandomName(/*in/out*/ std::string& prefix); + static bool TryMakingDirectory(/*in/out*/ std::string& prefix); + + std::string mPath; +}; + +ScopedUniqueDirectory::ScopedUniqueDirectory(const std::string& prefix) { + std::string path; + const char* workingDirectory = PR_GetEnvSecure("NSS_GTEST_WORKDIR"); + if (workingDirectory) { + path.assign(workingDirectory); + } + path.append(prefix); + for (int i = 0; i < RETRY_LIMIT; i++) { + std::string pathCopy(path); + // TryMakingDirectory will modify its input. If it fails, we want to throw + // away the modified result. + if (TryMakingDirectory(pathCopy)) { + mPath.assign(pathCopy); + break; + } + } + assert(mPath.length() > 0); +} + +void ScopedUniqueDirectory::GenerateRandomName(std::string& prefix) { + std::stringstream ss; + ss << prefix; + // RAND_MAX is at least 32767. + ss << std::setfill('0') << std::setw(4) << std::hex << rand() << rand(); + // This will overwrite the value of prefix. This is a little inefficient, but + // at least it makes the code simple. 
+ ss >> prefix; +} + +bool ScopedUniqueDirectory::TryMakingDirectory(std::string& prefix) { + GenerateRandomName(prefix); +#if defined(_WIN32) + return _mkdir(prefix.c_str()) == 0; +#else + return mkdir(prefix.c_str(), 0777) == 0; +#endif +} + +class SoftokenTest : public ::testing::Test { + protected: + SoftokenTest() : mNSSDBDir("SoftokenTest.d-") {} + + virtual void SetUp() { + std::string nssInitArg("sql:"); + nssInitArg.append(mNSSDBDir.GetPath()); + ASSERT_EQ(SECSuccess, NSS_Initialize(nssInitArg.c_str(), "", "", SECMOD_DB, + NSS_INIT_NOROOTINIT)); + } + + virtual void TearDown() { + ASSERT_EQ(SECSuccess, NSS_Shutdown()); + const std::string& nssDBDirPath = mNSSDBDir.GetPath(); + ASSERT_EQ(0, unlink((nssDBDirPath + "/cert9.db").c_str())); + ASSERT_EQ(0, unlink((nssDBDirPath + "/key4.db").c_str())); + ASSERT_EQ(0, unlink((nssDBDirPath + "/pkcs11.txt").c_str())); + } + + ScopedUniqueDirectory mNSSDBDir; +}; + +TEST_F(SoftokenTest, ResetSoftokenEmptyPassword) { + ScopedPK11SlotInfo slot(PK11_GetInternalKeySlot()); + ASSERT_TRUE(slot); + EXPECT_EQ(SECSuccess, PK11_InitPin(slot.get(), nullptr, nullptr)); + EXPECT_EQ(SECSuccess, PK11_ResetToken(slot.get(), nullptr)); + EXPECT_EQ(SECSuccess, PK11_InitPin(slot.get(), nullptr, nullptr)); +} + +TEST_F(SoftokenTest, ResetSoftokenNonEmptyPassword) { + ScopedPK11SlotInfo slot(PK11_GetInternalKeySlot()); + ASSERT_TRUE(slot); + EXPECT_EQ(SECSuccess, PK11_InitPin(slot.get(), nullptr, "password")); + EXPECT_EQ(SECSuccess, PK11_ResetToken(slot.get(), nullptr)); + EXPECT_EQ(SECSuccess, PK11_InitPin(slot.get(), nullptr, "password2")); +} + +} // namespace nss_test + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/softoken_gtest/softoken_gtest.gyp b/security/nss/gtests/softoken_gtest/softoken_gtest.gyp new file mode 100644 index 0000000000000000000000000000000000000000..cff0ea414a7dcc7782ddec78b4a507d370e74653 --- /dev/null +++ 
b/security/nss/gtests/softoken_gtest/softoken_gtest.gyp @@ -0,0 +1,51 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +{ + 'includes': [ + '../../coreconf/config.gypi', + '../common/gtest.gypi', + ], + 'targets': [ + { + 'target_name': 'softoken_gtest', + 'type': 'executable', + 'sources': [ + 'softoken_gtest.cc', + ], + 'dependencies': [ + '<(DEPTH)/exports.gyp:nss_exports', + '<(DEPTH)/lib/util/util.gyp:nssutil3', + '<(DEPTH)/gtests/google_test/google_test.gyp:gtest', + ], + 'conditions': [ + [ 'test_build==1', { + 'dependencies': [ + '<(DEPTH)/lib/nss/nss.gyp:nss_static', + '<(DEPTH)/lib/pk11wrap/pk11wrap.gyp:pk11wrap_static', + '<(DEPTH)/lib/cryptohi/cryptohi.gyp:cryptohi', + '<(DEPTH)/lib/certhigh/certhigh.gyp:certhi', + '<(DEPTH)/lib/certdb/certdb.gyp:certdb', + '<(DEPTH)/lib/base/base.gyp:nssb', + '<(DEPTH)/lib/dev/dev.gyp:nssdev', + '<(DEPTH)/lib/pki/pki.gyp:nsspki', + '<(DEPTH)/lib/ssl/ssl.gyp:ssl', + ], + }, { + 'dependencies': [ + '<(DEPTH)/lib/nss/nss.gyp:nss3', + '<(DEPTH)/lib/ssl/ssl.gyp:ssl3', + ], + }], + ], + } + ], + 'target_defaults': { + 'include_dirs': [ + '../../lib/util' + ] + }, + 'variables': { + 'module': 'nss' + } +} diff --git a/security/nss/gtests/ssl_gtest/manifest.mn b/security/nss/gtests/ssl_gtest/manifest.mn index cc729c0f161ac217c4ce2245a92ab96c0a196ace..e7d3e10878f9d197c97240f2899bbcd492333592 100644 --- a/security/nss/gtests/ssl_gtest/manifest.mn +++ b/security/nss/gtests/ssl_gtest/manifest.mn @@ -30,6 +30,7 @@ CPPSRCS = \ ssl_gtest.cc \ ssl_hrr_unittest.cc \ ssl_loopback_unittest.cc \ + ssl_misc_unittest.cc \ ssl_record_unittest.cc \ ssl_resumption_unittest.cc \ ssl_skip_unittest.cc \ diff --git a/security/nss/gtests/ssl_gtest/ssl_0rtt_unittest.cc b/security/nss/gtests/ssl_gtest/ssl_0rtt_unittest.cc index 
85b7011a1cd29d2437e671a566da43ef5d9ad239..a144161d67b115e71f1aebcce0ab6516a6769b95 100644 --- a/security/nss/gtests/ssl_gtest/ssl_0rtt_unittest.cc +++ b/security/nss/gtests/ssl_gtest/ssl_0rtt_unittest.cc @@ -7,6 +7,7 @@ #include "secerr.h" #include "ssl.h" #include "sslerr.h" +#include "sslexp.h" #include "sslproto.h" extern "C" { diff --git a/security/nss/gtests/ssl_gtest/ssl_gtest.gyp b/security/nss/gtests/ssl_gtest/ssl_gtest.gyp index 8cd7d10096852141f0810eb456050894cd4e0d25..9bf147e4f549b340d58376ec89906255b4b8b390 100644 --- a/security/nss/gtests/ssl_gtest/ssl_gtest.gyp +++ b/security/nss/gtests/ssl_gtest/ssl_gtest.gyp @@ -31,6 +31,7 @@ 'ssl_gtest.cc', 'ssl_hrr_unittest.cc', 'ssl_loopback_unittest.cc', + 'ssl_misc_unittest.cc', 'ssl_record_unittest.cc', 'ssl_resumption_unittest.cc', 'ssl_skip_unittest.cc', diff --git a/security/nss/gtests/ssl_gtest/ssl_loopback_unittest.cc b/security/nss/gtests/ssl_gtest/ssl_loopback_unittest.cc index 77703dd8efba11ee5b31ecb04c5c38a29ee9c534..d7a21f99db7a02e1f3d2d21bece3bdfbc9c7d1ea 100644 --- a/security/nss/gtests/ssl_gtest/ssl_loopback_unittest.cc +++ b/security/nss/gtests/ssl_gtest/ssl_loopback_unittest.cc @@ -6,10 +6,12 @@ #include <functional> #include <memory> +#include <vector> #include "secerr.h" #include "ssl.h" #include "sslerr.h" #include "sslproto.h" +#include "ssl3prot.h" extern "C" { // This is not something that should make you happy. 
@@ -323,6 +325,42 @@ TEST_F(TlsConnectStreamTls13, NegotiateShortHeaders) { Connect(); } +TEST_F(TlsConnectStreamTls13, ClientAltHandshakeType) { + client_->SetAltHandshakeTypeEnabled(); + auto filter = std::make_shared<TlsHeaderRecorder>(); + server_->SetPacketFilter(filter); + Connect(); + ASSERT_EQ(kTlsHandshakeType, filter->header(0)->content_type()); +} + +TEST_F(TlsConnectStreamTls13, ServerAltHandshakeType) { + server_->SetAltHandshakeTypeEnabled(); + auto filter = std::make_shared<TlsHeaderRecorder>(); + server_->SetPacketFilter(filter); + Connect(); + ASSERT_EQ(kTlsHandshakeType, filter->header(0)->content_type()); +} + +TEST_F(TlsConnectStreamTls13, BothAltHandshakeType) { + client_->SetAltHandshakeTypeEnabled(); + server_->SetAltHandshakeTypeEnabled(); + auto header_filter = std::make_shared<TlsHeaderRecorder>(); + auto sh_filter = std::make_shared<TlsInspectorRecordHandshakeMessage>( + kTlsHandshakeServerHello); + std::vector<std::shared_ptr<PacketFilter>> filters = {header_filter, + sh_filter}; + auto chained = std::make_shared<ChainedPacketFilter>(filters); + server_->SetPacketFilter(chained); + header_filter->SetAgent(server_.get()); + header_filter->EnableDecryption(); + Connect(); + ASSERT_EQ(kTlsAltHandshakeType, header_filter->header(0)->content_type()); + ASSERT_EQ(kTlsHandshakeType, header_filter->header(1)->content_type()); + uint32_t ver; + ASSERT_TRUE(sh_filter->buffer().Read(0, 2, &ver)); + ASSERT_EQ((uint32_t)(0x7a00 | TLS_1_3_DRAFT_VERSION), ver); +} + INSTANTIATE_TEST_CASE_P( GenericStream, TlsConnectGeneric, ::testing::Combine(TlsConnectTestBase::kTlsVariantsStream, diff --git a/security/nss/gtests/ssl_gtest/ssl_misc_unittest.cc b/security/nss/gtests/ssl_gtest/ssl_misc_unittest.cc new file mode 100644 index 0000000000000000000000000000000000000000..2b1b92dcd809d97be3beb32494312823d6644ad7 --- /dev/null +++ b/security/nss/gtests/ssl_gtest/ssl_misc_unittest.cc @@ -0,0 +1,20 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; 
c-basic-offset: 2 -*- */ +/* vim: set ts=2 et sw=2 tw=80: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "sslexp.h" + +#include "gtest_utils.h" + +namespace nss_test { + +class MiscTest : public ::testing::Test {}; + +TEST_F(MiscTest, NonExistentExperimentalAPI) { + EXPECT_EQ(nullptr, SSL_GetExperimentalAPI("blah")); + EXPECT_EQ(SSL_ERROR_UNSUPPORTED_EXPERIMENTAL_API, PORT_GetError()); +} + +} // namespace nss_test diff --git a/security/nss/gtests/ssl_gtest/tls_agent.cc b/security/nss/gtests/ssl_gtest/tls_agent.cc index d6d91f7f7b9487825f6a15f6b3c389f6c97d634b..7d76cffb26f0fd1218038b417c3975c5f18e69f1 100644 --- a/security/nss/gtests/ssl_gtest/tls_agent.cc +++ b/security/nss/gtests/ssl_gtest/tls_agent.cc @@ -10,6 +10,7 @@ #include "pk11func.h" #include "ssl.h" #include "sslerr.h" +#include "sslexp.h" #include "sslproto.h" #include "tls_parser.h" @@ -414,6 +415,13 @@ void TlsAgent::SetShortHeadersEnabled() { EXPECT_EQ(SECSuccess, rv); } +void TlsAgent::SetAltHandshakeTypeEnabled() { + EXPECT_TRUE(EnsureTlsSetup()); + + SECStatus rv = SSL_UseAltServerHelloType(ssl_fd(), true); + EXPECT_EQ(SECSuccess, rv); +} + void TlsAgent::SetVersionRange(uint16_t minver, uint16_t maxver) { vrange_.min = minver; vrange_.max = maxver; diff --git a/security/nss/gtests/ssl_gtest/tls_agent.h b/security/nss/gtests/ssl_gtest/tls_agent.h index 4bccb9a8498ec16bd9c8d0cf1a2a22c2647e2713..2ae10bbd0ac109d2e52eb1614d90256aef776bd2 100644 --- a/security/nss/gtests/ssl_gtest/tls_agent.h +++ b/security/nss/gtests/ssl_gtest/tls_agent.h @@ -127,6 +127,7 @@ class TlsAgent : public PollTarget { void Set0RttEnabled(bool en); void SetFallbackSCSVEnabled(bool en); void SetShortHeadersEnabled(); + void SetAltHandshakeTypeEnabled(); void SetVersionRange(uint16_t minver, uint16_t maxver); void GetVersionRange(uint16_t* minver, 
uint16_t* maxver); void CheckPreliminaryInfo(); diff --git a/security/nss/gtests/ssl_gtest/tls_connect.cc b/security/nss/gtests/ssl_gtest/tls_connect.cc index c8de5a1fee1e4ba8422608945cf8e4258dbade50..19ca0929bf2ed3c7be0851a0abb5c1e74a54e809 100644 --- a/security/nss/gtests/ssl_gtest/tls_connect.cc +++ b/security/nss/gtests/ssl_gtest/tls_connect.cc @@ -5,6 +5,7 @@ * You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "tls_connect.h" +#include "sslexp.h" extern "C" { #include "libssl_internals.h" } diff --git a/security/nss/gtests/ssl_gtest/tls_filter.cc b/security/nss/gtests/ssl_gtest/tls_filter.cc index 76d9aaaffcbbebff88707d946da29f172e021917..433b9bf7b78808f9eccd9c0813b073b053816816 100644 --- a/security/nss/gtests/ssl_gtest/tls_filter.cc +++ b/security/nss/gtests/ssl_gtest/tls_filter.cc @@ -227,7 +227,8 @@ PacketFilter::Action TlsHandshakeFilter::FilterRecord( const TlsRecordHeader& record_header, const DataBuffer& input, DataBuffer* output) { // Check that the first byte is as requested. 
- if (record_header.content_type() != kTlsHandshakeType) { + if ((record_header.content_type() != kTlsHandshakeType) && + (record_header.content_type() != kTlsAltHandshakeType)) { return KEEP; } @@ -369,6 +370,20 @@ PacketFilter::Action TlsConversationRecorder::FilterRecord( return KEEP; } +PacketFilter::Action TlsHeaderRecorder::FilterRecord( + const TlsRecordHeader& header, const DataBuffer& input, + DataBuffer* output) { + headers_.push_back(header); + return KEEP; +} + +const TlsRecordHeader* TlsHeaderRecorder::header(size_t index) { + if (index > headers_.size() + 1) { + return nullptr; + } + return &headers_[index]; +} + PacketFilter::Action ChainedPacketFilter::Filter(const DataBuffer& input, DataBuffer* output) { DataBuffer in(input); @@ -378,6 +393,7 @@ PacketFilter::Action ChainedPacketFilter::Filter(const DataBuffer& input, if (action == DROP) { return DROP; } + if (action == CHANGE) { in = *output; changed = true; diff --git a/security/nss/gtests/ssl_gtest/tls_filter.h b/security/nss/gtests/ssl_gtest/tls_filter.h index e4030e23f5aacbf8259b1e8f66b30d639442eb28..bb05664a551514f28f619904b620f87cac1cd7a3 100644 --- a/security/nss/gtests/ssl_gtest/tls_filter.h +++ b/security/nss/gtests/ssl_gtest/tls_filter.h @@ -133,6 +133,7 @@ inline std::ostream& operator<<(std::ostream& stream, TlsRecordHeader& hdr) { stream << "Alert"; break; case kTlsHandshakeType: + case kTlsAltHandshakeType: stream << "Handshake"; break; case kTlsApplicationDataType: @@ -230,7 +231,19 @@ class TlsConversationRecorder : public TlsRecordFilter { DataBuffer* output); private: - DataBuffer& buffer_; + DataBuffer buffer_; +}; + +// Make a copy of the records +class TlsHeaderRecorder : public TlsRecordFilter { + public: + virtual PacketFilter::Action FilterRecord(const TlsRecordHeader& header, + const DataBuffer& input, + DataBuffer* output); + const TlsRecordHeader* header(size_t index); + + private: + std::vector<TlsRecordHeader> headers_; }; // Runs multiple packet filters in series. 
diff --git a/security/nss/lib/certdb/alg1485.c b/security/nss/lib/certdb/alg1485.c index 38b2fe4b51fbdc44b166464b7b8b7f01543b22e6..bab23be1c49a86b08f49cd26ee9083284ec26814 100644 --- a/security/nss/lib/certdb/alg1485.c +++ b/security/nss/lib/certdb/alg1485.c @@ -703,14 +703,19 @@ CERT_GetOidString(const SECItem* oid) return NULL; } + /* If the OID has length 1, we bail. */ + if (oid->len < 2) { + return NULL; + } + /* first will point to the next sequence of bytes to decode */ first = (PRUint8*)oid->data; /* stop points to one past the legitimate data */ stop = &first[oid->len]; /* - * Check for our pseudo-encoded single-digit OIDs - */ + * Check for our pseudo-encoded single-digit OIDs + */ if ((*first == 0x80) && (2 == oid->len)) { /* Funky encoding. The second byte is the number */ rvString = PR_smprintf("%lu", (PRUint32)first[1]); @@ -728,6 +733,10 @@ CERT_GetOidString(const SECItem* oid) break; } } + /* There's no first bit set, so this isn't valid. Bail.*/ + if (last == stop) { + goto unsupported; + } bytesBeforeLast = (unsigned int)(last - first); if (bytesBeforeLast <= 3U) { /* 0-28 bit number */ PRUint32 n = 0; @@ -748,12 +757,12 @@ CERT_GetOidString(const SECItem* oid) CASE(2, 0x7f); CASE(1, 0x7f); case 0: - n |= - last[0] & 0x7f; + n |= last[0] & 0x7f; break; } - if (last[0] & 0x80) + if (last[0] & 0x80) { goto unsupported; + } if (!rvString) { /* This is the first number.. 
decompose it */ diff --git a/security/nss/lib/freebl/config.mk b/security/nss/lib/freebl/config.mk index 918a66363f470fc870e2337c0ec4bcdf901a88a0..f150770965981451a6dc3d5b023fed913209db25 100644 --- a/security/nss/lib/freebl/config.mk +++ b/security/nss/lib/freebl/config.mk @@ -90,7 +90,12 @@ EXTRA_SHARED_LIBS += \ endif endif +ifeq ($(OS_ARCH), Linux) +CFLAGS += -std=gnu99 +endif + ifeq ($(OS_ARCH), Darwin) +CFLAGS += -std=gnu99 EXTRA_SHARED_LIBS += -dylib_file @executable_path/libplc4.dylib:$(DIST)/lib/libplc4.dylib -dylib_file @executable_path/libplds4.dylib:$(DIST)/lib/libplds4.dylib endif diff --git a/security/nss/lib/freebl/freebl.gyp b/security/nss/lib/freebl/freebl.gyp index 8c0d0dcd5d716e3e02b1b106ab16fed4fd8e2a75..b357fb49ecc7cf0df30c382be0c52ce33ce7593a 100644 --- a/security/nss/lib/freebl/freebl.gyp +++ b/security/nss/lib/freebl/freebl.gyp @@ -166,6 +166,7 @@ 'OTHER_CFLAGS': [ '-mpclmul', '-maes', + '-std=gnu99', ], }, }], @@ -232,6 +233,9 @@ 'FREEBL_LOWHASH', 'FREEBL_NO_DEPEND', ], + 'cflags': [ + '-std=gnu99', + ], }], [ 'OS=="linux" or OS=="android"', { 'conditions': [ diff --git a/security/nss/lib/nss/nss.h b/security/nss/lib/nss/nss.h index 61c50054e270133107f9f67c560d2da2e3e7ec68..000d8d9432076cb900875cbb51c6bfb4840d4363 100644 --- a/security/nss/lib/nss/nss.h +++ b/security/nss/lib/nss/nss.h @@ -22,12 +22,12 @@ * The format of the version string should be * "<major version>.<minor version>[.<patch level>[.<build number>]][ <ECC>][ <Beta>]" */ -#define NSS_VERSION "3.32" _NSS_CUSTOMIZED +#define NSS_VERSION "3.33" _NSS_CUSTOMIZED " Beta" #define NSS_VMAJOR 3 -#define NSS_VMINOR 32 +#define NSS_VMINOR 33 #define NSS_VPATCH 0 #define NSS_VBUILD 0 -#define NSS_BETA PR_FALSE +#define NSS_BETA PR_TRUE #ifndef RC_INVOKED diff --git a/security/nss/lib/pki/pki3hack.c b/security/nss/lib/pki/pki3hack.c index 548853970b1c63be9fe45168fe04cf926a33ae2e..fb3110a233ed38a68f6d216a566aaeb3d96fc07a 100644 --- a/security/nss/lib/pki/pki3hack.c +++ 
b/security/nss/lib/pki/pki3hack.c @@ -180,16 +180,18 @@ STAN_RemoveModuleFromDefaultTrustDomain( NSSTrustDomain *td; int i; td = STAN_GetDefaultTrustDomain(); - NSSRWLock_LockWrite(td->tokensLock); for (i = 0; i < module->slotCount; i++) { token = PK11Slot_GetNSSToken(module->slots[i]); if (token) { nssToken_NotifyCertsNotVisible(token); + NSSRWLock_LockWrite(td->tokensLock); nssList_Remove(td->tokenList, token); + NSSRWLock_UnlockWrite(td->tokensLock); PK11Slot_SetNSSToken(module->slots[i], NULL); nssToken_Destroy(token); } } + NSSRWLock_LockWrite(td->tokensLock); nssListIterator_Destroy(td->tokens); td->tokens = nssList_CreateIterator(td->tokenList); NSSRWLock_UnlockWrite(td->tokensLock); diff --git a/security/nss/lib/softoken/pkcs11.c b/security/nss/lib/softoken/pkcs11.c index a594fd501bd8494bcdb8d7888a2703ce0b11c6f2..4e940990e9cd7eacbc0193e78c7687a715c442fc 100644 --- a/security/nss/lib/softoken/pkcs11.c +++ b/security/nss/lib/softoken/pkcs11.c @@ -3566,7 +3566,6 @@ NSC_InitToken(CK_SLOT_ID slotID, CK_CHAR_PTR pPin, { SFTKSlot *slot = sftk_SlotFromID(slotID, PR_FALSE); SFTKDBHandle *handle; - SFTKDBHandle *certHandle; SECStatus rv; unsigned int i; SFTKObject *object; @@ -3614,19 +3613,16 @@ NSC_InitToken(CK_SLOT_ID slotID, CK_CHAR_PTR pPin, } rv = sftkdb_ResetKeyDB(handle); + /* clear the password */ + sftkdb_ClearPassword(handle); + /* update slot->needLogin (should be true now since no password is set) */ + sftk_checkNeedLogin(slot, handle); sftk_freeDB(handle); if (rv != SECSuccess) { return CKR_DEVICE_ERROR; } - /* finally mark all the user certs as non-user certs */ - certHandle = sftk_getCertDB(slot); - if (certHandle == NULL) - return CKR_OK; - - sftk_freeDB(certHandle); - - return CKR_OK; /*is this the right function for not implemented*/ + return CKR_OK; } /* NSC_InitPIN initializes the normal user's PIN. 
*/ diff --git a/security/nss/lib/softoken/sdb.c b/security/nss/lib/softoken/sdb.c index 8690df34ca021392dd3877686f8fef1d68339338..57337e3342aed9e382bbb09e8834816866c92a15 100644 --- a/security/nss/lib/softoken/sdb.c +++ b/security/nss/lib/softoken/sdb.c @@ -1600,7 +1600,7 @@ loser: return error; } -static const char RESET_CMD[] = "DROP TABLE IF EXISTS %s;"; +static const char RESET_CMD[] = "DELETE FROM %s;"; CK_RV sdb_Reset(SDB *sdb) { @@ -1621,17 +1621,19 @@ sdb_Reset(SDB *sdb) goto loser; } - /* delete the key table */ - newStr = sqlite3_mprintf(RESET_CMD, sdb_p->table); - if (newStr == NULL) { - error = CKR_HOST_MEMORY; - goto loser; - } - sqlerr = sqlite3_exec(sqlDB, newStr, NULL, 0, NULL); - sqlite3_free(newStr); + if (tableExists(sqlDB, sdb_p->table)) { + /* delete the contents of the key table */ + newStr = sqlite3_mprintf(RESET_CMD, sdb_p->table); + if (newStr == NULL) { + error = CKR_HOST_MEMORY; + goto loser; + } + sqlerr = sqlite3_exec(sqlDB, newStr, NULL, 0, NULL); + sqlite3_free(newStr); - if (sqlerr != SQLITE_OK) - goto loser; + if (sqlerr != SQLITE_OK) + goto loser; + } /* delete the password entry table */ sqlerr = sqlite3_exec(sqlDB, "DROP TABLE IF EXISTS metaData;", @@ -1866,30 +1868,29 @@ sdb_init(char *dbname, char *table, sdbDataType type, int *inUpdate, * so we use it for the cache (see sdb_buildCache for how it's done).*/ /* - * we decide whether or not to use the cache based on the following input. - * - * NSS_SDB_USE_CACHE environment variable is non-existant or set to - * anything other than "no" or "yes" ("auto", for instance). - * This is the normal case. NSS will measure the performance of access - * to the temp database versus the access to the users passed in - * database location. If the temp database location is "significantly" - * faster we will use the cache. - * - * NSS_SDB_USE_CACHE environment variable is set to "no": cache will not - * be used. 
- * - * NSS_SDB_USE_CACHE environment variable is set to "yes": cache will - * always be used. - * - * It is expected that most applications would use the "auto" selection, - * the environment variable is primarily to simplify testing, and to - * correct potential corner cases where */ + * we decide whether or not to use the cache based on the following input. + * + * NSS_SDB_USE_CACHE environment variable is set to anything other than + * "yes" or "no" (for instance, "auto"): NSS will measure the performance + * of access to the temp database versus the access to the user's + * passed-in database location. If the temp database location is + * "significantly" faster we will use the cache. + * + * NSS_SDB_USE_CACHE environment variable is nonexistent or set to "no": + * cache will not be used. + * + * NSS_SDB_USE_CACHE environment variable is set to "yes": cache will + * always be used. + * + * It is expected that most applications will not need this feature, and + * thus it is disabled by default. 
+ */ env = PR_GetEnvSecure("NSS_SDB_USE_CACHE"); - if (env && PORT_Strcasecmp(env, "no") == 0) { + if (!env || PORT_Strcasecmp(env, "no") == 0) { enableCache = PR_FALSE; - } else if (env && PORT_Strcasecmp(env, "yes") == 0) { + } else if (PORT_Strcasecmp(env, "yes") == 0) { enableCache = PR_TRUE; } else { char *tempDir = NULL; @@ -2035,10 +2036,11 @@ s_open(const char *directory, const char *certPrefix, const char *keyPrefix, { char *env; env = PR_GetEnvSecure("NSS_SDB_USE_CACHE"); - /* If the environment variable is set to yes or no, sdb_init() will - * ignore the value of accessOps, and we can skip the measuring.*/ - if (!env || ((PORT_Strcasecmp(env, "no") != 0) && - (PORT_Strcasecmp(env, "yes") != 0))) { + /* If the environment variable is undefined or set to yes or no, + * sdb_init() will ignore the value of accessOps, and we can skip the + * measuring.*/ + if (env && PORT_Strcasecmp(env, "no") != 0 && + PORT_Strcasecmp(env, "yes") != 0) { accessOps = sdb_measureAccess(directory); } } diff --git a/security/nss/lib/softoken/softkver.h b/security/nss/lib/softoken/softkver.h index bfc16afb227be4d789b8766b0206e5307c7dc2e7..7e2ab84d1a968bd6229085bf4d82c59aee64629f 100644 --- a/security/nss/lib/softoken/softkver.h +++ b/security/nss/lib/softoken/softkver.h @@ -21,11 +21,11 @@ * The format of the version string should be * "<major version>.<minor version>[.<patch level>[.<build number>]][ <ECC>][ <Beta>]" */ -#define SOFTOKEN_VERSION "3.32" SOFTOKEN_ECC_STRING +#define SOFTOKEN_VERSION "3.33" SOFTOKEN_ECC_STRING " Beta" #define SOFTOKEN_VMAJOR 3 -#define SOFTOKEN_VMINOR 32 +#define SOFTOKEN_VMINOR 33 #define SOFTOKEN_VPATCH 0 #define SOFTOKEN_VBUILD 0 -#define SOFTOKEN_BETA PR_FALSE +#define SOFTOKEN_BETA PR_TRUE #endif /* _SOFTKVER_H_ */ diff --git a/security/nss/lib/ssl/SSLerrs.h b/security/nss/lib/ssl/SSLerrs.h index b73fb6bd04b3e9541e8d550ae3270c5ea50dcd01..db8e5e69b2fe815a54ddb0b2bb131be8477ed662 100644 --- a/security/nss/lib/ssl/SSLerrs.h +++ 
b/security/nss/lib/ssl/SSLerrs.h @@ -511,3 +511,12 @@ ER3(SSL_ERROR_DOWNGRADE_WITH_EARLY_DATA, (SSL_ERROR_BASE + 160), ER3(SSL_ERROR_TOO_MUCH_EARLY_DATA, (SSL_ERROR_BASE + 161), "SSL received more early data than permitted.") + +ER3(SSL_ERROR_RX_UNEXPECTED_END_OF_EARLY_DATA, (SSL_ERROR_BASE + 162), + "SSL received an unexpected End of Early Data message.") + +ER3(SSL_ERROR_RX_MALFORMED_END_OF_EARLY_DATA, (SSL_ERROR_BASE + 163), + "SSL received a malformed End of Early Data message.") + +ER3(SSL_ERROR_UNSUPPORTED_EXPERIMENTAL_API, (SSL_ERROR_BASE + 164), + "An experimental API was called, but not supported.") diff --git a/security/nss/lib/ssl/exports.gyp b/security/nss/lib/ssl/exports.gyp index e2123af84c2bbee06c42b1e5405f29aa28cb7ac8..c3b34c6cc3072910aa243be127d189327139bf32 100644 --- a/security/nss/lib/ssl/exports.gyp +++ b/security/nss/lib/ssl/exports.gyp @@ -15,6 +15,7 @@ 'preenc.h', 'ssl.h', 'sslerr.h', + 'sslexp.h', 'sslproto.h', 'sslt.h' ], diff --git a/security/nss/lib/ssl/manifest.mn b/security/nss/lib/ssl/manifest.mn index fbb88baffa520122528cf72da448eed5e3e786fa..4c42e7e3779f545a0ce176a84f6b42a39a205358 100644 --- a/security/nss/lib/ssl/manifest.mn +++ b/security/nss/lib/ssl/manifest.mn @@ -10,6 +10,7 @@ EXPORTS = \ ssl.h \ sslt.h \ sslerr.h \ + sslexp.h \ sslproto.h \ preenc.h \ $(NULL) diff --git a/security/nss/lib/ssl/ssl.def b/security/nss/lib/ssl/ssl.def index 94d3042239df3b0cab68b13a6aac149ff2e9ec56..9a447dbefa834c0534f1393680553aacb2a40dda 100644 --- a/security/nss/lib/ssl/ssl.def +++ b/security/nss/lib/ssl/ssl.def @@ -234,3 +234,9 @@ SSL_AlertSentCallback; ;+ local: ;+*; ;+}; +;+NSS_3.33 { # NSS 3.33 release +;+ global: +SSL_GetExperimentalAPI; +;+ local: +;+*; +;+}; diff --git a/security/nss/lib/ssl/ssl.h b/security/nss/lib/ssl/ssl.h index 7e538ac1fce2ffc1ebdcb2e236aafa89c1ce7540..20244f59b8e79852d87dc5c4b929fc6d98355162 100644 --- a/security/nss/lib/ssl/ssl.h +++ b/security/nss/lib/ssl/ssl.h @@ -1374,6 +1374,13 @@ extern const char 
*NSSSSL_GetVersion(void); */ SSL_IMPORT SECStatus SSL_AuthCertificateComplete(PRFileDesc *fd, PRErrorCode error); + +/* + * This is used to access experimental APIs. Don't call this directly. This is + * used to enable the experimental APIs that are defined in "sslexp.h". + */ +SSL_IMPORT void *SSL_GetExperimentalAPI(const char *name); + SEC_END_PROTOS #endif /* __ssl_h_ */ diff --git a/security/nss/lib/ssl/ssl3con.c b/security/nss/lib/ssl/ssl3con.c index 686ac55140e2193ebceb1fa2160ced3ed81b63fe..c616e2a961bfd1c86dcb021480a9cee7574b9309 100644 --- a/security/nss/lib/ssl/ssl3con.c +++ b/security/nss/lib/ssl/ssl3con.c @@ -1090,7 +1090,8 @@ ssl_ClientReadVersion(sslSocket *ss, PRUint8 **b, unsigned int *len, PORT_SetError(SSL_ERROR_UNSUPPORTED_VERSION); return SECFailure; } - if (temp == tls13_EncodeDraftVersion(SSL_LIBRARY_VERSION_TLS_1_3)) { + if (temp == tls13_EncodeDraftVersion(SSL_LIBRARY_VERSION_TLS_1_3) || (ss->opt.enableAltHandshaketype && + (temp == tls13_EncodeAltDraftVersion(SSL_LIBRARY_VERSION_TLS_1_3)))) { v = SSL_LIBRARY_VERSION_TLS_1_3; } else { v = (SSL3ProtocolVersion)temp; @@ -2977,6 +2978,7 @@ ssl3_FlushHandshakeMessages(sslSocket *ss, PRInt32 flags) ssl_SEND_FLAG_CAP_RECORD_VERSION; PRInt32 count = -1; SECStatus rv; + SSL3ContentType ct = content_handshake; PORT_Assert(ss->opt.noLocks || ssl_HaveSSL3HandshakeLock(ss)); PORT_Assert(ss->opt.noLocks || ssl_HaveXmitBufLock(ss)); @@ -2990,7 +2992,12 @@ ssl3_FlushHandshakeMessages(sslSocket *ss, PRInt32 flags) PORT_SetError(SEC_ERROR_INVALID_ARGS); return SECFailure; } - count = ssl3_SendRecord(ss, NULL, content_handshake, + /* Maybe send the first message with alt handshake type. 
*/ + if (ss->ssl3.hs.altHandshakeType) { + ct = content_alt_handshake; + ss->ssl3.hs.altHandshakeType = PR_FALSE; + } + count = ssl3_SendRecord(ss, NULL, ct, ss->sec.ci.sendBuf.buf, ss->sec.ci.sendBuf.len, flags); if (count < 0) { @@ -9321,7 +9328,7 @@ ssl3_SendServerHello(sslSocket *ss) if (IS_DTLS(ss) && ss->version < SSL_LIBRARY_VERSION_TLS_1_3) { version = dtls_TLSVersionToDTLSVersion(ss->version); } else { - version = tls13_EncodeDraftVersion(ss->version); + version = ss->ssl3.hs.altHandshakeType ? tls13_EncodeAltDraftVersion(ss->version) : tls13_EncodeDraftVersion(ss->version); } rv = ssl3_AppendHandshakeNumber(ss, version, 2); @@ -9752,13 +9759,12 @@ ssl3_HandleCertificateVerify(sslSocket *ss, PRUint8 *b, PRUint32 length, hashAlg = ssl_SignatureSchemeToHashType(sigScheme); - if (hashes->u.pointer_to_hash_input.data) { - rv = ssl3_ComputeHandshakeHash(hashes->u.pointer_to_hash_input.data, - hashes->u.pointer_to_hash_input.len, - hashAlg, &localHashes); - } else { - rv = SECFailure; - } + /* Read from the message buffer, but we need to use only up to the end + * of the previous handshake message. The length of the transcript up to + * that point is saved in |hashes->u.transcriptLen|. */ + rv = ssl3_ComputeHandshakeHash(ss->ssl3.hs.messages.buf, + hashes->u.transcriptLen, + hashAlg, &localHashes); if (rv == SECSuccess) { hashesForVerify = &localHashes; @@ -11658,15 +11664,15 @@ ssl3_HandleHandshakeMessage(sslSocket *ss, PRUint8 *b, PRUint32 length, * additional handshake messages will have been added to the * buffer, e.g. the certificate_verify message itself.) * - * Therefore, we use SSL3Hashes.u.pointer_to_hash_input - * to signal the current state of the buffer. + * Therefore, we use SSL3Hashes.u.transcriptLen to save how much + * data there is and read directly from ss->ssl3.hs.messages + * when calculating the hashes. * * ssl3_HandleCertificateVerify will detect * hashType == handshake_hash_record * and use that information to calculate the hash. 
*/ - hashes.u.pointer_to_hash_input.data = ss->ssl3.hs.messages.buf; - hashes.u.pointer_to_hash_input.len = ss->ssl3.hs.messages.len; + hashes.u.transcriptLen = ss->ssl3.hs.messages.len; hashesPtr = &hashes; } else { computeHashes = PR_TRUE; @@ -12729,6 +12735,14 @@ process_it: */ ssl_GetSSL3HandshakeLock(ss); + /* Special case: allow alt content type for TLS 1.3 ServerHello. */ + if ((rType == content_alt_handshake) && + (ss->vrange.max >= SSL_LIBRARY_VERSION_TLS_1_3) && + (ss->ssl3.hs.ws == wait_server_hello) && + (ss->opt.enableAltHandshaketype) && + (!IS_DTLS(ss))) { + rType = content_handshake; + } /* All the functions called in this switch MUST set error code if ** they return SECFailure or SECWouldBlock. */ diff --git a/security/nss/lib/ssl/ssl3prot.h b/security/nss/lib/ssl/ssl3prot.h index dede24438d1bcf6d86208aef51b82a27e9d8b2fc..97487ca6718b25336dcca033f9aada782e711c63 100644 --- a/security/nss/lib/ssl/ssl3prot.h +++ b/security/nss/lib/ssl/ssl3prot.h @@ -41,7 +41,8 @@ typedef enum { content_change_cipher_spec = 20, content_alert = 21, content_handshake = 22, - content_application_data = 23 + content_application_data = 23, + content_alt_handshake = 24 } SSL3ContentType; typedef struct { @@ -235,7 +236,7 @@ typedef struct { union { PRUint8 raw[64]; SSL3HashesIndividually s; - SECItem pointer_to_hash_input; + unsigned int transcriptLen; } u; } SSL3Hashes; diff --git a/security/nss/lib/ssl/sslerr.h b/security/nss/lib/ssl/sslerr.h index 865077cda6fe8b262e873e5d1bbd1f7c3c73e781..0db576bce6eca81e1e8f10dc236306e0090e646b 100644 --- a/security/nss/lib/ssl/sslerr.h +++ b/security/nss/lib/ssl/sslerr.h @@ -246,6 +246,11 @@ typedef enum { SSL_ERROR_MISSING_PSK_KEY_EXCHANGE_MODES = (SSL_ERROR_BASE + 159), SSL_ERROR_DOWNGRADE_WITH_EARLY_DATA = (SSL_ERROR_BASE + 160), SSL_ERROR_TOO_MUCH_EARLY_DATA = (SSL_ERROR_BASE + 161), + SSL_ERROR_RX_UNEXPECTED_END_OF_EARLY_DATA = (SSL_ERROR_BASE + 162), + SSL_ERROR_RX_MALFORMED_END_OF_EARLY_DATA = (SSL_ERROR_BASE + 163), + + 
SSL_ERROR_UNSUPPORTED_EXPERIMENTAL_API = (SSL_ERROR_BASE + 164), + SSL_ERROR_END_OF_LIST /* let the c compiler determine the value of this. */ } SSLErrorCodes; #endif /* NO_SECURITY_ERROR_ENUM */ diff --git a/security/nss/lib/ssl/sslexp.h b/security/nss/lib/ssl/sslexp.h new file mode 100644 index 0000000000000000000000000000000000000000..eee60280a4ad3aec095098e0f1cff46f6c720b1d --- /dev/null +++ b/security/nss/lib/ssl/sslexp.h @@ -0,0 +1,37 @@ +/* + * This file contains prototypes for experimental SSL functions. + * + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef __sslexp_h_ +#define __sslexp_h_ + +#include "ssl.h" +#include "sslerr.h" + +SEC_BEGIN_PROTOS + +/* The functions in this header file are not guaranteed to remain available in + * future NSS versions. Code that uses these functions needs to safeguard + * against the function not being available. */ + +#define SSL_EXPERIMENTAL_API(name, arglist, args) \ + (SSL_GetExperimentalAPI(name) \ + ? ((SECStatus(*) arglist)SSL_GetExperimentalAPI(name))args \ + : SECFailure) + +/* Allow the ServerHello to be record type 24. Experiment to test: + * https://github.com/tlswg/tls13-spec/pull/1051 + * This will either become part of the standard or be disabled + * after we have tested it. 
+ */ +#define SSL_UseAltServerHelloType(fd, enable) \ + SSL_EXPERIMENTAL_API("SSL_UseAltServerHelloType", \ + (PRFileDesc * _fd, PRBool _enable), \ + (fd, enable)) + +SEC_END_PROTOS + +#endif /* __sslexp_h_ */ diff --git a/security/nss/lib/ssl/sslimpl.h b/security/nss/lib/ssl/sslimpl.h index 64694b0df9e930b601ab7c943a2de5fe3ef040c4..0ffaa3a6a64fc7155bb285d975707a0c4d6ef4ba 100644 --- a/security/nss/lib/ssl/sslimpl.h +++ b/security/nss/lib/ssl/sslimpl.h @@ -298,6 +298,7 @@ typedef struct sslOptionsStr { unsigned int requireDHENamedGroups : 1; unsigned int enable0RttData : 1; unsigned int enableShortHeaders : 1; + unsigned int enableAltHandshaketype : 1; } sslOptions; typedef enum { sslHandshakingUndetermined = 0, @@ -883,6 +884,7 @@ typedef struct SSL3HandshakeStateStr { ssl3KEADef kea_def_mutable; /* Used to hold the writable kea_def * we use for TLS 1.3 */ PRBool shortHeaders; /* Assigned if we are doing short headers. */ + PRBool altHandshakeType; /* Assigned if we are doing the wrapped handshake. 
*/ } SSL3HandshakeState; /* diff --git a/security/nss/lib/ssl/sslsock.c b/security/nss/lib/ssl/sslsock.c index 99828c85b19b3cc94cf0fad3f3ae9dd2bff4547f..9a7880c493d956bc7fee4e97e1ec0e10c18f491d 100644 --- a/security/nss/lib/ssl/sslsock.c +++ b/security/nss/lib/ssl/sslsock.c @@ -11,6 +11,7 @@ #include "cert.h" #include "keyhi.h" #include "ssl.h" +#include "sslexp.h" #include "sslimpl.h" #include "sslproto.h" #include "nspr.h" @@ -80,10 +81,11 @@ static sslOptions ssl_defaults = { PR_FALSE, /* requireDHENamedGroups */ PR_FALSE, /* enable0RttData */ #ifdef NSS_ENABLE_TLS13_SHORT_HEADERS - PR_TRUE /* enableShortHeaders */ + PR_TRUE, /* enableShortHeaders */ #else - PR_FALSE /* enableShortHeaders */ + PR_FALSE, /* enableShortHeaders */ #endif + PR_FALSE /* enableAltHandshaketype */ }; /* @@ -2214,7 +2216,7 @@ ssl3_GetEffectiveVersionPolicy(SSLProtocolVariant variant, return SECSuccess; } -/* +/* * Assumes that rangeParam values are within the supported boundaries, * but should contain all potentially allowed versions, even if they contain * conflicting versions. @@ -3840,3 +3842,48 @@ SSL_CanBypass(CERTCertificate *cert, SECKEYPrivateKey *srvPrivkey, *pcanbypass = PR_FALSE; return SECSuccess; } + +/* Functions that are truly experimental use EXP, functions that are no longer + * experimental use PUB. + * + * When initially defining a new API, add that API here using the EXP() macro + * and name the function with a SSLExp_ prefix. Define the experimental API as + * a macro in sslexp.h using the SSL_EXPERIMENTAL_API() macro defined there. + * + * Once an API is stable and proven, move the macro definition in sslexp.h to a + * proper function declaration in ssl.h. Keeping the function in this list + * ensures that code built against the release that contained the experimental + * API will continue to work; use PUB() to reference the public function. 
+ */ +#define EXP(n) \ + { \ + "SSL_" #n, SSLExp_##n \ + } +#define PUB(n) \ + { \ + "SSL_" #n, SSL_##n \ + } +struct { + const char *const name; + void *function; +} ssl_experimental_functions[] = { +#ifndef SSL_DISABLE_EXPERIMENTAL_API + EXP(UseAltServerHelloType), +#endif + { "", NULL } +}; +#undef EXP +#undef PUB + +void * +SSL_GetExperimentalAPI(const char *name) +{ + unsigned int i; + for (i = 0; i < PR_ARRAY_SIZE(ssl_experimental_functions); ++i) { + if (strcmp(name, ssl_experimental_functions[i].name) == 0) { + return ssl_experimental_functions[i].function; + } + } + PORT_SetError(SSL_ERROR_UNSUPPORTED_EXPERIMENTAL_API); + return NULL; +} diff --git a/security/nss/lib/ssl/tls13con.c b/security/nss/lib/ssl/tls13con.c index 560493848b4f3b9911caefcda7b43bd714b61a79..fff6f71f4e8a308a61ce0869f0ba26968cc1acd9 100644 --- a/security/nss/lib/ssl/tls13con.c +++ b/security/nss/lib/ssl/tls13con.c @@ -4475,6 +4475,17 @@ tls13_EncodeDraftVersion(SSL3ProtocolVersion version) return (PRUint16)version; } +PRUint16 +tls13_EncodeAltDraftVersion(SSL3ProtocolVersion version) +{ +#ifdef TLS_1_3_DRAFT_VERSION + if (version == SSL_LIBRARY_VERSION_TLS_1_3) { + return 0x7a00 | TLS_1_3_DRAFT_VERSION; + } +#endif + return (PRUint16)version; +} + /* Pick the highest version we support that is also advertised. 
*/ SECStatus tls13_NegotiateVersion(sslSocket *ss, const TLSExtension *supported_versions) @@ -4496,6 +4507,7 @@ tls13_NegotiateVersion(sslSocket *ss, const TLSExtension *supported_versions) } for (version = ss->vrange.max; version >= ss->vrange.min; --version) { PRUint16 wire = tls13_EncodeDraftVersion(version); + PRUint16 alt_wire = tls13_EncodeAltDraftVersion(version); unsigned long offset; for (offset = 0; offset < versions.len; offset += 2) { @@ -4505,9 +4517,33 @@ tls13_NegotiateVersion(sslSocket *ss, const TLSExtension *supported_versions) ss->version = version; return SECSuccess; } + if (ss->opt.enableAltHandshaketype && !IS_DTLS(ss) && + supported == alt_wire) { + ss->version = version; + ss->ssl3.hs.altHandshakeType = PR_TRUE; + return SECSuccess; + } } } FATAL_ERROR(ss, SSL_ERROR_UNSUPPORTED_VERSION, protocol_version); return SECFailure; } + +SECStatus +SSLExp_UseAltServerHelloType(PRFileDesc *fd, PRBool enable) +{ + sslSocket *ss; + + ss = ssl_FindSocket(fd); + if (!ss) { + SSL_DBG(("%d: SSL[%d]: bad socket in SSLExp_UseAltServerHelloType", + SSL_GETPID(), fd)); + PORT_SetError(SEC_ERROR_INVALID_ARGS); + return SECFailure; + } + + ss->opt.enableAltHandshaketype = enable; + + return SECSuccess; +} diff --git a/security/nss/lib/ssl/tls13con.h b/security/nss/lib/ssl/tls13con.h index 92eb545b0004b9f5545db8ddd7c14cbe857ed875..899b4490de1e83cfddc4123525ccfe159d9a6d8d 100644 --- a/security/nss/lib/ssl/tls13con.h +++ b/security/nss/lib/ssl/tls13con.h @@ -81,9 +81,10 @@ SECStatus tls13_HandleEndOfEarlyData(sslSocket *ss); SECStatus tls13_HandleEarlyApplicationData(sslSocket *ss, sslBuffer *origBuf); PRBool tls13_ClientAllow0Rtt(const sslSocket *ss, const sslSessionID *sid); PRUint16 tls13_EncodeDraftVersion(SSL3ProtocolVersion version); -PRUint16 tls13_DecodeDraftVersion(PRUint16 version); +PRUint16 tls13_EncodeAltDraftVersion(SSL3ProtocolVersion version); SECStatus tls13_NegotiateVersion(sslSocket *ss, const TLSExtension *supported_versions); SECStatus 
tls13_SendNewSessionTicket(sslSocket *ss); +SECStatus SSLExp_UseAltServerHelloType(PRFileDesc *fd, PRBool enable); #endif /* __tls13con_h_ */ diff --git a/security/nss/lib/ssl/tls13exthandle.c b/security/nss/lib/ssl/tls13exthandle.c index c2ce390fff8a6d05f43209dafe9805f44da23ff5..c7466be81675427778a022c3b69dc9c2a76b723d 100644 --- a/security/nss/lib/ssl/tls13exthandle.c +++ b/security/nss/lib/ssl/tls13exthandle.c @@ -896,6 +896,10 @@ tls13_ClientSendSupportedVersionsXtn(const sslSocket *ss, TLSExtensionData *xtnD extensions_len = 2 + 2 + 1 + 2 * (ss->vrange.max - ss->vrange.min + 1); + if (ss->opt.enableAltHandshaketype && !IS_DTLS(ss)) { + extensions_len += 2; + } + if (maxBytes < (PRUint32)extensions_len) { PORT_Assert(0); return 0; @@ -914,6 +918,15 @@ tls13_ClientSendSupportedVersionsXtn(const sslSocket *ss, TLSExtensionData *xtnD if (rv != SECSuccess) return -1; + if (ss->opt.enableAltHandshaketype && !IS_DTLS(ss)) { + rv = ssl3_ExtAppendHandshakeNumber( + ss, tls13_EncodeAltDraftVersion( + SSL_LIBRARY_VERSION_TLS_1_3), + 2); + if (rv != SECSuccess) + return -1; + } + for (version = ss->vrange.max; version >= ss->vrange.min; --version) { rv = ssl3_ExtAppendHandshakeNumber( ss, tls13_EncodeDraftVersion(version), 2); diff --git a/security/nss/lib/util/nssutil.h b/security/nss/lib/util/nssutil.h index 5d7f22f3589141d8ea9157dfb3740d673cb4803c..921a3127675606ba6cfb60b9d372abe29b72dce8 100644 --- a/security/nss/lib/util/nssutil.h +++ b/security/nss/lib/util/nssutil.h @@ -19,12 +19,12 @@ * The format of the version string should be * "<major version>.<minor version>[.<patch level>[.<build number>]][ <Beta>]" */ -#define NSSUTIL_VERSION "3.32" +#define NSSUTIL_VERSION "3.33 Beta" #define NSSUTIL_VMAJOR 3 -#define NSSUTIL_VMINOR 32 +#define NSSUTIL_VMINOR 33 #define NSSUTIL_VPATCH 0 #define NSSUTIL_VBUILD 0 -#define NSSUTIL_BETA PR_FALSE +#define NSSUTIL_BETA PR_TRUE SEC_BEGIN_PROTOS diff --git a/security/nss/lib/util/secoid.c b/security/nss/lib/util/secoid.c index 
da03b7c06e812dfd9fd03ff86d9a91c03286c26d..a05621c59e5a21502ba880f0155443a61941756a 100644 --- a/security/nss/lib/util/secoid.c +++ b/security/nss/lib/util/secoid.c @@ -1841,13 +1841,11 @@ secoid_FindDynamic(const SECItem *key) { SECOidData *ret = NULL; + NSSRWLock_LockRead(dynOidLock); if (dynOidHash) { - NSSRWLock_LockRead(dynOidLock); - if (dynOidHash) { /* must check it again with lock held. */ - ret = (SECOidData *)PL_HashTableLookup(dynOidHash, key); - } - NSSRWLock_UnlockRead(dynOidLock); + ret = (SECOidData *)PL_HashTableLookup(dynOidHash, key); } + NSSRWLock_UnlockRead(dynOidLock); if (ret == NULL) { PORT_SetError(SEC_ERROR_UNRECOGNIZED_OID); } @@ -1866,14 +1864,12 @@ secoid_FindDynamicByTag(SECOidTag tagnum) } tagNumDiff = tagnum - SEC_OID_TOTAL; - if (dynOidTable) { - NSSRWLock_LockRead(dynOidLock); - if (dynOidTable != NULL && /* must check it again with lock held. */ - tagNumDiff < dynOidEntriesUsed) { - dxo = dynOidTable[tagNumDiff]; - } - NSSRWLock_UnlockRead(dynOidLock); + NSSRWLock_LockRead(dynOidLock); + if (dynOidTable != NULL && + tagNumDiff < dynOidEntriesUsed) { + dxo = dynOidTable[tagNumDiff]; } + NSSRWLock_UnlockRead(dynOidLock); if (dxo == NULL) { PORT_SetError(SEC_ERROR_UNRECOGNIZED_OID); } diff --git a/security/nss/mach b/security/nss/mach index 22546d732b598fd068bac9ced74c707e07c01b2b..3592299e607b02c6bd0f20cf1bc2dca4ff5225e3 100755 --- a/security/nss/mach +++ b/security/nss/mach @@ -20,12 +20,16 @@ cwd = os.path.dirname(os.path.abspath(__file__)) class cfAction(argparse.Action): docker_command = ["docker"] + restorecon = None def __call__(self, parser, args, values, option_string=None): if "noroot" not in values: self.setDockerCommand() else: values.remove("noroot") + files = [os.path.join('/home/worker/nss', + os.path.relpath(os.path.abspath(x), start=cwd)) + for x in values] # First check if we can run docker. 
try: @@ -55,10 +59,12 @@ class cfAction(argparse.Action): self.buildImage(docker_image, cf_docker_folder) command = self.docker_command + [ - 'run', '-v', cwd + ':/home/worker/nss', '--rm', '-ti', docker_image + 'run', '-v', cwd + ':/home/worker/nss:Z', '--rm', '-ti', docker_image ] # The clang format script returns 1 if something's to do. We don't care. - subprocess.call(command + values) + subprocess.call(command + files) + if self.restorecon is not None: + subprocess.call([self.restorecon, '-R', cwd]) def filesChanged(self, path): hash = sha256() @@ -87,6 +93,8 @@ class cfAction(argparse.Action): def setDockerCommand(self): if platform.system() == "Linux": + from distutils.spawn import find_executable + self.restorecon = find_executable('restorecon') self.docker_command = ["sudo"] + self.docker_command @@ -114,6 +122,13 @@ class testAction(argparse.Action): self.runTest(values) +class commandsAction(argparse.Action): + commands = [] + def __call__(self, parser, args, values, option_string=None): + for c in commandsAction.commands: + print(c) + + def parse_arguments(): parser = argparse.ArgumentParser( description='NSS helper script. 
' + @@ -143,6 +158,16 @@ def parse_arguments(): ] parser_test.add_argument( 'test', choices=tests, help="Available tests", action=testAction) + + parser_commands = subparsers.add_parser( + 'mach-commands', + help="list commands") + parser_commands.add_argument( + 'mach-commands', + nargs='*', + action=commandsAction) + + commandsAction.commands = [c for c in subparsers.choices] return parser.parse_args() diff --git a/security/nss/nss.gyp b/security/nss/nss.gyp index e62d28449d8be346b6a44f9c9c2dd4a094694aa5..1727dbe0b966c4a5cafeb0ac819e2573f23b48fd 100644 --- a/security/nss/nss.gyp +++ b/security/nss/nss.gyp @@ -168,6 +168,7 @@ 'gtests/certdb_gtest/certdb_gtest.gyp:certdb_gtest', 'gtests/freebl_gtest/freebl_gtest.gyp:prng_gtest', 'gtests/pk11_gtest/pk11_gtest.gyp:pk11_gtest', + 'gtests/softoken_gtest/softoken_gtest.gyp:softoken_gtest', 'gtests/ssl_gtest/ssl_gtest.gyp:ssl_gtest', 'gtests/util_gtest/util_gtest.gyp:util_gtest', 'gtests/nss_bogo_shim/nss_bogo_shim.gyp:nss_bogo_shim', diff --git a/security/nss/tests/gtests/gtests.sh b/security/nss/tests/gtests/gtests.sh index c785241c4e1692a1147c5675338e1f1ad572ffb3..6de1ee4b72de73e2d0fe6cb993677e01d9b46ecb 100755 --- a/security/nss/tests/gtests/gtests.sh +++ b/security/nss/tests/gtests/gtests.sh @@ -83,7 +83,7 @@ gtest_cleanup() } ################## main ################################################# -GTESTS="prng_gtest certhigh_gtest certdb_gtest der_gtest pk11_gtest util_gtest freebl_gtest" +GTESTS="prng_gtest certhigh_gtest certdb_gtest der_gtest pk11_gtest util_gtest freebl_gtest softoken_gtest" SOURCE_DIR="$PWD"/../.. 
gtest_init $0 gtest_start diff --git a/taskcluster/ci/test/tests.yml b/taskcluster/ci/test/tests.yml index b1ef5e79a30ebd30ad28dec17767203c4c93697b..be4b4141d9f5ff457910d360624da462449ae6fd 100644 --- a/taskcluster/ci/test/tests.yml +++ b/taskcluster/ci/test/tests.yml @@ -802,8 +802,8 @@ mochitest-devtools-chrome: - --mochitest-suite=mochitest-devtools-chrome-chunked instance-size: by-test-platform: - # Bug 1281241: migrating to m3.large instances - linux64-asan/opt: legacy + # Bug 1361476 - try xlarge on asan to see if it avoids OOM + linux64-asan/opt: xlarge default: default # Bug 1296086: high number of intermittents observed with software GL and large instances allow-software-gl-layers: false diff --git a/testing/web-platform/tests/intersection-observer/timestamp.html b/testing/web-platform/tests/intersection-observer/timestamp.html index cffd915a8e7dc19f6d2ccf3eee9eabfc664f834f..644b61194cbcbd6d250d52fb2c0759e8e9f745d2 100644 --- a/testing/web-platform/tests/intersection-observer/timestamp.html +++ b/testing/web-platform/tests/intersection-observer/timestamp.html @@ -87,7 +87,8 @@ function step2() { // Test results are only significant if there's a gap between // top window time and iframe window time. assert_greater_than(topWindowTimeBeforeNotification, iframeWindowTimeAfterNotification, - "Time ranges for top and iframe windows are disjoint."); + "Time ranges for top and iframe windows are disjoint. 
Times: " + + [topWindowTimeOnTestStart, topWindowTimeBeforeCreatingIframe, topWindowTimeBeforeNotification, topWindowTimeAfterNotification, iframeWindowTimeBeforeNotification, iframeWindowTimeAfterNotification]); assert_equals(topWindowEntries.length, 2, "Top window observer has two notifications."); assert_between_inclusive( diff --git a/toolkit/components/extensions/Extension.jsm b/toolkit/components/extensions/Extension.jsm index 2297fadee0722c083af16daeb716e23f606b21f4..72244f53c90bbe5a0de2e1fac276af569e0599a5 100644 --- a/toolkit/components/extensions/Extension.jsm +++ b/toolkit/components/extensions/Extension.jsm @@ -47,6 +47,8 @@ XPCOMUtils.defineLazyPreferenceGetter(this, "processCount", "dom.ipc.processCoun XPCOMUtils.defineLazyModuleGetter(this, "AddonManager", "resource://gre/modules/AddonManager.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "AddonManagerPrivate", + "resource://gre/modules/AddonManager.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "AsyncShutdown", "resource://gre/modules/AsyncShutdown.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "ExtensionAPIs", @@ -750,6 +752,36 @@ const PROXIED_EVENTS = new Set(["test-harness-message", "add-permissions", "remo const shutdownPromises = new Map(); +class BootstrapScope { + install(data, reason) {} + uninstall(data, reason) {} + + startup(data, reason) { + this.extension = new Extension(data, this.BOOTSTRAP_REASON_TO_STRING_MAP[reason]); + return this.extension.startup(); + } + + shutdown(data, reason) { + this.extension.shutdown(this.BOOTSTRAP_REASON_TO_STRING_MAP[reason]); + this.extension = null; + } +} + +XPCOMUtils.defineLazyGetter(BootstrapScope.prototype, "BOOTSTRAP_REASON_TO_STRING_MAP", () => { + const {BOOTSTRAP_REASONS} = AddonManagerPrivate; + + return Object.freeze({ + [BOOTSTRAP_REASONS.APP_STARTUP]: "APP_STARTUP", + [BOOTSTRAP_REASONS.APP_SHUTDOWN]: "APP_SHUTDOWN", + [BOOTSTRAP_REASONS.ADDON_ENABLE]: "ADDON_ENABLE", + [BOOTSTRAP_REASONS.ADDON_DISABLE]: "ADDON_DISABLE", + 
[BOOTSTRAP_REASONS.ADDON_INSTALL]: "ADDON_INSTALL", + [BOOTSTRAP_REASONS.ADDON_UNINSTALL]: "ADDON_UNINSTALL", + [BOOTSTRAP_REASONS.ADDON_UPGRADE]: "ADDON_UPGRADE", + [BOOTSTRAP_REASONS.ADDON_DOWNGRADE]: "ADDON_DOWNGRADE", + }); +}); + // We create one instance of this class per extension. |addonData| // comes directly from bootstrap.js when initializing. this.Extension = class extends ExtensionData { @@ -840,6 +872,10 @@ this.Extension = class extends ExtensionData { /* eslint-enable mozilla/balanced-listeners */ } + static getBootstrapScope(id, file) { + return new BootstrapScope(); + } + get groupFrameLoader() { let frameLoader = this._backgroundPageFrameLoader; for (let view of this.views) { diff --git a/toolkit/components/jsdownloads/src/DownloadHistory.jsm b/toolkit/components/jsdownloads/src/DownloadHistory.jsm index 6fc2489c504eb3918d11edccbf81f6295c6fd068..e929acf32204fe487d65571910fa2f44b7722cec 100644 --- a/toolkit/components/jsdownloads/src/DownloadHistory.jsm +++ b/toolkit/components/jsdownloads/src/DownloadHistory.jsm @@ -19,17 +19,29 @@ this.EXPORTED_SYMBOLS = [ const { classes: Cc, interfaces: Ci, utils: Cu, results: Cr } = Components; +Cu.import("resource://gre/modules/DownloadList.jsm"); Cu.import("resource://gre/modules/Services.jsm"); Cu.import("resource://gre/modules/XPCOMUtils.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "Downloads", + "resource://gre/modules/Downloads.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "OS", + "resource://gre/modules/osfile.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "PlacesUtils", "resource://gre/modules/PlacesUtils.jsm"); +// Places query used to retrieve all history downloads for the related list. 
+const HISTORY_PLACES_QUERY = + "place:transition=" + Ci.nsINavHistoryService.TRANSITION_DOWNLOAD + + "&sort=" + Ci.nsINavHistoryQueryOptions.SORT_BY_DATE_ASCENDING; + +const DESTINATIONFILEURI_ANNO = "downloads/destinationFileURI"; const METADATA_ANNO = "downloads/metaData"; const METADATA_STATE_FINISHED = 1; const METADATA_STATE_FAILED = 2; const METADATA_STATE_CANCELED = 3; +const METADATA_STATE_PAUSED = 4; const METADATA_STATE_BLOCKED_PARENTAL = 6; const METADATA_STATE_DIRTY = 8; @@ -38,6 +50,26 @@ const METADATA_STATE_DIRTY = 8; * downloads for future sessions. */ this.DownloadHistory = { + /** + * Retrieves the main DownloadHistoryList object which provides a view on + * downloads from previous browsing sessions, as well as downloads from this + * session that were not started from a private browsing window. + * + * @return {Promise} + * @resolves The requested DownloadHistoryList object. + * @rejects JavaScript exception. + */ + getList() { + if (!this._promiseList) { + this._promiseList = Downloads.getList(Downloads.PUBLIC).then(list => { + return new DownloadHistoryList(list, HISTORY_PLACES_QUERY); + }); + } + + return this._promiseList; + }, + _promiseList: null, + /** * Stores new detailed metadata for the given download in history. This is * normally called after a download finishes, fails, or is canceled. @@ -88,4 +120,581 @@ this.DownloadHistory = { Cu.reportError(ex); } }, + + /** + * Reads current metadata from Places annotations for the specified URI, and + * returns an object with the format: + * + * { targetFileSpec, state, endTime, fileSize, ... } + * + * The targetFileSpec property is the value of "downloads/destinationFileURI", + * while the other properties are taken from "downloads/metaData". Any of the + * properties may be missing from the object. 
+ */ + getPlacesMetaDataFor(spec) { + let metaData = {}; + + try { + let uri = Services.io.newURI(spec); + try { + metaData = JSON.parse(PlacesUtils.annotations.getPageAnnotation( + uri, METADATA_ANNO)); + } catch (ex) {} + metaData.targetFileSpec = PlacesUtils.annotations.getPageAnnotation( + uri, DESTINATIONFILEURI_ANNO); + } catch (ex) {} + + return metaData; + }, +}; + +/** + * This cache exists in order to optimize the load of DownloadsHistoryList, when + * Places annotations for history downloads must be read. In fact, annotations + * are stored in a single table, and reading all of them at once is much more + * efficient than an individual query. + * + * When this property is first requested, it reads the annotations for all the + * history downloads and stores them indefinitely. + * + * The historical annotations are not expected to change for the duration of the + * session, except in the case where a session download is running for the same + * URI as a history download. To avoid using stale data, consumers should + * permanently remove from the cache any URI corresponding to a session + * download. This is a very small mumber compared to history downloads. + * + * This property returns a Map from each download source URI found in Places + * annotations to an object with the format: + * + * { targetFileSpec, state, endTime, fileSize, ... } + * + * The targetFileSpec property is the value of "downloads/destinationFileURI", + * while the other properties are taken from "downloads/metaData". Any of the + * properties may be missing from the object. + */ +XPCOMUtils.defineLazyGetter(this, "gCachedPlacesMetaData", function() { + let placesMetaData = new Map(); + + // Read the metadata annotations first, but ignore invalid JSON. 
+ for (let result of PlacesUtils.annotations.getAnnotationsWithName( + METADATA_ANNO)) { + try { + placesMetaData.set(result.uri.spec, JSON.parse(result.annotationValue)); + } catch (ex) {} + } + + // Add the target file annotations to the metadata. + for (let result of PlacesUtils.annotations.getAnnotationsWithName( + DESTINATIONFILEURI_ANNO)) { + let metaData = placesMetaData.get(result.uri.spec); + if (!metaData) { + metaData = {}; + placesMetaData.set(result.uri.spec, metaData); + } + metaData.targetFileSpec = result.annotationValue; + } + + return placesMetaData; +}); + +/** + * Represents a download from the browser history. This object implements part + * of the interface of the Download object. + * + * While Download objects are shared between the public DownloadList and all the + * DownloadHistoryList instances, multiple HistoryDownload objects referring to + * the same item can be created for different DownloadHistoryList instances. + * + * @param placesNode + * The Places node from which the history download should be initialized. + */ +function HistoryDownload(placesNode) { + this.placesNode = placesNode; + + // History downloads should get the referrer from Places (bug 829201). + this.source = { + url: placesNode.uri, + isPrivate: false, + }; + this.target = { + path: undefined, + exists: false, + size: undefined, + }; + + // In case this download cannot obtain its end time from the Places metadata, + // use the time from the Places node, that is the start time of the download. + this.endTime = placesNode.time / 1000; +} + +HistoryDownload.prototype = { + /** + * DownloadSlot containing this history download. + */ + slot: null, + + /** + * Pushes information from Places metadata into this object. 
+ */ + updateFromMetaData(metaData) { + try { + this.target.path = Cc["@mozilla.org/network/protocol;1?name=file"] + .getService(Ci.nsIFileProtocolHandler) + .getFileFromURLSpec(metaData.targetFileSpec).path; + } catch (ex) { + this.target.path = undefined; + } + + if ("state" in metaData) { + this.succeeded = metaData.state == METADATA_STATE_FINISHED; + this.canceled = metaData.state == METADATA_STATE_CANCELED || + metaData.state == METADATA_STATE_PAUSED; + this.endTime = metaData.endTime; + + // Recreate partial error information from the state saved in history. + if (metaData.state == METADATA_STATE_FAILED) { + this.error = { message: "History download failed." }; + } else if (metaData.state == METADATA_STATE_BLOCKED_PARENTAL) { + this.error = { becauseBlockedByParentalControls: true }; + } else if (metaData.state == METADATA_STATE_DIRTY) { + this.error = { + becauseBlockedByReputationCheck: true, + reputationCheckVerdict: metaData.reputationCheckVerdict || "", + }; + } else { + this.error = null; + } + + // Normal history downloads are assumed to exist until the user interface + // is refreshed, at which point these values may be updated. + this.target.exists = true; + this.target.size = metaData.fileSize; + } else { + // Metadata might be missing from a download that has started but hasn't + // stopped already. Normally, this state is overridden with the one from + // the corresponding in-progress session download. But if the browser is + // terminated abruptly and additionally the file with information about + // in-progress downloads is lost, we may end up using this state. We use + // the failed state to allow the download to be restarted. + // + // On the other hand, if the download is missing the target file + // annotation as well, it is just a very old one, and we can assume it + // succeeded. + this.succeeded = !this.target.path; + this.error = this.target.path ? { message: "Unstarted download." 
} : null; + this.canceled = false; + + // These properties may be updated if the user interface is refreshed. + this.target.exists = false; + this.target.size = undefined; + } + }, + + /** + * History downloads are never in progress. + */ + stopped: true, + + /** + * No percentage indication is shown for history downloads. + */ + hasProgress: false, + + /** + * History downloads cannot be restarted using their partial data, even if + * they are indicated as paused in their Places metadata. The only way is to + * use the information from a persisted session download, that will be shown + * instead of the history download. In case this session download is not + * available, we show the history download as canceled, not paused. + */ + hasPartialData: false, + + /** + * This method may be called when deleting a history download. + */ + async finalize() {}, + + /** + * This method mimics the "refresh" method of session downloads. + */ + async refresh() { + try { + this.target.size = (await OS.File.stat(this.target.path)).size; + this.target.exists = true; + } catch (ex) { + // We keep the known file size from the metadata, if any. + this.target.exists = false; + } + + this.slot.list._notifyAllViews("onDownloadChanged", this); + }, +}; + +/** + * Represents one item in the list of public session and history downloads. + * + * The object may contain a session download, a history download, or both. When + * both a history and a session download are present, the session download gets + * priority and its information is accessed. + * + * @param list + * The DownloadHistoryList that owns this DownloadSlot object. + */ +function DownloadSlot(list) { + this.list = list; +} + +DownloadSlot.prototype = { + list: null, + + /** + * Download object representing the session download contained in this slot. + */ + sessionDownload: null, + + /** + * HistoryDownload object contained in this slot. 
+ */ + get historyDownload() { + return this._historyDownload; + }, + set historyDownload(historyDownload) { + this._historyDownload = historyDownload; + if (historyDownload) { + historyDownload.slot = this; + } + }, + _historyDownload: null, + + /** + * Returns the Download or HistoryDownload object for displaying information + * and executing commands in the user interface. + */ + get download() { + return this.sessionDownload || this.historyDownload; + }, +}; + +/** + * Represents an ordered collection of DownloadSlot objects containing a merged + * view on session downloads and history downloads. Views on this list will + * receive notifications for changes to both types of downloads. + * + * Downloads in this list are sorted from oldest to newest, with all session + * downloads after all the history downloads. When a new history download is + * added and the list also contains session downloads, the insertBefore option + * of the onDownloadAdded notification refers to the first session download. + * + * The list of downloads cannot be modified using the DownloadList methods. + * + * @param publicList + * Underlying DownloadList containing public downloads. + * @param place + * Places query used to retrieve history downloads. + */ +this.DownloadHistoryList = function(publicList, place) { + DownloadList.call(this); + + // While "this._slots" contains all the data in order, the other properties + // provide fast access for the most common operations. + this._slots = []; + this._slotsForUrl = new Map(); + this._slotForDownload = new WeakMap(); + + // Start the asynchronous queries to retrieve history and session downloads. 
+ publicList.addView(this).catch(Cu.reportError); + let queries = {}, options = {}; + PlacesUtils.history.queryStringToQueries(place, queries, {}, options); + if (!queries.value.length) { + queries.value = [PlacesUtils.history.getNewQuery()]; + } + + let result = PlacesUtils.history.executeQueries(queries.value, + queries.value.length, + options.value); + result.addObserver(this); +} + +this.DownloadHistoryList.prototype = { + __proto__: DownloadList.prototype, + + /** + * This is set when executing the Places query. + */ + get result() { + return this._result; + }, + set result(result) { + if (this._result == result) { + return; + } + + if (this._result) { + PlacesUtils.annotations.removeObserver(this); + this._result.removeObserver(this); + this._result.root.containerOpen = false; + } + + this._result = result; + + if (this._result) { + this._result.root.containerOpen = true; + PlacesUtils.annotations.addObserver(this); + } + }, + _result: null, + + /** + * Index of the first slot that contains a session download. This is equal to + * the length of the list when there are no session downloads. + */ + _firstSessionSlotIndex: 0, + + _insertSlot({ slot, index, slotsForUrl }) { + // Add the slot to the ordered array. + this._slots.splice(index, 0, slot); + this._downloads.splice(index, 0, slot.download); + if (!slot.sessionDownload) { + this._firstSessionSlotIndex++; + } + + // Add the slot to the fast access maps. + slotsForUrl.add(slot); + this._slotsForUrl.set(slot.download.source.url, slotsForUrl); + + // Add the associated view items. + this._notifyAllViews("onDownloadAdded", slot.download, { + insertBefore: this._downloads[index + 1], + }); + }, + + _removeSlot({ slot, slotsForUrl }) { + // Remove the slot from the ordered array. 
+ let index = this._slots.indexOf(slot); + this._slots.splice(index, 1); + this._downloads.splice(index, 1); + if (this._firstSessionSlotIndex > index) { + this._firstSessionSlotIndex--; + } + + // Remove the slot from the fast access maps. + slotsForUrl.delete(slot); + if (slotsForUrl.size == 0) { + this._slotsForUrl.delete(slot.download.source.url); + } + + // Remove the associated view items. + this._notifyAllViews("onDownloadRemoved", slot.download); + }, + + /** + * Ensures that the information about a history download is stored in at least + * one slot, adding a new one at the end of the list if necessary. + * + * A reference to the same Places node will be stored in the HistoryDownload + * object for all the DownloadSlot objects associated with the source URL. + * + * @param placesNode + * The Places node that represents the history download. + */ + _insertPlacesNode(placesNode) { + let slotsForUrl = this._slotsForUrl.get(placesNode.uri) || new Set(); + + // If there are existing slots associated with this URL, we only have to + // ensure that the Places node reference is kept updated in case the more + // recent Places notification contained a different node object. + if (slotsForUrl.size > 0) { + for (let slot of slotsForUrl) { + if (!slot.historyDownload) { + slot.historyDownload = new HistoryDownload(placesNode); + } else { + slot.historyDownload.placesNode = placesNode; + } + } + return; + } + + // If there are no existing slots for this URL, we have to create a new one. + // Since the history download is visible in the slot, we also have to update + // the object using the Places metadata. 
+ let historyDownload = new HistoryDownload(placesNode); + historyDownload.updateFromMetaData( + gCachedPlacesMetaData.get(placesNode.uri) || + DownloadHistory.getPlacesMetaDataFor(placesNode.uri)); + let slot = new DownloadSlot(this); + slot.historyDownload = historyDownload; + this._insertSlot({ slot, slotsForUrl, index: this._firstSessionSlotIndex }); + }, + + // nsINavHistoryResultObserver + containerStateChanged(node, oldState, newState) { + this.invalidateContainer(node); + }, + + // nsINavHistoryResultObserver + invalidateContainer(container) { + this._notifyAllViews("onDownloadBatchStarting"); + + // Remove all the current slots containing only history downloads. + for (let index = this._slots.length - 1; index >= 0; index--) { + let slot = this._slots[index]; + if (slot.sessionDownload) { + // The visible data doesn't change, so we don't have to notify views. + slot.historyDownload = null; + } else { + let slotsForUrl = this._slotsForUrl.get(slot.download.source.url); + this._removeSlot({ slot, slotsForUrl }); + } + } + + // Add new slots or reuse existing ones for history downloads. + for (let index = 0; index < container.childCount; index++) { + try { + this._insertPlacesNode(container.getChild(index)); + } catch (ex) { + Cu.reportError(ex); + } + } + + this._notifyAllViews("onDownloadBatchEnded"); + }, + + // nsINavHistoryResultObserver + nodeInserted(parent, placesNode) { + this._insertPlacesNode(placesNode); + }, + + // nsINavHistoryResultObserver + nodeRemoved(parent, placesNode, aOldIndex) { + let slotsForUrl = this._slotsForUrl.get(placesNode.uri); + for (let slot of slotsForUrl) { + if (slot.sessionDownload) { + // The visible data doesn't change, so we don't have to notify views. 
+ slot.historyDownload = null; + } else { + this._removeSlot({ slot, slotsForUrl }); + } + } + }, + + // nsINavHistoryResultObserver + nodeAnnotationChanged() {}, + nodeIconChanged() {}, + nodeTitleChanged() {}, + nodeKeywordChanged() {}, + nodeDateAddedChanged() {}, + nodeLastModifiedChanged() {}, + nodeHistoryDetailsChanged() {}, + nodeTagsChanged() {}, + sortingChanged() {}, + nodeMoved() {}, + nodeURIChanged() {}, + batching() {}, + + // nsIAnnotationObserver + onPageAnnotationSet(page, name) { + // Annotations can only be added after a history node has been added, so we + // have to listen for changes to nodes we already added to the list. + if (name != DESTINATIONFILEURI_ANNO && name != METADATA_ANNO) { + return; + } + + let slotsForUrl = this._slotsForUrl.get(page.spec); + if (!slotsForUrl) { + return; + } + + for (let slot of slotsForUrl) { + if (slot.sessionDownload) { + // The visible data doesn't change, so we don't have to notify views. + return; + } + slot.historyDownload.updateFromMetaData( + DownloadHistory.getPlacesMetaDataFor(page.spec)); + this._notifyAllViews("onDownloadChanged", slot.download); + } + }, + + // nsIAnnotationObserver + onItemAnnotationSet() {}, + onPageAnnotationRemoved() {}, + onItemAnnotationRemoved() {}, + + // DownloadList callback + onDownloadAdded(download) { + let url = download.source.url; + let slotsForUrl = this._slotsForUrl.get(url) || new Set(); + + // When a session download is attached to a slot, we ensure not to keep + // stale metadata around for the corresponding history download. This + // prevents stale state from being used if the view is rebuilt. + // + // Note that we will eagerly load the data in the cache at this point, even + // if we have seen no history download. The case where no history download + // will appear at all is rare enough in normal usage, so we can apply this + // simpler solution rather than keeping a list of cache items to ignore. 
+ gCachedPlacesMetaData.delete(url); + + // For every source URL, there can be at most one slot containing a history + // download without an associated session download. If we find one, then we + // can reuse it for the current session download, although we have to move + // it together with the other session downloads. + let slot = [...slotsForUrl][0]; + if (slot && !slot.sessionDownload) { + // Remove the slot because we have to change its position. + this._removeSlot({ slot, slotsForUrl }); + } else { + slot = new DownloadSlot(this); + } + slot.sessionDownload = download; + this._insertSlot({ slot, slotsForUrl, index: this._slots.length }); + this._slotForDownload.set(download, slot); + }, + + // DownloadList callback + onDownloadChanged(download) { + let slot = this._slotForDownload.get(download); + this._notifyAllViews("onDownloadChanged", slot.download); + }, + + // DownloadList callback + onDownloadRemoved(download) { + let url = download.source.url; + let slotsForUrl = this._slotsForUrl.get(url); + let slot = this._slotForDownload.get(download); + this._removeSlot({ slot, slotsForUrl }); + + // If there was only one slot for this source URL and it also contained a + // history download, we should resurrect it in the correct area of the list. + if (slotsForUrl.size == 0 && slot.historyDownload) { + // We have one download slot containing both a session download and a + // history download, and we are now removing the session download. + // Previously, we did not use the Places metadata because it was obscured + // by the session download. Since this is no longer the case, we have to + // read the latest metadata before resurrecting the history download. + slot.historyDownload.updateFromMetaData( + DownloadHistory.getPlacesMetaDataFor(url)); + slot.sessionDownload = null; + // Place the resurrected history slot after all the session slots. 
+ this._insertSlot({ slot, slotsForUrl, + index: this._firstSessionSlotIndex }); + } + + this._slotForDownload.delete(download); + }, + + // DownloadList + add() { + throw new Error("Not implemented."); + }, + + // DownloadList + remove() { + throw new Error("Not implemented."); + }, + + // DownloadList + removeFinished() { + throw new Error("Not implemented."); + }, }; diff --git a/toolkit/components/jsdownloads/src/DownloadList.jsm b/toolkit/components/jsdownloads/src/DownloadList.jsm index 1eca4f7c737e47c0dbf7a4f60ee94ef7347b6a0d..a6c7a7db5515fd3333909efbd3d21ec8997f014b 100644 --- a/toolkit/components/jsdownloads/src/DownloadList.jsm +++ b/toolkit/components/jsdownloads/src/DownloadList.jsm @@ -178,18 +178,17 @@ this.DownloadList.prototype = { }, /** - * Notifies all the views of a download addition, change, or removal. + * Notifies all the views of a download addition, change, removal, or other + * event. The additional arguments are passed to the called method. * - * @param aMethodName + * @param methodName * String containing the name of the method to call on the view. - * @param aDownload - * The Download object that changed. */ - _notifyAllViews(aMethodName, aDownload) { + _notifyAllViews(methodName, ...args) { for (let view of this._views) { try { - if (aMethodName in view) { - view[aMethodName](aDownload); + if (methodName in view) { + view[methodName](...args); } } catch (ex) { Cu.reportError(ex); diff --git a/toolkit/components/jsdownloads/test/unit/common_test_Download.js b/toolkit/components/jsdownloads/test/unit/common_test_Download.js index ccf6e7790cea489a400f3265e1844c1eb2f86820..8d8432ecedbe93655e66478df6bf35358212fc7f 100644 --- a/toolkit/components/jsdownloads/test/unit/common_test_Download.js +++ b/toolkit/components/jsdownloads/test/unit/common_test_Download.js @@ -2317,7 +2317,7 @@ add_task(async function test_history() { mustInterruptResponses(); // We will wait for the visit to be notified during the download. 
- await PlacesTestUtils.clearHistory(); + await PlacesUtils.history.clear(); let promiseVisit = promiseWaitForVisit(httpUrl("interruptible.txt")); // Start a download that is not allowed to finish yet. @@ -2329,7 +2329,7 @@ add_task(async function test_history() { do_check_eq(transitionType, Ci.nsINavHistoryService.TRANSITION_DOWNLOAD); // Restart and complete the download after clearing history. - await PlacesTestUtils.clearHistory(); + await PlacesUtils.history.clear(); download.cancel(); continueResponses(); await download.start(); @@ -2344,7 +2344,7 @@ add_task(async function test_history() { */ add_task(async function test_history_tryToKeepPartialData() { // We will wait for the visit to be notified during the download. - await PlacesTestUtils.clearHistory(); + await PlacesUtils.history.clear(); let promiseVisit = promiseWaitForVisit(httpUrl("interruptible_resumable.txt")); diff --git a/toolkit/components/jsdownloads/test/unit/head.js b/toolkit/components/jsdownloads/test/unit/head.js index c5c818931d0f63111b93beaaf57ad165b73a1a21..9b2510f549b92b7d677090378a95e3a7f6647a01 100644 --- a/toolkit/components/jsdownloads/test/unit/head.js +++ b/toolkit/components/jsdownloads/test/unit/head.js @@ -29,8 +29,6 @@ XPCOMUtils.defineLazyModuleGetter(this, "HttpServer", "resource://testing-common/httpd.js"); XPCOMUtils.defineLazyModuleGetter(this, "NetUtil", "resource://gre/modules/NetUtil.jsm"); -XPCOMUtils.defineLazyModuleGetter(this, "PlacesTestUtils", - "resource://testing-common/PlacesTestUtils.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "PlacesUtils", "resource://gre/modules/PlacesUtils.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "Promise", diff --git a/toolkit/components/jsdownloads/test/unit/test_DownloadHistory.js b/toolkit/components/jsdownloads/test/unit/test_DownloadHistory.js new file mode 100644 index 0000000000000000000000000000000000000000..f4e73ba481676382322d0f1d205af22c6c6d583d --- /dev/null +++ 
b/toolkit/components/jsdownloads/test/unit/test_DownloadHistory.js @@ -0,0 +1,226 @@ +/* Any copyright is dedicated to the Public Domain. + * http://creativecommons.org/publicdomain/zero/1.0/ */ + +/** + * Tests the DownloadHistory module. + */ + +"use strict"; + +Cu.import("resource://gre/modules/DownloadHistory.jsm"); + +XPCOMUtils.defineLazyServiceGetter(this, "gDownloadHistory", + "@mozilla.org/browser/download-history;1", + Ci.nsIDownloadHistory); + +let baseDate = new Date("2000-01-01"); + +/** + * Waits for the download annotations to be set for the given page, required + * because the addDownload method will add these to the database asynchronously. + */ +function waitForAnnotations(sourceUriSpec) { + let sourceUri = Services.io.newURI(sourceUriSpec); + let destinationFileUriSet = false; + let metaDataSet = false; + return new Promise(resolve => { + PlacesUtils.annotations.addObserver({ + onPageAnnotationSet(page, name) { + if (!page.equals(sourceUri)) { + return; + } + switch (name) { + case "downloads/destinationFileURI": + destinationFileUriSet = true; + break; + case "downloads/metaData": + metaDataSet = true; + break; + } + if (destinationFileUriSet && metaDataSet) { + PlacesUtils.annotations.removeObserver(this); + resolve(); + } + }, + onItemAnnotationSet() {}, + onPageAnnotationRemoved() {}, + onItemAnnotationRemoved() {}, + }); + }); +} + +/** + * Non-fatal assertion used to test whether the downloads in the list already + * match the expected state. + */ +function areEqual(a, b) { + if (a === b) { + Assert.equal(a, b); + return true; + } + do_print(a + " !== " + b); + return false; +} + +/** + * Tests that various operations on session and history downloads are reflected + * by the DownloadHistoryList object, and that the order of results is correct. + */ +add_task(async function test_DownloadHistory() { + // Clean up at the beginning and at the end of the test. 
+ async function cleanup() { + await PlacesUtils.history.clear(); + } + do_register_cleanup(cleanup); + await cleanup(); + + let testDownloads = [ + // History downloads should appear in order at the beginning of the list. + { offset: 10, canceled: true }, + { offset: 20, succeeded: true }, + { offset: 30, error: { becauseSourceFailed: true } }, + { offset: 40, error: { becauseBlockedByParentalControls: true } }, + { offset: 50, error: { becauseBlockedByReputationCheck: true } }, + // Session downloads should show up after all the history downloads, in the + // same order as they were added. + { offset: 45, canceled: true, inSession: true }, + { offset: 35, canceled: true, hasPartialData: true, inSession: true }, + { offset: 55, succeeded: true, inSession: true }, + ]; + const NEXT_OFFSET = 60; + + async function addTestDownload(properties) { + properties.source = { url: httpUrl("source" + properties.offset) }; + let targetFile = getTempFile(TEST_TARGET_FILE_NAME + properties.offset); + properties.target = { path: targetFile.path }; + properties.startTime = new Date(baseDate.getTime() + properties.offset); + + let download = await Downloads.createDownload(properties); + if (properties.inSession) { + await publicList.add(download); + } + + // Add the download to history using the XPCOM service, then use the + // DownloadHistory module to save the associated metadata. + let promiseAnnotations = waitForAnnotations(properties.source.url); + let promiseVisit = promiseWaitForVisit(properties.source.url); + gDownloadHistory.addDownload(Services.io.newURI(properties.source.url), + null, + properties.startTime.getTime() * 1000, + NetUtil.newURI(targetFile)); + await promiseVisit; + DownloadHistory.updateMetaData(download); + await promiseAnnotations; + } + + // Add all the test downloads to history. 
+ let publicList = await promiseNewList(); + for (let properties of testDownloads) { + await addTestDownload(properties); + } + + // This allows waiting for an expected list at various points during the test. + let view = { + downloads: [], + onDownloadAdded(download, options = {}) { + if (options.insertBefore) { + let index = this.downloads.indexOf(options.insertBefore); + this.downloads.splice(index, 0, download); + } else { + this.downloads.push(download); + } + this.checkForExpectedDownloads(); + }, + onDownloadChanged(download) { + this.checkForExpectedDownloads(); + }, + onDownloadRemoved(download) { + let index = this.downloads.indexOf(download); + this.downloads.splice(index, 1); + this.checkForExpectedDownloads(); + }, + checkForExpectedDownloads() { + // Wait for all the expected downloads to be added or removed before doing + // the detailed tests. This is done to avoid creating irrelevant output. + if (this.downloads.length != testDownloads.length) { + return; + } + for (let i = 0; i < this.downloads.length; i++) { + if (this.downloads[i].source.url != testDownloads[i].source.url || + this.downloads[i].target.path != testDownloads[i].target.path) { + return; + } + } + // Check and report the actual state of the downloads. Even if the items + // are in the expected order, the metadata for history downloads might not + // have been updated to the final state yet. + for (let i = 0; i < view.downloads.length; i++) { + let download = view.downloads[i]; + let testDownload = testDownloads[i]; + do_print("Checking download source " + download.source.url + + " with target " + download.target.path); + if (!areEqual(download.succeeded, !!testDownload.succeeded) || + !areEqual(download.canceled, !!testDownload.canceled) || + !areEqual(download.hasPartialData, !!testDownload.hasPartialData) || + !areEqual(!!download.error, !!testDownload.error)) { + return; + } + // If the above properties match, the error details should be correct. 
+ if (download.error) { + if (testDownload.error.becauseSourceFailed) { + Assert.equal(download.error.message, "History download failed."); + } + Assert.equal(download.error.becauseBlockedByParentalControls, + testDownload.error.becauseBlockedByParentalControls); + Assert.equal(download.error.becauseBlockedByReputationCheck, + testDownload.error.becauseBlockedByReputationCheck); + } + } + this.resolveWhenExpected(); + }, + resolveWhenExpected: () => {}, + async waitForExpected() { + let promise = new Promise(resolve => this.resolveWhenExpected = resolve); + this.checkForExpectedDownloads(); + await promise; + }, + }; + + // Initialize DownloadHistoryList only after having added the history and + // session downloads, and check that they are loaded in the correct order. + let list = await DownloadHistory.getList(); + await list.addView(view); + await view.waitForExpected(); + + // Remove a download from history and verify that the change is reflected. + let downloadToRemove = testDownloads[1]; + testDownloads.splice(1, 1); + await PlacesUtils.history.remove(downloadToRemove.source.url); + await view.waitForExpected(); + + // Add a download to history and verify it's placed before session downloads, + // even if the start date is more recent. + let downloadToAdd = { offset: NEXT_OFFSET, canceled: true }; + testDownloads.splice(testDownloads.findIndex(d => d.inSession), 0, + downloadToAdd); + await addTestDownload(downloadToAdd); + await view.waitForExpected(); + + // Add a session download and verify it's placed after all session downloads, + // even if the start date is less recent. + let sessionDownloadToAdd = { offset: 0, inSession: true, succeeded: true }; + testDownloads.push(sessionDownloadToAdd); + await addTestDownload(sessionDownloadToAdd); + await view.waitForExpected(); + + // Add a session download for the same URI without a history entry, and verify + // it's visible and placed after all session downloads. 
+ testDownloads.push(sessionDownloadToAdd); + await publicList.add(await Downloads.createDownload(sessionDownloadToAdd)); + await view.waitForExpected(); + + // Clear history and check that session downloads with partial data remain. + testDownloads = testDownloads.filter(d => d.hasPartialData); + await PlacesUtils.history.clear(); + await view.waitForExpected(); +}); diff --git a/toolkit/components/jsdownloads/test/unit/test_DownloadList.js b/toolkit/components/jsdownloads/test/unit/test_DownloadList.js index bb78ad0c1e509c4f020233678db185607c5af61b..7208b10034aab0061b7744571213c889a606ea6a 100644 --- a/toolkit/components/jsdownloads/test/unit/test_DownloadList.js +++ b/toolkit/components/jsdownloads/test/unit/test_DownloadList.js @@ -341,7 +341,7 @@ add_task(async function test_history_expiration() { // We must replace the visits added while executing the downloads with visits // that are older than 7 days, otherwise they will not be expired. - await PlacesTestUtils.clearHistory(); + await PlacesUtils.history.clear(); await promiseExpirableDownloadVisit(); await promiseExpirableDownloadVisit(httpUrl("interruptible.txt")); @@ -383,7 +383,7 @@ add_task(async function test_history_clear() { await downloadOne.start(); await downloadTwo.start(); - await PlacesTestUtils.clearHistory(); + await PlacesUtils.history.clear(); // Wait for the removal notifications that may still be pending. 
await deferred.promise; diff --git a/toolkit/components/jsdownloads/test/unit/xpcshell.ini b/toolkit/components/jsdownloads/test/unit/xpcshell.ini index 890a9bfbbe64faa27bae0aa0657f09140c74c8ef..c732c6c9ae84cb86e06a8fd3c9934ecac4e2808a 100644 --- a/toolkit/components/jsdownloads/test/unit/xpcshell.ini +++ b/toolkit/components/jsdownloads/test/unit/xpcshell.ini @@ -8,6 +8,7 @@ support-files = common_test_Download.js [test_DownloadCore.js] +[test_DownloadHistory.js] [test_DownloadIntegration.js] [test_DownloadLegacy.js] [test_DownloadList.js] diff --git a/toolkit/components/passwordmgr/test/unit/test_logins_decrypt_failure.js b/toolkit/components/passwordmgr/test/unit/test_logins_decrypt_failure.js index ffbedb4de3f6bc3b3e69680564b8cc01d7e20838..a14e4b057ee35b889766366d04b8518b0d3e4c0d 100644 --- a/toolkit/components/passwordmgr/test/unit/test_logins_decrypt_failure.js +++ b/toolkit/components/passwordmgr/test/unit/test_logins_decrypt_failure.js @@ -20,7 +20,7 @@ function resetMasterPassword() let token = Cc["@mozilla.org/security/pk11tokendb;1"] .getService(Ci.nsIPK11TokenDB).getInternalKeyToken(); token.reset(); - token.changePassword("", ""); + token.initPassword(""); } // Tests diff --git a/toolkit/components/telemetry/EventInfo.h b/toolkit/components/telemetry/EventInfo.h index 86b167f2e0609c3a1f6e2ef75780fd7864a3f11a..c30ad1a2d8de36958d23de4c51e4f28ff4fef689 100644 --- a/toolkit/components/telemetry/EventInfo.h +++ b/toolkit/components/telemetry/EventInfo.h @@ -35,9 +35,9 @@ struct CommonEventInfo { mozilla::Telemetry::Common::RecordedProcessType record_in_processes; // Convenience functions for accessing event strings. 
- const char* expiration_version() const; - const char* category() const; - const char* extra_key(uint32_t index) const; + const nsCString expiration_version() const; + const nsCString category() const; + const nsCString extra_key(uint32_t index) const; }; struct EventInfo { @@ -48,8 +48,8 @@ struct EventInfo { uint32_t method_offset; uint32_t object_offset; - const char* method() const; - const char* object() const; + const nsCString method() const; + const nsCString object() const; }; } // namespace diff --git a/toolkit/components/telemetry/Processes.yaml b/toolkit/components/telemetry/Processes.yaml index fc7a0170c85c0144505a73cc1dc929ddc97910ab..425a8e7cc2cad8471e6d13b4b99e6e355ae1b5be 100644 --- a/toolkit/components/telemetry/Processes.yaml +++ b/toolkit/components/telemetry/Processes.yaml @@ -19,3 +19,8 @@ extension: gpu: gecko_enum: GeckoProcessType_GPU description: This is the compositor or GPU process. +dynamic: + gecko_enum: GeckoProcessType_Default + description: > + This is not a real process, it is used to logically group add-on probes. + It contains data of any probes registered at runtime by add-ons. 
diff --git a/toolkit/components/telemetry/Telemetry.cpp b/toolkit/components/telemetry/Telemetry.cpp index 2c7c66332ea22f2fed0349c02115952dc73792c0..558083033fbad703741fbd82bce9a344291575b1 100644 --- a/toolkit/components/telemetry/Telemetry.cpp +++ b/toolkit/components/telemetry/Telemetry.cpp @@ -1807,12 +1807,20 @@ TelemetryImpl::RecordEvent(const nsACString & aCategory, const nsACString & aMet } NS_IMETHODIMP -TelemetryImpl::SnapshotBuiltinEvents(uint32_t aDataset, bool aClear, JSContext* aCx, +TelemetryImpl::SnapshotEvents(uint32_t aDataset, bool aClear, JSContext* aCx, uint8_t optional_argc, JS::MutableHandleValue aResult) { return TelemetryEvent::CreateSnapshots(aDataset, aClear, aCx, optional_argc, aResult); } +NS_IMETHODIMP +TelemetryImpl::RegisterEvents(const nsACString& aCategory, + JS::Handle<JS::Value> aEventData, + JSContext* cx) +{ + return TelemetryEvent::RegisterEvents(aCategory, aEventData, cx); +} + NS_IMETHODIMP TelemetryImpl::ClearEvents() { diff --git a/toolkit/components/telemetry/TelemetryEvent.cpp b/toolkit/components/telemetry/TelemetryEvent.cpp index 65cfb7f3d099f79a12e467756d4c223c73e2f218..0dd812b2ba3c9dbe2391e098e8f4691bfcea406b 100644 --- a/toolkit/components/telemetry/TelemetryEvent.cpp +++ b/toolkit/components/telemetry/TelemetryEvent.cpp @@ -5,6 +5,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include <prtime.h> +#include <limits> #include "nsITelemetry.h" #include "nsHashKeys.h" #include "nsDataHashtable.h" @@ -19,6 +20,7 @@ #include "nsJSUtils.h" #include "nsXULAppAPI.h" #include "nsUTF8Utils.h" +#include "nsPrintfCString.h" #include "TelemetryCommon.h" #include "TelemetryEvent.h" @@ -89,9 +91,10 @@ namespace { const uint32_t kEventCount = mozilla::Telemetry::EventID::EventCount; // This is a special event id used to mark expired events, to make expiry checks -// faster at runtime. 
-const uint32_t kExpiredEventId = kEventCount + 1; -static_assert(kEventCount < kExpiredEventId, "Should not overflow."); +// cheap at runtime. +const uint32_t kExpiredEventId = std::numeric_limits<uint32_t>::max(); +static_assert(kExpiredEventId > kEventCount, + "Built-in event count should be less than the expired event id."); // This is the hard upper limit on the number of event records we keep in storage. // If we cross this limit, we will drop any further event recording until elements @@ -101,10 +104,60 @@ const uint32_t kMaxEventRecords = 1000; const uint32_t kMaxValueByteLength = 80; // Maximum length of any string value in the extra dictionary, in UTF8 byte sequence length. const uint32_t kMaxExtraValueByteLength = 80; +// Maximum length of dynamic method names, in UTF8 byte sequence length. +const uint32_t kMaxMethodNameByteLength = 20; +// Maximum length of dynamic object names, in UTF8 byte sequence length. +const uint32_t kMaxObjectNameByteLength = 20; +// Maximum length of extra key names, in UTF8 byte sequence length. +const uint32_t kMaxExtraKeyNameByteLength = 15; +// The maximum number of valid extra keys for an event. 
+const uint32_t kMaxExtraKeyCount = 10; typedef nsDataHashtable<nsCStringHashKey, uint32_t> StringUintMap; typedef nsClassHashtable<nsCStringHashKey, nsCString> StringMap; +struct EventKey { + uint32_t id; + bool dynamic; +}; + +struct DynamicEventInfo { + DynamicEventInfo(const nsACString& category, const nsACString& method, + const nsACString& object, const nsTArray<nsCString>& extra_keys, + bool recordOnRelease) + : category(category) + , method(method) + , object(object) + , extra_keys(extra_keys) + , recordOnRelease(recordOnRelease) + {} + + DynamicEventInfo(const DynamicEventInfo&) = default; + DynamicEventInfo& operator=(const DynamicEventInfo&) = delete; + + const nsCString category; + const nsCString method; + const nsCString object; + const nsTArray<nsCString> extra_keys; + const bool recordOnRelease; + + size_t + SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const + { + size_t n = 0; + + n += category.SizeOfExcludingThisIfUnshared(aMallocSizeOf); + n += method.SizeOfExcludingThisIfUnshared(aMallocSizeOf); + n += object.SizeOfExcludingThisIfUnshared(aMallocSizeOf); + n += extra_keys.ShallowSizeOfExcludingThis(aMallocSizeOf); + for (auto& key : extra_keys) { + n += key.SizeOfExcludingThisIfUnshared(aMallocSizeOf); + } + + return n; + } +}; + enum class RecordEventResult { Ok, UnknownEvent, @@ -114,29 +167,29 @@ enum class RecordEventResult { WrongProcess, }; +enum class RegisterEventResult { + Ok, + AlreadyRegistered, +}; + typedef nsTArray<EventExtraEntry> ExtraArray; class EventRecord { public: - EventRecord(double timestamp, uint32_t eventId, const Maybe<nsCString>& value, + EventRecord(double timestamp, const EventKey& key, const Maybe<nsCString>& value, const ExtraArray& extra) : mTimestamp(timestamp) - , mEventId(eventId) + , mEventKey(key) , mValue(value) , mExtra(extra) {} - EventRecord(const EventRecord& other) - : mTimestamp(other.mTimestamp) - , mEventId(other.mEventId) - , mValue(other.mValue) - , mExtra(other.mExtra) - {} + 
EventRecord(const EventRecord& other) = default; EventRecord& operator=(const EventRecord& other) = delete; double Timestamp() const { return mTimestamp; } - uint32_t EventId() const { return mEventId; } + const EventKey& GetEventKey() const { return mEventKey; } const Maybe<nsCString>& Value() const { return mValue; } const ExtraArray& Extra() const { return mExtra; } @@ -144,43 +197,43 @@ public: private: const double mTimestamp; - const uint32_t mEventId; + const EventKey mEventKey; const Maybe<nsCString> mValue; const ExtraArray mExtra; }; // Implements the methods for EventInfo. -const char* +const nsCString EventInfo::method() const { - return &gEventsStringTable[this->method_offset]; + return nsCString(&gEventsStringTable[this->method_offset]); } -const char* +const nsCString EventInfo::object() const { - return &gEventsStringTable[this->object_offset]; + return nsCString(&gEventsStringTable[this->object_offset]); } // Implements the methods for CommonEventInfo. -const char* +const nsCString CommonEventInfo::category() const { - return &gEventsStringTable[this->category_offset]; + return nsCString(&gEventsStringTable[this->category_offset]); } -const char* +const nsCString CommonEventInfo::expiration_version() const { - return &gEventsStringTable[this->expiration_version_offset]; + return nsCString(&gEventsStringTable[this->expiration_version_offset]); } -const char* +const nsCString CommonEventInfo::extra_key(uint32_t index) const { MOZ_ASSERT(index < this->extra_count); uint32_t key_index = gExtraKeysTable[this->extra_index + index]; - return &gEventsStringTable[key_index]; + return nsCString(&gEventsStringTable[key_index]); } // Implementation for the EventRecord class. 
@@ -217,9 +270,17 @@ UniqueEventName(const nsACString& category, const nsACString& method, const nsAC nsCString UniqueEventName(const EventInfo& info) { - return UniqueEventName(nsDependentCString(info.common_info.category()), - nsDependentCString(info.method()), - nsDependentCString(info.object())); + return UniqueEventName(info.common_info.category(), + info.method(), + info.object()); +} + +nsCString +UniqueEventName(const DynamicEventInfo& info) +{ + return UniqueEventName(info.category, + info.method, + info.object); } bool @@ -255,20 +316,23 @@ bool gInitDone = false; bool gCanRecordBase; bool gCanRecordExtended; -// The EventName -> EventID cache map. -StringUintMap gEventNameIDMap(kEventCount); +// The EventName -> EventKey cache map. +nsClassHashtable<nsCStringHashKey, EventKey> gEventNameIDMap(kEventCount); // The CategoryName -> CategoryID cache map. StringUintMap gCategoryNameIDMap; // This tracks the IDs of the categories for which recording is enabled. -nsTHashtable<nsUint32HashKey> gEnabledCategories; +nsTHashtable<nsCStringHashKey> gEnabledCategories; // The main event storage. Events are inserted here, keyed by process id and // in recording order. typedef nsTArray<EventRecord> EventRecordArray; nsClassHashtable<nsUint32HashKey, EventRecordArray> gEventRecords; +// The details on dynamic events that are recorded from addons are registered here. +StaticAutoPtr<nsTArray<DynamicEventInfo>> gDynamicEventInfo; + } // namespace //////////////////////////////////////////////////////////////////////// @@ -278,27 +342,68 @@ nsClassHashtable<nsUint32HashKey, EventRecordArray> gEventRecords; namespace { +unsigned int +GetDataset(const StaticMutexAutoLock& lock, const EventKey& eventKey) +{ + if (!eventKey.dynamic) { + return gEventInfo[eventKey.id].common_info.dataset; + } + + if (!gDynamicEventInfo) { + return nsITelemetry::DATASET_RELEASE_CHANNEL_OPTIN; + } + + return (*gDynamicEventInfo)[eventKey.id].recordOnRelease ? 
+ nsITelemetry::DATASET_RELEASE_CHANNEL_OPTOUT : + nsITelemetry::DATASET_RELEASE_CHANNEL_OPTIN; +} + +nsCString +GetCategory(const StaticMutexAutoLock& lock, const EventKey& eventKey) +{ + if (!eventKey.dynamic) { + return gEventInfo[eventKey.id].common_info.category(); + } + + if (!gDynamicEventInfo) { + return NS_LITERAL_CSTRING(""); + } + + return (*gDynamicEventInfo)[eventKey.id].category; +} + bool -CanRecordEvent(const StaticMutexAutoLock& lock, const CommonEventInfo& info, +CanRecordEvent(const StaticMutexAutoLock& lock, const EventKey& eventKey, ProcessID process) { if (!gCanRecordBase) { return false; } - if (!CanRecordDataset(info.dataset, gCanRecordBase, gCanRecordExtended)) { + if (!CanRecordDataset(GetDataset(lock, eventKey), gCanRecordBase, gCanRecordExtended)) { return false; } - if (!CanRecordInProcess(info.record_in_processes, process)) { - return false; + // We don't allow specifying a process to record in for dynamic events. + if (!eventKey.dynamic) { + const CommonEventInfo& info = gEventInfo[eventKey.id].common_info; + if (!CanRecordInProcess(info.record_in_processes, process)) { + return false; + } } - return gEnabledCategories.GetEntry(info.category_offset); + return gEnabledCategories.GetEntry(GetCategory(lock, eventKey)); +} + +bool +IsExpired(const EventKey& key) +{ + return key.id == kExpiredEventId; } EventRecordArray* -GetEventRecordsForProcess(const StaticMutexAutoLock& lock, ProcessID processType) +GetEventRecordsForProcess(const StaticMutexAutoLock& lock, ProcessID processType, + const EventKey& eventKey) { EventRecordArray* eventRecords = nullptr; if (!gEventRecords.Get(uint32_t(processType), &eventRecords)) { @@ -308,14 +413,41 @@ GetEventRecordsForProcess(const StaticMutexAutoLock& lock, ProcessID processType return eventRecords; } -bool -GetEventId(const StaticMutexAutoLock& lock, const nsACString& category, - const nsACString& method, const nsACString& object, - uint32_t* eventId) +EventKey* +GetEventKey(const 
StaticMutexAutoLock& lock, const nsACString& category, + const nsACString& method, const nsACString& object) { - MOZ_ASSERT(eventId); + EventKey* event; const nsCString& name = UniqueEventName(category, method, object); - return gEventNameIDMap.Get(name, eventId); + if (!gEventNameIDMap.Get(name, &event)) { + return nullptr; + } + return event; +} + +static bool +CheckExtraKeysValid(const EventKey& eventKey, const ExtraArray& extra) +{ + nsTHashtable<nsCStringHashKey> validExtraKeys; + if (!eventKey.dynamic) { + const CommonEventInfo& common = gEventInfo[eventKey.id].common_info; + for (uint32_t i = 0; i < common.extra_count; ++i) { + validExtraKeys.PutEntry(common.extra_key(i)); + } + } else if (gDynamicEventInfo) { + const DynamicEventInfo& info = (*gDynamicEventInfo)[eventKey.id]; + for (uint32_t i = 0, len = info.extra_keys.Length(); i < len; ++i) { + validExtraKeys.PutEntry(info.extra_keys[i]); + } + } + + for (uint32_t i = 0; i < extra.Length(); ++i) { + if (!validExtraKeys.GetEntry(extra[i].key)) { + return false; + } + } + + return true; } RecordEventResult @@ -324,47 +456,43 @@ RecordEvent(const StaticMutexAutoLock& lock, ProcessID processType, const nsACString& method, const nsACString& object, const Maybe<nsCString>& value, const ExtraArray& extra) { - EventRecordArray* eventRecords = GetEventRecordsForProcess(lock, processType); + // Look up the event id. + EventKey* eventKey = GetEventKey(lock, category, method, object); + if (!eventKey) { + return RecordEventResult::UnknownEvent; + } + + if (eventKey->dynamic) { + processType = ProcessID::Dynamic; + } + + EventRecordArray* eventRecords = GetEventRecordsForProcess(lock, processType, *eventKey); // Apply hard limit on event count in storage. if (eventRecords->Length() >= kMaxEventRecords) { return RecordEventResult::StorageLimitReached; } - // Look up the event id. 
- uint32_t eventId; - if (!GetEventId(lock, category, method, object, &eventId)) { - return RecordEventResult::UnknownEvent; - } - // If the event is expired or not enabled for this process, we silently drop this call. // We don't want recording for expired probes to be an error so code doesn't // have to be removed at a specific time or version. // Even logging warnings would become very noisy. - if (eventId == kExpiredEventId) { + if (IsExpired(*eventKey)) { return RecordEventResult::ExpiredEvent; } // Check whether we can record this event. - const CommonEventInfo& common = gEventInfo[eventId].common_info; - if (!CanRecordEvent(lock, common, processType)) { + if (!CanRecordEvent(lock, *eventKey, processType)) { return RecordEventResult::Ok; } // Check whether the extra keys passed are valid. - nsTHashtable<nsCStringHashKey> validExtraKeys; - for (uint32_t i = 0; i < common.extra_count; ++i) { - validExtraKeys.PutEntry(nsDependentCString(common.extra_key(i))); - } - - for (uint32_t i = 0; i < extra.Length(); ++i) { - if (!validExtraKeys.GetEntry(extra[i].key)) { - return RecordEventResult::InvalidExtraKey; - } + if (!CheckExtraKeysValid(*eventKey, extra)) { + return RecordEventResult::InvalidExtraKey; } // Add event record. - eventRecords->AppendElement(EventRecord(timestamp, eventId, value, extra)); + eventRecords->AppendElement(EventRecord(timestamp, *eventKey, value, extra)); return RecordEventResult::Ok; } @@ -372,16 +500,18 @@ RecordEventResult ShouldRecordChildEvent(const StaticMutexAutoLock& lock, const nsACString& category, const nsACString& method, const nsACString& object) { - uint32_t eventId; - if (!GetEventId(lock, category, method, object, &eventId)) { - return RecordEventResult::UnknownEvent; + EventKey* eventKey = GetEventKey(lock, category, method, object); + if (!eventKey) { + // This event is unknown in this process, but it might be a dynamic event + // that was registered in the parent process. 
+ return RecordEventResult::Ok; } - if (eventId == kExpiredEventId) { + if (IsExpired(*eventKey)) { return RecordEventResult::ExpiredEvent; } - const auto processes = gEventInfo[eventId].common_info.record_in_processes; + const auto processes = gEventInfo[eventKey->id].common_info.record_in_processes; if (!CanRecordInProcess(processes, XRE_GetProcessType())) { return RecordEventResult::WrongProcess; } @@ -389,6 +519,37 @@ ShouldRecordChildEvent(const StaticMutexAutoLock& lock, const nsACString& catego return RecordEventResult::Ok; } +RegisterEventResult +RegisterEvents(const StaticMutexAutoLock& lock, const nsACString& category, + const nsTArray<DynamicEventInfo>& eventInfos, + const nsTArray<bool>& eventExpired) +{ + MOZ_ASSERT(eventInfos.Length() == eventExpired.Length(), "Event data array sizes should match."); + + // Check that none of the events are already registered. + for (auto& info : eventInfos) { + if (gEventNameIDMap.Get(UniqueEventName(info))) { + return RegisterEventResult::AlreadyRegistered; + } + } + + // Register the new events. + if (!gDynamicEventInfo) { + gDynamicEventInfo = new nsTArray<DynamicEventInfo>(); + } + + for (uint32_t i = 0, len = eventInfos.Length(); i < len; ++i) { + gDynamicEventInfo->AppendElement(eventInfos[i]); + uint32_t eventId = eventExpired[i] ? kExpiredEventId : gDynamicEventInfo->Length() - 1; + gEventNameIDMap.Put(UniqueEventName(eventInfos[i]), new EventKey{eventId, true}); + } + + // Now after successful registration enable recording for this category. + gEnabledCategories.PutEntry(category); + + return RegisterEventResult::Ok; +} + } // anonymous namespace //////////////////////////////////////////////////////////////////////// @@ -401,7 +562,8 @@ namespace { nsresult SerializeEventsArray(const EventRecordArray& events, JSContext* cx, - JS::MutableHandleObject result) + JS::MutableHandleObject result, + unsigned int dataset) { // We serialize the events to a JS array. 
JS::RootedObject eventsArray(cx, JS_NewArrayObject(cx, events.Length())); @@ -411,7 +573,6 @@ SerializeEventsArray(const EventRecordArray& events, for (uint32_t i = 0; i < events.Length(); ++i) { const EventRecord& record = events[i]; - const EventInfo& info = gEventInfo[record.EventId()]; // Each entry is an array of one of the forms: // [timestamp, category, method, object, value] @@ -426,12 +587,21 @@ SerializeEventsArray(const EventRecordArray& events, } // Add category, method, object. - const char* strings[] = { - info.common_info.category(), - info.method(), - info.object(), - }; - for (const char* s : strings) { + nsCString strings[3]; + const EventKey& eventKey = record.GetEventKey(); + if (!eventKey.dynamic) { + const EventInfo& info = gEventInfo[eventKey.id]; + strings[0] = info.common_info.category(); + strings[1] = info.method(); + strings[2] = info.object(); + } else if (gDynamicEventInfo) { + const DynamicEventInfo& info = (*gDynamicEventInfo)[eventKey.id]; + strings[0] = info.category; + strings[1] = info.method; + strings[2] = info.object; + } + + for (const nsCString& s : strings) { const NS_ConvertUTF8toUTF16 wide(s); if (!items.append(JS::StringValue(JS_NewUCStringCopyN(cx, wide.Data(), wide.Length())))) { return NS_ERROR_FAILURE; @@ -529,20 +699,19 @@ TelemetryEvent::InitializeGlobalState(bool aCanRecordBase, bool aCanRecordExtend // If this event is expired or not recorded in this process, mark it with // a special event id. // This avoids doing repeated checks at runtime. 
- if (IsExpiredVersion(info.common_info.expiration_version()) || + if (IsExpiredVersion(info.common_info.expiration_version().get()) || IsExpiredDate(info.common_info.expiration_day)) { eventId = kExpiredEventId; } - gEventNameIDMap.Put(UniqueEventName(info), eventId); - if (!gCategoryNameIDMap.Contains(nsDependentCString(info.common_info.category()))) { - gCategoryNameIDMap.Put(nsDependentCString(info.common_info.category()), + gEventNameIDMap.Put(UniqueEventName(info), new EventKey{eventId, false}); + if (!gCategoryNameIDMap.Contains(info.common_info.category())) { + gCategoryNameIDMap.Put(info.common_info.category(), info.common_info.category_offset); } } #ifdef DEBUG - gEventNameIDMap.MarkImmutable(); gCategoryNameIDMap.MarkImmutable(); #endif gInitDone = true; @@ -562,6 +731,8 @@ TelemetryEvent::DeInitializeGlobalState() gEnabledCategories.Clear(); gEventRecords.Clear(); + gDynamicEventInfo = nullptr; + gInitDone = false; } @@ -719,10 +890,14 @@ TelemetryEvent::RecordEvent(const nsACString& aCategory, const nsACString& aMeth PromiseFlatCString(aObject).get()); return NS_ERROR_INVALID_ARG; } - case RecordEventResult::InvalidExtraKey: - LogToBrowserConsole(nsIScriptError::warningFlag, - NS_LITERAL_STRING("Invalid extra key for event.")); + case RecordEventResult::InvalidExtraKey: { + nsPrintfCString msg(R"(Invalid extra key for event ["%s", "%s", "%s"].)", + PromiseFlatCString(aCategory).get(), + PromiseFlatCString(aMethod).get(), + PromiseFlatCString(aObject).get()); + LogToBrowserConsole(nsIScriptError::warningFlag, NS_ConvertUTF8toUTF16(msg)); return NS_OK; + } case RecordEventResult::StorageLimitReached: LogToBrowserConsole(nsIScriptError::warningFlag, NS_LITERAL_STRING("Event storage limit reached.")); @@ -732,6 +907,227 @@ TelemetryEvent::RecordEvent(const nsACString& aCategory, const nsACString& aMeth } } +static bool +GetArrayPropertyValues(JSContext* cx, JS::HandleObject obj, const char* property, + nsTArray<nsCString>* results) +{ + JS::RootedValue 
value(cx); + if (!JS_GetProperty(cx, obj, property, &value)) { + JS_ReportErrorASCII(cx, R"(Missing required property "%s" for event)", property); + return false; + } + + bool isArray = false; + if (!JS_IsArrayObject(cx, value, &isArray) || !isArray) { + JS_ReportErrorASCII(cx, R"(Property "%s" for event should be an array)", property); + return false; + } + + JS::RootedObject arrayObj(cx, &value.toObject()); + uint32_t arrayLength; + if (!JS_GetArrayLength(cx, arrayObj, &arrayLength)) { + return false; + } + + for (uint32_t arrayIdx = 0; arrayIdx < arrayLength; ++arrayIdx) { + JS::Rooted<JS::Value> element(cx); + if (!JS_GetElement(cx, arrayObj, arrayIdx, &element)) { + return false; + } + + if (!element.isString()) { + JS_ReportErrorASCII(cx, R"(Array entries for event property "%s" should be strings)", property); + return false; + } + + nsAutoJSString jsStr; + if (!jsStr.init(cx, element)) { + return false; + } + + results->AppendElement(NS_ConvertUTF16toUTF8(jsStr)); + } + + return true; +} + +static bool +IsStringCharValid(const char aChar, const bool allowInfixPeriod) +{ + return (aChar >= 'A' && aChar <= 'Z') + || (aChar >= 'a' && aChar <= 'z') + || (aChar >= '0' && aChar <= '9') + || (allowInfixPeriod && (aChar == '.')); +} + +static bool +IsValidIdentifierString(const nsACString& str, const size_t maxLength, + const bool allowInfixPeriod) +{ + // Check string length. + if (str.Length() > maxLength) { + return false; + } + + // Check string characters. 
+ const char* first = str.BeginReading(); + const char* end = str.EndReading(); + + for (const char* cur = first; cur < end; ++cur) { + const bool allowPeriod = allowInfixPeriod && (cur != first) && (cur != (end - 1)); + if (!IsStringCharValid(*cur, allowPeriod)) { + return false; + } + } + + return true; +} + +nsresult +TelemetryEvent::RegisterEvents(const nsACString& aCategory, + JS::Handle<JS::Value> aEventData, + JSContext* cx) +{ + if (!IsValidIdentifierString(aCategory, 30, true)) { + JS_ReportErrorASCII(cx, "Category parameter should match the identifier pattern."); + return NS_ERROR_INVALID_ARG; + } + + if (!aEventData.isObject()) { + JS_ReportErrorASCII(cx, "Event data parameter should be an object"); + return NS_ERROR_INVALID_ARG; + } + + JS::RootedObject obj(cx, &aEventData.toObject()); + JS::Rooted<JS::IdVector> eventPropertyIds(cx, JS::IdVector(cx)); + if (!JS_Enumerate(cx, obj, &eventPropertyIds)) { + return NS_ERROR_FAILURE; + } + + // Collect the event data into local storage first. + // Only after successfully validating all contained events will we register them into global storage. + nsTArray<DynamicEventInfo> newEventInfos; + nsTArray<bool> newEventExpired; + + for (size_t i = 0, n = eventPropertyIds.length(); i < n; i++) { + nsAutoJSString eventName; + if (!eventName.init(cx, eventPropertyIds[i])) { + return NS_ERROR_FAILURE; + } + + if (!IsValidIdentifierString(NS_ConvertUTF16toUTF8(eventName), kMaxMethodNameByteLength, false)) { + JS_ReportErrorASCII(cx, "Event names should match the identifier pattern."); + return NS_ERROR_INVALID_ARG; + } + + JS::RootedValue value(cx); + if (!JS_GetPropertyById(cx, obj, eventPropertyIds[i], &value) || !value.isObject()) { + return NS_ERROR_FAILURE; + } + JS::RootedObject eventObj(cx, &value.toObject()); + + // Extract the event registration data. 
+ nsTArray<nsCString> methods; + nsTArray<nsCString> objects; + nsTArray<nsCString> extra_keys; + bool expired = false; + bool recordOnRelease = false; + + // The methods & objects properties are required. + if (!GetArrayPropertyValues(cx, eventObj, "methods", &methods)) { + return NS_ERROR_FAILURE; + } + + if (!GetArrayPropertyValues(cx, eventObj, "objects", &objects)) { + return NS_ERROR_FAILURE; + } + + // extra_keys is optional. + bool hasProperty = false; + if (JS_HasProperty(cx, eventObj, "extra_keys", &hasProperty) && hasProperty) { + if (!GetArrayPropertyValues(cx, eventObj, "extra_keys", &extra_keys)) { + return NS_ERROR_FAILURE; + } + } + + // expired is optional. + if (JS_HasProperty(cx, eventObj, "expired", &hasProperty) && hasProperty) { + JS::RootedValue temp(cx); + if (!JS_GetProperty(cx, eventObj, "expired", &temp) || !temp.isBoolean()) { + return NS_ERROR_FAILURE; + } + + expired = temp.toBoolean(); + } + + // record_on_release is optional. + if (JS_HasProperty(cx, eventObj, "record_on_release", &hasProperty) && hasProperty) { + JS::RootedValue temp(cx); + if (!JS_GetProperty(cx, eventObj, "record_on_release", &temp) || !temp.isBoolean()) { + return NS_ERROR_FAILURE; + } + + recordOnRelease = temp.toBoolean(); + } + + // Validate methods. + for (auto& method : methods) { + if (!IsValidIdentifierString(method, kMaxMethodNameByteLength, false)) { + JS_ReportErrorASCII(cx, "Method names should match the identifier pattern."); + return NS_ERROR_INVALID_ARG; + } + } + + // Validate objects. + for (auto& object : objects) { + if (!IsValidIdentifierString(object, kMaxObjectNameByteLength, false)) { + JS_ReportErrorASCII(cx, "Object names should match the identifier pattern."); + return NS_ERROR_INVALID_ARG; + } + } + + // Validate extra keys. 
+ if (extra_keys.Length() > kMaxExtraKeyCount) { + JS_ReportErrorASCII(cx, "No more than 10 extra keys can be registered."); + return NS_ERROR_INVALID_ARG; + } + for (auto& key : extra_keys) { + if (!IsValidIdentifierString(key, kMaxExtraKeyNameByteLength, false)) { + JS_ReportErrorASCII(cx, "Extra key names should match the identifier pattern."); + return NS_ERROR_INVALID_ARG; + } + } + + // Append event infos to be registered. + for (auto& method : methods) { + for (auto& object : objects) { + // We defer the actual registration here in case any other event description is invalid. + // In that case we don't need to roll back any partial registration. + DynamicEventInfo info{nsCString(aCategory), method, object, + nsTArray<nsCString>(extra_keys), recordOnRelease}; + newEventInfos.AppendElement(info); + newEventExpired.AppendElement(expired); + } + } + } + + RegisterEventResult res = RegisterEventResult::Ok; + { + StaticMutexAutoLock locker(gTelemetryEventsMutex); + res = ::RegisterEvents(locker, aCategory, newEventInfos, newEventExpired); + } + + switch (res) { + case RegisterEventResult::AlreadyRegistered: + JS_ReportErrorASCII(cx, "Attempt to register event that is already registered."); + return NS_ERROR_INVALID_ARG; + default: + break; + } + + return NS_OK; +} + nsresult TelemetryEvent::CreateSnapshots(uint32_t aDataset, bool aClear, JSContext* cx, uint8_t optional_argc, JS::MutableHandleValue aResult) @@ -762,9 +1158,7 @@ TelemetryEvent::CreateSnapshots(uint32_t aDataset, bool aClear, JSContext* cx, const uint32_t len = eventStorage->Length(); for (uint32_t i = 0; i < len; ++i) { const EventRecord& record = (*eventStorage)[i]; - const EventInfo& info = gEventInfo[record.EventId()]; - - if (IsInDataset(info.common_info.dataset, aDataset)) { + if (IsInDataset(GetDataset(locker, record.GetEventKey()), aDataset)) { events.AppendElement(record); } } @@ -790,7 +1184,7 @@ TelemetryEvent::CreateSnapshots(uint32_t aDataset, bool aClear, JSContext* cx, for (uint32_t i 
= 0; i < processLength; ++i) { JS::RootedObject eventsArray(cx); - if (NS_FAILED(SerializeEventsArray(processEvents[i].second(), cx, &eventsArray))) { + if (NS_FAILED(SerializeEventsArray(processEvents[i].second(), cx, &eventsArray, aDataset))) { return NS_ERROR_FAILURE; } @@ -831,9 +1225,9 @@ TelemetryEvent::SetEventRecordingEnabled(const nsACString& category, bool enable } if (enabled) { - gEnabledCategories.PutEntry(categoryId); + gEnabledCategories.PutEntry(category); } else { - gEnabledCategories.RemoveEntry(categoryId); + gEnabledCategories.RemoveEntry(category); } } @@ -867,5 +1261,12 @@ TelemetryEvent::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) n += gEnabledCategories.ShallowSizeOfExcludingThis(aMallocSizeOf); + if (gDynamicEventInfo) { + n += gDynamicEventInfo->ShallowSizeOfIncludingThis(aMallocSizeOf); + for (auto& info : *gDynamicEventInfo) { + n += info.SizeOfExcludingThis(aMallocSizeOf); + } + } + return n; } diff --git a/toolkit/components/telemetry/TelemetryEvent.h b/toolkit/components/telemetry/TelemetryEvent.h index 705284fdeef4a115519c0b849be3c17eac9b86a1..0d8fc887aa3bcd31d4c560a9a624660ef8135b9e 100644 --- a/toolkit/components/telemetry/TelemetryEvent.h +++ b/toolkit/components/telemetry/TelemetryEvent.h @@ -33,7 +33,11 @@ nsresult RecordEvent(const nsACString& aCategory, const nsACString& aMethod, const nsACString& aObject, JS::HandleValue aValue, JS::HandleValue aExtra, JSContext* aCx, uint8_t optional_argc); + void SetEventRecordingEnabled(const nsACString& aCategory, bool aEnabled); +nsresult RegisterEvents(const nsACString& aCategory, JS::Handle<JS::Value> aEventData, + JSContext* cx); + nsresult CreateSnapshots(uint32_t aDataset, bool aClear, JSContext* aCx, uint8_t optional_argc, JS::MutableHandleValue aResult); diff --git a/toolkit/components/telemetry/TelemetrySession.jsm b/toolkit/components/telemetry/TelemetrySession.jsm index e698e1f124ad500f47718752d54ca80cb047436c..f72a96b907f70bcbf61f18abe3163cb6abf3c21c 100644 --- 
a/toolkit/components/telemetry/TelemetrySession.jsm +++ b/toolkit/components/telemetry/TelemetrySession.jsm @@ -1008,8 +1008,8 @@ var Impl = { return []; } - let snapshot = Telemetry.snapshotBuiltinEvents(this.getDatasetType(), - clearSubsession); + let snapshot = Telemetry.snapshotEvents(this.getDatasetType(), + clearSubsession); // Don't return the test events outside of test environments. if (!this._testing) { @@ -1322,6 +1322,9 @@ var Impl = { keyedHistograms: keyedHistograms.extension, events: events.extension || [], }, + dynamic: { + events: events.dynamic || [], + }, }; // Only include the GPU process if we've accumulated data for it. diff --git a/toolkit/components/telemetry/docs/collection/events.rst b/toolkit/components/telemetry/docs/collection/events.rst index 9554d801a72fb9348cd07022c25d8cc93be505a6..81210ee3dd1313575d3f30a33a9502906a8351cf 100644 --- a/toolkit/components/telemetry/docs/collection/events.rst +++ b/toolkit/components/telemetry/docs/collection/events.rst @@ -66,7 +66,7 @@ The YAML definition file ======================== Any event recorded into Firefox Telemetry must be registered before it can be recorded. -This happens in `Events.yaml <https://dxr.mozilla.org/mozilla-central/source/toolkit/components/telemetry/Events.yaml>`_. +For any code that ships as part of Firefox that happens in `Events.yaml <https://dxr.mozilla.org/mozilla-central/source/toolkit/components/telemetry/Events.yaml>`_. The probes in the definition file are represented in a fixed-depth, three-level structure. The first level contains *category* names (grouping multiple events together), the second level contains *event* names, under which the events properties are listed. E.g.: @@ -155,6 +155,9 @@ Example: // event: [982134, "ui", "completion", "search-bar", "yahoo", // {"qerylen": "7", "results": "23"}] +``setEventRecordingEnabled()`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + .. 
code-block:: js

   Services.telemetry.setEventRecordingEnabled(category, enabled);
@@ -170,12 +173,49 @@ Example:
   Services.telemetry.setEventRecordingEnabled("ui", false);
   // ... now "ui" events will not be recorded anymore.

+``registerEvents()``
+~~~~~~~~~~~~~~~~~~~~
+
+.. code-block:: js
+
+  Services.telemetry.registerEvents(category, eventData);
+
+Register new events from add-ons.
+
+* ``category`` - *(required, string)* The category the events are in.
+* ``eventData`` - *(required, object)* An object of the form ``{eventName1: event1Data, ...}``, where each event's data is an object with the entries:
+
+  * ``methods`` - *(required, list of strings)* The valid event methods.
+  * ``objects`` - *(required, list of strings)* The valid event objects.
+  * ``extra_keys`` - *(optional, list of strings)* The valid extra keys for the event.
+  * ``record_on_release`` - *(optional, bool)*
+
+For events recorded from add-ons, registration happens at runtime. Any new events must first be registered through this function before they can be recorded.
+The registered categories will automatically be enabled for recording.
+
+After registration, the events can be recorded through the ``recordEvent()`` function. They will be submitted in the main pings payload under ``processes.dynamic.events``.
+
+New events registered here are subject to the same limitations as the ones registered through ``Events.yaml``, although the naming was in parts updated to recent policy changes.
+
+Example:
+
+.. code-block:: js
+
+  Services.telemetry.registerEvents("myAddon.interaction", {
+    "click": {
+      methods: ["click"],
+      objects: ["red_button", "blue_button"],
+    }
+  });
+  // Now events can be recorded.
+  Services.telemetry.recordEvent("myAddon.interaction", "click", "red_button");
+
 Internal API
-~~~~~~~~~~~~
+------------

 .. 
code-block:: js

-  Services.telemetry.snapshotBuiltinEvents(dataset, clear);
+  Services.telemetry.snapshotEvents(dataset, clear);
   Services.telemetry.clearEvents();

 These functions are only supposed to be used by Telemetry internally or in tests.
@@ -186,3 +226,4 @@ Version History

 - Firefox 52: Initial event support (`bug 1302663 <https://bugzilla.mozilla.org/show_bug.cgi?id=1302663>`_).
 - Firefox 53: Event recording disabled by default (`bug 1329139 <https://bugzilla.mozilla.org/show_bug.cgi?id=1329139>`_).
 - Firefox 54: Added child process events (`bug 1313326 <https://bugzilla.mozilla.org/show_bug.cgi?id=1313326>`_).
+- Firefox 56: Added support for recording new probes from add-ons (`bug 1302681 <https://bugzilla.mozilla.org/show_bug.cgi?id=1302681>`_).
diff --git a/toolkit/components/telemetry/nsITelemetry.idl b/toolkit/components/telemetry/nsITelemetry.idl
index c959c8ced695367ab88b8b566e47be3c5eb981d2..07b87605b18428ef94f528b4abad9d61df36a513 100644
--- a/toolkit/components/telemetry/nsITelemetry.idl
+++ b/toolkit/components/telemetry/nsITelemetry.idl
@@ -498,7 +498,35 @@ interface nsITelemetry : nsISupports
    * @param [aClear=false] Whether to clear out the scalars after snapshotting.
    */
   [implicit_jscontext, optional_argc]
-  jsval snapshotBuiltinEvents(in uint32_t aDataset, [optional] in boolean aClear);
+  jsval snapshotEvents(in uint32_t aDataset, [optional] in boolean aClear);
+
+  /**
+   * Register new events to record them from addons. This allows registering multiple
+   * events for a category. They will be valid only for the current Firefox session.
+   * Note that events shipping in Firefox should be registered in Events.yaml.
+   *
+   * @param aCategory The unique category the events are registered in.
+ * @param aEventData An object that contains registration data for 1-N events of the form: + * { + * "categoryName": { + * "methods": ["test1"], + * "objects": ["object1"], + * "record_on_release": false, + * "extra_keys": ["key1", "key2"], // optional + * "expired": false // optional, defaults to false. + * }, + * ... + * } + * @param aEventData.<name>.methods List of methods for this event entry. + * @param aEventData.<name>.objects List of objects for this event entry. + * @param aEventData.<name>.extra_keys Optional, list of allowed extra keys for this event entry. + * @param aEventData.<name>.record_on_release Optional, whether to record this data on release. + * Defaults to false. + * @param aEventData.<name>.expired Optional, whether this event entry is expired. This allows + * recording it without error, but it will be discarded. Defaults to false. + */ + [implicit_jscontext] + void registerEvents(in ACString aCategory, in jsval aEventData); /** * Resets all the stored events. This is intended to be only used in tests. diff --git a/toolkit/components/telemetry/tests/unit/test_ChildEvents.js b/toolkit/components/telemetry/tests/unit/test_ChildEvents.js index b6fe3999ced1bac4d36ce2d1323c1965b0498c1f..7a77a1cf9fc0021325cd0e65ea0a8d61b7a81b6e 100644 --- a/toolkit/components/telemetry/tests/unit/test_ChildEvents.js +++ b/toolkit/components/telemetry/tests/unit/test_ChildEvents.js @@ -36,11 +36,18 @@ const UNRECORDED_PARENT_EVENTS = [ ["telemetry.test", "content_only", "object1"], ]; +const RECORDED_DYNAMIC_EVENTS = [ + ["telemetry.test.dynamic", "test1", "object1"], + ["telemetry.test.dynamic", "test2", "object1"], +]; + function run_child_test() { // Record some events in the "content" process. RECORDED_CONTENT_EVENTS.forEach(e => Telemetry.recordEvent(...e)); // These events should not be recorded for the content process. UNRECORDED_CONTENT_EVENTS.forEach(e => Telemetry.recordEvent(...e)); + // Record some dynamic events from the content process. 
+ RECORDED_DYNAMIC_EVENTS.forEach(e => Telemetry.recordEvent(...e)); } /** @@ -50,8 +57,9 @@ function run_child_test() { async function waitForContentEvents() { await ContentTaskUtils.waitForCondition(() => { const snapshot = - Telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); - return Object.keys(snapshot).includes("content"); + Telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, false); + return Object.keys(snapshot).includes("content") && + Object.keys(snapshot).includes("dynamic"); }); } @@ -74,6 +82,21 @@ add_task(async function() { // Enable recording for the test event category. Telemetry.setEventRecordingEnabled("telemetry.test", true); + // Register dynamic test events. + Telemetry.registerEvents("telemetry.test.dynamic", { + // Event with only required fields. + "test1": { + methods: ["test1"], + objects: ["object1"], + }, + // Event with extra_keys. + "test2": { + methods: ["test2", "test2b"], + objects: ["object1"], + extra_keys: ["key1", "key2"], + }, + }); + // Run test in child, don't wait for it to finish: just wait for the // MESSAGE_CHILD_TEST_DONE. const timestampBeforeChildEvents = Telemetry.msSinceProcessStart(); @@ -100,6 +123,8 @@ add_task(async function() { Assert.ok("events" in payload.processes.parent, "Main process section should have events."); Assert.ok("content" in payload.processes, "Should have child process section"); Assert.ok("events" in payload.processes.content, "Child process section should have events."); + Assert.ok("dynamic" in payload.processes, "Should have dynamic process section"); + Assert.ok("events" in payload.processes.dynamic, "Dynamic process section should have events."); // Check that the expected events are present from the content process. 
let contentEvents = payload.processes.content.events.map(e => e.slice(1)); @@ -115,6 +140,13 @@ add_task(async function() { Assert.deepEqual(parentEvents[i], RECORDED_PARENT_EVENTS[i], "Should have recorded expected event."); } + // Check that the expected dynamic events are present. + let dynamicEvents = payload.processes.dynamic.events.map(e => e.slice(1)); + Assert.equal(dynamicEvents.length, RECORDED_DYNAMIC_EVENTS.length, "Should match expected event count."); + for (let i = 0; i < RECORDED_DYNAMIC_EVENTS.length; ++i) { + Assert.deepEqual(dynamicEvents[i], RECORDED_DYNAMIC_EVENTS[i], "Should have recorded expected event."); + } + // Check that the event timestamps are in the expected ranges. let contentTimestamps = payload.processes.content.events.map(e => e[0]); let parentTimestamps = payload.processes.parent.events.map(e => e[0]); @@ -127,9 +159,9 @@ add_task(async function() { // Make sure all events are cleared from storage properly. let snapshot = - Telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true); + Telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true); Assert.greaterOrEqual(Object.keys(snapshot).length, 2, "Should have events from at least two processes."); snapshot = - Telemetry.snapshotBuiltinEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true); + Telemetry.snapshotEvents(Ci.nsITelemetry.DATASET_RELEASE_CHANNEL_OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should have cleared all events from storage."); }); diff --git a/toolkit/components/telemetry/tests/unit/test_TelemetryEvents.js b/toolkit/components/telemetry/tests/unit/test_TelemetryEvents.js index 45b037cf11c7812121e6e0413c2f783122461cf5..d1fa4c8e583ed3ef9f19ec5dfda1c9e9b7c0a953 100644 --- a/toolkit/components/telemetry/tests/unit/test_TelemetryEvents.js +++ b/toolkit/components/telemetry/tests/unit/test_TelemetryEvents.js @@ -44,13 +44,13 @@ add_task(async function test_recording_state() { // Both test categories 
should be off by default. events.forEach(e => Telemetry.recordEvent(...e)); - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not have recorded any events."); // Enable one test category and see that we record correctly. Telemetry.setEventRecordingEnabled("telemetry.test", true); events.forEach(e => Telemetry.recordEvent(...e)); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); Assert.equal(snapshot.parent.length, 1, "Should have recorded one event."); Assert.equal(snapshot.parent[0][1], "telemetry.test", "Should have recorded one event in telemetry.test"); @@ -58,7 +58,7 @@ add_task(async function test_recording_state() { // Also enable the other test category and see that we record correctly. Telemetry.setEventRecordingEnabled("telemetry.test.second", true); events.forEach(e => Telemetry.recordEvent(...e)); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); Assert.equal(snapshot.parent.length, 2, "Should have recorded two events."); Assert.equal(snapshot.parent[0][1], "telemetry.test", "Should have recorded one event in telemetry.test"); @@ -67,7 +67,7 @@ add_task(async function test_recording_state() { // Now turn of one category again and check that this works as expected. 
Telemetry.setEventRecordingEnabled("telemetry.test", false); events.forEach(e => Telemetry.recordEvent(...e)); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); Assert.equal(snapshot.parent.length, 1, "Should have recorded one event."); Assert.equal(snapshot.parent[0][1], "telemetry.test.second", "Should have recorded one event in telemetry.test.second"); @@ -145,12 +145,12 @@ add_task(async function test_recording() { }; // Check that the expected events were recorded. - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, false); + let snapshot = Telemetry.snapshotEvents(OPTIN, false); Assert.ok(("parent" in snapshot), "Should have entry for main process."); checkEvents(snapshot.parent, expected); // Check serializing only opt-out events. - snapshot = Telemetry.snapshotBuiltinEvents(OPTOUT, false); + snapshot = Telemetry.snapshotEvents(OPTOUT, false); Assert.ok(("parent" in snapshot), "Should have entry for main process."); let filtered = expected.filter(e => e.optout == true); checkEvents(snapshot.parent, filtered); @@ -167,12 +167,12 @@ add_task(async function test_clear() { // Check that events were recorded. // The events are cleared by passing the respective flag. - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); Assert.equal(snapshot.parent.length, 2 * COUNT, `Should have recorded ${2 * COUNT} events.`); // Now the events should be cleared. - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, false); + snapshot = Telemetry.snapshotEvents(OPTIN, false); Assert.equal(Object.keys(snapshot).length, 0, `Should have cleared the events.`); }); @@ -181,17 +181,17 @@ add_task(async function test_expiry() { // Recording call with event that is expired by version. 
Telemetry.recordEvent("telemetry.test", "expired_version", "object1"); - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event with expired version."); // Recording call with event that is expired by date. Telemetry.recordEvent("telemetry.test", "expired_date", "object1"); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event with expired date."); // Recording call with event that has expiry_version and expiry_date in the future. Telemetry.recordEvent("telemetry.test", "not_expired_optout", "object1"); - snapshot = Telemetry.snapshotBuiltinEvents(OPTOUT, true); + snapshot = Telemetry.snapshotEvents(OPTOUT, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); Assert.equal(snapshot.parent.length, 1, "Should record event when date and version are not expired."); }); @@ -201,22 +201,22 @@ add_task(async function test_invalidParams() { // Recording call with wrong type for value argument. Telemetry.recordEvent("telemetry.test", "test1", "object1", 1); - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event when value argument with invalid type is passed."); // Recording call with wrong type for extra argument. Telemetry.recordEvent("telemetry.test", "test1", "object1", null, "invalid"); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event when extra argument with invalid type is passed."); // Recording call with unknown extra key. 
Telemetry.recordEvent("telemetry.test", "test1", "object1", null, {"key3": "x"}); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event when extra argument with invalid key is passed."); // Recording call with invalid value type. Telemetry.recordEvent("telemetry.test", "test1", "object1", null, {"key3": 1}); - snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.equal(Object.keys(snapshot).length, 0, "Should not record event when extra argument with invalid value type is passed."); }); @@ -231,7 +231,7 @@ add_task(async function test_storageLimit() { } // Check that the right events were recorded. - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); let events = snapshot.parent; Assert.equal(events.length, LIMIT, `Should have only recorded ${LIMIT} events`); @@ -274,7 +274,7 @@ add_task(async function test_valueLimits() { } // Check that the right events were recorded. - let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); let events = snapshot.parent; Assert.equal(events.length, expected.length, @@ -294,10 +294,239 @@ add_task(async function test_unicodeValues() { Telemetry.recordEvent("telemetry.test", "test1", "object1", null, {"key1": value}); // Check that the values were correctly recorded. 
- let snapshot = Telemetry.snapshotBuiltinEvents(OPTIN, true); + let snapshot = Telemetry.snapshotEvents(OPTIN, true); Assert.ok(("parent" in snapshot), "Should have entry for main process."); let events = snapshot.parent; Assert.equal(events.length, 2, "Should have recorded 2 events."); Assert.equal(events[0][4], value, "Should have recorded the right value."); Assert.equal(events[1][5].key1, value, "Should have recorded the right extra value."); }); + +add_task(function* test_dynamicEvents() { + Telemetry.clearEvents(); + Telemetry.canRecordExtended = true; + + // Register some test events. + Telemetry.registerEvents("telemetry.test.dynamic", { + // Event with only required fields. + "test1": { + methods: ["test1"], + objects: ["object1"], + }, + // Event with extra_keys. + "test2": { + methods: ["test2", "test2b"], + objects: ["object1"], + extra_keys: ["key1", "key2"], + }, + // Expired event. + "test3": { + methods: ["test3"], + objects: ["object1"], + expired: true, + }, + // A release-channel recording event. + "test4": { + methods: ["test4"], + objects: ["object1"], + record_on_release: true, + }, + }); + + // Record some valid events. + Telemetry.recordEvent("telemetry.test.dynamic", "test1", "object1"); + Telemetry.recordEvent("telemetry.test.dynamic", "test2", "object1", null, + {"key1": "foo", "key2": "bar"}); + Telemetry.recordEvent("telemetry.test.dynamic", "test3", "object1", "some value"); + Telemetry.recordEvent("telemetry.test.dynamic", "test4", "object1", null); + + // Test recording an unknown event. + Assert.throws(() => Telemetry.recordEvent("telemetry.test.dynamic", "unknown", "unknown"), + /Error: Unknown event: \["telemetry\.test\.dynamic", "unknown", "unknown"\]/, + "Should throw when recording an unknown dynamic event."); + + // Now check that the snapshot contains the expected data. 
+ let snapshot = Telemetry.snapshotEvents(OPTIN, false); + Assert.ok(("dynamic" in snapshot), "Should have dynamic events in the snapshot."); + + let expected = [ + ["telemetry.test.dynamic", "test1", "object1"], + ["telemetry.test.dynamic", "test2", "object1", null, {key1: "foo", key2: "bar"}], + // "test3" is expired, so it should not be recorded. + ["telemetry.test.dynamic", "test4", "object1"], + ]; + let events = snapshot.dynamic; + Assert.equal(events.length, expected.length, "Should have recorded the right amount of events."); + for (let i = 0; i < expected.length; ++i) { + Assert.deepEqual(events[i].slice(1), expected[i], + "Should have recorded the expected event data."); + } + + // Check that the opt-out snapshot contains only the one expected event. + snapshot = Telemetry.snapshotEvents(OPTOUT, false); + Assert.ok(("dynamic" in snapshot), "Should have dynamic events in the snapshot."); + Assert.equal(snapshot.dynamic.length, 1, "Should have one opt-out event in the snapshot."); + expected = ["telemetry.test.dynamic", "test4", "object1"]; + Assert.deepEqual(snapshot.dynamic[0].slice(1), expected); + + // Recording with unknown extra keys should be ignored and print an error. + Telemetry.clearEvents(); + Telemetry.recordEvent("telemetry.test.dynamic", "test1", "object1", null, {"key1": "foo"}); + Telemetry.recordEvent("telemetry.test.dynamic", "test2", "object1", null, {"key1": "foo", "unknown": "bar"}); + snapshot = Telemetry.snapshotEvents(OPTIN, true); + Assert.ok(!("dynamic" in snapshot), "Should have not recorded dynamic events with unknown extra keys."); + + // Other built-in events should not show up in the "dynamic" bucket of the snapshot. + Telemetry.recordEvent("telemetry.test", "test1", "object1"); + snapshot = Telemetry.snapshotEvents(OPTIN, true); + Assert.ok(!("dynamic" in snapshot), "Should have not recorded built-in event into dynamic bucket."); + + // Test that recording opt-in and opt-out events works as expected.
+ Telemetry.clearEvents(); + Telemetry.canRecordExtended = false; + + Telemetry.recordEvent("telemetry.test.dynamic", "test1", "object1"); + Telemetry.recordEvent("telemetry.test.dynamic", "test4", "object1"); + + expected = [ + // Only "test4" should have been recorded. + ["telemetry.test.dynamic", "test4", "object1"], + ]; + snapshot = Telemetry.snapshotEvents(OPTIN, true); + Assert.equal(snapshot.dynamic.length, 1, "Should have one opt-out event in the snapshot."); + Assert.deepEqual(snapshot.dynamic.map(e => e.slice(1)), expected); +}); + +add_task(function* test_dynamicEventRegistrationValidation() { + Telemetry.canRecordExtended = true; + Telemetry.clearEvents(); + + // Test registration of invalid categories. + Assert.throws(() => Telemetry.registerEvents("telemetry+test+dynamic", { + "test1": { + methods: ["test1"], + objects: ["object1"], + }, + }), + /Category parameter should match the identifier pattern\./, + "Should throw when registering category names with invalid characters."); + Assert.throws(() => Telemetry.registerEvents("telemetry.test.test.test.test.test.test.test.test", { + "test1": { + methods: ["test1"], + objects: ["object1"], + }, + }), + /Category parameter should match the identifier pattern\./, + "Should throw when registering overly long category names."); + + // Test registration of invalid event names. + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic1", { + "test?1": { + methods: ["test1"], + objects: ["object1"], + }, + }), + /Event names should match the identifier pattern\./, + "Should throw when registering event names with invalid characters."); + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic2", { + "test1test1test1test1test1test1test1": { + methods: ["test1"], + objects: ["object1"], + }, + }), + /Event names should match the identifier pattern\./, + "Should throw when registering overly long event names."); + + // Test registration of invalid method names. 
+ Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic3", { + "test1": { + methods: ["test?1"], + objects: ["object1"], + }, + }), + /Method names should match the identifier pattern\./, + "Should throw when registering method names with invalid characters."); + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic", { + "test1": { + methods: ["test1test1test1test1test1test1test1"], + objects: ["object1"], + }, + }), + /Method names should match the identifier pattern\./, + "Should throw when registering overly long method names."); + + // Test registration of invalid object names. + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic4", { + "test1": { + methods: ["test1"], + objects: ["object?1"], + }, + }), + /Object names should match the identifier pattern\./, + "Should throw when registering object names with invalid characters."); + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic5", { + "test1": { + methods: ["test1"], + objects: ["object1object1object1object1object1object1"], + }, + }), + /Object names should match the identifier pattern\./, + "Should throw when registering overly long object names."); + + // Test validation of invalid key names. + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic6", { + "test1": { + methods: ["test1"], + objects: ["object1"], + extra_keys: ["a?1"], + }, + }), + /Extra key names should match the identifier pattern\./, + "Should throw when registering extra key names with invalid characters."); + + // Test validation of key names that are too long - we allow a maximum of 15 characters. 
+ Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic7", { + "test1": { + methods: ["test1"], + objects: ["object1"], + extra_keys: ["a012345678901234"], + }, + }), + /Extra key names should match the identifier pattern\./, + "Should throw when registering extra key names which are too long."); + Telemetry.registerEvents("telemetry.test.dynamic8", { + "test1": { + methods: ["test1"], + objects: ["object1"], + extra_keys: ["a01234567890123"], + }, + }); + + // Test validation of extra key count - we only allow 10. + Assert.throws(() => Telemetry.registerEvents("telemetry.test.dynamic9", { + "test1": { + methods: ["test1"], + objects: ["object1"], + extra_keys: ["a1", "a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9", "a10", "a11"], + }, + }), + /No more than 10 extra keys can be registered\./, + "Should throw when registering too many extra keys."); + Telemetry.registerEvents("telemetry.test.dynamic10", { + "test1": { + methods: ["test1"], + objects: ["object1"], + extra_keys: ["a1", "a2", "a3", "a4", "a5", "a6", "a7", "a8", "a9", "a10"], + }, + }); + + // Test registering an event that's already registered through Events.yaml.
+ Assert.throws(() => Telemetry.registerEvents("telemetry.test", { + "test1": { + methods: ["test1"], + objects: ["object1"], + }, + }), + /Attempt to register event that is already registered\./, + "Should throw when registering event that already was registered."); +}); diff --git a/toolkit/components/telemetry/tests/unit/test_TelemetryHealthPing.js b/toolkit/components/telemetry/tests/unit/test_TelemetryHealthPing.js index e12a4fdfc802240602ab4f59cebc13bd09eeff75..55f11ca0882fa9a1198b067dc2fca8f327e98f1d 100644 --- a/toolkit/components/telemetry/tests/unit/test_TelemetryHealthPing.js +++ b/toolkit/components/telemetry/tests/unit/test_TelemetryHealthPing.js @@ -11,6 +11,7 @@ Cu.import("resource://gre/modules/TelemetryStorage.jsm", this); Cu.import("resource://gre/modules/TelemetryUtils.jsm", this); Cu.import("resource://gre/modules/Preferences.jsm", this); Cu.import("resource://gre/modules/XPCOMUtils.jsm", this); +Cu.import("resource://testing-common/TelemetryArchiveTesting.jsm", this); XPCOMUtils.defineLazyModuleGetter(this, "TelemetryHealthPing", "resource://gre/modules/TelemetryHealthPing.jsm"); @@ -30,6 +31,18 @@ function fakeHealthSchedulerTimer(set, clear) { telemetryHealthPing.Policy.clearSchedulerTickTimeout = clear; } +async function waitForConditionWithPromise(promiseFn, timeoutMsg, tryCount = 30) { + const SINGLE_TRY_TIMEOUT = 100; + let tries = 0; + do { + try { + return await promiseFn(); + } catch (ex) {} + await new Promise(resolve => do_timeout(SINGLE_TRY_TIMEOUT, resolve)); + } while (++tries <= tryCount); + throw new Error(timeoutMsg); +} + add_task(async function setup() { // Trigger a proper telemetry init. do_get_profile(true); @@ -110,34 +123,56 @@ add_task(async function test_sendOverSizedPing() { add_task(async function test_sendOnTimeout() { TelemetryHealthPing.testReset(); + await TelemetrySend.reset(); PingServer.clearRequests(); let PING_TYPE = "ping-on-timeout"; + // Disable send retry to make this test more deterministic. 
+ fakePingSendTimer(() => {}, () => {}); + // Set up small ping submission timeout to always have timeout error. TelemetrySend.testSetTimeoutForPingSubmit(2); - // Reset the timeout after receiving the first ping to be able to send health ping. - PingServer.registerPingHandler((request, result) => { + await TelemetryController.submitExternalPing(PING_TYPE, {}); + + let response; + PingServer.registerPingHandler((req, res) => { PingServer.resetPingHandler(); - TelemetrySend.testResetTimeOutToDefault(); + // We don't finish the response yet to make sure to trigger a timeout. + res.processAsync(); + response = res; }); - await TelemetryController.submitExternalPing(PING_TYPE, {}); - let ping = await PingServer.promiseNextPing(); - checkHealthPingStructure(ping, { + // Wait for health ping. + let ac = new TelemetryArchiveTesting.Checker(); + await ac.promiseInit(); + await waitForConditionWithPromise(() => { + ac.promiseFindPing("health", []); + }, "Failed to find health ping"); + + if (response) { + response.finish(); + } + + TelemetrySend.testResetTimeOutToDefault(); + PingServer.resetPingHandler(); + TelemetrySend.notifyCanUpload(); + + let pings = await PingServer.promiseNextPings(2); + let healthPing = pings.find(ping => ping.type === "health"); + checkHealthPingStructure(healthPing, { [TelemetryHealthPing.FailureType.SEND_FAILURE]: { "timeout": 1 }, "os": TelemetryHealthPing.OsInfo, "reason": TelemetryHealthPing.Reason.IMMEDIATE }); - - // Clear pending pings to avoid resending pings which fail with time out error. 
await TelemetryStorage.testClearPendingPings(); }); add_task(async function test_sendOnlyTopTenDiscardedPings() { TelemetryHealthPing.testReset(); + await TelemetrySend.reset(); PingServer.clearRequests(); let PING_TYPE = "sort-discarded"; @@ -155,10 +190,11 @@ add_task(async function test_sendOnlyTopTenDiscardedPings() { // Add failures for (let i = 1; i < 12; i++) { for (let j = 1; j < i; j++) { - await TelemetryHealthPing.recordDiscardedPing(PING_TYPE + i); + TelemetryHealthPing.recordDiscardedPing(PING_TYPE + i); } } + await TelemetrySend.reset(); await pingSubmissionCallBack(); let ping = await PingServer.promiseNextPing(); diff --git a/toolkit/content/aboutTelemetry.js b/toolkit/content/aboutTelemetry.js index 17dc59b154b6d678fd09705aeee308f6bc554def..3ff2483f9ad7cc65a59263b8338ac8c6480110b3 100644 --- a/toolkit/content/aboutTelemetry.js +++ b/toolkit/content/aboutTelemetry.js @@ -2093,7 +2093,8 @@ var HistogramSection = { if (hgramsProcess === "parent") { histograms = aPayload.histograms; - } else if ("processes" in aPayload && hgramsProcess in aPayload.processes) { + } else if ("processes" in aPayload && hgramsProcess in aPayload.processes && + "histograms" in aPayload.processes[hgramsProcess]) { histograms = aPayload.processes[hgramsProcess].histograms; } @@ -2126,7 +2127,8 @@ var KeyedHistogramSection = { let keyedHgramsProcess = keyedHgramsOption.getAttribute("value"); if (keyedHgramsProcess === "parent") { keyedHistograms = aPayload.keyedHistograms; - } else if ("processes" in aPayload && keyedHgramsProcess in aPayload.processes) { + } else if ("processes" in aPayload && keyedHgramsProcess in aPayload.processes && + "keyedHistograms" in aPayload.processes[keyedHgramsProcess]) { keyedHistograms = aPayload.processes[keyedHgramsProcess].keyedHistograms; } diff --git a/toolkit/moz.configure b/toolkit/moz.configure index c9cc3b9e35fb688632d59ee25a35bdd80fa39810..b0b9dd7bd51a2a1d2d1036c1a224a36d5e0cc1a9 100644 --- a/toolkit/moz.configure +++ 
b/toolkit/moz.configure @@ -722,6 +722,7 @@ with only_when(building_stylo_bindgen): @depends(llvm_config, '--with-libclang-path', '--with-clang-path', host_library_name_info, host) @imports('os.path') + @imports('glob') @imports(_from='textwrap', _import='dedent') def bindgen_config_paths(llvm_config, libclang_path, clang_path, library_name_info, host): @@ -737,6 +738,9 @@ with only_when(building_stylo_bindgen): if host.kernel == 'Linux': libclang_choices.append('libclang.so.1') + if host.os == 'OpenBSD': + libclang_choices = glob.glob(path + '/libclang.so.*.*') + # At least one of the choices must be found. for choice in libclang_choices: libclang = os.path.join(path, choice) diff --git a/toolkit/mozapps/extensions/AddonManager.jsm b/toolkit/mozapps/extensions/AddonManager.jsm index b947b2021d4767e263a367c3b543493467e6ba95..4c84bf442949e3ff135c28318433548f697f381b 100644 --- a/toolkit/mozapps/extensions/AddonManager.jsm +++ b/toolkit/mozapps/extensions/AddonManager.jsm @@ -3175,6 +3175,11 @@ this.AddonManagerPrivate = { AddonType, + get BOOTSTRAP_REASONS() { + return AddonManagerInternal._getProviderByName("XPIProvider") + .BOOTSTRAP_REASONS; + }, + recordTimestamp(name, value) { AddonManagerInternal.recordTimestamp(name, value); }, diff --git a/toolkit/mozapps/extensions/content/extensions.js b/toolkit/mozapps/extensions/content/extensions.js index 2238db70258a5d3ec32a80f8a43f5e68302efb4a..0a9a930806faa9912275cee071591ed0e2aaae68 100644 --- a/toolkit/mozapps/extensions/content/extensions.js +++ b/toolkit/mozapps/extensions/content/extensions.js @@ -3643,16 +3643,18 @@ var gDetailView = { try { if (this._addon.optionsType == AddonManager.OPTIONS_TYPE_INLINE_BROWSER) { - whenViewLoaded(() => { - this.createOptionsBrowser(rows).then(browser => { - // Make sure the browser is unloaded as soon as we change views, - // rather than waiting for the next detail view to load. 
- document.addEventListener("ViewChanged", function() { - browser.remove(); - }, {once: true}); - - finish(browser); - }); + whenViewLoaded(async () => { + await this._addon.startupPromise; + + let browser = await this.createOptionsBrowser(rows); + + // Make sure the browser is unloaded as soon as we change views, + // rather than waiting for the next detail view to load. + document.addEventListener("ViewChanged", function() { + browser.remove(); + }, {once: true}); + + finish(browser); }); if (aCallback) diff --git a/toolkit/mozapps/extensions/internal/WebExtensionBootstrap.js b/toolkit/mozapps/extensions/internal/WebExtensionBootstrap.js deleted file mode 100644 index 555c2b8c2c836abd3446225085bc0483b93067fc..0000000000000000000000000000000000000000 --- a/toolkit/mozapps/extensions/internal/WebExtensionBootstrap.js +++ /dev/null @@ -1,38 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -"use strict"; - -/* exported startup, shutdown, install, uninstall */ - -Components.utils.import("resource://gre/modules/Extension.jsm"); - -var extension; - -const BOOTSTRAP_REASON_TO_STRING_MAP = { - [this.APP_STARTUP]: "APP_STARTUP", - [this.APP_SHUTDOWN]: "APP_SHUTDOWN", - [this.ADDON_ENABLE]: "ADDON_ENABLE", - [this.ADDON_DISABLE]: "ADDON_DISABLE", - [this.ADDON_INSTALL]: "ADDON_INSTALL", - [this.ADDON_UNINSTALL]: "ADDON_UNINSTALL", - [this.ADDON_UPGRADE]: "ADDON_UPGRADE", - [this.ADDON_DOWNGRADE]: "ADDON_DOWNGRADE", -}; - -function install(data, reason) { -} - -function startup(data, reason) { - extension = new Extension(data, BOOTSTRAP_REASON_TO_STRING_MAP[reason]); - extension.startup(); -} - -function shutdown(data, reason) { - extension.shutdown(BOOTSTRAP_REASON_TO_STRING_MAP[reason]); - extension = null; -} - -function uninstall(data, reason) { -} diff --git a/toolkit/mozapps/extensions/internal/XPIProvider.jsm b/toolkit/mozapps/extensions/internal/XPIProvider.jsm index 20ba236b4743ae04a1169d1e265113f9d223c8df..37749b144c8a15506256154c799b6626cebafa0e 100644 --- a/toolkit/mozapps/extensions/internal/XPIProvider.jsm +++ b/toolkit/mozapps/extensions/internal/XPIProvider.jsm @@ -25,6 +25,8 @@ XPCOMUtils.defineLazyModuleGetter(this, "AppConstants", "resource://gre/modules/AppConstants.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "ChromeManifestParser", "resource://gre/modules/ChromeManifestParser.jsm"); +XPCOMUtils.defineLazyModuleGetter(this, "Extension", + "resource://gre/modules/Extension.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "LightweightThemeManager", "resource://gre/modules/LightweightThemeManager.jsm"); XPCOMUtils.defineLazyModuleGetter(this, "FileUtils", @@ -1797,6 +1799,8 @@ this.XPIProvider = { return "XPIProvider"; }, + BOOTSTRAP_REASONS: Object.freeze(BOOTSTRAP_REASONS), + // An array of known install locations installLocations: null, // A dictionary of known install locations by name @@ -4225,37 +4229,39 @@ this.XPIProvider = { 
return; } - let uri = getURIForResourceInFile(aFile, "bootstrap.js").spec; - if (aType == "dictionary") - uri = "resource://gre/modules/addons/SpellCheckDictionaryBootstrap.js" - else if (isWebExtension(aType)) - uri = "resource://gre/modules/addons/WebExtensionBootstrap.js" - else if (aType == "apiextension") - uri = "resource://gre/modules/addons/APIExtensionBootstrap.js" + if (isWebExtension(aType)) { + activeAddon.bootstrapScope = Extension.getBootstrapScope(aId, aFile); + } else { + let uri = getURIForResourceInFile(aFile, "bootstrap.js").spec; + if (aType == "dictionary") + uri = "resource://gre/modules/addons/SpellCheckDictionaryBootstrap.js" + else if (aType == "apiextension") + uri = "resource://gre/modules/addons/APIExtensionBootstrap.js" - activeAddon.bootstrapScope = - new Cu.Sandbox(principal, { sandboxName: uri, - wantGlobalProperties: ["indexedDB"], - addonId: aId, - metadata: { addonID: aId, URI: uri } }); + activeAddon.bootstrapScope = + new Cu.Sandbox(principal, { sandboxName: uri, + wantGlobalProperties: ["indexedDB"], + addonId: aId, + metadata: { addonID: aId, URI: uri } }); - try { - // Copy the reason values from the global object into the bootstrap scope. - for (let name in BOOTSTRAP_REASONS) - activeAddon.bootstrapScope[name] = BOOTSTRAP_REASONS[name]; + try { + // Copy the reason values from the global object into the bootstrap scope. + for (let name in BOOTSTRAP_REASONS) + activeAddon.bootstrapScope[name] = BOOTSTRAP_REASONS[name]; - // Add other stuff that extensions want. - Object.assign(activeAddon.bootstrapScope, {Worker, ChromeWorker}); + // Add other stuff that extensions want. 
+ Object.assign(activeAddon.bootstrapScope, {Worker, ChromeWorker}); - // Define a console for the add-on - XPCOMUtils.defineLazyGetter( - activeAddon.bootstrapScope, "console", - () => new ConsoleAPI({ consoleID: "addon/" + aId })); + // Define a console for the add-on + XPCOMUtils.defineLazyGetter( + activeAddon.bootstrapScope, "console", + () => new ConsoleAPI({ consoleID: "addon/" + aId })); - activeAddon.bootstrapScope.__SCRIPT_URI_SPEC__ = uri; - Services.scriptloader.loadSubScript(uri, activeAddon.bootstrapScope); - } catch (e) { - logger.warn("Error loading bootstrap.js for " + aId, e); + activeAddon.bootstrapScope.__SCRIPT_URI_SPEC__ = uri; + Services.scriptloader.loadSubScript(uri, activeAddon.bootstrapScope); + } catch (e) { + logger.warn("Error loading bootstrap.js for " + aId, e); + } } // Notify the BrowserToolboxProcess that a new addon has been loaded. @@ -4338,8 +4344,8 @@ this.XPIProvider = { return; let method = undefined; + let scope = activeAddon.bootstrapScope; try { - let scope = activeAddon.bootstrapScope; method = scope[aMethod] || Cu.evalInSandbox(`${aMethod};`, scope); } catch (e) { // An exception will be caught if the expected method is not defined. @@ -4384,11 +4390,18 @@ this.XPIProvider = { } else { logger.debug("Calling bootstrap method " + aMethod + " on " + aAddon.id + " version " + aAddon.version); + + let result; try { - method(params, aReason); + result = method.call(scope, params, aReason); } catch (e) { logger.warn("Exception running bootstrap method " + aMethod + " on " + aAddon.id, e); } + + if (aMethod == "startup") { + activeAddon.startupPromise = Promise.resolve(result); + activeAddon.startupPromise.catch(Cu.reportError); + } } } finally { // Extensions are automatically initialized in the correct order at startup. 
@@ -5477,6 +5490,17 @@ AddonWrapper.prototype = { return addon.bootstrap && canRunInSafeMode(addon); }, + get startupPromise() { + let addon = addonFor(this); + if (!addon.bootstrap || !this.isActive) + return null; + + let activeAddon = XPIProvider.activeAddons.get(addon.id); + if (activeAddon) + return activeAddon.startupPromise || null; + return null; + }, + updateBlocklistState(applySoftBlock = true) { addonFor(this).updateBlocklistState({applySoftBlock}); }, diff --git a/toolkit/mozapps/extensions/internal/moz.build b/toolkit/mozapps/extensions/internal/moz.build index 09a37b9eb39445aef6636414ddc9457a01383ae0..8564f46ef63bfba2cdabac52b41f1a6e0add08e8 100644 --- a/toolkit/mozapps/extensions/internal/moz.build +++ b/toolkit/mozapps/extensions/internal/moz.build @@ -16,7 +16,6 @@ EXTRA_JS_MODULES.addons += [ 'LightweightThemeImageOptimizer.jsm', 'ProductAddonChecker.jsm', 'SpellCheckDictionaryBootstrap.js', - 'WebExtensionBootstrap.js', 'XPIInstall.jsm', 'XPIProvider.jsm', 'XPIProviderUtils.js', diff --git a/toolkit/mozapps/extensions/test/xpcshell/test_shutdown.js b/toolkit/mozapps/extensions/test/xpcshell/test_shutdown.js index 2941cafb399d35f7a54ddf364bfe4e517ac99b49..48b0b06c627dd44cc79bec110753c129da2c1e23 100644 --- a/toolkit/mozapps/extensions/test/xpcshell/test_shutdown.js +++ b/toolkit/mozapps/extensions/test/xpcshell/test_shutdown.js @@ -24,7 +24,7 @@ const IGNORE_PRIVATE = ["AddonAuthor", "AddonCompatibilityOverride", "setTelemetryDetails", "getTelemetryDetails", "callNoUpdateListeners", "backgroundUpdateTimerHandler", "hasUpgradeListener", "getUpgradeListener", - "isDBLoaded"]; + "isDBLoaded", "BOOTSTRAP_REASONS"]; async function test_functions() { for (let prop in AddonManager) { @@ -64,10 +64,10 @@ async function test_functions() { } for (let prop in AddonManagerPrivate) { - if (typeof AddonManagerPrivate[prop] != "function") - continue; if (IGNORE_PRIVATE.indexOf(prop) != -1) continue; + if (typeof AddonManagerPrivate[prop] != "function") + 
continue; try { do_print("AddonManagerPrivate." + prop); diff --git a/toolkit/mozapps/installer/windows/nsis/common.nsh b/toolkit/mozapps/installer/windows/nsis/common.nsh index 032a196a4f07150dcf5bfe83e1ec814f984ace8f..ff37411794c10dc35be41ee62e7e388d33e7c0dd 100755 --- a/toolkit/mozapps/installer/windows/nsis/common.nsh +++ b/toolkit/mozapps/installer/windows/nsis/common.nsh @@ -7609,6 +7609,46 @@ Exch $0 ; pixels from the beginning of the dialog to the end of the control !macroend +/** + * Gets the number of dialog units from the top of a dialog to the bottom of a + * control + * + * _DIALOG the handle of the dialog + * _CONTROL the handle of the control + * _RES_DU return value - dialog units from the top of the dialog to the bottom + * of the control + */ +!macro GetDlgItemBottomDUCall _DIALOG _CONTROL _RES_DU + Push "${_DIALOG}" + Push "${_CONTROL}" + ${CallArtificialFunction} GetDlgItemBottomDU_ + Pop ${_RES_DU} +!macroend + +!define GetDlgItemBottomDU "!insertmacro GetDlgItemBottomDUCall" +!define un.GetDlgItemBottomDU "!insertmacro GetDlgItemBottomDUCall" + +!macro GetDlgItemBottomDU_ + Exch $0 ; handle of the control + Exch $1 ; handle of the dialog + Push $2 + Push $3 + + ; #32770 is the dialog class + FindWindow $2 "#32770" "" $HWNDPARENT + System::Call '*(i, i, i, i) i .r3' + System::Call 'user32::GetWindowRect(i r0, i r3)' + System::Call 'user32::MapWindowPoints(i 0, i r2, i r3, i 2)' + System::Call 'user32::MapDialogRect(i r1, i r3)' + System::Call '*$3(i, i, i, i .r0)' + System::Free $3 + + Pop $3 + Pop $2 + Pop $1 + Exch $0 ; pixels from the top of the dialog to the bottom of the control +!macroend + /** * Gets the width and height for sizing a control that has the specified text. 
* If the text has embedded newlines then the width and height will be diff --git a/toolkit/mozapps/installer/windows/nsis/makensis.mk b/toolkit/mozapps/installer/windows/nsis/makensis.mk index f4b580d848afb6a8652cf2d0db630f907a798910..a62c55b3f9293f7d43f1094ac3d17b822dd6f0c1 100755 --- a/toolkit/mozapps/installer/windows/nsis/makensis.mk +++ b/toolkit/mozapps/installer/windows/nsis/makensis.mk @@ -31,6 +31,7 @@ CUSTOM_NSIS_PLUGINS = \ InetBgDL.dll \ InvokeShellVerb.dll \ liteFirewallW.dll \ + nsJSON.dll \ ServicesHelper.dll \ ShellLink.dll \ UAC.dll \ diff --git a/widget/android/nsWindow.cpp b/widget/android/nsWindow.cpp index 90ff9205c1f5e5c99339d2db5e6f7bed03c7823a..31662ecb192e91c1236c7f600deaf03cd6337570 100644 --- a/widget/android/nsWindow.cpp +++ b/widget/android/nsWindow.cpp @@ -365,7 +365,9 @@ public: : mWindow(aPtr, aWindow) , mNPZC(aNPZC) , mPreviousButtons(0) - {} + { + MOZ_ASSERT(mWindow); + } ~NPZCSupport() {} @@ -827,7 +829,9 @@ public: : mWindow(aPtr, aWindow) , mCompositor(aInstance) , mCompositorPaused(true) - {} + { + MOZ_ASSERT(mWindow); + } ~LayerViewSupport() {} @@ -859,6 +863,9 @@ private: void OnResumedCompositor() { MOZ_ASSERT(NS_IsMainThread()); + if (!mWindow) { + return; // Already shut down. + } // When we receive this, the compositor has already been told to // resume. (It turns out that waiting till we reach here to tell @@ -919,7 +926,9 @@ public: jni::Object::Param aSurface) { MOZ_ASSERT(NS_IsMainThread()); - MOZ_ASSERT(mWindow); + if (!mWindow) { + return; // Already shut down. + } mSurface = aSurface; mWindow->CreateLayerManager(aWidth, aHeight);