author | Andreea Pavel <apavel@mozilla.com> | Tue, 14 May 2019 07:17:06 +0300
changeset 473724 | f25162fe921d7923006ee4e0cd9467a1962d6f9e |
parent 473723 | 9c23e192d2f8ff9db68ddbf5a40dd3ec94bee054 (current diff) |
parent 473710 | e0a622476b7756daebac7ccb7a9bbeb5dfe3cac0 (diff) |
child 473725 | 092e0808f3f8f921164933e96b42100e4bf4192f |
push id | 113104 |
push user | aiakab@mozilla.com |
push date | Tue, 14 May 2019 09:49:04 +0000 |
treeherder | mozilla-inbound@3edc4e3973b2
milestone | 68.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
--- a/browser/components/extensions/ext-browser.json +++ b/browser/components/extensions/ext-browser.json @@ -30,17 +30,17 @@ "scopes": ["addon_parent"], "paths": [ ["captivePortal"] ] }, "chrome_settings_overrides": { "url": "chrome://browser/content/parent/ext-chrome-settings-overrides.js", "scopes": [], - "events": ["update", "uninstall"], + "events": ["update", "uninstall", "disable"], "schema": "chrome://browser/content/schemas/chrome_settings_overrides.json", "manifest": ["chrome_settings_overrides"] }, "commands": { "url": "chrome://browser/content/parent/ext-commands.js", "schema": "chrome://browser/content/schemas/commands.json", "scopes": ["addon_parent"], "events": ["uninstall"], @@ -170,16 +170,17 @@ "paths": [ ["sessions"] ] }, "sidebarAction": { "url": "chrome://browser/content/parent/ext-sidebarAction.js", "schema": "chrome://browser/content/schemas/sidebar_action.json", "scopes": ["addon_parent"], + "events": ["uninstall"], "manifest": ["sidebar_action"], "paths": [ ["sidebarAction"] ] }, "tabs": { "url": "chrome://browser/content/parent/ext-tabs.js", "schema": "chrome://browser/content/schemas/tabs.json", @@ -188,17 +189,17 @@ "paths": [ ["tabs"] ] }, "urlOverrides": { "url": "chrome://browser/content/parent/ext-url-overrides.js", "schema": "chrome://browser/content/schemas/url_overrides.json", "scopes": ["addon_parent"], - "events": ["uninstall"], + "events": ["update", "uninstall", "disable"], "manifest": ["chrome_url_overrides"], "paths": [ ["urlOverrides"] ] }, "windows": { "url": "chrome://browser/content/parent/ext-windows.js", "schema": "chrome://browser/content/schemas/windows.json",
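The "events" arrays in ext-browser.json declare which add-on lifecycle notifications a module wants to receive even when the extension never started in the current session. A minimal sketch of how such a declaration could map onto the static handlers added elsewhere in this push — the `loadModule` helper and the dispatch function are illustrative assumptions, not the actual Management implementation:

```js
// Sketch only: dispatching an "events" entry such as ["update", "uninstall", "disable"]
// to a module's static handlers. loadModule() is a hypothetical loader for the
// module URL listed in ext-browser.json (e.g. ext-url-overrides.js).
async function notifyModule(moduleInfo, event, addonId, manifest) {
  if (!moduleInfo.events.includes(event)) {
    return; // this module did not register for the lifecycle event
  }
  const module = await loadModule(moduleInfo.url);
  switch (event) {
    case "disable":
      await module.onDisable(addonId);
      break;
    case "uninstall":
      await module.onUninstall(addonId);
      break;
    case "update":
      await module.onUpdate(addonId, manifest);
      break;
  }
}
```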
--- a/browser/components/extensions/parent/ext-browserAction.js +++ b/browser/components/extensions/parent/ext-browserAction.js @@ -110,17 +110,17 @@ this.browserAction = class extends Exten handleLocationChange(eventType, tab, fromBrowse) { if (fromBrowse) { this.tabContext.clear(tab); this.updateOnChange(tab); } } - onShutdown(reason) { + onShutdown() { browserActionMap.delete(this.extension); this.tabContext.shutdown(); CustomizableUI.destroyWidget(this.id); this.clearPopup(); }
--- a/browser/components/extensions/parent/ext-chrome-settings-overrides.js +++ b/browser/components/extensions/parent/ext-chrome-settings-overrides.js @@ -170,16 +170,23 @@ this.chrome_settings_overrides = class e let haveSearchProvider = manifest && manifest.chrome_settings_overrides && manifest.chrome_settings_overrides.search_provider; if (!haveSearchProvider) { this.removeSearchSettings(id); } } + static onDisable(id) { + homepagePopup.clearConfirmation(id); + + chrome_settings_overrides.processDefaultSearchSetting("disable", id); + chrome_settings_overrides.removeEngine(id); + } + async onManifestEntry(entryName) { let {extension} = this; let {manifest} = extension; await ExtensionSettingsStore.initialize(); let homepageUrl = manifest.chrome_settings_overrides.homepage; @@ -188,18 +195,17 @@ this.chrome_settings_overrides = class e if (extension.startupReason == "ADDON_INSTALL" || extension.startupReason == "ADDON_ENABLE") { inControl = await ExtensionPreferencesManager.setSetting( extension.id, "homepage_override", homepageUrl); } else { let item = await ExtensionPreferencesManager.getSetting("homepage_override"); inControl = item && item.id == extension.id; } - // We need to add the listener here too since onPrefsChanged won't trigger on a - // restart (the prefs are already set). + if (inControl) { Services.prefs.setBoolPref(HOMEPAGE_PRIVATE_ALLOWED, extension.privateBrowsingAllowed); // Also set this now as an upgraded browser will need this. Services.prefs.setBoolPref(HOMEPAGE_EXTENSION_CONTROLLED, true); if (extension.startupReason == "APP_STARTUP") { handleInitialHomepagePopup(extension.id, homepageUrl); } else { homepagePopup.addObserver(extension.id); @@ -220,24 +226,16 @@ this.chrome_settings_overrides = class e extension.on("remove-permissions", async (ignoreEvent, permissions) => { if (permissions.permissions.includes("internal:privateBrowsingAllowed")) { let item = await ExtensionPreferencesManager.getSetting("homepage_override"); if (item && item.id == extension.id) { Services.prefs.setBoolPref(HOMEPAGE_PRIVATE_ALLOWED, false); } } }); - - extension.callOnClose({ - close: () => { - if (extension.shutdownReason == "ADDON_DISABLE") { - homepagePopup.clearConfirmation(extension.id); - } - }, - }); } if (manifest.chrome_settings_overrides.search_provider) { // Registering a search engine can potentially take a long while, // or not complete at all (when searchInitialized is never resolved), // so we are deliberately not awaiting the returned promise here. 
let searchStartupPromise = this.processSearchProviderManifestEntry().finally(() => { if (pendingSearchSetupTasks.get(extension.id) === searchStartupPromise) { @@ -256,24 +254,16 @@ this.chrome_settings_overrides = class e let searchProvider = manifest.chrome_settings_overrides.search_provider; if (searchProvider.is_default) { await searchInitialized; if (!this.extension) { Cu.reportError(`Extension shut down before search provider was registered`); return; } } - extension.callOnClose({ - close: () => { - if (extension.shutdownReason == "ADDON_DISABLE") { - chrome_settings_overrides.processDefaultSearchSetting("disable", extension.id); - chrome_settings_overrides.removeEngine(extension.id); - } - }, - }); let engineName = searchProvider.name.trim(); if (searchProvider.is_default) { let engine = Services.search.getEngineByName(engineName); let defaultEngines = await Services.search.getDefaultEngines(); if (engine && defaultEngines.some(defaultEngine => defaultEngine.name == engineName)) { // Needs to be called every time to handle reenabling, but // only sets default for install or enable.
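With "disable" now declared for chrome_settings_overrides in ext-browser.json, the cleanup that previously ran from a per-instance `extension.callOnClose()` hook moves into a static `onDisable(id)` that only needs the add-on id. The before/after shape, condensed from this diff:

```js
// Before: cleanup was tied to a running Extension instance.
extension.callOnClose({
  close: () => {
    if (extension.shutdownReason == "ADDON_DISABLE") {
      homepagePopup.clearConfirmation(extension.id);
      chrome_settings_overrides.processDefaultSearchSetting("disable", extension.id);
      chrome_settings_overrides.removeEngine(extension.id);
    }
  },
});

// After: a static handler keyed only by the add-on id, invokable via the
// "disable" event even if the extension never ran in this session.
class chrome_settings_overrides /* excerpt */ {
  static onDisable(id) {
    homepagePopup.clearConfirmation(id);
    chrome_settings_overrides.processDefaultSearchSetting("disable", id);
    chrome_settings_overrides.removeEngine(id);
  }
}
```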
--- a/browser/components/extensions/parent/ext-commands.js +++ b/browser/components/extensions/parent/ext-commands.js @@ -15,17 +15,17 @@ this.commands = class extends ExtensionA extension: this.extension, onCommand: (name) => this.emit("command", name), }); this.extension.shortcuts = shortcuts; await shortcuts.loadCommands(); await shortcuts.register(); } - onShutdown(reason) { + onShutdown() { this.extension.shortcuts.unregister(); } getAPI(context) { return { commands: { getAll: () => this.extension.shortcuts.allCommands(), update: (args) => this.extension.shortcuts.updateCommand(args),
--- a/browser/components/extensions/parent/ext-devtools.js +++ b/browser/components/extensions/parent/ext-devtools.js @@ -351,17 +351,17 @@ this.devtools = class extends ExtensionA if (!this.isDevToolsPageDisabled()) { this.pageDefinition.build(); } DevToolsShim.on("toolbox-created", this.onToolboxCreated); DevToolsShim.on("toolbox-destroy", this.onToolboxDestroy); } - onShutdown(reason) { + onShutdown() { DevToolsShim.off("toolbox-created", this.onToolboxCreated); DevToolsShim.off("toolbox-destroy", this.onToolboxDestroy); // Shutdown the extension devtools_page from all existing toolboxes. this.pageDefinition.shutdown(); this.pageDefinition = null; // Iterate over the existing toolboxes and unlist the devtools webextension from them.
--- a/browser/components/extensions/parent/ext-menus.js +++ b/browser/components/extensions/parent/ext-menus.js @@ -1095,17 +1095,17 @@ this.menusInternal = class extends Exten super(extension); if (!gMenuMap.size) { menuTracker.register(); } gMenuMap.set(extension, new Map()); } - onShutdown(reason) { + onShutdown() { let {extension} = this; if (gMenuMap.has(extension)) { gMenuMap.delete(extension); gRootItems.delete(extension); gShownMenuItems.delete(extension); gOnShownSubscribers.delete(extension); if (!gMenuMap.size) {
--- a/browser/components/extensions/parent/ext-omnibox.js +++ b/browser/components/extensions/parent/ext-omnibox.js @@ -15,17 +15,17 @@ this.omnibox = class extends ExtensionAP // This will throw if the keyword is already registered. ExtensionSearchHandler.registerKeyword(keyword, extension); this.keyword = keyword; } catch (e) { extension.manifestError(e.message); } } - onShutdown(reason) { + onShutdown() { ExtensionSearchHandler.unregisterKeyword(this.keyword); } getAPI(context) { let {extension} = context; return { omnibox: { setDefaultSuggestion: (suggestion) => {
--- a/browser/components/extensions/parent/ext-pageAction.js +++ b/browser/components/extensions/parent/ext-pageAction.js @@ -113,26 +113,26 @@ this.pageAction = class extends Extensio if (this.isShown(tab)) { this.updateButton(window); } } } } } - onShutdown(reason) { + onShutdown(isAppShutdown) { pageActionMap.delete(this.extension); this.tabContext.shutdown(); // Removing the browser page action causes PageActions to forget about it // across app restarts, so don't remove it on app shutdown, but do remove // it on all other shutdowns since there's no guarantee the action will be // coming back. - if (reason != "APP_SHUTDOWN" && this.browserPageAction) { + if (!isAppShutdown && this.browserPageAction) { this.browserPageAction.remove(); this.browserPageAction = null; } } // Returns the value of the property |prop| for the given tab, where // |prop| is one of "show", "title", "icon", "popup". getProperty(tab, prop) {
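Several files in this push change `onShutdown(reason)` to either `onShutdown()` or `onShutdown(isAppShutdown)`: instead of comparing against bootstrap reason strings, implementations now receive a boolean. A condensed illustration of the calling convention this implies — the call site shown is an assumption, not part of this patch:

```js
// Old style: each API compared against the reason string.
onShutdown(reason) {
  if (reason != "APP_SHUTDOWN" && this.browserPageAction) {
    this.browserPageAction.remove();
  }
}

// New style: most APIs only care whether the whole application is going away,
// so the caller is assumed to pass that as a boolean, e.g.
//   api.onShutdown(extension.shutdownReason === "APP_SHUTDOWN");
onShutdown(isAppShutdown) {
  if (!isAppShutdown && this.browserPageAction) {
    this.browserPageAction.remove();
  }
}
```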
--- a/browser/components/extensions/parent/ext-sidebarAction.js +++ b/browser/components/extensions/parent/ext-sidebarAction.js @@ -83,52 +83,58 @@ this.sidebarAction = class extends Exten sidebarActionMap.set(extension, this); } onReady() { this.build(); } - onShutdown(reason) { + onShutdown(isAppShutdown) { sidebarActionMap.delete(this.this); this.tabContext.shutdown(); // Don't remove everything on app shutdown so session restore can handle // restoring open sidebars. - if (reason === "APP_SHUTDOWN") { + if (isAppShutdown) { return; } for (let window of windowTracker.browserWindows()) { let {document, SidebarUI} = window; if (SidebarUI.currentID === this.id) { SidebarUI.hide(); } - if (SidebarUI.lastOpenedId === this.id && - reason === "ADDON_UNINSTALL") { - SidebarUI.lastOpenedId = null; - } let menu = document.getElementById(this.menuId); if (menu) { menu.remove(); } let button = document.getElementById(this.buttonId); if (button) { button.remove(); } let header = document.getElementById("sidebar-switcher-target"); header.removeEventListener("SidebarShown", this.updateHeader); SidebarUI.sidebars.delete(this.id); } windowTracker.removeOpenListener(this.windowOpenListener); windowTracker.removeCloseListener(this.windowCloseListener); } + static onUninstall(id) { + const sidebarId = `${makeWidgetId(id)}-sidebar-action`; + for (let window of windowTracker.browserWindows()) { + let {SidebarUI} = window; + if (SidebarUI.lastOpenedId === sidebarId) { + SidebarUI.lastOpenedId = null; + } + } + } + build() { this.tabContext.on("tab-select", // eslint-disable-line mozilla/balanced-listeners (evt, tab) => { this.updateWindow(tab.ownerGlobal); }); let install = this.extension.startupReason === "ADDON_INSTALL"; for (let window of windowTracker.browserWindows()) { this.updateWindow(window); let {SidebarUI} = window;
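Clearing `SidebarUI.lastOpenedId` moves out of `onShutdown()` (which only runs when the extension is active) into a static `onUninstall(id)` that the new "uninstall" event can call with just the add-on id. A small sketch of the id mapping the handler relies on, assuming `makeWidgetId` sanitizes the add-on id the same way ext-sidebarAction.js does when it first builds the sidebar:

```js
// Hypothetical illustration of the sidebar id compared against SidebarUI.lastOpenedId.
const addonId = "my-extension@example.com";            // assumed example id
const sidebarId = `${makeWidgetId(addonId)}-sidebar-action`;
// e.g. "my-extension_example_com-sidebar-action"
```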
--- a/browser/components/extensions/parent/ext-url-overrides.js +++ b/browser/components/extensions/parent/ext-url-overrides.js @@ -88,67 +88,57 @@ ExtensionParent.apiManager.on("extension extensionId = (item && item.id) || setting.id; url = item && (item.value || item.initialValue); } } setNewTabURL(extensionId, url); }); this.urlOverrides = class extends ExtensionAPI { - static onUninstall(id) { + static async onDisable(id) { + newTabPopup.clearConfirmation(id); + await ExtensionSettingsStore.initialize(); + if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) { + ExtensionSettingsStore.disable(id, STORE_TYPE, NEW_TAB_SETTING_NAME); + } + } + + static async onUninstall(id) { // TODO: This can be removed once bug 1438364 is fixed and all data is cleaned up. newTabPopup.clearConfirmation(id); + + await ExtensionSettingsStore.initialize(); + if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) { + ExtensionSettingsStore.removeSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME); + } } - processNewTabSetting(action) { - let {extension} = this; - ExtensionSettingsStore[action](extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME); + static async onUpdate(id, manifest) { + if (!manifest.chrome_url_overrides || + !manifest.chrome_url_overrides.newtab) { + await ExtensionSettingsStore.initialize(); + if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) { + ExtensionSettingsStore.removeSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME); + } + } } async onManifestEntry(entryName) { let {extension} = this; let {manifest} = extension; await ExtensionSettingsStore.initialize(); if (manifest.chrome_url_overrides.newtab) { - // Set up the shutdown code for the setting. - extension.callOnClose({ - close: () => { - switch (extension.shutdownReason) { - case "ADDON_DISABLE": - this.processNewTabSetting("disable"); - newTabPopup.clearConfirmation(extension.id); - break; - - // We can remove the setting on upgrade or downgrade because it will be - // added back in when the manifest is re-read. This will cover the case - // where a new version of an add-on removes the manifest key. - case "ADDON_DOWNGRADE": - case "ADDON_UPGRADE": - case "ADDON_UNINSTALL": - this.processNewTabSetting("removeSetting"); - break; - } - }, - }); - let url = extension.baseURI.resolve(manifest.chrome_url_overrides.newtab); let item = await ExtensionSettingsStore.addSetting( extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME, url, () => aboutNewTabService.newTabURL); - // If the extension was just re-enabled, change the setting to enabled. - // This is required because addSetting above is used for both add and update. - if (["ADDON_ENABLE", "ADDON_UPGRADE", "ADDON_DOWNGRADE"] - .includes(extension.startupReason)) { - item = ExtensionSettingsStore.enable(extension.id, STORE_TYPE, NEW_TAB_SETTING_NAME); - } - // Set the newTabURL to the current value of the setting. if (item) { setNewTabURL(item.id, item.value || item.initialValue); } // We need to monitor permission change and update the preferences. // eslint-disable-next-line mozilla/balanced-listeners extension.on("add-permissions", async (ignoreEvent, permissions) => {
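The url-overrides module now does all of its cleanup through ExtensionSettingsStore from static handlers, so disable, uninstall, and manifest updates can be handled without the extension ever starting. The common pattern shared by the three handlers above, condensed into one helper — this helper does not exist in the patch, it only restates the shape of the diffed code:

```js
// Condensed from the static onDisable/onUninstall/onUpdate handlers:
// initialize the store, check for the setting, then disable or remove it.
async function clearNewTabSetting(id, action /* "disable" | "removeSetting" */) {
  await ExtensionSettingsStore.initialize();
  if (ExtensionSettingsStore.hasSetting(id, STORE_TYPE, NEW_TAB_SETTING_NAME)) {
    ExtensionSettingsStore[action](id, STORE_TYPE, NEW_TAB_SETTING_NAME);
  }
}
// onDisable(id)               -> clearNewTabSetting(id, "disable")
// onUninstall(id) / onUpdate  -> clearNewTabSetting(id, "removeSetting")
```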
--- a/browser/extensions/formautofill/api.js +++ b/browser/extensions/formautofill/api.js @@ -125,18 +125,18 @@ this.formautofill = class extends Extens // Listen for the autocomplete popup message to lazily append our stylesheet related to the popup. Services.mm.addMessageListener("FormAutoComplete:MaybeOpenPopup", onMaybeOpenPopup); formAutofillParent.init().catch(Cu.reportError); Services.mm.loadFrameScript("chrome://formautofill/content/FormAutofillFrameScript.js", true, true); } - onShutdown(reason) { - if (reason == "APP_SHUTDOWN") { + onShutdown(isAppShutdown) { + if (isAppShutdown) { return; } resProto.setSubstitution(RESOURCE_HOST, null); this.chromeHandle.destruct(); this.chromeHandle = null;
--- a/browser/extensions/report-site-issue/experimentalAPIs/l10n.js +++ b/browser/extensions/report-site-issue/experimentalAPIs/l10n.js @@ -11,18 +11,18 @@ var {Services} = ChromeUtils.import("res XPCOMUtils.defineLazyGetter(this, "l10nStrings", function() { return Services.strings.createBundle( "chrome://webcompat-reporter/locale/webcompat.properties"); }); let l10nManifest; this.l10n = class extends ExtensionAPI { - onShutdown(reason) { - if (reason !== "APP_SHUTDOWN" && l10nManifest) { + onShutdown(isAppShutdown) { + if (!isAppShutdown && l10nManifest) { Components.manager.removeBootstrappedManifestLocation(l10nManifest); } } getAPI(context) { // Until we move to Fluent (bug 1446164), we're stuck with // chrome.manifest for handling localization since its what the // build system can handle for localized repacks. if (context.extension.rootURI instanceof Ci.nsIJARURI) {
--- a/devtools/server/actors/object.js +++ b/devtools/server/actors/object.js @@ -133,16 +133,20 @@ const proto = { // FF40+: Allow to know how many properties an object has to lazily display them // when there is a bunch. if (isTypedArray(g)) { // Bug 1348761: getOwnPropertyNames is unnecessary slow on TypedArrays g.ownPropertyLength = getArrayLength(this.obj); } else if (isStorage(g)) { g.ownPropertyLength = getStorageLength(this.obj); + } else if (isReplaying) { + // When replaying we can get the number of properties directly, to avoid + // needing to enumerate all of them. + g.ownPropertyLength = this.obj.getOwnPropertyNamesCount(); } else { try { g.ownPropertyLength = this.obj.getOwnPropertyNames().length; } catch (err) { // The above can throw when the debuggee does not subsume the object's // compartment, or for some WrappedNatives like Cu.Sandbox. } } @@ -330,16 +334,23 @@ const proto = { let obj = this.obj; let level = 0, i = 0; // Do not search safe getters in unsafe objects. if (!DevToolsUtils.isSafeDebuggerObject(obj)) { return safeGetterValues; } + // Do not search for safe getters while replaying. While this would be nice + // to support, it involves a lot of back-and-forth between processes and + // would be better to do entirely in the replaying process. + if (isReplaying) { + return safeGetterValues; + } + // Most objects don't have any safe getters but inherit some from their // prototype. Avoid calling getOwnPropertyNames on objects that may have // many properties like Array, strings or js objects. That to avoid // freezing firefox when doing so. if (isArray(this.obj) || ["Object", "String"].includes(this.obj.class)) { obj = obj.proto; level++; }
--- a/devtools/server/actors/object/previewers.js +++ b/devtools/server/actors/object/previewers.js @@ -372,16 +372,20 @@ function GenericObject(objectActor, grip if (ObjectUtils.isStorage(obj)) { // local and session storage cannot be iterated over using // Object.getOwnPropertyNames() because it skips keys that are duplicated // on the prototype e.g. "key", "getKeys" so we need to gather the real // keys using the storage.key() function. for (let j = 0; j < rawObj.length; j++) { names.push(rawObj.key(j)); } + } else if (isReplaying) { + // When replaying we can access a batch of properties for use in generating + // the preview. This avoids needing to enumerate all properties. + names = obj.getEnumerableOwnPropertyNamesForPreview(); } else { names = obj.getOwnPropertyNames(); } symbols = obj.getOwnPropertySymbols(); } catch (ex) { // Calling getOwnPropertyNames() on some wrapped native prototypes is not // allowed: "cannot modify properties of a WrappedNative". See bug 952093. } @@ -776,16 +780,22 @@ previewers.Object = [ function PseudoArray({obj, hooks}, grip, rawObj) { // An object is considered a pseudo-array if all the following apply: // - All its properties are array indices except, optionally, a "length" property. // - At least it has the "0" array index. // - The array indices are consecutive. // - The value of "length", if present, is the number of array indices. + // Don't generate pseudo array previews when replaying. We don't want to + // have to enumerate all the properties in order to determine this. + if (isReplaying) { + return false; + } + let keys; try { keys = obj.getOwnPropertyNames(); } catch (err) { // The above can throw when the debuggee does not subsume the object's // compartment, or for some WrappedNatives like Cu.Sandbox. return false; }
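Both object.js and previewers.js now short-circuit when `isReplaying` is true, asking the ReplayDebugger object for precomputed counts and name batches instead of enumerating every property over the replay protocol. A minimal sketch of the guard pattern — method names come from the diff, the surrounding actor plumbing is elided and the wrapper function is an illustration only:

```js
// Prefer replay-side batch data when available; fall back to full enumeration.
function ownPropertyInfo(obj, isReplaying) {
  if (isReplaying) {
    return {
      // Property count without enumerating every property in the replaying process.
      length: obj.getOwnPropertyNamesCount(),
      // Batch of enumerable names captured for preview generation.
      previewNames: obj.getEnumerableOwnPropertyNamesForPreview(),
    };
  }
  const names = obj.getOwnPropertyNames();
  return { length: names.length, previewNames: names };
}
```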
--- a/devtools/server/actors/replay/debugger.js +++ b/devtools/server/actors/replay/debugger.js @@ -189,31 +189,33 @@ ReplayDebugger.prototype = { this._ensurePaused(); this._setResume(() => { this._direction = forward ? Direction.FORWARD : Direction.BACKWARD; dumpv("Resuming " + this._direction); this._control.resume(forward); if (this._paused) { // If we resume and immediately pause, we are at an endpoint of the // recording. Force the thread to pause. + this._capturePauseData(); this.replayingOnForcedPause(this.getNewestFrame()); } }); }, replayTimeWarp(target) { this._ensurePaused(); this._setResume(() => { this._direction = Direction.NONE; dumpv("Warping " + JSON.stringify(target)); this._control.timeWarp(target); // timeWarp() doesn't return until the child has reached the target of // the warp, after which we force the thread to pause. assert(this._paused); + this._capturePauseData(); this.replayingOnForcedPause(this.getNewestFrame()); }); }, replayPause() { this._ensurePaused(); // Cancel any pending resume. @@ -347,16 +349,58 @@ ReplayDebugger.prototype = { _invalidateAfterUnpause() { this._frames.forEach(frame => frame._invalidate()); this._frames.length = 0; this._objects.forEach(obj => obj._invalidate()); this._objects.length = 0; }, + // Fill in the debugger with (hopefully) all data the client/server need to + // pause at the current location. + _capturePauseData() { + if (this._frames.length) { + return; + } + + const pauseData = this._sendRequestAllowDiverge({ type: "pauseData" }); + if (!pauseData.frames) { + return; + } + + for (const data of Object.values(pauseData.scripts)) { + this._addScript(data); + } + + for (const { scriptId, offset, metadata} of pauseData.offsetMetadata) { + if (this._scripts[scriptId]) { + const script = this._getScript(scriptId); + script._addOffsetMetadata(offset, metadata); + } + } + + for (const { data, preview } of Object.values(pauseData.objects)) { + if (!this._objects[data.id]) { + this._addObject(data); + } + this._getObject(data.id)._preview = preview; + } + + for (const { data, names } of Object.values(pauseData.environments)) { + if (!this._objects[data.id]) { + this._addObject(data); + } + this._getObject(data.id)._names = names; + } + + for (const frame of pauseData.frames) { + this._frames[frame.index] = new ReplayDebuggerFrame(this, frame); + } + }, + ///////////////////////////////////////////////////////// // Search management ///////////////////////////////////////////////////////// _forEachSearch(callback) { for (const { position } of this._searches) { callback(position); } @@ -551,30 +595,34 @@ ReplayDebugger.prototype = { ///////////////////////////////////////////////////////// // Object methods ///////////////////////////////////////////////////////// _getObject(id) { if (id && !this._objects[id]) { const data = this._sendRequest({ type: "getObject", id }); - switch (data.kind) { - case "Object": - this._objects[id] = new ReplayDebuggerObject(this, data); - break; - case "Environment": - this._objects[id] = new ReplayDebuggerEnvironment(this, data); - break; - default: - ThrowError("Unknown object kind"); - } + this._addObject(data); } return this._objects[id]; }, + _addObject(data) { + switch (data.kind) { + case "Object": + this._objects[data.id] = new ReplayDebuggerObject(this, data); + break; + case "Environment": + this._objects[data.id] = new ReplayDebuggerEnvironment(this, data); + break; + default: + ThrowError("Unknown object kind"); + } + }, + // Convert a value we received from the child. 
_convertValue(value) { if (isNonNullObject(value)) { if (value.object) { return this._getObject(value.object); } if (value.snapshot) { return new ReplayDebuggerObjectSnapshot(this, value.snapshot); @@ -689,18 +737,20 @@ ReplayDebugger.prototype = { get replayingOnPopFrame() { return this._searchBreakpoints(({position, data}) => { return (position.kind == "OnPop" && !position.script) ? data : null; }); }, set replayingOnPopFrame(handler) { if (handler) { - this._setBreakpoint(() => { handler.call(this, this.getNewestFrame()); }, - { kind: "OnPop" }, handler); + this._setBreakpoint(() => { + this._capturePauseData(); + handler.call(this, this.getNewestFrame()); + }, { kind: "OnPop" }, handler); } else { this._clearMatchingBreakpoints(({position}) => { return position.kind == "OnPop" && !position.script; }); } }, getNewConsoleMessage() { @@ -722,16 +772,17 @@ ReplayDebugger.prototype = { /////////////////////////////////////////////////////////////////////////////// // ReplayDebuggerScript /////////////////////////////////////////////////////////////////////////////// function ReplayDebuggerScript(dbg, data) { this._dbg = dbg; this._data = data; + this._offsetMetadata = []; } ReplayDebuggerScript.prototype = { get displayName() { return this._data.displayName; }, get url() { return this._data.url; }, get startLine() { return this._data.startLine; }, get lineCount() { return this._data.lineCount; }, get source() { return this._dbg._getSource(this._data.sourceId); }, @@ -744,28 +795,39 @@ ReplayDebuggerScript.prototype = { return this._dbg._sendRequest({ type, id: this._data.id, value }); }, getLineOffsets(line) { return this._forward("getLineOffsets", line); }, getOffsetLocation(pc) { return this._forward("getOffsetLocation", pc); }, getSuccessorOffsets(pc) { return this._forward("getSuccessorOffsets", pc); }, getPredecessorOffsets(pc) { return this._forward("getPredecessorOffsets", pc); }, getAllColumnOffsets() { return this._forward("getAllColumnOffsets"); }, - getOffsetMetadata(pc) { return this._forward("getOffsetMetadata", pc); }, getPossibleBreakpoints(query) { return this._forward("getPossibleBreakpoints", query); }, getPossibleBreakpointOffsets(query) { return this._forward("getPossibleBreakpointOffsets", query); }, + getOffsetMetadata(pc) { + if (!this._offsetMetadata[pc]) { + this._addOffsetMetadata(pc, this._forward("getOffsetMetadata", pc)); + } + return this._offsetMetadata[pc]; + }, + + _addOffsetMetadata(pc, metadata) { + this._offsetMetadata[pc] = metadata; + }, + setBreakpoint(offset, handler) { - this._dbg._setBreakpoint(() => { handler.hit(this._dbg.getNewestFrame()); }, - { kind: "Break", script: this._data.id, offset }, - handler); + this._dbg._setBreakpoint(() => { + this._dbg._capturePauseData(); + handler.hit(this._dbg.getNewestFrame()); + }, { kind: "Break", script: this._data.id, offset }, handler); }, clearBreakpoint(handler) { this._dbg._clearMatchingBreakpoints(({position, data}) => { return position.script == this._data.id && handler == data; }); }, @@ -862,35 +924,38 @@ ReplayDebuggerFrame.prototype = { set onStep(handler) { // Use setReplayingOnStep or replayClearSteppingHooks instead. 
NotAllowed(); }, setReplayingOnStep(handler, offsets) { offsets.forEach(offset => { - this._dbg._setBreakpoint( - () => { handler.call(this._dbg.getNewestFrame()); }, - { kind: "OnStep", - script: this._data.script, - offset, - frameIndex: this._data.index }, - handler); + this._dbg._setBreakpoint(() => { + this._dbg._capturePauseData(); + handler.call(this._dbg.getNewestFrame()); + }, { + kind: "OnStep", + script: this._data.script, + offset, + frameIndex: this._data.index, + }, handler); }); }, get onPop() { return this._dbg._searchBreakpoints(({position, data}) => { return this._positionMatches(position, "OnPop") ? data : null; }); }, set onPop(handler) { if (handler) { this._dbg._setBreakpoint(() => { + this._dbg._capturePauseData(); const result = this._dbg._sendRequest({ type: "popFrameResult" }); handler.call(this._dbg.getNewestFrame(), this._dbg._convertCompletionValue(result)); }, { kind: "OnPop", script: this._data.script, frameIndex: this._data.index }, handler); } else { // Use replayClearSteppingHooks instead. @@ -912,25 +977,25 @@ ReplayDebuggerFrame.prototype = { /////////////////////////////////////////////////////////////////////////////// // ReplayDebuggerObject /////////////////////////////////////////////////////////////////////////////// function ReplayDebuggerObject(dbg, data) { this._dbg = dbg; this._data = data; + this._preview = null; this._properties = null; - this._proxyData = null; } ReplayDebuggerObject.prototype = { _invalidate() { this._data = null; + this._preview = null; this._properties = null; - this._proxyData = null; }, get callable() { return this._data.callable; }, get isBoundFunction() { return this._data.isBoundFunction; }, get isArrowFunction() { return this._data.isArrowFunction; }, get isGeneratorFunction() { return this._data.isGeneratorFunction; }, get isAsyncFunction() { return this._data.isAsyncFunction; }, get class() { return this._data.class; }, @@ -951,80 +1016,92 @@ ReplayDebuggerObject.prototype = { return null; }, getOwnPropertyNames() { this._ensureProperties(); return Object.keys(this._properties); }, + getEnumerableOwnPropertyNamesForPreview() { + if (this._preview) { + return Object.keys(this._preview.enumerableOwnProperties); + } + return this.getOwnPropertyNames(); + }, + + getOwnPropertyNamesCount() { + if (this._preview) { + return this._preview.ownPropertyNamesCount; + } + return this.getOwnPropertyNames().length; + }, + getOwnPropertySymbols() { // Symbol properties are not handled yet. return []; }, getOwnPropertyDescriptor(name) { + if (this._preview) { + if (this._preview.enumerableOwnProperties) { + const desc = this._preview.enumerableOwnProperties[name]; + if (desc) { + return this._convertPropertyDescriptor(desc); + } + } + if (name == "length") { + return this._convertPropertyDescriptor(this._preview.lengthProperty); + } + if (name == "displayName") { + return this._convertPropertyDescriptor(this._preview.displayNameProperty); + } + } this._ensureProperties(); - const desc = this._properties[name]; - return desc ? 
this._convertPropertyDescriptor(desc) : undefined; + return this._convertPropertyDescriptor(this._properties[name]); }, _ensureProperties() { if (!this._properties) { const id = this._data.id; - const properties = + this._properties = this._dbg._sendRequestAllowDiverge({ type: "getObjectProperties", id }); - this._properties = Object.create(null); - properties.forEach(({name, desc}) => { this._properties[name] = desc; }); } }, _convertPropertyDescriptor(desc) { + if (!desc) { + return undefined; + } const rv = Object.assign({}, desc); if ("value" in desc) { rv.value = this._dbg._convertValue(desc.value); } if ("get" in desc) { rv.get = this._dbg._getObject(desc.get); } if ("set" in desc) { rv.set = this._dbg._getObject(desc.set); } return rv; }, - _ensureProxyData() { - if (!this._proxyData) { - const data = this._dbg._sendRequestAllowDiverge({ - type: "objectProxyData", - id: this._data.id, - }); - if (data.exception) { - throw new Error(data.exception); - } - this._proxyData = data; - } - }, - unwrap() { if (!this.isProxy) { return this; } - this._ensureProxyData(); - return this._dbg._convertValue(this._proxyData.unwrapped); + return this._dbg._convertValue(this._data.proxyUnwrapped); }, get proxyTarget() { - this._ensureProxyData(); - return this._dbg._convertValue(this._proxyData.target); + return this._dbg._convertValue(this._data.proxyTarget); }, get proxyHandler() { - this._ensureProxyData(); - return this._dbg._convertValue(this._proxyData.handler); + return this._dbg._convertValue(this._data.proxyHandler); }, get boundTargetFunction() { if (this.isBoundFunction) { return this._dbg._getObject(this._data.boundTargetFunction); } return undefined; },
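The new `_capturePauseData()` front-loads a single "pauseData" request whenever the debugger pauses (forced pauses, time warps, breakpoints, stepping, and frame pops) and seeds the script, object-preview, environment, and frame caches, so later queries can usually be answered locally. A rough sketch of the intended flow — simplified, not the actual protocol code:

```js
// Simplified flow: one { type: "pauseData" } request replaces many small
// getObject / getFrame / getOffsetMetadata requests to the replaying child.
function onPause(dbg) {
  dbg._capturePauseData();            // fills dbg._frames, dbg._objects, previews
  const frame = dbg.getNewestFrame(); // now served from the local frame cache
  // Objects referenced by the pause data carry a _preview, so queries such as
  // getOwnPropertyNamesCount() are answered without another round trip.
  return frame;
}
```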
--- a/devtools/server/actors/replay/replay.js +++ b/devtools/server/actors/replay/replay.js @@ -208,17 +208,17 @@ dbg.onNewScript = function(script) { /////////////////////////////////////////////////////////////////////////////// // Snapshots are generated for objects that might be inspected at times when we // are not paused at the point where the snapshot was originally taken. The // snapshot data is provided to the server, which can use it to provide limited // answers to the client about the object's contents, without having to consult // a child process. -function snapshotObjectProperty({ name, desc }) { +function snapshotObjectProperty([ name, desc ]) { // Only capture primitive properties in object snapshots. if ("value" in desc && !convertedValueIsObject(desc.value)) { return { name, desc }; } return { name, desc: { value: "<unavailable>" } }; } function makeObjectSnapshot(object) { @@ -238,17 +238,17 @@ function makeObjectSnapshot(object) { class: object.class, name: object.name, displayName: object.displayName, parameterNames: object.parameterNames, isProxy: object.isProxy, isExtensible: object.isExtensible(), isSealed: object.isSealed(), isFrozen: object.isFrozen(), - properties: getObjectProperties(object).map(snapshotObjectProperty), + properties: Object.entries(getObjectProperties(object)).map(snapshotObjectProperty), }; } /////////////////////////////////////////////////////////////////////////////// // Console Message State /////////////////////////////////////////////////////////////////////////////// const gConsoleMessages = []; @@ -619,62 +619,309 @@ function getSourceData(id) { sourceMapURL: source.sourceMapURL, }; } function forwardToScript(name) { return request => gScripts.getObject(request.id)[name](request.value); } +function getFrameData(index) { + const frame = scriptFrameForIndex(index); + + let _arguments = null; + if (frame.arguments) { + _arguments = []; + for (let i = 0; i < frame.arguments.length; i++) { + _arguments.push(convertValue(frame.arguments[i])); + } + } + + return { + index, + type: frame.type, + callee: getObjectId(frame.callee), + environment: getObjectId(frame.environment), + generator: frame.generator, + constructing: frame.constructing, + this: convertValue(frame.this), + script: gScripts.getId(frame.script), + offset: frame.offset, + arguments: _arguments, + }; +} + function unknownObjectProperties(why) { return [{ name: "Unknown properties", desc: { value: why, enumerable: true, }, }]; } +function getObjectData(id) { + const object = gPausedObjects.getObject(id); + if (object instanceof Debugger.Object) { + const rv = { + id, + kind: "Object", + callable: object.callable, + isBoundFunction: object.isBoundFunction, + isArrowFunction: object.isArrowFunction, + isGeneratorFunction: object.isGeneratorFunction, + isAsyncFunction: object.isAsyncFunction, + proto: getObjectId(object.proto), + class: object.class, + name: object.name, + displayName: object.displayName, + parameterNames: object.parameterNames, + script: gScripts.getId(object.script), + environment: getObjectId(object.environment), + isProxy: object.isProxy, + isExtensible: object.isExtensible(), + isSealed: object.isSealed(), + isFrozen: object.isFrozen(), + }; + if (rv.isBoundFunction) { + rv.boundTargetFunction = getObjectId(object.boundTargetFunction); + rv.boundThis = convertValue(object.boundThis); + rv.boundArguments = getObjectId(makeDebuggeeValue(object.boundArguments)); + } + if (rv.isProxy) { + rv.proxyUnwrapped = convertValue(object.unwrap()); + rv.proxyTarget = 
convertValue(object.proxyTarget); + rv.proxyHandler = convertValue(object.proxyHandler); + } + return rv; + } + if (object instanceof Debugger.Environment) { + return { + id, + kind: "Environment", + type: object.type, + parent: getObjectId(object.parent), + object: object.type == "declarative" ? 0 : getObjectId(object.object), + callee: getObjectId(object.callee), + optimizedOut: object.optimizedOut, + }; + } + throw new Error("Unknown object kind"); +} + function getObjectProperties(object) { let names; try { names = object.getOwnPropertyNames(); } catch (e) { return unknownObjectProperties(e.toString()); } - return names.map(name => { + const rv = Object.create(null); + names.forEach(name => { let desc; try { desc = object.getOwnPropertyDescriptor(name); } catch (e) { - return { name, desc: { value: "Unknown: " + e, enumerable: true } }; + desc = { name, desc: { value: "Unknown: " + e, enumerable: true } }; } if ("value" in desc) { desc.value = convertValue(desc.value); } if ("get" in desc) { desc.get = getObjectId(desc.get); } if ("set" in desc) { desc.set = getObjectId(desc.set); } - return { name, desc }; + rv[name] = desc; }); + return rv; +} + +function getEnvironmentNames(env) { + try { + const names = env.names(); + + return names.map(name => { + return { name, value: convertValue(env.getVariable(name)) }; + }); + } catch (e) { + return [{name: "Unknown names", + value: "Exception thrown in getEnvironmentNames" }]; + } } function getWindow() { // Hopefully there is exactly one window in this enumerator. for (const window of Services.ww.getWindowEnumerator()) { return window; } return null; } +// Maximum number of properties the server is interested in when previewing an +// object. +const OBJECT_PREVIEW_MAX_ITEMS = 10; + +// When the replaying process pauses, the server needs to inspect a lot of state +// around frames, objects, etc. in order to fill in all the information the +// client needs to update the UI for the pause location. Done naively, this +// inspection requires a lot of back and forth with the replaying process to +// get all this data. This is bad for performance, and especially so if the +// replaying process is on a different machine from the server. Instead, the +// debugger running in the server can request a pause data packet which includes +// everything the server will need. +// +// This should avoid overapproximation, so that we can quickly send pause data +// across a network connection, and especially should not underapproximate +// as the server will end up needing to make more requests before the client can +// finish pausing. +function getPauseData() { + const numFrames = countScriptFrames(); + if (!numFrames) { + return {}; + } + + const rv = { + frames: [], + scripts: {}, + offsetMetadata: [], + objects: {}, + environments: {}, + }; + + function addValue(value, includeProperties) { + if (value && typeof value == "object" && value.object) { + addObject(value.object, includeProperties); + } + } + + function addObject(id, includeProperties) { + if (!id) { + return; + } + + // If includeProperties is set then previewing the object requires knowledge + // of its enumerable properties. 
+ const needObject = !rv.objects[id]; + const needProperties = + includeProperties && + (needObject || !rv.objects[id].preview.enumerableOwnProperties); + + if (!needObject && !needProperties) { + return; + } + + const object = gPausedObjects.getObject(id); + assert(object instanceof Debugger.Object); + + const properties = getObjectProperties(object); + const propertyEntries = Object.entries(properties); + + if (needObject) { + rv.objects[id] = { + data: getObjectData(id), + preview: { + ownPropertyNamesCount: propertyEntries.length, + }, + }; + + const preview = rv.objects[id].preview; + + // Add some properties (if present) which the server might ask for + // even when it isn't interested in the rest of the properties. + if (properties.length) { + preview.lengthProperty = properties.length; + } + if (properties.displayName) { + preview.displayNameProperty = properties.displayName; + } + } + + if (needProperties) { + const preview = rv.objects[id].preview; + + // The server is only interested in enumerable properties, and at most + // OBJECT_PREVIEW_MAX_ITEMS of them. Limiting the properties we send to + // only those the server needs avoids having to send the contents of huge + // objects like Windows, most of which will not be used. + const enumerableOwnProperties = Object.create(null); + let enumerablePropertyCount = 0; + for (const [ name, desc ] of propertyEntries) { + if (desc.enumerable) { + enumerableOwnProperties[name] = desc; + addPropertyDescriptor(desc, false); + if (++enumerablePropertyCount == OBJECT_PREVIEW_MAX_ITEMS) { + break; + } + } + } + preview.enumerableOwnProperties = enumerableOwnProperties; + } + } + + function addPropertyDescriptor(desc, includeProperties) { + if (desc.value) { + addValue(desc.value, includeProperties); + } + if (desc.get) { + addObject(desc.get, includeProperties); + } + if (desc.set) { + addObject(desc.set, includeProperties); + } + } + + function addEnvironment(id) { + if (!id || rv.environments[id]) { + return; + } + + const env = gPausedObjects.getObject(id); + assert(env instanceof Debugger.Environment); + + const data = getObjectData(id); + const names = getEnvironmentNames(env); + rv.environments[id] = { data, names }; + + addEnvironment(data.parent); + } + + // eslint-disable-next-line no-shadow + function addScript(id) { + if (!rv.scripts[id]) { + rv.scripts[id] = getScriptData(id); + } + } + + for (let i = 0; i < numFrames; i++) { + const frame = getFrameData(i); + const script = gScripts.getObject(frame.script); + rv.frames.push(frame); + rv.offsetMetadata.push({ + scriptId: frame.script, + offset: frame.offset, + metadata: script.getOffsetMetadata(frame.offset), + }); + addScript(frame.script); + addValue(frame.this, true); + if (frame.arguments) { + for (const arg of frame.arguments) { + addValue(arg, true); + } + } + addObject(frame.callee, false); + addEnvironment(frame.environment, true); + } + + return rv; +} + /////////////////////////////////////////////////////////////////////////////// // Handlers /////////////////////////////////////////////////////////////////////////////// const gRequestHandlers = { repaint() { if (!RecordReplayControl.maybeDivergeFromRecording()) { @@ -728,80 +975,28 @@ const gRequestHandlers = { return sources; }, getSource(request) { return getSourceData(request.id); }, getObject(request) { - const object = gPausedObjects.getObject(request.id); - if (object instanceof Debugger.Object) { - const rv = { - id: request.id, - kind: "Object", - callable: object.callable, - isBoundFunction: 
object.isBoundFunction, - isArrowFunction: object.isArrowFunction, - isGeneratorFunction: object.isGeneratorFunction, - isAsyncFunction: object.isAsyncFunction, - proto: getObjectId(object.proto), - class: object.class, - name: object.name, - displayName: object.displayName, - parameterNames: object.parameterNames, - script: gScripts.getId(object.script), - environment: getObjectId(object.environment), - isProxy: object.isProxy, - isExtensible: object.isExtensible(), - isSealed: object.isSealed(), - isFrozen: object.isFrozen(), - }; - if (rv.isBoundFunction) { - rv.boundTargetFunction = getObjectId(object.boundTargetFunction); - rv.boundThis = convertValue(object.boundThis); - rv.boundArguments = getObjectId(makeDebuggeeValue(object.boundArguments)); - } - return rv; - } - if (object instanceof Debugger.Environment) { - return { - id: request.id, - kind: "Environment", - type: object.type, - parent: getObjectId(object.parent), - object: object.type == "declarative" ? 0 : getObjectId(object.object), - callee: getObjectId(object.callee), - optimizedOut: object.optimizedOut, - }; - } - throw new Error("Unknown object kind"); + return getObjectData(request.id); }, getObjectProperties(request) { if (!RecordReplayControl.maybeDivergeFromRecording()) { return unknownObjectProperties("Recording divergence in getObjectProperties"); } const object = gPausedObjects.getObject(request.id); return getObjectProperties(object); }, - objectProxyData(request) { - if (!RecordReplayControl.maybeDivergeFromRecording()) { - return { exception: "Recording divergence in unwrapObject" }; - } - const obj = gPausedObjects.getObject(request.id); - return { - unwrapped: convertValue(obj.unwrap()), - target: convertValue(obj.proxyTarget), - handler: convertValue(obj.proxyHandler), - }; - }, - objectApply(request) { if (!RecordReplayControl.maybeDivergeFromRecording()) { return { throw: "Recording divergence in objectApply" }; } const obj = gPausedObjects.getObject(request.id); const thisv = convertValueFromParent(request.thisv); const args = request.args.map(v => convertValueFromParent(v)); const rv = obj.apply(thisv, args); @@ -809,61 +1004,40 @@ const gRequestHandlers = { }, getEnvironmentNames(request) { if (!RecordReplayControl.maybeDivergeFromRecording()) { return [{name: "Unknown names", value: "Recording divergence in getEnvironmentNames" }]; } - try { - const env = gPausedObjects.getObject(request.id); - const names = env.names(); - - return names.map(name => { - return { name, value: convertValue(env.getVariable(name)) }; - }); - } catch (e) { - return [{name: "Unknown names", - value: "Exception thrown in getEnvironmentNames" }]; - } + const env = gPausedObjects.getObject(request.id); + return getEnvironmentNames(env); }, getFrame(request) { if (request.index == -1 /* NewestFrameIndex */) { const numFrames = countScriptFrames(); + if (!numFrames) { // Return an empty object when there are no frames. 
return {}; } request.index = numFrames - 1; } - const frame = scriptFrameForIndex(request.index); + return getFrameData(request.index); + }, - let _arguments = null; - if (frame.arguments) { - _arguments = []; - for (let i = 0; i < frame.arguments.length; i++) { - _arguments.push(convertValue(frame.arguments[i])); - } + pauseData(request) { + if (!RecordReplayControl.maybeDivergeFromRecording()) { + return { error: "Recording divergence in pauseData" }; } - return { - index: request.index, - type: frame.type, - callee: getObjectId(frame.callee), - environment: getObjectId(frame.environment), - generator: frame.generator, - constructing: frame.constructing, - this: convertValue(frame.this), - script: gScripts.getId(frame.script), - offset: frame.offset, - arguments: _arguments, - }; + return getPauseData(); }, getLineOffsets: forwardToScript("getLineOffsets"), getOffsetLocation: forwardToScript("getOffsetLocation"), getSuccessorOffsets: forwardToScript("getSuccessorOffsets"), getPredecessorOffsets: forwardToScript("getPredecessorOffsets"), getAllColumnOffsets: forwardToScript("getAllColumnOffsets"), getOffsetMetadata: forwardToScript("getOffsetMetadata"),
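On the replaying-process side, `getPauseData()` builds the packet that `_capturePauseData()` consumes in debugger.js. Roughly the shape of the reply, as implied by the producer above and its consumer — the keys come from the diff, the concrete values are illustrative only:

```js
// Illustrative shape of a pauseData reply (values are made up; keys match the diff).
const examplePauseData = {
  frames: [ /* getFrameData(i) for each script frame, oldest first */ ],
  scripts: { /* scriptId -> getScriptData(scriptId) */ },
  offsetMetadata: [ { scriptId: 3, offset: 42, metadata: { /* per-offset data */ } } ],
  objects: {
    // objectId -> { data: getObjectData(id),
    //               preview: { ownPropertyNamesCount,
    //                          enumerableOwnProperties?,   // capped at OBJECT_PREVIEW_MAX_ITEMS
    //                          lengthProperty?, displayNameProperty? } }
  },
  environments: { /* envId -> { data: getObjectData(id), names: getEnvironmentNames(env) } */ },
};
```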
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/wasm/funcref.js @@ -0,0 +1,144 @@ +// |jit-test| skip-if: !wasmReftypesEnabled() + +const {Module,Instance,Global,RuntimeError} = WebAssembly; + +const badWasmFunc = /can only pass WebAssembly exported functions to funcref/; +const typeErr = /type mismatch/; + + +// Validation: + +wasmEvalText(`(module (func (local anyref funcref) (local.set 0 (local.get 1))))`); +wasmEvalText(`(module (func (local funcref funcref) (local.set 0 (local.get 1))))`); +wasmEvalText(`(module (func (local funcref) (local.set 0 (ref.null))))`); +wasmFailValidateText(`(module (func (local funcref anyref) (local.set 0 (local.get 1))))`, typeErr); +wasmEvalText(`(module (global (mut funcref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`); +wasmEvalText(`(module (global (mut anyref) (ref.null)) (func (param funcref) (global.set 0 (local.get 0))))`); +wasmFailValidateText(`(module (global (mut funcref) (ref.null)) (func (param anyref) (global.set 0 (local.get 0))))`, typeErr); +wasmEvalText(`(module (func (param funcref)) (func (param funcref) (call 0 (local.get 0))))`); +wasmEvalText(`(module (func (param anyref)) (func (param funcref) (call 0 (local.get 0))))`); +wasmFailValidateText(`(module (func (param funcref)) (func (param anyref) (call 0 (local.get 0))))`, typeErr); +wasmEvalText(`(module (func (param funcref) (result funcref) (block funcref (local.get 0) (br 0))))`); +wasmEvalText(`(module (func (param funcref) (result anyref) (block anyref (local.get 0) (br 0))))`); +wasmFailValidateText(`(module (func (param anyref) (result anyref) (block funcref (local.get 0) (br 0))))`, typeErr); +wasmEvalText(`(module (func (param funcref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`); +wasmEvalText(`(module (func (param anyref funcref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`); +wasmEvalText(`(module (func (param funcref anyref) (result anyref) (select (local.get 0) (local.get 1) (i32.const 0))))`); +wasmFailValidateText(`(module (func (param anyref funcref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr); +wasmFailValidateText(`(module (func (param funcref anyref) (result funcref) (select (local.get 0) (local.get 1) (i32.const 0))))`, typeErr); + + +// Runtime: + +var m = new Module(wasmTextToBinary(`(module (func (export "wasmFun")))`)); +const wasmFun1 = new Instance(m).exports.wasmFun; +const wasmFun2 = new Instance(m).exports.wasmFun; +const wasmFun3 = new Instance(m).exports.wasmFun; + +var run = wasmEvalText(`(module + (global (mut funcref) (ref.null)) + (func (param $x funcref) (param $test i32) (result funcref) + local.get $x + global.get 0 + local.get $test + select + ) + (func (export "run") (param $a funcref) (param $b funcref) (param $c funcref) (param $test1 i32) (param $test2 i32) (result funcref) + local.get $a + global.set 0 + block funcref + local.get $b + local.get $test1 + br_if 0 + drop + local.get $c + end + local.get $test2 + call 0 + ) +)`).exports.run; +assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, false), wasmFun1); +assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, false), wasmFun1); +assertEq(run(wasmFun1, wasmFun2, wasmFun3, true, true), wasmFun2); +assertEq(run(wasmFun1, wasmFun2, wasmFun3, false, true), wasmFun3); + +var run = wasmEvalText(`(module + (type $t0 (func (param anyref) (result anyref))) + (type $t1 (func (param funcref) (result anyref))) + (type $t2 (func (param anyref) (result funcref))) + (type 
$t3 (func (param funcref funcref) (result funcref))) + (func $f0 (type $t0) ref.null) + (func $f1 (type $t1) ref.null) + (func $f2 (type $t2) ref.null) + (func $f3 (type $t3) ref.null) + (table funcref (elem $f0 $f1 $f2 $f3)) + (func (export "run") (param i32 i32) (result anyref) + block $b3 block $b2 block $b1 block $b0 + local.get 0 + br_table $b0 $b1 $b2 $b3 + end $b0 + ref.null + local.get 1 + call_indirect $t0 + return + end $b1 + ref.null + local.get 1 + call_indirect $t1 + return + end $b2 + ref.null + local.get 1 + call_indirect $t2 + return + end $b3 + ref.null + ref.null + local.get 1 + call_indirect $t3 + return + ) +)`).exports.run; + +for (var i = 0; i < 4; i++) { + for (var j = 0; j < 4; j++) { + if (i == j) + assertEq(run(i, j), null); + else + assertErrorMessage(() => run(i, j), RuntimeError, /indirect call signature mismatch/); + } +} + + +// JS API: + +const wasmFun = wasmEvalText(`(module (func (export "x")))`).exports.x; + +var run = wasmEvalText(`(module (func (export "run") (param funcref) (result funcref) (local.get 0)))`).exports.run; +assertEq(run(wasmFun), wasmFun); +assertEq(run(null), null); +assertErrorMessage(() => run(() => {}), TypeError, badWasmFunc); + +var importReturnValue; +var importFun = () => importReturnValue; +var run = wasmEvalText(`(module (func (import "" "i") (result funcref)) (func (export "run") (result funcref) (call 0)))`, {'':{i:importFun}}).exports.run; +importReturnValue = wasmFun; +assertEq(run(), wasmFun); +importReturnValue = null; +assertEq(run(), null); +importReturnValue = undefined; +assertErrorMessage(() => run(), TypeError, badWasmFunc); +importReturnValue = () => {}; +assertErrorMessage(() => run(), TypeError, badWasmFunc); + +var g = new Global({value:'funcref', mutable:true}, wasmFun); +assertEq(g.value, wasmFun); +g.value = null; +assertEq(g.value, null); +Math.sin(); +assertErrorMessage(() => g.value = () => {}, TypeError, badWasmFunc); +var g = new Global({value:'funcref', mutable:true}, null); +assertEq(g.value, null); +g.value = wasmFun; +assertEq(g.value, wasmFun); +assertErrorMessage(() => new Global({value:'funcref'}, () => {}), TypeError, badWasmFunc);
--- a/js/src/jit-test/tests/wasm/tables.js +++ b/js/src/jit-test/tests/wasm/tables.js @@ -1,15 +1,17 @@ const Module = WebAssembly.Module; const Instance = WebAssembly.Instance; const Table = WebAssembly.Table; const Memory = WebAssembly.Memory; const LinkError = WebAssembly.LinkError; const RuntimeError = WebAssembly.RuntimeError; +const badFuncRefError = /can only pass WebAssembly exported functions to funcref/; + var callee = i => `(func $f${i} (result i32) (i32.const ${i}))`; wasmFailValidateText(`(module (elem (i32.const 0) $f0) ${callee(0)})`, /elem segment requires a table section/); wasmFailValidateText(`(module (table 10 funcref) (elem (i32.const 0) 0))`, /table element out of range/); wasmFailValidateText(`(module (table 10 funcref) (func) (elem (i32.const 0) 0 1))`, /table element out of range/); wasmFailValidateText(`(module (table 10 funcref) (func) (elem (f32.const 0) 0) ${callee(0)})`, /type mismatch/); assertErrorMessage(() => wasmEvalText(`(module (table 10 funcref) (elem (i32.const 10) $f0) ${callee(0)})`), LinkError, /elem segment does not fit/); @@ -117,18 +119,18 @@ tbl.set(1, e2.g); tbl.set(2, e3.h); var e4 = wasmEvalText(`(module (import "a" "b" (table 3 funcref)) ${caller})`, {a:{b:tbl}}).exports; assertEq(e4.call(0), 42); assertErrorMessage(() => e4.call(1), RuntimeError, /indirect call signature mismatch/); assertEq(e4.call(2), 13); var asmjsFun = (function() { "use asm"; function f() {} return f })(); assertEq(isAsmJSFunction(asmjsFun), isAsmJSCompilationAvailable()); -assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, /can only assign WebAssembly exported functions/); -assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, /bad initializer to funcref table/); +assertErrorMessage(() => tbl.set(0, asmjsFun), TypeError, badFuncRefError); +assertErrorMessage(() => tbl.grow(1, asmjsFun), TypeError, badFuncRefError); var m = new Module(wasmTextToBinary(`(module (type $i2i (func (param i32) (result i32))) (import "a" "mem" (memory 1)) (import "a" "tbl" (table 10 funcref)) (import $imp "a" "imp" (result i32)) (func $call (param $i i32) (result i32) (i32.add
--- a/js/src/jit/AliasAnalysis.cpp +++ b/js/src/jit/AliasAnalysis.cpp @@ -174,17 +174,16 @@ static inline const MDefinition* GetObje case MDefinition::Opcode::WasmStore: case MDefinition::Opcode::WasmCompareExchangeHeap: case MDefinition::Opcode::WasmAtomicBinopHeap: case MDefinition::Opcode::WasmAtomicExchangeHeap: case MDefinition::Opcode::WasmLoadGlobalVar: case MDefinition::Opcode::WasmLoadGlobalCell: case MDefinition::Opcode::WasmStoreGlobalVar: case MDefinition::Opcode::WasmStoreGlobalCell: - case MDefinition::Opcode::WasmLoadRef: case MDefinition::Opcode::WasmStoreRef: case MDefinition::Opcode::ArrayJoin: case MDefinition::Opcode::ArraySlice: return nullptr; default: #ifdef DEBUG // Crash when the default aliasSet is overriden, but when not added in the // list above.
--- a/js/src/jit/CodeGenerator.cpp +++ b/js/src/jit/CodeGenerator.cpp @@ -7416,17 +7416,18 @@ void CodeGenerator::emitWasmCallBase(LWa break; case wasm::CalleeDesc::Builtin: masm.call(desc, callee.builtin()); reloadRegs = false; switchRealm = false; break; case wasm::CalleeDesc::BuiltinInstanceMethod: masm.wasmCallBuiltinInstanceMethod(desc, mir->instanceArg(), - callee.builtin()); + callee.builtin(), + mir->builtinMethodFailureMode()); switchRealm = false; break; } // Note the assembler offset for the associated LSafePoint. markSafepointAt(masm.currentOffset(), lir); // Now that all the outbound in-memory args are on the stack, note the @@ -7525,20 +7526,16 @@ void CodeGenerator::visitWasmStoreSlot(L } } void CodeGenerator::visitWasmDerivedPointer(LWasmDerivedPointer* ins) { masm.movePtr(ToRegister(ins->base()), ToRegister(ins->output())); masm.addPtr(Imm32(int32_t(ins->offset())), ToRegister(ins->output())); } -void CodeGenerator::visitWasmLoadRef(LWasmLoadRef* lir) { - masm.loadPtr(Address(ToRegister(lir->ptr()), 0), ToRegister(lir->output())); -} - void CodeGenerator::visitWasmStoreRef(LWasmStoreRef* ins) { Register tls = ToRegister(ins->tls()); Register valueAddr = ToRegister(ins->valueAddr()); Register value = ToRegister(ins->value()); Register temp = ToRegister(ins->temp()); Label skipPreBarrier; wasm::EmitWasmPreBarrierGuard(masm, tls, temp, valueAddr, &skipPreBarrier); @@ -13913,16 +13910,17 @@ void CodeGenerator::emitIonToWasmCallBas case wasm::ValType::I32: case wasm::ValType::F32: case wasm::ValType::F64: argMir = ToMIRType(sig.args()[i]); break; case wasm::ValType::I64: case wasm::ValType::Ref: case wasm::ValType::AnyRef: + case wasm::ValType::FuncRef: // Don't forget to trace GC type arguments in TraceJitExitFrames // when they're enabled. MOZ_CRASH("unexpected argument type when calling from ion to wasm"); case wasm::ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } ABIArg arg = abi.next(argMir); @@ -13971,16 +13969,17 @@ void CodeGenerator::emitIonToWasmCallBas MOZ_ASSERT(ToFloatRegister(lir->output()) == ReturnFloat32Reg); break; case wasm::ExprType::F64: MOZ_ASSERT(lir->mir()->type() == MIRType::Double); MOZ_ASSERT(ToFloatRegister(lir->output()) == ReturnDoubleReg); break; case wasm::ExprType::Ref: case wasm::ExprType::AnyRef: + case wasm::ExprType::FuncRef: case wasm::ExprType::I64: // Don't forget to trace GC type return value in TraceJitExitFrames // when they're enabled. MOZ_CRASH("unexpected return type when calling from ion to wasm"); case wasm::ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case wasm::ExprType::Limit: MOZ_CRASH("Limit"); @@ -14011,21 +14010,16 @@ void CodeGenerator::visitIonToWasmCall(L void CodeGenerator::visitIonToWasmCallV(LIonToWasmCallV* lir) { emitIonToWasmCallBase(lir); } void CodeGenerator::visitWasmNullConstant(LWasmNullConstant* lir) { masm.xorPtr(ToRegister(lir->output()), ToRegister(lir->output())); } -void CodeGenerator::visitIsNullPointer(LIsNullPointer* lir) { - masm.cmpPtrSet(Assembler::Equal, ToRegister(lir->value()), ImmWord(0), - ToRegister(lir->output())); -} - void CodeGenerator::visitWasmCompareAndSelect(LWasmCompareAndSelect* ins) { bool cmpIs32bit = ins->compareType() == MCompare::Compare_Int32 || ins->compareType() == MCompare::Compare_UInt32; bool selIs32bit = ins->mir()->type() == MIRType::Int32; if (cmpIs32bit && selIs32bit) { Register out = ToRegister(ins->output()); MOZ_ASSERT(ToRegister(ins->ifTrueExpr()) == out,
--- a/js/src/jit/Lowering.cpp +++ b/js/src/jit/Lowering.cpp @@ -4345,21 +4345,16 @@ void LIRGenerator::visitWasmStoreGlobalC } } void LIRGenerator::visitWasmDerivedPointer(MWasmDerivedPointer* ins) { LAllocation base = useRegisterAtStart(ins->base()); define(new (alloc()) LWasmDerivedPointer(base), ins); } -void LIRGenerator::visitWasmLoadRef(MWasmLoadRef* ins) { - define(new (alloc()) LWasmLoadRef(useRegisterAtStart(ins->getOperand(0))), - ins); -} - void LIRGenerator::visitWasmStoreRef(MWasmStoreRef* ins) { LAllocation tls = useRegister(ins->tls()); LAllocation valueAddr = useFixed(ins->valueAddr(), PreBarrierReg); LAllocation value = useRegister(ins->value()); add(new (alloc()) LWasmStoreRef(tls, valueAddr, value, temp()), ins); } void LIRGenerator::visitWasmParameter(MWasmParameter* ins) { @@ -4717,21 +4712,16 @@ void LIRGenerator::visitConstant(MConsta MOZ_CRASH("unexpected constant type"); } } void LIRGenerator::visitWasmNullConstant(MWasmNullConstant* ins) { define(new (alloc()) LWasmNullConstant(), ins); } -void LIRGenerator::visitIsNullPointer(MIsNullPointer* ins) { - define(new (alloc()) LIsNullPointer(useRegisterAtStart(ins->getOperand(0))), - ins); -} - void LIRGenerator::visitWasmFloatConstant(MWasmFloatConstant* ins) { switch (ins->type()) { case MIRType::Double: define(new (alloc()) LDouble(ins->toDouble()), ins); break; case MIRType::Float32: define(new (alloc()) LFloat32(ins->toFloat32()), ins); break;
--- a/js/src/jit/MCallOptimize.cpp +++ b/js/src/jit/MCallOptimize.cpp @@ -4267,16 +4267,17 @@ IonBuilder::InliningResult IonBuilder::i case wasm::ValType::F32: conversion = MToFloat32::New(alloc(), arg); break; case wasm::ValType::F64: conversion = MToDouble::New(alloc(), arg); break; case wasm::ValType::I64: case wasm::ValType::AnyRef: + case wasm::ValType::FuncRef: case wasm::ValType::Ref: MOZ_CRASH("impossible per above check"); case wasm::ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } current->add(conversion); call->initArg(i, conversion);
--- a/js/src/jit/MIR.cpp +++ b/js/src/jit/MIR.cpp @@ -5446,27 +5446,29 @@ MWasmCall* MWasmCall::New(TempAllocator& call->initOperand(call->argRegs_.length(), tableIndex); } return call; } MWasmCall* MWasmCall::NewBuiltinInstanceMethodCall( TempAllocator& alloc, const wasm::CallSiteDesc& desc, - const wasm::SymbolicAddress builtin, const ABIArg& instanceArg, - const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned) { + const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode, + const ABIArg& instanceArg, const Args& args, MIRType resultType, + uint32_t stackArgAreaSizeUnaligned) { auto callee = wasm::CalleeDesc::builtinInstanceMethod(builtin); MWasmCall* call = MWasmCall::New(alloc, desc, callee, args, resultType, stackArgAreaSizeUnaligned, nullptr); if (!call) { return nullptr; } MOZ_ASSERT(instanceArg != ABIArg()); call->instanceArg_ = instanceArg; + call->builtinMethodFailureMode_ = failureMode; return call; } void MSqrt::trySpecializeFloat32(TempAllocator& alloc) { if (!input()->canProduceFloat32() || !CheckUsesAreFloat32Consumers(this)) { if (input()->type() == MIRType::Float32) { ConvertDefinitionToDouble<0>(alloc, input(), this); }
--- a/js/src/jit/MIR.h +++ b/js/src/jit/MIR.h @@ -1565,39 +1565,16 @@ class MWasmNullConstant : public MNullar bool congruentTo(const MDefinition* ins) const override { return ins->isWasmNullConstant(); } AliasSet getAliasSet() const override { return AliasSet::None(); } ALLOW_CLONE(MWasmNullConstant) }; -class MIsNullPointer : public MUnaryInstruction, public NoTypePolicy::Data { - explicit MIsNullPointer(MDefinition* value) - : MUnaryInstruction(classOpcode, value) { - MOZ_ASSERT(value->type() == MIRType::Pointer); - setResultType(MIRType::Boolean); - setMovable(); - } - - public: - INSTRUCTION_HEADER(IsNullPointer); - - static MIsNullPointer* New(TempAllocator& alloc, MDefinition* value) { - return new (alloc) MIsNullPointer(value); - } - - bool congruentTo(const MDefinition* ins) const override { - return congruentIfOperandsEqual(ins); - } - AliasSet getAliasSet() const override { return AliasSet::None(); } - - ALLOW_CLONE(MIsNullPointer) -}; - // Floating-point value as created by wasm. Just a constant value, used to // effectively inhibite all the MIR optimizations. This uses the same LIR nodes // as a MConstant of the same type would. class MWasmFloatConstant : public MNullaryInstruction { union { float f32_; double f64_; uint64_t bits_; @@ -11793,41 +11770,16 @@ class MWasmDerivedPointer : public MUnar bool congruentTo(const MDefinition* ins) const override { return congruentIfOperandsEqual(ins); } ALLOW_CLONE(MWasmDerivedPointer) }; -class MWasmLoadRef : public MUnaryInstruction, public NoTypePolicy::Data { - AliasSet::Flag aliasSet_; - - explicit MWasmLoadRef(MDefinition* valueAddr, AliasSet::Flag aliasSet, - bool isMovable = true) - : MUnaryInstruction(classOpcode, valueAddr), aliasSet_(aliasSet) { - MOZ_ASSERT(valueAddr->type() == MIRType::Pointer); - setResultType(MIRType::RefOrNull); - if (isMovable) { - setMovable(); - } - } - - public: - INSTRUCTION_HEADER(WasmLoadRef) - TRIVIAL_NEW_WRAPPERS - - bool congruentTo(const MDefinition* ins) const override { - return congruentIfOperandsEqual(ins); - } - AliasSet getAliasSet() const override { return AliasSet::Load(aliasSet_); } - - ALLOW_CLONE(MWasmLoadRef) -}; - class MWasmStoreRef : public MAryInstruction<3>, public NoTypePolicy::Data { AliasSet::Flag aliasSet_; MWasmStoreRef(MDefinition* tls, MDefinition* valueAddr, MDefinition* value, AliasSet::Flag aliasSet) : MAryInstruction<3>(classOpcode), aliasSet_(aliasSet) { MOZ_ASSERT(valueAddr->type() == MIRType::Pointer); MOZ_ASSERT(value->type() == MIRType::RefOrNull); @@ -11892,25 +11844,27 @@ class MWasmStackArg : public MUnaryInstr uint32_t spOffset() const { return spOffset_; } void incrementOffset(uint32_t inc) { spOffset_ += inc; } }; class MWasmCall final : public MVariadicInstruction, public NoTypePolicy::Data { wasm::CallSiteDesc desc_; wasm::CalleeDesc callee_; + wasm::FailureMode builtinMethodFailureMode_; FixedList<AnyRegister> argRegs_; uint32_t stackArgAreaSizeUnaligned_; ABIArg instanceArg_; MWasmCall(const wasm::CallSiteDesc& desc, const wasm::CalleeDesc& callee, uint32_t stackArgAreaSizeUnaligned) : MVariadicInstruction(classOpcode), desc_(desc), callee_(callee), + builtinMethodFailureMode_(wasm::FailureMode::Infallible), stackArgAreaSizeUnaligned_(stackArgAreaSizeUnaligned) {} public: INSTRUCTION_HEADER(WasmCall) struct Arg { AnyRegister reg; MDefinition* def; @@ -11920,26 +11874,31 @@ class MWasmCall final : public MVariadic static MWasmCall* New(TempAllocator& alloc, const wasm::CallSiteDesc& desc, const wasm::CalleeDesc& callee, const Args& args, MIRType 
resultType, uint32_t stackArgAreaSizeUnaligned, MDefinition* tableIndex = nullptr); static MWasmCall* NewBuiltinInstanceMethodCall( TempAllocator& alloc, const wasm::CallSiteDesc& desc, - const wasm::SymbolicAddress builtin, const ABIArg& instanceArg, - const Args& args, MIRType resultType, uint32_t stackArgAreaSizeUnaligned); + const wasm::SymbolicAddress builtin, wasm::FailureMode failureMode, + const ABIArg& instanceArg, const Args& args, MIRType resultType, + uint32_t stackArgAreaSizeUnaligned); size_t numArgs() const { return argRegs_.length(); } AnyRegister registerForArg(size_t index) const { MOZ_ASSERT(index < numArgs()); return argRegs_[index]; } const wasm::CallSiteDesc& desc() const { return desc_; } const wasm::CalleeDesc& callee() const { return callee_; } + wasm::FailureMode builtinMethodFailureMode() const { + MOZ_ASSERT(callee_.which() == wasm::CalleeDesc::BuiltinInstanceMethod); + return builtinMethodFailureMode_; + } uint32_t stackArgAreaSizeUnaligned() const { return stackArgAreaSizeUnaligned_; } bool possiblyCalls() const override { return true; } const ABIArg& instanceArg() const { return instanceArg_; } };
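A note on the MIR change above: MWasmCall now records how a builtin instance-method callee signals failure, defaulting to Infallible, and the accessor is only meaningful when the callee is a BuiltinInstanceMethod. A minimal standalone sketch of that pattern, using simplified stand-in types rather than the real MWasmCall interface:

// Sketch only: toy types standing in for MWasmCall / wasm::CalleeDesc.
#include <cassert>
#include <cstdio>

enum class FailureMode { Infallible, FailOnNegI32, FailOnNullPtr, FailOnInvalidRef };
enum class CalleeKind { WasmFunction, BuiltinInstanceMethod };

class CallNode {
  CalleeKind kind_;
  FailureMode builtinFailureMode_ = FailureMode::Infallible;  // default, as in the constructor above

 public:
  explicit CallNode(CalleeKind kind) : kind_(kind) {}

  // Mirrors NewBuiltinInstanceMethodCall: only builtin calls carry a real mode.
  static CallNode builtin(FailureMode mode) {
    CallNode c(CalleeKind::BuiltinInstanceMethod);
    c.builtinFailureMode_ = mode;
    return c;
  }

  FailureMode builtinFailureMode() const {
    // Meaningful only for builtin instance-method callees, as asserted above.
    assert(kind_ == CalleeKind::BuiltinInstanceMethod);
    return builtinFailureMode_;
  }
};

int main() {
  CallNode c = CallNode::builtin(FailureMode::FailOnNegI32);
  std::printf("fallible: %d\n", c.builtinFailureMode() != FailureMode::Infallible);
  return 0;
}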
--- a/js/src/jit/MacroAssembler.cpp +++ b/js/src/jit/MacroAssembler.cpp @@ -3186,33 +3186,57 @@ CodeOffset MacroAssembler::wasmCallImpor WasmTlsReg); loadWasmPinnedRegsFromTls(); return call(desc, ABINonArgReg0); } CodeOffset MacroAssembler::wasmCallBuiltinInstanceMethod( const wasm::CallSiteDesc& desc, const ABIArg& instanceArg, - wasm::SymbolicAddress builtin) { + wasm::SymbolicAddress builtin, wasm::FailureMode failureMode) { MOZ_ASSERT(instanceArg != ABIArg()); if (instanceArg.kind() == ABIArg::GPR) { loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)), instanceArg.gpr()); } else if (instanceArg.kind() == ABIArg::Stack) { // Safe to use ABINonArgReg0 since it's the last thing before the call. Register scratch = ABINonArgReg0; loadPtr(Address(WasmTlsReg, offsetof(wasm::TlsData, instance)), scratch); storePtr(scratch, Address(getStackPointer(), instanceArg.offsetFromArgBase())); } else { MOZ_CRASH("Unknown abi passing style for pointer"); } - return call(desc, builtin); + CodeOffset ret = call(desc, builtin); + + if (failureMode != wasm::FailureMode::Infallible) { + Label noTrap; + switch (failureMode) { + case wasm::FailureMode::Infallible: + MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE(); + case wasm::FailureMode::FailOnNegI32: + branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap); + break; + case wasm::FailureMode::FailOnNullPtr: + branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap); + break; + case wasm::FailureMode::FailOnInvalidRef: + branchPtr(Assembler::NotEqual, ReturnReg, + ImmWord(uintptr_t(wasm::AnyRef::invalid().forCompiledCode())), + &noTrap); + break; + } + wasmTrap(wasm::Trap::ThrowReported, + wasm::BytecodeOffset(desc.lineOrBytecode())); + bind(&noTrap); + } + + return ret; } CodeOffset MacroAssembler::wasmCallIndirect(const wasm::CallSiteDesc& desc, const wasm::CalleeDesc& callee, bool needsBoundsCheck) { Register scratch = WasmTableCallScratchReg0; Register index = WasmTableCallIndexReg;
--- a/js/src/jit/MacroAssembler.h +++ b/js/src/jit/MacroAssembler.h @@ -1922,17 +1922,18 @@ class MacroAssembler : public MacroAssem const wasm::CalleeDesc& callee, bool needsBoundsCheck); // This function takes care of loading the pointer to the current instance // as the implicit first argument. It preserves TLS and pinned registers. // (TLS & pinned regs are non-volatile registers in the system ABI). CodeOffset wasmCallBuiltinInstanceMethod(const wasm::CallSiteDesc& desc, const ABIArg& instanceArg, - wasm::SymbolicAddress builtin); + wasm::SymbolicAddress builtin, + wasm::FailureMode failureMode); // As enterFakeExitFrame(), but using register conventions appropriate for // wasm stubs. void enterFakeExitFrameForWasm(Register cxreg, Register scratch, ExitFrameType type) PER_SHARED_ARCH; public: // ========================================================================
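The failureMode parameter added here lets the macro assembler emit the trap check once, right after the builtin call, instead of duplicating it at every call site. A hedged sketch of the dispatch, with plain C++ standing in for the emitted branches (INVALID_REF is a placeholder, not the real AnyRef::invalid() encoding):

// Sketch only: one comparison per failure mode, mirroring the switch in
// MacroAssembler::wasmCallBuiltinInstanceMethod above.
#include <cstdint>
#include <cstdio>

enum class FailureMode { Infallible, FailOnNegI32, FailOnNullPtr, FailOnInvalidRef };

constexpr intptr_t INVALID_REF = -1;  // placeholder sentinel

bool callFailed(FailureMode mode, intptr_t returnValue) {
  switch (mode) {
    case FailureMode::Infallible:
      return false;                       // no check is emitted at all
    case FailureMode::FailOnNegI32:
      return int32_t(returnValue) < 0;    // branchTest32(NotSigned, ...) skips the trap
    case FailureMode::FailOnNullPtr:
      return returnValue == 0;            // branchTestPtr(NonZero, ...) skips the trap
    case FailureMode::FailOnInvalidRef:
      return returnValue == INVALID_REF;  // branchPtr(NotEqual, invalid-sentinel) skips the trap
  }
  return false;
}

int main() {
  // A FailOnNegI32 builtin returning -1 would reach wasmTrap(Trap::ThrowReported, ...).
  std::printf("%s\n", callFailed(FailureMode::FailOnNegI32, -1) ? "trap" : "ok");
  return 0;
}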
--- a/js/src/jit/shared/LIR-shared.h +++ b/js/src/jit/shared/LIR-shared.h @@ -6677,27 +6677,16 @@ class LWasmDerivedPointer : public LInst explicit LWasmDerivedPointer(const LAllocation& base) : LInstructionHelper(classOpcode) { setOperand(0, base); } const LAllocation* base() { return getOperand(0); } size_t offset() { return mirRaw()->toWasmDerivedPointer()->offset(); } }; -class LWasmLoadRef : public LInstructionHelper<1, 1, 0> { - public: - LIR_HEADER(WasmLoadRef); - explicit LWasmLoadRef(const LAllocation& ptr) - : LInstructionHelper(classOpcode) { - setOperand(0, ptr); - } - MWasmLoadRef* mir() const { return mirRaw()->toWasmLoadRef(); } - const LAllocation* ptr() { return getOperand(0); } -}; - class LWasmStoreRef : public LInstructionHelper<0, 3, 1> { public: LIR_HEADER(WasmStoreRef); LWasmStoreRef(const LAllocation& tls, const LAllocation& valueAddr, const LAllocation& value, const LDefinition& temp) : LInstructionHelper(classOpcode) { setOperand(0, tls); setOperand(1, valueAddr); @@ -6777,27 +6766,16 @@ inline bool IsWasmCall(LNode::Opcode op) } class LWasmNullConstant : public LInstructionHelper<1, 0, 0> { public: LIR_HEADER(WasmNullConstant); explicit LWasmNullConstant() : LInstructionHelper(classOpcode) {} }; -class LIsNullPointer : public LInstructionHelper<1, 1, 0> { - public: - LIR_HEADER(IsNullPointer); - explicit LIsNullPointer(const LAllocation& value) - : LInstructionHelper(classOpcode) { - setOperand(0, value); - } - MIsNullPointer* mir() const { return mirRaw()->toIsNullPointer(); } - const LAllocation* value() { return getOperand(0); } -}; - template <size_t Defs> class LWasmCallBase : public LVariadicInstruction<Defs, 0> { using Base = LVariadicInstruction<Defs, 0>; bool needsBoundsCheck_; public: LWasmCallBase(LNode::Opcode opcode, uint32_t numOperands,
--- a/js/src/js.msg +++ b/js/src/js.msg @@ -396,28 +396,27 @@ MSG_DEF(JSMSG_WASM_INT_DIVIDE_BY_ZERO, 0 MSG_DEF(JSMSG_WASM_OUT_OF_BOUNDS, 0, JSEXN_WASMRUNTIMEERROR, "index out of bounds") MSG_DEF(JSMSG_WASM_UNALIGNED_ACCESS, 0, JSEXN_WASMRUNTIMEERROR, "unaligned memory access") MSG_DEF(JSMSG_WASM_WAKE_OVERFLOW, 0, JSEXN_WASMRUNTIMEERROR, "too many woken agents") MSG_DEF(JSMSG_WASM_DROPPED_DATA_SEG, 0, JSEXN_WASMRUNTIMEERROR, "use of dropped data segment") MSG_DEF(JSMSG_WASM_DROPPED_ELEM_SEG, 0, JSEXN_WASMRUNTIMEERROR, "use of dropped element segment") MSG_DEF(JSMSG_WASM_DEREF_NULL, 0, JSEXN_WASMRUNTIMEERROR, "dereferencing null pointer") MSG_DEF(JSMSG_WASM_BAD_RANGE , 2, JSEXN_RANGEERR, "bad {0} {1}") MSG_DEF(JSMSG_WASM_BAD_GROW, 1, JSEXN_RANGEERR, "failed to grow {0}") -MSG_DEF(JSMSG_WASM_BAD_TBL_GROW_INIT, 1, JSEXN_TYPEERR, "bad initializer to {0} table") MSG_DEF(JSMSG_WASM_TABLE_OUT_OF_BOUNDS, 0, JSEXN_RANGEERR, "table index out of bounds") MSG_DEF(JSMSG_WASM_BAD_UINT32, 2, JSEXN_TYPEERR, "bad {0} {1}") MSG_DEF(JSMSG_WASM_BAD_BUF_ARG, 0, JSEXN_TYPEERR, "first argument must be an ArrayBuffer or typed array object") MSG_DEF(JSMSG_WASM_BAD_MOD_ARG, 0, JSEXN_TYPEERR, "first argument must be a WebAssembly.Module") MSG_DEF(JSMSG_WASM_BAD_BUF_MOD_ARG, 0, JSEXN_TYPEERR, "first argument must be a WebAssembly.Module, ArrayBuffer or typed array object") MSG_DEF(JSMSG_WASM_BAD_DESC_ARG, 1, JSEXN_TYPEERR, "first argument must be a {0} descriptor") MSG_DEF(JSMSG_WASM_BAD_ELEMENT, 0, JSEXN_TYPEERR, "\"element\" property of table descriptor must be \"funcref\"") MSG_DEF(JSMSG_WASM_BAD_ELEMENT_GENERALIZED, 0, JSEXN_TYPEERR, "\"element\" property of table descriptor must be \"funcref\" or \"anyref\"") MSG_DEF(JSMSG_WASM_BAD_IMPORT_ARG, 0, JSEXN_TYPEERR, "second argument must be an object") MSG_DEF(JSMSG_WASM_BAD_IMPORT_FIELD, 1, JSEXN_TYPEERR, "import object field '{0}' is not an Object") -MSG_DEF(JSMSG_WASM_BAD_TABLE_VALUE, 0, JSEXN_TYPEERR, "can only assign WebAssembly exported functions to Table") +MSG_DEF(JSMSG_WASM_BAD_FUNCREF_VALUE, 0, JSEXN_TYPEERR, "can only pass WebAssembly exported functions to funcref") MSG_DEF(JSMSG_WASM_BAD_I64_TYPE, 0, JSEXN_TYPEERR, "cannot pass i64 to or from JS") MSG_DEF(JSMSG_WASM_BAD_GLOBAL_TYPE, 0, JSEXN_TYPEERR, "bad type for a WebAssembly.Global") MSG_DEF(JSMSG_WASM_NO_TRANSFER, 0, JSEXN_TYPEERR, "cannot transfer WebAssembly/asm.js ArrayBuffer") MSG_DEF(JSMSG_WASM_TEXT_FAIL, 1, JSEXN_SYNTAXERR, "wasm text error: {0}") MSG_DEF(JSMSG_WASM_MISSING_MAXIMUM, 0, JSEXN_TYPEERR, "'shared' is true but maximum is not specified") MSG_DEF(JSMSG_WASM_GLOBAL_IMMUTABLE, 0, JSEXN_TYPEERR, "can't set value of immutable global") // Proxy
--- a/js/src/wasm/AsmJS.cpp +++ b/js/src/wasm/AsmJS.cpp @@ -2014,17 +2014,17 @@ class MOZ_STACK_CLASS JS_HAZ_ROOTED Modu } MOZ_ASSERT(sigIndex >= env_.asmJSSigToTableIndex.length()); if (!env_.asmJSSigToTableIndex.resize(sigIndex + 1)) { return false; } env_.asmJSSigToTableIndex[sigIndex] = env_.tables.length(); - if (!env_.tables.emplaceBack(TableKind::TypedFunction, Limits(mask + 1))) { + if (!env_.tables.emplaceBack(TableKind::AsmJS, Limits(mask + 1))) { return false; } Global* global = validationLifo_.new_<Global>(Global::Table); if (!global) { return false; } @@ -6557,17 +6557,18 @@ static bool ValidateGlobalVariable(JSCon if (!ToNumber(cx, v, &d)) { return false; } val->emplace(d); return true; } case ValType::Ref: case ValType::NullRef: - case ValType::AnyRef: { + case ValType::AnyRef: + case ValType::FuncRef: { MOZ_CRASH("not available in asm.js"); } } } } MOZ_CRASH("unreachable"); }
--- a/js/src/wasm/WasmAST.h +++ b/js/src/wasm/WasmAST.h @@ -122,19 +122,21 @@ class AstValType { which_ = IsValType; type_ = ValType(ValType::Ref, ref.index()); } else { which_ = IsAstRef; ref_ = ref; } } - bool isRefType() const { +#ifdef ENABLE_WASM_GC + bool isNarrowType() const { return code() == ValType::AnyRef || code() == ValType::Ref; } +#endif bool isValid() const { return !(which_ == IsValType && !type_.isValid()); } bool isResolved() const { return which_ == IsValType; } AstRef& asRef() { return ref_; } void resolve() {
--- a/js/src/wasm/WasmBaselineCompile.cpp +++ b/js/src/wasm/WasmBaselineCompile.cpp @@ -1051,16 +1051,17 @@ void BaseLocalIter::settle() { MOZ_ASSERT(argsIter_.done()); if (index_ < locals_.length()) { switch (locals_[index_].code()) { case ValType::I32: case ValType::I64: case ValType::F32: case ValType::F64: case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: // TODO/AnyRef-boxing: With boxed immediates and strings, the // debugger must be made aware that AnyRef != Pointer. ASSERT_ANYREF_IS_JSOBJECT; mirType_ = ToMIRType(locals_[index_]); frameOffset_ = pushLocal(MIRTypeToSize(mirType_)); break; case ValType::NullRef: @@ -2778,16 +2779,17 @@ class BaseCompiler final : public BaseCo void maybeReserveJoinRegI(ExprType type) { switch (type.code()) { case ExprType::I32: needI32(joinRegI32_); break; case ExprType::I64: needI64(joinRegI64_); break; + case ExprType::FuncRef: case ExprType::AnyRef: case ExprType::NullRef: case ExprType::Ref: needRef(joinRegPtr_); break; default:; } } @@ -2795,16 +2797,17 @@ class BaseCompiler final : public BaseCo void maybeUnreserveJoinRegI(ExprType type) { switch (type.code()) { case ExprType::I32: freeI32(joinRegI32_); break; case ExprType::I64: freeI64(joinRegI64_); break; + case ExprType::FuncRef: case ExprType::AnyRef: case ExprType::NullRef: case ExprType::Ref: freeRef(joinRegPtr_); break; default:; } } @@ -2820,16 +2823,17 @@ class BaseCompiler final : public BaseCo case ExprType::F32: needF32(joinRegF32_); break; case ExprType::F64: needF64(joinRegF64_); break; case ExprType::Ref: case ExprType::NullRef: + case ExprType::FuncRef: case ExprType::AnyRef: needRef(joinRegPtr_); break; default: break; } } @@ -2844,16 +2848,17 @@ class BaseCompiler final : public BaseCo case ExprType::F32: freeF32(joinRegF32_); break; case ExprType::F64: freeF64(joinRegF64_); break; case ExprType::Ref: case ExprType::NullRef: + case ExprType::FuncRef: case ExprType::AnyRef: freeRef(joinRegPtr_); break; default: break; } } @@ -3773,16 +3778,17 @@ class BaseCompiler final : public BaseCo case ExprType::F32: { DebugOnly<Stk::Kind> k(stk_.back().kind()); MOZ_ASSERT(k == Stk::RegisterF32 || k == Stk::ConstF32 || k == Stk::MemF32 || k == Stk::LocalF32); return Some(AnyReg(popF32(joinRegF32_))); } case ExprType::Ref: case ExprType::NullRef: + case ExprType::FuncRef: case ExprType::AnyRef: { DebugOnly<Stk::Kind> k(stk_.back().kind()); MOZ_ASSERT(k == Stk::RegisterRef || k == Stk::ConstRef || k == Stk::MemRef || k == Stk::LocalRef); return Some(AnyReg(popRef(joinRegPtr_))); } default: { MOZ_CRASH("Compiler bug: unexpected expression type"); @@ -3811,16 +3817,17 @@ class BaseCompiler final : public BaseCo needF32(joinRegF32_); return Some(AnyReg(joinRegF32_)); case ExprType::F64: MOZ_ASSERT(isAvailableF64(joinRegF64_)); needF64(joinRegF64_); return Some(AnyReg(joinRegF64_)); case ExprType::Ref: case ExprType::NullRef: + case ExprType::FuncRef: case ExprType::AnyRef: MOZ_ASSERT(isAvailableRef(joinRegPtr_)); needRef(joinRegPtr_); return Some(AnyReg(joinRegPtr_)); case ExprType::Void: return Nothing(); default: MOZ_CRASH("Compiler bug: unexpected type"); @@ -4234,16 +4241,17 @@ class BaseCompiler final : public BaseCo break; case ExprType::F64: masm.storeDouble(RegF64(ReturnDoubleReg), resultsAddress); break; case ExprType::F32: masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress); break; case ExprType::Ref: + case ExprType::FuncRef: case ExprType::AnyRef: masm.storePtr(RegPtr(ReturnReg), resultsAddress); break; case ExprType::NullRef: default: MOZ_CRASH("Function 
return type"); } } @@ -4264,16 +4272,17 @@ class BaseCompiler final : public BaseCo break; case ExprType::F64: masm.loadDouble(resultsAddress, RegF64(ReturnDoubleReg)); break; case ExprType::F32: masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg)); break; case ExprType::Ref: + case ExprType::FuncRef: case ExprType::AnyRef: masm.loadPtr(resultsAddress, RegPtr(ReturnReg)); break; case ExprType::NullRef: default: MOZ_CRASH("Function return type"); } } @@ -4576,16 +4585,17 @@ class BaseCompiler final : public BaseCo } #endif case ABIArg::Uninitialized: MOZ_CRASH("Uninitialized ABIArg kind"); } break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { ABIArg argLoc = call->abi.next(MIRType::RefOrNull); if (argLoc.kind() == ABIArg::Stack) { ScratchPtr scratch(*this); loadRef(arg, scratch); masm.storePtr(scratch, Address(masm.getStackPointer(), argLoc.offsetFromArgBase())); } else { @@ -4633,24 +4643,25 @@ class BaseCompiler final : public BaseCo CalleeDesc callee = CalleeDesc::import(globalDataOffset); return masm.wasmCallImport(desc, callee); } CodeOffset builtinCall(SymbolicAddress builtin, const FunctionCall& call) { return callSymbolic(builtin, call); } - CodeOffset builtinInstanceMethodCall(SymbolicAddress builtin, + CodeOffset builtinInstanceMethodCall(const SymbolicAddressSignature& builtin, const ABIArg& instanceArg, const FunctionCall& call) { // Builtin method calls assume the TLS register has been set. masm.loadWasmTlsRegFromFrame(); CallSiteDesc desc(call.lineOrBytecode, CallSiteDesc::Symbolic); - return masm.wasmCallBuiltinInstanceMethod(desc, instanceArg, builtin); + return masm.wasmCallBuiltinInstanceMethod( + desc, instanceArg, builtin.identity, builtin.failureMode); } ////////////////////////////////////////////////////////////////////// // // Sundry low-level code generators. 
// The compiler depends on moveImm32() clearing the high bits of a 64-bit // register on 64-bit systems except MIPS64 where high bits are sign extended @@ -8548,16 +8559,17 @@ void BaseCompiler::doReturn(ExprType typ case ExprType::F32: { RegF32 rv = popF32(RegF32(ReturnFloat32Reg)); returnCleanup(popStack); freeF32(rv); break; } case ExprType::Ref: case ExprType::NullRef: + case ExprType::FuncRef: case ExprType::AnyRef: { RegPtr rv = popRef(RegPtr(ReturnReg)); returnCleanup(popStack); freeRef(rv); break; } default: { MOZ_CRASH("Function return type"); @@ -8990,16 +9002,17 @@ bool BaseCompiler::emitGetLocal() { break; case ValType::F64: pushLocalF64(slot); break; case ValType::F32: pushLocalF32(slot); break; case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: pushLocalRef(slot); break; case ValType::NullRef: default: MOZ_CRASH("Local variable type"); } @@ -9054,16 +9067,17 @@ bool BaseCompiler::emitSetOrTeeLocal(uin if (isSetLocal) { freeF32(rv); } else { pushF32(rv); } break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr rv = popRef(); syncLocal(slot); fr.storeLocalPtr(rv, localFromSlot(slot, MIRType::RefOrNull)); if (isSetLocal) { freeRef(rv); } else { pushRef(rv); @@ -9119,22 +9133,22 @@ bool BaseCompiler::emitGetGlobal() { break; case ValType::F32: pushF32(value.f32()); break; case ValType::F64: pushF64(value.f64()); break; case ValType::Ref: + case ValType::FuncRef: + case ValType::AnyRef: + pushRef(intptr_t(value.ref().forCompiledCode())); + break; case ValType::NullRef: - pushRef(intptr_t(value.ref())); - break; - case ValType::AnyRef: - pushRef(intptr_t(value.anyref().forCompiledCode())); - break; + MOZ_CRASH("NullRef not expressible"); default: MOZ_CRASH("Global constant type"); } return true; } switch (global.type().code()) { case ValType::I32: { @@ -9161,16 +9175,17 @@ bool BaseCompiler::emitGetGlobal() { case ValType::F64: { RegF64 rv = needF64(); ScratchI32 tmp(*this); masm.loadDouble(addressOfGlobalVar(global, tmp), rv); pushF64(rv); break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr rv = needRef(); ScratchI32 tmp(*this); masm.loadPtr(addressOfGlobalVar(global, tmp), rv); pushRef(rv); break; } case ValType::NullRef: @@ -9220,16 +9235,17 @@ bool BaseCompiler::emitSetGlobal() { case ValType::F64: { RegF64 rv = popF64(); ScratchI32 tmp(*this); masm.storeDouble(rv, addressOfGlobalVar(global, tmp)); freeF64(rv); break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr valueAddr(PreBarrierReg); needRef(valueAddr); { ScratchI32 tmp(*this); masm.computeEffectiveAddress(addressOfGlobalVar(global, tmp), valueAddr); } @@ -9627,16 +9643,17 @@ bool BaseCompiler::emitSelect() { moveF64(rs, r); masm.bind(&done); freeF64(rs); pushF64(r); break; } case ValType::Ref: case ValType::NullRef: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr r, rs; pop2xRef(&r, &rs); emitBranchPerform(&b); moveRef(rs, r); masm.bind(&done); freeRef(rs); pushRef(r); @@ -9780,17 +9797,17 @@ bool BaseCompiler::emitInstanceCall(uint t = sizeof(void*) == 4 ? 
ValType::I32 : ValType::I64; break; default: MOZ_CRASH("Unexpected type"); } passArg(t, peek(numNonInstanceArgs - i), &baselineCall); } CodeOffset raOffset = - builtinInstanceMethodCall(builtin.identity, instanceArg, baselineCall); + builtinInstanceMethodCall(builtin, instanceArg, baselineCall); if (!createStackMap("emitInstanceCall", raOffset)) { return false; } endCall(baselineCall, stackSpace); popValueStackBy(numNonInstanceArgs); @@ -10142,64 +10159,48 @@ bool BaseCompiler::emitWait(ValType type if (!iter_.readWait(&addr, type, byteSize, ¬hing, ¬hing)) { return false; } if (deadCode_) { return true; } - // Returns -1 on trap, otherwise nonnegative result. switch (type.code()) { case ValType::I32: if (!emitInstanceCall(lineOrBytecode, SASigWaitI32)) { return false; } break; case ValType::I64: if (!emitInstanceCall(lineOrBytecode, SASigWaitI64)) { return false; } break; default: MOZ_CRASH(); } - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - return true; } bool BaseCompiler::emitWake() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); Nothing nothing; LinearMemoryAddress<Nothing> addr; if (!iter_.readWake(&addr, ¬hing)) { return false; } if (deadCode_) { return true; } - // Returns -1 on trap, otherwise nonnegative result. - if (!emitInstanceCall(lineOrBytecode, SASigWake)) { - return false; - } - - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - - return true; + return emitInstanceCall(lineOrBytecode, SASigWake); } #ifdef ENABLE_WASM_BULKMEM_OPS bool BaseCompiler::emitMemOrTableCopy(bool isMem) { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); uint32_t dstMemOrTableIndex = 0; uint32_t srcMemOrTableIndex = 0; @@ -10208,95 +10209,69 @@ bool BaseCompiler::emitMemOrTableCopy(bo &srcMemOrTableIndex, ¬hing, ¬hing)) { return false; } if (deadCode_) { return true; } - // Returns -1 on trap, otherwise 0. if (isMem) { MOZ_ASSERT(srcMemOrTableIndex == 0); MOZ_ASSERT(dstMemOrTableIndex == 0); if (!emitInstanceCall(lineOrBytecode, SASigMemCopy, /*pushReturnedValue=*/false)) { return false; } } else { pushI32(dstMemOrTableIndex); pushI32(srcMemOrTableIndex); if (!emitInstanceCall(lineOrBytecode, SASigTableCopy, /*pushReturnedValue=*/false)) { return false; } } - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - return true; } bool BaseCompiler::emitDataOrElemDrop(bool isData) { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); uint32_t segIndex = 0; if (!iter_.readDataOrElemDrop(isData, &segIndex)) { return false; } if (deadCode_) { return true; } // Despite the cast to int32_t, the callee regards the value as unsigned. - // - // Returns -1 on trap, otherwise 0. pushI32(int32_t(segIndex)); - const SymbolicAddressSignature& callee = - isData ? SASigDataDrop : SASigElemDrop; - if (!emitInstanceCall(lineOrBytecode, callee, /*pushReturnedValue=*/false)) { - return false; - } - - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - - return true; + + return emitInstanceCall(lineOrBytecode, + isData ? 
SASigDataDrop : SASigElemDrop, + /*pushReturnedValue=*/false); } bool BaseCompiler::emitMemFill() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); Nothing nothing; if (!iter_.readMemFill(¬hing, ¬hing, ¬hing)) { return false; } if (deadCode_) { return true; } - // Returns -1 on trap, otherwise 0. - if (!emitInstanceCall(lineOrBytecode, SASigMemFill, - /*pushReturnedValue=*/false)) { - return false; - } - - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - - return true; + return emitInstanceCall(lineOrBytecode, SASigMemFill, + /*pushReturnedValue=*/false); } bool BaseCompiler::emitMemOrTableInit(bool isMem) { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); uint32_t segIndex = 0; uint32_t dstTableIndex = 0; Nothing nothing; @@ -10304,36 +10279,30 @@ bool BaseCompiler::emitMemOrTableInit(bo ¬hing, ¬hing)) { return false; } if (deadCode_) { return true; } - // Returns -1 on trap, otherwise 0. pushI32(int32_t(segIndex)); if (isMem) { if (!emitInstanceCall(lineOrBytecode, SASigMemInit, /*pushReturnedValue=*/false)) { return false; } } else { pushI32(dstTableIndex); if (!emitInstanceCall(lineOrBytecode, SASigTableInit, /*pushReturnedValue=*/false)) { return false; } } - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - return true; } #endif MOZ_MUST_USE bool BaseCompiler::emitTableFill() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); @@ -10343,58 +10312,38 @@ bool BaseCompiler::emitTableFill() { return false; } if (deadCode_) { return true; } // fill(start:u32, val:ref, len:u32, table:u32) -> u32 - // - // Returns -1 on trap, otherwise 0. pushI32(tableIndex); - if (!emitInstanceCall(lineOrBytecode, SASigTableFill, - /*pushReturnedValue=*/false)) { - return false; - } - - Label ok; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - - return true; + return emitInstanceCall(lineOrBytecode, SASigTableFill, + /*pushReturnedValue=*/false); } MOZ_MUST_USE bool BaseCompiler::emitTableGet() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); Nothing index; uint32_t tableIndex; if (!iter_.readTableGet(&tableIndex, &index)) { return false; } if (deadCode_) { return true; } - // get(index:u32, table:u32) -> void* - // - // Returns nullptr for error, otherwise a pointer to a nonmoveable memory - // location that holds the anyref value. + // get(index:u32, table:u32) -> uintptr_t(AnyRef) pushI32(tableIndex); if (!emitInstanceCall(lineOrBytecode, SASigTableGet, /*pushReturnedValue=*/false)) { return false; } - Label noTrap; - masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &noTrap); - trap(Trap::ThrowReported); - masm.bind(&noTrap); - - masm.loadPtr(Address(ReturnReg, 0), ReturnReg); // Push the resulting anyref back on the eval stack. NOTE: needRef() must // not kill the value in the register. RegPtr r = RegPtr(ReturnReg); needRef(r); pushRef(r); return true; @@ -10408,61 +10357,48 @@ bool BaseCompiler::emitTableGrow() { uint32_t tableIndex; if (!iter_.readTableGrow(&tableIndex, &initValue, &delta)) { return false; } if (deadCode_) { return true; } // grow(initValue:anyref, delta:u32, table:u32) -> u32 - // - // infallible. 
pushI32(tableIndex); return emitInstanceCall(lineOrBytecode, SASigTableGrow); } MOZ_MUST_USE bool BaseCompiler::emitTableSet() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); Nothing index, value; uint32_t tableIndex; if (!iter_.readTableSet(&tableIndex, &index, &value)) { return false; } if (deadCode_) { return true; } // set(index:u32, value:ref, table:u32) -> i32 - // - // Returns -1 on range error, otherwise 0 (which is then ignored). pushI32(tableIndex); - if (!emitInstanceCall(lineOrBytecode, SASigTableSet, - /*pushReturnedValue=*/false)) { - return false; - } - Label noTrap; - masm.branchTest32(Assembler::NotSigned, ReturnReg, ReturnReg, &noTrap); - trap(Trap::ThrowReported); - masm.bind(&noTrap); - return true; + return emitInstanceCall(lineOrBytecode, SASigTableSet, + /*pushReturnedValue=*/false); } MOZ_MUST_USE bool BaseCompiler::emitTableSize() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); uint32_t tableIndex; if (!iter_.readTableSize(&tableIndex)) { return false; } if (deadCode_) { return true; } // size(table:u32) -> u32 - // - // infallible. pushI32(tableIndex); return emitInstanceCall(lineOrBytecode, SASigTableSize); } bool BaseCompiler::emitStructNew() { uint32_t lineOrBytecode = readCallSiteLineOrBytecode(); uint32_t typeIndex; @@ -10482,23 +10418,16 @@ bool BaseCompiler::emitStructNew() { const StructType& structType = env_.types[typeIndex].structType(); pushI32(structType.moduleIndex_); if (!emitInstanceCall(lineOrBytecode, SASigStructNew)) { return false; } - // Null pointer check. - - Label ok; - masm.branchTestPtr(Assembler::NonZero, ReturnReg, ReturnReg, &ok); - trap(Trap::ThrowReported); - masm.bind(&ok); - // As many arguments as there are fields. MOZ_ASSERT(args.length() == structType.fields_.length()); // Optimization opportunity: Iterate backward to pop arguments off the // stack. This will generate more instructions than we want, since we // really only need to pop the stack once at the end, not for every element, // but to do better we need a bit more machinery to load elements off the @@ -10540,16 +10469,17 @@ bool BaseCompiler::emitStructNew() { } case ValType::F64: { RegF64 r = popF64(); masm.storeDouble(r, Address(rdata, offs)); freeF64(r); break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr value = popRef(); masm.storePtr(value, Address(rdata, offs)); // A write barrier is needed here for the extremely unlikely case // that the object is allocated in the tenured area - a result of // a GC artifact. 
@@ -10657,16 +10587,17 @@ bool BaseCompiler::emitStructGet() { } case ValType::F64: { RegF64 r = needF64(); masm.loadDouble(Address(rp, offs), r); pushF64(r); break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { RegPtr r = needRef(); masm.loadPtr(Address(rp, offs), r); pushRef(r); break; } case ValType::NullRef: { MOZ_CRASH("NullRef not expressible"); @@ -10718,16 +10649,17 @@ bool BaseCompiler::emitStructSet() { break; case ValType::F32: rf = popF32(); break; case ValType::F64: rd = popF64(); break; case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: rr = popRef(); break; case ValType::NullRef: MOZ_CRASH("NullRef not expressible"); default: MOZ_CRASH("Unexpected field type"); } @@ -10761,16 +10693,17 @@ bool BaseCompiler::emitStructSet() { break; } case ValType::F64: { masm.storeDouble(rd, Address(rp, offs)); freeF64(rd); break; } case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { masm.computeEffectiveAddress(Address(rp, offs), valueAddr); // emitBarrieredStore consumes valueAddr if (!emitBarrieredStore(Some(rp), valueAddr, rr, structType.fields_[fieldIndex].type)) { return false; } freeRef(rr); @@ -10797,31 +10730,33 @@ bool BaseCompiler::emitStructNarrow() { if (!iter_.readStructNarrow(&inputType, &outputType, ¬hing)) { return false; } if (deadCode_) { return true; } + // Currently not supported by struct.narrow validation. + MOZ_ASSERT(inputType != ValType::FuncRef); + MOZ_ASSERT(outputType != ValType::FuncRef); + // AnyRef -> AnyRef is a no-op, just leave the value on the stack. if (inputType == ValType::AnyRef && outputType == ValType::AnyRef) { return true; } RegPtr rp = popRef(); // AnyRef -> (ref T) must first unbox; leaves rp or null bool mustUnboxAnyref = inputType == ValType::AnyRef; // Dynamic downcast (ref T) -> (ref U), leaves rp or null - // - // Infallible. const StructType& outputStruct = env_.types[outputType.refTypeIndex()].structType(); pushI32(mustUnboxAnyref); pushI32(outputStruct.moduleIndex_); pushRef(rp); return emitInstanceCall(lineOrBytecode, SASigStructNarrow); }
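The baseline-compiler hunks above drop the per-call-site "branch on sign or null, then trap" sequences because builtinInstanceMethodCall now forwards builtin.failureMode to the shared assembler helper. emitTableGet in particular no longer null-checks and loads through a returned slot pointer: table.get now returns the AnyRef bits directly, with a distinguished invalid value meaning trap. A standalone sketch of that return convention (toy types; INVALID_REF stands in for AnyRef::invalid().forCompiledCode()):

// Sketch only: value-plus-sentinel return instead of pointer-to-slot.
#include <cstdint>
#include <cstdio>
#include <vector>

using RefBits = uintptr_t;                    // stands in for AnyRef::forCompiledCode()
constexpr RefBits INVALID_REF = ~RefBits(0);  // placeholder sentinel value

// Hypothetical instance callout: out-of-bounds reports failure via the sentinel,
// which the shared call helper turns into a trap (FailureMode::FailOnInvalidRef).
RefBits tableGet(const std::vector<RefBits>& table, uint32_t index) {
  if (index >= table.size()) {
    return INVALID_REF;
  }
  return table[index];  // the reference value itself, no extra load needed
}

int main() {
  std::vector<RefBits> table{0, 0x1000};
  RefBits r = tableGet(table, 5);
  std::printf("%s\n", r == INVALID_REF ? "trap" : "ok");
  return 0;
}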
--- a/js/src/wasm/WasmBuiltins.cpp +++ b/js/src/wasm/WasmBuiltins.cpp @@ -51,116 +51,163 @@ static const unsigned BUILTIN_THUNK_LIFO #define _F64 MIRType::Double #define _F32 MIRType::Float32 #define _I32 MIRType::Int32 #define _I64 MIRType::Int64 #define _PTR MIRType::Pointer #define _RoN MIRType::RefOrNull #define _VOID MIRType::None #define _END MIRType::None +#define _Infallible FailureMode::Infallible +#define _FailOnNegI32 FailureMode::FailOnNegI32 +#define _FailOnNullPtr FailureMode::FailOnNullPtr +#define _FailOnInvalidRef FailureMode::FailOnInvalidRef namespace js { namespace wasm { const SymbolicAddressSignature SASigSinD = { - SymbolicAddress::SinD, _F64, 1, {_F64, _END}}; + SymbolicAddress::SinD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigCosD = { - SymbolicAddress::CosD, _F64, 1, {_F64, _END}}; + SymbolicAddress::CosD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigTanD = { - SymbolicAddress::TanD, _F64, 1, {_F64, _END}}; + SymbolicAddress::TanD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigASinD = { - SymbolicAddress::ASinD, _F64, 1, {_F64, _END}}; + SymbolicAddress::ASinD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigACosD = { - SymbolicAddress::ACosD, _F64, 1, {_F64, _END}}; + SymbolicAddress::ACosD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigATanD = { - SymbolicAddress::ATanD, _F64, 1, {_F64, _END}}; + SymbolicAddress::ATanD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigCeilD = { - SymbolicAddress::CeilD, _F64, 1, {_F64, _END}}; + SymbolicAddress::CeilD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigCeilF = { - SymbolicAddress::CeilF, _F32, 1, {_F32, _END}}; + SymbolicAddress::CeilF, _F32, _Infallible, 1, {_F32, _END}}; const SymbolicAddressSignature SASigFloorD = { - SymbolicAddress::FloorD, _F64, 1, {_F64, _END}}; + SymbolicAddress::FloorD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigFloorF = { - SymbolicAddress::FloorF, _F32, 1, {_F32, _END}}; + SymbolicAddress::FloorF, _F32, _Infallible, 1, {_F32, _END}}; const SymbolicAddressSignature SASigTruncD = { - SymbolicAddress::TruncD, _F64, 1, {_F64, _END}}; + SymbolicAddress::TruncD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigTruncF = { - SymbolicAddress::TruncF, _F32, 1, {_F32, _END}}; + SymbolicAddress::TruncF, _F32, _Infallible, 1, {_F32, _END}}; const SymbolicAddressSignature SASigNearbyIntD = { - SymbolicAddress::NearbyIntD, _F64, 1, {_F64, _END}}; + SymbolicAddress::NearbyIntD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigNearbyIntF = { - SymbolicAddress::NearbyIntF, _F32, 1, {_F32, _END}}; + SymbolicAddress::NearbyIntF, _F32, _Infallible, 1, {_F32, _END}}; const SymbolicAddressSignature SASigExpD = { - SymbolicAddress::ExpD, _F64, 1, {_F64, _END}}; + SymbolicAddress::ExpD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigLogD = { - SymbolicAddress::LogD, _F64, 1, {_F64, _END}}; + SymbolicAddress::LogD, _F64, _Infallible, 1, {_F64, _END}}; const SymbolicAddressSignature SASigPowD = { - SymbolicAddress::PowD, _F64, 2, {_F64, _F64, _END}}; + SymbolicAddress::PowD, _F64, _Infallible, 2, {_F64, _F64, _END}}; const SymbolicAddressSignature SASigATan2D = { - SymbolicAddress::ATan2D, _F64, 2, {_F64, _F64, _END}}; + SymbolicAddress::ATan2D, _F64, _Infallible, 2, {_F64, _F64, _END}}; const SymbolicAddressSignature 
SASigMemoryGrow = { - SymbolicAddress::MemoryGrow, _I32, 2, {_PTR, _I32, _END}}; + SymbolicAddress::MemoryGrow, _I32, _Infallible, 2, {_PTR, _I32, _END}}; const SymbolicAddressSignature SASigMemorySize = { - SymbolicAddress::MemorySize, _I32, 1, {_PTR, _END}}; -const SymbolicAddressSignature SASigWaitI32 = { - SymbolicAddress::WaitI32, _I32, 4, {_PTR, _I32, _I32, _I64, _END}}; -const SymbolicAddressSignature SASigWaitI64 = { - SymbolicAddress::WaitI64, _I32, 4, {_PTR, _I32, _I64, _I64, _END}}; + SymbolicAddress::MemorySize, _I32, _Infallible, 1, {_PTR, _END}}; +const SymbolicAddressSignature SASigWaitI32 = {SymbolicAddress::WaitI32, + _I32, + _FailOnNegI32, + 4, + {_PTR, _I32, _I32, _I64, _END}}; +const SymbolicAddressSignature SASigWaitI64 = {SymbolicAddress::WaitI64, + _I32, + _FailOnNegI32, + 4, + {_PTR, _I32, _I64, _I64, _END}}; const SymbolicAddressSignature SASigWake = { - SymbolicAddress::Wake, _I32, 3, {_PTR, _I32, _I32, _END}}; -const SymbolicAddressSignature SASigMemCopy = { - SymbolicAddress::MemCopy, _I32, 4, {_PTR, _I32, _I32, _I32, _END}}; + SymbolicAddress::Wake, _I32, _FailOnNegI32, 3, {_PTR, _I32, _I32, _END}}; +const SymbolicAddressSignature SASigMemCopy = {SymbolicAddress::MemCopy, + _VOID, + _FailOnNegI32, + 4, + {_PTR, _I32, _I32, _I32, _END}}; const SymbolicAddressSignature SASigDataDrop = { - SymbolicAddress::DataDrop, _I32, 2, {_PTR, _I32, _END}}; -const SymbolicAddressSignature SASigMemFill = { - SymbolicAddress::MemFill, _I32, 4, {_PTR, _I32, _I32, _I32, _END}}; + SymbolicAddress::DataDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}}; +const SymbolicAddressSignature SASigMemFill = {SymbolicAddress::MemFill, + _VOID, + _FailOnNegI32, + 4, + {_PTR, _I32, _I32, _I32, _END}}; const SymbolicAddressSignature SASigMemInit = { - SymbolicAddress::MemInit, _I32, 5, {_PTR, _I32, _I32, _I32, _I32, _END}}; + SymbolicAddress::MemInit, + _VOID, + _FailOnNegI32, + 5, + {_PTR, _I32, _I32, _I32, _I32, _END}}; const SymbolicAddressSignature SASigTableCopy = { SymbolicAddress::TableCopy, - _I32, + _VOID, + _FailOnNegI32, 6, {_PTR, _I32, _I32, _I32, _I32, _I32, _END}}; const SymbolicAddressSignature SASigElemDrop = { - SymbolicAddress::ElemDrop, _I32, 2, {_PTR, _I32, _END}}; + SymbolicAddress::ElemDrop, _VOID, _FailOnNegI32, 2, {_PTR, _I32, _END}}; const SymbolicAddressSignature SASigTableFill = { - SymbolicAddress::TableFill, _I32, 5, {_PTR, _I32, _RoN, _I32, _I32, _END}}; -const SymbolicAddressSignature SASigTableGet = { - SymbolicAddress::TableGet, _PTR, 3, {_PTR, _I32, _I32, _END}}; + SymbolicAddress::TableFill, + _VOID, + _FailOnNegI32, + 5, + {_PTR, _I32, _RoN, _I32, _I32, _END}}; +const SymbolicAddressSignature SASigTableGet = {SymbolicAddress::TableGet, + _RoN, + _FailOnInvalidRef, + 3, + {_PTR, _I32, _I32, _END}}; const SymbolicAddressSignature SASigTableGrow = { - SymbolicAddress::TableGrow, _I32, 4, {_PTR, _RoN, _I32, _I32, _END}}; + SymbolicAddress::TableGrow, + _I32, + _Infallible, + 4, + {_PTR, _RoN, _I32, _I32, _END}}; const SymbolicAddressSignature SASigTableInit = { SymbolicAddress::TableInit, - _I32, + _VOID, + _FailOnNegI32, 6, {_PTR, _I32, _I32, _I32, _I32, _I32, _END}}; -const SymbolicAddressSignature SASigTableSet = { - SymbolicAddress::TableSet, _I32, 4, {_PTR, _I32, _RoN, _I32, _END}}; +const SymbolicAddressSignature SASigTableSet = {SymbolicAddress::TableSet, + _VOID, + _FailOnNegI32, + 4, + {_PTR, _I32, _RoN, _I32, _END}}; const SymbolicAddressSignature SASigTableSize = { - SymbolicAddress::TableSize, _I32, 2, {_PTR, _I32, _END}}; + 
SymbolicAddress::TableSize, _I32, _Infallible, 2, {_PTR, _I32, _END}}; const SymbolicAddressSignature SASigPostBarrier = { - SymbolicAddress::PostBarrier, _VOID, 2, {_PTR, _PTR, _END}}; + SymbolicAddress::PostBarrier, _VOID, _Infallible, 2, {_PTR, _PTR, _END}}; const SymbolicAddressSignature SASigPostBarrierFiltering = { - SymbolicAddress::PostBarrierFiltering, _VOID, 2, {_PTR, _PTR, _END}}; + SymbolicAddress::PostBarrierFiltering, + _VOID, + _Infallible, + 2, + {_PTR, _PTR, _END}}; const SymbolicAddressSignature SASigStructNew = { - SymbolicAddress::StructNew, _RoN, 2, {_PTR, _I32, _END}}; + SymbolicAddress::StructNew, _RoN, _FailOnNullPtr, 2, {_PTR, _I32, _END}}; const SymbolicAddressSignature SASigStructNarrow = { - SymbolicAddress::StructNarrow, _RoN, 4, {_PTR, _I32, _I32, _RoN, _END}}; + SymbolicAddress::StructNarrow, + _RoN, + _Infallible, + 4, + {_PTR, _I32, _I32, _RoN, _END}}; } // namespace wasm } // namespace js #undef _F64 #undef _F32 #undef _I32 #undef _I64 #undef _PTR #undef _RoN #undef _VOID #undef _END +#undef _Infallible +#undef _FailOnNegI32 +#undef _FailOnNullPtr // ============================================================================ // WebAssembly builtin C++ functions called from wasm code to implement internal // wasm operations: implementations. #if defined(JS_CODEGEN_ARM) extern "C" { @@ -614,16 +661,19 @@ void* wasm::AddressOf(SymbolicAddress im *abiType = Args_General4; return FuncCast(Instance::callImport_i32, *abiType); case SymbolicAddress::CallImport_I64: *abiType = Args_General4; return FuncCast(Instance::callImport_i64, *abiType); case SymbolicAddress::CallImport_F64: *abiType = Args_General4; return FuncCast(Instance::callImport_f64, *abiType); + case SymbolicAddress::CallImport_FuncRef: + *abiType = Args_General4; + return FuncCast(Instance::callImport_funcref, *abiType); case SymbolicAddress::CallImport_AnyRef: *abiType = Args_General4; return FuncCast(Instance::callImport_anyref, *abiType); case SymbolicAddress::CoerceInPlace_ToInt32: *abiType = Args_General1; return FuncCast(CoerceInPlace_ToInt32, *abiType); case SymbolicAddress::CoerceInPlace_ToNumber: *abiType = Args_General1; @@ -832,16 +882,17 @@ bool wasm::NeedsBuiltinThunk(SymbolicAdd switch (sym) { case SymbolicAddress::HandleDebugTrap: // GenerateDebugTrapStub case SymbolicAddress::HandleThrow: // GenerateThrowStub case SymbolicAddress::HandleTrap: // GenerateTrapExit case SymbolicAddress::CallImport_Void: // GenerateImportInterpExit case SymbolicAddress::CallImport_I32: case SymbolicAddress::CallImport_I64: case SymbolicAddress::CallImport_F64: + case SymbolicAddress::CallImport_FuncRef: case SymbolicAddress::CallImport_AnyRef: case SymbolicAddress::CoerceInPlace_ToInt32: // GenerateImportJitExit case SymbolicAddress::CoerceInPlace_ToNumber: #if defined(JS_CODEGEN_MIPS32) case SymbolicAddress::js_jit_gAtomic64Lock: #endif #ifdef WASM_CODEGEN_DEBUG case SymbolicAddress::PrintI32:
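Each SymbolicAddressSignature above now carries its failure mode alongside the return type and argument list, so both the compilers and the C++ implementations can assert they agree on the convention (see the MOZ_ASSERT(SASig*.failureMode == ...) lines added in WasmInstance.cpp below). A simplified, self-contained sketch of such a descriptor table, with toy types in place of MIRType:

// Sketch only: a builtin-signature table that records how each callee fails.
#include <cstdio>

enum class Ty { I32, I64, F64, Ptr, Ref, Void };
enum class FailureMode { Infallible, FailOnNegI32, FailOnNullPtr, FailOnInvalidRef };

struct BuiltinSig {
  const char* name;
  Ty result;
  FailureMode failureMode;
  int numArgs;
  Ty args[6];
};

// Entries chosen to mirror the table above: one infallible builtin and one of
// each fallible convention.
constexpr BuiltinSig kSigs[] = {
    {"MemoryGrow", Ty::I32, FailureMode::Infallible, 2, {Ty::Ptr, Ty::I32}},
    {"MemCopy", Ty::Void, FailureMode::FailOnNegI32, 4, {Ty::Ptr, Ty::I32, Ty::I32, Ty::I32}},
    {"TableGet", Ty::Ref, FailureMode::FailOnInvalidRef, 3, {Ty::Ptr, Ty::I32, Ty::I32}},
    {"StructNew", Ty::Ref, FailureMode::FailOnNullPtr, 2, {Ty::Ptr, Ty::I32}},
};

int main() {
  for (const BuiltinSig& s : kSigs) {
    std::printf("%-10s fallible=%d\n", s.name, s.failureMode != FailureMode::Infallible);
  }
  return 0;
}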
--- a/js/src/wasm/WasmConstants.h +++ b/js/src/wasm/WasmConstants.h @@ -44,17 +44,17 @@ enum class SectionId { enum class TypeCode { I32 = 0x7f, // SLEB128(-0x01) I64 = 0x7e, // SLEB128(-0x02) F32 = 0x7d, // SLEB128(-0x03) F64 = 0x7c, // SLEB128(-0x04) // A function pointer with any signature - AnyFunc = 0x70, // SLEB128(-0x10) + FuncRef = 0x70, // SLEB128(-0x10) // A reference to any type. AnyRef = 0x6f, // Type constructor for reference types. Ref = 0x6e, // Type constructor for function types
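The TypeCode rename above is spelling-only: 0x70 remains the one-byte encoding formerly called AnyFunc, now called FuncRef in line with the funcref terminology used throughout this patch. A small sketch of decoding these codes (toy decoder, not the real binary reader):

// Sketch only: the wire values are taken from the enum above.
#include <cstdint>
#include <cstdio>

enum class TypeCode : uint8_t {
  I32 = 0x7f,
  I64 = 0x7e,
  F32 = 0x7d,
  F64 = 0x7c,
  FuncRef = 0x70,  // formerly named AnyFunc; same encoding
  AnyRef = 0x6f,
  Ref = 0x6e,
};

const char* name(TypeCode tc) {
  switch (tc) {
    case TypeCode::I32: return "i32";
    case TypeCode::I64: return "i64";
    case TypeCode::F32: return "f32";
    case TypeCode::F64: return "f64";
    case TypeCode::FuncRef: return "funcref";
    case TypeCode::AnyRef: return "anyref";
    case TypeCode::Ref: return "ref";
  }
  return "unknown";
}

int main() {
  std::printf("0x70 -> %s\n", name(TypeCode(0x70)));
  return 0;
}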
--- a/js/src/wasm/WasmCraneliftCompile.cpp +++ b/js/src/wasm/WasmCraneliftCompile.cpp @@ -431,19 +431,19 @@ TypeCode global_type(const GlobalDesc* g size_t global_tlsOffset(const GlobalDesc* global) { return globalToTlsOffset(global->offset()); } // TableDesc size_t table_tlsOffset(const TableDesc* table) { - MOZ_RELEASE_ASSERT(table->kind == TableKind::AnyFunction || - table->kind == TableKind::TypedFunction, - "cranelift doesn't support AnyRef tables yet."); + MOZ_RELEASE_ASSERT( + table->kind == TableKind::FuncRef || table->kind == TableKind::AsmJS, + "cranelift doesn't support AnyRef tables yet."); return globalToTlsOffset(table->globalDataOffset); } // Sig size_t funcType_numArgs(const FuncTypeWithId* funcType) { return funcType->args().length(); }
--- a/js/src/wasm/WasmFrameIter.cpp +++ b/js/src/wasm/WasmFrameIter.cpp @@ -1256,16 +1256,17 @@ static const char* ThunkedNativeToDescri switch (func) { case SymbolicAddress::HandleDebugTrap: case SymbolicAddress::HandleThrow: case SymbolicAddress::HandleTrap: case SymbolicAddress::CallImport_Void: case SymbolicAddress::CallImport_I32: case SymbolicAddress::CallImport_I64: case SymbolicAddress::CallImport_F64: + case SymbolicAddress::CallImport_FuncRef: case SymbolicAddress::CallImport_AnyRef: case SymbolicAddress::CoerceInPlace_ToInt32: case SymbolicAddress::CoerceInPlace_ToNumber: MOZ_ASSERT(!NeedsBuiltinThunk(func), "not in sync with NeedsBuiltinThunk"); break; case SymbolicAddress::ToInt32: return "call to asm.js native ToInt32 coercion (in wasm)";
--- a/js/src/wasm/WasmGenerator.cpp +++ b/js/src/wasm/WasmGenerator.cpp @@ -362,28 +362,28 @@ bool ModuleGenerator::init(Metadata* may } } if (env_->startFuncIndex) { addOrMerge(ExportedFunc(*env_->startFuncIndex, true)); } for (const ElemSegment* seg : env_->elemSegments) { - TableKind kind = !seg->active() ? TableKind::AnyFunction + TableKind kind = !seg->active() ? TableKind::FuncRef : env_->tables[seg->tableIndex].kind; switch (kind) { - case TableKind::AnyFunction: + case TableKind::FuncRef: for (uint32_t funcIndex : seg->elemFuncIndices) { if (funcIndex == NullFuncIndex) { continue; } addOrMerge(ExportedFunc(funcIndex, false)); } break; - case TableKind::TypedFunction: + case TableKind::AsmJS: // asm.js functions are not exported. break; case TableKind::AnyRef: break; } } auto* newEnd =
--- a/js/src/wasm/WasmInstance.cpp +++ b/js/src/wasm/WasmInstance.cpp @@ -126,16 +126,17 @@ bool Instance::callImport(JSContext* cx, args[i].set(Int32Value(*(int32_t*)&argv[i])); break; case ValType::F32: args[i].set(JS::CanonicalizedDoubleValue(*(float*)&argv[i])); break; case ValType::F64: args[i].set(JS::CanonicalizedDoubleValue(*(double*)&argv[i])); break; + case ValType::FuncRef: case ValType::AnyRef: { args[i].set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)&argv[i]))); break; } case ValType::Ref: MOZ_CRASH("temporarily unsupported Ref type in callImport"); case ValType::I64: MOZ_CRASH("unhandled type in callImport"); @@ -214,17 +215,17 @@ bool Instance::callImport(JSContext* cx, break; case ValType::F32: type = TypeSet::DoubleType(); break; case ValType::F64: type = TypeSet::DoubleType(); break; case ValType::Ref: - MOZ_CRASH("case guarded above"); + case ValType::FuncRef: case ValType::AnyRef: MOZ_CRASH("case guarded above"); case ValType::I64: MOZ_CRASH("NYI"); case ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } if (!TypeScript::ArgTypes(script, i)->hasType(type)) { @@ -300,38 +301,59 @@ Instance::callImport_anyref(Instance* in RootedValue rval(cx); if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) { return false; } RootedAnyRef result(cx, AnyRef::null()); if (!BoxAnyRef(cx, rval, &result)) { return false; } + static_assert(sizeof(argv[0]) >= sizeof(void*), "fits"); *(void**)argv = result.get().forCompiledCode(); return true; } -/* static */ uint32_t /* infallible */ -Instance::memoryGrow_i32(Instance* instance, uint32_t delta) { +/* static */ int32_t /* 0 to signal trap; 1 to signal OK */ +Instance::callImport_funcref(Instance* instance, int32_t funcImportIndex, + int32_t argc, uint64_t* argv) { + JSContext* cx = TlsContext.get(); + RootedValue rval(cx); + if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) { + return false; + } + + RootedFunction fun(cx); + if (!CheckFuncRefValue(cx, rval, &fun)) { + return false; + } + + *(void**)argv = fun; + return true; +} + +/* static */ uint32_t Instance::memoryGrow_i32(Instance* instance, + uint32_t delta) { + MOZ_ASSERT(SASigMemoryGrow.failureMode == FailureMode::Infallible); MOZ_ASSERT(!instance->isAsmJS()); JSContext* cx = TlsContext.get(); RootedWasmMemoryObject memory(cx, instance->memory_); uint32_t ret = WasmMemoryObject::grow(memory, delta, cx); // If there has been a moving grow, this Instance should have been notified. MOZ_RELEASE_ASSERT(instance->tlsData()->memoryBase == instance->memory_->buffer().dataPointerEither()); return ret; } -/* static */ uint32_t /* infallible */ -Instance::memorySize_i32(Instance* instance) { +/* static */ uint32_t Instance::memorySize_i32(Instance* instance) { + MOZ_ASSERT(SASigMemorySize.failureMode == FailureMode::Infallible); + // This invariant must hold when running Wasm code. Assert it here so we can // write tests for cross-realm calls. 
MOZ_ASSERT(TlsContext.get()->realm() == instance->realm()); uint32_t byteLength = instance->memory()->volatileMemoryLength(); MOZ_ASSERT(byteLength % wasm::PageSize == 0); return byteLength / wasm::PageSize; } @@ -369,30 +391,32 @@ static int32_t PerformWait(Instance* ins return 2; case FutexThread::WaitResult::Error: return -1; default: MOZ_CRASH(); } } -/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */ -Instance::wait_i32(Instance* instance, uint32_t byteOffset, int32_t value, - int64_t timeout_ns) { +/* static */ int32_t Instance::wait_i32(Instance* instance, uint32_t byteOffset, + int32_t value, int64_t timeout_ns) { + MOZ_ASSERT(SASigWaitI32.failureMode == FailureMode::FailOnNegI32); return PerformWait<int32_t>(instance, byteOffset, value, timeout_ns); } -/* static */ int32_t /* -1 to signal trap; nonnegative result for ok */ -Instance::wait_i64(Instance* instance, uint32_t byteOffset, int64_t value, - int64_t timeout_ns) { +/* static */ int32_t Instance::wait_i64(Instance* instance, uint32_t byteOffset, + int64_t value, int64_t timeout_ns) { + MOZ_ASSERT(SASigWaitI64.failureMode == FailureMode::FailOnNegI32); return PerformWait<int64_t>(instance, byteOffset, value, timeout_ns); } -/* static */ int32_t /* -1 to signal trap; nonnegative for ok */ -Instance::wake(Instance* instance, uint32_t byteOffset, int32_t count) { +/* static */ int32_t Instance::wake(Instance* instance, uint32_t byteOffset, + int32_t count) { + MOZ_ASSERT(SASigWake.failureMode == FailureMode::FailOnNegI32); + JSContext* cx = TlsContext.get(); // The alignment guard is not in the wasm spec as of 2017-11-02, but is // considered likely to appear, as 4-byte alignment is required for WAKE by // the spec's validation algorithm. if (byteOffset & 3) { JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, @@ -413,19 +437,21 @@ Instance::wake(Instance* instance, uint3 JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_WAKE_OVERFLOW); return -1; } return int32_t(woken); } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::memCopy(Instance* instance, uint32_t dstByteOffset, - uint32_t srcByteOffset, uint32_t len) { +/* static */ int32_t Instance::memCopy(Instance* instance, + uint32_t dstByteOffset, + uint32_t srcByteOffset, uint32_t len) { + MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32); + WasmMemoryObject* mem = instance->memory(); uint32_t memLen = mem->volatileMemoryLength(); if (len == 0) { // Even though the length is zero, we must check for a valid offset. But // zero-length operations at the edge of the memory are allowed. 
if (dstByteOffset <= memLen && srcByteOffset <= memLen) { return 0; @@ -481,18 +507,19 @@ Instance::memCopy(Instance* instance, ui } JSContext* cx = TlsContext.get(); JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS); return -1; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::dataDrop(Instance* instance, uint32_t segIndex) { +/* static */ int32_t Instance::dataDrop(Instance* instance, uint32_t segIndex) { + MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32); + MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(), "ensured by validation"); if (!instance->passiveDataSegments_[segIndex]) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_DROPPED_DATA_SEG); return -1; } @@ -500,19 +527,20 @@ Instance::dataDrop(Instance* instance, u SharedDataSegment& segRefPtr = instance->passiveDataSegments_[segIndex]; MOZ_RELEASE_ASSERT(!segRefPtr->active()); // Drop this instance's reference to the DataSegment so it can be released. segRefPtr = nullptr; return 0; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::memFill(Instance* instance, uint32_t byteOffset, uint32_t value, - uint32_t len) { +/* static */ int32_t Instance::memFill(Instance* instance, uint32_t byteOffset, + uint32_t value, uint32_t len) { + MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32); + WasmMemoryObject* mem = instance->memory(); uint32_t memLen = mem->volatileMemoryLength(); if (len == 0) { // Even though the length is zero, we must check for a valid offset. But // zero-length operations at the edge of the memory are allowed. if (byteOffset <= memLen) { return 0; @@ -553,19 +581,21 @@ Instance::memFill(Instance* instance, ui } JSContext* cx = TlsContext.get(); JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS); return -1; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::memInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset, - uint32_t len, uint32_t segIndex) { +/* static */ int32_t Instance::memInit(Instance* instance, uint32_t dstOffset, + uint32_t srcOffset, uint32_t len, + uint32_t segIndex) { + MOZ_ASSERT(SASigMemInit.failureMode == FailureMode::FailOnNegI32); + MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveDataSegments_.length(), "ensured by validation"); if (!instance->passiveDataSegments_[segIndex]) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_DROPPED_DATA_SEG); return -1; } @@ -631,20 +661,22 @@ Instance::memInit(Instance* instance, ui } } JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS); return -1; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::tableCopy(Instance* instance, uint32_t dstOffset, uint32_t srcOffset, - uint32_t len, uint32_t dstTableIndex, - uint32_t srcTableIndex) { +/* static */ int32_t Instance::tableCopy(Instance* instance, uint32_t dstOffset, + uint32_t srcOffset, uint32_t len, + uint32_t dstTableIndex, + uint32_t srcTableIndex) { + MOZ_ASSERT(SASigMemCopy.failureMode == FailureMode::FailOnNegI32); + const SharedTable& srcTable = instance->tables()[srcTableIndex]; uint32_t srcTableLen = srcTable->length(); const SharedTable& dstTable = instance->tables()[dstTableIndex]; uint32_t dstTableLen = dstTable->length(); if (len == 0) { // Even though the number of items to copy is zero, we must check for valid @@ -708,18 +740,19 @@ Instance::tableCopy(Instance* instance, } } 
JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS); return -1; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::elemDrop(Instance* instance, uint32_t segIndex) { +/* static */ int32_t Instance::elemDrop(Instance* instance, uint32_t segIndex) { + MOZ_ASSERT(SASigDataDrop.failureMode == FailureMode::FailOnNegI32); + MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(), "ensured by validation"); if (!instance->passiveElemSegments_[segIndex]) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_DROPPED_ELEM_SEG); return -1; } @@ -766,30 +799,33 @@ void Instance::initElems(uint32_t tableI WasmInstanceObject* calleeInstanceObj = ExportedFunctionToInstanceObject(fun); Instance& calleeInstance = calleeInstanceObj->instance(); Tier calleeTier = calleeInstance.code().bestTier(); const CodeRange& calleeCodeRange = calleeInstanceObj->getExportedFunctionCodeRange(fun, calleeTier); void* code = calleeInstance.codeBase(calleeTier) + calleeCodeRange.funcTableEntry(); - table.setAnyFunc(dstOffset + i, code, &calleeInstance); + table.setFuncRef(dstOffset + i, code, &calleeInstance); continue; } } void* code = codeBaseTier + codeRanges[funcToCodeRange[funcIndex]].funcTableEntry(); - table.setAnyFunc(dstOffset + i, code, this); + table.setFuncRef(dstOffset + i, code, this); } } } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::tableInit(Instance* instance, uint32_t dstOffset, uint32_t srcOffset, - uint32_t len, uint32_t segIndex, uint32_t tableIndex) { +/* static */ int32_t Instance::tableInit(Instance* instance, uint32_t dstOffset, + uint32_t srcOffset, uint32_t len, + uint32_t segIndex, + uint32_t tableIndex) { + MOZ_ASSERT(SASigTableInit.failureMode == FailureMode::FailOnNegI32); + MOZ_RELEASE_ASSERT(size_t(segIndex) < instance->passiveElemSegments_.length(), "ensured by validation"); if (!instance->passiveElemSegments_[segIndex]) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_DROPPED_ELEM_SEG); return -1; } @@ -798,17 +834,17 @@ Instance::tableInit(Instance* instance, MOZ_RELEASE_ASSERT(!seg.active()); const uint32_t segLen = seg.length(); const Table& table = *instance->tables()[tableIndex]; const uint32_t tableLen = table.length(); // Element segments cannot currently contain arbitrary values, and anyref // tables cannot be initialized from segments. - MOZ_ASSERT(table.kind() == TableKind::AnyFunction); + MOZ_ASSERT(table.kind() == TableKind::FuncRef); // We are proposing to copy // // seg[ srcOffset .. srcOffset + len - 1 ] // to // tableBase[ dstOffset .. dstOffset + len - 1 ] if (len == 0) { @@ -846,19 +882,21 @@ Instance::tableInit(Instance* instance, } } JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_OUT_OF_BOUNDS); return -1; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::tableFill(Instance* instance, uint32_t start, void* value, - uint32_t len, uint32_t tableIndex) { +/* static */ int32_t Instance::tableFill(Instance* instance, uint32_t start, + void* value, uint32_t len, + uint32_t tableIndex) { + MOZ_ASSERT(SASigTableFill.failureMode == FailureMode::FailOnNegI32); + Table& table = *instance->tables()[tableIndex]; MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef); if (len == 0) { // Even though the length is zero, we must check for a valid offset. But // zero-length operations at the edge of the table are allowed. 
if (start <= table.length()) { return 0; @@ -892,105 +930,104 @@ Instance::tableFill(Instance* instance, } JSContext* cx = TlsContext.get(); JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_TABLE_OUT_OF_BOUNDS); return -1; } -// The return convention for tableGet() is awkward but avoids a situation where -// Ion code has to hold a value that may or may not be a pointer to GC'd -// storage, or where Ion has to pass in a pointer to storage where a return -// value can be written. -// -// Note carefully that the pointer that is returned may not be valid past -// operations that change the size of the table or cause GC work; it is strictly -// to be used to retrieve the return value. - -/* static */ void* /* nullptr to signal trap; pointer to table location - otherwise */ -Instance::tableGet(Instance* instance, uint32_t index, uint32_t tableIndex) { +/* static */ void* Instance::tableGet(Instance* instance, uint32_t index, + uint32_t tableIndex) { + MOZ_ASSERT(SASigTableGet.failureMode == FailureMode::FailOnInvalidRef); const Table& table = *instance->tables()[tableIndex]; MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef); if (index >= table.length()) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_TABLE_OUT_OF_BOUNDS); - return nullptr; + return AnyRef::invalid().forCompiledCode(); } - return const_cast<void*>(table.getShortlivedAnyRefLocForCompiledCode(index)); + return table.getAnyRef(index).forCompiledCode(); } -/* static */ uint32_t /* infallible */ -Instance::tableGrow(Instance* instance, void* initValue, uint32_t delta, - uint32_t tableIndex) { +/* static */ uint32_t Instance::tableGrow(Instance* instance, void* initValue, + uint32_t delta, uint32_t tableIndex) { + MOZ_ASSERT(SASigTableGrow.failureMode == FailureMode::Infallible); + RootedAnyRef obj(TlsContext.get(), AnyRef::fromCompiledCode(initValue)); Table& table = *instance->tables()[tableIndex]; MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef); uint32_t oldSize = table.grow(delta, TlsContext.get()); if (oldSize != uint32_t(-1) && initValue != nullptr) { for (uint32_t i = 0; i < delta; i++) { table.setAnyRef(oldSize + i, obj.get()); } } return oldSize; } -/* static */ int32_t /* -1 to signal trap; 0 for ok */ -Instance::tableSet(Instance* instance, uint32_t index, void* value, - uint32_t tableIndex) { +/* static */ int32_t Instance::tableSet(Instance* instance, uint32_t index, + void* value, uint32_t tableIndex) { + MOZ_ASSERT(SASigTableSet.failureMode == FailureMode::FailOnNegI32); + Table& table = *instance->tables()[tableIndex]; MOZ_RELEASE_ASSERT(table.kind() == TableKind::AnyRef); if (index >= table.length()) { JS_ReportErrorNumberASCII(TlsContext.get(), GetErrorMessage, nullptr, JSMSG_WASM_TABLE_OUT_OF_BOUNDS); return -1; } table.setAnyRef(index, AnyRef::fromCompiledCode(value)); return 0; } -/* static */ uint32_t /* infallible */ -Instance::tableSize(Instance* instance, uint32_t tableIndex) { +/* static */ uint32_t Instance::tableSize(Instance* instance, + uint32_t tableIndex) { + MOZ_ASSERT(SASigTableSize.failureMode == FailureMode::Infallible); Table& table = *instance->tables()[tableIndex]; return table.length(); } -/* static */ void /* infallible */ -Instance::postBarrier(Instance* instance, gc::Cell** location) { +/* static */ void Instance::postBarrier(Instance* instance, + gc::Cell** location) { + MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible); MOZ_ASSERT(location); TlsContext.get()->runtime()->gc.storeBuffer().putCell(location); } -/* 
static */ void /* infallible */ -Instance::postBarrierFiltering(Instance* instance, gc::Cell** location) { +/* static */ void Instance::postBarrierFiltering(Instance* instance, + gc::Cell** location) { + MOZ_ASSERT(SASigPostBarrier.failureMode == FailureMode::Infallible); MOZ_ASSERT(location); if (*location == nullptr || !gc::IsInsideNursery(*location)) { return; } TlsContext.get()->runtime()->gc.storeBuffer().putCell(location); } // The typeIndex is an index into the structTypeDescrs_ table in the instance. // That table holds TypeDescr objects. // // When we fail to allocate we return a nullptr; the wasm side must check this // and propagate it as an error. -/* static */ void* /* null on OOM, otherwise a pointer */ -Instance::structNew(Instance* instance, uint32_t typeIndex) { +/* static */ void* Instance::structNew(Instance* instance, uint32_t typeIndex) { + MOZ_ASSERT(SASigStructNew.failureMode == FailureMode::FailOnNullPtr); JSContext* cx = TlsContext.get(); Rooted<TypeDescr*> typeDescr(cx, instance->structTypeDescrs_[typeIndex]); return TypedObject::createZeroed(cx, typeDescr); } -/* static */ void* /* infallible */ -Instance::structNarrow(Instance* instance, uint32_t mustUnboxAnyref, - uint32_t outputTypeIndex, void* maybeNullPtr) { +/* static */ void* Instance::structNarrow(Instance* instance, + uint32_t mustUnboxAnyref, + uint32_t outputTypeIndex, + void* maybeNullPtr) { + MOZ_ASSERT(SASigStructNarrow.failureMode == FailureMode::Infallible); + JSContext* cx = TlsContext.get(); Rooted<TypedObject*> obj(cx); Rooted<StructTypeDescr*> typeDescr(cx); if (maybeNullPtr == nullptr) { return maybeNullPtr; } @@ -1054,17 +1091,17 @@ Instance::structNarrow(Instance* instanc // Note, dst must point into nonmoveable storage that is not in the nursery, // this matters for the write barriers. Furthermore, for pointer types the // current value of *dst must be null so that only a post-barrier is required. // // Regarding the destination not being in the nursery, we have these cases. // Either the written location is in the global data section in the // WasmInstanceObject, or the Cell of a WasmGlobalObject: // -// - WasmInstanceObjects are always tenured and u.ref_/anyref_ may point to a +// - WasmInstanceObjects are always tenured and u.ref_ may point to a // nursery object, so we need a post-barrier since the global data of an // instance is effectively a field of the WasmInstanceObject. // // - WasmGlobalObjects are always tenured, and they have a Cell field, so a // post-barrier may be needed for the same reason as above. void CopyValPostBarriered(uint8_t* dst, const Val& src) { switch (src.type().code()) { @@ -1083,43 +1120,32 @@ void CopyValPostBarriered(uint8_t* dst, memcpy(dst, &x, sizeof(x)); break; } case ValType::F64: { double x = src.f64(); memcpy(dst, &x, sizeof(x)); break; } + case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: { // TODO/AnyRef-boxing: With boxed immediates and strings, the write // barrier is going to have to be more complicated. 
ASSERT_ANYREF_IS_JSOBJECT; MOZ_ASSERT(*(void**)dst == nullptr, "should be null so no need for a pre-barrier"); - AnyRef x = src.anyref(); - memcpy(dst, x.asJSObjectAddress(), sizeof(x)); + AnyRef x = src.ref(); + memcpy(dst, x.asJSObjectAddress(), sizeof(*x.asJSObjectAddress())); if (!x.isNull()) { JSObject::writeBarrierPost((JSObject**)dst, nullptr, x.asJSObject()); } break; } - case ValType::Ref: { - MOZ_ASSERT(*(JSObject**)dst == nullptr, - "should be null so no need for a pre-barrier"); - JSObject* x = src.ref(); - memcpy(dst, &x, sizeof(x)); - if (x) { - JSObject::writeBarrierPost((JSObject**)dst, nullptr, x); - } - break; - } case ValType::NullRef: { - break; - } - default: { MOZ_CRASH("unexpected Val type"); } } } Instance::Instance(JSContext* cx, Handle<WasmInstanceObject*> object, SharedCode code, UniqueTlsData tlsDataIn, HandleWasmMemoryObject memory, SharedTableVector&& tables, @@ -1397,23 +1423,23 @@ void Instance::tracePrivate(JSTracer* tr TraceNullableEdge(trc, &funcImportTls(fi).fun, "wasm import"); } for (const SharedTable& table : tables_) { table->trace(trc); } for (const GlobalDesc& global : code().metadata().globals) { - // Indirect anyref global get traced by the owning WebAssembly.Global. + // Indirect reference globals get traced by the owning WebAssembly.Global. if (!global.type().isReference() || global.isConstant() || global.isIndirect()) { continue; } GCPtrObject* obj = (GCPtrObject*)(globalData() + global.offset()); - TraceNullableEdge(trc, obj, "wasm ref/anyref global"); + TraceNullableEdge(trc, obj, "wasm reference-typed global"); } TraceNullableEdge(trc, &memory_, "wasm buffer"); structTypeDescrs_.trace(trc); } void Instance::trace(JSTracer* trc) { // Technically, instead of having this method, the caller could use @@ -1651,90 +1677,98 @@ bool Instance::callExport(JSContext* cx, // stored in the first element of the array (which, therefore, must have // length >= 1). Vector<ExportArg, 8> exportArgs(cx); if (!exportArgs.resize(Max<size_t>(1, funcType->args().length()))) { return false; } ASSERT_ANYREF_IS_JSOBJECT; - Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> anyrefs(cx); + Rooted<GCVector<JSObject*, 8, SystemAllocPolicy>> refs(cx); DebugCodegen(DebugChannel::Function, "wasm-function[%d]; arguments ", funcIndex); RootedValue v(cx); for (size_t i = 0; i < funcType->args().length(); ++i) { v = i < args.length() ? args[i] : UndefinedValue(); switch (funcType->arg(i).code()) { case ValType::I32: if (!ToInt32(cx, v, (int32_t*)&exportArgs[i])) { - DebugCodegen(DebugChannel::Function, "call to ToInt32 failed!\n"); return false; } DebugCodegen(DebugChannel::Function, "i32(%d) ", *(int32_t*)&exportArgs[i]); break; case ValType::I64: MOZ_CRASH("unexpected i64 flowing into callExport"); case ValType::F32: if (!RoundFloat32(cx, v, (float*)&exportArgs[i])) { - DebugCodegen(DebugChannel::Function, - "call to RoundFloat32 failed!\n"); return false; } DebugCodegen(DebugChannel::Function, "f32(%f) ", *(float*)&exportArgs[i]); break; case ValType::F64: if (!ToNumber(cx, v, (double*)&exportArgs[i])) { - DebugCodegen(DebugChannel::Function, "call to ToNumber failed!\n"); return false; } DebugCodegen(DebugChannel::Function, "f64(%lf) ", *(double*)&exportArgs[i]); break; case ValType::Ref: MOZ_CRASH("temporarily unsupported Ref type in callExport"); + case ValType::FuncRef: { + RootedFunction fun(cx); + if (!CheckFuncRefValue(cx, v, &fun)) { + return false; + } + // Store in rooted array until no more GC is possible. 
+ ASSERT_ANYREF_IS_JSOBJECT; + if (!refs.emplaceBack(fun)) { + return false; + } + DebugCodegen(DebugChannel::Function, "ptr(#%d) ", + int(refs.length() - 1)); + break; + } case ValType::AnyRef: { RootedAnyRef ar(cx, AnyRef::null()); if (!BoxAnyRef(cx, v, &ar)) { - DebugCodegen(DebugChannel::Function, "call to BoxAnyRef failed!\n"); return false; } - // We'll copy the value into the arguments array just before the call; - // for now tuck the value away in a rooted array. + // Store in rooted array until no more GC is possible. ASSERT_ANYREF_IS_JSOBJECT; - if (!anyrefs.emplaceBack(ar.get().asJSObject())) { + if (!refs.emplaceBack(ar.get().asJSObject())) { return false; } DebugCodegen(DebugChannel::Function, "ptr(#%d) ", - int(anyrefs.length() - 1)); + int(refs.length() - 1)); break; } case ValType::NullRef: { MOZ_CRASH("NullRef not expressible"); } } } DebugCodegen(DebugChannel::Function, "\n"); // Copy over reference values from the rooted array, if any. - if (anyrefs.length() > 0) { + if (refs.length() > 0) { DebugCodegen(DebugChannel::Function, "; "); size_t nextRef = 0; for (size_t i = 0; i < funcType->args().length(); ++i) { if (funcType->arg(i).isReference()) { ASSERT_ANYREF_IS_JSOBJECT; - *(void**)&exportArgs[i] = (void*)anyrefs[nextRef++]; + *(void**)&exportArgs[i] = (void*)refs[nextRef++]; DebugCodegen(DebugChannel::Function, "ptr(#%d) = %p ", int(nextRef - 1), *(void**)&exportArgs[i]); } } - anyrefs.clear(); + refs.clear(); } { JitActivation activation(cx); // Call the per-exported-function trampoline created by GenerateEntry. auto funcPtr = JS_DATA_TO_FUNC_PTR(ExportFuncPtr, interpEntry); if (!CALL_GENERATED_2(funcPtr, exportArgs.begin(), tlsData())) { @@ -1778,16 +1812,17 @@ bool Instance::callExport(JSContext* cx, DebugCodegen(DebugChannel::Function, "f32(%f)", *(float*)retAddr); break; case ExprType::F64: args.rval().set(NumberValue(*(double*)retAddr)); DebugCodegen(DebugChannel::Function, "f64(%lf)", *(double*)retAddr); break; case ExprType::Ref: MOZ_CRASH("temporarily unsupported Ref type in callExport"); + case ExprType::FuncRef: case ExprType::AnyRef: args.rval().set(UnboxAnyRef(AnyRef::fromCompiledCode(*(void**)retAddr))); DebugCodegen(DebugChannel::Function, "ptr(%p)", *(void**)retAddr); break; case ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case ExprType::Limit: MOZ_CRASH("Limit");
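The recurring change through WasmInstance.cpp is that each builtin call-out drops its hand-written "-1 to signal trap" comment and instead asserts the failure convention recorded on the builtin's SymbolicAddressSignature. A minimal standalone sketch of the shape this relies on, with stand-in definitions inferred from the assertions above rather than copied from the real headers:

// Stand-ins only; the real definitions live elsewhere in js/src/wasm.
enum class FailureMode : uint8_t {
  Infallible,        // result needs no check
  FailOnNegI32,      // int32_t result; negative means "trap already reported"
  FailOnNullPtr,     // pointer result; nullptr means failure (e.g. structNew OOM)
  FailOnInvalidRef   // pointer result; AnyRef::invalid() means failure (tableGet)
};

struct SymbolicAddressSignature {
  FailureMode failureMode;  // consulted by the compilers, asserted by the C++ side
  // argument/result type descriptors elided in this sketch
};

// Example: the C++ entry point and the signature must agree, so each call-out
// now opens with an assertion such as
//   MOZ_ASSERT(SASigMemFill.failureMode == FailureMode::FailOnNegI32);
// in place of a comment describing the return convention.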
--- a/js/src/wasm/WasmInstance.h +++ b/js/src/wasm/WasmInstance.h @@ -177,16 +177,17 @@ class Instance { public: // Functions to be called directly from wasm code. static int32_t callImport_void(Instance*, int32_t, int32_t, uint64_t*); static int32_t callImport_i32(Instance*, int32_t, int32_t, uint64_t*); static int32_t callImport_i64(Instance*, int32_t, int32_t, uint64_t*); static int32_t callImport_f64(Instance*, int32_t, int32_t, uint64_t*); static int32_t callImport_anyref(Instance*, int32_t, int32_t, uint64_t*); + static int32_t callImport_funcref(Instance*, int32_t, int32_t, uint64_t*); static uint32_t memoryGrow_i32(Instance* instance, uint32_t delta); static uint32_t memorySize_i32(Instance* instance); static int32_t wait_i32(Instance* instance, uint32_t byteOffset, int32_t value, int64_t timeout); static int32_t wait_i64(Instance* instance, uint32_t byteOffset, int64_t value, int64_t timeout); static int32_t wake(Instance* instance, uint32_t byteOffset, int32_t count); static int32_t memCopy(Instance* instance, uint32_t destByteOffset,
--- a/js/src/wasm/WasmIonCompile.cpp +++ b/js/src/wasm/WasmIonCompile.cpp @@ -178,16 +178,17 @@ class FunctionCompiler { break; case ValType::F32: ins = MConstant::New(alloc(), Float32Value(0.f), MIRType::Float32); break; case ValType::F64: ins = MConstant::New(alloc(), DoubleValue(0.0), MIRType::Double); break; case ValType::Ref: + case ValType::FuncRef: case ValType::AnyRef: ins = MWasmNullConstant::New(alloc()); break; case ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } curBlock_->add(ins); @@ -675,76 +676,16 @@ class FunctionCompiler { } auto* ins = MWasmAddOffset::New(alloc(), base, access->offset(), bytecodeOffset()); curBlock_->add(ins); access->clearOffset(); return ins; } - bool checkI32NegativeMeansFailedResult(MDefinition* value) { - if (inDeadCode()) { - return true; - } - - auto* zero = constant(Int32Value(0), MIRType::Int32); - auto* cond = compare(value, zero, JSOP_LT, MCompare::Compare_Int32); - - MBasicBlock* failBlock; - if (!newBlock(curBlock_, &failBlock)) { - return false; - } - - MBasicBlock* okBlock; - if (!newBlock(curBlock_, &okBlock)) { - return false; - } - - curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock)); - failBlock->end( - MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset())); - curBlock_ = okBlock; - return true; - } - - bool checkPointerNullMeansFailedResult(MDefinition* value) { - if (inDeadCode()) { - return true; - } - - auto* cond = MIsNullPointer::New(alloc(), value); - curBlock_->add(cond); - - MBasicBlock* failBlock; - if (!newBlock(curBlock_, &failBlock)) { - return false; - } - - MBasicBlock* okBlock; - if (!newBlock(curBlock_, &okBlock)) { - return false; - } - - curBlock_->end(MTest::New(alloc(), cond, failBlock, okBlock)); - failBlock->end( - MWasmTrap::New(alloc(), wasm::Trap::ThrowReported, bytecodeOffset())); - curBlock_ = okBlock; - return true; - } - - MDefinition* derefTableElementPointer(MDefinition* base) { - // Table element storage may be moved by GC operations, so reads from that - // storage are not movable. 
- MWasmLoadRef* load = - MWasmLoadRef::New(alloc(), base, AliasSet::WasmTableElement, - /*isMovable=*/false); - curBlock_->add(load); - return load; - } - MDefinition* load(MDefinition* base, MemoryAccessDesc* access, ValType result) { if (inDeadCode()) { return nullptr; } MWasmLoadTls* memoryBase = maybeLoadMemoryBase(); MInstruction* load = nullptr; @@ -1152,16 +1093,18 @@ class FunctionCompiler { bool builtinCall(const SymbolicAddressSignature& builtin, uint32_t lineOrBytecode, const CallCompileState& call, MDefinition** def) { if (inDeadCode()) { *def = nullptr; return true; } + MOZ_ASSERT(builtin.failureMode == FailureMode::Infallible); + CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic); auto callee = CalleeDesc::builtin(builtin.identity); auto* ins = MWasmCall::New(alloc(), desc, callee, call.regArgs_, builtin.retType, StackArgAreaSizeUnaligned(builtin)); if (!ins) { return false; } @@ -1169,32 +1112,37 @@ class FunctionCompiler { curBlock_->add(ins); *def = ins; return true; } bool builtinInstanceMethodCall(const SymbolicAddressSignature& builtin, uint32_t lineOrBytecode, const CallCompileState& call, - MDefinition** def) { + MDefinition** def = nullptr) { + MOZ_ASSERT_IF(!def, builtin.retType == MIRType::None); if (inDeadCode()) { - *def = nullptr; + if (def) { + *def = nullptr; + } return true; } CallSiteDesc desc(lineOrBytecode, CallSiteDesc::Symbolic); auto* ins = MWasmCall::NewBuiltinInstanceMethodCall( - alloc(), desc, builtin.identity, call.instanceArg_, call.regArgs_, - builtin.retType, StackArgAreaSizeUnaligned(builtin)); + alloc(), desc, builtin.identity, builtin.failureMode, call.instanceArg_, + call.regArgs_, builtin.retType, StackArgAreaSizeUnaligned(builtin)); if (!ins) { return false; } curBlock_->add(ins); - *def = ins; + if (def) { + *def = ins; + } return true; } /*********************************************** Control flow generation */ inline bool inDeadCode() const { return curBlock_ == nullptr; } void returnExpr(MDefinition* operand) { @@ -2178,18 +2126,19 @@ static bool EmitGetGlobal(FunctionCompil result = f.constant(int64_t(value.i64())); break; case ValType::F32: result = f.constant(value.f32()); break; case ValType::F64: result = f.constant(value.f64()); break; + case ValType::FuncRef: case ValType::AnyRef: - MOZ_ASSERT(value.anyref().isNull()); + MOZ_ASSERT(value.ref().isNull()); result = f.nullRefConstant(); break; default: MOZ_CRASH("unexpected type in EmitGetGlobal"); } f.iter().setResult(result); return true; @@ -2218,18 +2167,17 @@ static bool EmitSetGlobal(FunctionCompil CallCompileState args; if (!f.passInstance(callee.argTypes[0], &args)) { return false; } if (!f.passArg(barrierAddr, callee.argTypes[1], &args)) { return false; } f.finishCall(&args); - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { + if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args)) { return false; } } return true; } static bool EmitTeeGlobal(FunctionCompiler& f) { @@ -2843,20 +2791,16 @@ static bool EmitWait(FunctionCompiler& f return false; } MDefinition* ret; if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { return false; } - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - f.iter().setResult(ret); return true; } static bool EmitWake(FunctionCompiler& f) { uint32_t lineOrBytecode = f.readCallSiteLineOrBytecode(); const SymbolicAddressSignature& callee = SASigWake; @@ -2890,20 +2834,16 @@ static bool EmitWake(FunctionCompiler& f return false; } MDefinition* ret; if 
(!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { return false; } - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - f.iter().setResult(ret); return true; } static bool EmitAtomicXchg(FunctionCompiler& f, ValType type, Scalar::Type viewType) { LinearMemoryAddress<MDefinition*> addr; MDefinition* value; @@ -2969,26 +2909,17 @@ static bool EmitMemOrTableCopy(FunctionC if (!f.passArg(sti, callee.argTypes[5], &args)) { return false; } } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } static bool EmitDataOrElemDrop(FunctionCompiler& f, bool isData) { uint32_t segIndexVal = 0; if (!f.iter().readDataOrElemDrop(isData, &segIndexVal)) { return false; } @@ -3010,26 +2941,17 @@ static bool EmitDataOrElemDrop(FunctionC if (!f.passArg(segIndex, callee.argTypes[1], &args)) { return false; } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } static bool EmitMemFill(FunctionCompiler& f) { MDefinition *start, *val, *len; if (!f.iter().readMemFill(&start, &val, &len)) { return false; } @@ -3054,26 +2976,17 @@ static bool EmitMemFill(FunctionCompiler if (!f.passArg(len, callee.argTypes[3], &args)) { return false; } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } static bool EmitMemOrTableInit(FunctionCompiler& f, bool isMem) { uint32_t segIndexVal = 0, dstTableIndex = 0; MDefinition *dstOff, *srcOff, *len; if (!f.iter().readMemOrTableInit(isMem, &segIndexVal, &dstTableIndex, &dstOff, &srcOff, &len)) { return false; @@ -3115,26 +3028,17 @@ static bool EmitMemOrTableInit(FunctionC if (!f.passArg(dti, callee.argTypes[5], &args)) { return false; } } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } #endif // ENABLE_WASM_BULKMEM_OPS #ifdef ENABLE_WASM_REFTYPES // Note, table.{get,grow,set} on table(funcref) are currently rejected by the // verifier. 
static bool EmitTableFill(FunctionCompiler& f) { @@ -3174,26 +3078,17 @@ static bool EmitTableFill(FunctionCompil if (!f.passArg(tableIndexArg, callee.argTypes[4], &args)) { return false; } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } static bool EmitTableGet(FunctionCompiler& f) { uint32_t tableIndex; MDefinition* index; if (!f.iter().readTableGet(&tableIndex, &index)) { return false; } @@ -3224,26 +3119,18 @@ static bool EmitTableGet(FunctionCompile } if (!f.finishCall(&args)) { return false; } // The return value here is either null, denoting an error, or a short-lived // pointer to a location containing a possibly-null ref. - MDefinition* result; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &result)) { - return false; - } - if (!f.checkPointerNullMeansFailedResult(result)) { - return false; - } - - MDefinition* ret = f.derefTableElementPointer(result); - if (!ret) { + MDefinition* ret; + if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { return false; } f.iter().setResult(ret); return true; } static bool EmitTableGrow(FunctionCompiler& f) { @@ -3332,24 +3219,17 @@ static bool EmitTableSet(FunctionCompile if (!f.passArg(tableIndexArg, callee.argTypes[3], &args)) { return false; } if (!f.finishCall(&args)) { return false; } - MDefinition* ret; - if (!f.builtinInstanceMethodCall(callee, lineOrBytecode, args, &ret)) { - return false; - } - if (!f.checkI32NegativeMeansFailedResult(ret)) { - return false; - } - return true; + return f.builtinInstanceMethodCall(callee, lineOrBytecode, args); } static bool EmitTableSize(FunctionCompiler& f) { uint32_t tableIndex; if (!f.iter().readTableSize(&tableIndex)) { return false; }
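With the failure mode carried on the builtin signature, Ion no longer emits its own guard after each instance call: checkI32NegativeMeansFailedResult and checkPointerNullMeansFailedResult are deleted, and the trap check is presumably folded into the call itself via the failureMode now forwarded to MWasmCall::NewBuiltinInstanceMethodCall. A condensed sketch of the resulting call-site pattern inside this file (the helper name EmitFallibleVoidBuiltin is illustrative, not part of the patch):

// Condensed call-site pattern after this patch; relies on FunctionCompiler
// and CallCompileState from WasmIonCompile.cpp.
static bool EmitFallibleVoidBuiltin(FunctionCompiler& f,
                                    const SymbolicAddressSignature& callee,
                                    uint32_t lineOrBytecode,
                                    const CallCompileState& args) {
  // Omitting the MDefinition** out-param is only legal when the builtin's
  // retType is MIRType::None (builtinInstanceMethodCall asserts this); the
  // negative-result trap is handled by the generated call, keyed off
  // callee.failureMode, instead of an explicit compare-and-branch here.
  return f.builtinInstanceMethodCall(callee, lineOrBytecode, args);
}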
--- a/js/src/wasm/WasmJS.cpp +++ b/js/src/wasm/WasmJS.cpp @@ -173,22 +173,30 @@ static bool ToWebAssemblyValue(JSContext case ValType::F64: { double d; if (!ToNumber(cx, v, &d)) { return false; } val.set(Val(d)); return true; } + case ValType::FuncRef: { + RootedFunction fun(cx); + if (!CheckFuncRefValue(cx, v, &fun)) { + return false; + } + val.set(Val(ValType::FuncRef, AnyRef::fromJSObject(fun))); + return true; + } case ValType::AnyRef: { RootedAnyRef tmp(cx, AnyRef::null()); if (!BoxAnyRef(cx, v, &tmp)) { return false; } - val.set(Val(tmp)); + val.set(Val(ValType::AnyRef, tmp)); return true; } case ValType::Ref: case ValType::NullRef: case ValType::I64: { break; } } @@ -198,18 +206,19 @@ static bool ToWebAssemblyValue(JSContext static Value ToJSValue(const Val& val) { switch (val.type().code()) { case ValType::I32: return Int32Value(val.i32()); case ValType::F32: return DoubleValue(JS::CanonicalizeNaN(double(val.f32()))); case ValType::F64: return DoubleValue(JS::CanonicalizeNaN(val.f64())); + case ValType::FuncRef: case ValType::AnyRef: - return UnboxAnyRef(val.anyref()); + return UnboxAnyRef(val.ref()); case ValType::Ref: case ValType::NullRef: case ValType::I64: break; } MOZ_CRASH("unexpected type when translating to a JS value"); } @@ -1531,16 +1540,39 @@ WasmFunctionScope* WasmInstanceObject::g return funcScope; } bool wasm::IsWasmExportedFunction(JSFunction* fun) { return fun->kind() == JSFunction::Wasm; } +bool wasm::CheckFuncRefValue(JSContext* cx, HandleValue v, + MutableHandleFunction fun) { + if (v.isNull()) { + MOZ_ASSERT(!fun); + return true; + } + + if (v.isObject()) { + JSObject& obj = v.toObject(); + if (obj.is<JSFunction>()) { + JSFunction* f = &obj.as<JSFunction>(); + if (IsWasmExportedFunction(f)) { + fun.set(f); + return true; + } + } + } + + JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, + JSMSG_WASM_BAD_FUNCREF_VALUE); + return false; +} + Instance& wasm::ExportedFunctionToInstance(JSFunction* fun) { return ExportedFunctionToInstanceObject(fun)->instance(); } WasmInstanceObject* wasm::ExportedFunctionToInstanceObject(JSFunction* fun) { MOZ_ASSERT(fun->kind() == JSFunction::Wasm || fun->kind() == JSFunction::AsmJS); const Value& v = fun->getExtendedSlot(FunctionExtended::WASM_INSTANCE_SLOT); @@ -2001,17 +2033,17 @@ bool WasmTableObject::construct(JSContex RootedLinearString elementLinearStr(cx, elementStr->ensureLinear(cx)); if (!elementLinearStr) { return false; } TableKind tableKind; if (StringEqualsAscii(elementLinearStr, "anyfunc") || StringEqualsAscii(elementLinearStr, "funcref")) { - tableKind = TableKind::AnyFunction; + tableKind = TableKind::FuncRef; #ifdef ENABLE_WASM_REFTYPES } else if (StringEqualsAscii(elementLinearStr, "anyref")) { if (!HasReftypesSupport(cx)) { JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_ELEMENT); return false; } tableKind = TableKind::AnyRef; @@ -2090,18 +2122,18 @@ bool WasmTableObject::getImpl(JSContext* } uint32_t index; if (!ToTableIndex(cx, args.get(0), table, "get index", &index)) { return false; } switch (table.kind()) { - case TableKind::AnyFunction: { - const FunctionTableElem& elem = table.getAnyFunc(index); + case TableKind::FuncRef: { + const FunctionTableElem& elem = table.getFuncRef(index); if (!elem.code) { args.rval().setNull(); return true; } Instance& instance = *elem.tls->instance; const CodeRange& codeRange = *instance.code().lookupFuncRange(elem.code); @@ -2129,16 +2161,23 @@ bool WasmTableObject::getImpl(JSContext* /* static */ bool WasmTableObject::get(JSContext* cx, unsigned 
argc, Value* vp) { CallArgs args = CallArgsFromVp(argc, vp); return CallNonGenericMethod<IsTable, getImpl>(cx, args); } static void TableFunctionFill(JSContext* cx, Table* table, HandleFunction value, uint32_t index, uint32_t limit) { + if (!value) { + while (index < limit) { + table->setNull(index++); + } + return; + } + RootedWasmInstanceObject instanceObj(cx, ExportedFunctionToInstanceObject(value)); uint32_t funcIndex = ExportedFunctionToFuncIndex(value); #ifdef DEBUG RootedFunction f(cx); MOZ_ASSERT(instanceObj->getExportedFunction(cx, instanceObj, funcIndex, &f)); MOZ_ASSERT(value == f); @@ -2146,32 +2185,18 @@ static void TableFunctionFill(JSContext* Instance& instance = instanceObj->instance(); Tier tier = instance.code().bestTier(); const MetadataTier& metadata = instance.metadata(tier); const CodeRange& codeRange = metadata.codeRange(metadata.lookupFuncExport(funcIndex)); void* code = instance.codeBase(tier) + codeRange.funcTableEntry(); while (index < limit) { - table->setAnyFunc(index++, code, &instance); - } -} - -static bool IsWasmExportedFunction(const Value& v, MutableHandleFunction f) { - if (!v.isObject()) { - return false; - } - - JSObject& obj = v.toObject(); - if (!obj.is<JSFunction>() || !IsWasmExportedFunction(&obj.as<JSFunction>())) { - return false; - } - - f.set(&obj.as<JSFunction>()); - return true; + table->setFuncRef(index++, code, &instance); + } } /* static */ bool WasmTableObject::setImpl(JSContext* cx, const CallArgs& args) { RootedWasmTableObject tableObj( cx, &args.thisv().toObject().as<WasmTableObject>()); Table& table = tableObj->table(); @@ -2181,31 +2206,24 @@ bool WasmTableObject::setImpl(JSContext* uint32_t index; if (!ToTableIndex(cx, args.get(0), table, "set index", &index)) { return false; } RootedValue fillValue(cx, args[1]); switch (table.kind()) { - case TableKind::AnyFunction: { - RootedFunction value(cx); - if (!IsWasmExportedFunction(fillValue, &value) && !fillValue.isNull()) { - JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, - JSMSG_WASM_BAD_TABLE_VALUE); + case TableKind::FuncRef: { + RootedFunction fun(cx); + if (!CheckFuncRefValue(cx, fillValue, &fun)) { return false; } - - if (value) { - MOZ_ASSERT(index < MaxTableLength); - static_assert(MaxTableLength < UINT32_MAX, "Invariant"); - TableFunctionFill(cx, &table, value, index, index + 1); - } else { - table.setNull(index); - } + MOZ_ASSERT(index < MaxTableLength); + static_assert(MaxTableLength < UINT32_MAX, "Invariant"); + TableFunctionFill(cx, &table, fun, index, index + 1); break; } case TableKind::AnyRef: { RootedAnyRef tmp(cx, AnyRef::null()); if (!BoxAnyRef(cx, fillValue, &tmp)) { return false; } table.setAnyRef(index, tmp); @@ -2255,31 +2273,30 @@ bool WasmTableObject::growImpl(JSContext } MOZ_ASSERT(delta <= MaxTableLength); // grow() should ensure this MOZ_ASSERT(oldLength <= MaxTableLength - delta); // ditto static_assert(MaxTableLength < UINT32_MAX, "Invariant"); switch (table->table().kind()) { - case TableKind::AnyFunction: { - RootedFunction value(cx); + case TableKind::FuncRef: { if (fillValue.isNull()) { #ifdef DEBUG for (uint32_t index = oldLength; index < oldLength + delta; index++) { - MOZ_ASSERT(table->table().getAnyFunc(index).code == nullptr); + MOZ_ASSERT(table->table().getFuncRef(index).code == nullptr); } #endif - } else if (IsWasmExportedFunction(fillValue, &value)) { - TableFunctionFill(cx, &table->table(), value, oldLength, + } else { + RootedFunction fun(cx); + if (!CheckFuncRefValue(cx, fillValue, &fun)) { + return false; + } + TableFunctionFill(cx, 
&table->table(), fun, oldLength, oldLength + delta); - } else { - JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, - JSMSG_WASM_BAD_TBL_GROW_INIT, "funcref"); - return false; } break; } case TableKind::AnyRef: { RootedAnyRef tmp(cx, AnyRef::null()); if (!BoxAnyRef(cx, fillValue, &tmp)) { return false; } @@ -2348,24 +2365,24 @@ void WasmGlobalObject::trace(JSTracer* t WasmGlobalObject* global = reinterpret_cast<WasmGlobalObject*>(obj); if (global->isNewborn()) { // This can happen while we're allocating the object, in which case // every single slot of the object is not defined yet. In particular, // there's nothing to trace yet. return; } switch (global->type().code()) { + case ValType::FuncRef: case ValType::AnyRef: - if (!global->cell()->anyref.isNull()) { + if (!global->cell()->ref.isNull()) { // TODO/AnyRef-boxing: With boxed immediates and strings, the write // barrier is going to have to be more complicated. ASSERT_ANYREF_IS_JSOBJECT; - TraceManuallyBarrieredEdge(trc, - global->cell()->anyref.asJSObjectAddress(), - "wasm anyref global"); + TraceManuallyBarrieredEdge(trc, global->cell()->ref.asJSObjectAddress(), + "wasm reference-typed global"); } break; case ValType::I32: case ValType::F32: case ValType::I64: case ValType::F64: break; case ValType::Ref: @@ -2417,32 +2434,32 @@ WasmGlobalObject* WasmGlobalObject::crea cell->i64 = val.i64(); break; case ValType::F32: cell->f32 = val.f32(); break; case ValType::F64: cell->f64 = val.f64(); break; - case ValType::NullRef: - MOZ_ASSERT(!cell->ref, "value should be null already"); - break; + case ValType::FuncRef: case ValType::AnyRef: - MOZ_ASSERT(cell->anyref.isNull(), "no prebarriers needed"); - cell->anyref = val.anyref(); - if (!cell->anyref.isNull()) { + MOZ_ASSERT(cell->ref.isNull(), "no prebarriers needed"); + cell->ref = val.ref(); + if (!cell->ref.isNull()) { // TODO/AnyRef-boxing: With boxed immediates and strings, the write // barrier is going to have to be more complicated. ASSERT_ANYREF_IS_JSOBJECT; - JSObject::writeBarrierPost(&cell->anyref, nullptr, - cell->anyref.asJSObject()); + JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(), nullptr, + cell->ref.asJSObject()); } break; case ValType::Ref: MOZ_CRASH("Ref NYI"); + case ValType::NullRef: + MOZ_CRASH("NullRef not expressible"); } obj->initReservedSlot(TYPE_SLOT, Int32Value(int32_t(val.type().bitsUnsafe()))); obj->initReservedSlot(MUTABLE_SLOT, JS::BooleanValue(isMutable)); obj->initReservedSlot(CELL_SLOT, PrivateValue(cell)); MOZ_ASSERT(!obj->isNewborn()); @@ -2500,16 +2517,19 @@ bool WasmGlobalObject::construct(JSConte // initializing value. 
globalType = ValType::I64; } else if (StringEqualsAscii(typeLinearStr, "f32")) { globalType = ValType::F32; } else if (StringEqualsAscii(typeLinearStr, "f64")) { globalType = ValType::F64; #ifdef ENABLE_WASM_REFTYPES } else if (HasReftypesSupport(cx) && + StringEqualsAscii(typeLinearStr, "funcref")) { + globalType = ValType::FuncRef; + } else if (HasReftypesSupport(cx) && StringEqualsAscii(typeLinearStr, "anyref")) { globalType = ValType::AnyRef; #endif } else { JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_GLOBAL_TYPE); return false; } @@ -2528,29 +2548,32 @@ bool WasmGlobalObject::construct(JSConte globalVal = Val(uint64_t(0)); break; case ValType::F32: globalVal = Val(float(0.0)); break; case ValType::F64: globalVal = Val(double(0.0)); break; + case ValType::FuncRef: + globalVal = Val(ValType::FuncRef, AnyRef::null()); + break; case ValType::AnyRef: - globalVal = Val(AnyRef::null()); + globalVal = Val(ValType::AnyRef, AnyRef::null()); break; case ValType::Ref: MOZ_CRASH("Ref NYI"); case ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } // Override with non-undefined value, if provided. RootedValue valueVal(cx, args.get(1)); if (!valueVal.isUndefined() || - (args.length() >= 2 && globalType == ValType::AnyRef)) { + (args.length() >= 2 && globalType.isReference())) { if (!ToWebAssemblyValue(cx, globalType, valueVal, &globalVal)) { return false; } } WasmGlobalObject* global = WasmGlobalObject::create(cx, globalVal, isMutable); if (!global) { return false; @@ -2565,16 +2588,17 @@ static bool IsGlobal(HandleValue v) { } /* static */ bool WasmGlobalObject::valueGetterImpl(JSContext* cx, const CallArgs& args) { switch (args.thisv().toObject().as<WasmGlobalObject>().type().code()) { case ValType::I32: case ValType::F32: case ValType::F64: + case ValType::FuncRef: case ValType::AnyRef: args.rval().set(args.thisv().toObject().as<WasmGlobalObject>().value(cx)); return true; case ValType::I64: JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_I64_TYPE); return false; case ValType::Ref: @@ -2622,27 +2646,28 @@ bool WasmGlobalObject::valueSetterImpl(J cell->i32 = val.get().i32(); break; case ValType::F32: cell->f32 = val.get().f32(); break; case ValType::F64: cell->f64 = val.get().f64(); break; + case ValType::FuncRef: case ValType::AnyRef: { - AnyRef prevPtr = cell->anyref; + AnyRef prevPtr = cell->ref; // TODO/AnyRef-boxing: With boxed immediates and strings, the write // barrier is going to have to be more complicated. 
ASSERT_ANYREF_IS_JSOBJECT; JSObject::writeBarrierPre(prevPtr.asJSObject()); - cell->anyref = val.get().anyref(); - if (!cell->anyref.isNull()) { - JSObject::writeBarrierPost(cell->anyref.asJSObjectAddress(), + cell->ref = val.get().ref(); + if (!cell->ref.isNull()) { + JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(), prevPtr.asJSObject(), - cell->anyref.asJSObject()); + cell->ref.asJSObject()); } break; } case ValType::I64: MOZ_CRASH("unexpected i64 when setting global's value"); case ValType::Ref: MOZ_CRASH("Ref NYI"); case ValType::NullRef: @@ -2688,18 +2713,21 @@ void WasmGlobalObject::val(MutableHandle outval.set(Val(uint64_t(cell->i64))); return; case ValType::F32: outval.set(Val(cell->f32)); return; case ValType::F64: outval.set(Val(cell->f64)); return; + case ValType::FuncRef: + outval.set(Val(ValType::FuncRef, cell->ref)); + return; case ValType::AnyRef: - outval.set(Val(cell->anyref)); + outval.set(Val(ValType::AnyRef, cell->ref)); return; case ValType::Ref: MOZ_CRASH("Ref NYI"); case ValType::NullRef: MOZ_CRASH("NullRef not expressible"); } MOZ_CRASH("unexpected Global type"); }
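On the JS API side, funcref becomes a first-class global and table value type: CheckFuncRefValue admits only null or an exported wasm function, and the accepted value is stored through the same wasm::AnyRef cell that anyref uses (the separate JSObject* field in WasmGlobalObject::Cell is collapsed in the WasmJS.h hunk below). A sketch of the resulting store path for a funcref global, condensed from ToWebAssemblyValue and valueSetterImpl above; the wrapper name SetFuncRefGlobal is illustrative only:

// Condensed sketch: how a JS value reaches a funcref global's cell.
static bool SetFuncRefGlobal(JSContext* cx, HandleValue v,
                             WasmGlobalObject::Cell* cell) {
  RootedFunction fun(cx);
  if (!CheckFuncRefValue(cx, v, &fun)) {   // null or exported wasm function only
    return false;                          // TypeError already reported
  }
  AnyRef prev = cell->ref;
  JSObject::writeBarrierPre(prev.asJSObject());         // pre-barrier on old value
  cell->ref = AnyRef::fromJSObject(fun);                // funcref shares the AnyRef cell
  if (!cell->ref.isNull()) {
    JSObject::writeBarrierPost(cell->ref.asJSObjectAddress(),
                               prev.asJSObject(),
                               cell->ref.asJSObject()); // post-barrier on new value
  }
  return true;
}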
--- a/js/src/wasm/WasmJS.h +++ b/js/src/wasm/WasmJS.h @@ -97,23 +97,24 @@ MOZ_MUST_USE bool DeserializeModule(JSCo // A WebAssembly "Exported Function" is the spec name for the JS function // objects created to wrap wasm functions. This predicate returns false // for asm.js functions which are semantically just normal JS functions // (even if they are implemented via wasm under the hood). The accessor // functions for extracting the instance and func-index of a wasm function // can be used for both wasm and asm.js, however. -extern bool IsWasmExportedFunction(JSFunction* fun); +bool IsWasmExportedFunction(JSFunction* fun); +bool CheckFuncRefValue(JSContext* cx, HandleValue v, MutableHandleFunction fun); -extern Instance& ExportedFunctionToInstance(JSFunction* fun); -extern WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun); -extern uint32_t ExportedFunctionToFuncIndex(JSFunction* fun); +Instance& ExportedFunctionToInstance(JSFunction* fun); +WasmInstanceObject* ExportedFunctionToInstanceObject(JSFunction* fun); +uint32_t ExportedFunctionToFuncIndex(JSFunction* fun); -extern bool IsSharedWasmMemoryObject(JSObject* obj); +bool IsSharedWasmMemoryObject(JSObject* obj); } // namespace wasm // The class of the WebAssembly global namespace object. extern const Class WebAssemblyClass; JSObject* InitWebAssemblyClass(JSContext* cx, Handle<GlobalObject*> global); @@ -171,18 +172,17 @@ class WasmGlobalObject : public NativeOb public: // For exposed globals the Cell holds the value of the global; the // instance's global area holds a pointer to the Cell. union Cell { int32_t i32; int64_t i64; float f32; double f64; - JSObject* ref; // Note, this breaks an abstraction boundary - wasm::AnyRef anyref; + wasm::AnyRef ref; Cell() : i64(0) {} ~Cell() {} }; static const unsigned RESERVED_SLOTS = 3; static const Class class_; static const JSPropertySpec properties[]; static const JSFunctionSpec methods[];
--- a/js/src/wasm/WasmModule.cpp +++ b/js/src/wasm/WasmModule.cpp @@ -1223,16 +1223,17 @@ static bool MakeStructField(JSContext* c case ValType::F64: t = GlobalObject::getOrCreateScalarTypeDescr(cx, cx->global(), Scalar::Float64); break; case ValType::Ref: t = GlobalObject::getOrCreateReferenceTypeDescr( cx, cx->global(), ReferenceType::TYPE_OBJECT); break; + case ValType::FuncRef: case ValType::AnyRef: t = GlobalObject::getOrCreateReferenceTypeDescr( cx, cx->global(), ReferenceType::TYPE_WASM_ANYREF); break; default: MOZ_CRASH("Bad field type"); } MOZ_ASSERT(t != nullptr);
--- a/js/src/wasm/WasmOpIter.h +++ b/js/src/wasm/WasmOpIter.h @@ -41,16 +41,17 @@ class StackType { #ifdef DEBUG bool isValidCode() { switch (UnpackTypeCodeType(tc_)) { case TypeCode::I32: case TypeCode::I64: case TypeCode::F32: case TypeCode::F64: case TypeCode::AnyRef: + case TypeCode::FuncRef: case TypeCode::Ref: case TypeCode::NullRef: case TypeCode::Limit: return true; default: return false; } } @@ -59,16 +60,17 @@ class StackType { public: enum Code { I32 = uint8_t(ValType::I32), I64 = uint8_t(ValType::I64), F32 = uint8_t(ValType::F32), F64 = uint8_t(ValType::F64), AnyRef = uint8_t(ValType::AnyRef), + FuncRef = uint8_t(ValType::FuncRef), Ref = uint8_t(ValType::Ref), NullRef = uint8_t(ValType::NullRef), TVar = uint8_t(TypeCode::Limit), }; StackType() : tc_(InvalidPackedTypeCode()) {} @@ -78,34 +80,26 @@ class StackType { explicit StackType(const ValType& t) : tc_(t.packed()) {} PackedTypeCode packed() const { return tc_; } Code code() const { return Code(UnpackTypeCodeType(tc_)); } uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); } - bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; } - bool isReference() const { - TypeCode tc = UnpackTypeCodeType(tc_); - return tc == TypeCode::Ref || tc == TypeCode::AnyRef || - tc == TypeCode::NullRef; - } + bool isReference() const { return IsReferenceType(tc_); } bool operator==(const StackType& that) const { return tc_ == that.tc_; } - bool operator!=(const StackType& that) const { return tc_ != that.tc_; } - bool operator==(Code that) const { MOZ_ASSERT(that != Code::Ref); return code() == that; } - bool operator!=(Code that) const { return !(*this == that); } }; static inline ValType NonTVarToValType(StackType type) { MOZ_ASSERT(type != StackType::TVar); return ValType(type.packed()); } @@ -760,16 +754,17 @@ inline bool OpIter<Policy>::readBlockTyp switch (uncheckedCode) { case uint8_t(ExprType::Void): case uint8_t(ExprType::I32): case uint8_t(ExprType::I64): case uint8_t(ExprType::F32): case uint8_t(ExprType::F64): known = true; break; + case uint8_t(ExprType::FuncRef): case uint8_t(ExprType::AnyRef): #ifdef ENABLE_WASM_REFTYPES known = true; #endif break; case uint8_t(ExprType::Ref): known = env_.gcTypesEnabled() && uncheckedRefTypeIndex < MaxTypes && uncheckedRefTypeIndex < env_.types.length(); @@ -1559,17 +1554,17 @@ inline bool OpIter<Policy>::readCallIndi } if (*tableIndex >= env_.tables.length()) { // Special case this for improved user experience. if (!env_.tables.length()) { return fail("can't call_indirect without a table"); } return fail("table index out of range for call_indirect"); } - if (env_.tables[*tableIndex].kind != TableKind::AnyFunction) { + if (env_.tables[*tableIndex].kind != TableKind::FuncRef) { return fail("indirect calls must go through a table of 'funcref'"); } if (!popWithType(ValType::I32, callee)) { return false; } if (!env_.types[*funcTypeIndex].isFuncType()) { @@ -1950,17 +1945,17 @@ inline bool OpIter<Policy>::readMemOrTab } else { if (memOrTableIndex >= env_.tables.length()) { return fail("table index out of range for table.init"); } *dstTableIndex = memOrTableIndex; // Element segments must carry functions exclusively and funcref is not // yet a subtype of anyref. 
- if (env_.tables[*dstTableIndex].kind != TableKind::AnyFunction) { + if (env_.tables[*dstTableIndex].kind != TableKind::FuncRef) { return fail("only tables of 'funcref' may have element segments"); } if (*segIndex >= env_.elemSegments.length()) { return fail("table.init segment index out of range"); } } return true;
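StackType::isReference() now defers to a shared IsReferenceType predicate instead of enumerating the reference type codes inline. The helper itself is not in this hunk; judging from the removed inline version plus the new FuncRef code, it presumably reads roughly like this:

// Assumed shape of the shared predicate (not shown in this changeset).
static inline bool IsReferenceType(PackedTypeCode ptc) {
  switch (UnpackTypeCodeType(ptc)) {
    case TypeCode::Ref:
    case TypeCode::AnyRef:
    case TypeCode::FuncRef:
    case TypeCode::NullRef:
      return true;
    default:
      return false;
  }
}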
--- a/js/src/wasm/WasmStubs.cpp +++ b/js/src/wasm/WasmStubs.cpp @@ -307,16 +307,17 @@ static void StoreABIReturn(MacroAssemble masm.canonicalizeFloat(ReturnFloat32Reg); masm.storeFloat32(ReturnFloat32Reg, Address(argv, 0)); break; case ExprType::F64: masm.canonicalizeDouble(ReturnDoubleReg); masm.storeDouble(ReturnDoubleReg, Address(argv, 0)); break; case ExprType::Ref: + case ExprType::FuncRef: case ExprType::AnyRef: masm.storePtr(ReturnReg, Address(argv, 0)); break; case ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case ExprType::Limit: MOZ_CRASH("Limit"); } @@ -895,20 +896,19 @@ static bool GenerateJitEntry(MacroAssemb case ExprType::F64: { masm.canonicalizeDouble(ReturnDoubleReg); GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg); ScratchDoubleScope fpscratch(masm); masm.boxDouble(ReturnDoubleReg, JSReturnOperand, fpscratch); break; } case ExprType::Ref: - MOZ_CRASH("return ref in jitentry NYI"); - break; + case ExprType::FuncRef: case ExprType::AnyRef: - MOZ_CRASH("return anyref in jitentry NYI"); + MOZ_CRASH("returning reference in jitentry NYI"); break; case ExprType::I64: MOZ_CRASH("unexpected return type when calling from ion to wasm"); case ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case ExprType::Limit: MOZ_CRASH("Limit"); } @@ -1146,16 +1146,17 @@ void wasm::GenerateDirectCallFromJit(Mac masm.canonicalizeFloat(ReturnFloat32Reg); GenPrintF32(DebugChannel::Function, masm, ReturnFloat32Reg); break; case wasm::ExprType::F64: masm.canonicalizeDouble(ReturnDoubleReg); GenPrintF64(DebugChannel::Function, masm, ReturnDoubleReg); break; case wasm::ExprType::Ref: + case wasm::ExprType::FuncRef: case wasm::ExprType::AnyRef: case wasm::ExprType::I64: MOZ_CRASH("unexpected return type when calling from ion to wasm"); case wasm::ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case wasm::ExprType::Limit: MOZ_CRASH("Limit"); } @@ -1547,16 +1548,24 @@ static bool GenerateImportInterpExit(Mac masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel); masm.loadDouble(argv, ReturnDoubleReg); GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ", funcImportIndex); GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg); break; case ExprType::Ref: MOZ_CRASH("No Ref support here yet"); + case ExprType::FuncRef: + masm.call(SymbolicAddress::CallImport_FuncRef); + masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel); + masm.loadPtr(argv, ReturnReg); + GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ", + funcImportIndex); + GenPrintPtr(DebugChannel::Import, masm, ReturnReg); + break; case ExprType::AnyRef: masm.call(SymbolicAddress::CallImport_AnyRef); masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel); masm.loadPtr(argv, ReturnReg); GenPrintf(DebugChannel::Import, masm, "wasm-import[%u]; returns ", funcImportIndex); GenPrintPtr(DebugChannel::Import, masm, ReturnReg); break; @@ -1748,20 +1757,19 @@ static bool GenerateImportJitExit(MacroA masm.convertValueToFloat(JSReturnOperand, ReturnFloat32Reg, &oolConvert); GenPrintF32(DebugChannel::Import, masm, ReturnFloat32Reg); break; case ExprType::F64: masm.convertValueToDouble(JSReturnOperand, ReturnDoubleReg, &oolConvert); GenPrintF64(DebugChannel::Import, masm, ReturnDoubleReg); break; case ExprType::Ref: - MOZ_CRASH("ref returned by import (jit exit) NYI"); - break; + case ExprType::FuncRef: case ExprType::AnyRef: - MOZ_CRASH("anyref returned by import (jit exit) NYI"); + MOZ_CRASH("reference returned by import (jit exit) NYI"); break; case 
ExprType::NullRef: MOZ_CRASH("NullRef not expressible"); case ExprType::Limit: MOZ_CRASH("Limit"); } GenPrintf(DebugChannel::Import, masm, "\n");
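The interpreter exit picks a per-return-type import callback, so funcref gets SymbolicAddress::CallImport_FuncRef next to CallImport_AnyRef, presumably backed by the Instance::callImport_funcref declared in WasmInstance.h above. Its body is not part of this diff; a plausible sketch, modeled on the anyref path and on the stub's zero-means-throw convention visible in GenerateImportInterpExit:

// Assumed implementation, mirroring callImport_anyref; not shown in this patch.
/* static */ int32_t Instance::callImport_funcref(Instance* instance,
                                                  int32_t funcImportIndex,
                                                  int32_t argc, uint64_t* argv) {
  JSContext* cx = TlsContext.get();
  RootedValue rval(cx);
  if (!instance->callImport(cx, funcImportIndex, argc, argv, &rval)) {
    return false;  // zero return makes the stub branch to the throw label
  }
  RootedFunction fun(cx);
  if (!CheckFuncRefValue(cx, rval, &fun)) {  // enforce null-or-exported-function
    return false;
  }
  *(void**)argv = AnyRef::fromJSObject(fun).forCompiledCode();
  return true;
}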
--- a/js/src/wasm/WasmTable.cpp +++ b/js/src/wasm/WasmTable.cpp @@ -25,17 +25,17 @@ #include "wasm/WasmInstance.h" #include "wasm/WasmJS.h" using namespace js; using namespace js::wasm; using mozilla::CheckedInt; Table::Table(JSContext* cx, const TableDesc& desc, - HandleWasmTableObject maybeObject, UniqueAnyFuncArray functions) + HandleWasmTableObject maybeObject, UniqueFuncRefArray functions) : maybeObject_(maybeObject), observers_(cx->zone()), functions_(std::move(functions)), kind_(desc.kind), length_(desc.limits.initial), maximum_(desc.limits.maximum) { MOZ_ASSERT(kind_ != TableKind::AnyRef); } @@ -50,19 +50,19 @@ Table::Table(JSContext* cx, const TableD maximum_(desc.limits.maximum) { MOZ_ASSERT(kind_ == TableKind::AnyRef); } /* static */ SharedTable Table::create(JSContext* cx, const TableDesc& desc, HandleWasmTableObject maybeObject) { switch (desc.kind) { - case TableKind::AnyFunction: - case TableKind::TypedFunction: { - UniqueAnyFuncArray functions( + case TableKind::FuncRef: + case TableKind::AsmJS: { + UniqueFuncRefArray functions( cx->pod_calloc<FunctionTableElem>(desc.limits.initial)); if (!functions) { return nullptr; } return SharedTable( cx->new_<Table>(cx, desc, maybeObject, std::move(functions))); } case TableKind::AnyRef: { @@ -85,31 +85,31 @@ void Table::tracePrivate(JSTracer* trc) // GC. TraceWeakEdge may sound better, but it is less efficient given that // we know object_ is already marked. if (maybeObject_) { MOZ_ASSERT(!gc::IsAboutToBeFinalized(&maybeObject_)); TraceEdge(trc, &maybeObject_, "wasm table object"); } switch (kind_) { - case TableKind::AnyFunction: { + case TableKind::FuncRef: { for (uint32_t i = 0; i < length_; i++) { if (functions_[i].tls) { functions_[i].tls->instance->trace(trc); } else { MOZ_ASSERT(!functions_[i].code); } } break; } case TableKind::AnyRef: { objects_.trace(trc); break; } - case TableKind::TypedFunction: { + case TableKind::AsmJS: { #ifdef DEBUG for (uint32_t i = 0; i < length_; i++) { MOZ_ASSERT(!functions_[i].tls); } #endif break; } } @@ -130,51 +130,45 @@ void Table::trace(JSTracer* trc) { uint8_t* Table::functionBase() const { if (kind() == TableKind::AnyRef) { return nullptr; } return (uint8_t*)functions_.get(); } -const FunctionTableElem& Table::getAnyFunc(uint32_t index) const { +const FunctionTableElem& Table::getFuncRef(uint32_t index) const { MOZ_ASSERT(isFunction()); return functions_[index]; } AnyRef Table::getAnyRef(uint32_t index) const { MOZ_ASSERT(!isFunction()); // TODO/AnyRef-boxing: With boxed immediates and strings, the write barrier // is going to have to be more complicated. 
ASSERT_ANYREF_IS_JSOBJECT; return AnyRef::fromJSObject(objects_[index]); } -const void* Table::getShortlivedAnyRefLocForCompiledCode(uint32_t index) const { - MOZ_ASSERT(!isFunction()); - return const_cast<HeapPtr<JSObject*>&>(objects_[index]) - .unsafeUnbarrieredForTracing(); -} - -void Table::setAnyFunc(uint32_t index, void* code, const Instance* instance) { +void Table::setFuncRef(uint32_t index, void* code, const Instance* instance) { MOZ_ASSERT(isFunction()); FunctionTableElem& elem = functions_[index]; if (elem.tls) { JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered()); } switch (kind_) { - case TableKind::AnyFunction: + case TableKind::FuncRef: elem.code = code; elem.tls = instance->tlsData(); MOZ_ASSERT(elem.tls->instance->objectUnbarriered()->isTenured(), "no writeBarrierPost (Table::set)"); break; - case TableKind::TypedFunction: + case TableKind::AsmJS: elem.code = code; elem.tls = nullptr; break; case TableKind::AnyRef: MOZ_CRASH("Bad table type"); } } @@ -183,39 +177,39 @@ void Table::setAnyRef(uint32_t index, An // TODO/AnyRef-boxing: With boxed immediates and strings, the write barrier // is going to have to be more complicated. ASSERT_ANYREF_IS_JSOBJECT; objects_[index] = new_obj.asJSObject(); } void Table::setNull(uint32_t index) { switch (kind_) { - case TableKind::AnyFunction: { + case TableKind::FuncRef: { FunctionTableElem& elem = functions_[index]; if (elem.tls) { JSObject::writeBarrierPre(elem.tls->instance->objectUnbarriered()); } elem.code = nullptr; elem.tls = nullptr; break; } case TableKind::AnyRef: { setAnyRef(index, AnyRef::null()); break; } - case TableKind::TypedFunction: { + case TableKind::AsmJS: { MOZ_CRASH("Should not happen"); } } } void Table::copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex) { switch (kind_) { - case TableKind::AnyFunction: { + case TableKind::FuncRef: { FunctionTableElem& dst = functions_[dstIndex]; if (dst.tls) { JSObject::writeBarrierPre(dst.tls->instance->objectUnbarriered()); } FunctionTableElem& src = srcTable.functions_[srcIndex]; dst.code = src.code; dst.tls = src.tls; @@ -228,17 +222,17 @@ void Table::copy(const Table& srcTable, MOZ_ASSERT(!dst.code); } break; } case TableKind::AnyRef: { setAnyRef(dstIndex, srcTable.getAnyRef(srcIndex)); break; } - case TableKind::TypedFunction: { + case TableKind::AsmJS: { MOZ_CRASH("Bad table type"); } } } uint32_t Table::grow(uint32_t delta, JSContext* cx) { // This isn't just an optimization: movingGrowable() assumes that // onMovingGrowTable does not fire when length == maximum. @@ -259,17 +253,17 @@ uint32_t Table::grow(uint32_t delta, JSC } MOZ_ASSERT(movingGrowable()); JSRuntime* rt = cx->runtime(); // Use JSRuntime's MallocProvider to avoid throwing. switch (kind_) { - case TableKind::AnyFunction: { + case TableKind::FuncRef: { // Note that realloc does not release functions_'s pointee on failure // which is exactly what we need here. FunctionTableElem* newFunctions = rt->pod_realloc<FunctionTableElem>( functions_.get(), length_, newLength.value()); if (!newFunctions) { return -1; } Unused << functions_.release(); @@ -280,17 +274,17 @@ uint32_t Table::grow(uint32_t delta, JSC break; } case TableKind::AnyRef: { if (!objects_.resize(newLength.value())) { return -1; } break; } - case TableKind::TypedFunction: { + case TableKind::AsmJS: { MOZ_CRASH("Bad table type"); } } length_ = newLength.value(); for (InstanceSet::Range r = observers_.all(); !r.empty(); r.popFront()) { r.front()->instance().onMovingGrowTable(this);
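With getShortlivedAnyRefLocForCompiledCode removed, table.get hands back the reference value itself instead of a pointer into movable table storage, so a null pointer can no longer double as the error signal: Instance::tableGet returns AnyRef::invalid().forCompiledCode() on a bounds failure and the call site checks for it under FailureMode::FailOnInvalidRef. An illustrative sketch of that convention (ReadTableEntry is a made-up helper, not part of the patch):

// Illustrative only: distinguishing a trap from a legitimately null entry.
static bool ReadTableEntry(Instance* instance, uint32_t index,
                           uint32_t tableIndex, AnyRef* out) {
  void* raw = Instance::tableGet(instance, index, tableIndex);
  if (raw == AnyRef::invalid().forCompiledCode()) {
    return false;                             // out of bounds; error already reported
  }
  *out = AnyRef::fromCompiledCode(raw);       // may be a null reference, which is valid
  return true;
}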
--- a/js/src/wasm/WasmTable.h +++ b/js/src/wasm/WasmTable.h @@ -24,75 +24,73 @@ namespace js { namespace wasm { // A Table is an indexable array of opaque values. Tables are first-class // stateful objects exposed to WebAssembly. asm.js also uses Tables to represent // its homogeneous function-pointer tables. // -// A table of AnyFunction holds FunctionTableElems, which are (instance*,index) +// A table of FuncRef holds FunctionTableElems, which are (instance*,index) // pairs, where the instance must be traced. // // A table of AnyRef holds JSObject pointers, which must be traced. // TODO/AnyRef-boxing: With boxed immediates and strings, JSObject* is no longer // the most appropriate representation for Cell::anyref. STATIC_ASSERT_ANYREF_IS_JSOBJECT; typedef GCVector<HeapPtr<JSObject*>, 0, SystemAllocPolicy> TableAnyRefVector; class Table : public ShareableBase<Table> { using InstanceSet = JS::WeakCache<GCHashSet< WeakHeapPtrWasmInstanceObject, MovableCellHasher<WeakHeapPtrWasmInstanceObject>, SystemAllocPolicy>>; - using UniqueAnyFuncArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>; + using UniqueFuncRefArray = UniquePtr<FunctionTableElem[], JS::FreePolicy>; WeakHeapPtrWasmTableObject maybeObject_; InstanceSet observers_; - UniqueAnyFuncArray functions_; // either functions_ has data + UniqueFuncRefArray functions_; // either functions_ has data TableAnyRefVector objects_; // or objects_, but not both const TableKind kind_; uint32_t length_; const Maybe<uint32_t> maximum_; template <class> friend struct js::MallocProvider; Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject, - UniqueAnyFuncArray functions); + UniqueFuncRefArray functions); Table(JSContext* cx, const TableDesc& td, HandleWasmTableObject maybeObject, TableAnyRefVector&& objects); void tracePrivate(JSTracer* trc); friend class js::WasmTableObject; public: static RefPtr<Table> create(JSContext* cx, const TableDesc& desc, HandleWasmTableObject maybeObject); void trace(JSTracer* trc); TableKind kind() const { return kind_; } - bool isTypedFunction() const { return kind_ == TableKind::TypedFunction; } bool isFunction() const { - return kind_ == TableKind::AnyFunction || kind_ == TableKind::TypedFunction; + return kind_ == TableKind::FuncRef || kind_ == TableKind::AsmJS; } uint32_t length() const { return length_; } Maybe<uint32_t> maximum() const { return maximum_; } // Only for function values. Raw pointer to the table. uint8_t* functionBase() const; - // get/setAnyFunc is allowed only on table-of-funcref. + // get/setFuncRef is allowed only on table-of-funcref. // get/setAnyRef is allowed only on table-of-anyref. // setNull is allowed on either. - const FunctionTableElem& getAnyFunc(uint32_t index) const; - void setAnyFunc(uint32_t index, void* code, const Instance* instance); + const FunctionTableElem& getFuncRef(uint32_t index) const; + void setFuncRef(uint32_t index, void* code, const Instance* instance); AnyRef getAnyRef(uint32_t index) const; - const void* getShortlivedAnyRefLocForCompiledCode(uint32_t index) const; void setAnyRef(uint32_t index, AnyRef); void setNull(uint32_t index); // Copy entry from |srcTable| at |srcIndex| to this table at |dstIndex|. // Used by table.copy. void copy(const Table& srcTable, uint32_t dstIndex, uint32_t srcIndex);
--- a/js/src/wasm/WasmTextToBinary.cpp +++ b/js/src/wasm/WasmTextToBinary.cpp @@ -85,17 +85,16 @@ class WasmToken { EndOfFile, Equal, Error, Export, ExtraConversionOpcode, Field, Float, Func, - FuncRef, #ifdef ENABLE_WASM_GC GcFeatureOptIn, #endif GetGlobal, GetLocal, Global, If, Import, @@ -371,17 +370,16 @@ class WasmToken { case EndOfFile: case Equal: case End: case Error: case Export: case Field: case Float: case Func: - case FuncRef: #ifdef ENABLE_WASM_GC case GcFeatureOptIn: #endif case Global: case Mutable: case Import: case Index: case Memory: @@ -949,17 +947,17 @@ WasmToken WasmTokenStream::next() { case '9': return literal(begin); case 'a': if (consume(u"align")) { return WasmToken(WasmToken::Align, begin, cur_); } if (consume(u"anyfunc")) { - return WasmToken(WasmToken::FuncRef, begin, cur_); + return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_); } if (consume(u"anyref")) { return WasmToken(WasmToken::ValueType, ValType::AnyRef, begin, cur_); } if (consume(u"atomic.")) { if (consume(u"wake") || consume(u"notify")) { return WasmToken(WasmToken::Wake, ThreadOp::Wake, begin, cur_); } @@ -1032,17 +1030,17 @@ WasmToken WasmTokenStream::next() { break; case 'f': if (consume(u"field")) { return WasmToken(WasmToken::Field, begin, cur_); } if (consume(u"funcref")) { - return WasmToken(WasmToken::FuncRef, begin, cur_); + return WasmToken(WasmToken::ValueType, ValType::FuncRef, begin, cur_); } if (consume(u"func")) { return WasmToken(WasmToken::Func, begin, cur_); } if (consume(u"f32")) { if (!consume(u".")) { @@ -3968,27 +3966,27 @@ static AstExpr* ParseStructSet(WasmParse } static AstExpr* ParseStructNarrow(WasmParseContext& c, bool inParens) { AstValType inputType; if (!ParseValType(c, &inputType)) { return nullptr; } - if (!inputType.isRefType()) { + if (!inputType.isNarrowType()) { c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error); return nullptr; } AstValType outputType; if (!ParseValType(c, &outputType)) { return nullptr; } - if (!outputType.isRefType()) { + if (!outputType.isNarrowType()) { c.ts.generateError(c.ts.peek(), "struct.narrow requires ref type", c.error); return nullptr; } AstExpr* ptr = ParseExpr(c, inParens); if (!ptr) { return nullptr; } @@ -4714,30 +4712,29 @@ static bool ParseGlobalType(WasmParseCon return false; } return true; } static bool ParseElemType(WasmParseContext& c, TableKind* tableKind) { WasmToken token; - if (c.ts.getIf(WasmToken::FuncRef, &token)) { - *tableKind = TableKind::AnyFunction; - return true; - } + if (c.ts.getIf(WasmToken::ValueType, &token)) { + if (token.valueType() == ValType::FuncRef) { + *tableKind = TableKind::FuncRef; + return true; + } #ifdef ENABLE_WASM_REFTYPES - if (c.ts.getIf(WasmToken::ValueType, &token) && - token.valueType() == ValType::AnyRef) { - *tableKind = TableKind::AnyRef; - return true; + if (token.valueType() == ValType::AnyRef) { + *tableKind = TableKind::AnyRef; + return true; + } +#endif } c.ts.generateError(token, "'funcref' or 'anyref' required", c.error); -#else - c.ts.generateError(token, "'funcref' required", c.error); -#endif return false; } static bool ParseTableSig(WasmParseContext& c, Limits* table, TableKind* tableKind) { return ParseLimits(c, table, Shareable::False) && ParseElemType(c, tableKind); } @@ -6853,18 +6850,18 @@ static bool EncodeLimits(Encoder& e, con } return true; } static bool EncodeTableLimits(Encoder& e, const Limits& limits, TableKind tableKind) { switch (tableKind) { - case TableKind::AnyFunction: - if 
(!e.writeVarU32(uint32_t(TypeCode::AnyFunc))) { + case TableKind::FuncRef: + if (!e.writeVarU32(uint32_t(TypeCode::FuncRef))) { return false; } break; case TableKind::AnyRef: if (!e.writeVarU32(uint32_t(TypeCode::AnyRef))) { return false; } break; @@ -7275,17 +7272,17 @@ static bool EncodeDataCountSection(Encod static bool EncodeElemSegment(Encoder& e, AstElemSegment& segment) { if (!EncodeDestinationOffsetOrFlags(e, segment.targetTable().index(), segment.offsetIfActive())) { return false; } if (segment.isPassive()) { - if (!e.writeFixedU8(uint8_t(TypeCode::AnyFunc))) { + if (!e.writeFixedU8(uint8_t(TypeCode::FuncRef))) { return false; } } if (!e.writeVarU32(segment.elems().length())) { return false; }
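With the tokenizer change above, both the legacy spelling "anyfunc" and the current "funcref" produce the same ValType::FuncRef token, so both spellings encode the same element type for a table. A sketch using the SpiderMonkey shell's wasmTextToBinary test helper (shell-only, not a web API):

  const a = wasmTextToBinary('(module (table 1 anyfunc))');
  const b = wasmTextToBinary('(module (table 1 funcref))');
  // Both compile, and both encode the funcref element type for the table.
  new WebAssembly.Module(a);
  new WebAssembly.Module(b);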
--- a/js/src/wasm/WasmTypes.cpp +++ b/js/src/wasm/WasmTypes.cpp @@ -73,38 +73,33 @@ Val::Val(const LitVal& val) { return; case ValType::I64: u.i64_ = val.i64(); return; case ValType::F64: u.f64_ = val.f64(); return; case ValType::Ref: + case ValType::FuncRef: + case ValType::AnyRef: u.ref_ = val.ref(); return; - case ValType::AnyRef: - u.anyref_ = val.anyref(); - return; case ValType::NullRef: break; } MOZ_CRASH(); } void Val::trace(JSTracer* trc) { - if (type_.isValid()) { - if (type_.isRef() && u.ref_) { - TraceManuallyBarrieredEdge(trc, &u.ref_, "wasm ref/anyref global"); - } else if (type_ == ValType::AnyRef && !u.anyref_.isNull()) { - // TODO/AnyRef-boxing: With boxed immediates and strings, the write - // barrier is going to have to be more complicated. - ASSERT_ANYREF_IS_JSOBJECT; - TraceManuallyBarrieredEdge(trc, u.anyref_.asJSObjectAddress(), - "wasm ref/anyref global"); - } + if (type_.isValid() && type_.isReference() && !u.ref_.isNull()) { + // TODO/AnyRef-boxing: With boxed immediates and strings, the write + // barrier is going to have to be more complicated. + ASSERT_ANYREF_IS_JSOBJECT; + TraceManuallyBarrieredEdge(trc, u.ref_.asJSObjectAddress(), + "wasm reference-typed global"); } } void AnyRef::trace(JSTracer* trc) { if (value_) { TraceManuallyBarrieredEdge(trc, &value_, "wasm anyref referent"); } } @@ -268,16 +263,17 @@ static const unsigned sMaxTypes = (sTotalBits - sTagBits - sReturnBit - sLengthBits) / sTypeBits; static bool IsImmediateType(ValType vt) { switch (vt.code()) { case ValType::I32: case ValType::I64: case ValType::F32: case ValType::F64: + case ValType::FuncRef: case ValType::AnyRef: return true; case ValType::NullRef: case ValType::Ref: return false; } MOZ_CRASH("bad ValType"); } @@ -288,18 +284,20 @@ static unsigned EncodeImmediateType(ValT case ValType::I32: return 0; case ValType::I64: return 1; case ValType::F32: return 2; case ValType::F64: return 3; + case ValType::FuncRef: + return 4; case ValType::AnyRef: - return 4; + return 5; case ValType::NullRef: case ValType::Ref: break; } MOZ_CRASH("bad ValType"); } /* static */ @@ -719,16 +717,17 @@ void DebugFrame::updateReturnJSValue() { cachedReturnJSValue_.setDouble(JS::CanonicalizeNaN(resultF32_)); break; case ExprType::F64: cachedReturnJSValue_.setDouble(JS::CanonicalizeNaN(resultF64_)); break; case ExprType::Ref: cachedReturnJSValue_ = ObjectOrNullValue((JSObject*)resultRef_); break; + case ExprType::FuncRef: case ExprType::AnyRef: cachedReturnJSValue_ = UnboxAnyRef(resultAnyRef_); break; default: MOZ_CRASH("result type"); } }
--- a/js/src/wasm/WasmTypes.h +++ b/js/src/wasm/WasmTypes.h @@ -250,16 +250,22 @@ static inline TypeCode UnpackTypeCodeTyp return TypeCode(uint32_t(ptc) & 255); } static inline uint32_t UnpackTypeCodeIndex(PackedTypeCode ptc) { MOZ_ASSERT(UnpackTypeCodeType(ptc) == TypeCode::Ref); return uint32_t(ptc) >> 8; } +static inline bool IsReferenceType(PackedTypeCode ptc) { + TypeCode tc = UnpackTypeCodeType(ptc); + return tc == TypeCode::Ref || tc == TypeCode::AnyRef || + tc == TypeCode::FuncRef || tc == TypeCode::NullRef; +} + // The ExprType represents the type of a WebAssembly expression or return value // and may either be a ValType or void. // // (Soon, expression types will be generalized to a list of ValType and this // class will go away, replaced, wherever it is used, by a varU32 + list of // ValType.) class ValType; @@ -270,16 +276,17 @@ class ExprType { #ifdef DEBUG bool isValidCode() { switch (UnpackTypeCodeType(tc_)) { case TypeCode::I32: case TypeCode::I64: case TypeCode::F32: case TypeCode::F64: case TypeCode::AnyRef: + case TypeCode::FuncRef: case TypeCode::NullRef: case TypeCode::Ref: case TypeCode::BlockVoid: case TypeCode::Limit: return true; default: return false; } @@ -290,16 +297,17 @@ class ExprType { enum Code { Void = uint8_t(TypeCode::BlockVoid), I32 = uint8_t(TypeCode::I32), I64 = uint8_t(TypeCode::I64), F32 = uint8_t(TypeCode::F32), F64 = uint8_t(TypeCode::F64), AnyRef = uint8_t(TypeCode::AnyRef), + FuncRef = uint8_t(TypeCode::FuncRef), NullRef = uint8_t(TypeCode::NullRef), Ref = uint8_t(TypeCode::Ref), Limit = uint8_t(TypeCode::Limit) }; ExprType() : tc_() {} @@ -316,42 +324,33 @@ class ExprType { explicit ExprType(PackedTypeCode ptc) : tc_(ptc) { MOZ_ASSERT(isValidCode()); } explicit inline ExprType(const ValType& t); PackedTypeCode packed() const { return tc_; } - PackedTypeCode* packedPtr() { return &tc_; } Code code() const { return Code(UnpackTypeCodeType(tc_)); } + bool isValid() const { return IsValid(tc_); } + uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); } - - bool isValid() const { return IsValid(tc_); } - bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; } - bool isReference() const { - TypeCode tc = UnpackTypeCodeType(tc_); - return tc == TypeCode::Ref || tc == TypeCode::AnyRef || - tc == TypeCode::NullRef; - } + bool isReference() const { return IsReferenceType(tc_); } bool operator==(const ExprType& that) const { return tc_ == that.tc_; } - bool operator!=(const ExprType& that) const { return tc_ != that.tc_; } - bool operator==(Code that) const { MOZ_ASSERT(that != Code::Ref); return code() == that; } - bool operator!=(Code that) const { return !(*this == that); } }; // The ValType represents the storage type of a WebAssembly location, whether // parameter, local, or global. 
class ValType { PackedTypeCode tc_; @@ -359,16 +358,17 @@ class ValType { #ifdef DEBUG bool isValidCode() { switch (UnpackTypeCodeType(tc_)) { case TypeCode::I32: case TypeCode::I64: case TypeCode::F32: case TypeCode::F64: case TypeCode::AnyRef: + case TypeCode::FuncRef: case TypeCode::NullRef: case TypeCode::Ref: return true; default: return false; } } #endif @@ -376,16 +376,17 @@ class ValType { public: enum Code { I32 = uint8_t(TypeCode::I32), I64 = uint8_t(TypeCode::I64), F32 = uint8_t(TypeCode::F32), F64 = uint8_t(TypeCode::F64), AnyRef = uint8_t(TypeCode::AnyRef), + FuncRef = uint8_t(TypeCode::FuncRef), NullRef = uint8_t(TypeCode::NullRef), Ref = uint8_t(TypeCode::Ref), }; ValType() : tc_(InvalidPackedTypeCode()) {} MOZ_IMPLICIT ValType(Code c) : tc_(PackTypeCode(TypeCode(c))) { MOZ_ASSERT(isValidCode()); @@ -426,37 +427,29 @@ class ValType { } PackedTypeCode packed() const { return tc_; } uint32_t bitsUnsafe() const { return PackedTypeCodeToBits(tc_); } Code code() const { return Code(UnpackTypeCodeType(tc_)); } + bool isValid() const { return IsValid(tc_); } + uint32_t refTypeIndex() const { return UnpackTypeCodeIndex(tc_); } - - bool isValid() const { return IsValid(tc_); } - bool isRef() const { return UnpackTypeCodeType(tc_) == TypeCode::Ref; } - bool isReference() const { - TypeCode tc = UnpackTypeCodeType(tc_); - return tc == TypeCode::Ref || tc == TypeCode::AnyRef || - tc == TypeCode::NullRef; - } + bool isReference() const { return IsReferenceType(tc_); } bool operator==(const ValType& that) const { return tc_ == that.tc_; } - bool operator!=(const ValType& that) const { return tc_ != that.tc_; } - bool operator==(Code that) const { MOZ_ASSERT(that != Code::Ref); return code() == that; } - bool operator!=(Code that) const { return !(*this == that); } }; // The dominant use of this data type is for locals and args, and profiling // with ZenGarden and Tanks suggests an initial size of 16 minimises heap // allocation, both in terms of blocks and bytes. typedef Vector<ValType, 16, SystemAllocPolicy> ValTypeVector; @@ -466,16 +459,17 @@ static inline unsigned SizeOf(ValType vt switch (vt.code()) { case ValType::I32: case ValType::F32: return 4; case ValType::I64: case ValType::F64: return 8; case ValType::AnyRef: + case ValType::FuncRef: case ValType::NullRef: case ValType::Ref: return sizeof(intptr_t); } MOZ_CRASH("Invalid ValType"); } static inline jit::MIRType ToMIRType(ValType vt) { @@ -485,16 +479,17 @@ static inline jit::MIRType ToMIRType(Val case ValType::I64: return jit::MIRType::Int64; case ValType::F32: return jit::MIRType::Float32; case ValType::F64: return jit::MIRType::Double; case ValType::Ref: case ValType::AnyRef: + case ValType::FuncRef: case ValType::NullRef: return jit::MIRType::RefOrNull; } MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("bad type"); } static inline bool IsNumberType(ValType vt) { return !vt.isReference(); } @@ -522,16 +517,18 @@ static inline const char* ToCString(Expr case ExprType::I64: return "i64"; case ExprType::F32: return "f32"; case ExprType::F64: return "f64"; case ExprType::AnyRef: return "anyref"; + case ExprType::FuncRef: + return "funcref"; case ExprType::NullRef: return "nullref"; case ExprType::Ref: return "ref"; case ExprType::Limit:; } MOZ_CRASH("bad expression type"); } @@ -566,21 +563,26 @@ static inline const char* ToCString(ValT // For version 0, we simply equate AnyRef and JSObject* (this means that there // are technically no tags at all yet). 
We use a simple boxing scheme that // wraps a JS value that is not already JSObject in a distinguishable JSObject // that holds the value, see WasmTypes.cpp for details. class AnyRef { JSObject* value_; + explicit AnyRef() : value_((JSObject*)-1) {} explicit AnyRef(JSObject* p) : value_(p) { MOZ_ASSERT(((uintptr_t)p & 0x03) == 0); } public: + // An invalid AnyRef cannot arise naturally from wasm and so can be used as + // a sentinel value to indicate failure from an AnyRef-returning function. + static AnyRef invalid() { return AnyRef(); } + // Given a void* that comes from compiled wasm code, turn it into AnyRef. static AnyRef fromCompiledCode(void* p) { return AnyRef((JSObject*)p); } // Given a JSObject* that comes from JS, turn it into AnyRef. static AnyRef fromJSObject(JSObject* p) { return AnyRef(p); } // Generate an AnyRef null pointer. static AnyRef null() { return AnyRef(nullptr); } @@ -697,40 +699,33 @@ class LitVal { protected: ValType type_; union U { U() : i32_(0) {} uint32_t i32_; uint64_t i64_; float f32_; double f64_; - JSObject* ref_; // Note, this breaks an abstraction boundary - AnyRef anyref_; + AnyRef ref_; } u; public: LitVal() : type_(), u{} {} explicit LitVal(uint32_t i32) : type_(ValType::I32) { u.i32_ = i32; } explicit LitVal(uint64_t i64) : type_(ValType::I64) { u.i64_ = i64; } explicit LitVal(float f32) : type_(ValType::F32) { u.f32_ = f32; } explicit LitVal(double f64) : type_(ValType::F64) { u.f64_ = f64; } - explicit LitVal(AnyRef any) : type_(ValType::AnyRef) { + explicit LitVal(ValType type, AnyRef any) : type_(type) { + MOZ_ASSERT(type.isReference()); MOZ_ASSERT(any.isNull(), "use Val for non-nullptr ref types to get tracing"); - u.anyref_ = any; - } - - explicit LitVal(ValType refType, JSObject* ref) : type_(refType) { - MOZ_ASSERT(refType.isRef()); - MOZ_ASSERT(ref == nullptr, - "use Val for non-nullptr ref types to get tracing"); - u.ref_ = ref; + u.ref_ = any; } ValType type() const { return type_; } static constexpr size_t sizeofLargestValue() { return sizeof(u); } uint32_t i32() const { MOZ_ASSERT(type_ == ValType::I32); return u.i32_; @@ -742,42 +737,38 @@ class LitVal { const float& f32() const { MOZ_ASSERT(type_ == ValType::F32); return u.f32_; } const double& f64() const { MOZ_ASSERT(type_ == ValType::F64); return u.f64_; } - JSObject* ref() const { - MOZ_ASSERT(type_.isRef()); + AnyRef ref() const { + MOZ_ASSERT(type_.isReference()); return u.ref_; } - AnyRef anyref() const { - MOZ_ASSERT(type_ == ValType::AnyRef); - return u.anyref_; - } }; // A Val is a LitVal that can contain (non-null) pointers to GC things. All Vals // must be stored in Rooteds so that their trace() methods are called during // stack marking. Vals do not implement barriers and thus may not be stored on // the heap. 
class MOZ_NON_PARAM Val : public LitVal { public: Val() : LitVal() {} explicit Val(const LitVal& val); explicit Val(uint32_t i32) : LitVal(i32) {} explicit Val(uint64_t i64) : LitVal(i64) {} explicit Val(float f32) : LitVal(f32) {} explicit Val(double f64) : LitVal(f64) {} - explicit Val(AnyRef val) : LitVal(AnyRef::null()) { u.anyref_ = val; } - explicit Val(ValType type, JSObject* obj) : LitVal(type, (JSObject*)nullptr) { - u.ref_ = obj; + explicit Val(ValType type, AnyRef val) : LitVal(type, AnyRef::null()) { + MOZ_ASSERT(type.isReference()); + u.ref_ = val; } void trace(JSTracer* trc); }; typedef Rooted<Val> RootedVal; typedef Handle<Val> HandleVal; typedef MutableHandle<Val> MutableHandleVal; @@ -1858,16 +1849,17 @@ enum class SymbolicAddress { HandleDebugTrap, HandleThrow, HandleTrap, ReportInt64JSCall, CallImport_Void, CallImport_I32, CallImport_I64, CallImport_F64, + CallImport_FuncRef, CallImport_AnyRef, CoerceInPlace_ToInt32, CoerceInPlace_ToNumber, CoerceInPlace_JitEntry, DivI64, UDivI64, ModI64, UModI64, @@ -1908,31 +1900,45 @@ enum class SymbolicAddress { PrintPtr, PrintF32, PrintF64, PrintText, #endif Limit }; +// The FailureMode indicates whether, immediately after a call to a builtin +// returns, the return value should be checked against an error condition +// (and if so, which one) which signals that the C++ calle has already +// reported an error and thus wasm needs to wasmTrap(Trap::ThrowReported). + +enum class FailureMode : uint8_t { + Infallible, + FailOnNegI32, + FailOnNullPtr, + FailOnInvalidRef +}; + // SymbolicAddressSignature carries type information for a function referred // to by a SymbolicAddress. In order that |argTypes| can be written out as a // static initialiser, it has to have fixed length. At present // SymbolicAddressType is used to describe functions with at most 6 arguments, // so |argTypes| has 7 entries in order to allow the last value to be // MIRType::None, in the hope of catching any accidental overruns of the // defined section of the array. static constexpr size_t SymbolicAddressSignatureMaxArgs = 6; struct SymbolicAddressSignature { // The SymbolicAddress that is described. const SymbolicAddress identity; // The return type, or MIRType::None to denote 'void'. const jit::MIRType retType; + // The failure mode, which is checked by masm.wasmCallBuiltinInstanceMethod. + const FailureMode failureMode; // The number of arguments, 0 .. SymbolicAddressSignatureMaxArgs only. const uint8_t numArgs; // The argument types; SymbolicAddressSignatureMaxArgs + 1 guard, which // should be MIRType::None. const jit::MIRType argTypes[SymbolicAddressSignatureMaxArgs + 1]; }; // The 16 in this assertion is derived as follows: SymbolicAddress is probably @@ -1961,20 +1967,23 @@ struct Limits { Limits() = default; explicit Limits(uint32_t initial, const Maybe<uint32_t>& maximum = Nothing(), Shareable shared = Shareable::False) : initial(initial), maximum(maximum), shared(shared) {} }; // TableDesc describes a table as well as the offset of the table's base pointer -// in global memory. Currently, wasm only has "any function" and asm.js only -// "typed function". - -enum class TableKind { AnyFunction, AnyRef, TypedFunction }; +// in global memory. The TableKind determines the representation: +// - AnyRef: a wasm anyref word (wasm::AnyRef) +// - FuncRef: a two-word FunctionTableElem (wasm indirect call ABI) +// - AsmJS: a two-word FunctionTableElem (asm.js ABI) +// Eventually there should be a single unified AnyRef representation. 
+ +enum class TableKind { AnyRef, FuncRef, AsmJS }; struct TableDesc { TableKind kind; bool importedOrExported; uint32_t globalDataOffset; Limits limits; TableDesc() = default; @@ -2101,18 +2110,18 @@ struct TableTls { // Length of the table in number of elements (not bytes). uint32_t length; // Pointer to the array of elements (which can have various representations). // For tables of anyref this is null. void* functionBase; }; -// Table elements for TableKind::AnyFunctions carry both the code pointer and an -// instance pointer. +// Table element for TableKind::FuncRef which carries both the code pointer and +// an instance pointer. struct FunctionTableElem { // The code to call when calling this element. The table ABI is the system // ABI with the additional ABI requirements that: // - WasmTlsReg and any pinned registers have been loaded appropriately // - if this is a heterogeneous table that requires a signature check, // WasmTableCallSigReg holds the signature id. void* code;
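The AnyRef comment earlier in this header describes the version-0 scheme in which every non-null anyref is a JSObject*, with non-object JS values boxed in a wrapper object. From script this boxing is meant to be invisible; in particular a wasm ref.null should surface as JS null. A shell sketch under those assumptions (wasmTextToBinary and print are shell-only helpers, and the build must have reference types enabled):

  const bin = wasmTextToBinary(
      '(module (func (export "nil") (result anyref) ref.null))');
  const {exports} = new WebAssembly.Instance(new WebAssembly.Module(bin));
  print(exports.nil() === null);  // true: a null reference round-trips as JS null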
--- a/js/src/wasm/WasmValidate.cpp +++ b/js/src/wasm/WasmValidate.cpp @@ -1311,16 +1311,17 @@ static bool DecodeStructType(Decoder& d, offset = layout.addScalar(Scalar::Float32); break; case ValType::F64: offset = layout.addScalar(Scalar::Float64); break; case ValType::Ref: offset = layout.addReference(ReferenceType::TYPE_OBJECT); break; + case ValType::FuncRef: case ValType::AnyRef: offset = layout.addReference(ReferenceType::TYPE_WASM_ANYREF); break; default: MOZ_CRASH("Unknown type"); } if (!offset.isValid()) { return d.fail("Object too large"); @@ -1557,18 +1558,18 @@ static bool DecodeLimits(Decoder& d, Lim static bool DecodeTableTypeAndLimits(Decoder& d, bool gcTypesEnabled, TableDescVector* tables) { uint8_t elementType; if (!d.readFixedU8(&elementType)) { return d.fail("expected table element type"); } TableKind tableKind; - if (elementType == uint8_t(TypeCode::AnyFunc)) { - tableKind = TableKind::AnyFunction; + if (elementType == uint8_t(TypeCode::FuncRef)) { + tableKind = TableKind::FuncRef; #ifdef ENABLE_WASM_REFTYPES } else if (elementType == uint8_t(TypeCode::AnyRef)) { tableKind = TableKind::AnyRef; #endif } else { #ifdef ENABLE_WASM_REFTYPES return d.fail("expected 'funcref' or 'anyref' element type"); #else @@ -1597,16 +1598,17 @@ static bool DecodeTableTypeAndLimits(Dec } static bool GlobalIsJSCompatible(Decoder& d, ValType type, bool isMutable) { switch (type.code()) { case ValType::I32: case ValType::F32: case ValType::F64: case ValType::I64: + case ValType::FuncRef: case ValType::AnyRef: break; #ifdef WASM_PRIVATE_REFTYPES case ValType::Ref: return d.fail("cannot expose reference type"); #endif default: return d.fail("unexpected variable type in global import/export"); @@ -1932,24 +1934,18 @@ static bool DecodeInitializerExpression( *init = InitExpr(LitVal(f64)); break; } case uint16_t(Op::RefNull): { if (!expected.isReference()) { return d.fail( "type mismatch: initializer type and expected type don't match"); } - if (expected == ValType::AnyRef) { - *init = InitExpr(LitVal(AnyRef::null())); - } else { - if (!env->gcTypesEnabled()) { - return d.fail("unexpected initializer expression"); - } - *init = InitExpr(LitVal(expected, nullptr)); - } + MOZ_ASSERT_IF(expected.isRef(), env->gcTypesEnabled()); + *init = InitExpr(LitVal(expected, AnyRef::null())); break; } case uint16_t(Op::GetGlobal): { uint32_t i; const GlobalDescVector& globals = env->globals; if (!d.readVarU32(&i)) { return d.fail( "failed to read global.get index in initializer expression"); @@ -2266,17 +2262,17 @@ static bool DecodeElemSection(Decoder& d tableIndex >= env->tables.length()) { return d.fail("table index out of range for element segment"); } if (initializerKind == InitializerKind::Passive) { // Too many bugs result from keeping this value zero. For passive // segments, there really is no segment index, and we should never // touch the field. 
tableIndex = (uint32_t)-1; - } else if (env->tables[tableIndex].kind != TableKind::AnyFunction) { + } else if (env->tables[tableIndex].kind != TableKind::FuncRef) { return d.fail("only tables of 'funcref' may have element segments"); } seg->tableIndex = tableIndex; switch (initializerKind) { case InitializerKind::Active: case InitializerKind::ActiveWithIndex: { @@ -2287,17 +2283,17 @@ static bool DecodeElemSection(Decoder& d seg->offsetIfActive.emplace(offset); break; } case InitializerKind::Passive: { uint8_t form; if (!d.readFixedU8(&form)) { return d.fail("expected type form"); } - if (form != uint8_t(TypeCode::AnyFunc)) { + if (form != uint8_t(TypeCode::FuncRef)) { return d.fail( "passive segments can only contain function references"); } break; } } uint32_t numElems;
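DecodeElemSection above only accepts element segments for tables of kind FuncRef; a segment targeting an anyref table is rejected at validation time. A shell sketch, again assuming reference types are enabled and using the shell-only wasmTextToBinary helper (the exact error text may vary by build):

  const bad = wasmTextToBinary(
      '(module (func $f) (table 1 anyref) (elem (i32.const 0) $f))');
  try {
    new WebAssembly.Module(bad);
  } catch (e) {
    // Expected: CompileError, "only tables of 'funcref' may have element segments"
    print(e instanceof WebAssembly.CompileError);
  }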
--- a/js/src/wasm/WasmValidate.h +++ b/js/src/wasm/WasmValidate.h @@ -606,16 +606,17 @@ class Decoder { switch (code) { case uint8_t(ValType::I32): case uint8_t(ValType::F32): case uint8_t(ValType::F64): case uint8_t(ValType::I64): *type = ValType::Code(code); return true; #ifdef ENABLE_WASM_REFTYPES + case uint8_t(ValType::FuncRef): case uint8_t(ValType::AnyRef): *type = ValType::Code(code); return true; # ifdef ENABLE_WASM_GC case uint8_t(ValType::Ref): { if (!gcTypesEnabled) { return fail("(ref T) types not enabled"); }
--- a/mobile/android/components/extensions/ext-browserAction.js +++ b/mobile/android/components/extensions/ext-browserAction.js @@ -125,17 +125,17 @@ this.browserAction = class extends Exten onManifestEntry(entryName) { let {extension} = this; let {manifest} = extension; let browserAction = new BrowserAction(manifest.browser_action, extension); browserActionMap.set(extension, browserAction); } - onShutdown(reason) { + onShutdown() { let {extension} = this; if (browserActionMap.has(extension)) { browserActionMap.get(extension).shutdown(); browserActionMap.delete(extension); } }
--- a/mobile/android/components/extensions/ext-pageAction.js +++ b/mobile/android/components/extensions/ext-pageAction.js @@ -210,17 +210,17 @@ this.pageAction = class extends Extensio onManifestEntry(entryName) { let {extension} = this; let {manifest} = extension; let pageAction = new PageAction(manifest.page_action, extension); pageActionMap.set(extension, pageAction); } - onShutdown(reason) { + onShutdown() { let {extension} = this; if (pageActionMap.has(extension)) { pageActionMap.get(extension).shutdown(); pageActionMap.delete(extension); } }
--- a/toolkit/components/extensions/Extension.jsm +++ b/toolkit/components/extensions/Extension.jsm @@ -2028,17 +2028,16 @@ class Extension extends ExtensionData { // caches. These caches may keep the file open. file.remove(false); }).catch(Cu.reportError); } async shutdown(reason) { this.state = "Shutdown"; - this.shutdownReason = reason; this.hasShutdown = true; if (!this.policy) { return; } if (this.hasPermission("storage") && ExtensionStorageIDB.selectedBackendPromises.has(this)) { this.state = "Shutdown: Storage"; @@ -2056,48 +2055,49 @@ class Extension extends ExtensionData { if (this.rootURI instanceof Ci.nsIJARURI) { this.state = "Shutdown: Flush jar cache"; let file = this.rootURI.JARFile.QueryInterface(Ci.nsIFileURL).file; Services.ppmm.broadcastAsyncMessage("Extension:FlushJarCache", {path: file.path}); this.state = "Shutdown: Flushed jar cache"; } - if (this.cleanupFile || reason !== "APP_SHUTDOWN") { + const isAppShutdown = reason === "APP_SHUTDOWN"; + if (this.cleanupFile || !isAppShutdown) { StartupCache.clearAddonData(this.id); } activeExtensionIDs.delete(this.id); sharedData.set("extensions/activeIDs", activeExtensionIDs); for (let key of this.sharedDataKeys) { sharedData.delete(key); } Services.ppmm.removeMessageListener(this.MESSAGE_EMIT_EVENT, this); - this.updatePermissions(this.shutdownReason); + this.updatePermissions(reason); if (!this.manifest) { this.state = "Shutdown: Complete: No manifest"; this.policy.active = false; return this.cleanupGeneratedFile(); } GlobalManager.uninit(this); for (let obj of this.onShutdown) { obj.close(); } - ParentAPIManager.shutdownExtension(this.id); + ParentAPIManager.shutdownExtension(this.id, reason); Management.emit("shutdown", this); - this.emit("shutdown"); + this.emit("shutdown", isAppShutdown); const TIMED_OUT = Symbol(); this.state = "Shutdown: Emit shutdown"; let result = await Promise.race([ this.broadcast("Extension:Shutdown", {id: this.id}), promiseTimeout(CHILD_SHUTDOWN_TIMEOUT_MS).then(() => TIMED_OUT), ]);
--- a/toolkit/components/extensions/ExtensionCommon.jsm +++ b/toolkit/components/extensions/ExtensionCommon.jsm @@ -329,19 +329,19 @@ class EventEmitter { * once for each extension that uses the API. */ class ExtensionAPI extends EventEmitter { constructor(extension) { super(); this.extension = extension; - extension.once("shutdown", () => { + extension.once("shutdown", (what, isAppShutdown) => { if (this.onShutdown) { - this.onShutdown(extension.shutdownReason); + this.onShutdown(isAppShutdown); } this.extension = null; }); } destroy() { }
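With the change above, ExtensionAPI.onShutdown now receives a boolean isAppShutdown instead of the old shutdownReason string; the ext-protocolHandlers.js and ext-theme.js hunks below are the in-tree conversions. A hypothetical ext-* module under the new signature would look roughly like this (myAPI and its cleanup are illustrative only):

  this.myAPI = class extends ExtensionAPI {
    onShutdown(isAppShutdown) {
      if (isAppShutdown) {
        // The whole browser is going away; leave persistent state alone.
        return;
      }
      // The extension itself is being disabled, uninstalled, or updated:
      // tear down per-extension state here.
    }
  };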
--- a/toolkit/components/extensions/ExtensionParent.jsm +++ b/toolkit/components/extensions/ExtensionParent.jsm @@ -14,16 +14,17 @@ /* exported ExtensionParent */ var EXPORTED_SYMBOLS = ["ExtensionParent"]; const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm"); const {XPCOMUtils} = ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm"); XPCOMUtils.defineLazyModuleGetters(this, { + AddonManager: "resource://gre/modules/AddonManager.jsm", AppConstants: "resource://gre/modules/AppConstants.jsm", AsyncShutdown: "resource://gre/modules/AsyncShutdown.jsm", DeferredTask: "resource://gre/modules/DeferredTask.jsm", E10SUtils: "resource://gre/modules/E10SUtils.jsm", ExtensionData: "resource://gre/modules/Extension.jsm", GeckoViewConnection: "resource://gre/modules/GeckoViewWebExtension.jsm", MessageChannel: "resource://gre/modules/MessageChannel.jsm", MessageManagerProxy: "resource://gre/modules/MessageManagerProxy.jsm", @@ -107,16 +108,27 @@ let apiManager = new class extends Schem this.on("uninstall", (e, {id}) => { let modules = this.eventModules.get("uninstall"); return Promise.all(Array.from(modules).map(async apiName => { let module = await this.asyncLoadModule(apiName); return module.onUninstall(id); })); }); /* eslint-enable mozilla/balanced-listeners */ + + // Handle any changes that happened during startup + let disabledIds = AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_DISABLED); + if (disabledIds.length > 0) { + this._callHandlers(disabledIds, "disable", "onDisable"); + } + + let uninstalledIds = AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_UNINSTALLED); + if (uninstalledIds.length > 0) { + this._callHandlers(uninstalledIds, "uninstall", "onUninstall"); + } } getModuleJSONURLs() { return Array.from(Services.catMan.enumerateCategory(CATEGORY_EXTENSION_MODULES), ({value}) => value); } // Loads all the ext-*.js scripts currently registered. @@ -178,16 +190,33 @@ let apiManager = new class extends Schem if (result.tabId) { if (sync) { return result; } target.messageManager.sendAsyncMessage("Extension:SetFrameData", result); } } } + + // Call static handlers for the given event on the given extension ids, + // and set up a shutdown blocker to ensure they all complete. + _callHandlers(ids, event, method) { + let promises = Array.from(this.eventModules.get(event)) + .map(async modName => { + let module = await this.asyncLoadModule(modName); + return ids.map(id => module[method](id)); + }).flat(); + if (event === "disable") { + promises.push(...ids.map(id => this.emit("disable", id))); + } + + AsyncShutdown.profileBeforeChange.addBlocker( + `Extension API ${event} handlers for ${ids.join(",")}`, + Promise.all(promises)); + } }(); // A proxy for extension ports between two DISTINCT message managers. // This is used by ProxyMessenger, to ensure that a port always receives a // disconnection message when the other side closes, even if that other side // fails to send the message before the message manager disconnects. 
class ExtensionPortProxy { /** @@ -820,27 +849,23 @@ ParentAPIManager = { for (let extension of GlobalManager.extensionMap.values()) { if (extension.parentMessageManager === mm) { extension.parentMessageManager = null; } } } }, - shutdownExtension(extensionId) { + shutdownExtension(extensionId, reason) { + if (["ADDON_DISABLE", "ADDON_UNINSTALL"].includes(reason)) { + apiManager._callHandlers([extensionId], "disable", "onDisable"); + } + for (let [childId, context] of this.proxyContexts) { if (context.extension.id == extensionId) { - if (["ADDON_DISABLE", "ADDON_UNINSTALL"].includes(context.extension.shutdownReason)) { - let modules = apiManager.eventModules.get("disable"); - Array.from(modules).map(async apiName => { - let module = await apiManager.asyncLoadModule(apiName); - module.onDisable(extensionId); - }); - } - context.shutdown(); this.proxyContexts.delete(childId); } } }, receiveMessage({name, data, target}) { try {
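_callHandlers() and shutdownExtension() above look up static per-module handlers by name ("onDisable", "onUninstall") and call them with only the add-on id, both for live shutdowns and for changes detected at startup. A hypothetical sketch of a module that participates (names are illustrative; only modules registered for the corresponding events are loaded and called):

  this.myAPI = class extends ExtensionAPI {
    // Called with just the id: the extension may never be instantiated again,
    // so these handlers must not rely on a running Extension object.
    static onDisable(id) {
      // revert any settings keyed by this add-on id
    }
    static onUninstall(id) {
      // remove any stored data for this add-on id
    }
  };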
--- a/toolkit/components/extensions/ExtensionPreferencesManager.jsm +++ b/toolkit/components/extensions/ExtensionPreferencesManager.jsm @@ -35,20 +35,18 @@ XPCOMUtils.defineLazyGetter(this, "defau return new Preferences({defaultBranch: true}); }); /* eslint-disable mozilla/balanced-listeners */ Management.on("uninstall", (type, {id}) => { ExtensionPreferencesManager.removeAll(id); }); -Management.on("shutdown", (type, extension) => { - if (extension.shutdownReason == "ADDON_DISABLE") { - this.ExtensionPreferencesManager.disableAll(extension.id); - } +Management.on("disable", (type, id) => { + this.ExtensionPreferencesManager.disableAll(id); }); Management.on("startup", async (type, extension) => { if (extension.startupReason == "ADDON_ENABLE") { this.ExtensionPreferencesManager.enableAll(extension.id); } }); /* eslint-enable mozilla/balanced-listeners */
--- a/toolkit/components/extensions/parent/ext-protocolHandlers.js +++ b/toolkit/components/extensions/parent/ext-protocolHandlers.js @@ -45,21 +45,21 @@ this.protocolHandlers = class extends Ex protoInfo.preferredApplicationHandler = handler; protoInfo.alwaysAskBeforeHandling = false; } handlers.appendElement(handler); handlerService.store(protoInfo); } } - onShutdown(shutdownReason) { + onShutdown(isAppShutdown) { let {extension} = this; let {manifest} = extension; - if (shutdownReason === "APP_SHUTDOWN") { + if (isAppShutdown) { return; } for (let handlerConfig of manifest.protocol_handlers) { let protoInfo = protocolService.getProtocolHandlerInfo(handlerConfig.protocol); let appHandlers = protoInfo.possibleApplicationHandlers; for (let i = 0; i < appHandlers.length; i++) { let handler = appHandlers.queryElementAt(i, Ci.nsISupports);
--- a/toolkit/components/extensions/parent/ext-theme.js +++ b/toolkit/components/extensions/parent/ext-theme.js @@ -378,18 +378,18 @@ this.theme = class extends ExtensionAPI extension, details: manifest.theme, darkDetails: manifest.dark_theme, experiment: manifest.theme_experiment, startupData: extension.startupData, }); } - onShutdown(reason) { - if (reason === "APP_SHUTDOWN") { + onShutdown(isAppShutdown) { + if (isAppShutdown) { return; } let {extension} = this; for (let [windowId, theme] of windowOverrides) { if (theme.extension === extension) { Theme.unload(windowId); }
--- a/toolkit/mozapps/extensions/internal/XPIProvider.jsm +++ b/toolkit/mozapps/extensions/internal/XPIProvider.jsm @@ -2314,16 +2314,19 @@ var XPIProvider = { } try { let reason = BOOTSTRAP_REASONS.APP_STARTUP; // Eventually set INSTALLED reason when a bootstrap addon // is dropped in profile folder and automatically installed if (AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_INSTALLED) .includes(addon.id)) reason = BOOTSTRAP_REASONS.ADDON_INSTALL; + else if (AddonManager.getStartupChanges(AddonManager.STARTUP_CHANGE_ENABLED) + .includes(addon.id)) + reason = BOOTSTRAP_REASONS.ADDON_ENABLE; BootstrapScope.get(addon).startup(reason); } catch (e) { logger.error("Failed to load bootstrap addon " + addon.id + " from " + addon.descriptor, e); } } AddonManagerPrivate.recordTimestamp("XPI_bootstrap_addons_end"); } catch (e) {
new file mode 100644
--- /dev/null
+++ b/toolkit/mozapps/extensions/test/xpcshell/test_startup_enable.js
@@ -0,0 +1,43 @@
+
+
+createAppInfo("xpcshell@tests.mozilla.org", "XPCShell", "1", "1");
+BootstrapMonitor.init();
+
+// Test that enabling an extension during startup generates the
+// proper reason for startup().
+add_task(async function test_startup_enable() {
+  const ID = "compat@tests.mozilla.org";
+
+  await promiseStartupManager();
+
+  await promiseInstallWebExtension({
+    manifest: {
+      applications: {
+        gecko: {
+          id: ID,
+          strict_min_version: "1",
+          strict_max_version: "1",
+        },
+      },
+    },
+  });
+
+  BootstrapMonitor.checkInstalled(ID);
+  BootstrapMonitor.checkStarted(ID);
+  let {reason} = BootstrapMonitor.started.get(ID);
+  equal(reason, BOOTSTRAP_REASONS.ADDON_INSTALL,
+        "Startup reason is ADDON_INSTALL at install");
+
+  gAppInfo.platformVersion = "2";
+  await promiseRestartManager("2");
+  BootstrapMonitor.checkInstalled(ID);
+  BootstrapMonitor.checkNotStarted(ID);
+
+  gAppInfo.platformVersion = "1";
+  await promiseRestartManager("1");
+  BootstrapMonitor.checkInstalled(ID);
+  BootstrapMonitor.checkStarted(ID);
+  ({reason} = BootstrapMonitor.started.get(ID));
+  equal(reason, BOOTSTRAP_REASONS.ADDON_ENABLE,
+        "Startup reason is ADDON_ENABLE when re-enabled at startup");
+});
--- a/toolkit/mozapps/extensions/test/xpcshell/xpcshell.ini +++ b/toolkit/mozapps/extensions/test/xpcshell/xpcshell.ini @@ -98,16 +98,17 @@ skip-if = true [test_signed_langpack.js] [test_signed_long.js] [test_signed_updatepref.js] skip-if = require_signing || !allow_legacy_extensions [test_signed_verify.js] [test_startup.js] # Bug 676992: test consistently fails on Android fail-if = os == "android" +[test_startup_enable.js] [test_strictcompatibility.js] head = head_addons.js head_compat.js [test_syncGUID.js] [test_system_allowed.js] head = head_addons.js head_system_addons.js [test_system_delay_update.js] head = head_addons.js head_system_addons.js skip-if = true # Bug 1495021