author | Carsten "Tomcat" Book <cbook@mozilla.com> | Tue, 21 Mar 2017 12:02:59 +0100 |
changeset 348560 | 67ecac412f76a6bf229fead47b40eed69c146d61 |
parent 348536 | ca4ae502156eaea6fffb296bb9c3b3930af8ab58 (current diff) |
parent 348559 | d6b50ff5c020faa23f7e1bdf6280a6927f0a1e87 (diff) |
child 348584 | bd4f3810b402147f8656390555b29502ce5e2644 |
push id | 31528 |
push user | cbook@mozilla.com |
push date | Tue, 21 Mar 2017 11:03:13 +0000 |
treeherder | mozilla-central@67ecac412f76 |
perfherder | talos, build metrics, platform microbench (compared to previous push) |
reviewers | merge |
milestone | 55.0a1 |
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64 |
testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/entities.html.ini
testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/tags.html.ini
testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/timestamps.html.ini
tools/lint/eslint/eslint-plugin-mozilla/.eslintrc.js
tools/lint/eslint/eslint-plugin-mozilla/lib/processors/self-hosted.js
--- a/browser/components/extensions/test/browser/browser-common.ini
+++ b/browser/components/extensions/test/browser/browser-common.ini
@@ -124,16 +124,17 @@ support-files =
 [browser_ext_themes_validation.js]
 [browser_ext_topwindowid.js]
 [browser_ext_url_overrides_newtab.js]
 [browser_ext_url_overrides_home.js]
 [browser_ext_webRequest.js]
 [browser_ext_webNavigation_frameId0.js]
 [browser_ext_webNavigation_getFrames.js]
 [browser_ext_webNavigation_onCreatedNavigationTarget.js]
+[browser_ext_webNavigation_onCreatedNavigationTarget_contextmenu.js]
 [browser_ext_webNavigation_onCreatedNavigationTarget_window_open.js]
 [browser_ext_webNavigation_urlbar_transitions.js]
 [browser_ext_windows.js]
 [browser_ext_windows_create.js]
 tags = fullscreen
 [browser_ext_windows_create_params.js]
 [browser_ext_windows_create_tabId.js]
 [browser_ext_windows_create_url.js]
--- a/browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget.js +++ b/browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget.js @@ -47,24 +47,16 @@ async function runTestCase({extension, o is(webNavMsg.sourceTabId, sourceTabId, "Got the expected sourceTabId property"); is(webNavMsg.sourceFrameId, sourceFrameId, "Got the expected sourceFrameId property"); is(webNavMsg.url, url, "Got the expected url property"); is(completedNavMsg.tabId, createdTabId, "Got the expected webNavigation.onCompleted tabId property"); is(completedNavMsg.url, url, "Got the expected webNavigation.onCompleted url property"); } -async function clickContextMenuItem({pageElementSelector, contextMenuItemLabel}) { - const contentAreaContextMenu = await openContextMenu(pageElementSelector); - const item = contentAreaContextMenu.getElementsByAttribute("label", contextMenuItemLabel); - is(item.length, 1, `found contextMenu item for "${contextMenuItemLabel}"`); - item[0].click(); - await closeContextMenu(); -} - add_task(function* test_on_created_navigation_target_from_mouse_click() { const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); const extension = ExtensionTestUtils.loadExtension({ background, manifest: { permissions: ["webNavigation"], }, @@ -122,69 +114,16 @@ add_task(function* test_on_created_navig }, }); yield BrowserTestUtils.removeTab(tab); yield extension.unload(); }); -add_task(function* test_on_created_navigation_target_from_context_menu() { - const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); - - const extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - permissions: ["webNavigation"], - }, - }); - - yield extension.startup(); - - const expectedSourceTab = yield extension.awaitMessage("expectedSourceTab"); - - info("Open link in a new tab from the context menu"); - - yield runTestCase({ - extension, - async openNavTarget() { - await clickContextMenuItem({ - pageElementSelector: "#test-create-new-tab-from-context-menu", - contextMenuItemLabel: "Open Link in New Tab", - }); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: 0, - url: `${OPENED_PAGE}#new-tab-from-context-menu`, - }, - }); - - info("Open link in a new window from the context menu"); - - yield runTestCase({ - extension, - async openNavTarget() { - await clickContextMenuItem({ - pageElementSelector: "#test-create-new-window-from-context-menu", - contextMenuItemLabel: "Open Link in New Window", - }); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: 0, - url: `${OPENED_PAGE}#new-window-from-context-menu`, - }, - }); - - yield BrowserTestUtils.removeTab(tab); - - yield extension.unload(); -}); - add_task(function* test_on_created_navigation_target_from_mouse_click_subframe() { const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); const extension = ExtensionTestUtils.loadExtension({ background, manifest: { permissions: ["webNavigation"], }, @@ -251,70 +190,8 @@ add_task(function* test_on_created_navig }, }); yield BrowserTestUtils.removeTab(tab); yield extension.unload(); }); -add_task(function* test_on_created_navigation_target_from_context_menu_subframe() { - const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); - - const extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - permissions: ["webNavigation"], - }, - }); - - yield 
extension.startup(); - - const expectedSourceTab = yield extension.awaitMessage("expectedSourceTab"); - - info("Open a subframe link in a new tab from the context menu"); - - yield runTestCase({ - extension, - async openNavTarget() { - await clickContextMenuItem({ - pageElementSelector: function() { - // This code runs as a framescript in the child process and it returns the - // target link in the subframe. - return this.content.frames[0] // eslint-disable-line mozilla/no-cpows-in-tests - .document.querySelector("#test-create-new-tab-from-context-menu-subframe"); - }, - contextMenuItemLabel: "Open Link in New Tab", - }); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: expectedSourceTab.sourceTabFrames[1].frameId, - url: `${OPENED_PAGE}#new-tab-from-context-menu-subframe`, - }, - }); - - info("Open a subframe link in a new window from the context menu"); - - yield runTestCase({ - extension, - async openNavTarget() { - await clickContextMenuItem({ - pageElementSelector: function() { - // This code runs as a framescript in the child process and it returns the - // target link in the subframe. - return this.content.frames[0] // eslint-disable-line mozilla/no-cpows-in-tests - .document.querySelector("#test-create-new-window-from-context-menu-subframe"); - }, - contextMenuItemLabel: "Open Link in New Window", - }); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: expectedSourceTab.sourceTabFrames[1].frameId, - url: `${OPENED_PAGE}#new-window-from-context-menu-subframe`, - }, - }); - - yield BrowserTestUtils.removeTab(tab); - - yield extension.unload(); -});
copy from browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget.js copy to browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget_contextmenu.js --- a/browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget.js +++ b/browser/components/extensions/test/browser/browser_ext_webNavigation_onCreatedNavigationTarget_contextmenu.js @@ -55,83 +55,16 @@ async function runTestCase({extension, o async function clickContextMenuItem({pageElementSelector, contextMenuItemLabel}) { const contentAreaContextMenu = await openContextMenu(pageElementSelector); const item = contentAreaContextMenu.getElementsByAttribute("label", contextMenuItemLabel); is(item.length, 1, `found contextMenu item for "${contextMenuItemLabel}"`); item[0].click(); await closeContextMenu(); } -add_task(function* test_on_created_navigation_target_from_mouse_click() { - const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); - - const extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - permissions: ["webNavigation"], - }, - }); - - yield extension.startup(); - - const expectedSourceTab = yield extension.awaitMessage("expectedSourceTab"); - - info("Open link in a new tab using Ctrl-click"); - - yield runTestCase({ - extension, - openNavTarget() { - BrowserTestUtils.synthesizeMouseAtCenter("#test-create-new-tab-from-mouse-click", - {ctrlKey: true, metaKey: true}, - tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: 0, - url: `${OPENED_PAGE}#new-tab-from-mouse-click`, - }, - }); - - info("Open link in a new window using Shift-click"); - - yield runTestCase({ - extension, - openNavTarget() { - BrowserTestUtils.synthesizeMouseAtCenter("#test-create-new-window-from-mouse-click", - {shiftKey: true}, - tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: 0, - url: `${OPENED_PAGE}#new-window-from-mouse-click`, - }, - }); - - info("Open link with target=\"_blank\" in a new tab using click"); - - yield runTestCase({ - extension, - openNavTarget() { - BrowserTestUtils.synthesizeMouseAtCenter("#test-create-new-tab-from-targetblank-click", - {}, - tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: 0, - url: `${OPENED_PAGE}#new-tab-from-targetblank-click`, - }, - }); - - yield BrowserTestUtils.removeTab(tab); - - yield extension.unload(); -}); - add_task(function* test_on_created_navigation_target_from_context_menu() { const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); const extension = ExtensionTestUtils.loadExtension({ background, manifest: { permissions: ["webNavigation"], }, @@ -175,92 +108,16 @@ add_task(function* test_on_created_navig }, }); yield BrowserTestUtils.removeTab(tab); yield extension.unload(); }); -add_task(function* test_on_created_navigation_target_from_mouse_click_subframe() { - const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); - - const extension = ExtensionTestUtils.loadExtension({ - background, - manifest: { - permissions: ["webNavigation"], - }, - }); - - yield extension.startup(); - - const expectedSourceTab = yield extension.awaitMessage("expectedSourceTab"); - - info("Open a subframe link in a new tab using Ctrl-click"); - - yield runTestCase({ - extension, - openNavTarget() { - 
BrowserTestUtils.synthesizeMouseAtCenter(function() { - // This code runs as a framescript in the child process and it returns the - // target link in the subframe. - return this.content.frames[0].document // eslint-disable-line mozilla/no-cpows-in-tests - .querySelector("#test-create-new-tab-from-mouse-click-subframe"); - }, {ctrlKey: true, metaKey: true}, tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: expectedSourceTab.sourceTabFrames[1].frameId, - url: `${OPENED_PAGE}#new-tab-from-mouse-click-subframe`, - }, - }); - - info("Open a subframe link in a new window using Shift-click"); - - yield runTestCase({ - extension, - openNavTarget() { - BrowserTestUtils.synthesizeMouseAtCenter(function() { - // This code runs as a framescript in the child process and it returns the - // target link in the subframe. - return this.content.frames[0].document // eslint-disable-line mozilla/no-cpows-in-tests - .querySelector("#test-create-new-window-from-mouse-click-subframe"); - }, {shiftKey: true}, tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: expectedSourceTab.sourceTabFrames[1].frameId, - url: `${OPENED_PAGE}#new-window-from-mouse-click-subframe`, - }, - }); - - info("Open a subframe link with target=\"_blank\" in a new tab using click"); - - yield runTestCase({ - extension, - openNavTarget() { - BrowserTestUtils.synthesizeMouseAtCenter(function() { - // This code runs as a framescript in the child process and it returns the - // target link in the subframe. - return this.content.frames[0].document // eslint-disable-line mozilla/no-cpows-in-tests - .querySelector("#test-create-new-tab-from-targetblank-click-subframe"); - }, {}, tab.linkedBrowser); - }, - expectedWebNavProps: { - sourceTabId: expectedSourceTab.sourceTabId, - sourceFrameId: expectedSourceTab.sourceTabFrames[1].frameId, - url: `${OPENED_PAGE}#new-tab-from-targetblank-click-subframe`, - }, - }); - - yield BrowserTestUtils.removeTab(tab); - - yield extension.unload(); -}); - add_task(function* test_on_created_navigation_target_from_context_menu_subframe() { const tab = yield BrowserTestUtils.openNewForegroundTab(gBrowser, SOURCE_PAGE); const extension = ExtensionTestUtils.loadExtension({ background, manifest: { permissions: ["webNavigation"], },
--- a/devtools/shared/css/generated/properties-db.js
+++ b/devtools/shared/css/generated/properties-db.js
@@ -2860,16 +2860,17 @@ exports.CSS_PROPERTIES = {
     "color-interpolation",
     "color-interpolation-filters",
     "column-count",
     "column-fill",
     "column-gap",
     "column-rule-color",
     "column-rule-style",
     "column-rule-width",
+    "column-span",
     "column-width",
     "contain",
     "content",
     "-moz-control-character-visibility",
     "counter-increment",
     "counter-reset",
     "cursor",
     "display",
@@ -9342,16 +9343,20 @@ exports.PREFERENCES = [
     "box-decoration-break",
     "layout.css.box-decoration-break.enabled"
   ],
   [
     "color-adjust",
     "layout.css.color-adjust.enabled"
   ],
   [
+    "column-span",
+    "layout.css.column-span.enabled"
+  ],
+  [
     "contain",
     "layout.css.contain.enabled"
  ],
   [
     "font-variation-settings",
     "layout.css.font-variations.enabled"
   ],
   [
--- a/dom/audiochannel/AudioChannelService.cpp
+++ b/dom/audiochannel/AudioChannelService.cpp
@@ -31,16 +31,18 @@
 #endif
 
 #include "mozilla/Preferences.h"
 
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::hal;
 
+static mozilla::LazyLogModule gAudioChannelLog("AudioChannel");
+
 namespace {
 
 // If true, any new AudioChannelAgent will be muted when created.
 bool sAudioChannelMutedByDefault = false;
 
 bool sAudioChannelCompeting = false;
 bool sAudioChannelCompetingAllAgents = false;
 bool sXPCOMShuttingDown = false;
@@ -221,23 +223,19 @@ AudioChannelService::Get()
   if (sXPCOMShuttingDown) {
     return nullptr;
   }
 
   RefPtr<AudioChannelService> service = gAudioChannelService.get();
   return service.forget();
 }
 
-/* static */ PRLogModuleInfo*
+/* static */ LogModule*
 AudioChannelService::GetAudioChannelLog()
 {
-  static PRLogModuleInfo *gAudioChannelLog;
-  if (!gAudioChannelLog) {
-    gAudioChannelLog = PR_NewLogModule("AudioChannel");
-  }
   return gAudioChannelLog;
 }
 
 /* static */ void
 AudioChannelService::Shutdown()
 {
   if (gAudioChannelService) {
     nsCOMPtr<nsIObserverService> obs = mozilla::services::GetObserverService();
--- a/dom/audiochannel/AudioChannelService.h
+++ b/dom/audiochannel/AudioChannelService.h
@@ -11,16 +11,17 @@
 #include "nsAutoPtr.h"
 #include "nsIObserver.h"
 #include "nsTObserverArray.h"
 #include "nsTArray.h"
 
 #include "AudioChannelAgent.h"
 #include "nsAttrValue.h"
 #include "mozilla/dom/AudioChannelBinding.h"
+#include "mozilla/Logging.h"
 
 #include <functional>
 
 class nsIRunnable;
 class nsPIDOMWindowOuter;
 struct PRLogModuleInfo;
 
 namespace mozilla {
@@ -97,17 +98,17 @@ public:
   /**
    * Returns the AudioChannelService singleton if one exists.
    * If AudioChannelService doesn't exist, returns null.
    */
   static already_AddRefed<AudioChannelService> Get();
 
   static bool IsAudioChannelMutedByDefault();
 
-  static PRLogModuleInfo* GetAudioChannelLog();
+  static LogModule* GetAudioChannelLog();
 
   static bool IsEnableAudioCompeting();
 
   /**
    * Any audio channel agent that starts playing should register itself to
    * this service, sharing the AudioChannel.
    */
   void RegisterAudioChannelAgent(AudioChannelAgent* aAgent,
--- a/dom/media/webvtt/vtt.jsm
+++ b/dom/media/webvtt/vtt.jsm
@@ -399,17 +399,17 @@ Cu.import('resource://gre/modules/Servic
         if (tagStack.length &&
             tagStack[tagStack.length - 1] === t.substr(2).replace(">", "")) {
           tagStack.pop();
           current = current.parentNode;
         }
         // Otherwise just ignore the end tag.
         continue;
       }
-      var ts = collectTimeStamp(t.substr(1, t.length - 2));
+      var ts = collectTimeStamp(t.substr(1, t.length - 1));
       var node;
       if (ts) {
         // Timestamps are lead nodes as well.
         node = window.document.createProcessingInstruction("timestamp", normalizedTimeStamp(ts));
         current.appendChild(node);
         continue;
       }
       var m = t.match(/^<([^.\s/0-9>]+)(\.[^\s\\>]+)?([^>\\]+)?(\\?)>?$/);
--- a/dom/xbl/nsXBLService.cpp
+++ b/dom/xbl/nsXBLService.cpp
@@ -411,18 +411,20 @@ nsXBLService::IsChromeOrResourceURI(nsIU
 // on destruction.
 class MOZ_STACK_CLASS AutoStyleNewChildren
 {
 public:
   explicit AutoStyleNewChildren(Element* aElement) : mElement(aElement) { MOZ_ASSERT(mElement); }
   ~AutoStyleNewChildren()
   {
     nsIPresShell* presShell = mElement->OwnerDoc()->GetShell();
-    ServoStyleSet* servoSet = presShell ? presShell->StyleSet()->GetAsServo() : nullptr;
-    if (servoSet) {
+    if (!presShell || !presShell->DidInitialize()) {
+      return;
+    }
+    if (ServoStyleSet* servoSet = presShell->StyleSet()->GetAsServo()) {
       // In general the element is always styled by the time we're applying XBL
       // bindings, because we need to style the element to know what the binding
       // URI is. However, programmatic consumers of the XBL service (like the
       // XML pretty printer) _can_ apply bindings without having styled the bound
       // element. We could assert against this and require the callers manually
       // resolve the style first, but it's easy enough to just handle here.
       if (MOZ_UNLIKELY(!mElement->HasServoData())) {
         servoSet->StyleNewSubtree(mElement);
--- a/js/src/builtin/.eslintrc.js
+++ b/js/src/builtin/.eslintrc.js
@@ -1,15 +1,19 @@
 "use strict";
 
 module.exports = {
   "extends": [
     "../../../toolkit/.eslintrc.js"
   ],
 
+  "plugins": [
+    "spidermonkey-js"
+  ],
+
   "rules": {
     // We should fix those at some point, but we use this to detect NaNs.
     "no-self-compare": "off",
     // Disabling these two make it easier to implement the spec.
     "spaced-comment": "off",
     "no-lonely-if": "off",
     // SpiderMonkey's style doesn't match any of the possible options.
     "brace-style": "off",
--- a/layout/style/nsStyleContext.h
+++ b/layout/style/nsStyleContext.h
@@ -683,16 +683,18 @@ private:
     if (!aComputeData && needToCompute) { \
       return nullptr; \
     } \
     \
     const nsStyle##name_* data = \
       Servo_GetStyle##name_(mSource.AsServoComputedValues()); \
     /* perform any remaining main thread work on the struct */ \
     if (needToCompute) { \
+      MOZ_ASSERT(NS_IsMainThread()); \
+      MOZ_ASSERT(!mozilla::ServoStyleSet::IsInServoTraversal()); \
       const_cast<nsStyle##name_*>(data)->FinishStyle(PresContext()); \
       /* the Servo-backed StyleContextSource owns the struct */ \
       AddStyleBit(NS_STYLE_INHERIT_BIT(name_)); \
     } \
     return data; \
   }
 
 #define STYLE_STRUCT_RESET(name_, checkdata_cb_) \
--- a/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java
+++ b/mobile/android/base/java/org/mozilla/gecko/BrowserApp.java
@@ -639,17 +639,17 @@ public class BrowserApp extends GeckoApp
             }
 
             @Override
             public boolean onTouch(View v, MotionEvent event) {
                 return false;
             }
         });
 
-        mProgressView = (ToolbarProgressView) findViewById(R.id.progress);
+        mProgressView = (ToolbarProgressView) findViewById(R.id.page_progress);
        mProgressView.setDynamicToolbar(mDynamicToolbar);
         mBrowserToolbar.setProgressBar(mProgressView);
 
         // Initialize Tab History Controller.
         tabHistoryController = new TabHistoryController(new OnShowTabHistory() {
             @Override
             public void onShowHistory(final List<TabHistoryPage> historyPageList, final int toIndex) {
                 runOnUiThread(new Runnable() {
--- a/mobile/android/base/java/org/mozilla/gecko/customtabs/CustomTabsActivity.java +++ b/mobile/android/base/java/org/mozilla/gecko/customtabs/CustomTabsActivity.java @@ -25,16 +25,17 @@ import android.support.v7.widget.Toolbar import android.text.TextUtils; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup.LayoutParams; import android.widget.ImageButton; +import android.widget.ProgressBar; import org.mozilla.gecko.GeckoApp; import org.mozilla.gecko.R; import org.mozilla.gecko.Tab; import org.mozilla.gecko.Tabs; import org.mozilla.gecko.menu.GeckoMenu; import org.mozilla.gecko.menu.GeckoMenuInflater; import org.mozilla.gecko.util.ColorUtil; @@ -49,16 +50,17 @@ public class CustomTabsActivity extends private static final String SAVED_TOOLBAR_COLOR = "SavedToolbarColor"; @ColorInt private static final int DEFAULT_ACTION_BAR_COLOR = 0xFF363b40; // default color to match design private final SparseArrayCompat<PendingIntent> menuItemsIntent = new SparseArrayCompat<>(); private GeckoPopupMenu popupMenu; private ActionBarPresenter actionBarPresenter; + private ProgressBar mProgressView; // A state to indicate whether this activity is finishing with customize animation private boolean usingCustomAnimation = false; @ColorInt private int toolbarColor = DEFAULT_ACTION_BAR_COLOR; @Override public void onCreate(Bundle savedInstanceState) { @@ -70,17 +72,18 @@ public class CustomTabsActivity extends toolbarColor = getIntent().getIntExtra(EXTRA_TOOLBAR_COLOR, DEFAULT_ACTION_BAR_COLOR); } // Translucent color does not make sense for toolbar color. Ensure it is 0xFF. toolbarColor = 0xFF000000 | toolbarColor; setThemeFromToolbarColor(); - final Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); + mProgressView = (ProgressBar) findViewById(R.id.page_progress); + final Toolbar toolbar = (Toolbar) findViewById(R.id.actionbar); setSupportActionBar(toolbar); final ActionBar actionBar = getSupportActionBar(); bindNavigationCallback(toolbar); actionBarPresenter = new ActionBarPresenter(actionBar); actionBarPresenter.displayUrlOnly(getIntent().getDataString()); actionBarPresenter.setBackgroundColor(toolbarColor, getWindow()); actionBar.setDisplayHomeAsUpEnabled(true); @@ -145,16 +148,27 @@ public class CustomTabsActivity extends } @Override public void onTabChanged(Tab tab, Tabs.TabEvents msg, String data) { if (!Tabs.getInstance().isSelectedTab(tab)) { return; } + if (msg == Tabs.TabEvents.START + || msg == Tabs.TabEvents.STOP + || msg == Tabs.TabEvents.ADDED + || msg == Tabs.TabEvents.LOAD_ERROR + || msg == Tabs.TabEvents.LOADED + || msg == Tabs.TabEvents.LOCATION_CHANGE) { + + updateProgress((tab.getState() == Tab.STATE_LOADING), + tab.getLoadProgress()); + } + if (msg == Tabs.TabEvents.LOCATION_CHANGE || msg == Tabs.TabEvents.SECURITY_CHANGE || msg == Tabs.TabEvents.TITLE) { actionBarPresenter.update(tab); } updateMenuItemForward(); } @@ -358,16 +372,31 @@ public class CustomTabsActivity extends } final MenuItem forwardMenuItem = popupMenu.getMenu().findItem(R.id.custom_tabs_menu_forward); final Tab tab = Tabs.getInstance().getSelectedTab(); final boolean enabled = (tab != null && tab.canDoForward()); forwardMenuItem.setEnabled(enabled); } + /** + * Update loading progress of current page + * + * @param isLoading to indicate whether ProgressBar should be visible or not + * @param progress value of loading progress in percent, should be 0 - 100. 
+ */ + private void updateProgress(final boolean isLoading, final int progress) { + if (isLoading) { + mProgressView.setVisibility(View.VISIBLE); + mProgressView.setProgress(progress); + } else { + mProgressView.setVisibility(View.GONE); + } + } + private void onReloadClicked() { final Tab tab = Tabs.getInstance().getSelectedTab(); if (tab != null) { tab.doReload(true); } } private void onForwardClicked() {
--- a/mobile/android/base/resources/layout/customtabs_activity.xml +++ b/mobile/android/base/resources/layout/customtabs_activity.xml @@ -1,38 +1,39 @@ <?xml version="1.0" encoding="utf-8"?> <!-- This Source Code Form is subject to the terms of the Mozilla Public - License, v. 2.0. If a copy of the MPL was not distributed with this - file, You can obtain one at http://mozilla.org/MPL/2.0/. --> <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:app="http://schemas.android.com/apk/res-auto" + xmlns:tools="http://schemas.android.com/tools" android:id="@+id/root_layout" android:orientation="vertical" android:layout_width="match_parent" android:layout_height="match_parent"> <!-- This layout is quite complex because GeckoApp accesses all view groups in this tree. In a perfect world this should just include a GeckoView. --> <android.support.v7.widget.Toolbar - android:id="@+id/toolbar" + android:id="@id/actionbar" android:layout_width="match_parent" android:layout_height="?attr/actionBarSize" android:elevation="4dp" android:background="@color/text_and_tabs_tray_grey" app:layout_scrollFlags="scroll|enterAlways"/> <view class="org.mozilla.gecko.GeckoApp$MainLayout" android:id="@+id/main_layout" android:layout_width="match_parent" - android:layout_below="@+id/toolbar" + android:layout_below="@id/actionbar" android:layout_height="match_parent" android:background="@android:color/transparent"> <RelativeLayout android:id="@+id/gecko_layout" android:layout_width="match_parent" android:layout_height="match_parent" android:layout_below="@+id/tablet_tab_strip" android:layout_above="@+id/find_in_page"> @@ -42,9 +43,19 @@ android:layout_width="match_parent" android:layout_height="match_parent" android:scrollbars="none"/> </RelativeLayout> </view> + <ProgressBar + android:id="@id/page_progress" + style="@style/Base.Widget.AppCompat.ProgressBar.Horizontal" + android:layout_width="match_parent" + android:layout_height="4dp" + android:layout_alignTop="@id/main_layout" + android:background="@drawable/url_bar_bg" + android:progressDrawable="@drawable/progressbar" + tools:progress="70"/> + </RelativeLayout> \ No newline at end of file
--- a/mobile/android/base/resources/layout/gecko_app.xml +++ b/mobile/android/base/resources/layout/gecko_app.xml @@ -139,26 +139,26 @@ android:id="@+id/browser_toolbar" style="@style/BrowserToolbar" android:layout_width="match_parent" android:layout_height="match_parent" android:clickable="true" android:focusable="true" android:background="@drawable/url_bar_bg"/> - <org.mozilla.gecko.ActionModeCompatView android:id="@+id/actionbar" + <org.mozilla.gecko.ActionModeCompatView android:id="@id/actionbar" android:layout_height="match_parent" android:layout_width="match_parent" style="@style/GeckoActionBar.ActionMode"/> </ViewFlipper> </LinearLayout> - <org.mozilla.gecko.toolbar.ToolbarProgressView android:id="@+id/progress" + <org.mozilla.gecko.toolbar.ToolbarProgressView android:id="@id/page_progress" android:layout_width="match_parent" android:layout_height="14dp" android:layout_marginTop="-8dp" android:layout_below="@id/browser_chrome" android:src="@drawable/progress" android:background="@null" android:visibility="gone" />
--- a/mobile/android/base/resources/values/ids.xml
+++ b/mobile/android/base/resources/values/ids.xml
@@ -14,11 +14,13 @@
     <item type="id" name="recycler_view_click_support" />
     <item type="id" name="range_list"/>
     <item type="id" name="pref_header_general"/>
     <item type="id" name="pref_header_privacy"/>
     <item type="id" name="pref_header_search"/>
     <item type="id" name="updateServicePermissionNotification" />
     <item type="id" name="websiteContentNotification" />
     <item type="id" name="foregroundNotification" />
 
+    <item type="id" name="actionbar"/>
     <item type="id" name="action_button"/>
+    <item type="id" name="page_progress"/>
 </resources>
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/fxa/sync/FxAccountSyncAdapter.java +++ b/mobile/android/services/src/main/java/org/mozilla/gecko/fxa/sync/FxAccountSyncAdapter.java @@ -29,16 +29,17 @@ import org.mozilla.gecko.fxa.authenticat import org.mozilla.gecko.fxa.authenticator.FxAccountAuthenticator; import org.mozilla.gecko.fxa.login.FxAccountLoginStateMachine; import org.mozilla.gecko.fxa.login.Married; import org.mozilla.gecko.fxa.login.State; import org.mozilla.gecko.fxa.login.State.StateLabel; import org.mozilla.gecko.fxa.sync.FxAccountSyncDelegate.Result; import org.mozilla.gecko.sync.BackoffHandler; import org.mozilla.gecko.sync.GlobalSession; +import org.mozilla.gecko.sync.MetaGlobal; import org.mozilla.gecko.sync.PrefsBackoffHandler; import org.mozilla.gecko.sync.SharedPreferencesClientsDataDelegate; import org.mozilla.gecko.sync.SyncConfiguration; import org.mozilla.gecko.sync.ThreadPool; import org.mozilla.gecko.sync.Utils; import org.mozilla.gecko.sync.crypto.KeyBundle; import org.mozilla.gecko.sync.delegates.GlobalSessionCallback; import org.mozilla.gecko.sync.delegates.ClientsDataDelegate; @@ -127,16 +128,17 @@ public class FxAccountSyncAdapter extend /* package-local */ void requestFollowUpSync(String stage) { this.stageNamesForFollowUpSync.add(stage); } protected final Collection<String> stageNamesToSync; // Keeps track of incomplete stages during this sync that need to be re-synced once we're done. private final List<String> stageNamesForFollowUpSync = Collections.synchronizedList(new ArrayList<String>()); + private boolean fullSyncNecessary = false; public SyncDelegate(BlockingQueue<Result> latch, SyncResult syncResult, AndroidFxAccount fxAccount, Collection<String> stageNamesToSync) { super(latch, syncResult); this.stageNamesToSync = Collections.unmodifiableCollection(stageNamesToSync); } public Collection<String> getStageNamesToSync() { return this.stageNamesToSync; @@ -195,16 +197,24 @@ public class FxAccountSyncAdapter extend * Schedule an incomplete stage for a follow-up sync. */ @Override public void handleIncompleteStage(Stage currentState, GlobalSession globalSession) { syncDelegate.requestFollowUpSync(currentState.getRepositoryName()); } + /** + * Use with caution, as this will request an immediate follow-up sync of all stages. + */ + @Override + public void handleFullSyncNecessary() { + syncDelegate.fullSyncNecessary = true; + } + @Override public void handleSuccess(GlobalSession globalSession) { Logger.info(LOG_TAG, "Global session succeeded."); // Get the number of clients, so we can schedule the sync interval accordingly. try { int otherClientsCount = globalSession.getClientsDelegate().getClientsCount(); Logger.debug(LOG_TAG, "" + otherClientsCount + " other client(s)."); @@ -449,16 +459,17 @@ public class FxAccountSyncAdapter extend }); final BlockingQueue<Result> latch = new LinkedBlockingQueue<>(1); Collection<String> knownStageNames = SyncConfiguration.validEngineNames(); Collection<String> stageNamesToSync = Utils.getStagesToSyncFromBundle(knownStageNames, extras); final SyncDelegate syncDelegate = new SyncDelegate(latch, syncResult, fxAccount, stageNamesToSync); + Result offeredResult = null; try { // This will be the same chunk of SharedPreferences that we pass through to GlobalSession/SyncConfiguration. 
final SharedPreferences sharedPrefs = fxAccount.getSyncPrefs(); final BackoffHandler backgroundBackoffHandler = new PrefsBackoffHandler(sharedPrefs, "background"); final BackoffHandler rateLimitBackoffHandler = new PrefsBackoffHandler(sharedPrefs, "rate"); @@ -586,37 +597,57 @@ public class FxAccountSyncAdapter extend fxAccount.fetchProfileJSON(); } catch (Exception e) { syncDelegate.handleError(e); return; } } }); - latch.take(); + offeredResult = latch.take(); } catch (Exception e) { Logger.error(LOG_TAG, "Got error syncing.", e); syncDelegate.handleError(e); } finally { fxAccount.releaseSharedAccountStateLock(); } + lastSyncRealtimeMillis = SystemClock.elapsedRealtime(); + + // We got to this point without being offered a result, and so it's unwise to proceed with + // trying to sync stages again. Nothing else we can do but log an error. + if (offeredResult == null) { + Logger.error(LOG_TAG, "Did not receive a sync result from the delegate."); + return; + } + + // Full sync (of all of stages) is necessary if we hit "concurrent modification" errors while + // uploading meta/global stage. This is considered both a rare and important event, so it's + // deemed safe and necessary to request an immediate sync, which will ignore any back-offs and + // will happen right away. + if (syncDelegate.fullSyncNecessary) { + Logger.info(LOG_TAG, "Syncing done. Full follow-up sync necessary, requesting immediate sync."); + fxAccount.requestImmediateSync(null, null); + return; + } + // If there are any incomplete stages, request a follow-up sync. Otherwise, we're done. // Incomplete stage is: // - one that hit a 412 error during either upload or download of data, indicating that // its collection has been modified remotely, or // - one that hit a sync deadline final String[] stagesToSyncAgain; synchronized (syncDelegate.stageNamesForFollowUpSync) { stagesToSyncAgain = syncDelegate.stageNamesForFollowUpSync.toArray( new String[syncDelegate.stageNamesForFollowUpSync.size()] ); } - if (stagesToSyncAgain.length > 0) { - Logger.info(LOG_TAG, "Syncing done. Requesting an immediate follow-up sync."); - fxAccount.requestImmediateSync(stagesToSyncAgain, null); - } else { + if (stagesToSyncAgain.length == 0) { Logger.info(LOG_TAG, "Syncing done."); + return; } - lastSyncRealtimeMillis = SystemClock.elapsedRealtime(); + + // If there are any other stages marked as incomplete, request that they're synced again. + Logger.info(LOG_TAG, "Syncing done. Requesting an immediate follow-up sync."); + fxAccount.requestImmediateSync(stagesToSyncAgain, null); } }
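The FxAccountSyncAdapter change above amounts to a three-way decision once a sync finishes: a concurrent-modification signal from the meta/global upload forces an immediate sync of all stages, otherwise any incomplete stages get a targeted follow-up sync, otherwise nothing further is requested. A minimal sketch of that precedence as a standalone helper; the class and method names below are illustrative and do not appear in the patch.

import java.util.List;

final class FollowUpDecision {
    enum Kind { NONE, FULL_SYNC, STAGES }

    final Kind kind;
    final String[] stages;

    private FollowUpDecision(Kind kind, String[] stages) {
        this.kind = kind;
        this.stages = stages;
    }

    // A 412 on the meta/global upload trumps everything: re-sync all stages
    // immediately, ignoring back-off. Otherwise only the stages that hit a 412
    // or a sync deadline are retried.
    static FollowUpDecision decide(boolean fullSyncNecessary, List<String> incompleteStages) {
        if (fullSyncNecessary) {
            return new FollowUpDecision(Kind.FULL_SYNC, null);
        }
        if (!incompleteStages.isEmpty()) {
            return new FollowUpDecision(Kind.STAGES,
                    incompleteStages.toArray(new String[incompleteStages.size()]));
        }
        return new FollowUpDecision(Kind.NONE, null);
    }
}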
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/GlobalSession.java +++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/GlobalSession.java @@ -1,15 +1,16 @@ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package org.mozilla.gecko.sync; import android.content.Context; +import android.support.annotation.VisibleForTesting; import org.json.simple.JSONArray; import org.mozilla.gecko.background.common.log.Logger; import org.mozilla.gecko.sync.crypto.CryptoException; import org.mozilla.gecko.sync.crypto.KeyBundle; import org.mozilla.gecko.sync.delegates.ClientsDataDelegate; import org.mozilla.gecko.sync.delegates.FreshStartDelegate; import org.mozilla.gecko.sync.delegates.GlobalSessionCallback; @@ -404,69 +405,91 @@ public class GlobalSession implements Ht updateMetaGlobalInPlace(); Logger.debug(LOG_TAG, "Uploading updated meta/global record."); final Object monitor = new Object(); Runnable doUpload = new Runnable() { @Override public void run() { - config.metaGlobal.upload(new MetaGlobalDelegate() { - @Override - public void handleSuccess(MetaGlobal global, SyncStorageResponse response) { - Logger.info(LOG_TAG, "Successfully uploaded updated meta/global record."); - // Engine changes are stored as diffs, so update enabled engines in config to match uploaded meta/global. - config.enabledEngineNames = config.metaGlobal.getEnabledEngineNames(); - // Clear userSelectedEngines because they are updated in config and meta/global. - config.userSelectedEngines = null; - - synchronized (monitor) { - monitor.notify(); - } - } - - @Override - public void handleMissing(MetaGlobal global, SyncStorageResponse response) { - Logger.warn(LOG_TAG, "Got 404 missing uploading updated meta/global record; shouldn't happen. Ignoring."); - synchronized (monitor) { - monitor.notify(); - } - } - - @Override - public void handleFailure(SyncStorageResponse response) { - Logger.warn(LOG_TAG, "Failed to upload updated meta/global record; ignoring."); - synchronized (monitor) { - monitor.notify(); - } - } - - @Override - public void handleError(Exception e) { - Logger.warn(LOG_TAG, "Got exception trying to upload updated meta/global record; ignoring.", e); - synchronized (monitor) { - monitor.notify(); - } - } - }); + // During regular meta/global upload, set X-I-U-S to the last-modified value of meta/global + // in info/collections, to ensure we catch concurrent modifications by other clients. + Long lastModifiedTimestamp = config.infoCollections.getTimestamp("meta"); + // Theoretically, meta/global's timestamp might be missing from info/collections. + // The safest thing in that case is to assert that meta/global hasn't been modified by other + // clients by setting X-I-U-S to 0. + // See Bug 1346438. 
+ if (lastModifiedTimestamp == null) { + lastModifiedTimestamp = 0L; + } + config.metaGlobal.upload(lastModifiedTimestamp, makeMetaGlobalUploadDelegate(config, callback, monitor)); } }; final Thread upload = new Thread(doUpload); synchronized (monitor) { try { upload.start(); monitor.wait(); Logger.debug(LOG_TAG, "Uploaded updated meta/global record."); } catch (InterruptedException e) { Logger.error(LOG_TAG, "Uploading updated meta/global interrupted; continuing."); } } } + @VisibleForTesting + public static MetaGlobalDelegate makeMetaGlobalUploadDelegate(final SyncConfiguration config, final GlobalSessionCallback callback, final Object monitor) { + return new MetaGlobalDelegate() { + @Override + public void handleSuccess(MetaGlobal global, SyncStorageResponse response) { + Logger.info(LOG_TAG, "Successfully uploaded updated meta/global record."); + // Engine changes are stored as diffs, so update enabled engines in config to match uploaded meta/global. + config.enabledEngineNames = config.metaGlobal.getEnabledEngineNames(); + // Clear userSelectedEngines because they are updated in config and meta/global. + config.userSelectedEngines = null; + + synchronized (monitor) { + monitor.notify(); + } + } + + @Override + public void handleMissing(MetaGlobal global, SyncStorageResponse response) { + Logger.warn(LOG_TAG, "Got 404 missing uploading updated meta/global record; shouldn't happen. Ignoring."); + synchronized (monitor) { + monitor.notify(); + } + } + + @Override + public void handleFailure(SyncStorageResponse response) { + Logger.warn(LOG_TAG, "Failed to upload updated meta/global record; ignoring."); + + // If we encountered a concurrent modification while uploading meta/global, request that + // sync of all stages happens once we're done. + if (response.getStatusCode() == 412) { + callback.handleFullSyncNecessary(); + } + + synchronized (monitor) { + monitor.notify(); + } + } + + @Override + public void handleError(Exception e) { + Logger.warn(LOG_TAG, "Got exception trying to upload updated meta/global record; ignoring.", e); + synchronized (monitor) { + monitor.notify(); + } + } + }; + } + public void abort(Exception e, String reason) { Logger.warn(LOG_TAG, "Aborting sync: " + reason, e); cleanUp(); long existingBackoff = largestBackoffObserved.get(); if (existingBackoff > 0) { callback.requestBackoff(existingBackoff); } @@ -539,38 +562,38 @@ public class GlobalSession implements Ht } public void fetchInfoCollections(JSONRecordFetchDelegate callback) throws URISyntaxException { final JSONRecordFetcher fetcher = new JSONRecordFetcher(config.infoCollectionsURL(), getAuthHeaderProvider()); fetcher.fetch(callback); } /** - * Upload new crypto/keys. + * Upload new crypto/keys with X-If-Unmodified-Since=0 * * @param keys * new keys. * @param keyUploadDelegate * a delegate. 
*/ - public void uploadKeys(final CollectionKeys keys, + public void uploadKeys(final CollectionKeys keys, final long timestamp, final KeyUploadDelegate keyUploadDelegate) { SyncStorageRecordRequest request; try { request = new SyncStorageRecordRequest(this.config.keysURI()); } catch (URISyntaxException e) { keyUploadDelegate.onKeyUploadFailed(e); return; } request.delegate = new SyncStorageRequestDelegate() { @Override public String ifUnmodifiedSince() { - return null; + return Utils.millisecondsToDecimalSecondsString(timestamp); } @Override public void handleRequestSuccess(SyncStorageResponse response) { Logger.debug(LOG_TAG, "Keys uploaded."); BaseResource.consumeEntity(response); // We don't need the response at all. keyUploadDelegate.onKeysUploaded(); } @@ -704,36 +727,56 @@ public class GlobalSession implements Ht public void processMissingMetaGlobal(MetaGlobal global) { freshStart(); } /** * Do a fresh start then quietly finish the sync, starting another. */ public void freshStart() { - final GlobalSession globalSession = this; - freshStart(this, new FreshStartDelegate() { + freshStart(this, makeFreshStartDelegate(this)); + } + @VisibleForTesting + public static FreshStartDelegate makeFreshStartDelegate(final GlobalSession globalSession) { + return new FreshStartDelegate() { @Override public void onFreshStartFailed(Exception e) { - globalSession.abort(e, "Fresh start failed."); + if (!(e instanceof HTTPFailureException)) { + globalSession.abort(e, "Fresh start failed."); + return; + } + + if (((HTTPFailureException) e).response.getStatusCode() != 412) { + globalSession.abort(e, "Fresh start failed with non-412 status code."); + return; + } + + // In case of a concurrent modification during a fresh start, restart global session. + try { + // We are not persisting SyncConfiguration at this point; we can't be sure of its state. + globalSession.restart(); + } catch (AlreadySyncingException restartException) { + Logger.warn(LOG_TAG, "Got exception restarting sync after freshStart failure.", restartException); + globalSession.abort(restartException, "Got exception restarting sync after freshStart failure."); + } } @Override public void onFreshStart() { try { Logger.warn(LOG_TAG, "Fresh start succeeded; restarting global session."); globalSession.config.persistToPrefs(); globalSession.restart(); } catch (Exception e) { Logger.warn(LOG_TAG, "Got exception when restarting sync after freshStart.", e); globalSession.abort(e, "Got exception after freshStart."); } } - }); + }; } /** * Clean the server, aborting the current sync. * <p> * <ol> * <li>Wipe the server storage.</li> * <li>Reset all stages and purge cached state: (meta/global and crypto/keys records).</li> @@ -757,21 +800,21 @@ public class GlobalSession implements Ht session.resetAllStages(); session.config.purgeMetaGlobal(); session.config.purgeCryptoKeys(); session.config.persistToPrefs(); Logger.info(LOG_TAG, "Uploading new meta/global with sync ID " + mg.syncID + "."); - // It would be good to set the X-If-Unmodified-Since header to `timestamp` - // for this PUT to ensure at least some level of transactionality. - // Unfortunately, the servers don't support it after a wipe right now - // (bug 693893), so we're going to defer this until bug 692700. - mg.upload(new MetaGlobalDelegate() { + // During a fresh start, set X-I-U-S to 0 to ensure we don't race with other clients. + // Since we are performing a fresh start, we are asserting that meta/global was not uploaded + // by other clients. + // See Bug 1346438. 
+ mg.upload(0L, new MetaGlobalDelegate() { @Override public void handleSuccess(MetaGlobal uploadedGlobal, SyncStorageResponse uploadResponse) { Logger.info(LOG_TAG, "Uploaded new meta/global with sync ID " + uploadedGlobal.syncID + "."); // Generate new keys. CollectionKeys keys = null; try { keys = session.generateNewCryptoKeys(); @@ -779,19 +822,20 @@ public class GlobalSession implements Ht Logger.warn(LOG_TAG, "Got exception generating new keys; failing fresh start.", e); freshStartDelegate.onFreshStartFailed(e); } if (keys == null) { Logger.warn(LOG_TAG, "Got null keys from generateNewKeys; failing fresh start."); freshStartDelegate.onFreshStartFailed(null); } - // Upload new keys. + // Upload new keys. Assert that no other client uploaded keys yet by setting X-I-U-S to 0. + // See Bug 1346438. Logger.info(LOG_TAG, "Uploading new crypto/keys."); - session.uploadKeys(keys, new KeyUploadDelegate() { + session.uploadKeys(keys, 0L, new KeyUploadDelegate() { @Override public void onKeysUploaded() { Logger.info(LOG_TAG, "Uploaded new crypto/keys."); freshStartDelegate.onFreshStart(); } @Override public void onKeyUploadFailed(Exception e) {
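The common thread in the GlobalSession changes is conditional writes: meta/global and crypto/keys uploads now carry X-If-Unmodified-Since, so the server rejects the write with HTTP 412 when another client modified the collection first, and a fresh start asserts "nothing was uploaded yet" by sending 0. A rough standalone sketch of that request pattern, assuming plain HttpURLConnection rather than the Sync networking classes (the URL and payload are placeholders; the real code routes this through SyncStorageRecordRequest and the delegate's ifUnmodifiedSince()):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Locale;

final class ConditionalUpload {
    /**
     * PUT a record, asserting that the collection was last modified at
     * lastModifiedMillis (0 asserts "never modified", as a fresh start does).
     * Returns true on a 2xx response; a 412 means another client won the race
     * and the caller should re-download server state (the patch responds by
     * requesting a full follow-up sync).
     */
    static boolean putIfUnmodifiedSince(URL recordUrl, byte[] body, long lastModifiedMillis)
            throws Exception {
        HttpURLConnection conn = (HttpURLConnection) recordUrl.openConnection();
        conn.setRequestMethod("PUT");
        // The storage server expects the header value in decimal seconds.
        String seconds = String.format(Locale.US, "%.2f", lastModifiedMillis / 1000.0);
        conn.setRequestProperty("X-If-Unmodified-Since", seconds);
        conn.setDoOutput(true);
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body);
        }
        return conn.getResponseCode() / 100 == 2;
    }
}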
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/InfoCollections.java +++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/InfoCollections.java @@ -1,14 +1,16 @@ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package org.mozilla.gecko.sync; +import android.support.annotation.Nullable; + import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.mozilla.gecko.background.common.log.Logger; /** @@ -62,16 +64,17 @@ public class InfoCollections { /** * Return the timestamp for the given collection, or null if the timestamps * have not been fetched or the given collection does not have a timestamp. * * @param collection * The collection to inspect. * @return the timestamp in milliseconds since epoch. */ + @Nullable public Long getTimestamp(String collection) { if (timestamps == null) { return null; } return timestamps.get(collection); } /**
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/MetaGlobal.java +++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/MetaGlobal.java @@ -17,17 +17,17 @@ import org.mozilla.gecko.background.comm import org.mozilla.gecko.sync.MetaGlobalException.MetaGlobalMalformedSyncIDException; import org.mozilla.gecko.sync.MetaGlobalException.MetaGlobalMalformedVersionException; import org.mozilla.gecko.sync.delegates.MetaGlobalDelegate; import org.mozilla.gecko.sync.net.AuthHeaderProvider; import org.mozilla.gecko.sync.net.SyncStorageRecordRequest; import org.mozilla.gecko.sync.net.SyncStorageRequestDelegate; import org.mozilla.gecko.sync.net.SyncStorageResponse; -public class MetaGlobal implements SyncStorageRequestDelegate { +public class MetaGlobal { private static final String LOG_TAG = "MetaGlobal"; protected String metaURL; // Fields. protected ExtendedJSONObject engines; protected JSONArray declined; protected Long storageVersion; protected String syncID; @@ -49,29 +49,28 @@ public class MetaGlobal implements SyncS this.authHeaderProvider = authHeaderProvider; } public void fetch(MetaGlobalDelegate delegate) { this.callback = delegate; try { this.isUploading = false; SyncStorageRecordRequest r = new SyncStorageRecordRequest(this.metaURL); - r.delegate = this; + r.delegate = new MetaUploadDelegate(this, null); r.get(); } catch (URISyntaxException e) { this.callback.handleError(e); } } - public void upload(MetaGlobalDelegate callback) { + public void upload(long lastModifiedTimestamp, MetaGlobalDelegate callback) { try { this.isUploading = true; SyncStorageRecordRequest r = new SyncStorageRecordRequest(this.metaURL); - - r.delegate = this; + r.delegate = new MetaUploadDelegate(this, lastModifiedTimestamp); this.callback = callback; r.put(this.asCryptoRecord()); } catch (Exception e) { callback.handleError(e); } } protected ExtendedJSONObject asRecordContents() { @@ -314,35 +313,16 @@ public class MetaGlobal implements SyncS this.syncID = syncID; } // SyncStorageRequestDelegate methods for fetching. 
public String credentials() { return null; } - @Override - public AuthHeaderProvider getAuthHeaderProvider() { - return authHeaderProvider; - } - - @Override - public String ifUnmodifiedSince() { - return null; - } - - @Override - public void handleRequestSuccess(SyncStorageResponse response) { - if (this.isUploading) { - this.handleUploadSuccess(response); - } else { - this.handleDownloadSuccess(response); - } - } - private void handleUploadSuccess(SyncStorageResponse response) { this.callback.handleSuccess(this, response); } private void handleDownloadSuccess(SyncStorageResponse response) { if (response.wasSuccessful()) { try { CryptoRecord record = CryptoRecord.fromJSONRecord(response.jsonObjectBody()); @@ -351,22 +331,54 @@ public class MetaGlobal implements SyncS } catch (Exception e) { this.callback.handleError(e); } return; } this.callback.handleFailure(response); } - @Override - public void handleRequestFailure(SyncStorageResponse response) { - if (response.getStatusCode() == 404) { - this.callback.handleMissing(this, response); - return; + private static class MetaUploadDelegate implements SyncStorageRequestDelegate { + private final MetaGlobal metaGlobal; + private final Long ifUnmodifiedSinceTimestamp; + + /* package-local */ MetaUploadDelegate(final MetaGlobal metaGlobal, final Long ifUnmodifiedSinceTimestamp) { + this.metaGlobal = metaGlobal; + this.ifUnmodifiedSinceTimestamp = ifUnmodifiedSinceTimestamp; + } + + @Override + public AuthHeaderProvider getAuthHeaderProvider() { + return metaGlobal.authHeaderProvider; + } + + @Override + public String ifUnmodifiedSince() { + if (ifUnmodifiedSinceTimestamp == null) { + return null; + } + return Utils.millisecondsToDecimalSecondsString(ifUnmodifiedSinceTimestamp); } - this.callback.handleFailure(response); - } + + @Override + public void handleRequestSuccess(SyncStorageResponse response) { + if (metaGlobal.isUploading) { + metaGlobal.handleUploadSuccess(response); + } else { + metaGlobal.handleDownloadSuccess(response); + } + } - @Override - public void handleRequestError(Exception e) { - this.callback.handleError(e); + @Override + public void handleRequestFailure(SyncStorageResponse response) { + if (response.getStatusCode() == 404) { + metaGlobal.callback.handleMissing(metaGlobal, response); + return; + } + metaGlobal.callback.handleFailure(response); + } + + @Override + public void handleRequestError(Exception e) { + metaGlobal.callback.handleError(e); + } } }
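MetaUploadDelegate turns the nullable millisecond timestamp into the header value with Utils.millisecondsToDecimalSecondsString, which is not shown in this changeset. The sketch below is an assumption about that conversion (milliseconds rendered as decimal seconds with two digits, e.g. 1490093000123 becoming "1490093000.12"), included only to make the X-If-Unmodified-Since format concrete:

import java.math.BigDecimal;
import java.math.RoundingMode;

final class TimestampFormat {
    // Assumed behaviour, not copied from the patch: 1490093000123 -> "1490093000.12".
    static String millisecondsToDecimalSecondsString(long millis) {
        return BigDecimal.valueOf(millis)
                .movePointLeft(3)
                .setScale(2, RoundingMode.DOWN)
                .toPlainString();
    }
}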
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/delegates/GlobalSessionCallback.java
+++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/delegates/GlobalSessionCallback.java
@@ -34,16 +34,17 @@ public interface GlobalSessionCallback {
    */
  void informMigrated(GlobalSession session);
 
  void handleAborted(GlobalSession globalSession, String reason);
  void handleError(GlobalSession globalSession, Exception ex);
  void handleSuccess(GlobalSession globalSession);
  void handleStageCompleted(Stage currentState, GlobalSession globalSession);
  void handleIncompleteStage(Stage currentState, GlobalSession globalSession);
+ void handleFullSyncNecessary();
 
  /**
   * Called when a {@link GlobalSession} wants to know if it should continue
   * to make storage requests.
   *
   * @return false if the session should make no further requests.
   */
  boolean shouldBackOffStorage();
--- a/mobile/android/services/src/main/java/org/mozilla/gecko/sync/stage/FetchMetaGlobalStage.java +++ b/mobile/android/services/src/main/java/org/mozilla/gecko/sync/stage/FetchMetaGlobalStage.java @@ -53,27 +53,28 @@ public class FetchMetaGlobalStage extend @Override public void execute() throws NoSuchStageException { InfoCollections infoCollections = session.config.infoCollections; if (infoCollections == null) { session.abort(null, "No info/collections set in FetchMetaGlobalStage."); return; } - long lastModified = session.config.persistedMetaGlobal().lastModified(); + final long lastModified = session.config.persistedMetaGlobal().lastModified(); if (!infoCollections.updateNeeded(META_COLLECTION, lastModified)) { // Try to use our local collection keys for this session. Logger.info(LOG_TAG, "Trying to use persisted meta/global for this session."); MetaGlobal global = session.config.persistedMetaGlobal().metaGlobal(session.config.metaURL(), session.getAuthHeaderProvider()); if (global != null) { Logger.info(LOG_TAG, "Using persisted meta/global for this session."); session.processMetaGlobal(global); // Calls session.advance(). return; } Logger.info(LOG_TAG, "Failed to use persisted meta/global for this session."); } // We need an update: fetch or upload meta/global as necessary. + // We assert when we believe meta/global was last modified via X-I-U-S. Logger.info(LOG_TAG, "Fetching fresh meta/global for this session."); MetaGlobal global = new MetaGlobal(session.config.metaURL(), session.getAuthHeaderProvider()); global.fetch(new StageMetaGlobalDelegate(session)); } }
--- a/mobile/android/tests/background/junit3/src/org/mozilla/gecko/background/testhelpers/DefaultGlobalSessionCallback.java
+++ b/mobile/android/tests/background/junit3/src/org/mozilla/gecko/background/testhelpers/DefaultGlobalSessionCallback.java
@@ -30,16 +30,21 @@ public class DefaultGlobalSessionCallbac
   @Override
   public void handleIncompleteStage(Stage currentState, GlobalSession globalSession) {
 
   }
 
   @Override
+  public void handleFullSyncNecessary() {
+
+  }
+
+  @Override
   public void handleAborted(GlobalSession globalSession, String reason) {
 
   }
 
   @Override
   public void handleError(GlobalSession globalSession, Exception ex) {
 
   }
 
   @Override
--- a/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/net/test/TestGlobalSession.java +++ b/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/net/test/TestGlobalSession.java @@ -22,23 +22,26 @@ import org.mozilla.gecko.background.test import org.mozilla.gecko.background.testhelpers.MockPrefsGlobalSession; import org.mozilla.gecko.background.testhelpers.MockServerSyncStage; import org.mozilla.gecko.background.testhelpers.MockSharedPreferences; import org.mozilla.gecko.background.testhelpers.TestRunner; import org.mozilla.gecko.background.testhelpers.WaitHelper; import org.mozilla.gecko.sync.EngineSettings; import org.mozilla.gecko.sync.ExtendedJSONObject; import org.mozilla.gecko.sync.GlobalSession; +import org.mozilla.gecko.sync.HTTPFailureException; +import org.mozilla.gecko.sync.InfoCollections; import org.mozilla.gecko.sync.MetaGlobal; import org.mozilla.gecko.sync.NonObjectJSONException; import org.mozilla.gecko.sync.SyncConfiguration; import org.mozilla.gecko.sync.SyncConfigurationException; import org.mozilla.gecko.sync.Utils; import org.mozilla.gecko.sync.crypto.CryptoException; import org.mozilla.gecko.sync.crypto.KeyBundle; +import org.mozilla.gecko.sync.delegates.MetaGlobalDelegate; import org.mozilla.gecko.sync.net.BaseResource; import org.mozilla.gecko.sync.net.BasicAuthHeaderProvider; import org.mozilla.gecko.sync.net.SyncStorageResponse; import org.mozilla.gecko.sync.repositories.domain.VersionConstants; import org.mozilla.gecko.sync.stage.AndroidBrowserBookmarksServerSyncStage; import org.mozilla.gecko.sync.stage.GlobalSyncStage; import org.mozilla.gecko.sync.stage.GlobalSyncStage.Stage; import org.mozilla.gecko.sync.stage.NoSuchStageException; @@ -54,16 +57,17 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import static org.mockito.Mockito.mock; @RunWith(TestRunner.class) public class TestGlobalSession { private int TEST_PORT = HTTPServerTestHelper.getTestPort(); private final String TEST_CLUSTER_URL = "http://localhost:" + TEST_PORT; private final String TEST_USERNAME = "johndoe"; private final String TEST_PASSWORD = "password"; private final String TEST_SYNC_KEY = "abcdeabcdeabcdeabcdeabcdea"; @@ -386,16 +390,17 @@ public class TestGlobalSession { @Test public void testUploadUpdatedMetaGlobal() throws Exception { // Set up session with meta/global. final MockGlobalSessionCallback callback = new MockGlobalSessionCallback(); final GlobalSession session = MockPrefsGlobalSession.getSession(TEST_USERNAME, TEST_PASSWORD, new KeyBundle(TEST_USERNAME, TEST_SYNC_KEY), callback, null, null); session.config.metaGlobal = session.generateNewMetaGlobal(); + session.config.infoCollections = mock(InfoCollections.class); session.enginesToUpdate.clear(); // Set enabledEngines in meta/global, including a "new engine." 
String[] origEngines = new String[] { "bookmarks", "clients", "forms", "history", "tabs", "new-engine" }; ExtendedJSONObject origEnginesJSONObject = new ExtendedJSONObject(); for (String engineName : origEngines) { EngineSettings mockEngineSettings = new EngineSettings(Utils.generateGuid(), Integer.valueOf(0)); @@ -428,13 +433,58 @@ public class TestGlobalSession { expected.remove(name); } for (String name : toAdd) { expected.add(name); } assertEquals(expected, session.config.metaGlobal.getEnabledEngineNames()); } + @Test + public void testUploadMetaGlobalDelegate412() { + final Object monitor = new Object(); + final MockGlobalSessionCallback callback = new MockGlobalSessionCallback(); + MetaGlobalDelegate metaGlobalDelegate = GlobalSession.makeMetaGlobalUploadDelegate( + mock(SyncConfiguration.class), + callback, + monitor + ); + + metaGlobalDelegate.handleFailure(makeSyncStorageResponse(412)); + + assertTrue(callback.calledFullSyncNecessary); + } + + @Test + public void testUploadMetaGlobalDelegateNon412() { + final Object monitor = new Object(); + final MockGlobalSessionCallback callback = new MockGlobalSessionCallback(); + MetaGlobalDelegate metaGlobalDelegate = GlobalSession.makeMetaGlobalUploadDelegate( + mock(SyncConfiguration.class), + callback, + monitor + ); + + metaGlobalDelegate.handleFailure(makeSyncStorageResponse(400)); + + assertFalse(callback.calledFullSyncNecessary); + } + public void testStageAdvance() { assertEquals(GlobalSession.nextStage(Stage.idle), Stage.checkPreconditions); assertEquals(GlobalSession.nextStage(Stage.completed), Stage.idle); } + + public static HTTPFailureException makeHttpFailureException(int statusCode) { + return new HTTPFailureException(makeSyncStorageResponse(statusCode)); + } + + public static SyncStorageResponse makeSyncStorageResponse(int statusCode) { + // \\( >.<)// + return new SyncStorageResponse( + new BasicHttpResponse( + new BasicStatusLine( + new ProtocolVersion("HTTP", 1, 1), statusCode, null + ) + ) + ); + } }
--- a/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/net/test/TestMetaGlobal.java +++ b/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/net/test/TestMetaGlobal.java @@ -291,17 +291,17 @@ public class TestMetaGlobal { } public MockMetaGlobalFetchDelegate doUpload(final MetaGlobal global) { final MockMetaGlobalFetchDelegate delegate = new MockMetaGlobalFetchDelegate(); WaitHelper.getTestWaiter().performWait(WaitHelper.onThreadRunnable(new Runnable() { @Override public void run() { - global.upload(delegate); + global.upload(0L, delegate); } })); return delegate; } @Test public void testUpload() {
--- a/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/test/helpers/MockGlobalSessionCallback.java +++ b/mobile/android/tests/background/junit4/src/org/mozilla/android/sync/test/helpers/MockGlobalSessionCallback.java @@ -4,16 +4,17 @@ package org.mozilla.android.sync.test.helpers; import org.mozilla.gecko.background.testhelpers.WaitHelper; import org.mozilla.gecko.sync.GlobalSession; import org.mozilla.gecko.sync.delegates.GlobalSessionCallback; import org.mozilla.gecko.sync.stage.GlobalSyncStage.Stage; import java.net.URI; +import java.util.ArrayList; import static org.junit.Assert.assertEquals; /** * A callback for use with a GlobalSession that records what happens for later * inspection. * * This callback is expected to be used from within the friendly confines of a @@ -30,16 +31,18 @@ public class MockGlobalSessionCallback i public Exception calledErrorException = null; public boolean calledAborted = false; public boolean calledRequestBackoff = false; public boolean calledInformUnauthorizedResponse = false; public boolean calledInformUpgradeRequiredResponse = false; public boolean calledInformMigrated = false; public URI calledInformUnauthorizedResponseClusterURL = null; public long weaveBackoff = -1; + public boolean calledFullSyncNecessary = false; + public ArrayList<String> incompleteStages = new ArrayList<>(); @Override public void handleSuccess(GlobalSession globalSession) { this.calledSuccess = true; assertEquals(0, this.stageCounter); this.testWaiter().performNotify(); } @@ -54,17 +57,22 @@ public class MockGlobalSessionCallback i this.calledError = true; this.calledErrorException = ex; this.testWaiter().performNotify(); } @Override public void handleIncompleteStage(Stage currentState, GlobalSession globalSession) { + this.incompleteStages.add(currentState.getRepositoryName()); + } + @Override + public void handleFullSyncNecessary() { + this.calledFullSyncNecessary = true; } @Override public void handleStageCompleted(Stage currentState, GlobalSession globalSession) { stageCounter--; }
--- a/mobile/android/tests/background/junit4/src/org/mozilla/gecko/background/testhelpers/DefaultGlobalSessionCallback.java +++ b/mobile/android/tests/background/junit4/src/org/mozilla/gecko/background/testhelpers/DefaultGlobalSessionCallback.java @@ -46,12 +46,17 @@ public class DefaultGlobalSessionCallbac @Override public void handleIncompleteStage(Stage currentState, GlobalSession globalSession) { } @Override + public void handleFullSyncNecessary() { + + } + + @Override public boolean shouldBackOffStorage() { return false; } }
--- a/mobile/android/tests/background/junit4/src/org/mozilla/gecko/sync/stage/test/TestFetchMetaGlobalStage.java +++ b/mobile/android/tests/background/junit4/src/org/mozilla/gecko/sync/stage/test/TestFetchMetaGlobalStage.java @@ -4,50 +4,59 @@ package org.mozilla.gecko.sync.stage.test; import android.os.SystemClock; import org.json.simple.JSONArray; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; +import org.mozilla.android.sync.net.test.TestGlobalSession; import org.mozilla.android.sync.net.test.TestMetaGlobal; import org.mozilla.android.sync.test.helpers.HTTPServerTestHelper; import org.mozilla.android.sync.test.helpers.MockGlobalSessionCallback; import org.mozilla.android.sync.test.helpers.MockServer; import org.mozilla.gecko.background.testhelpers.MockGlobalSession; import org.mozilla.gecko.background.testhelpers.TestRunner; import org.mozilla.gecko.background.testhelpers.WaitHelper; import org.mozilla.gecko.sync.AlreadySyncingException; import org.mozilla.gecko.sync.CollectionKeys; import org.mozilla.gecko.sync.CryptoRecord; import org.mozilla.gecko.sync.ExtendedJSONObject; import org.mozilla.gecko.sync.GlobalSession; +import org.mozilla.gecko.sync.HTTPFailureException; import org.mozilla.gecko.sync.InfoCollections; import org.mozilla.gecko.sync.MetaGlobal; import org.mozilla.gecko.sync.NonObjectJSONException; import org.mozilla.gecko.sync.SyncConfigurationException; import org.mozilla.gecko.sync.crypto.CryptoException; import org.mozilla.gecko.sync.crypto.KeyBundle; import org.mozilla.gecko.sync.delegates.FreshStartDelegate; +import org.mozilla.gecko.sync.delegates.GlobalSessionCallback; import org.mozilla.gecko.sync.delegates.KeyUploadDelegate; import org.mozilla.gecko.sync.delegates.WipeServerDelegate; import org.mozilla.gecko.sync.net.AuthHeaderProvider; +import org.mozilla.gecko.sync.net.SyncStorageResponse; import org.mozilla.gecko.sync.stage.FetchMetaGlobalStage; import org.mozilla.gecko.sync.stage.GlobalSyncStage.Stage; import org.simpleframework.http.Request; import org.simpleframework.http.Response; import java.io.IOException; import java.net.URI; import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import ch.boye.httpclientandroidlib.HttpResponse; +import ch.boye.httpclientandroidlib.ProtocolVersion; +import ch.boye.httpclientandroidlib.message.BasicHttpResponse; +import ch.boye.httpclientandroidlib.message.BasicStatusLine; + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @RunWith(TestRunner.class) public class TestFetchMetaGlobalStage { @SuppressWarnings("unused") private static final String LOG_TAG = "TestMetaGlobalStage"; @@ -64,103 +73,115 @@ public class TestFetchMetaGlobalStage { private final String TEST_INFO_COLLECTIONS_JSON = "{}"; private static final String TEST_SYNC_ID = "testSyncID"; private static final long TEST_STORAGE_VERSION = GlobalSession.STORAGE_VERSION; private InfoCollections infoCollections; private KeyBundle syncKeyBundle; private MockGlobalSessionCallback callback; - private GlobalSession session; - - private boolean calledRequiresUpgrade = false; - private boolean calledProcessMissingMetaGlobal = false; - private boolean calledFreshStart = false; - private boolean calledWipeServer = false; - private boolean calledUploadKeys = false; - private boolean calledResetAllStages = false; + private LocalMockGlobalSession session; private static void 
assertSameContents(JSONArray expected, Set<String> actual) { assertEquals(expected.size(), actual.size()); for (Object o : expected) { assertTrue(actual.contains(o)); } } + private class LocalMockGlobalSession extends MockGlobalSession { + private boolean calledRequiresUpgrade = false; + private boolean calledProcessMissingMetaGlobal = false; + private boolean calledFreshStart = false; + private boolean calledWipeServer = false; + private boolean calledUploadKeys = false; + private boolean calledResetAllStages = false; + private boolean calledRestart = false; + private boolean calledAbort = false; + + public LocalMockGlobalSession(String username, String password, KeyBundle keyBundle, GlobalSessionCallback callback) throws SyncConfigurationException, IllegalArgumentException, NonObjectJSONException, IOException { + super(username, password, keyBundle, callback); + } + + @Override + protected void prepareStages() { + super.prepareStages(); + withStage(Stage.fetchMetaGlobal, new FetchMetaGlobalStage()); + } + + @Override + public void requiresUpgrade() { + calledRequiresUpgrade = true; + this.abort(null, "Requires upgrade"); + } + + @Override + public void processMissingMetaGlobal(MetaGlobal mg) { + calledProcessMissingMetaGlobal = true; + this.abort(null, "Missing meta/global"); + } + + // Don't really uploadKeys. + @Override + public void uploadKeys(CollectionKeys keys, long lastModified, KeyUploadDelegate keyUploadDelegate) { + calledUploadKeys = true; + keyUploadDelegate.onKeysUploaded(); + } + + // On fresh start completed, just stop. + @Override + public void freshStart() { + calledFreshStart = true; + freshStart(this, new FreshStartDelegate() { + @Override + public void onFreshStartFailed(Exception e) { + WaitHelper.getTestWaiter().performNotify(e); + } + + @Override + public void onFreshStart() { + WaitHelper.getTestWaiter().performNotify(); + } + }); + } + + // Don't really wipeServer. + @Override + protected void wipeServer(final AuthHeaderProvider authHeaderProvider, final WipeServerDelegate wipeDelegate) { + calledWipeServer = true; + wipeDelegate.onWiped(System.currentTimeMillis()); + } + + @Override + protected void restart() throws AlreadySyncingException { + calledRestart = true; + WaitHelper.getTestWaiter().performNotify(); + } + + @Override + public void abort(Exception e, String reason) { + calledAbort = true; + super.abort(e, reason); + } + + // Don't really resetAllStages. + @Override + public void resetAllStages() { + calledResetAllStages = true; + } + } + @Before public void setUp() throws Exception { - calledRequiresUpgrade = false; - calledProcessMissingMetaGlobal = false; - calledFreshStart = false; - calledWipeServer = false; - calledUploadKeys = false; - calledResetAllStages = false; - // Set info collections to not have crypto. infoCollections = new InfoCollections(new ExtendedJSONObject(TEST_INFO_COLLECTIONS_JSON)); syncKeyBundle = new KeyBundle(TEST_USERNAME, TEST_SYNC_KEY); callback = new MockGlobalSessionCallback(); - session = new MockGlobalSession(TEST_USERNAME, TEST_PASSWORD, - syncKeyBundle, callback) { - @Override - protected void prepareStages() { - super.prepareStages(); - withStage(Stage.fetchMetaGlobal, new FetchMetaGlobalStage()); - } - - @Override - public void requiresUpgrade() { - calledRequiresUpgrade = true; - this.abort(null, "Requires upgrade"); - } - - @Override - public void processMissingMetaGlobal(MetaGlobal mg) { - calledProcessMissingMetaGlobal = true; - this.abort(null, "Missing meta/global"); - } - - // Don't really uploadKeys. 
- @Override - public void uploadKeys(CollectionKeys keys, KeyUploadDelegate keyUploadDelegate) { - calledUploadKeys = true; - keyUploadDelegate.onKeysUploaded(); - } - - // On fresh start completed, just stop. - @Override - public void freshStart() { - calledFreshStart = true; - freshStart(this, new FreshStartDelegate() { - @Override - public void onFreshStartFailed(Exception e) { - WaitHelper.getTestWaiter().performNotify(e); - } - - @Override - public void onFreshStart() { - WaitHelper.getTestWaiter().performNotify(); - } - }); - } - - // Don't really wipeServer. - @Override - protected void wipeServer(final AuthHeaderProvider authHeaderProvider, final WipeServerDelegate wipeDelegate) { - calledWipeServer = true; - wipeDelegate.onWiped(System.currentTimeMillis()); - } - - // Don't really resetAllStages. - @Override - public void resetAllStages() { - calledResetAllStages = true; - } - }; + session = new LocalMockGlobalSession(TEST_USERNAME, TEST_PASSWORD, syncKeyBundle, callback); session.config.setClusterURL(new URI(TEST_CLUSTER_URL)); session.config.infoCollections = infoCollections; } protected void doSession(MockServer server) { data.startHTTPServer(server); WaitHelper.getTestWaiter().performWait(WaitHelper.onThreadRunnable(new Runnable() { @Override @@ -180,17 +201,17 @@ public class TestFetchMetaGlobalStage { MetaGlobal mg = new MetaGlobal(null, null); mg.setSyncID(TEST_SYNC_ID); mg.setStorageVersion(Long.valueOf(TEST_STORAGE_VERSION + 1)); MockServer server = new MockServer(200, mg.asCryptoRecord().toJSONString()); doSession(server); assertEquals(true, callback.calledError); - assertTrue(calledRequiresUpgrade); + assertTrue(session.calledRequiresUpgrade); } @SuppressWarnings("unchecked") private JSONArray makeTestDeclinedArray() { final JSONArray declined = new JSONArray(); declined.add("foobar"); return declined; } @@ -212,18 +233,18 @@ public class TestFetchMetaGlobalStage { // Set declined engines in the server object. final JSONArray testingDeclinedEngines = makeTestDeclinedArray(); mg.setDeclinedEngineNames(testingDeclinedEngines); MockServer server = new MockServer(200, mg.asCryptoRecord().toJSONString()); doSession(server); assertTrue(callback.calledSuccess); - assertFalse(calledProcessMissingMetaGlobal); - assertFalse(calledResetAllStages); + assertFalse(session.calledProcessMissingMetaGlobal); + assertFalse(session.calledResetAllStages); assertEquals(TEST_SYNC_ID, session.config.metaGlobal.getSyncID()); assertEquals(TEST_STORAGE_VERSION, session.config.metaGlobal.getStorageVersion().longValue()); assertEquals(TEST_SYNC_ID, session.config.syncID); // Declined engines propagate from the server meta/global. final Set<String> actual = session.config.metaGlobal.getDeclinedEngineNames(); assertSameContents(testingDeclinedEngines, actual); } @@ -245,18 +266,18 @@ public class TestFetchMetaGlobalStage { // Set declined engines in the server object. 
final JSONArray testingDeclinedEngines = makeTestDeclinedArray(); mg.setDeclinedEngineNames(testingDeclinedEngines); MockServer server = new MockServer(200, mg.asCryptoRecord().toJSONString()); doSession(server); assertEquals(true, callback.calledSuccess); - assertFalse(calledProcessMissingMetaGlobal); - assertTrue(calledResetAllStages); + assertFalse(session.calledProcessMissingMetaGlobal); + assertTrue(session.calledResetAllStages); assertEquals(TEST_SYNC_ID, session.config.metaGlobal.getSyncID()); assertEquals(TEST_STORAGE_VERSION, session.config.metaGlobal.getStorageVersion().longValue()); assertEquals(TEST_SYNC_ID, session.config.syncID); // Declined engines propagate from the server meta/global. final Set<String> actual = session.config.metaGlobal.getDeclinedEngineNames(); assertSameContents(testingDeclinedEngines, actual); } @@ -294,41 +315,41 @@ public class TestFetchMetaGlobalStage { } @Test public void testFetchMissing() throws Exception { MockServer server = new MockServer(404, "missing"); doSession(server); assertEquals(true, callback.calledError); - assertTrue(calledProcessMissingMetaGlobal); + assertTrue(session.calledProcessMissingMetaGlobal); } /** * Empty payload object has no syncID or storageVersion and should call freshStart. * @throws Exception */ @Test public void testFetchEmptyPayload() throws Exception { MockServer server = new MockServer(200, TestMetaGlobal.TEST_META_GLOBAL_EMPTY_PAYLOAD_RESPONSE); doSession(server); - assertTrue(calledFreshStart); + assertTrue(session.calledFreshStart); } /** * No payload means no syncID or storageVersion and therefore we should call freshStart. * @throws Exception */ @Test public void testFetchNoPayload() throws Exception { MockServer server = new MockServer(200, TestMetaGlobal.TEST_META_GLOBAL_NO_PAYLOAD_RESPONSE); doSession(server); - assertTrue(calledFreshStart); + assertTrue(session.calledFreshStart); } /** * Malformed payload is a server response issue, not a meta/global record * issue. This should error out of the sync. * @throws Exception */ @Test @@ -379,16 +400,67 @@ public class TestFetchMetaGlobalStage { // We shouldn't be trying to download anything after uploading meta/global. 
mgDownloaded.set(true); } this.handle(request, response, 404, "missing"); } }; doFreshStart(server); - assertTrue(this.calledFreshStart); - assertTrue(this.calledWipeServer); - assertTrue(this.calledUploadKeys); + assertTrue(session.calledFreshStart); + assertTrue(session.calledWipeServer); + assertTrue(session.calledUploadKeys); assertTrue(mgUploaded.get()); assertFalse(mgDownloaded.get()); assertEquals(GlobalSession.STORAGE_VERSION, uploadedMg.getStorageVersion().longValue()); } + + @Test + public void testFreshStartDelegateSuccess() { + final FreshStartDelegate freshStartDelegate = GlobalSession.makeFreshStartDelegate(session); + + WaitHelper.getTestWaiter().performWait(WaitHelper.onThreadRunnable( + new Runnable() { + @Override + public void run() { + freshStartDelegate.onFreshStart(); + } + } + )); + + assertTrue(session.calledRestart); + assertFalse(session.calledAbort); + } + + @Test + public void testFreshStartDelegate412() { + final FreshStartDelegate freshStartDelegate = GlobalSession.makeFreshStartDelegate(session); + + WaitHelper.getTestWaiter().performWait(WaitHelper.onThreadRunnable( + new Runnable() { + @Override + public void run() { + freshStartDelegate.onFreshStartFailed(TestGlobalSession.makeHttpFailureException(412)); + } + } + )); + + assertTrue(session.calledRestart); + assertFalse(session.calledAbort); + } + + @Test + public void testFreshStartDelegateNon412() { + final FreshStartDelegate freshStartDelegate = GlobalSession.makeFreshStartDelegate(session); + + WaitHelper.getTestWaiter().performWait(WaitHelper.onThreadRunnable( + new Runnable() { + @Override + public void run() { + freshStartDelegate.onFreshStartFailed(TestGlobalSession.makeHttpFailureException(400)); + } + } + )); + + assertFalse(session.calledRestart); + assertTrue(session.calledAbort); + } }
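Reviewer note: the three new fresh-start delegate tests encode the expected control flow — a successful fresh start restarts the sync, a 412 failure (another client won the race to upload meta/global) also restarts rather than aborts, and any other failure aborts. A rough Java sketch of that branching, assuming restart and abort are the only two outcomes (names are hypothetical, the real logic lives in GlobalSession.makeFreshStartDelegate):

    // Illustrative sketch only; not the actual delegate implementation.
    public final class FreshStartOutcome {
        enum Action { RESTART, ABORT }

        // Success, or a 412 race on the meta/global upload, leads to a
        // restart that picks up the server's state; anything else aborts.
        static Action onFreshStartFinished(boolean succeeded, int statusCode) {
            if (succeeded || statusCode == 412) {
                return Action.RESTART;
            }
            return Action.ABORT;
        }

        public static void main(String[] args) {
            assert onFreshStartFinished(true, 0) == Action.RESTART;
            assert onFreshStartFinished(false, 412) == Action.RESTART;
            assert onFreshStartFinished(false, 400) == Action.ABORT;
        }
    }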
--- a/netwerk/protocol/http/nsHttpChannel.cpp +++ b/netwerk/protocol/http/nsHttpChannel.cpp @@ -5871,16 +5871,94 @@ nsHttpChannel::AsyncOpen(nsIStreamListen if (NS_FAILED(rv)) { CloseCacheEntry(false); Unused << AsyncAbort(rv); } return NS_OK; } +namespace { + +class InitLocalBlockListXpcCallback final : public nsIURIClassifierCallback { +public: + using CallbackType = nsHttpChannel::InitLocalBlockListCallback; + + explicit InitLocalBlockListXpcCallback(const CallbackType& aCallback) + : mCallback(aCallback) + { + } + + NS_DECL_ISUPPORTS + NS_DECL_NSIURICLASSIFIERCALLBACK + +private: + ~InitLocalBlockListXpcCallback() = default; + + CallbackType mCallback; +}; + +NS_IMPL_ISUPPORTS(InitLocalBlockListXpcCallback, nsIURIClassifierCallback) + +/*virtual*/ nsresult +InitLocalBlockListXpcCallback::OnClassifyComplete(nsresult /*aErrorCode*/, + const nsACString& aLists, // Only this matters. + const nsACString& /*aProvider*/, + const nsACString& /*aPrefix*/) +{ + bool localBlockList = !aLists.IsEmpty(); + mCallback(localBlockList); + return NS_OK; +} + +} // end of unnamed namespace/ + +bool +nsHttpChannel::InitLocalBlockList(const InitLocalBlockListCallback& aCallback) +{ + mLocalBlocklist = false; + + if (!(mLoadFlags & LOAD_CLASSIFY_URI)) { + return false; + } + + // Check to see if this principal exists on local blocklists. + nsCOMPtr<nsIURIClassifier> classifier = do_GetService(NS_URICLASSIFIERSERVICE_CONTRACTID); + RefPtr<nsChannelClassifier> channelClassifier = new nsChannelClassifier(this); + bool tpEnabled = false; + channelClassifier->ShouldEnableTrackingProtection(&tpEnabled); + if (!classifier || !tpEnabled) { + return false; + } + + // We skip speculative connections by setting mLocalBlocklist only + // when tracking protection is enabled. Though we could do this for + // both phishing and malware, it is not necessary for correctness, + // since no network events will be received while the + // nsChannelClassifier is in progress. See bug 1122691. + nsCOMPtr<nsIURI> uri; + nsresult rv = GetURI(getter_AddRefs(uri)); + if (NS_FAILED(rv) || !uri) { + return false; + } + + nsAutoCString tables; + Preferences::GetCString("urlclassifier.trackingTable", &tables); + nsTArray<nsCString> results; + + RefPtr<InitLocalBlockListXpcCallback> xpcCallback + = new InitLocalBlockListXpcCallback(aCallback); + rv = classifier->AsyncClassifyLocalWithTables(uri, tables, xpcCallback); + if (NS_FAILED(rv)) { + return false; + } + + return true; +} + NS_IMETHODIMP nsHttpChannel::AsyncOpen2(nsIStreamListener *aListener) { nsCOMPtr<nsIStreamListener> listener = aListener; nsresult rv = nsContentSecurityManager::doContentSecurityCheck(this, listener); if (NS_WARN_IF(NS_FAILED(rv))) { ReleaseListeners(); return rv; @@ -6025,86 +6103,28 @@ nsHttpChannel::BeginConnect() SetLoadGroupUserAgentOverride(); // Check to see if we should redirect this channel elsewhere by // nsIHttpChannel.redirectTo API request if (mAPIRedirectToURI) { return AsyncCall(&nsHttpChannel::HandleAsyncAPIRedirect); } - // Check to see if this principal exists on local blocklists. - RefPtr<nsChannelClassifier> channelClassifier = new nsChannelClassifier(this); - if (mLoadFlags & LOAD_CLASSIFY_URI) { - nsCOMPtr<nsIURIClassifier> classifier = do_GetService(NS_URICLASSIFIERSERVICE_CONTRACTID); - bool tpEnabled = false; - channelClassifier->ShouldEnableTrackingProtection(&tpEnabled); - if (classifier && tpEnabled) { - // We skip speculative connections by setting mLocalBlocklist only - // when tracking protection is enabled. 
Though we could do this for - // both phishing and malware, it is not necessary for correctness, - // since no network events will be received while the - // nsChannelClassifier is in progress. See bug 1122691. - - // We cannot check the entity whitelist here (IsTrackerWhitelisted()) - // because that method is asynchronous and we need to run - // synchronously here. - // See https://bugzilla.mozilla.org/show_bug.cgi?id=1100024#c2. - nsCOMPtr<nsIURI> uri; - rv = GetURI(getter_AddRefs(uri)); - if (NS_SUCCEEDED(rv) && uri) { - nsAutoCString tables; - Preferences::GetCString("urlclassifier.trackingTable", &tables); - nsTArray<nsCString> results; - rv = classifier->ClassifyLocalWithTables(uri, tables, results); - if (NS_SUCCEEDED(rv) && !results.IsEmpty()) { - LOG(("nsHttpChannel::ClassifyLocalWithTables found " - "uri on local tracking blocklist [this=%p]", - this)); - mLocalBlocklist = true; - } else { - LOG(("nsHttpChannel::ClassifyLocalWithTables no result " - "found [this=%p]", this)); - } - } - } - } // If mTimingEnabled flag is not set after OnModifyRequest() then // clear the already recorded AsyncOpen value for consistency. if (!mTimingEnabled) mAsyncOpenTime = TimeStamp(); // if this somehow fails we can go on without it Unused << gHttpHandler->AddConnectionHeader(&mRequestHead, mCaps); if (mLoadFlags & VALIDATE_ALWAYS || BYPASS_LOCAL_CACHE(mLoadFlags)) mCaps |= NS_HTTP_REFRESH_DNS; - if (!mLocalBlocklist && !mConnectionInfo->UsingHttpProxy() && - !(mLoadFlags & (LOAD_NO_NETWORK_IO | LOAD_ONLY_FROM_CACHE))) { - // Start a DNS lookup very early in case the real open is queued the DNS can - // happen in parallel. Do not do so in the presence of an HTTP proxy as - // all lookups other than for the proxy itself are done by the proxy. - // Also we don't do a lookup if the LOAD_NO_NETWORK_IO or - // LOAD_ONLY_FROM_CACHE flags are set. - // - // We keep the DNS prefetch object around so that we can retrieve - // timing information from it. There is no guarantee that we actually - // use the DNS prefetch data for the real connection, but as we keep - // this data around for 3 minutes by default, this should almost always - // be correct, and even when it isn't, the timing still represents _a_ - // valid DNS lookup timing for the site, even if it is not _the_ - // timing we used. - LOG(("nsHttpChannel::BeginConnect [this=%p] prefetching%s\n", - this, mCaps & NS_HTTP_REFRESH_DNS ? ", refresh requested" : "")); - mDNSPrefetch = new nsDNSPrefetch(mURI, originAttributes, - this, mTimingEnabled); - mDNSPrefetch->PrefetchHigh(mCaps & NS_HTTP_REFRESH_DNS); - } - // Adjust mCaps according to our request headers: // - If "Connection: close" is set as a request header, then do not bother // trying to establish a keep-alive connection. if (mRequestHead.HasHeaderValue(nsHttp::Connection, "close")) mCaps &= ~(NS_HTTP_ALLOW_KEEPALIVE); if (gHttpHandler->CriticalRequestPrioritization()) { if (mClassOfService & nsIClassOfService::Leader) { @@ -6147,35 +6167,95 @@ nsHttpChannel::BeginConnect() throttler->AddChannel(this); } } if (!(mLoadFlags & LOAD_CLASSIFY_URI)) { return ContinueBeginConnectWithResult(); } + // We are about to do a async lookup to check if the URI is a + // tracker. The result will be delivered along with the callback. + // Chances are the lookup is not needed so InitLocalBlockList() + // will return false and then we can BeginConnectActual() right away. 
+ RefPtr<nsHttpChannel> self = this; + bool willCallback = InitLocalBlockList([self](bool aLocalBlockList) -> void { + self->mLocalBlocklist = aLocalBlockList; + nsresult rv = self->BeginConnectActual(); + if (NS_FAILED(rv)) { + // Since this error is thrown asynchronously so that the caller + // of BeginConnect() will not do clean up for us. We have to do + // it on our own. + self->CloseCacheEntry(false); + Unused << self->AsyncAbort(rv); + } + }); + + if (!willCallback) { + // We can do BeginConnectActual immediately if mLocalBlockList is initialized + // synchronously. Note that we don't need to handle the failure because + // BeginConnect() will return synchronously and the caller will be responsible + // for handling it. + return BeginConnectActual(); + } + + return NS_OK; +} + +nsresult +nsHttpChannel::BeginConnectActual() +{ + if (mCanceled) { + return mStatus; + } + + if (!mLocalBlocklist && !mConnectionInfo->UsingHttpProxy() && + !(mLoadFlags & (LOAD_NO_NETWORK_IO | LOAD_ONLY_FROM_CACHE))) { + // Start a DNS lookup very early in case the real open is queued the DNS can + // happen in parallel. Do not do so in the presence of an HTTP proxy as + // all lookups other than for the proxy itself are done by the proxy. + // Also we don't do a lookup if the LOAD_NO_NETWORK_IO or + // LOAD_ONLY_FROM_CACHE flags are set. + // + // We keep the DNS prefetch object around so that we can retrieve + // timing information from it. There is no guarantee that we actually + // use the DNS prefetch data for the real connection, but as we keep + // this data around for 3 minutes by default, this should almost always + // be correct, and even when it isn't, the timing still represents _a_ + // valid DNS lookup timing for the site, even if it is not _the_ + // timing we used. + LOG(("nsHttpChannel::BeginConnect [this=%p] prefetching%s\n", + this, mCaps & NS_HTTP_REFRESH_DNS ? ", refresh requested" : "")); + OriginAttributes originAttributes; + NS_GetOriginAttributes(this, originAttributes); + mDNSPrefetch = new nsDNSPrefetch(mURI, originAttributes, + this, mTimingEnabled); + mDNSPrefetch->PrefetchHigh(mCaps & NS_HTTP_REFRESH_DNS); + } + // mLocalBlocklist is true only if tracking protection is enabled and the // URI is a tracking domain, it makes no guarantees about phishing or // malware, so if LOAD_CLASSIFY_URI is true we must call // nsChannelClassifier to catch phishing and malware URIs. bool callContinueBeginConnect = true; if (!mLocalBlocklist) { // Here we call ContinueBeginConnectWithResult and not // ContinueBeginConnect so that in the case of an error we do not start // channelClassifier. - rv = ContinueBeginConnectWithResult(); + nsresult rv = ContinueBeginConnectWithResult(); if (NS_FAILED(rv)) { return rv; } callContinueBeginConnect = false; } // nsChannelClassifier calls ContinueBeginConnect if it has not already // been called, after optionally cancelling the channel once we have a // remote verdict. We call a concrete class instead of an nsI* that might // be overridden. + RefPtr<nsChannelClassifier> channelClassifier = new nsChannelClassifier(this); LOG(("nsHttpChannel::Starting nsChannelClassifier %p [this=%p]", channelClassifier.get(), this)); channelClassifier->Start(); if (callContinueBeginConnect) { return ContinueBeginConnectWithResult(); } return NS_OK; }
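Reviewer note: BeginConnect() used to run the local tracking-protection blocklist lookup synchronously; it now hands the lookup a callback (std::function<void(bool)>) and moves DNS prefetch, speculative connection and the classifier start into BeginConnectActual(), which runs either immediately (when no lookup is needed) or later from the classifier callback. The same deferral pattern, sketched in Java with a Consumer<Boolean> purely for illustration (this is not Gecko code, and all names are hypothetical):

    import java.util.function.Consumer;

    // Illustrative sketch of the deferral pattern only.
    public final class DeferredConnect {
        private boolean localBlocklist = false;

        // Stand-in for the classifier: returns true if it will invoke the
        // callback later, false if no asynchronous lookup is needed.
        private boolean initLocalBlockList(Consumer<Boolean> callback) {
            return false; // assume "no lookup needed" for this sketch
        }

        void beginConnect() {
            boolean willCallback = initLocalBlockList(isTracker -> {
                localBlocklist = isTracker;
                beginConnectActual(); // runs once classification finishes
            });
            if (!willCallback) {
                beginConnectActual(); // no lookup needed; connect right away
            }
        }

        void beginConnectActual() {
            // DNS prefetch / speculative connection would start here,
            // skipped when localBlocklist is true.
            System.out.println("connect, localBlocklist=" + localBlocklist);
        }

        public static void main(String[] args) {
            new DeferredConnect().beginConnect();
        }
    }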
--- a/netwerk/protocol/http/nsHttpChannel.h +++ b/netwerk/protocol/http/nsHttpChannel.h @@ -183,16 +183,18 @@ public: AddSecurityMessage(const nsAString& aMessageTag, const nsAString& aMessageCategory) override; void SetWarningReporter(HttpChannelSecurityWarningReporter* aReporter) { mWarningReporter = aReporter; } public: /* internal necko use only */ + using InitLocalBlockListCallback = std::function<void(bool)>; + void InternalSetUploadStream(nsIInputStream *uploadStream) { mUploadStream = uploadStream; } void SetUploadStreamHasHeaders(bool hasHeaders) { mUploadStreamHasHeaders = hasHeaders; } MOZ_MUST_USE nsresult SetReferrerWithPolicyInternal(nsIURI *referrer, uint32_t referrerPolicy) { nsAutoCString spec; @@ -288,17 +290,28 @@ public: protected: virtual ~nsHttpChannel(); private: typedef nsresult (nsHttpChannel::*nsContinueRedirectionFunc)(nsresult result); bool RequestIsConditional(); - MOZ_MUST_USE nsresult BeginConnect(); + + // Connections will only be established in this function. + // (including DNS prefetch and speculative connection.) + nsresult BeginConnectActual(); + + // We might synchronously or asynchronously call BeginConnectActual, + // which includes DNS prefetch and speculative connection, according to + // whether an async tracker lookup is required. If the tracker lookup + // is required, this function will just return NS_OK and BeginConnectActual() + // will be called from the callback. See Bug 1325054 for more information. + nsresult BeginConnect(); + MOZ_MUST_USE nsresult ContinueBeginConnectWithResult(); void ContinueBeginConnect(); MOZ_MUST_USE nsresult Connect(); void SpeculativeConnect(); MOZ_MUST_USE nsresult SetupTransaction(); void SetupTransactionRequestContext(); MOZ_MUST_USE nsresult CallOnStartRequest(); MOZ_MUST_USE nsresult ProcessResponse(); @@ -321,16 +334,18 @@ private: MOZ_MUST_USE nsresult EnsureAssocReq(); void ProcessSSLInformation(); bool IsHTTPS(); MOZ_MUST_USE nsresult ContinueOnStartRequest1(nsresult); MOZ_MUST_USE nsresult ContinueOnStartRequest2(nsresult); MOZ_MUST_USE nsresult ContinueOnStartRequest3(nsresult); + bool InitLocalBlockList(const InitLocalBlockListCallback& aCallback); + // redirection specific methods void HandleAsyncRedirect(); void HandleAsyncAPIRedirect(); MOZ_MUST_USE nsresult ContinueHandleAsyncRedirect(nsresult); void HandleAsyncNotModified(); void HandleAsyncFallback(); MOZ_MUST_USE nsresult ContinueHandleAsyncFallback(nsresult); MOZ_MUST_USE nsresult PromptTempRedirect();
--- a/services/sync/modules/constants.js +++ b/services/sync/modules/constants.js @@ -73,16 +73,20 @@ FORMS_STORE_BATCH_SIZE: 5 PASSWORDS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE ADDONS_STORE_BATCH_SIZE: 1000000, // process all addons at once APPS_STORE_BATCH_SIZE: 50, // same as MOBILE_BATCH_SIZE // Default batch size for download batching // (how many records are fetched at a time from the server when batching is used). DEFAULT_DOWNLOAD_BATCH_SIZE: 1000, + +// Default maximum size for a record payload +DEFAULT_MAX_RECORD_PAYLOAD_BYTES: 262144, // 256KB + // score thresholds for early syncs SINGLE_USER_THRESHOLD: 1000, MULTI_DEVICE_THRESHOLD: 300, // Other score increment constants SCORE_INCREMENT_SMALL: 1, SCORE_INCREMENT_MEDIUM: 10,
--- a/services/sync/modules/doctor.js +++ b/services/sync/modules/doctor.js @@ -101,17 +101,17 @@ this.Doctor = { if (nowSeconds - lastValidation < validationInterval) { log.info(`Skipping validation of ${e.name}: too recent since last validation attempt`); continue; } // Update the time now, even if we decline to actually perform a // validation. We don't want to check the rest of these more frequently // than once a day. - Svc.Prefs.set("validation.lastTime", Math.floor(nowSeconds)); + Svc.Prefs.set(prefPrefix + "validation.lastTime", Math.floor(nowSeconds)); // Validation only occurs a certain percentage of the time. let validationProbability = Svc.Prefs.get(prefPrefix + "validation.percentageChance", 0) / 100.0; if (validationProbability < Math.random()) { log.info(`Skipping validation of ${e.name}: Probability threshold not met`); continue; }
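Reviewer note: the one-line change above apparently matters because the "last validated" timestamp is read from a per-engine pref but was being written to a shared pref, so the per-engine throttling interval could not see the updated value. A tiny Java sketch of the per-engine key construction as it appears from the surrounding test code (the prefix shape is an assumption, not confirmed by this hunk):

    // Illustrative sketch; pref names are assumed from test_doctor.js.
    public final class ValidationPrefs {
        // Keys resolve under the services.sync. branch.
        static String lastTimePref(String engineName) {
            String prefPrefix = "engine." + engineName + ".";
            return prefPrefix + "validation.lastTime";
        }

        public static void main(String[] args) {
            // e.g. engine.test-engine.validation.lastTime
            System.out.println(lastTimePref("test-engine"));
        }
    }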
--- a/services/sync/modules/engines.js +++ b/services/sync/modules/engines.js @@ -926,16 +926,24 @@ SyncEngine.prototype = { get lastSyncLocal() { return parseInt(Svc.Prefs.get(this.name + ".lastSyncLocal", "0"), 10); }, set lastSyncLocal(value) { // Store as a string because pref can only store C longs as numbers. Svc.Prefs.set(this.name + ".lastSyncLocal", value.toString()); }, + get maxRecordPayloadBytes() { + let serverConfiguration = this.service.serverConfiguration; + if (serverConfiguration && serverConfiguration.max_record_payload_bytes) { + return serverConfiguration.max_record_payload_bytes; + } + return DEFAULT_MAX_RECORD_PAYLOAD_BYTES; + }, + /* * Returns a changeset for this sync. Engine implementations can override this * method to bypass the tracker for certain or all changed items. */ getChangedIDs() { return this._tracker.changedIDs; }, @@ -1628,16 +1636,23 @@ SyncEngine.prototype = { let out; let ok = false; try { out = this._createRecord(id); if (this._log.level <= Log.Level.Trace) this._log.trace("Outgoing: " + out); out.encrypt(this.service.collectionKeys.keyForCollection(this.name)); + let payloadLength = JSON.stringify(out.payload).length; + if (payloadLength > this.maxRecordPayloadBytes) { + if (this.allowSkippedRecord) { + this._modified.delete(id); // Do not attempt to sync that record again + } + throw new Error(`Payload too big: ${payloadLength} bytes`); + } ok = true; } catch (ex) { this._log.warn("Error creating record", ex); ++counts.failed; if (Async.isShutdownException(ex) || !this.allowSkippedRecord) { Observers.notify("weave:engine:sync:uploaded", counts, this.name); throw ex; }
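Reviewer note: the engine now consults the server-provided max_record_payload_bytes and falls back to the new 256 KB default; an outgoing record whose encrypted payload serializes larger than the limit is dropped from the changeset (when the engine allows skipped records) and counted as a failure. A compact Java sketch of that check (hypothetical names; the real code is the JavaScript above, which measures JSON string length):

    // Illustrative sketch; not the Sync implementation.
    public final class PayloadLimit {
        static final int DEFAULT_MAX_RECORD_PAYLOAD_BYTES = 262144; // 256 KB

        // Prefer the server's advertised limit; otherwise use the default.
        static int maxRecordPayloadBytes(Integer serverLimit) {
            return serverLimit != null ? serverLimit
                                       : DEFAULT_MAX_RECORD_PAYLOAD_BYTES;
        }

        static boolean tooBig(String serializedPayload, Integer serverLimit) {
            return serializedPayload.length() > maxRecordPayloadBytes(serverLimit);
        }

        public static void main(String[] args) {
            assert !tooBig("small payload", null);
            assert tooBig("x".repeat(300_000), null); // over the 256 KB default
        }
    }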
--- a/services/sync/modules/engines/tabs.js +++ b/services/sync/modules/engines/tabs.js @@ -205,23 +205,21 @@ TabStore.prototype = { record.clientName = this.engine.service.clientsEngine.localName; // Sort tabs in descending-used order to grab the most recently used let tabs = this.getAllTabs(true).sort(function(a, b) { return b.lastUsed - a.lastUsed; }); // Figure out how many tabs we can pack into a payload. - // See bug 535326 comment 8 for an explanation of the estimation - // If the server configuration is absent, we use the old max payload size of 28K - let size = JSON.stringify(tabs).length; + // We use byteLength here because the data is not encrypted in ascii yet. + let size = new TextEncoder("utf-8").encode(JSON.stringify(tabs)).byteLength; let origLength = tabs.length; - const MAX_TAB_SIZE = (this.engine.service.serverConfiguration ? - this.engine.service.serverConfiguration.max_record_payload_bytes : - 28672) / 4 * 3 - 1500; + // See bug 535326 comment 8 for an explanation of the estimation + const MAX_TAB_SIZE = this.engine.maxRecordPayloadBytes / 4 * 3 - 1500; if (size > MAX_TAB_SIZE) { // Estimate a little more than the direct fraction to maximize packing let cutoff = Math.ceil(tabs.length * MAX_TAB_SIZE / size); tabs = tabs.slice(0, cutoff + 1); // Keep dropping off the last entry until the data fits while (JSON.stringify(tabs).length > MAX_TAB_SIZE) tabs.pop();
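Reviewer note: the tab store now budgets against the engine's maxRecordPayloadBytes (server value, else the new 256 KB default) rather than the old hard-coded 28 K fallback, and measures the tab JSON in UTF-8 bytes. The estimate keeps the bug 535326 comment 8 formula — divide by 4 and multiply by 3, presumably to leave room for the roughly 4/3 expansion when the encrypted payload is base64-encoded, then reserve 1500 bytes of overhead. With the 256 KB default that works out to 262144 / 4 * 3 - 1500 = 195108 bytes of tab data. A short Java sketch of the arithmetic (constant value taken from constants.js above):

    // Illustrative arithmetic only.
    public final class TabBudget {
        public static void main(String[] args) {
            int maxRecordPayloadBytes = 262144;              // 256 KB default
            int maxTabSize = maxRecordPayloadBytes / 4 * 3 - 1500;
            System.out.println(maxTabSize);                  // prints 195108
        }
    }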
--- a/services/sync/tests/unit/test_doctor.js +++ b/services/sync/tests/unit/test_doctor.js @@ -1,20 +1,67 @@ /* Any copyright is dedicated to the Public Domain. http://creativecommons.org/publicdomain/zero/1.0/ */ const { Doctor, REPAIR_ADVANCE_PERIOD } = Cu.import("resource://services-sync/doctor.js", {}); +Cu.import("resource://gre/modules/Services.jsm"); initTestLogging("Trace"); function mockDoctor(mocks) { // Clone the object and put mocks in that. return Object.assign({}, Doctor, mocks); } +add_task(async function test_validation_interval() { + let now = 1000; + let doctor = mockDoctor({ + _now() { + // note that the function being mocked actually returns seconds. + return now; + }, + }); + + let engine = { + name: "test-engine", + getValidator() { + return { + validate(e) { + return {}; + } + } + }, + } + + // setup prefs which enable test-engine validation. + Services.prefs.setBoolPref("services.sync.engine.test-engine.validation.enabled", true); + Services.prefs.setIntPref("services.sync.engine.test-engine.validation.percentageChance", 100); + Services.prefs.setIntPref("services.sync.engine.test-engine.validation.maxRecords", 1); + // And say we should validate every 10 seconds. + Services.prefs.setIntPref("services.sync.engine.test-engine.validation.interval", 10); + + deepEqual(doctor._getEnginesToValidate([engine]), { + "test-engine": { + engine, + maxRecords: 1, + } + }); + // We haven't advanced the timestamp, so we should not validate again. + deepEqual(doctor._getEnginesToValidate([engine]), {}); + // Advance our clock by 11 seconds. + now += 11; + // We should validate again. + deepEqual(doctor._getEnginesToValidate([engine]), { + "test-engine": { + engine, + maxRecords: 1, + } + }); +}); + add_task(async function test_repairs_start() { let repairStarted = false; let problems = { missingChildren: ["a", "b", "c"], } let validator = { validate(engine) { return problems;
--- a/services/sync/tests/unit/test_syncengine_sync.js +++ b/services/sync/tests/unit/test_syncengine_sync.js @@ -1338,60 +1338,88 @@ add_task(async function test_uploadOutgo // The 'flying' record wasn't marked so it wasn't uploaded do_check_eq(collection.payload("flying"), undefined); } finally { await cleanAndGo(engine, server); } }); - -add_task(async function test_uploadOutgoing_huge() { +async function test_uploadOutgoing_max_record_payload_bytes(allowSkippedRecord) { + _("SyncEngine._uploadOutgoing throws when payload is bigger than max_record_payload_bytes"); let collection = new ServerCollection(); collection._wbos.flying = new ServerWBO("flying"); collection._wbos.scotsman = new ServerWBO("scotsman"); let server = sync_httpd_setup({ "/1.1/foo/storage/rotary": collection.handler(), "/1.1/foo/storage/rotary/flying": collection.wbo("flying").handler(), + "/1.1/foo/storage/rotary/scotsman": collection.wbo("scotsman").handler(), }); await SyncTestingInfrastructure(server); generateNewKeys(Service.collectionKeys); let engine = makeRotaryEngine(); - engine.allowSkippedRecord = true; + engine.allowSkippedRecord = allowSkippedRecord; engine.lastSync = 1; - engine._store.items = { flying: "a".repeat(1024 * 1024) }; + engine._store.items = { flying: "a".repeat(1024 * 1024), scotsman: "abcd" }; engine._tracker.addChangedID("flying", 1000); + engine._tracker.addChangedID("scotsman", 1000); let meta_global = Service.recordManager.set(engine.metaURL, new WBORecord(engine.metaURL)); meta_global.payload.engines = {rotary: {version: engine.version, syncID: engine.syncID}}; try { - // Confirm initial environment do_check_eq(engine.lastSyncLocal, 0); do_check_eq(collection.payload("flying"), undefined); + do_check_eq(collection.payload("scotsman"), undefined); engine._syncStartup(); engine._uploadOutgoing(); + + if (!allowSkippedRecord) { + do_throw("should not get here"); + } + engine.trackRemainingChanges(); - // Check we didn't upload to the server - do_check_eq(collection.payload("flying"), undefined); - // And that we won't try to upload it again next time. + // Check we uploaded the other record to the server + do_check_true(collection.payload("scotsman")); + // And that we won't try to upload the huge record next time. do_check_eq(engine._tracker.changedIDs["flying"], undefined); + } catch (e) { + if (allowSkippedRecord) { + do_throw("should not get here"); + } + + engine.trackRemainingChanges(); + + // Check that we will try to upload the huge record next time + do_check_eq(engine._tracker.changedIDs["flying"], 1000); } finally { + // Check we didn't upload the oversized record to the server + do_check_eq(collection.payload("flying"), undefined); await cleanAndGo(engine, server); } +} + + +add_task(async function test_uploadOutgoing_max_record_payload_bytes_disallowSkippedRecords() { + return test_uploadOutgoing_max_record_payload_bytes(false); +}); + + +add_task(async function test_uploadOutgoing_max_record_payload_bytes_allowSkippedRecords() { + return test_uploadOutgoing_max_record_payload_bytes(true); }); add_task(async function test_uploadOutgoing_failed() { _("SyncEngine._uploadOutgoing doesn't clear the tracker of objects that failed to upload."); let collection = new ServerCollection(); // We only define the "flying" WBO on the server, not the "scotsman"
--- a/services/sync/tests/unit/test_tab_store.js +++ b/services/sync/tests/unit/test_tab_store.js @@ -88,27 +88,27 @@ function test_getAllTabs() { function test_createRecord() { let store = getMockStore(); let record; store.getTabState = mockGetTabState; store.shouldSkipWindow = mockShouldSkipWindow; store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1); - let numtabs = Math.ceil(20000. / 77.); + let numtabs = 2600; // Note: this number is connected to DEFAULT_MAX_RECORD_PAYLOAD_BYTES store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, 1); record = store.createRecord("fake-guid"); ok(record instanceof TabSetRecord); equal(record.tabs.length, 1); _("create a big record"); store.getWindowEnumerator = mockGetWindowEnumerator.bind(this, "http://foo.com", 1, numtabs); record = store.createRecord("fake-guid"); ok(record instanceof TabSetRecord); - equal(record.tabs.length, 256); + equal(record.tabs.length, 2501); } function run_test() { test_create(); test_getAllTabs(); test_createRecord(); }
--- a/servo/components/script/dom/canvasrenderingcontext2d.rs +++ b/servo/components/script/dom/canvasrenderingcontext2d.rs @@ -993,25 +993,25 @@ impl CanvasRenderingContext2DMethods for // https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata fn CreateImageData(&self, sw: Finite<f64>, sh: Finite<f64>) -> Fallible<Root<ImageData>> { if *sw == 0.0 || *sh == 0.0 { return Err(Error::IndexSize); } let sw = cmp::max(1, sw.abs().to_u32().unwrap()); let sh = cmp::max(1, sh.abs().to_u32().unwrap()); - Ok(ImageData::new(&self.global(), sw, sh, None)) + ImageData::new(&self.global(), sw, sh, None) } // https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata fn CreateImageData_(&self, imagedata: &ImageData) -> Fallible<Root<ImageData>> { - Ok(ImageData::new(&self.global(), - imagedata.Width(), - imagedata.Height(), - None)) + ImageData::new(&self.global(), + imagedata.Width(), + imagedata.Height(), + None) } // https://html.spec.whatwg.org/multipage/#dom-context-2d-getimagedata fn GetImageData(&self, sx: Finite<f64>, sy: Finite<f64>, sw: Finite<f64>, sh: Finite<f64>) @@ -1054,17 +1054,17 @@ impl CanvasRenderingContext2DMethods for // Un-premultiply alpha for chunk in data.chunks_mut(4) { let alpha = chunk[3] as usize; chunk[0] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[0] as usize]; chunk[1] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[1] as usize]; chunk[2] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[2] as usize]; } - Ok(ImageData::new(&self.global(), sw, sh, Some(data))) + ImageData::new(&self.global(), sw, sh, Some(data)) } // https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata fn PutImageData(&self, imagedata: &ImageData, dx: Finite<f64>, dy: Finite<f64>) { self.PutImageData_(imagedata, dx, dy, Finite::wrap(0f64),
--- a/servo/components/script/dom/imagedata.rs +++ b/servo/components/script/dom/imagedata.rs @@ -23,30 +23,34 @@ pub struct ImageData { reflector_: Reflector, width: u32, height: u32, data: Heap<*mut JSObject>, } impl ImageData { #[allow(unsafe_code)] - pub fn new(global: &GlobalScope, width: u32, height: u32, mut data: Option<Vec<u8>>) -> Root<ImageData> { + pub fn new(global: &GlobalScope, + width: u32, + height: u32, + mut data: Option<Vec<u8>>) + -> Fallible<Root<ImageData>> { let len = width * height * 4; unsafe { let cx = global.get_cx(); rooted!(in (cx) let mut js_object = ptr::null_mut()); let data = match data { Some(ref mut d) => { d.resize(len as usize, 0); CreateWith::Slice(&d[..]) }, None => CreateWith::Length(len), }; Uint8ClampedArray::create(cx, data, js_object.handle_mut()).unwrap(); - Self::new_with_jsobject(global, width, Some(height), Some(js_object.get())).unwrap() + Self::new_with_jsobject(global, width, Some(height), Some(js_object.get())) } } #[allow(unsafe_code)] unsafe fn new_with_jsobject(global: &GlobalScope, width: u32, mut opt_height: Option<u32>, opt_jsobject: Option<*mut JSObject>)
--- a/servo/components/script/layout_wrapper.rs +++ b/servo/components/script/layout_wrapper.rs @@ -928,21 +928,22 @@ impl<ConcreteNode> Iterator for ThreadSa use ::selectors::Element; match self.parent_node.get_pseudo_element_type() { PseudoElementType::Before(_) | PseudoElementType::After(_) => None, PseudoElementType::DetailsSummary(_) => { let mut current_node = self.current_node.clone(); loop { let next_node = if let Some(ref node) = current_node { - if node.is_element() && - node.as_element().unwrap().get_local_name() == &local_name!("summary") && - node.as_element().unwrap().get_namespace() == &ns!(html) { - self.current_node = None; - return Some(node.clone()); + if let Some(element) = node.as_element() { + if element.get_local_name() == &local_name!("summary") && + element.get_namespace() == &ns!(html) { + self.current_node = None; + return Some(node.clone()); + } } unsafe { node.dangerous_next_sibling() } } else { self.current_node = None; return None }; current_node = next_node; }
--- a/servo/components/style/gecko_bindings/bindings.rs +++ b/servo/components/style/gecko_bindings/bindings.rs @@ -438,17 +438,17 @@ extern "C" { extern "C" { pub fn Gecko_DropStyleChildrenIterator(it: StyleChildrenIteratorOwned); } extern "C" { pub fn Gecko_GetNextStyleChild(it: StyleChildrenIteratorBorrowedMut) -> RawGeckoNodeBorrowedOrNull; } extern "C" { - pub fn Gecko_ElementState(element: RawGeckoElementBorrowed) -> u16; + pub fn Gecko_ElementState(element: RawGeckoElementBorrowed) -> u64; } extern "C" { pub fn Gecko_IsLink(element: RawGeckoElementBorrowed) -> bool; } extern "C" { pub fn Gecko_IsTextNode(node: RawGeckoNodeBorrowed) -> bool; } extern "C" {
--- a/servo/components/style/gecko_bindings/structs_debug.rs +++ b/servo/components/style/gecko_bindings/structs_debug.rs @@ -2234,17 +2234,17 @@ pub mod root { #[repr(C)] #[derive(Debug)] pub struct FastElementCreationOptions { pub _base: root::mozilla::dom::ElementCreationOptions, } #[test] fn bindgen_test_layout_FastElementCreationOptions() { assert_eq!(::std::mem::size_of::<FastElementCreationOptions>() - , 32usize , concat ! ( + , 56usize , concat ! ( "Size of: " , stringify ! ( FastElementCreationOptions ) )); assert_eq! (::std::mem::align_of::<FastElementCreationOptions>() , 8usize , concat ! ( "Alignment of " , stringify ! ( FastElementCreationOptions ) )); } #[repr(C)] @@ -3029,16 +3029,19 @@ pub mod root { assert_eq! (unsafe { & ( * ( 0 as * const OriginAttributesDictionary ) ) . mUserContextId as * const _ as usize } , 32usize , concat ! ( "Alignment of field: " , stringify ! ( OriginAttributesDictionary ) , "::" , stringify ! ( mUserContextId ) )); } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct Promise([u8; 0]); pub mod workers { #[allow(unused_imports)] use self::super::super::super::super::root; } #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum CSSStyleSheetParsingMode { Author = 0, @@ -3411,33 +3414,41 @@ pub mod root { Denied = 3, EndGuard_ = 4, } #[repr(C)] #[derive(Debug)] pub struct ElementCreationOptions { pub _base: root::mozilla::dom::DictionaryBase, pub mIs: [u64; 3usize], + pub mPseudo: [u64; 3usize], } #[test] fn bindgen_test_layout_ElementCreationOptions() { assert_eq!(::std::mem::size_of::<ElementCreationOptions>() , - 32usize , concat ! ( + 56usize , concat ! ( "Size of: " , stringify ! ( ElementCreationOptions ) )); assert_eq! (::std::mem::align_of::<ElementCreationOptions>() , 8usize , concat ! ( "Alignment of " , stringify ! ( ElementCreationOptions ) )); assert_eq! (unsafe { & ( * ( 0 as * const ElementCreationOptions ) ) . mIs as * const _ as usize } , 8usize , concat ! ( "Alignment of field: " , stringify ! ( ElementCreationOptions ) , "::" , stringify ! ( mIs ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const ElementCreationOptions ) ) . + mPseudo as * const _ as usize } , 32usize , concat + ! ( + "Alignment of field: " , stringify ! ( + ElementCreationOptions ) , "::" , stringify ! 
( + mPseudo ) )); } #[repr(C)] #[derive(Debug)] pub struct ElementCreationOptionsOrString { pub mType: root::mozilla::dom::ElementCreationOptionsOrString_Type, pub mValue: root::mozilla::dom::ElementCreationOptionsOrString_Value, } #[repr(u32)] @@ -3701,19 +3712,16 @@ pub mod root { pub struct NodeIterator([u8; 0]); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum OrientationType { } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct ProcessingInstruction([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct Promise([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct StyleSheetList([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct SVGSVGElement([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct Touch([u8; 0]); #[repr(C)] @@ -3795,19 +3803,16 @@ pub mod root { impl Clone for BorrowedAttrInfo { fn clone(&self) -> Self { *self } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct ShadowRoot([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct CustomElementData([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct Selection([u8; 0]); #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ScrollBehavior { Auto = 0, Instant = 1, Smooth = 2, EndGuard_ = 3, @@ -3862,16 +3867,19 @@ pub mod root { assert_eq! (unsafe { & ( * ( 0 as * const ScrollToOptions ) ) . mTop as * const _ as usize } , 24usize , concat ! ( "Alignment of field: " , stringify ! ( ScrollToOptions ) , "::" , stringify ! ( mTop ) )); } #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct CustomElementData([u8; 0]); + #[repr(C)] #[derive(Debug)] pub struct FragmentOrElement { pub _base: root::nsIContent, pub mRefCnt: root::nsCycleCollectingAutoRefCnt, pub _mOwningThread: root::nsAutoOwningThread, /** * Array containing all attributes and children for this element */ @@ -5021,17 +5029,17 @@ pub mod root { * Event states are associated with pseudo-classes. */ #[repr(C)] #[derive(Debug, Copy)] pub struct EventStates { pub mStates: root::mozilla::EventStates_InternalType, } pub type EventStates_InternalType = u64; - pub type EventStates_ServoType = u16; + pub type EventStates_ServoType = u64; #[test] fn bindgen_test_layout_EventStates() { assert_eq!(::std::mem::size_of::<EventStates>() , 8usize , concat ! ( "Size of: " , stringify ! ( EventStates ) )); assert_eq! (::std::mem::align_of::<EventStates>() , 8usize , concat ! ( "Alignment of " , stringify ! ( EventStates ) )); assert_eq! (unsafe { @@ -7535,17 +7543,17 @@ pub mod root { root::mozilla::dom::BorrowedAttrInfo; pub type ServoElementSnapshot_Element = root::mozilla::dom::Element; pub type ServoElementSnapshot_ServoStateType = root::mozilla::EventStates_ServoType; pub use self::super::super::root::mozilla::ServoElementSnapshotFlags as ServoElementSnapshot_Flags; #[test] fn bindgen_test_layout_ServoElementSnapshot() { - assert_eq!(::std::mem::size_of::<ServoElementSnapshot>() , 24usize + assert_eq!(::std::mem::size_of::<ServoElementSnapshot>() , 32usize , concat ! ( "Size of: " , stringify ! ( ServoElementSnapshot ) )); assert_eq! (::std::mem::align_of::<ServoElementSnapshot>() , 8usize , concat ! ( "Alignment of " , stringify ! ( ServoElementSnapshot ) )); assert_eq! (unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . @@ -7564,23 +7572,23 @@ pub mod root { & ( * ( 0 as * const ServoElementSnapshot ) ) . mState as * const _ as usize } , 16usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mState ) )); assert_eq! 
(unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . mIsHTMLElementInHTMLDocument as * const _ as usize } , - 18usize , concat ! ( + 24usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mIsHTMLElementInHTMLDocument ) )); assert_eq! (unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . - mIsInChromeDocument as * const _ as usize } , 19usize + mIsInChromeDocument as * const _ as usize } , 25usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mIsInChromeDocument ) )); } #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Directionality { @@ -8345,16 +8353,22 @@ pub mod root { #[derive(Debug, Copy, Clone)] pub struct pair<_T1, _T2> { pub first: _T1, pub second: _T2, } pub type pair_first_type<_T1> = _T1; pub type pair_second_type<_T2> = _T2; #[repr(C)] + pub struct atomic<_Tp> { + pub _base: (), + pub _phantom_0: ::std::marker::PhantomData<_Tp>, + } + pub type atomic___base = [u8; 0usize]; + #[repr(C)] #[derive(Debug, Copy)] pub struct input_iterator_tag { pub _address: u8, } #[test] fn bindgen_test_layout_input_iterator_tag() { assert_eq!(::std::mem::size_of::<input_iterator_tag>() , 1usize , concat ! ( @@ -8363,269 +8377,284 @@ pub mod root { , concat ! ( "Alignment of " , stringify ! ( input_iterator_tag ) )); } impl Clone for input_iterator_tag { fn clone(&self) -> Self { *self } } #[repr(C)] + #[derive(Debug, Copy)] + pub struct forward_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_forward_iterator_tag() { + assert_eq!(::std::mem::size_of::<forward_iterator_tag>() , 1usize + , concat ! ( + "Size of: " , stringify ! ( forward_iterator_tag ) )); + assert_eq! (::std::mem::align_of::<forward_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! ( forward_iterator_tag ) + )); + } + impl Clone for forward_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct bidirectional_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_bidirectional_iterator_tag() { + assert_eq!(::std::mem::size_of::<bidirectional_iterator_tag>() , + 1usize , concat ! ( + "Size of: " , stringify ! ( bidirectional_iterator_tag + ) )); + assert_eq! (::std::mem::align_of::<bidirectional_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! ( + bidirectional_iterator_tag ) )); + } + impl Clone for bidirectional_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct random_access_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_random_access_iterator_tag() { + assert_eq!(::std::mem::size_of::<random_access_iterator_tag>() , + 1usize , concat ! ( + "Size of: " , stringify ! ( random_access_iterator_tag + ) )); + assert_eq! (::std::mem::align_of::<random_access_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! 
( + random_access_iterator_tag ) )); + } + impl Clone for random_access_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct iterator<_Category, _Tp, _Distance, _Pointer, _Reference> { pub _address: u8, pub _phantom_0: ::std::marker::PhantomData<_Category>, pub _phantom_1: ::std::marker::PhantomData<_Tp>, pub _phantom_2: ::std::marker::PhantomData<_Distance>, pub _phantom_3: ::std::marker::PhantomData<_Pointer>, pub _phantom_4: ::std::marker::PhantomData<_Reference>, } - pub type iterator_iterator_category<_Category> = _Category; pub type iterator_value_type<_Tp> = _Tp; pub type iterator_difference_type<_Distance> = _Distance; pub type iterator_pointer<_Pointer> = _Pointer; pub type iterator_reference<_Reference> = _Reference; - #[repr(C)] - #[derive(Debug)] - pub struct atomic<_Tp> { - pub _M_i: _Tp, - } - pub mod chrono { - #[allow(unused_imports)] - use self::super::super::super::root; - } - } - pub mod __gnu_cxx { - #[allow(unused_imports)] - use self::super::super::root; - } - pub type __off_t = ::std::os::raw::c_long; - pub type __off64_t = ::std::os::raw::c_long; - #[repr(C)] - #[derive(Debug, Copy)] - pub struct _IO_FILE { - pub _flags: ::std::os::raw::c_int, - pub _IO_read_ptr: *mut ::std::os::raw::c_char, - pub _IO_read_end: *mut ::std::os::raw::c_char, - pub _IO_read_base: *mut ::std::os::raw::c_char, - pub _IO_write_base: *mut ::std::os::raw::c_char, - pub _IO_write_ptr: *mut ::std::os::raw::c_char, - pub _IO_write_end: *mut ::std::os::raw::c_char, - pub _IO_buf_base: *mut ::std::os::raw::c_char, - pub _IO_buf_end: *mut ::std::os::raw::c_char, - pub _IO_save_base: *mut ::std::os::raw::c_char, - pub _IO_backup_base: *mut ::std::os::raw::c_char, - pub _IO_save_end: *mut ::std::os::raw::c_char, - pub _markers: *mut root::_IO_marker, - pub _chain: *mut root::_IO_FILE, - pub _fileno: ::std::os::raw::c_int, - pub _flags2: ::std::os::raw::c_int, - pub _old_offset: root::__off_t, - pub _cur_column: ::std::os::raw::c_ushort, - pub _vtable_offset: ::std::os::raw::c_char, - pub _shortbuf: [::std::os::raw::c_char; 1usize], - pub _lock: *mut root::_IO_lock_t, - pub _offset: root::__off64_t, - pub __pad1: *mut ::std::os::raw::c_void, - pub __pad2: *mut ::std::os::raw::c_void, - pub __pad3: *mut ::std::os::raw::c_void, - pub __pad4: *mut ::std::os::raw::c_void, - pub __pad5: usize, - pub _mode: ::std::os::raw::c_int, - pub _unused2: [::std::os::raw::c_char; 20usize], - } - #[test] - fn bindgen_test_layout__IO_FILE() { - assert_eq!(::std::mem::size_of::<_IO_FILE>() , 216usize , concat ! ( - "Size of: " , stringify ! ( _IO_FILE ) )); - assert_eq! (::std::mem::align_of::<_IO_FILE>() , 8usize , concat ! ( - "Alignment of " , stringify ! ( _IO_FILE ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . 
_flags as * const _ as + pub type iterator_iterator_category<_Category> = _Category; + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct __bit_const_reference<_Cp> { + pub __seg_: root::std::__bit_const_reference___storage_pointer<_Cp>, + pub __mask_: root::std::__bit_const_reference___storage_type<_Cp>, + } + pub type __bit_const_reference___storage_type<_Cp> = _Cp; + pub type __bit_const_reference___storage_pointer<_Cp> = _Cp; + } + pub type __int64_t = ::std::os::raw::c_longlong; + pub type __darwin_va_list = root::__builtin_va_list; + pub type __darwin_off_t = root::__int64_t; + pub type va_list = root::__darwin_va_list; + pub type fpos_t = root::__darwin_off_t; + #[repr(C)] + #[derive(Debug, Copy)] + pub struct __sbuf { + pub _base: *mut ::std::os::raw::c_uchar, + pub _size: ::std::os::raw::c_int, + } + #[test] + fn bindgen_test_layout___sbuf() { + assert_eq!(::std::mem::size_of::<__sbuf>() , 16usize , concat ! ( + "Size of: " , stringify ! ( __sbuf ) )); + assert_eq! (::std::mem::align_of::<__sbuf>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( __sbuf ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sbuf ) ) . _base as * const _ as usize } , 0usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , + "Alignment of field: " , stringify ! ( __sbuf ) , "::" , + stringify ! ( _base ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sbuf ) ) . _size as * const _ as + usize } , 8usize , concat ! ( + "Alignment of field: " , stringify ! ( __sbuf ) , "::" , + stringify ! ( _size ) )); + } + impl Clone for __sbuf { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct __sFILEX([u8; 0]); + #[repr(C)] + #[derive(Debug, Copy)] + pub struct __sFILE { + pub _p: *mut ::std::os::raw::c_uchar, + pub _r: ::std::os::raw::c_int, + pub _w: ::std::os::raw::c_int, + pub _flags: ::std::os::raw::c_short, + pub _file: ::std::os::raw::c_short, + pub _bf: root::__sbuf, + pub _lbfsize: ::std::os::raw::c_int, + pub _cookie: *mut ::std::os::raw::c_void, + pub _close: ::std::option::Option<unsafe extern "C" fn(arg1: + *mut ::std::os::raw::c_void) + -> ::std::os::raw::c_int>, + pub _read: ::std::option::Option<unsafe extern "C" fn(arg1: + *mut ::std::os::raw::c_void, + arg2: + *mut ::std::os::raw::c_char, + arg3: + ::std::os::raw::c_int) + -> ::std::os::raw::c_int>, + pub _seek: ::std::option::Option<unsafe extern "C" fn(arg1: + *mut ::std::os::raw::c_void, + arg2: + root::fpos_t, + arg3: + ::std::os::raw::c_int) + -> ::std::os::raw::c_longlong>, + pub _write: ::std::option::Option<unsafe extern "C" fn(arg1: + *mut ::std::os::raw::c_void, + arg2: + *const ::std::os::raw::c_char, + arg3: + ::std::os::raw::c_int) + -> ::std::os::raw::c_int>, + pub _ub: root::__sbuf, + pub _extra: *mut root::__sFILEX, + pub _ur: ::std::os::raw::c_int, + pub _ubuf: [::std::os::raw::c_uchar; 3usize], + pub _nbuf: [::std::os::raw::c_uchar; 1usize], + pub _lb: root::__sbuf, + pub _blksize: ::std::os::raw::c_int, + pub _offset: root::fpos_t, + } + #[test] + fn bindgen_test_layout___sFILE() { + assert_eq!(::std::mem::size_of::<__sFILE>() , 152usize , concat ! ( + "Size of: " , stringify ! ( __sFILE ) )); + assert_eq! (::std::mem::align_of::<__sFILE>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( __sFILE ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _p as * const _ as + usize } , 0usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _p ) )); + assert_eq! 
(unsafe { + & ( * ( 0 as * const __sFILE ) ) . _r as * const _ as + usize } , 8usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _r ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _w as * const _ as + usize } , 12usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _w ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _flags as * const _ as + usize } , 16usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , stringify ! ( _flags ) )); assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_read_ptr as * - const _ as usize } , 8usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_read_ptr ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_read_end as * - const _ as usize } , 16usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_read_end ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_read_base as * - const _ as usize } , 24usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_read_base ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_write_base as * - const _ as usize } , 32usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_write_base ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_write_ptr as * - const _ as usize } , 40usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_write_ptr ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_write_end as * - const _ as usize } , 48usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_write_end ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_buf_base as * - const _ as usize } , 56usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_buf_base ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_buf_end as * const - _ as usize } , 64usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_buf_end ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_save_base as * - const _ as usize } , 72usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_save_base ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_backup_base as * - const _ as usize } , 80usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_backup_base ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _IO_save_end as * - const _ as usize } , 88usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _IO_save_end ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _markers as * const _ - as usize } , 96usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _markers ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _chain as * const _ as + & ( * ( 0 as * const __sFILE ) ) . _file as * const _ as + usize } , 18usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _file ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . 
_bf as * const _ as + usize } , 24usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _bf ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _lbfsize as * const _ + as usize } , 40usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _lbfsize ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _cookie as * const _ as + usize } , 48usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _cookie ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _close as * const _ as + usize } , 56usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _close ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _read as * const _ as + usize } , 64usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _read ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _seek as * const _ as + usize } , 72usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _seek ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _write as * const _ as + usize } , 80usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _write ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _ub as * const _ as + usize } , 88usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _ub ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _extra as * const _ as usize } , 104usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _chain ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _fileno as * const _ - as usize } , 112usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _fileno ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _flags2 as * const _ - as usize } , 116usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _flags2 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _old_offset as * const - _ as usize } , 120usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _old_offset ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _cur_column as * const - _ as usize } , 128usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _cur_column ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _vtable_offset as * - const _ as usize } , 130usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _vtable_offset ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _shortbuf as * const _ - as usize } , 131usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _shortbuf ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _lock as * const _ as - usize } , 136usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _lock ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _offset as * const _ - as usize } , 144usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! 
( _extra ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _ur as * const _ as + usize } , 112usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _ur ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _ubuf as * const _ as + usize } , 116usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _ubuf ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _nbuf as * const _ as + usize } , 119usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _nbuf ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _lb as * const _ as + usize } , 120usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _lb ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _blksize as * const _ + as usize } , 136usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , + stringify ! ( _blksize ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const __sFILE ) ) . _offset as * const _ as + usize } , 144usize , concat ! ( + "Alignment of field: " , stringify ! ( __sFILE ) , "::" , stringify ! ( _offset ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . __pad1 as * const _ as - usize } , 152usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( __pad1 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . __pad2 as * const _ as - usize } , 160usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( __pad2 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . __pad3 as * const _ as - usize } , 168usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( __pad3 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . __pad4 as * const _ as - usize } , 176usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( __pad4 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . __pad5 as * const _ as - usize } , 184usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( __pad5 ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _mode as * const _ as - usize } , 192usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _mode ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_FILE ) ) . _unused2 as * const _ - as usize } , 196usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_FILE ) , "::" , - stringify ! ( _unused2 ) )); - } - impl Clone for _IO_FILE { - fn clone(&self) -> Self { *self } - } - pub type FILE = root::_IO_FILE; - pub type va_list = root::__builtin_va_list; - pub type _IO_lock_t = ::std::os::raw::c_void; - #[repr(C)] - #[derive(Debug, Copy)] - pub struct _IO_marker { - pub _next: *mut root::_IO_marker, - pub _sbuf: *mut root::_IO_FILE, - pub _pos: ::std::os::raw::c_int, - } - #[test] - fn bindgen_test_layout__IO_marker() { - assert_eq!(::std::mem::size_of::<_IO_marker>() , 24usize , concat ! ( - "Size of: " , stringify ! ( _IO_marker ) )); - assert_eq! (::std::mem::align_of::<_IO_marker>() , 8usize , concat ! ( - "Alignment of " , stringify ! ( _IO_marker ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_marker ) ) . _next as * const _ - as usize } , 0usize , concat ! ( - "Alignment of field: " , stringify ! 
( _IO_marker ) , "::" - , stringify ! ( _next ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_marker ) ) . _sbuf as * const _ - as usize } , 8usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_marker ) , "::" - , stringify ! ( _sbuf ) )); - assert_eq! (unsafe { - & ( * ( 0 as * const _IO_marker ) ) . _pos as * const _ as - usize } , 16usize , concat ! ( - "Alignment of field: " , stringify ! ( _IO_marker ) , "::" - , stringify ! ( _pos ) )); - } - impl Clone for _IO_marker { - fn clone(&self) -> Self { *self } - } + } + impl Clone for __sFILE { + fn clone(&self) -> Self { *self } + } + pub type FILE = root::__sFILE; /** * MozRefCountType is Mozilla's reference count type. * * We use the same type to represent the refcount of RefCounted objects * as well, in order to be able to use the leak detection facilities * that are implemented by XPCOM. * * Note that this type is not in the mozilla namespace so that it is @@ -15630,73 +15659,73 @@ pub mod root { & ( * ( 0 as * const nsNodeWeakReference ) ) . mNode as * const _ as usize } , 24usize , concat ! ( "Alignment of field: " , stringify ! ( nsNodeWeakReference ) , "::" , stringify ! ( mNode ) )); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsDOMMutationObserver([u8; 0]); - pub const NODE_HAS_LISTENERMANAGER: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_LISTENERMANAGER; - pub const NODE_HAS_PROPERTIES: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_PROPERTIES; - pub const NODE_IS_ANONYMOUS_ROOT: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_ANONYMOUS_ROOT; - pub const NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE; - pub const NODE_IS_NATIVE_ANONYMOUS_ROOT: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_NATIVE_ANONYMOUS_ROOT; - pub const NODE_FORCE_XBL_BINDINGS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_FORCE_XBL_BINDINGS; - pub const NODE_MAY_BE_IN_BINDING_MNGR: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_MAY_BE_IN_BINDING_MNGR; - pub const NODE_IS_EDITABLE: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_EDITABLE; - pub const NODE_IS_NATIVE_ANONYMOUS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_NATIVE_ANONYMOUS; - pub const NODE_IS_IN_SHADOW_TREE: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_IN_SHADOW_TREE; - pub const NODE_HAS_EMPTY_SELECTOR: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_EMPTY_SELECTOR; - pub const NODE_HAS_SLOW_SELECTOR: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_SLOW_SELECTOR; - pub const NODE_HAS_EDGE_CHILD_SELECTOR: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_EDGE_CHILD_SELECTOR; - pub const NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS; - pub const NODE_ALL_SELECTOR_FLAGS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_ALL_SELECTOR_FLAGS; - pub const NODE_NEEDS_FRAME: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_NEEDS_FRAME; - pub const NODE_DESCENDANTS_NEED_FRAMES: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_DESCENDANTS_NEED_FRAMES; - pub const NODE_HAS_ACCESSKEY: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_ACCESSKEY; - pub const NODE_HAS_DIRECTION_RTL: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_DIRECTION_RTL; - pub const NODE_HAS_DIRECTION_LTR: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_HAS_DIRECTION_LTR; - pub const NODE_ALL_DIRECTION_FLAGS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_ALL_DIRECTION_FLAGS; - pub const NODE_CHROME_ONLY_ACCESS: 
root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_CHROME_ONLY_ACCESS; - pub const NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS; - pub const NODE_SHARED_RESTYLE_BIT_1: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_SHARED_RESTYLE_BIT_1; - pub const NODE_SHARED_RESTYLE_BIT_2: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_SHARED_RESTYLE_BIT_2; - pub const NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_SHARED_RESTYLE_BIT_1; - pub const NODE_TYPE_SPECIFIC_BITS_OFFSET: root::_bindgen_ty_118 = - _bindgen_ty_118::NODE_TYPE_SPECIFIC_BITS_OFFSET; - #[repr(u32)] - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub enum _bindgen_ty_118 { + pub const NODE_HAS_LISTENERMANAGER: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_LISTENERMANAGER; + pub const NODE_HAS_PROPERTIES: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_PROPERTIES; + pub const NODE_IS_ANONYMOUS_ROOT: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_ANONYMOUS_ROOT; + pub const NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE; + pub const NODE_IS_NATIVE_ANONYMOUS_ROOT: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_NATIVE_ANONYMOUS_ROOT; + pub const NODE_FORCE_XBL_BINDINGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_FORCE_XBL_BINDINGS; + pub const NODE_MAY_BE_IN_BINDING_MNGR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_MAY_BE_IN_BINDING_MNGR; + pub const NODE_IS_EDITABLE: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_EDITABLE; + pub const NODE_IS_NATIVE_ANONYMOUS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_NATIVE_ANONYMOUS; + pub const NODE_IS_IN_SHADOW_TREE: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_IN_SHADOW_TREE; + pub const NODE_HAS_EMPTY_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_EMPTY_SELECTOR; + pub const NODE_HAS_SLOW_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_SLOW_SELECTOR; + pub const NODE_HAS_EDGE_CHILD_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_EDGE_CHILD_SELECTOR; + pub const NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS; + pub const NODE_ALL_SELECTOR_FLAGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_ALL_SELECTOR_FLAGS; + pub const NODE_NEEDS_FRAME: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_NEEDS_FRAME; + pub const NODE_DESCENDANTS_NEED_FRAMES: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_DESCENDANTS_NEED_FRAMES; + pub const NODE_HAS_ACCESSKEY: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_ACCESSKEY; + pub const NODE_HAS_DIRECTION_RTL: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_DIRECTION_RTL; + pub const NODE_HAS_DIRECTION_LTR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_DIRECTION_LTR; + pub const NODE_ALL_DIRECTION_FLAGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_ALL_DIRECTION_FLAGS; + pub const NODE_CHROME_ONLY_ACCESS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_CHROME_ONLY_ACCESS; + pub const NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS; + pub const NODE_SHARED_RESTYLE_BIT_1: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_1; + pub const NODE_SHARED_RESTYLE_BIT_2: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_2; + pub const NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_1; + pub const 
NODE_TYPE_SPECIFIC_BITS_OFFSET: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_TYPE_SPECIFIC_BITS_OFFSET; + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum _bindgen_ty_28 { NODE_HAS_LISTENERMANAGER = 4, NODE_HAS_PROPERTIES = 8, NODE_IS_ANONYMOUS_ROOT = 16, NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE = 32, NODE_IS_NATIVE_ANONYMOUS_ROOT = 64, NODE_FORCE_XBL_BINDINGS = 128, NODE_MAY_BE_IN_BINDING_MNGR = 256, NODE_IS_EDITABLE = 512, @@ -15714,16 +15743,137 @@ pub mod root { NODE_HAS_DIRECTION_LTR = 1048576, NODE_ALL_DIRECTION_FLAGS = 1572864, NODE_CHROME_ONLY_ACCESS = 2097152, NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS = 4194304, NODE_SHARED_RESTYLE_BIT_1 = 8388608, NODE_SHARED_RESTYLE_BIT_2 = 16777216, NODE_TYPE_SPECIFIC_BITS_OFFSET = 23, } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsScanner([u8; 0]); + /*************************************************************** + Notes: + ***************************************************************/ + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsITokenizer { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsITokenizer_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsITokenizer() { + assert_eq!(::std::mem::size_of::<nsITokenizer>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsITokenizer ) )); + assert_eq! (::std::mem::align_of::<nsITokenizer>() , 8usize , concat ! + ( "Alignment of " , stringify ! ( nsITokenizer ) )); + } + impl Clone for nsITokenizer { + fn clone(&self) -> Self { *self } + } + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum nsDTDMode { + eDTDMode_unknown = 0, + eDTDMode_quirks = 1, + eDTDMode_almost_standards = 2, + eDTDMode_full_standards = 3, + eDTDMode_autodetect = 4, + eDTDMode_fragment = 5, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIContentSink([u8; 0]); + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct CParserContext([u8; 0]); + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIDTD { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIDTD_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIDTD() { + assert_eq!(::std::mem::size_of::<nsIDTD>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsIDTD ) )); + assert_eq! (::std::mem::align_of::<nsIDTD>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( nsIDTD ) )); + } + impl Clone for nsIDTD { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsParserBase { + pub _base: root::nsISupports, + } + #[test] + fn bindgen_test_layout_nsParserBase() { + assert_eq!(::std::mem::size_of::<nsParserBase>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsParserBase ) )); + assert_eq! (::std::mem::align_of::<nsParserBase>() , 8usize , concat ! + ( "Alignment of " , stringify ! ( nsParserBase ) )); + } + impl Clone for nsParserBase { + fn clone(&self) -> Self { *self } + } + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum eParserCommands { + eViewNormal = 0, + eViewSource = 1, + eViewFragment = 2, + eViewErrors = 3, + } + /** + * This GECKO-INTERNAL interface is on track to being REMOVED (or refactored + * to the point of being near-unrecognizable). 
+ * + * Please DO NOT #include this file in comm-central code, in your XULRunner + * app or binary extensions. + * + * Please DO NOT #include this into new files even inside Gecko. It is more + * likely than not that #including this header is the wrong thing to do. + */ + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIParser { + pub _base: root::nsParserBase, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIParser_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIParser() { + assert_eq!(::std::mem::size_of::<nsIParser>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsIParser ) )); + assert_eq! (::std::mem::align_of::<nsIParser>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( nsIParser ) )); + } + impl Clone for nsIParser { + fn clone(&self) -> Self { *self } + } pub type nscoord = i32; pub type nsIntPoint = root::mozilla::gfx::IntPoint; /** * Currently needs to be 'double' for Cairo compatibility. Could * become 'float', perhaps, in some configurations. */ pub type gfxFloat = f64; #[repr(C)] @@ -16093,19 +16243,16 @@ pub mod root { #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsHTMLStyleSheet([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIBFCacheEntry([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct nsIContentSink([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct nsIDocumentEncoder([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIDOMNodeFilter([u8; 0]); /** * An internal interface */ #[repr(C)] @@ -17506,16 +17653,38 @@ pub mod root { eStyleStruct_Border = 18, eStyleStruct_Outline = 19, eStyleStruct_XUL = 20, eStyleStruct_SVGReset = 21, eStyleStruct_Column = 22, eStyleStruct_Effects = 23, nsStyleStructID_Length = 24, } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIProperties { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIProperties_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIProperties() { + assert_eq!(::std::mem::size_of::<nsIProperties>() , 8usize , concat ! + ( "Size of: " , stringify ! ( nsIProperties ) )); + assert_eq! (::std::mem::align_of::<nsIProperties>() , 8usize , concat + ! ( "Alignment of " , stringify ! ( nsIProperties ) )); + } + impl Clone for nsIProperties { + fn clone(&self) -> Self { *self } + } #[repr(u32)] /** * Types of animatable values. */ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum nsStyleAnimType { eStyleAnimType_Custom = 0, eStyleAnimType_Coord = 1, @@ -22356,19 +22525,16 @@ pub mod root { pub struct imgLoader([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct imgCacheEntry([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIApplicationCache([u8; 0]); #[repr(C)] - #[derive(Debug, Copy, Clone)] - pub struct nsIProperties([u8; 0]); - #[repr(C)] #[derive(Debug)] pub struct imgRequest { pub _base: root::nsIStreamListener, pub _base_1: root::nsIThreadRetargetableStreamListener, pub _base_2: root::nsIChannelEventSink, pub _base_3: root::nsIInterfaceRequestor, pub _base_4: root::nsIAsyncVerifyRedirectCallback, pub mRefCnt: root::mozilla::ThreadSafeAutoRefCnt,
--- a/servo/components/style/gecko_bindings/structs_release.rs +++ b/servo/components/style/gecko_bindings/structs_release.rs @@ -2233,17 +2233,17 @@ pub mod root { #[repr(C)] #[derive(Debug)] pub struct FastElementCreationOptions { pub _base: root::mozilla::dom::ElementCreationOptions, } #[test] fn bindgen_test_layout_FastElementCreationOptions() { assert_eq!(::std::mem::size_of::<FastElementCreationOptions>() - , 32usize , concat ! ( + , 56usize , concat ! ( "Size of: " , stringify ! ( FastElementCreationOptions ) )); assert_eq! (::std::mem::align_of::<FastElementCreationOptions>() , 8usize , concat ! ( "Alignment of " , stringify ! ( FastElementCreationOptions ) )); } #[repr(C)] @@ -3020,16 +3020,19 @@ pub mod root { assert_eq! (unsafe { & ( * ( 0 as * const OriginAttributesDictionary ) ) . mUserContextId as * const _ as usize } , 32usize , concat ! ( "Alignment of field: " , stringify ! ( OriginAttributesDictionary ) , "::" , stringify ! ( mUserContextId ) )); } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct Promise([u8; 0]); pub mod workers { #[allow(unused_imports)] use self::super::super::super::super::root; } #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum CSSStyleSheetParsingMode { Author = 0, @@ -3394,33 +3397,41 @@ pub mod root { Denied = 3, EndGuard_ = 4, } #[repr(C)] #[derive(Debug)] pub struct ElementCreationOptions { pub _base: root::mozilla::dom::DictionaryBase, pub mIs: [u64; 3usize], + pub mPseudo: [u64; 3usize], } #[test] fn bindgen_test_layout_ElementCreationOptions() { assert_eq!(::std::mem::size_of::<ElementCreationOptions>() , - 32usize , concat ! ( + 56usize , concat ! ( "Size of: " , stringify ! ( ElementCreationOptions ) )); assert_eq! (::std::mem::align_of::<ElementCreationOptions>() , 8usize , concat ! ( "Alignment of " , stringify ! ( ElementCreationOptions ) )); assert_eq! (unsafe { & ( * ( 0 as * const ElementCreationOptions ) ) . mIs as * const _ as usize } , 8usize , concat ! ( "Alignment of field: " , stringify ! ( ElementCreationOptions ) , "::" , stringify ! ( mIs ) )); + assert_eq! (unsafe { + & ( * ( 0 as * const ElementCreationOptions ) ) . + mPseudo as * const _ as usize } , 32usize , concat + ! ( + "Alignment of field: " , stringify ! ( + ElementCreationOptions ) , "::" , stringify ! 
( + mPseudo ) )); } #[repr(C)] #[derive(Debug)] pub struct ElementCreationOptionsOrString { pub mType: root::mozilla::dom::ElementCreationOptionsOrString_Type, pub mValue: root::mozilla::dom::ElementCreationOptionsOrString_Value, } #[repr(u32)] @@ -3682,19 +3693,16 @@ pub mod root { pub struct NodeIterator([u8; 0]); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum OrientationType { } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct ProcessingInstruction([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct Promise([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct StyleSheetList([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct SVGSVGElement([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct Touch([u8; 0]); #[repr(C)] @@ -3776,19 +3784,16 @@ pub mod root { impl Clone for BorrowedAttrInfo { fn clone(&self) -> Self { *self } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct ShadowRoot([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct CustomElementData([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct Selection([u8; 0]); #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum ScrollBehavior { Auto = 0, Instant = 1, Smooth = 2, EndGuard_ = 3, @@ -3843,16 +3848,19 @@ pub mod root { assert_eq! (unsafe { & ( * ( 0 as * const ScrollToOptions ) ) . mTop as * const _ as usize } , 24usize , concat ! ( "Alignment of field: " , stringify ! ( ScrollToOptions ) , "::" , stringify ! ( mTop ) )); } #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct CustomElementData([u8; 0]); + #[repr(C)] #[derive(Debug)] pub struct FragmentOrElement { pub _base: root::nsIContent, pub mRefCnt: root::nsCycleCollectingAutoRefCnt, /** * Array containing all attributes and children for this element */ pub mAttrsAndChildren: root::nsAttrAndChildArray, @@ -4891,17 +4899,17 @@ pub mod root { * Event states are associated with pseudo-classes. */ #[repr(C)] #[derive(Debug, Copy)] pub struct EventStates { pub mStates: root::mozilla::EventStates_InternalType, } pub type EventStates_InternalType = u64; - pub type EventStates_ServoType = u16; + pub type EventStates_ServoType = u64; #[test] fn bindgen_test_layout_EventStates() { assert_eq!(::std::mem::size_of::<EventStates>() , 8usize , concat ! ( "Size of: " , stringify ! ( EventStates ) )); assert_eq! (::std::mem::align_of::<EventStates>() , 8usize , concat ! ( "Alignment of " , stringify ! ( EventStates ) )); assert_eq! (unsafe { @@ -7347,17 +7355,17 @@ pub mod root { root::mozilla::dom::BorrowedAttrInfo; pub type ServoElementSnapshot_Element = root::mozilla::dom::Element; pub type ServoElementSnapshot_ServoStateType = root::mozilla::EventStates_ServoType; pub use self::super::super::root::mozilla::ServoElementSnapshotFlags as ServoElementSnapshot_Flags; #[test] fn bindgen_test_layout_ServoElementSnapshot() { - assert_eq!(::std::mem::size_of::<ServoElementSnapshot>() , 24usize + assert_eq!(::std::mem::size_of::<ServoElementSnapshot>() , 32usize , concat ! ( "Size of: " , stringify ! ( ServoElementSnapshot ) )); assert_eq! (::std::mem::align_of::<ServoElementSnapshot>() , 8usize , concat ! ( "Alignment of " , stringify ! ( ServoElementSnapshot ) )); assert_eq! (unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . @@ -7376,23 +7384,23 @@ pub mod root { & ( * ( 0 as * const ServoElementSnapshot ) ) . mState as * const _ as usize } , 16usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mState ) )); assert_eq! 
(unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . mIsHTMLElementInHTMLDocument as * const _ as usize } , - 18usize , concat ! ( + 24usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mIsHTMLElementInHTMLDocument ) )); assert_eq! (unsafe { & ( * ( 0 as * const ServoElementSnapshot ) ) . - mIsInChromeDocument as * const _ as usize } , 19usize + mIsInChromeDocument as * const _ as usize } , 25usize , concat ! ( "Alignment of field: " , stringify ! ( ServoElementSnapshot ) , "::" , stringify ! ( mIsInChromeDocument ) )); } #[repr(u8)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum Directionality { @@ -8157,16 +8165,22 @@ pub mod root { #[derive(Debug, Copy, Clone)] pub struct pair<_T1, _T2> { pub first: _T1, pub second: _T2, } pub type pair_first_type<_T1> = _T1; pub type pair_second_type<_T2> = _T2; #[repr(C)] + pub struct atomic<_Tp> { + pub _base: (), + pub _phantom_0: ::std::marker::PhantomData<_Tp>, + } + pub type atomic___base = [u8; 0usize]; + #[repr(C)] #[derive(Debug, Copy)] pub struct input_iterator_tag { pub _address: u8, } #[test] fn bindgen_test_layout_input_iterator_tag() { assert_eq!(::std::mem::size_of::<input_iterator_tag>() , 1usize , concat ! ( @@ -8175,41 +8189,97 @@ pub mod root { , concat ! ( "Alignment of " , stringify ! ( input_iterator_tag ) )); } impl Clone for input_iterator_tag { fn clone(&self) -> Self { *self } } #[repr(C)] + #[derive(Debug, Copy)] + pub struct forward_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_forward_iterator_tag() { + assert_eq!(::std::mem::size_of::<forward_iterator_tag>() , 1usize + , concat ! ( + "Size of: " , stringify ! ( forward_iterator_tag ) )); + assert_eq! (::std::mem::align_of::<forward_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! ( forward_iterator_tag ) + )); + } + impl Clone for forward_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct bidirectional_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_bidirectional_iterator_tag() { + assert_eq!(::std::mem::size_of::<bidirectional_iterator_tag>() , + 1usize , concat ! ( + "Size of: " , stringify ! ( bidirectional_iterator_tag + ) )); + assert_eq! (::std::mem::align_of::<bidirectional_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! ( + bidirectional_iterator_tag ) )); + } + impl Clone for bidirectional_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct random_access_iterator_tag { + pub _address: u8, + } + #[test] + fn bindgen_test_layout_random_access_iterator_tag() { + assert_eq!(::std::mem::size_of::<random_access_iterator_tag>() , + 1usize , concat ! ( + "Size of: " , stringify ! ( random_access_iterator_tag + ) )); + assert_eq! (::std::mem::align_of::<random_access_iterator_tag>() , + 1usize , concat ! ( + "Alignment of " , stringify ! 
( + random_access_iterator_tag ) )); + } + impl Clone for random_access_iterator_tag { + fn clone(&self) -> Self { *self } + } + #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct iterator<_Category, _Tp, _Distance, _Pointer, _Reference> { pub _address: u8, pub _phantom_0: ::std::marker::PhantomData<_Category>, pub _phantom_1: ::std::marker::PhantomData<_Tp>, pub _phantom_2: ::std::marker::PhantomData<_Distance>, pub _phantom_3: ::std::marker::PhantomData<_Pointer>, pub _phantom_4: ::std::marker::PhantomData<_Reference>, } - pub type iterator_iterator_category<_Category> = _Category; pub type iterator_value_type<_Tp> = _Tp; pub type iterator_difference_type<_Distance> = _Distance; pub type iterator_pointer<_Pointer> = _Pointer; pub type iterator_reference<_Reference> = _Reference; - #[repr(C)] - #[derive(Debug)] - pub struct atomic<_Tp> { - pub _M_i: _Tp, - } - } - pub mod __gnu_cxx { - #[allow(unused_imports)] - use self::super::super::root; - } - pub type va_list = root::__builtin_va_list; + pub type iterator_iterator_category<_Category> = _Category; + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct __bit_const_reference<_Cp> { + pub __seg_: root::std::__bit_const_reference___storage_pointer<_Cp>, + pub __mask_: root::std::__bit_const_reference___storage_type<_Cp>, + } + pub type __bit_const_reference___storage_type<_Cp> = _Cp; + pub type __bit_const_reference___storage_pointer<_Cp> = _Cp; + } + pub type __darwin_va_list = root::__builtin_va_list; + pub type va_list = root::__darwin_va_list; /** * MozRefCountType is Mozilla's reference count type. * * We use the same type to represent the refcount of RefCounted objects * as well, in order to be able to use the leak detection facilities * that are implemented by XPCOM. * * Note that this type is not in the mozilla namespace so that it is @@ -15057,73 +15127,73 @@ pub mod root { & ( * ( 0 as * const nsNodeWeakReference ) ) . mNode as * const _ as usize } , 16usize , concat ! ( "Alignment of field: " , stringify ! ( nsNodeWeakReference ) , "::" , stringify ! 
( mNode ) )); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsDOMMutationObserver([u8; 0]); - pub const NODE_HAS_LISTENERMANAGER: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_LISTENERMANAGER; - pub const NODE_HAS_PROPERTIES: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_PROPERTIES; - pub const NODE_IS_ANONYMOUS_ROOT: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_ANONYMOUS_ROOT; - pub const NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE; - pub const NODE_IS_NATIVE_ANONYMOUS_ROOT: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_NATIVE_ANONYMOUS_ROOT; - pub const NODE_FORCE_XBL_BINDINGS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_FORCE_XBL_BINDINGS; - pub const NODE_MAY_BE_IN_BINDING_MNGR: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_MAY_BE_IN_BINDING_MNGR; - pub const NODE_IS_EDITABLE: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_EDITABLE; - pub const NODE_IS_NATIVE_ANONYMOUS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_NATIVE_ANONYMOUS; - pub const NODE_IS_IN_SHADOW_TREE: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_IN_SHADOW_TREE; - pub const NODE_HAS_EMPTY_SELECTOR: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_EMPTY_SELECTOR; - pub const NODE_HAS_SLOW_SELECTOR: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_SLOW_SELECTOR; - pub const NODE_HAS_EDGE_CHILD_SELECTOR: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_EDGE_CHILD_SELECTOR; - pub const NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS; - pub const NODE_ALL_SELECTOR_FLAGS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_ALL_SELECTOR_FLAGS; - pub const NODE_NEEDS_FRAME: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_NEEDS_FRAME; - pub const NODE_DESCENDANTS_NEED_FRAMES: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_DESCENDANTS_NEED_FRAMES; - pub const NODE_HAS_ACCESSKEY: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_ACCESSKEY; - pub const NODE_HAS_DIRECTION_RTL: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_DIRECTION_RTL; - pub const NODE_HAS_DIRECTION_LTR: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_HAS_DIRECTION_LTR; - pub const NODE_ALL_DIRECTION_FLAGS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_ALL_DIRECTION_FLAGS; - pub const NODE_CHROME_ONLY_ACCESS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_CHROME_ONLY_ACCESS; - pub const NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS; - pub const NODE_SHARED_RESTYLE_BIT_1: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_SHARED_RESTYLE_BIT_1; - pub const NODE_SHARED_RESTYLE_BIT_2: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_SHARED_RESTYLE_BIT_2; - pub const NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_SHARED_RESTYLE_BIT_1; - pub const NODE_TYPE_SPECIFIC_BITS_OFFSET: root::_bindgen_ty_105 = - _bindgen_ty_105::NODE_TYPE_SPECIFIC_BITS_OFFSET; - #[repr(u32)] - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] - pub enum _bindgen_ty_105 { + pub const NODE_HAS_LISTENERMANAGER: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_LISTENERMANAGER; + pub const NODE_HAS_PROPERTIES: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_PROPERTIES; + pub const NODE_IS_ANONYMOUS_ROOT: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_ANONYMOUS_ROOT; + pub const NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE: root::_bindgen_ty_28 = + 
_bindgen_ty_28::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE; + pub const NODE_IS_NATIVE_ANONYMOUS_ROOT: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_NATIVE_ANONYMOUS_ROOT; + pub const NODE_FORCE_XBL_BINDINGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_FORCE_XBL_BINDINGS; + pub const NODE_MAY_BE_IN_BINDING_MNGR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_MAY_BE_IN_BINDING_MNGR; + pub const NODE_IS_EDITABLE: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_EDITABLE; + pub const NODE_IS_NATIVE_ANONYMOUS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_NATIVE_ANONYMOUS; + pub const NODE_IS_IN_SHADOW_TREE: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_IN_SHADOW_TREE; + pub const NODE_HAS_EMPTY_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_EMPTY_SELECTOR; + pub const NODE_HAS_SLOW_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_SLOW_SELECTOR; + pub const NODE_HAS_EDGE_CHILD_SELECTOR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_EDGE_CHILD_SELECTOR; + pub const NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS; + pub const NODE_ALL_SELECTOR_FLAGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_ALL_SELECTOR_FLAGS; + pub const NODE_NEEDS_FRAME: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_NEEDS_FRAME; + pub const NODE_DESCENDANTS_NEED_FRAMES: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_DESCENDANTS_NEED_FRAMES; + pub const NODE_HAS_ACCESSKEY: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_ACCESSKEY; + pub const NODE_HAS_DIRECTION_RTL: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_DIRECTION_RTL; + pub const NODE_HAS_DIRECTION_LTR: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_HAS_DIRECTION_LTR; + pub const NODE_ALL_DIRECTION_FLAGS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_ALL_DIRECTION_FLAGS; + pub const NODE_CHROME_ONLY_ACCESS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_CHROME_ONLY_ACCESS; + pub const NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS; + pub const NODE_SHARED_RESTYLE_BIT_1: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_1; + pub const NODE_SHARED_RESTYLE_BIT_2: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_2; + pub const NODE_HAS_DIRTY_DESCENDANTS_FOR_SERVO: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_SHARED_RESTYLE_BIT_1; + pub const NODE_TYPE_SPECIFIC_BITS_OFFSET: root::_bindgen_ty_28 = + _bindgen_ty_28::NODE_TYPE_SPECIFIC_BITS_OFFSET; + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum _bindgen_ty_28 { NODE_HAS_LISTENERMANAGER = 4, NODE_HAS_PROPERTIES = 8, NODE_IS_ANONYMOUS_ROOT = 16, NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE = 32, NODE_IS_NATIVE_ANONYMOUS_ROOT = 64, NODE_FORCE_XBL_BINDINGS = 128, NODE_MAY_BE_IN_BINDING_MNGR = 256, NODE_IS_EDITABLE = 512, @@ -15141,16 +15211,137 @@ pub mod root { NODE_HAS_DIRECTION_LTR = 1048576, NODE_ALL_DIRECTION_FLAGS = 1572864, NODE_CHROME_ONLY_ACCESS = 2097152, NODE_IS_ROOT_OF_CHROME_ONLY_ACCESS = 4194304, NODE_SHARED_RESTYLE_BIT_1 = 8388608, NODE_SHARED_RESTYLE_BIT_2 = 16777216, NODE_TYPE_SPECIFIC_BITS_OFFSET = 23, } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsScanner([u8; 0]); + /*************************************************************** + Notes: + ***************************************************************/ + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsITokenizer { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct 
nsITokenizer_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsITokenizer() { + assert_eq!(::std::mem::size_of::<nsITokenizer>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsITokenizer ) )); + assert_eq! (::std::mem::align_of::<nsITokenizer>() , 8usize , concat ! + ( "Alignment of " , stringify ! ( nsITokenizer ) )); + } + impl Clone for nsITokenizer { + fn clone(&self) -> Self { *self } + } + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum nsDTDMode { + eDTDMode_unknown = 0, + eDTDMode_quirks = 1, + eDTDMode_almost_standards = 2, + eDTDMode_full_standards = 3, + eDTDMode_autodetect = 4, + eDTDMode_fragment = 5, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIContentSink([u8; 0]); + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct CParserContext([u8; 0]); + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIDTD { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIDTD_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIDTD() { + assert_eq!(::std::mem::size_of::<nsIDTD>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsIDTD ) )); + assert_eq! (::std::mem::align_of::<nsIDTD>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( nsIDTD ) )); + } + impl Clone for nsIDTD { + fn clone(&self) -> Self { *self } + } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsParserBase { + pub _base: root::nsISupports, + } + #[test] + fn bindgen_test_layout_nsParserBase() { + assert_eq!(::std::mem::size_of::<nsParserBase>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsParserBase ) )); + assert_eq! (::std::mem::align_of::<nsParserBase>() , 8usize , concat ! + ( "Alignment of " , stringify ! ( nsParserBase ) )); + } + impl Clone for nsParserBase { + fn clone(&self) -> Self { *self } + } + #[repr(u32)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum eParserCommands { + eViewNormal = 0, + eViewSource = 1, + eViewFragment = 2, + eViewErrors = 3, + } + /** + * This GECKO-INTERNAL interface is on track to being REMOVED (or refactored + * to the point of being near-unrecognizable). + * + * Please DO NOT #include this file in comm-central code, in your XULRunner + * app or binary extensions. + * + * Please DO NOT #include this into new files even inside Gecko. It is more + * likely than not that #including this header is the wrong thing to do. + */ + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIParser { + pub _base: root::nsParserBase, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIParser_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIParser() { + assert_eq!(::std::mem::size_of::<nsIParser>() , 8usize , concat ! ( + "Size of: " , stringify ! ( nsIParser ) )); + assert_eq! (::std::mem::align_of::<nsIParser>() , 8usize , concat ! ( + "Alignment of " , stringify ! ( nsIParser ) )); + } + impl Clone for nsIParser { + fn clone(&self) -> Self { *self } + } pub type nscoord = i32; pub type nsIntPoint = root::mozilla::gfx::IntPoint; /** * Currently needs to be 'double' for Cairo compatibility. Could * become 'float', perhaps, in some configurations. 
*/ pub type gfxFloat = f64; #[repr(C)] @@ -15520,19 +15711,16 @@ pub mod root { #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsHTMLStyleSheet([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIBFCacheEntry([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] - pub struct nsIContentSink([u8; 0]); - #[repr(C)] - #[derive(Debug, Copy, Clone)] pub struct nsIDocumentEncoder([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIDOMNodeFilter([u8; 0]); /** * An internal interface */ #[repr(C)] @@ -16933,16 +17121,38 @@ pub mod root { eStyleStruct_Border = 18, eStyleStruct_Outline = 19, eStyleStruct_XUL = 20, eStyleStruct_SVGReset = 21, eStyleStruct_Column = 22, eStyleStruct_Effects = 23, nsStyleStructID_Length = 24, } + #[repr(C)] + #[derive(Debug, Copy)] + pub struct nsIProperties { + pub _base: root::nsISupports, + } + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct nsIProperties_COMTypeInfo<T, U> { + pub _address: u8, + pub _phantom_0: ::std::marker::PhantomData<T>, + pub _phantom_1: ::std::marker::PhantomData<U>, + } + #[test] + fn bindgen_test_layout_nsIProperties() { + assert_eq!(::std::mem::size_of::<nsIProperties>() , 8usize , concat ! + ( "Size of: " , stringify ! ( nsIProperties ) )); + assert_eq! (::std::mem::align_of::<nsIProperties>() , 8usize , concat + ! ( "Alignment of " , stringify ! ( nsIProperties ) )); + } + impl Clone for nsIProperties { + fn clone(&self) -> Self { *self } + } #[repr(u32)] /** * Types of animatable values. */ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum nsStyleAnimType { eStyleAnimType_Custom = 0, eStyleAnimType_Coord = 1, @@ -21715,19 +21925,16 @@ pub mod root { pub struct imgLoader([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct imgCacheEntry([u8; 0]); #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct nsIApplicationCache([u8; 0]); #[repr(C)] - #[derive(Debug, Copy, Clone)] - pub struct nsIProperties([u8; 0]); - #[repr(C)] #[derive(Debug)] pub struct imgRequest { pub _base: root::nsIStreamListener, pub _base_1: root::nsIThreadRetargetableStreamListener, pub _base_2: root::nsIChannelEventSink, pub _base_3: root::nsIInterfaceRequestor, pub _base_4: root::nsIAsyncVerifyRedirectCallback, pub mRefCnt: root::mozilla::ThreadSafeAutoRefCnt,
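The bulk of the two structs_debug.rs / structs_release.rs hunks above is bindgen output: for every bound C++ type it emits a #[test] that pins the Rust mirror's size, alignment, and field offsets, so layout drift (such as the ServoElementSnapshot fields moving from offsets 18/19 to 24/25 after mState grew) is caught by `cargo test` rather than at runtime. A minimal standalone sketch of that pattern, using an invented `Example` struct rather than anything from the changeset:

// Hypothetical struct, not part of the bindings above.
#[repr(C)]
pub struct Example {
    pub first: u64,
    pub second: u8,
}

#[test]
fn bindgen_style_layout_of_example() {
    // With repr(C): second sits at offset 8, and padding rounds the size to 16.
    assert_eq!(::std::mem::size_of::<Example>(), 16usize);
    assert_eq!(::std::mem::align_of::<Example>(), 8usize);
    // Field offset computed the same way the generated tests do it
    // (null-pointer field projection, kept only to mirror bindgen's pattern).
    assert_eq!(unsafe { &(*(0 as *const Example)).second as *const _ as usize },
               8usize);
}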
--- a/servo/components/style/lib.rs +++ b/servo/components/style/lib.rs @@ -167,19 +167,25 @@ macro_rules! reexport_computed_values { } longhand_properties_idents!(reexport_computed_values); /// Returns whether the two arguments point to the same value. /// /// FIXME: Remove this and use Arc::ptr_eq once we require Rust 1.17 #[inline] pub fn arc_ptr_eq<T: 'static>(a: &Arc<T>, b: &Arc<T>) -> bool { - let a: &T = &**a; - let b: &T = &**b; - (a as *const T) == (b as *const T) + ptr_eq::<T>(&**a, &**b) +} + +/// Pointer equality +/// +/// FIXME: Remove this and use std::ptr::eq once we require Rust 1.17 +#[inline] +pub fn ptr_eq<T: ?Sized>(a: *const T, b: *const T) -> bool { + a == b } /// Serializes as CSS a comma-separated list of any `T` that supports being /// serialized as CSS. pub fn serialize_comma_separated_list<W, T>(dest: &mut W, list: &[T]) -> fmt::Result where W: fmt::Write,
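The lib.rs hunk above rewrites arc_ptr_eq in terms of a new ptr_eq helper; both carry FIXMEs to switch to std::ptr::eq / Arc::ptr_eq once Rust 1.17 is the minimum. A self-contained sketch (not from the changeset) of what the helpers do, namely comparing addresses rather than values:

use std::sync::Arc;

// Same shape as the helpers added in lib.rs above, reproduced here so the
// sketch compiles on its own.
fn ptr_eq<T: ?Sized>(a: *const T, b: *const T) -> bool {
    a == b
}

fn arc_ptr_eq<T: 'static>(a: &Arc<T>, b: &Arc<T>) -> bool {
    ptr_eq::<T>(&**a, &**b)
}

fn main() {
    let a = Arc::new(vec![1, 2, 3]);
    let b = a.clone();               // clone of the same allocation
    let c = Arc::new(vec![1, 2, 3]); // equal contents, different allocation

    assert!(arc_ptr_eq(&a, &b));  // same pointer
    assert!(!arc_ptr_eq(&a, &c)); // equal values, but not the same object
}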
--- a/servo/components/style/shared_lock.rs +++ b/servo/components/style/shared_lock.rs @@ -1,62 +1,134 @@ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! Different objects protected by the same lock +#[cfg(feature = "gecko")] +use atomic_refcell::{AtomicRefCell, AtomicRef, AtomicRefMut}; +#[cfg(feature = "servo")] use parking_lot::RwLock; use std::cell::UnsafeCell; use std::fmt; use std::sync::Arc; /// A shared read/write lock that can protect multiple objects. +/// +/// In Gecko builds, we don't need the blocking behavior, just the safety. As +/// such we implement this with an AtomicRefCell instead in Gecko builds, +/// which is ~2x as fast, and panics (rather than deadlocking) when things go +/// wrong (which is much easier to debug on CI). +/// +/// Servo needs the blocking behavior for its unsynchronized animation setup, +/// but that may not be web-compatible and may need to be changed (at which +/// point Servo could use AtomicRefCell too). #[derive(Clone)] #[cfg_attr(feature = "servo", derive(HeapSizeOf))] pub struct SharedRwLock { + #[cfg(feature = "servo")] #[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")] arc: Arc<RwLock<()>>, + + #[cfg(feature = "gecko")] + cell: Arc<AtomicRefCell<SomethingZeroSizedButTyped>>, } +#[cfg(feature = "gecko")] +struct SomethingZeroSizedButTyped; + impl fmt::Debug for SharedRwLock { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("SharedRwLock") } } impl SharedRwLock { - /// Create a new shared lock + /// Create a new shared lock (servo). + #[cfg(feature = "servo")] pub fn new() -> Self { SharedRwLock { arc: Arc::new(RwLock::new(())) } } + /// Create a new shared lock (gecko). + #[cfg(feature = "gecko")] + pub fn new() -> Self { + SharedRwLock { + cell: Arc::new(AtomicRefCell::new(SomethingZeroSizedButTyped)) + } + } + /// Wrap the given data to make its access protected by this lock. pub fn wrap<T>(&self, data: T) -> Locked<T> { Locked { shared_lock: self.clone(), data: UnsafeCell::new(data), } } - /// Obtain the lock for reading + /// Obtain the lock for reading (servo). + #[cfg(feature = "servo")] pub fn read(&self) -> SharedRwLockReadGuard { self.arc.raw_read(); - SharedRwLockReadGuard { - shared_lock: self + SharedRwLockReadGuard(self) + } + + /// Obtain the lock for reading (gecko). + #[cfg(feature = "gecko")] + pub fn read(&self) -> SharedRwLockReadGuard { + SharedRwLockReadGuard(self.cell.borrow()) + } + + /// Obtain the lock for writing (servo). + #[cfg(feature = "servo")] + pub fn write(&self) -> SharedRwLockWriteGuard { + self.arc.raw_write(); + SharedRwLockWriteGuard(self) + } + + /// Obtain the lock for writing (gecko). + #[cfg(feature = "gecko")] + pub fn write(&self) -> SharedRwLockWriteGuard { + SharedRwLockWriteGuard(self.cell.borrow_mut()) + } +} + +/// Proof that a shared lock was obtained for reading (servo). +#[cfg(feature = "servo")] +pub struct SharedRwLockReadGuard<'a>(&'a SharedRwLock); +/// Proof that a shared lock was obtained for writing (gecko). +#[cfg(feature = "gecko")] +pub struct SharedRwLockReadGuard<'a>(AtomicRef<'a, SomethingZeroSizedButTyped>); +#[cfg(feature = "servo")] +impl<'a> Drop for SharedRwLockReadGuard<'a> { + fn drop(&mut self) { + // Unsafe: self.lock is private to this module, only ever set after `raw_read()`, + // and never copied or cloned (see `compile_time_assert` below). 
+ unsafe { + self.0.arc.raw_unlock_read() } } +} - /// Obtain the lock for writing - pub fn write(&self) -> SharedRwLockWriteGuard { - self.arc.raw_write(); - SharedRwLockWriteGuard { - shared_lock: self +/// Proof that a shared lock was obtained for writing (servo). +#[cfg(feature = "servo")] +pub struct SharedRwLockWriteGuard<'a>(&'a SharedRwLock); +/// Proof that a shared lock was obtained for writing (gecko). +#[cfg(feature = "gecko")] +pub struct SharedRwLockWriteGuard<'a>(AtomicRefMut<'a, SomethingZeroSizedButTyped>); +#[cfg(feature = "servo")] +impl<'a> Drop for SharedRwLockWriteGuard<'a> { + fn drop(&mut self) { + // Unsafe: self.lock is private to this module, only ever set after `raw_write()`, + // and never copied or cloned (see `compile_time_assert` below). + unsafe { + self.0.arc.raw_unlock_write() } } } /// Data protect by a shared lock. pub struct Locked<T> { shared_lock: SharedRwLock, data: UnsafeCell<T>, @@ -70,40 +142,46 @@ unsafe impl<T: Send + Sync> Sync for Loc impl<T: fmt::Debug> fmt::Debug for Locked<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let guard = self.shared_lock.read(); self.read_with(&guard).fmt(f) } } impl<T> Locked<T> { + #[cfg(feature = "servo")] fn same_lock_as(&self, lock: &SharedRwLock) -> bool { ::arc_ptr_eq(&self.shared_lock.arc, &lock.arc) } + #[cfg(feature = "gecko")] + fn same_lock_as(&self, derefed_guard: &SomethingZeroSizedButTyped) -> bool { + ::ptr_eq(self.shared_lock.cell.as_ptr(), derefed_guard) + } + /// Access the data for reading. pub fn read_with<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> &'a T { - assert!(self.same_lock_as(&guard.shared_lock), + assert!(self.same_lock_as(&guard.0), "Locked::read_with called with a guard from an unrelated SharedRwLock"); let ptr = self.data.get(); // Unsafe: // // * The guard guarantees that the lock is taken for reading, // and we’ve checked that it’s the correct lock. // * The returned reference borrows *both* the data and the guard, // so that it can outlive neither. unsafe { &*ptr } } /// Access the data for writing. pub fn write_with<'a>(&'a self, guard: &'a mut SharedRwLockWriteGuard) -> &'a mut T { - assert!(self.same_lock_as(&guard.shared_lock), + assert!(self.same_lock_as(&guard.0), "Locked::write_with called with a guard from an unrelated SharedRwLock"); let ptr = self.data.get(); // Unsafe: // // * The guard guarantees that the lock is taken for writing, // and we’ve checked that it’s the correct lock. // * The returned reference borrows *both* the data and the guard, @@ -111,46 +189,16 @@ impl<T> Locked<T> { // * We require a mutable borrow of the guard, // so that one write guard can only be used once at a time. unsafe { &mut *ptr } } } -/// Proof that a shared lock was obtained for reading. -pub struct SharedRwLockReadGuard<'a> { - shared_lock: &'a SharedRwLock, -} - -/// Proof that a shared lock was obtained for writing. -pub struct SharedRwLockWriteGuard<'a> { - shared_lock: &'a SharedRwLock, -} - -impl<'a> Drop for SharedRwLockReadGuard<'a> { - fn drop(&mut self) { - // Unsafe: self.lock is private to this module, only ever set after `raw_read()`, - // and never copied or cloned (see `compile_time_assert` below). - unsafe { - self.shared_lock.arc.raw_unlock_read() - } - } -} - -impl<'a> Drop for SharedRwLockWriteGuard<'a> { - fn drop(&mut self) { - // Unsafe: self.lock is private to this module, only ever set after `raw_write()`, - // and never copied or cloned (see `compile_time_assert` below). 
- unsafe { - self.shared_lock.arc.raw_unlock_write() - } - } -} - #[allow(dead_code)] mod compile_time_assert { use super::{SharedRwLockReadGuard, SharedRwLockWriteGuard}; trait Marker1 {} impl<T: Clone> Marker1 for T {} impl<'a> Marker1 for SharedRwLockReadGuard<'a> {} // Assert SharedRwLockReadGuard: !Clone impl<'a> Marker1 for SharedRwLockWriteGuard<'a> {} // Assert SharedRwLockWriteGuard: !Clone
deleted file mode 100644 --- a/testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/entities.html.ini +++ /dev/null @@ -1,23 +0,0 @@ -[entities.html] - type: testharness - disabled: https://bugzilla.mozilla.org/show_bug.cgi?id=1307710 - expected: - if not debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): CRASH - if not debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): TIMEOUT - if not debug and e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): TIMEOUT - if not debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): CRASH - if not debug and e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): TIMEOUT - if debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and not e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): CRASH - if not debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): CRASH - if not debug and not e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): CRASH - if debug and e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): TIMEOUT - if not debug and not e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): CRASH - if not debug and not e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): CRASH - if debug and e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): TIMEOUT - if not debug and e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): TIMEOUT - if debug and not e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): CRASH - if debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): CRASH
deleted file mode 100644 --- a/testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/tags.html.ini +++ /dev/null @@ -1,23 +0,0 @@ -[tags.html] - type: testharness - disabled: https://bugzilla.mozilla.org/show_bug.cgi?id=1307710 - expected: - if not debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): CRASH - if not debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): TIMEOUT - if not debug and e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): TIMEOUT - if not debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): CRASH - if not debug and e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): TIMEOUT - if debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and not e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): CRASH - if not debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): CRASH - if not debug and not e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): CRASH - if debug and e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): TIMEOUT - if not debug and not e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): CRASH - if not debug and not e10s and (os == "mac") and (version == "OS X 10.10.5") and (processor == "x86_64") and (bits == 64): CRASH - if debug and e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86") and (bits == 32): TIMEOUT - if not debug and e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): TIMEOUT - if debug and e10s and (os == "win") and (version == "6.1.7601") and (processor == "x86") and (bits == 32): TIMEOUT - if debug and not e10s and (os == "win") and (version == "6.2.9200") and (processor == "x86_64") and (bits == 64): CRASH - if debug and not e10s and (os == "linux") and (version == "Ubuntu 16.04") and (processor == "x86_64") and (bits == 64): CRASH
deleted file mode 100644 --- a/testing/web-platform/meta/webvtt/parsing/cue-text-parsing/tests/timestamps.html.ini +++ /dev/null @@ -1,8 +0,0 @@ -[timestamps.html] - type: testharness - [WebVTT cue data parser test timestamps - 47fa4306a695161da88533d456ce94829e53b13d] - expected: FAIL - - [WebVTT cue data parser test timestamps - c1036a4322c1852e02e5a1843a9a81dfca6d7af3] - expected: FAIL -
--- a/toolkit/components/extensions/ExtensionChild.jsm +++ b/toolkit/components/extensions/ExtensionChild.jsm @@ -1019,16 +1019,21 @@ class ContentGlobal { receiveMessage({name, data}) { switch (name) { case "Extension:InitExtensionView": // The view type is initialized once and then fixed. this.global.removeMessageListener("Extension:InitExtensionView", this); this.viewType = data.viewType; + // Force external links to open in tabs. + if (["popup", "sidebar"].includes(this.viewType)) { + this.global.docShell.isAppTab = true; + } + if (data.devtoolsToolboxInfo) { this.devtoolsToolboxInfo = data.devtoolsToolboxInfo; } promiseEvent(this.global, "DOMContentLoaded", true).then(() => { this.global.sendAsyncMessage("Extension:ExtensionViewLoaded"); });
--- a/toolkit/components/extensions/ext-browser-content.js +++ b/toolkit/components/extensions/ext-browser-content.js @@ -43,16 +43,19 @@ const BrowserListener = { this.oldBackground = null; if (allowScriptsToClose) { content.QueryInterface(Ci.nsIInterfaceRequestor) .getInterface(Ci.nsIDOMWindowUtils) .allowScriptsToClose(); } + // Force external links to open in tabs. + docShell.isAppTab = true; + addEventListener("DOMWindowCreated", this, true); addEventListener("load", this, true); addEventListener("DOMContentLoaded", this, true); addEventListener("DOMWindowClose", this, true); addEventListener("MozScrolledAreaChanged", this, true); }, destroy() {
--- a/toolkit/components/extensions/ext-theme.js +++ b/toolkit/components/extensions/ext-theme.js @@ -113,16 +113,21 @@ class Theme { let resolvedURL = this.baseURI.resolve(val); this.lwtStyles.headerURL = resolvedURL; break; } } } } + /** + * Helper method for loading icons found in the extension's manifest. + * + * @param {Object} icons Dictionary mapping icon properties to extension URLs. + */ loadIcons(icons) { if (!Preferences.get("extensions.webextensions.themes.icons.enabled")) { // Return early if icons are disabled. return; } for (let icon of Object.getOwnPropertyNames(icons)) { let val = icons[icon]; @@ -166,18 +171,18 @@ extensions.on("manifest_theme", (type, d let theme = new Theme(extension.baseURI); theme.load(manifest.theme); themeMap.set(extension, theme); }); extensions.on("shutdown", (type, extension) => { let theme = themeMap.get(extension); - // We won't have a theme if theme's aren't enabled. if (!theme) { + // We won't have a theme if themes are disabled. return; } theme.unload(); }); /* eslint-enable mozilla/balanced-listeners */ extensions.registerSchemaAPI("theme", "addon_parent", context => { @@ -188,19 +193,19 @@ extensions.registerSchemaAPI("theme", "a if (!gThemesEnabled) { // Return early if themes are disabled. return; } let theme = themeMap.get(extension); if (!theme) { - // Themes which use `update` will not a theme defined - // in the manifest. Therefore, we need to initialize the - // theme the first time `update` is called. + // WebExtensions using the Theme API will not have a theme defined + // in the manifest. Therefore, we need to initialize the theme the + // first time browser.theme.update is called. theme = new Theme(extension.baseURI); themeMap.set(extension, theme); } theme.load(details); }, }, };
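For context on the ext-theme.js change above: a WebExtension holding the "theme" permission can apply a theme at runtime without declaring one in its manifest, which is why the code lazily creates a Theme object the first time browser.theme.update is called. Below is a minimal, hypothetical background-script sketch; the image path and colour values are invented, and the exact property names accepted by the dynamic theme API are an assumption, not taken from this patch.

    // Hypothetical background script; requires the "theme" permission in
    // manifest.json. Property names follow the lightweight-theme style used
    // elsewhere in this changeset (headerURL, accentcolor, textcolor), but
    // they are illustrative only.
    browser.theme.update({
      images: {
        headerURL: "images/header.png",
      },
      colors: {
        accentcolor: "#203040",
        textcolor: "#ffffff",
      },
    });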
new file mode 100644 --- /dev/null +++ b/tools/lint/docs/linters/eslint-plugin-spidermonkey-js.rst @@ -0,0 +1,15 @@ +============================== +Mozilla ESLint SpiderMonkey JS +============================== + +This plugin currently provides only one item: a processor for the SpiderMonkey +JS code. + +Processors +========== + +The processor is used to pre-process all *.js files and deals with the macros +that SpiderMonkey uses. + +Note: the ESLint --fix option is currently disabled when this processor is +enabled.
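For readers unfamiliar with ESLint processors, the plugin described above exposes one through ESLint's standard processor interface: preprocess() rewrites the source text before linting and postprocess() maps the reported messages back. The sketch below only illustrates that shape; it is not the actual self-hosted.js implementation, and the macro-handling regular expression is an assumption.

    // Illustrative ESLint processor skeleton (not the real self-hosted.js).
    module.exports = {
      preprocess(text, filename) {
        // Comment out SpiderMonkey-style macro lines so the remaining code
        // parses as plain JavaScript, then hand one chunk back to ESLint.
        return [text.replace(/^\s*#(include|define|if|ifdef|else|endif).*$/gm,
                             line => "// " + line)];
      },
      postprocess(messages, filename) {
        // One message array per chunk returned by preprocess(); flatten them.
        return [].concat(...messages);
      }
    };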
--- a/tools/lint/docs/linters/eslint.rst +++ b/tools/lint/docs/linters/eslint.rst @@ -29,17 +29,24 @@ supported configuration, see `ESLint's d ESLint Plugin Mozilla --------------------- In addition to default ESLint rules, there are several Mozilla-specific rules that are defined in the :doc:`Mozilla ESLint Plugin <eslint-plugin-mozilla>`. +ESLint Plugin SpiderMonkey JS +----------------------------- + +In addition to default ESLint rules, there is an extra processor for SpiderMonkey +code; see :doc:`Mozilla ESLint SpiderMonkey JS <eslint-plugin-spidermonkey-js>`. + .. _ESLint: http://eslint.org/ .. _ESLint's documentation: http://eslint.org/docs/user-guide/configuring .. toctree:: :hidden: eslint-plugin-mozilla + eslint-plugin-spidermonkey-js
--- a/tools/lint/eslint.lint +++ b/tools/lint/eslint.lint @@ -79,25 +79,34 @@ def eslint_setup(): return 1 # Install ESLint and external plugins cmd = [npm_path, "install"] print("Installing eslint for mach using \"%s\"..." % (" ".join(cmd))) if not call_process("eslint", cmd): return 1 - # Install in-tree ESLint plugin + # Install in-tree ESLint plugin mozilla. cmd = [npm_path, "install", os.path.join(module_path, "eslint-plugin-mozilla")] print("Installing eslint-plugin-mozilla using \"%s\"..." % (" ".join(cmd))) if not call_process("eslint-plugin-mozilla", cmd): return 1 eslint_path = os.path.join(module_path, "node_modules", ".bin", "eslint") + # Install in-tree ESLint plugin spidermonkey. + cmd = [npm_path, "install", + os.path.join(module_path, "eslint-plugin-spidermonkey-js")] + print("Installing eslint-plugin-spidermonkey-js using \"%s\"..." % (" ".join(cmd))) + if not call_process("eslint-plugin-spidermonkey-js", cmd): + return 1 + + eslint_path = os.path.join(module_path, "node_modules", ".bin", "eslint") + print("\nESLint and approved plugins installed successfully!") print("\nNOTE: Your local eslint binary is at %s\n" % eslint_path) os.chdir(orig_cwd) def call_process(name, cmd, cwd=None): try: @@ -115,22 +124,28 @@ def call_process(name, cmd, cwd=None): def expected_eslint_modules(): # Read the expected version of ESLint and external modules expected_modules_path = os.path.join(get_eslint_module_path(), "package.json") with open(expected_modules_path, "r") as f: expected_modules = json.load(f)["dependencies"] - # Also read the in-tree ESLint plugin version + # Also read the in-tree ESLint plugin mozilla version mozilla_json_path = os.path.join(get_eslint_module_path(), "eslint-plugin-mozilla", "package.json") with open(mozilla_json_path, "r") as f: expected_modules["eslint-plugin-mozilla"] = json.load(f)["version"] + # Also read the in-tree ESLint plugin spidermonkey version + mozilla_json_path = os.path.join(get_eslint_module_path(), + "eslint-plugin-spidermonkey-js", "package.json") + with open(mozilla_json_path, "r") as f: + expected_modules["eslint-plugin-spidermonkey-js"] = json.load(f)["version"] + return expected_modules def eslint_module_has_issues(): has_issues = False node_modules_path = os.path.join(get_eslint_module_path(), "node_modules") for name, version_range in expected_eslint_modules().iteritems():
rename from tools/lint/eslint/eslint-plugin-mozilla/.eslintrc.js rename to tools/lint/eslint/.eslintrc.js
--- a/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js +++ b/tools/lint/eslint/eslint-plugin-mozilla/lib/index.js @@ -15,18 +15,17 @@ module.exports = { environments: { "browser-window": require("../lib/environments/browser-window.js"), "chrome-worker": require("../lib/environments/chrome-worker.js"), "frame-script": require("../lib/environments/frame-script.js"), "places-overlay": require("../lib/environments/places-overlay.js"), "simpletest": require("../lib/environments/simpletest.js") }, processors: { - ".xml": require("../lib/processors/xbl-bindings"), - ".js": require("../lib/processors/self-hosted") + ".xml": require("../lib/processors/xbl-bindings") }, rules: { "avoid-removeChild": require("../lib/rules/avoid-removeChild"), "avoid-nsISupportsString-preferences": require("../lib/rules/avoid-nsISupportsString-preferences"), "balanced-listeners": require("../lib/rules/balanced-listeners"), "import-globals": require("../lib/rules/import-globals"), "import-headjs-globals": require("../lib/rules/import-headjs-globals"),
--- a/tools/lint/eslint/eslint-plugin-mozilla/package.json +++ b/tools/lint/eslint/eslint-plugin-mozilla/package.json @@ -1,11 +1,11 @@ { "name": "eslint-plugin-mozilla", - "version": "0.2.30", + "version": "0.2.31", "description": "A collection of rules that help enforce JavaScript coding standard in the Mozilla project.", "keywords": [ "eslint", "eslintplugin", "eslint-plugin", "mozilla", "firefox" ],
new file mode 100644 --- /dev/null +++ b/tools/lint/eslint/eslint-plugin-spidermonkey-js/LICENSE @@ -0,0 +1,363 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. 
under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. 
You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. 
Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. 
Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. +
new file mode 100644 --- /dev/null +++ b/tools/lint/eslint/eslint-plugin-spidermonkey-js/lib/index.js @@ -0,0 +1,17 @@ +/** + * @fileoverview A processor to help ESLint parse SpiderMonkey JS code. + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. + */ + +"use strict"; + +//------------------------------------------------------------------------------ +// Plugin Definition +//------------------------------------------------------------------------------ +module.exports = { + processors: { + ".js": require("../lib/processors/self-hosted") + } +};
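As a usage note, ESLint applies an extension-keyed processor such as the ".js" entry above to files in any directory whose configuration enables the plugin. A hypothetical directory-local .eslintrc.js (not part of this changeset) could therefore look like:

    // Hypothetical .eslintrc.js for a SpiderMonkey source directory; listing
    // the plugin is enough for its ".js" processor to run on these files.
    module.exports = {
      "plugins": [
        "spidermonkey-js"
      ]
    };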
rename from tools/lint/eslint/eslint-plugin-mozilla/lib/processors/self-hosted.js rename to tools/lint/eslint/eslint-plugin-spidermonkey-js/lib/processors/self-hosted.js
new file mode 100644 --- /dev/null +++ b/tools/lint/eslint/eslint-plugin-spidermonkey-js/package.json @@ -0,0 +1,28 @@ +{ + "name": "eslint-plugin-spidermonkey-js", + "version": "0.1.0", + "description": "A collection of rules that help enforce JavaScript coding standard in the Mozilla SpiderMonkey project.", + "keywords": [ + "eslint", + "eslintplugin", + "eslint-plugin", + "mozilla", + "spidermonkey" + ], + "bugs": { + "url": "https://bugzilla.mozilla.org/enter_bug.cgi?product=Testing&component=Lint" + }, + "homepage": "http://gecko.readthedocs.io/en/latest/tools/lint/linters/eslint-plugin-spidermonkey-js.html", + "repository": { + "type": "hg", + "url": "https://hg.mozilla.org/mozilla-central/" + }, + "author": "Mozilla", + "main": "lib/index.js", + "dependencies": { + }, + "engines": { + "node": ">=6.9.1" + }, + "license": "MPL-2.0" +}