Merge inbound to mozilla-central. a=merge
author       Bogdan Tara <btara@mozilla.com>
date         Sun, 25 Aug 2019 12:42:11 +0300
changeset    489783  f6a1009171e521c4917dcaccf69aba9093467e99
parent       489778  5e92e40b13deb419f91b26ff2471bae09aa94cdd (current diff)
parent       489782  6dd86b824d3f934dc2d0b5dfa8a3d042cf383612 (diff)
child        489784  74ce25be0e9aad7dd95ffde440364adb27ced6e7
child        489792  327cefb3bf5e2de766f34b8cd5c0af13f0897364
push id      36484
push user    btara@mozilla.com
push date    Sun, 25 Aug 2019 09:42:43 +0000
reviewers    merge
milestone    70.0a1
--- a/testing/web-platform/meta/css/css-ui/appearance-textfield-001.html.ini
+++ b/testing/web-platform/meta/css/css-ui/appearance-textfield-001.html.ini
@@ -1,6 +1,7 @@
 [appearance-textfield-001.html]
   expected:
     if (os == "win") and debug and not webrender and (processor == "x86"): [FAIL, PASS]
     if (os == "win") and debug and webrender: FAIL
     if (os == "win") and debug and not webrender and (processor == "x86_64"): [PASS, FAIL]
     if (os == "win") and (processor == "x86_64"): FAIL
+    if (os == "win") and not debug: [PASS, FAIL]
--- a/toolkit/components/telemetry/Histograms.json
+++ b/toolkit/components/telemetry/Histograms.json
@@ -6796,43 +6796,16 @@
     "kind": "categorical",
     "keyed": true,
     "labels": ["up_to_date", "success", "backoff", "pref_disabled", "parse_error", "content_error", "sign_error", "sign_retry_error", "conflict_error", "sync_error", "apply_error", "server_error", "certificate_error", "download_error", "timeout_error", "network_error", "offline_error", "cleanup_error", "unknown_error", "custom_1_error", "custom_2_error", "custom_3_error", "custom_4_error", "custom_5_error"],
     "releaseChannelCollection": "opt-out",
     "alert_emails": ["storage-team@mozilla.com"],
     "bug_numbers": [1254099],
     "description": "Generic histogram to track uptake of remote content like blocklists, settings or updates."
   },
-  "THUNDERBIRD_GLODA_SIZE_MB": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "linear",
-    "high": 1000,
-    "n_buckets": 40,
-    "description": "Gloda: size of global-messages-db.sqlite (MB)"
-  },
-  "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "exponential",
-    "high": 10000,
-    "n_buckets": 30,
-    "description": "Conversations: time between the moment we click and the second gloda query returns (ms)"
-  },
-  "THUNDERBIRD_INDEXING_RATE_MSG_PER_S": {
-    "record_in_processes": ["main", "content"],
-    "products": ["firefox", "fennec", "geckoview"],
-    "expires_in_version": "never",
-    "kind": "linear",
-    "high": 100,
-    "n_buckets": 20,
-    "description": "Gloda: indexing rate (message/s)"
-  },
   "FX_GESTURE_INSTALL_SNAPSHOT_OF_PAGE": {
     "record_in_processes": ["main", "content"],
     "products": ["firefox", "fennec", "geckoview"],
     "expires_in_version": "50",
     "kind": "exponential",
     "high": 1000,
     "n_buckets": 30,
     "description": "Firefox: Time taken to store the image capture of the page to a canvas, for reuse while swiping through history (ms)."
--- a/toolkit/components/telemetry/build_scripts/gen_event_data.py
+++ b/toolkit/components/telemetry/build_scripts/gen_event_data.py
@@ -133,23 +133,24 @@ def write_event_table(events, output, st
 def generate_JSON_definitions(output, *filenames):
     """ Write the event definitions to a JSON file.
 
     :param output: the file to write the content to.
     :param filenames: a list of filenames provided by the build system.
            We only support a single file.
     """
     # Load the event data.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     event_definitions = OrderedDict()
     for event in events:
         category = event.category
 
         if category not in event_definitions:
             event_definitions[category] = OrderedDict()
 
@@ -165,23 +166,24 @@ def generate_JSON_definitions(output, *f
             'products': event.products,
         })
 
     json.dump(event_definitions, output)
 
 
 def main(output, *filenames):
     # Load the event data.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     # Write the scalar data file.
     print(banner, file=output)
     print(file_header, file=output)
 
     # Write the extra keys table.
     string_table = StringTable()
     extra_table = write_extra_table(events, output, string_table)
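
The accumulate-then-fail-fast loop above is repeated almost verbatim in the generators that follow (gen_event_enum.py, gen_scalar_data.py, gen_scalar_enum.py). A minimal sketch of the shared pattern, not part of this patch; the helper name is hypothetical and the ParserError import path is assumed to match what the generators already use:

    from __future__ import print_function
    import sys
    from mozparsers.shared_telemetry_utils import ParserError  # assumed import path

    def load_definitions(load_one, filenames):
        # Accumulate probe definitions from every input file instead of
        # insisting on exactly one; abort the build on the first parse error.
        definitions = []
        for filename in filenames:
            try:
                definitions.extend(load_one(filename))
            except ParserError as ex:
                print("\nError processing %s:\n%s\n" % (filename, ex), file=sys.stderr)
                sys.exit(1)
        return definitions

    # Usage: events  = load_definitions(lambda f: parse_events.load_events(f, True), filenames)
    #        scalars = load_definitions(parse_scalars.load_scalars, filenames)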
--- a/toolkit/components/telemetry/build_scripts/gen_event_enum.py
+++ b/toolkit/components/telemetry/build_scripts/gen_event_enum.py
@@ -29,24 +29,24 @@ file_footer = """\
 } // namespace mozilla
 } // namespace Telemetry
 #endif // mozilla_TelemetryEventEnums_h
 """
 
 
 def main(output, *filenames):
     # Load the events first.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        events = parse_events.load_events(filenames[0], True)
-    except ParserError as ex:
-        print("\nError processing events:\n" + str(ex) + "\n")
-        sys.exit(1)
+    events = []
+    for filename in filenames:
+        try:
+            batch = parse_events.load_events(filename, True)
+            events.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     grouped = dict()
     index = 0
     for e in events:
         category = e.category
         if category not in grouped:
             grouped[category] = []
         grouped[category].append((index, e))
--- a/toolkit/components/telemetry/build_scripts/gen_scalar_data.py
+++ b/toolkit/components/telemetry/build_scripts/gen_scalar_data.py
@@ -104,24 +104,25 @@ def write_scalar_tables(scalars, output)
     for name, indexes in store_table:
         print("/* %s */ %s," % (name, ", ".join(map(str, indexes))), file=output)
     print("};", file=output)
     static_assert(output, "sizeof(%s) <= UINT16_MAX" % store_table_name,
                   "index overflow")
 
 
 def parse_scalar_definitions(filenames):
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        return parse_scalars.load_scalars(filenames[0])
-    except ParserError as ex:
-        print("\nError processing scalars:\n" + str(ex) + "\n")
-        sys.exit(1)
+    scalars = []
+    for filename in filenames:
+        try:
+            batch = parse_scalars.load_scalars(filename)
+            scalars.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
+    return scalars
 
 
 def generate_JSON_definitions(output, *filenames):
     """ Write the scalar definitions to a JSON file.
 
     :param output: the file to write the content to.
     :param filenames: a list of filenames provided by the build system.
            We only support a single file.
--- a/toolkit/components/telemetry/build_scripts/gen_scalar_enum.py
+++ b/toolkit/components/telemetry/build_scripts/gen_scalar_enum.py
@@ -30,24 +30,24 @@ file_footer = """\
 } // namespace mozilla
 } // namespace Telemetry
 #endif // mozilla_TelemetryScalarEnums_h
 """
 
 
 def main(output, *filenames):
     # Load the scalars first.
-    if len(filenames) > 1:
-        raise Exception('We don\'t support loading from more than one file.')
-
-    try:
-        scalars = parse_scalars.load_scalars(filenames[0])
-    except ParserError as ex:
-        print("\nError processing scalars:\n" + str(ex) + "\n")
-        sys.exit(1)
+    scalars = []
+    for filename in filenames:
+        try:
+            batch = parse_scalars.load_scalars(filename)
+            scalars.extend(batch)
+        except ParserError as ex:
+            print("\nError processing %s:\n%s\n" % (filename, str(ex)), file=sys.stderr)
+            sys.exit(1)
 
     # Write the enum file.
     print(banner, file=output)
     print(file_header, file=output)
 
     for s in scalars:
         if s.record_on_os(buildconfig.substs["OS_TARGET"]):
             print("  %s," % s.enum_label, file=output)
--- a/toolkit/components/telemetry/build_scripts/mozparsers/parse_histograms.py
+++ b/toolkit/components/telemetry/build_scripts/mozparsers/parse_histograms.py
@@ -686,17 +686,17 @@ def load_histograms_into_dict(ordered_pa
         d[key] = value
     return d
 
 
 # We support generating histograms from multiple different input files, not
 # just Histograms.json.  For each file's basename, we have a specific
 # routine to parse that file, and return a dictionary mapping histogram
 # names to histogram parameters.
-def from_Histograms_json(filename, strict_type_checks):
+def from_json(filename, strict_type_checks):
     with open(filename, 'r') as f:
         try:
             def hook(ps):
                 return load_histograms_into_dict(ps, strict_type_checks)
             histograms = json.load(f, object_pairs_hook=hook)
         except ValueError, e:
             ParserError("error parsing histograms in %s: %s" % (filename, e.message)).handle_now()
     return histograms
@@ -726,41 +726,49 @@ def from_nsDeprecatedOperationList(filen
                     'description': 'Whether a %s used %s' % (context, op)
                 }
             add_counter('document')
             add_counter('page')
 
     return histograms
 
 
-FILENAME_PARSERS = {
-    'Histograms.json': from_Histograms_json,
-    'nsDeprecatedOperationList.h': from_nsDeprecatedOperationList,
-}
+FILENAME_PARSERS = [
+    (lambda x: from_json if x.endswith('.json') else None),
+    (lambda x: from_nsDeprecatedOperationList if x == 'nsDeprecatedOperationList.h' else None),
+]
 
 # Similarly to the dance above with buildconfig, usecounters may not be
 # available, so handle that gracefully.
 try:
     import usecounters
 
-    FILENAME_PARSERS['UseCounters.conf'] = from_UseCounters_conf
+    FILENAME_PARSERS.append(lambda x: from_UseCounters_conf if x == 'UseCounters.conf' else None)
 except ImportError:
     pass
 
 
 def from_files(filenames, strict_type_checks=True):
     """Return an iterator that provides a sequence of Histograms for
 the histograms defined in filenames.
     """
     if strict_type_checks:
         load_allowlist()
 
     all_histograms = OrderedDict()
     for filename in filenames:
-        parser = FILENAME_PARSERS[os.path.basename(filename)]
+        parser = None
+        for checkFn in FILENAME_PARSERS:
+            parser = checkFn(os.path.basename(filename))
+            if parser is not None:
+                break
+
+        if parser is None:
+            ParserError("Don't know how to parse %s." % filename).handle_now()
+
         histograms = parser(filename, strict_type_checks)
 
         # OrderedDicts are important, because then the iteration order over
         # the parsed histograms is stable, which makes the insertion into
         # all_histograms stable, which makes ordering in generated files
         # stable, which makes builds more deterministic.
         if not isinstance(histograms, OrderedDict):
             ParserError("Histogram parser did not provide an OrderedDict.").handle_now()
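
Spelled out on its own, the lookup that the from_files() loop above now performs: FILENAME_PARSERS has become an ordered list of callables, each of which inspects a basename and returns a parser function or None, and the first non-None result wins. A standalone sketch using the definitions above (the example file names are illustrative only):

    import os

    def resolve_parser(path):
        basename = os.path.basename(path)
        for check in FILENAME_PARSERS:
            parser = check(basename)
            if parser is not None:
                return parser
        return None  # from_files() reports this case as a ParserError

    resolve_parser("Histograms.json")              # -> from_json (any *.json basename)
    resolve_parser("ExtraHistograms.json")         # -> from_json, e.g. an application-provided file
    resolve_parser("nsDeprecatedOperationList.h")  # -> from_nsDeprecatedOperationList
    resolve_parser("README.txt")                   # -> None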
--- a/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py
+++ b/toolkit/components/telemetry/build_scripts/mozparsers/shared_telemetry_utils.py
@@ -24,16 +24,17 @@ KNOWN_PROCESS_FLAGS = {
     'all_childs': 'AllChildren',  # Supporting files from before bug 1363725
 }
 
 SUPPORTED_PRODUCTS = {
     'firefox': 'Firefox',
     'fennec': 'Fennec',
     'geckoview': 'Geckoview',
     'geckoview_streaming': 'GeckoviewStreaming',
+    'thunderbird': 'Thunderbird',
 }
 
 SUPPORTED_OPERATING_SYSTEMS = [
     'mac',
     'linux',
     'windows',
     'android',
     'unix',
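
With 'thunderbird' registered here, probe definitions can list it in their products field, and it maps onto the SupportedProduct::Thunderbird enumerator added to TelemetryCommon.h below. A rough sketch of that mapping, not the actual parser helper:

    def product_enum_labels(products):
        # Validate a probe's products list and translate it into the C++
        # enumerator names used by the generated headers.
        unknown = [p for p in products if p not in SUPPORTED_PRODUCTS]
        if unknown:
            raise ValueError("unsupported products: %s" % ", ".join(unknown))
        return ["SupportedProduct::%s" % SUPPORTED_PRODUCTS[p] for p in products]

    product_enum_labels(["firefox", "thunderbird"])
    # -> ['SupportedProduct::Firefox', 'SupportedProduct::Thunderbird']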
--- a/toolkit/components/telemetry/core/TelemetryCommon.cpp
+++ b/toolkit/components/telemetry/core/TelemetryCommon.cpp
@@ -183,16 +183,18 @@ SupportedProduct GetCurrentProduct() {
 #if defined(MOZ_WIDGET_ANDROID)
   if (mozilla::StaticPrefs::toolkit_telemetry_geckoview_streaming()) {
     return SupportedProduct::GeckoviewStreaming;
   } else if (mozilla::StaticPrefs::toolkit_telemetry_isGeckoViewMode()) {
     return SupportedProduct::Geckoview;
   } else {
     return SupportedProduct::Fennec;
   }
+#elif defined(MOZ_THUNDERBIRD)
+  return SupportedProduct::Thunderbird;
 #else
   return SupportedProduct::Firefox;
 #endif
 }
 
 }  // namespace Common
 }  // namespace Telemetry
 }  // namespace mozilla
--- a/toolkit/components/telemetry/core/TelemetryCommon.h
+++ b/toolkit/components/telemetry/core/TelemetryCommon.h
@@ -34,16 +34,17 @@ static_assert(static_cast<uint16_t>(Reco
               "Main process type must be equal to 1 to allow easy matching in "
               "CanRecordInProcess");
 
 enum class SupportedProduct : uint8_t {
   Firefox = (1 << 0),
   Fennec = (1 << 1),
   Geckoview = (1 << 2),
   GeckoviewStreaming = (1 << 3),
+  Thunderbird = (1 << 4),
 };
 MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(SupportedProduct);
 
 template <class EntryType>
 class AutoHashtable : public nsTHashtable<EntryType> {
  public:
   explicit AutoHashtable(
       uint32_t initLength = PLDHashTable::kDefaultInitialLength);
--- a/toolkit/components/telemetry/histogram-allowlists.json
+++ b/toolkit/components/telemetry/histogram-allowlists.json
@@ -397,19 +397,16 @@
     "STUMBLER_VOLUME_BYTES_UPLOADED_PER_SEC",
     "SUBPROCESS_ABNORMAL_ABORT",
     "SUBPROCESS_CRASHES_WITH_DUMP",
     "SYSTEM_FONT_FALLBACK",
     "SYSTEM_FONT_FALLBACK_FIRST",
     "SYSTEM_FONT_FALLBACK_SCRIPT",
     "TAP_TO_LOAD_ENABLED",
     "TAP_TO_LOAD_IMAGE_SIZE",
-    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
-    "THUNDERBIRD_GLODA_SIZE_MB",
-    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TRANSLATED_CHARACTERS",
     "TRANSLATED_PAGES",
     "TRANSLATED_PAGES_BY_LANGUAGE",
     "TRANSLATION_OPPORTUNITIES",
     "TRANSLATION_OPPORTUNITIES_BY_LANGUAGE",
     "WEAVE_COMPLETE_SUCCESS_COUNT",
     "WEAVE_CONFIGURED",
@@ -1034,19 +1031,16 @@
     "TELEMETRY_TEST_FLAG",
     "TELEMETRY_TEST_KEYED_COUNT",
     "TELEMETRY_TEST_KEYED_COUNT_INIT_NO_RECORD",
     "TELEMETRY_TEST_KEYED_FLAG",
     "TELEMETRY_TEST_KEYED_RELEASE_OPTIN",
     "TELEMETRY_TEST_KEYED_RELEASE_OPTOUT",
     "TELEMETRY_TEST_RELEASE_OPTIN",
     "TELEMETRY_TEST_RELEASE_OPTOUT",
-    "THUNDERBIRD_CONVERSATIONS_TIME_TO_2ND_GLODA_QUERY_MS",
-    "THUNDERBIRD_GLODA_SIZE_MB",
-    "THUNDERBIRD_INDEXING_RATE_MSG_PER_S",
     "TLS_ERROR_REPORT_UI",
     "TOTAL_CONTENT_PAGE_LOAD_TIME",
     "TOTAL_COUNT_HIGH_ERRORS",
     "TOTAL_COUNT_LOW_ERRORS",
     "TRANSACTION_WAIT_TIME_HTTP",
     "TRANSACTION_WAIT_TIME_SPDY",
     "TRANSLATED_CHARACTERS",
     "TRANSLATED_PAGES",
--- a/toolkit/components/telemetry/moz.build
+++ b/toolkit/components/telemetry/moz.build
@@ -153,16 +153,18 @@ GENERATED_FILES = [
 ]
 
 # Generate histogram files.
 histogram_files = [
     'Histograms.json',
     '/dom/base/UseCounters.conf',
     '/dom/base/nsDeprecatedOperationList.h',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES']:
+    histogram_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES'])
 
 data = GENERATED_FILES['TelemetryHistogramData.inc']
 data.script = 'build_scripts/gen_histogram_data.py'
 data.inputs = histogram_files
 
 enums = GENERATED_FILES['TelemetryHistogramEnums.h']
 enums.script = 'build_scripts/gen_histogram_enum.py'
 enums.inputs = histogram_files
@@ -170,16 +172,18 @@ enums.inputs = histogram_files
 data = GENERATED_FILES['TelemetryHistogramNameMap.h']
 data.script = 'build_scripts/gen_histogram_phf.py'
 data.inputs = histogram_files
 
 # Generate scalar files.
 scalar_files = [
     'Scalars.yaml',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES']:
+    scalar_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_SCALAR_FILES'])
 
 scalar_data = GENERATED_FILES['TelemetryScalarData.h']
 scalar_data.script = 'build_scripts/gen_scalar_data.py'
 scalar_data.inputs = scalar_files
 
 scalar_enums = GENERATED_FILES['TelemetryScalarEnums.h']
 scalar_enums.script = 'build_scripts/gen_scalar_enum.py'
 scalar_enums.inputs = scalar_files
@@ -192,16 +196,18 @@ scalar_json_data.inputs = scalar_files
 
 # Move the scalars JSON file to the directory where the Firefox binary is.
 FINAL_TARGET_FILES += ['!ScalarArtifactDefinitions.json']
 
 # Generate event files.
 event_files = [
     'Events.yaml',
 ]
+if CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES']:
+    event_files.extend(CONFIG['MOZ_TELEMETRY_EXTRA_EVENT_FILES'])
 
 event_data = GENERATED_FILES['TelemetryEventData.h']
 event_data.script = 'build_scripts/gen_event_data.py'
 event_data.inputs = event_files
 
 event_enums = GENERATED_FILES['TelemetryEventEnums.h']
 event_enums.script = 'build_scripts/gen_event_enum.py'
 event_enums.inputs = event_files
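
Taken together with the parser changes above, the three MOZ_TELEMETRY_EXTRA_*_FILES substitutions let an embedding application (Thunderbird being the obvious consumer, given the new product support) append its own probe definition files without patching this moz.build. Once the application's build configuration defines them, the generator inputs expand to something like the following sketch; the comm-central paths are illustrative only:

    histogram_files = [
        'Histograms.json',
        '/dom/base/UseCounters.conf',
        '/dom/base/nsDeprecatedOperationList.h',
        # appended via MOZ_TELEMETRY_EXTRA_HISTOGRAM_FILES, e.g.:
        '/comm/mail/components/telemetry/Histograms.json',
    ]
    scalar_files = ['Scalars.yaml', '/comm/mail/components/telemetry/Scalars.yaml']
    event_files = ['Events.yaml', '/comm/mail/components/telemetry/Events.yaml']

As with the existing '/dom/base/UseCounters.conf' entry, paths that begin with '/' are resolved relative to the top of the source tree.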