--- a/dom/media/PeerConnection.js
+++ b/dom/media/PeerConnection.js
@@ -1185,69 +1185,46 @@ RTCPeerConnection.prototype = {
return this._chain(() => new this._win.Promise((resolve, reject) => {
this._onGetStatsSuccess = resolve;
this._onGetStatsFailure = reject;
this._impl.getStats(selector);
}));
});
},
- createDataChannel: function(label, dict) {
+ createDataChannel: function(label, {
+ maxRetransmits, ordered, negotiated,
+ id = 0xFFFF,
+ maxRetransmitTime,
+ maxPacketLifeTime = maxRetransmitTime,
+ protocol,
+ } = {}) {
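+ // Note: id defaults to 0xFFFF, the previous "no id supplied" fallback, and
+ // maxPacketLifeTime falls back to the value of the deprecated
+ // maxRetransmitTime alias when only the old name is given.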
this._checkClosed();
- if (dict == undefined) {
- dict = {};
- }
- if (dict.maxRetransmitNum != undefined) {
- dict.maxRetransmits = dict.maxRetransmitNum;
- this.logWarning("Deprecated RTCDataChannelInit dictionary entry maxRetransmitNum used!");
- }
- if (dict.outOfOrderAllowed != undefined) {
- dict.ordered = !dict.outOfOrderAllowed; // the meaning is swapped with
- // the name change
- this.logWarning("Deprecated RTCDataChannelInit dictionary entry outOfOrderAllowed used!");
+
+ if (maxRetransmitTime !== undefined) {
+ this.logWarning("Use maxPacketLifeTime instead of deprecated maxRetransmitTime which will stop working soon in createDataChannel!");
}
-
- if (dict.preset != undefined) {
- dict.negotiated = dict.preset;
- this.logWarning("Deprecated RTCDataChannelInit dictionary entry preset used!");
- }
- if (dict.stream != undefined) {
- dict.id = dict.stream;
- this.logWarning("Deprecated RTCDataChannelInit dictionary entry stream used!");
- }
-
- if (dict.maxRetransmitTime !== null && dict.maxRetransmits !== null) {
+ if (maxPacketLifeTime !== undefined && maxRetransmits !== undefined) {
throw new this._win.DOMException(
- "Both maxRetransmitTime and maxRetransmits cannot be provided",
+ "Both maxPacketLifeTime and maxRetransmits cannot be provided",
"InvalidParameterError");
}
- let protocol;
- if (dict.protocol == undefined) {
- protocol = "";
- } else {
- protocol = dict.protocol;
- }
-
// Must determine the type where we still know if entries are undefined.
let type;
- if (dict.maxRetransmitTime != undefined) {
+ if (maxPacketLifeTime !== undefined) {
type = Ci.IPeerConnection.kDataChannelPartialReliableTimed;
- } else if (dict.maxRetransmits != undefined) {
+ } else if (maxRetransmits !== undefined) {
type = Ci.IPeerConnection.kDataChannelPartialReliableRexmit;
} else {
type = Ci.IPeerConnection.kDataChannelReliable;
}
-
// Synchronous since it doesn't block.
- let channel = this._impl.createDataChannel(
- label, protocol, type, !dict.ordered, dict.maxRetransmitTime,
- dict.maxRetransmits, dict.negotiated ? true : false,
- dict.id != undefined ? dict.id : 0xFFFF
- );
- return channel;
+ return this._impl.createDataChannel(label, protocol, type, ordered,
+ maxPacketLifeTime, maxRetransmits,
+ negotiated, id);
}
};
// This is a separate object because we don't want to expose it to DOM.
function PeerConnectionObserver() {
this._dompc = null;
}
PeerConnectionObserver.prototype = {
--- a/dom/webidl/PeerConnectionImpl.webidl
+++ b/dom/webidl/PeerConnectionImpl.webidl
@@ -96,12 +96,12 @@ interface PeerConnectionImpl {
attribute DOMString id;
attribute DOMString peerIdentity;
readonly attribute boolean privacyRequested;
/* Data channels */
[Throws]
DataChannel createDataChannel(DOMString label, DOMString protocol,
- unsigned short type, boolean outOfOrderAllowed,
+ unsigned short type, boolean ordered,
unsigned short maxTime, unsigned short maxNum,
boolean externalNegotiated, unsigned short stream);
};
--- a/dom/webidl/RTCPeerConnection.webidl
+++ b/dom/webidl/RTCPeerConnection.webidl
@@ -33,28 +33,25 @@ enum RTCIceConnectionState {
"connected",
"completed",
"failed",
"disconnected",
"closed"
};
dictionary RTCDataChannelInit {
- boolean ordered = true;
- unsigned short? maxRetransmitTime = null;
- unsigned short? maxRetransmits = null;
- DOMString protocol = "";
- boolean negotiated = false; // spec currently says 'true'; we disagree
- unsigned short? id = null;
+ boolean ordered = true;
+ unsigned short maxPacketLifeTime;
+ unsigned short maxRetransmits;
+ DOMString protocol = "";
+ boolean negotiated = false;
+ unsigned short id;
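+ // No defaults for maxPacketLifeTime, maxRetransmits and id, so the
+ // implementation can tell whether the caller provided them.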
- // these are deprecated due to renaming in the spec, but still supported for Fx22
- boolean outOfOrderAllowed; // now ordered, and the default changes to keep behavior the same
- unsigned short maxRetransmitNum; // now maxRetransmits
- boolean preset; // now negotiated
- unsigned short stream; // now id
+ // Deprecated due to a rename in the spec, but still supported for Fx53
+ unsigned short maxRetransmitTime;
};
dictionary RTCOfferAnswerOptions {
// boolean voiceActivityDetection = true; // TODO: support this (Bug 1184712)
};
dictionary RTCAnswerOptions : RTCOfferAnswerOptions {
};
--- a/layout/generic/nsTextFrame.cpp
+++ b/layout/generic/nsTextFrame.cpp
@@ -2059,29 +2059,16 @@ BuildTextRunsScanner::BuildTextRunForFra
uint32_t nextBreakIndex = 0;
nsTextFrame* nextBreakBeforeFrame = GetNextBreakBeforeFrame(&nextBreakIndex);
bool isSVG = mLineContainer->IsSVGText();
bool enabledJustification =
(mLineContainer->StyleText()->mTextAlign == NS_STYLE_TEXT_ALIGN_JUSTIFY ||
mLineContainer->StyleText()->mTextAlignLast == NS_STYLE_TEXT_ALIGN_JUSTIFY);
- // for word-break style
- switch (mLineContainer->StyleText()->mWordBreak) {
- case NS_STYLE_WORDBREAK_BREAK_ALL:
- mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_BreakAll);
- break;
- case NS_STYLE_WORDBREAK_KEEP_ALL:
- mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_KeepAll);
- break;
- default:
- mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_Normal);
- break;
- }
-
const nsStyleText* textStyle = nullptr;
const nsStyleFont* fontStyle = nullptr;
nsStyleContext* lastStyleContext = nullptr;
for (uint32_t i = 0; i < mMappedFlows.Length(); ++i) {
MappedFlow* mappedFlow = &mMappedFlows[i];
nsTextFrame* f = mappedFlow->mStartFrame;
lastStyleContext = f->StyleContext();
@@ -2550,16 +2537,29 @@ HasCompressedLeadingWhitespace(nsTextFra
}
return false;
}
void
BuildTextRunsScanner::SetupBreakSinksForTextRun(gfxTextRun* aTextRun,
const void* aTextPtr)
{
+ // for word-break style
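+ // This switch was moved here from BuildTextRunForFrames so the current
+ // word-break style is applied each time break sinks are set up
+ // (exercised by wordbreak-dynamic-1.html).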
+ switch (mLineContainer->StyleText()->mWordBreak) {
+ case NS_STYLE_WORDBREAK_BREAK_ALL:
+ mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_BreakAll);
+ break;
+ case NS_STYLE_WORDBREAK_KEEP_ALL:
+ mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_KeepAll);
+ break;
+ default:
+ mLineBreaker.SetWordBreak(nsILineBreaker::kWordBreak_Normal);
+ break;
+ }
+
// textruns have uniform language
const nsStyleFont *styleFont = mMappedFlows[0].mStartFrame->StyleFont();
// We should only use a language for hyphenation if it was specified
// explicitly.
nsIAtom* hyphenationLanguage =
styleFont->mExplicitLanguage ? styleFont->mLanguage.get() : nullptr;
// We keep this pointed at the skip-chars data for the current mappedFlow.
// This lets us cheaply check whether the flow has compressed initial
--- a/layout/reftests/text/reftest.list
+++ b/layout/reftests/text/reftest.list
@@ -117,16 +117,17 @@ HTTP(..) == variation-selector-unsupport
skip-if(Android) == wordbreak-4a.html wordbreak-4a-ref.html
== wordbreak-4b.html wordbreak-4b-ref.html
== wordbreak-5.html wordbreak-5-ref.html
fails-if(/^Windows\x20NT\x2010\.0/.test(http.oscpu)) == wordbreak-6.html wordbreak-6-ref.html # Bug 1258239
HTTP(..) == wordbreak-7a.html wordbreak-7a-ref.html
fails HTTP(..) == wordbreak-7b.html wordbreak-7b-ref.html # bug 479829
== wordbreak-8.html wordbreak-8-ref.html
pref(gfx.font_rendering.graphite.enabled,true) HTTP(..) == wordbreak-9.html wordbreak-9-ref.html
+== wordbreak-dynamic-1.html wordbreak-dynamic-1-ref.html
== wordwrap-01.html wordwrap-01-ref.html
HTTP(..) == wordwrap-02.html wordwrap-02-ref.html
fuzzy-if(gtkWidget,1,177) fuzzy-if(skiaContent,1,50) HTTP(..) == wordwrap-03.html wordwrap-03-ref.html # Fuzzy on Linux because the native textbox gradient is painted in a slightly different position depending on the invalid area.
== wordwrap-04.html wordwrap-04-ref.html
== overflowwrap-04.html wordwrap-04-ref.html
== wordwrap-05.html wordwrap-05-ref.html
== overflowwrap-05.html wordwrap-05-ref.html
== wordwrap-06.html wordwrap-06-ref.html
new file mode 100644
--- /dev/null
+++ b/layout/reftests/text/wordbreak-dynamic-1-ref.html
@@ -0,0 +1,18 @@
+<!DOCTYPE html>
+<html>
+<head>
+ <meta charset="UTF-8">
+ <title>Reference - word-break: break-all with dynamic change</title>
+ <style>
+ div {
+ font-family: monospace;
+ width: 3ch;
+ background: pink;
+ word-break: break-all;
+ }
+ </style>
+</head>
+<body>
+ <div>a bcdef</div>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/layout/reftests/text/wordbreak-dynamic-1.html
@@ -0,0 +1,25 @@
+<!DOCTYPE html>
+<html class="reftest-wait">
+<head>
+ <meta charset="UTF-8">
+ <title>Test - word-break: break-all with dynamic change</title>
+ <style>
+ div {
+ font-family: monospace;
+ width: 3ch;
+ background: pink;
+ word-break: break-all;
+ }
+ </style>
+</head>
+<body>
+ <div>a bcdef<div></div></div>
+ <script>
+ document.addEventListener("DOMContentLoaded", function() {
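+ // Flush layout first, then hide the empty inner div, so the
+ // word-break: break-all text is reflowed by a dynamic change.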
+ document.body.offsetHeight;
+ document.querySelector('div > div').style.display = 'none';
+ document.documentElement.classList.remove('reftest-wait');
+ });
+ </script>
+</body>
+</html>
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -1332,39 +1332,39 @@ PeerConnectionImpl::InitializeDataChanne
#endif
return NS_ERROR_FAILURE;
}
already_AddRefed<nsDOMDataChannel>
PeerConnectionImpl::CreateDataChannel(const nsAString& aLabel,
const nsAString& aProtocol,
uint16_t aType,
- bool outOfOrderAllowed,
+ bool ordered,
uint16_t aMaxTime,
uint16_t aMaxNum,
bool aExternalNegotiated,
uint16_t aStream,
ErrorResult &rv)
{
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
RefPtr<nsDOMDataChannel> result;
- rv = CreateDataChannel(aLabel, aProtocol, aType, outOfOrderAllowed,
+ rv = CreateDataChannel(aLabel, aProtocol, aType, ordered,
aMaxTime, aMaxNum, aExternalNegotiated,
aStream, getter_AddRefs(result));
return result.forget();
#else
return nullptr;
#endif
}
NS_IMETHODIMP
PeerConnectionImpl::CreateDataChannel(const nsAString& aLabel,
const nsAString& aProtocol,
uint16_t aType,
- bool outOfOrderAllowed,
+ bool ordered,
uint16_t aMaxTime,
uint16_t aMaxNum,
bool aExternalNegotiated,
uint16_t aStream,
nsDOMDataChannel** aRetval)
{
PC_AUTO_ENTER_API_CALL(false);
MOZ_ASSERT(aRetval);
@@ -1375,17 +1375,17 @@ PeerConnectionImpl::CreateDataChannel(co
static_cast<DataChannelConnection::Type>(aType);
nsresult rv = EnsureDataConnection(WEBRTC_DATACHANNEL_STREAMS_DEFAULT);
if (NS_FAILED(rv)) {
return rv;
}
dataChannel = mDataConnection->Open(
NS_ConvertUTF16toUTF8(aLabel), NS_ConvertUTF16toUTF8(aProtocol), theType,
- !outOfOrderAllowed,
+ ordered,
aType == DataChannelConnection::PARTIAL_RELIABLE_REXMIT ? aMaxNum :
(aType == DataChannelConnection::PARTIAL_RELIABLE_TIMED ? aMaxTime : 0),
nullptr, nullptr, aExternalNegotiated, aStream
);
NS_ENSURE_TRUE(dataChannel,NS_ERROR_FAILURE);
CSFLogDebug(logTag, "%s: making DOMDataChannel", __FUNCTION__);
--- a/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java
+++ b/mobile/android/base/java/org/mozilla/gecko/telemetry/stores/TelemetryJSONFilePingStore.java
@@ -191,17 +191,17 @@ public class TelemetryJSONFilePingStore
return null;
}
final FileInputStream inputStream;
try {
inputStream = new FileInputStream(file);
} catch (final FileNotFoundException e) {
// permission problem might also cause same exception. To get more debug information.
- String fileInfo = String.format("existence: %b, can write: %b, size: %l.",
+ String fileInfo = String.format("existence: %b, can write: %b, size: %d.",
file.exists(), file.canWrite(), file.length());
String msg = String.format(
"Expected file to exist but got exception in thread: %s. File info - %s",
Thread.currentThread().getName(), fileInfo);
throw new IllegalStateException(msg);
}
final JSONObject obj;
--- a/taskcluster/ci/build/android.yml
+++ b/taskcluster/ci/build/android.yml
@@ -1,15 +1,13 @@
android-api-15/debug:
description: "Android 4.0 API15+ Debug"
index:
product: mobile
- job-name:
- buildbot: android-api-15-debug
- gecko-v2: android-api-15-debug
+ job-name: android-api-15-debug
treeherder:
platform: android-4-0-armv7-api15/debug
symbol: tc(B)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
implementation: docker-worker
max-run-time: 7200
run:
--- a/taskcluster/ci/build/linux.yml
+++ b/taskcluster/ci/build/linux.yml
@@ -21,19 +21,17 @@ linux64/opt:
secrets: true
tooltool-downloads: public
need-xvfb: true
linux64/pgo:
description: "Linux64 PGO"
index:
product: firefox
- job-name:
- buildbot: linux64-pgo
- gecko-v2: linux64-pgo
+ job-name: linux64-pgo
treeherder:
platform: linux64/pgo
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
implementation: docker-worker
max-run-time: 36000
@@ -49,19 +47,17 @@ linux64/pgo:
secrets: true
tooltool-downloads: public
need-xvfb: true
linux64/debug:
description: "Linux64 Debug"
index:
product: firefox
- job-name:
- buildbot: linux64-debug
- gecko-v2: linux64-debug
+ job-name: linux64-debug
treeherder:
platform: linux64/debug
symbol: tc(B)
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
implementation: docker-worker
max-run-time: 36000
run:
@@ -126,18 +122,17 @@ linux/debug:
custom-build-variant-cfg: debug
tooltool-downloads: public
need-xvfb: true
linux/pgo:
description: "Linux32 PGO"
index:
product: firefox
- job-name:
- gecko-v2: linux-pgo
+ job-name: linux-pgo
treeherder:
platform: linux32/pgo
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-linux
worker:
implementation: docker-worker
max-run-time: 36000
--- a/taskcluster/ci/build/windows.yml
+++ b/taskcluster/ci/build/windows.yml
@@ -1,14 +1,13 @@
win32/debug:
description: "Win32 Debug"
index:
product: firefox
- job-name:
- gecko-v2: win32-debug
+ job-name: win32-debug
treeherder:
platform: windows2012-32/debug
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 7200
@@ -17,18 +16,17 @@ win32/debug:
script: mozharness/scripts/fx_desktop_build.py
config:
- builds/taskcluster_firefox_windows_32_debug.py
win32/opt:
description: "Win32 Opt"
index:
product: firefox
- job-name:
- gecko-v2: win32-opt
+ job-name: win32-opt
treeherder:
platform: windows2012-32/opt
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 7200
@@ -37,18 +35,17 @@ win32/opt:
script: mozharness/scripts/fx_desktop_build.py
config:
- builds/taskcluster_firefox_windows_32_opt.py
win32/pgo:
description: "Win32 Opt PGO"
index:
product: firefox
- job-name:
- gecko-v2: win32-pgo
+ job-name: win32-pgo
treeherder:
platform: windows2012-32/pgo
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 9000
@@ -58,18 +55,17 @@ win32/pgo:
script: mozharness/scripts/fx_desktop_build.py
config:
- builds/taskcluster_firefox_windows_32_opt.py
win64/debug:
description: "Win64 Debug"
index:
product: firefox
- job-name:
- gecko-v2: win64-debug
+ job-name: win64-debug
treeherder:
platform: windows2012-64/debug
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 7200
@@ -78,18 +74,17 @@ win64/debug:
script: mozharness/scripts/fx_desktop_build.py
config:
- builds/taskcluster_firefox_windows_64_debug.py
win64/opt:
description: "Win64 Opt"
index:
product: firefox
- job-name:
- gecko-v2: win64-opt
+ job-name: win64-opt
treeherder:
platform: windows2012-64/opt
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 7200
@@ -98,18 +93,17 @@ win64/opt:
script: mozharness/scripts/fx_desktop_build.py
config:
- builds/taskcluster_firefox_windows_64_opt.py
win64/pgo:
description: "Win64 Opt PGO"
index:
product: firefox
- job-name:
- gecko-v2: win64-pgo
+ job-name: win64-pgo
treeherder:
platform: windows2012-64/pgo
symbol: tc(B)
tier: 2
worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
worker:
implementation: generic-worker
max-run-time: 10800
--- a/taskcluster/ci/hazard/kind.yml
+++ b/taskcluster/ci/hazard/kind.yml
@@ -19,18 +19,17 @@ job-defaults:
max-run-time: 36000
docker-image: {in-tree: desktop-build}
jobs:
linux64-shell-haz/debug:
description: "JS Shell Hazard Analysis Linux"
index:
product: firefox
- job-name:
- gecko-v2: shell-haz-debug
+ job-name: shell-haz-debug
treeherder:
platform: linux64/debug
symbol: SM-tc(H)
run:
using: hazard
tooltool-manifest: "browser/config/tooltool-manifests/linux64/hazard.manifest"
command: >
cd /home/worker/checkouts/gecko/taskcluster/scripts/builder
@@ -39,18 +38,17 @@ jobs:
files-changed:
- js/public/**
- js/src/**
linux64-haz/debug:
description: "Browser Hazard Analysis Linux"
index:
product: firefox
- job-name:
- gecko-v2: browser-haz-debug
+ job-name: browser-haz-debug
treeherder:
platform: linux64/debug
symbol: tc(H)
run:
using: hazard
tooltool-manifest: "browser/config/tooltool-manifests/linux64/hazard.manifest"
mozconfig: "browser/config/mozconfigs/linux64/hazards"
command: >
--- a/taskcluster/ci/l10n/kind.yml
+++ b/taskcluster/ci/l10n/kind.yml
@@ -42,18 +42,17 @@ job-defaults:
- testing/mozharness/scripts/desktop_l10n.py
- toolkit/locales/**
- toolkit/mozapps/installer/**
jobs:
linux-l10n/opt:
description: "Localization"
index:
- job-name:
- gecko-v2: linux32-l10n-opt
+ job-name: linux32-l10n-opt
treeherder:
platform: linux32/opt
symbol: tc(L10n)
run:
script: mozharness/scripts/desktop_l10n.py
actions: [clone-locales list-locales setup repack summary]
config:
- single_locale/tc_linux32.py
@@ -64,18 +63,17 @@ jobs:
- total-chunks=1
- this-chunk=1
tooltool-downloads: public
need-xvfb: true
linux64-l10n/opt:
description: "Localization"
index:
- job-name:
- gecko-v2: linux64-l10n-opt
+ job-name: linux64-l10n-opt
treeherder:
platform: linux64/opt
symbol: tc(L10n)
run:
script: mozharness/scripts/desktop_l10n.py
actions: [clone-locales list-locales setup repack summary]
config:
- single_locale/tc_linux64.py
@@ -87,18 +85,17 @@ jobs:
- this-chunk=1
tooltool-downloads: public
need-xvfb: true
android-api-15-l10n/opt:
description: "Single Locale Repack"
index:
product: mobile
- job-name:
- gecko-v2: android-l10n-opt
+ job-name: android-l10n-opt
treeherder:
platform: android-4-0-armv7-api15/opt
symbol: tc(L10n)
worker-type: aws-provisioner-v1/gecko-{level}-b-android
worker:
max-run-time: 18000
run:
script: mozharness/scripts/mobile_l10n.py
--- a/taskcluster/ci/spidermonkey/kind.yml
+++ b/taskcluster/ci/spidermonkey/kind.yml
@@ -29,19 +29,17 @@ job-defaults:
# appended to this list
- js/public/**
- js/src/**
jobs:
sm-package/opt:
description: "Spidermonkey source package and test"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-package-opt
+ job-name: sm-package-opt
treeherder:
symbol: SM-tc(pkg)
run:
using: spidermonkey-package
spidermonkey-variant: plain
when:
files-changed:
- build/**
@@ -66,34 +64,31 @@ jobs:
- testing/mozbase/**
- test.mozbuild
- toolkit/mozapps/installer/package-name.mk
- toolkit/mozapps/installer/upload-files.mk
sm-mozjs-sys/debug:
description: "Build js/src as the mozjs_sys Rust crate"
index:
- job-name:
- gecko-v2: sm-mozjs-sys-debug
+ job-name: sm-mozjs-sys-debug
treeherder:
symbol: SM-tc(mozjs-crate)
run:
using: spidermonkey-mozjs-crate
spidermonkey-variant: plain
run-on-projects:
- integration
- release
- try
sm-plain/debug:
description: "Spidermonkey Plain"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-plaindebug-debug
+ job-name: sm-plaindebug-debug
treeherder:
platform: linux64/debug
symbol: SM-tc(p)
run:
spidermonkey-variant: plaindebug
sm-plain/opt:
description: "Spidermonkey Plain"
@@ -102,98 +97,82 @@ jobs:
treeherder:
symbol: SM-tc(p)
run:
spidermonkey-variant: plain
sm-arm-sim/debug:
description: "Spidermonkey ARM sim"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-arm-sim-debug
+ job-name: sm-arm-sim-debug
treeherder:
symbol: SM-tc(arm)
run:
spidermonkey-variant: arm-sim
sm-arm64-sim/debug:
description: "Spidermonkey ARM64 sim"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-arm64-sim-debug
+ job-name: sm-arm64-sim-debug
treeherder:
symbol: SM-tc(arm64)
run:
spidermonkey-variant: arm64-sim
sm-asan/opt:
description: "Spidermonkey Address Sanitizer"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-asan-opt
+ job-name: sm-asan-opt
treeherder:
symbol: SM-tc(asan)
run:
spidermonkey-variant: asan
tooltool-manifest: browser/config/tooltool-manifests/linux64/asan.manifest
sm-compacting/debug:
description: "Spidermonkey Compacting"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-compacting-debug
+ job-name: sm-compacting-debug
treeherder:
symbol: SM-tc(cgc)
run:
spidermonkey-variant: compacting
sm-msan/opt:
description: "Spidermonkey Memory Sanitizer"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-msan-opt
+ job-name: sm-msan-opt
treeherder:
symbol: SM-tc(msan)
run:
spidermonkey-variant: msan
tooltool-manifest: browser/config/tooltool-manifests/linux64/msan.manifest
sm-tsan/opt:
description: "Spidermonkey Thread Sanitizer"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-tsan-opt
+ job-name: sm-tsan-opt
treeherder:
symbol: SM-tc(tsan)
tier: 3
run-on-projects: []
run:
spidermonkey-variant: tsan
tooltool-manifest: browser/config/tooltool-manifests/linux64/tsan.manifest
sm-rootanalysis/debug:
description: "Spidermonkey Root Analysis"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-rootanalysis-debug
+ job-name: sm-rootanalysis-debug
treeherder:
symbol: SM-tc(r)
run:
spidermonkey-variant: rootanalysis
sm-nonunified/debug:
description: "Spidermonkey Non-Unified Debug"
index:
- job-name:
- buildbot: sm-plain
- gecko-v2: sm-nonunified-debug
+ job-name: sm-nonunified-debug
treeherder:
platform: linux64/debug
symbol: SM-tc(nu)
run:
spidermonkey-variant: nonunified
--- a/taskcluster/scripts/tester/test-macosx.sh
+++ b/taskcluster/scripts/tester/test-macosx.sh
@@ -20,22 +20,48 @@ cd $WORKSPACE
rm -rf artifacts
mkdir artifacts
# test required parameters are supplied
if [[ -z ${MOZHARNESS_URL} ]]; then fail "MOZHARNESS_URL is not set"; fi
if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
-# Unzip the mozharness ZIP file created by the build task
-if ! curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
- fail "failed to download mozharness zip"
-fi
-rm -rf mozharness
-unzip -q mozharness.zip
+# Download mozharness with exponential backoff.
+# curl already applies exponential backoff, but apparently not for every
+# failure case: we keep seeing downloads fail with a 404, so retry the whole
+# download-and-unzip sequence here as well.
+download_mozharness() {
+ local max_attempts=10
+ local timeout=1
+ local attempt=0
+
+ echo "Downloading mozharness"
+
+ while [[ $attempt -lt $max_attempts ]]; do
+ if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+ rm -rf mozharness
+ if unzip -q mozharness.zip; then
+ return 0
+ else
+ echo "error unzipping mozharness.zip" >&2
+ fi
+ else
+ echo "failed to download mozharness zip" >&2
+ fi
+ echo "Download failed, retrying in $timeout seconds..." >&2
+ sleep $timeout
+ timeout=$((timeout*2))
+ attempt=$((attempt+1))
+ done
+
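+ # Only reached when every download/unzip attempt failed.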
+ fail "Failed to download and unzip mozharness"
+}
+
+download_mozharness
rm mozharness.zip
# For telemetry purposes, the build process wants information about the
# source it is running; tc-vcs obscures this a little, but we can provide
# it directly.
export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
--- a/taskcluster/scripts/tester/test-ubuntu.sh
+++ b/taskcluster/scripts/tester/test-ubuntu.sh
@@ -64,25 +64,49 @@ cleanup() {
# To share X issues
cp /home/worker/.xsession-errors ~/artifacts/public/xsession-errors.log
fi
cleanup_xvfb
exit $rv
}
trap cleanup EXIT INT
+# Download mozharness with exponential backoff.
+# curl already applies exponential backoff, but apparently not for every
+# failure case: we keep seeing downloads fail with a 404, so retry the whole
+# download-and-unzip sequence here as well.
+download_mozharness() {
+ local max_attempts=10
+ local timeout=1
+ local attempt=0
+
+ echo "Downloading mozharness"
+
+ while [[ $attempt -lt $max_attempts ]]; do
+ if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
+ rm -rf mozharness
+ if unzip -q mozharness.zip; then
+ return 0
+ fi
+ echo "error unzipping mozharness.zip" >&2
+ else
+ echo "failed to download mozharness zip" >&2
+ fi
+ echo "Download failed, retrying in $timeout seconds..." >&2
+ sleep $timeout
+ timeout=$((timeout*2))
+ attempt=$((attempt+1))
+ done
+
+ fail "Failed to download and unzip mozharness"
+}
+
# Download mozharness if we're told to.
if [ ${MOZHARNESS_URL} ]; then
- if ! curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
- fail "failed to download mozharness zip"
- fi
- rm -rf mozharness
- if ! unzip -q mozharness.zip; then
- fail "error unzipping mozharness.zip"
- fi
+ download_mozharness
rm mozharness.zip
if ! [ -d mozharness ]; then
fail "mozharness zip did not contain mozharness/"
fi
MOZHARNESS_PATH=`pwd`/mozharness
fi
--- a/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
+++ b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
@@ -67,13 +67,13 @@ JOB_NAME_WHITELIST = set([
'win32-opt',
'win32-pgo',
'win64-debug',
'win64-opt',
'win64-pgo',
])
JOB_NAME_WHITELIST_ERROR = """\
-The gecko-v2 job name {} is not in the whitelist in __file__.
+The gecko-v2 job name {} is not in the whitelist in gecko_v2_whitelist.py.
If this job runs on Buildbot, please ensure that the job names match between
Buildbot and TaskCluster, then add the job name to the whitelist. If this is a
new job, there is nothing to check -- just add the job to the whitelist.
"""
--- a/taskcluster/taskgraph/transforms/task.py
+++ b/taskcluster/taskgraph/transforms/task.py
@@ -84,30 +84,17 @@ task_description_schema = Schema({
# information for indexing this build so its artifacts can be discovered;
# if omitted, the build will not be indexed.
Optional('index'): {
# the name of the product this build produces
'product': Any('firefox', 'mobile'),
# the names to use for this job in the TaskCluster index
- 'job-name': Any(
- # Assuming the job is named "normally", this is the v2 job name,
- # and the v1 and buildbot routes will be determined appropriately.
- basestring,
-
- # otherwise, give separate names for each of the legacy index
- # routes; if a name is omitted, no corresponding route will be
- # created.
- {
- # the name as it appears in buildbot routes
- Optional('buildbot'): basestring,
- Required('gecko-v2'): basestring,
- }
- ),
+ 'job-name': basestring,
# The rank that the task will receive in the TaskCluster
# index. A newly completed task supercedes the currently
# indexed task iff it has a higher rank. If unspecified,
# 'by-tier' behavior will be used.
'rank': Any(
# Rank is equal the timestamp of the build_date for tier-1
# tasks, and zero for non-tier-1. This sorts tier-{2,3}
@@ -309,25 +296,20 @@ GROUP_NAMES = {
'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
'Aries': 'Aries Device Image',
'Nexus 5-L': 'Nexus 5-L Device Image',
'Cc': 'Toolchain builds',
'SM-tc': 'Spidermonkey builds',
}
UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
-BUILDBOT_ROUTE_TEMPLATES = [
- "index.buildbot.branches.{project}.{job-name-buildbot}",
- "index.buildbot.revisions.{head_rev}.{project}.{job-name-buildbot}",
-]
-
V2_ROUTE_TEMPLATES = [
- "index.gecko.v2.{project}.latest.{product}.{job-name-gecko-v2}",
- "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name-gecko-v2}",
- "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name-gecko-v2}",
+ "index.gecko.v2.{project}.latest.{product}.{job-name}",
+ "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
+ "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name}",
]
# the roots of the treeherder routes, keyed by treeherder environment
TREEHERDER_ROUTE_ROOTS = {
'production': 'tc-treeherder',
'staging': 'tc-treeherder-stage',
}
@@ -516,40 +498,27 @@ def add_index_routes(config, tasks):
index = task.get('index')
routes = task.setdefault('routes', [])
if not index:
yield task
continue
job_name = index['job-name']
- # unpack the v2 name to v1 and buildbot names
- if isinstance(job_name, basestring):
- base_name, type_name = job_name.rsplit('-', 1)
- job_name = {
- 'buildbot': base_name,
- 'gecko-v2': '{}-{}'.format(base_name, type_name),
- }
-
- if job_name['gecko-v2'] not in JOB_NAME_WHITELIST:
- raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name['gecko-v2']))
+ if job_name not in JOB_NAME_WHITELIST:
+ raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
subs = config.params.copy()
- for n in job_name:
- subs['job-name-' + n] = job_name[n]
+ subs['job-name'] = job_name
subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
time.gmtime(config.params['build_date']))
subs['product'] = index['product']
- if 'buildbot' in job_name:
- for tpl in BUILDBOT_ROUTE_TEMPLATES:
- routes.append(tpl.format(**subs))
- if 'gecko-v2' in job_name:
- for tpl in V2_ROUTE_TEMPLATES:
- routes.append(tpl.format(**subs))
+ for tpl in V2_ROUTE_TEMPLATES:
+ routes.append(tpl.format(**subs))
# The default behavior is to rank tasks according to their tier
extra_index = task.setdefault('extra', {}).setdefault('index', {})
rank = index.get('rank', 'by-tier')
if rank == 'by-tier':
# rank is zero for non-tier-1 tasks and based on pushid for others;
# this sorts tier-{2,3} builds below tier-1 in the index
@@ -660,17 +629,17 @@ def check_v2_routes():
# we only deal with the 'routes' key here
routes = routes_json['routes']
# we use different variables than mozharness
for mh, tg in [
('{index}', 'index'),
('{build_product}', '{product}'),
- ('{build_name}-{build_type}', '{job-name-gecko-v2}'),
+ ('{build_name}-{build_type}', '{job-name}'),
('{year}.{month}.{day}.{pushdate}', '{build_date_long}')]:
routes = [r.replace(mh, tg) for r in routes]
if sorted(routes) != sorted(V2_ROUTE_TEMPLATES):
raise Exception("V2_ROUTE_TEMPLATES does not match Mozharness's routes.json: "
"%s vs %s" % (V2_ROUTE_TEMPLATES, routes))
check_v2_routes()