author | Wes Kocher <wkocher@mozilla.com> |
date | Mon, 17 Jul 2017 17:27:51 -0700 |
changeset 369229 | 5e73b9798464c3f7106f0161dc9a49b234f42f9c |
parent 369189 | efc0b1525edbd357818dc7195537364e76f709e7 (current diff) |
parent 369228 | cde0022cc6e66802188fe1e24510df5e60598678 (diff) |
child 369291 | 216a5bf264b2dae2aefef068533cc0e1bb4d21d7 |
child 369393 | 75c857f94fd58a988cd478a1e3be16d67dabfe3d |
push id | 32193 |
push user | kwierso@gmail.com |
push date | Tue, 18 Jul 2017 00:27:59 +0000 |
treeherder | mozilla-central@5e73b9798464 |
reviewers | merge |
milestone | 56.0a1 |
first release with | nightly linux32 5e73b9798464 / 56.0a1 / 20170718100239; nightly linux64 5e73b9798464 / 56.0a1 / 20170718100239; nightly mac 5e73b9798464 / 56.0a1 / 20170718100333; nightly win32 5e73b9798464 / 56.0a1 / 20170718030207; nightly win64 5e73b9798464 / 56.0a1 / 20170718030207 |
last release without | nightly linux32; nightly linux64; nightly mac; nightly win32; nightly win64 |
releases | nightly linux32 56.0a1 / 20170718100239; nightly linux64 56.0a1 / 20170718100239; nightly mac 56.0a1 / 20170718100333; nightly win32 56.0a1 / 20170718030207; nightly win64 56.0a1 / 20170718030207 |
--- a/devtools/server/actors/highlighters/css-grid.js +++ b/devtools/server/actors/highlighters/css-grid.js @@ -1226,17 +1226,18 @@ CssGridHighlighter.prototype = extend(Au // Draw the text for the grid area name. for (let rowNumber = rowStart; rowNumber < rowEnd; rowNumber++) { for (let columnNumber = columnStart; columnNumber < columnEnd; columnNumber++) { let row = fragment.rows.tracks[rowNumber - 1]; let column = fragment.cols.tracks[columnNumber - 1]; // Check if the font size is exceeds the bounds of the containing grid cell. - if (fontSize > column.breadth || fontSize > row.breadth) { + if (fontSize > (column.breadth * displayPixelRatio) || + fontSize > (row.breadth * displayPixelRatio)) { fontSize = (column.breadth + row.breadth) / 2; this.ctx.font = fontSize + "px " + GRID_FONT_FAMILY; } let textWidth = this.ctx.measureText(area.name).width; // The width of the character 'm' approximates the height of the text. let textHeight = this.ctx.measureText("m").width;
--- a/dom/base/nsContentUtils.cpp +++ b/dom/base/nsContentUtils.cpp @@ -3583,17 +3583,24 @@ bool nsContentUtils::IsInPrivateBrowsing(nsIDocument* aDoc) { if (!aDoc) { return false; } nsCOMPtr<nsILoadGroup> loadGroup = aDoc->GetDocumentLoadGroup(); if (loadGroup) { - return IsInPrivateBrowsing(loadGroup); + nsCOMPtr<nsIInterfaceRequestor> callbacks; + loadGroup->GetNotificationCallbacks(getter_AddRefs(callbacks)); + if (callbacks) { + nsCOMPtr<nsILoadContext> loadContext = do_GetInterface(callbacks); + if (loadContext) { + return loadContext->UsePrivateBrowsing(); + } + } } nsCOMPtr<nsIChannel> channel = aDoc->GetChannel(); return channel && NS_UsePrivateBrowsing(channel); } // static bool
--- a/dom/media/GraphDriver.cpp +++ b/dom/media/GraphDriver.cpp @@ -158,23 +158,19 @@ public: } private: nsCOMPtr<nsIThread> mThread; }; ThreadedDriver::~ThreadedDriver() { if (mThread) { - if (NS_IsMainThread()) { - mThread->Shutdown(); - } else { - nsCOMPtr<nsIRunnable> event = - new MediaStreamGraphShutdownThreadRunnable(mThread.forget()); - NS_DispatchToMainThread(event); - } + nsCOMPtr<nsIRunnable> event = + new MediaStreamGraphShutdownThreadRunnable(mThread.forget()); + NS_DispatchToMainThread(event); } } class MediaStreamGraphInitThreadRunnable : public Runnable { public: explicit MediaStreamGraphInitThreadRunnable(ThreadedDriver* aDriver) : Runnable("MediaStreamGraphInitThreadRunnable") , mDriver(aDriver) {
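In ~ThreadedDriver() above, the patch drops the NS_IsMainThread() special case: the thread is now always handed to a MediaStreamGraphShutdownThreadRunnable dispatched to the main thread, which performs the shutdown there. A rough stand-alone sketch of that shape, using std::thread and an invented MainThreadQueue in place of the Gecko thread APIs:

```cpp
// Sketch only: MainThreadQueue and the join-based shutdown stand in for
// nsIThread::Shutdown() and NS_DispatchToMainThread().
#include <functional>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

class MainThreadQueue {
    std::mutex lock_;
    std::queue<std::function<void()>> tasks_;
public:
    void Dispatch(std::function<void()> task) {
        std::lock_guard<std::mutex> guard(lock_);
        tasks_.push(std::move(task));
    }
    void Drain() {  // called on the "main" thread
        for (;;) {
            std::function<void()> task;
            {
                std::lock_guard<std::mutex> guard(lock_);
                if (tasks_.empty())
                    return;
                task = std::move(tasks_.front());
                tasks_.pop();
            }
            task();
        }
    }
};

class ThreadedDriver {
    MainThreadQueue& mainThread_;
    std::thread worker_;
public:
    explicit ThreadedDriver(MainThreadQueue& q)
      : mainThread_(q), worker_([] { /* run graph iterations */ }) {}

    ~ThreadedDriver() {
        if (worker_.joinable()) {
            // Always post the shutdown, mirroring how the patch removes the
            // NS_IsMainThread() special case.
            auto worker = std::make_shared<std::thread>(std::move(worker_));
            mainThread_.Dispatch([worker] { worker->join(); });
        }
    }
};

int main() {
    MainThreadQueue mainThread;
    { ThreadedDriver driver(mainThread); }  // destructor posts the join
    mainThread.Drain();                     // the join runs here
}
```

The destructor can then run on any thread without blocking: the shutdown work always happens on the queue's thread.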
--- a/dom/payments/PaymentRequest.cpp +++ b/dom/payments/PaymentRequest.cpp @@ -556,16 +556,17 @@ PaymentRequest::DispatchUpdateEvent(cons PaymentRequestUpdateEventInit init; init.mBubbles = false; init.mCancelable = false; RefPtr<PaymentRequestUpdateEvent> event = PaymentRequestUpdateEvent::Constructor(this, aType, init); event->SetTrusted(true); + event->SetRequest(this); return DispatchDOMEvent(nullptr, event, nullptr, nullptr); } already_AddRefed<PaymentAddress> PaymentRequest::GetShippingAddress() const { RefPtr<PaymentAddress> address = mShippingAddress;
--- a/dom/payments/PaymentRequestUpdateEvent.cpp +++ b/dom/payments/PaymentRequestUpdateEvent.cpp @@ -41,21 +41,19 @@ PaymentRequestUpdateEvent::Constructor(c { nsCOMPtr<mozilla::dom::EventTarget> owner = do_QueryInterface(aGlobal.GetAsSupports()); return Constructor(owner, aType, aEventInitDict); } PaymentRequestUpdateEvent::PaymentRequestUpdateEvent(EventTarget* aOwner) : Event(aOwner, nullptr, nullptr) , mWaitForUpdate(false) + , mRequest(nullptr) { MOZ_ASSERT(aOwner); - - // event's target should be a PaymentRequest object - mRequest = static_cast<PaymentRequest *>(aOwner); } void PaymentRequestUpdateEvent::ResolvedCallback(JSContext* aCx, JS::Handle<JS::Value> aValue) { MOZ_ASSERT(mRequest); if (NS_WARN_IF(!aValue.isObject()) || !mWaitForUpdate) { @@ -96,32 +94,46 @@ PaymentRequestUpdateEvent::RejectedCallb mRequest->AbortUpdate(NS_ERROR_DOM_ABORT_ERR); mWaitForUpdate = false; mRequest->SetUpdating(false); } void PaymentRequestUpdateEvent::UpdateWith(Promise& aPromise, ErrorResult& aRv) { + if (!IsTrusted()) { + aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR); + return; + } + MOZ_ASSERT(mRequest); - if (mWaitForUpdate || !mRequest->ReadyForUpdate() || - !mEvent->mFlags.mIsBeingDispatched) { + if (mWaitForUpdate || !mRequest->ReadyForUpdate()) { aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR); return; } aPromise.AppendNativeHandler(this); StopPropagation(); StopImmediatePropagation(); mWaitForUpdate = true; mRequest->SetUpdating(true); } +void +PaymentRequestUpdateEvent::SetRequest(PaymentRequest* aRequest) +{ + MOZ_ASSERT(IsTrusted()); + MOZ_ASSERT(!mRequest); + MOZ_ASSERT(aRequest); + + mRequest = aRequest; +} + PaymentRequestUpdateEvent::~PaymentRequestUpdateEvent() { } JSObject* PaymentRequestUpdateEvent::WrapObjectInternal(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) {
--- a/dom/payments/PaymentRequestUpdateEvent.h +++ b/dom/payments/PaymentRequestUpdateEvent.h @@ -36,24 +36,27 @@ public: virtual void RejectedCallback(JSContext* aCx, JS::Handle<JS::Value> aValue) override; static already_AddRefed<PaymentRequestUpdateEvent> Constructor(EventTarget* aOwner, const nsAString& aType, const PaymentRequestUpdateEventInit& aEventInitDict); + // Called by WebIDL constructor static already_AddRefed<PaymentRequestUpdateEvent> Constructor(const GlobalObject& aGlobal, const nsAString& aType, const PaymentRequestUpdateEventInit& aEventInitDict, ErrorResult& aRv); void UpdateWith(Promise& aPromise, ErrorResult& aRv); + void SetRequest(PaymentRequest* aRequest); + protected: ~PaymentRequestUpdateEvent(); private: // Indicating whether an updateWith()-initiated update is currently in progress. bool mWaitForUpdate; RefPtr<PaymentRequest> mRequest; };
--- a/gfx/layers/wr/WebRenderPaintedLayer.cpp +++ b/gfx/layers/wr/WebRenderPaintedLayer.cpp @@ -48,20 +48,19 @@ WebRenderPaintedLayer::SetupExternalImag return true; } bool WebRenderPaintedLayer::UpdateImageClient() { MOZ_ASSERT(WrManager()->GetPaintedLayerCallback()); - LayerIntRegion visibleRegion = GetVisibleRegion(); - LayerIntRect bounds = visibleRegion.GetBounds(); - LayerIntSize size = bounds.Size(); - IntSize imageSize(size.width, size.height); + nsIntRegion visibleRegion = GetVisibleRegion().ToUnknownRegion(); + IntRect bounds = visibleRegion.GetBounds(); + IntSize imageSize = bounds.Size(); UpdateImageHelper helper(mImageContainer, mImageClient, imageSize); { RefPtr<DrawTarget> target = helper.GetDrawTarget(); if (!target) { return false; } @@ -69,17 +68,17 @@ WebRenderPaintedLayer::UpdateImageClient target->ClearRect(Rect(0, 0, imageSize.width, imageSize.height)); target->SetTransform(Matrix().PreTranslate(-bounds.x, -bounds.y)); RefPtr<gfxContext> ctx = gfxContext::CreatePreservingTransformOrNull(target); MOZ_ASSERT(ctx); // already checked the target above WrManager()->GetPaintedLayerCallback()(this, ctx, - visibleRegion.ToUnknownRegion(), visibleRegion.ToUnknownRegion(), + visibleRegion, visibleRegion, DrawRegionClip::DRAW, nsIntRegion(), WrManager()->GetPaintedLayerCallbackData()); if (gfxPrefs::WebRenderHighlightPaintedLayers()) { target->SetTransform(Matrix()); target->FillRect(Rect(0, 0, imageSize.width, imageSize.height), ColorPattern(Color(1.0, 0.0, 0.0, 0.5))); } } @@ -118,34 +117,51 @@ WebRenderPaintedLayer::RenderLayer(wr::D if (GetVisibleRegion().IsEmpty()) { if (gfxPrefs::LayersDump()) { printf_stderr("PaintedLayer %p skipping\n", this->GetLayer()); } return; } - nsIntRegion regionToPaint; - regionToPaint.Sub(mVisibleRegion.ToUnknownRegion(), GetValidRegion()); + bool hasSomethingToPaint = true; + LayerIntRect visibleBounds = mVisibleRegion.GetBounds(); + nsIntRegion visibleRegion = mVisibleRegion.ToUnknownRegion(); + if (visibleBounds == mPaintedRect) { + // If the visible bounds haven't changed, there is a chance that the visible region + // might be entirely valid. If there is anything to paint, though, we'll repaint + // the entire visible region. + nsIntRegion regionToPaint = visibleRegion; + regionToPaint.SubOut(GetValidRegion()); + + if (regionToPaint.IsEmpty()) { + hasSomethingToPaint = false; // yay! + } + } // We have something to paint but can't. This usually happens only in // empty transactions - if (!regionToPaint.IsEmpty() && !WrManager()->GetPaintedLayerCallback()) { + if (hasSomethingToPaint && !WrManager()->GetPaintedLayerCallback()) { WrManager()->SetTransactionIncomplete(); return; } - if (!regionToPaint.IsEmpty() && WrManager()->GetPaintedLayerCallback()) { + if (hasSomethingToPaint && WrManager()->GetPaintedLayerCallback()) { + // In UpdateImageClient we throw away the previous buffer and paint everything in + // a new one, which amounts to losing the valid region. + ClearValidRegion(); if (!UpdateImageClient()) { + mPaintedRect = LayerIntRect(); return; } + mPaintedRect = visibleBounds; + SetValidRegion(visibleRegion); } else { // We have an empty transaction, just reuse the old image we had before. MOZ_ASSERT(mExternalImageId); MOZ_ASSERT(mImageContainer->HasCurrentImage()); - MOZ_ASSERT(GetInvalidRegion().IsEmpty()); } CreateWebRenderDisplayList(aBuilder, aSc); } } // namespace layers } // namespace mozilla
--- a/gfx/layers/wr/WebRenderPaintedLayer.h +++ b/gfx/layers/wr/WebRenderPaintedLayer.h @@ -33,17 +33,17 @@ protected: { MOZ_COUNT_DTOR(WebRenderPaintedLayer); if (mExternalImageId.isSome()) { WrBridge()->DeallocExternalImageId(mExternalImageId.ref()); } } wr::MaybeExternalImageId mExternalImageId; - + LayerIntRect mPaintedRect; public: virtual void InvalidateRegion(const nsIntRegion& aRegion) override { mInvalidRegion.Add(aRegion); UpdateValidRegionAfterInvalidRegionChanged(); } Layer* GetLayer() override { return this; }
--- a/gfx/layers/wr/WebRenderPaintedLayerBlob.cpp +++ b/gfx/layers/wr/WebRenderPaintedLayerBlob.cpp @@ -76,18 +76,16 @@ WebRenderPaintedLayerBlob::RenderLayer(w //XXX: We should switch to updating the blob image instead of adding a new one // That will get rid of this discard bit if (mImageKey.isSome()) { WrManager()->AddImageKeyForDiscard(mImageKey.value()); } mImageKey = Some(GetImageKey()); WrBridge()->SendAddBlobImage(mImageKey.value(), imageSize, size.width * 4, dt->GetFormat(), bytes); mImageBounds = visibleRegion.GetBounds(); - } else { - MOZ_ASSERT(GetInvalidRegion().IsEmpty()); } ScrollingLayersHelper scroller(this, aBuilder, aSc); StackingContextHelper sc(aSc, aBuilder, this); LayerRect rect = Bounds(); DumpLayerInfo("PaintedLayer", rect); aBuilder.PushImage(sc.ToRelativeWrRect(LayerRect(mImageBounds)),
--- a/image/imgLoader.cpp +++ b/image/imgLoader.cpp @@ -2106,23 +2106,20 @@ imgLoader::LoadImage(nsIURI* aURI, RefPtr<imgRequest> request; nsresult rv; nsLoadFlags requestFlags = nsIRequest::LOAD_NORMAL; #ifdef DEBUG bool isPrivate = false; - if (aLoadGroup) { - nsCOMPtr<nsIInterfaceRequestor> callbacks; - aLoadGroup->GetNotificationCallbacks(getter_AddRefs(callbacks)); - if (callbacks) { - nsCOMPtr<nsILoadContext> loadContext = do_GetInterface(callbacks); - isPrivate = loadContext && loadContext->UsePrivateBrowsing(); - } + if (aLoadingDocument) { + isPrivate = nsContentUtils::IsInPrivateBrowsing(aLoadingDocument); + } else if (aLoadGroup) { + isPrivate = nsContentUtils::IsInPrivateBrowsing(aLoadGroup); } MOZ_ASSERT(isPrivate == mRespectPrivacy); #endif // Get the default load flags from the loadgroup (if possible)... if (aLoadGroup) { aLoadGroup->GetLoadFlags(&requestFlags); }
--- a/js/public/GCAPI.h +++ b/js/public/GCAPI.h @@ -60,39 +60,39 @@ namespace JS { D(LAST_DITCH) \ D(TOO_MUCH_MALLOC) \ D(ALLOC_TRIGGER) \ D(DEBUG_GC) \ D(COMPARTMENT_REVIVED) \ D(RESET) \ D(OUT_OF_NURSERY) \ D(EVICT_NURSERY) \ - D(FULL_STORE_BUFFER) \ + D(UNUSED0) \ D(SHARED_MEMORY_LIMIT) \ D(UNUSED1) \ D(INCREMENTAL_TOO_SLOW) \ D(ABORT_GC) \ + D(FULL_WHOLE_CELL_BUFFER) \ + D(FULL_GENERIC_BUFFER) \ + D(FULL_VALUE_BUFFER) \ + D(FULL_CELL_PTR_BUFFER) \ + D(FULL_SLOT_BUFFER) \ + D(FULL_SHAPE_BUFFER) \ \ /* These are reserved for future use. */ \ D(RESERVED0) \ D(RESERVED1) \ D(RESERVED2) \ D(RESERVED3) \ D(RESERVED4) \ D(RESERVED5) \ D(RESERVED6) \ D(RESERVED7) \ D(RESERVED8) \ D(RESERVED9) \ - D(RESERVED10) \ - D(RESERVED11) \ - D(RESERVED12) \ - D(RESERVED13) \ - D(RESERVED14) \ - D(RESERVED15) \ \ /* Reasons from Firefox */ \ D(DOM_WINDOW_UTILS) \ D(COMPONENT_UTILS) \ D(MEM_PRESSURE) \ D(CC_WAITING) \ D(CC_FORCED) \ D(LOAD_END) \
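The reason list above is an X-macro: each D(NAME) entry is expanded elsewhere into the reason enum and its name/telemetry tables, which is presumably why the retired FULL_STORE_BUFFER slot becomes UNUSED0 (keeping the numeric values of later reasons stable) while the six new per-buffer reasons replace six RESERVED entries. A minimal, self-contained sketch of the X-macro pattern itself, with made-up reason names rather than the SpiderMonkey list:

```cpp
// Sketch only: the reason names below are invented, not the GCREASONS list.
#include <cstdio>

#define MY_GC_REASONS(D) \
    D(API)               \
    D(FULL_VALUE_BUFFER) \
    D(FULL_SLOT_BUFFER)  \
    D(UNUSED0) /* retired entry kept so later values don't shift */

enum class Reason {
#define DEFINE_REASON(name) name,
    MY_GC_REASONS(DEFINE_REASON)
#undef DEFINE_REASON
    LIMIT
};

static const char* ReasonName(Reason r) {
    switch (r) {
#define REASON_CASE(name) case Reason::name: return #name;
        MY_GC_REASONS(REASON_CASE)
#undef REASON_CASE
        default: return "unknown";
    }
}

int main() {
    std::printf("%s\n", ReasonName(Reason::FULL_SLOT_BUFFER)); // prints FULL_SLOT_BUFFER
}
```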
--- a/js/src/builtin/TestingFunctions.cpp +++ b/js/src/builtin/TestingFunctions.cpp @@ -333,17 +333,17 @@ GC(JSContext* cx, unsigned argc, Value* return true; } static bool MinorGC(JSContext* cx, unsigned argc, Value* vp) { CallArgs args = CallArgsFromVp(argc, vp); if (args.get(0) == BooleanValue(true)) - cx->zone()->group()->storeBuffer().setAboutToOverflow(); + cx->zone()->group()->storeBuffer().setAboutToOverflow(JS::gcreason::FULL_GENERIC_BUFFER); cx->minorGC(JS::gcreason::API); args.rval().setUndefined(); return true; } #define FOR_EACH_GC_PARAM(_) \ _("maxBytes", JSGC_MAX_BYTES, true) \
--- a/js/src/gc/GenerateStatsPhases.py +++ b/js/src/gc/GenerateStatsPhases.py @@ -69,16 +69,19 @@ MarkRootsPhaseKind = PhaseKind("MARK_ROO PhaseKind("MARK_COMPARTMENTS", "Mark Compartments", 54) ]) JoinParallelTasksPhaseKind = PhaseKind("JOIN_PARALLEL_TASKS", "Join Parallel Tasks", 67) PhaseKindGraphRoots = [ PhaseKind("MUTATOR", "Mutator Running", 0), PhaseKind("GC_BEGIN", "Begin Callback", 1), + PhaseKind("EVICT_NURSERY_FOR_MAJOR_GC", "Evict Nursery For Major GC", 70, [ + MarkRootsPhaseKind, + ]), PhaseKind("WAIT_BACKGROUND_THREAD", "Wait Background Thread", 2), PhaseKind("PREPARE", "Prepare For Collection", 69, [ PhaseKind("UNMARK", "Unmark", 7), PhaseKind("BUFFER_GRAY_ROOTS", "Buffer Gray Roots", 49), PhaseKind("MARK_DISCARD_CODE", "Mark Discard Code", 3), PhaseKind("RELAZIFY_FUNCTIONS", "Relazify Functions", 4), PhaseKind("PURGE", "Purge", 5), PhaseKind("PURGE_SHAPE_TABLES", "Purge ShapeTables", 60),
--- a/js/src/gc/Nursery.cpp +++ b/js/src/gc/Nursery.cpp @@ -546,16 +546,27 @@ js::Nursery::startProfile(ProfileKey key inline void js::Nursery::endProfile(ProfileKey key) { profileDurations_[key] = TimeStamp::Now() - startTimes_[key]; totalDurations_[key] += profileDurations_[key]; } +static inline bool +IsFullStoreBufferReason(JS::gcreason::Reason reason) +{ + return reason == JS::gcreason::FULL_WHOLE_CELL_BUFFER || + reason == JS::gcreason::FULL_GENERIC_BUFFER || + reason == JS::gcreason::FULL_VALUE_BUFFER || + reason == JS::gcreason::FULL_CELL_PTR_BUFFER || + reason == JS::gcreason::FULL_SLOT_BUFFER || + reason == JS::gcreason::FULL_SHAPE_BUFFER; +} + void js::Nursery::collect(JS::gcreason::Reason reason) { MOZ_ASSERT(!TlsContext.get()->suppressGC); if (!isEnabled() || isEmpty()) { // Our barriers are not always exact, and there may be entries in the // storebuffer even when the nursery is disabled or empty. It's not safe @@ -599,17 +610,17 @@ js::Nursery::collect(JS::gcreason::Reaso endProfile(ProfileKey::Resize); // If we are promoting the nursery, or exhausted the store buffer with // pointers to nursery things, which will force a collection well before // the nursery is full, look for object groups that are getting promoted // excessively and try to pretenure them. startProfile(ProfileKey::Pretenure); uint32_t pretenureCount = 0; - if (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER) { + if (promotionRate > 0.8 || IsFullStoreBufferReason(reason)) { JSContext* cx = TlsContext.get(); for (auto& entry : tenureCounts.entries) { if (entry.count >= 3000) { ObjectGroup* group = entry.group; if (group->canPreTenure()) { AutoCompartment ac(cx, group); group->setShouldPreTenure(cx); pretenureCount++;
--- a/js/src/gc/Statistics.cpp +++ b/js/src/gc/Statistics.cpp @@ -40,22 +40,31 @@ using mozilla::TimeDuration; /* * If this fails, then you can either delete this assertion and allow all * larger-numbered reasons to pile up in the last telemetry bucket, or switch * to GC_REASON_3 and bump the max value. */ JS_STATIC_ASSERT(JS::gcreason::NUM_TELEMETRY_REASONS >= JS::gcreason::NUM_REASONS); -static inline decltype(mozilla::MakeEnumeratedRange(PhaseKind::FIRST, PhaseKind::LIMIT)) +using PhaseKindRange = decltype(mozilla::MakeEnumeratedRange(PhaseKind::FIRST, PhaseKind::LIMIT)); + +static inline PhaseKindRange AllPhaseKinds() { return mozilla::MakeEnumeratedRange(PhaseKind::FIRST, PhaseKind::LIMIT); } +static inline PhaseKindRange +MajorGCPhaseKinds() +{ + return mozilla::MakeEnumeratedRange(PhaseKind::GC_BEGIN, + PhaseKind(size_t(PhaseKind::GC_END) + 1)); +} + const char* js::gcstats::ExplainInvocationKind(JSGCInvocationKind gckind) { MOZ_ASSERT(gckind == GC_NORMAL || gckind == GC_SHRINK); if (gckind == GC_NORMAL) return "Normal"; else return "Shrinking"; @@ -791,17 +800,17 @@ CheckSelfTime(Phase parent, phases[child].name, childTime.ToMilliseconds()); fflush(stderr); MOZ_CRASH(); } } static PhaseKind -LongestPhaseSelfTime(const Statistics::PhaseTimeTable& times) +LongestPhaseSelfTimeInMajorGC(const Statistics::PhaseTimeTable& times) { // Start with total times per expanded phase, including children's times. Statistics::PhaseTimeTable selfTimes(times); // We have the total time spent in each phase, including descendant times. // Loop over the children and subtract their times from their parent's self // time. for (auto i : AllPhases()) { @@ -815,17 +824,17 @@ LongestPhaseSelfTime(const Statistics::P // Sum expanded phases corresponding to the same phase. EnumeratedArray<PhaseKind, PhaseKind::LIMIT, TimeDuration> phaseTimes; for (auto i : AllPhaseKinds()) phaseTimes[i] = SumPhase(i, selfTimes); // Loop over this table to find the longest phase. TimeDuration longestTime = 0; PhaseKind longestPhase = PhaseKind::NONE; - for (auto i : AllPhaseKinds()) { + for (auto i : MajorGCPhaseKinds()) { if (phaseTimes[i] > longestTime) { longestTime = phaseTimes[i]; longestPhase = i; } } return longestPhase; } @@ -968,22 +977,22 @@ Statistics::endSlice() if (slice.budget.isTimeBudget()) { int64_t budget_ms = slice.budget.timeBudget.budget; runtime->addTelemetry(JS_TELEMETRY_GC_BUDGET_MS, budget_ms); if (budget_ms == runtime->gc.defaultSliceBudget()) runtime->addTelemetry(JS_TELEMETRY_GC_ANIMATION_MS, t(sliceTime)); // Record any phase that goes more than 2x over its budget. if (sliceTime.ToMilliseconds() > 2 * budget_ms) { - reportLongestPhase(slice.phaseTimes, JS_TELEMETRY_GC_SLOW_PHASE); + reportLongestPhaseInMajorGC(slice.phaseTimes, JS_TELEMETRY_GC_SLOW_PHASE); // If we spend a significant length of time waiting for parallel // tasks then report the longest task. 
TimeDuration joinTime = SumPhase(PhaseKind::JOIN_PARALLEL_TASKS, slice.phaseTimes); if (joinTime.ToMilliseconds() > budget_ms) - reportLongestPhase(slice.parallelTimes, JS_TELEMETRY_GC_SLOW_TASK); + reportLongestPhaseInMajorGC(slice.parallelTimes, JS_TELEMETRY_GC_SLOW_TASK); } } sliceCount_++; } bool last = !runtime->gc.isIncrementalGCInProgress(); if (last) { @@ -1022,19 +1031,19 @@ Statistics::endSlice() phaseStartTimes[Phase::MUTATOR] = mutatorStartTime; phaseTimes[Phase::MUTATOR] = mutatorTime; } aborted = false; } void -Statistics::reportLongestPhase(const PhaseTimeTable& times, int telemetryId) +Statistics::reportLongestPhaseInMajorGC(const PhaseTimeTable& times, int telemetryId) { - PhaseKind longest = LongestPhaseSelfTime(times); + PhaseKind longest = LongestPhaseSelfTimeInMajorGC(times); if (longest == PhaseKind::NONE) return; uint8_t bucket = phaseKinds[longest].telemetryBucket; runtime->addTelemetry(telemetryId, bucket); } bool
--- a/js/src/gc/Statistics.h +++ b/js/src/gc/Statistics.h @@ -366,17 +366,17 @@ FOR_EACH_GC_PROFILE_TIME(DEFINE_TIME_KEY void recordPhaseBegin(Phase phase); void recordPhaseEnd(Phase phase); void gcDuration(TimeDuration* total, TimeDuration* maxPause) const; void sccDurations(TimeDuration* total, TimeDuration* maxPause) const; void printStats(); - void reportLongestPhase(const PhaseTimeTable& times, int telemetryId); + void reportLongestPhaseInMajorGC(const PhaseTimeTable& times, int telemetryId); UniqueChars formatCompactSlicePhaseTimes(const PhaseTimeTable& phaseTimes) const; UniqueChars formatDetailedDescription() const; UniqueChars formatDetailedSliceDescription(unsigned i, const SliceData& slice) const; UniqueChars formatDetailedPhaseTimes(const PhaseTimeTable& phaseTimes) const; UniqueChars formatDetailedTotals() const;
--- a/js/src/gc/StoreBuffer.cpp +++ b/js/src/gc/StoreBuffer.cpp @@ -80,23 +80,23 @@ StoreBuffer::clear() bufferGeneric.clear(); for (ArenaCellSet* set = bufferWholeCell; set; set = set->next) set->arena->bufferedCells() = nullptr; bufferWholeCell = nullptr; } void -StoreBuffer::setAboutToOverflow() +StoreBuffer::setAboutToOverflow(JS::gcreason::Reason reason) { if (!aboutToOverflow_) { aboutToOverflow_ = true; runtime_->gc.stats().count(gcstats::STAT_STOREBUFFER_OVERFLOW); } - nursery_.requestMinorGC(JS::gcreason::FULL_STORE_BUFFER); + nursery_.requestMinorGC(reason); } void StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes) { sizes->storeBufferVals += bufferVal.sizeOfExcludingThis(mallocSizeOf); sizes->storeBufferCells += bufferCell.sizeOfExcludingThis(mallocSizeOf); @@ -133,17 +133,17 @@ js::gc::AllocateWholeCellSet(Arena* aren Nursery& nursery = zone->group()->nursery(); void* data = nursery.allocateBuffer(zone, sizeof(ArenaCellSet)); if (!data) { oomUnsafe.crash("Failed to allocate WholeCellSet"); return nullptr; } if (nursery.freeSpace() < ArenaCellSet::NurseryFreeThresholdBytes) - zone->group()->storeBuffer().setAboutToOverflow(); + zone->group()->storeBuffer().setAboutToOverflow(JS::gcreason::FULL_WHOLE_CELL_BUFFER); auto cells = static_cast<ArenaCellSet*>(data); new (cells) ArenaCellSet(arena); arena->bufferedCells() = cells; zone->group()->storeBuffer().addToWholeCellBuffer(cells); return cells; }
--- a/js/src/gc/StoreBuffer.h +++ b/js/src/gc/StoreBuffer.h @@ -116,17 +116,17 @@ class StoreBuffer if (last_) { AutoEnterOOMUnsafeRegion oomUnsafe; if (!stores_.put(last_)) oomUnsafe.crash("Failed to allocate for MonoTypeBuffer::put."); } last_ = T(); if (MOZ_UNLIKELY(stores_.count() > MaxEntries)) - owner->setAboutToOverflow(); + owner->setAboutToOverflow(T::FullBufferReason); } bool has(StoreBuffer* owner, const T& v) { sinkStore(owner); return stores_.has(v); } /* Trace the source of all edges in the store buffer. */ @@ -183,17 +183,17 @@ class StoreBuffer oomUnsafe.crash("Failed to allocate for GenericBuffer::put."); *sizep = size; T* tp = storage_->new_<T>(t); if (!tp) oomUnsafe.crash("Failed to allocate for GenericBuffer::put."); if (isAboutToOverflow()) - owner->setAboutToOverflow(); + owner->setAboutToOverflow(JS::gcreason::FULL_GENERIC_BUFFER); } size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) { return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0; } bool isEmpty() { return !storage_ || storage_->isEmpty(); @@ -229,16 +229,18 @@ class StoreBuffer CellPtrEdge tagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) | 1)); } CellPtrEdge untagged() const { return CellPtrEdge((Cell**)(uintptr_t(edge) & ~1)); } bool isTagged() const { return bool(uintptr_t(edge) & 1); } explicit operator bool() const { return edge != nullptr; } typedef PointerEdgeHasher<CellPtrEdge> Hasher; + + static const auto FullBufferReason = JS::gcreason::FULL_CELL_PTR_BUFFER; }; struct ValueEdge { JS::Value* edge; ValueEdge() : edge(nullptr) {} explicit ValueEdge(JS::Value* v) : edge(v) {} @@ -256,16 +258,18 @@ class StoreBuffer ValueEdge tagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) | 1)); } ValueEdge untagged() const { return ValueEdge((JS::Value*)(uintptr_t(edge) & ~1)); } bool isTagged() const { return bool(uintptr_t(edge) & 1); } explicit operator bool() const { return edge != nullptr; } typedef PointerEdgeHasher<ValueEdge> Hasher; + + static const auto FullBufferReason = JS::gcreason::FULL_VALUE_BUFFER; }; struct SlotsEdge { // These definitions must match those in HeapSlot::Kind. const static int SlotKind = 0; const static int ElementKind = 1; @@ -333,16 +337,18 @@ class StoreBuffer explicit operator bool() const { return objectAndKind_ != 0; } typedef struct { typedef SlotsEdge Lookup; static HashNumber hash(const Lookup& l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; } static bool match(const SlotsEdge& k, const Lookup& l) { return k == l; } } Hasher; + + static const auto FullBufferReason = JS::gcreason::FULL_SLOT_BUFFER; }; template <typename Buffer, typename Edge> void unput(Buffer& buffer, const Edge& edge) { MOZ_ASSERT(!JS::CurrentThreadIsHeapBusy()); MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime_)); if (!isEnabled()) return; @@ -426,17 +432,17 @@ class StoreBuffer void traceCells(TenuringTracer& mover) { bufferCell.trace(this, mover); } void traceSlots(TenuringTracer& mover) { bufferSlot.trace(this, mover); } void traceGenericEntries(JSTracer *trc) { bufferGeneric.trace(this, trc); } void traceWholeCells(TenuringTracer& mover); void traceWholeCell(TenuringTracer& mover, JS::TraceKind kind, Cell* cell); /* For use by our owned buffers and for testing. */ - void setAboutToOverflow(); + void setAboutToOverflow(JS::gcreason::Reason); void addToWholeCellBuffer(ArenaCellSet* set); void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes* sizes); }; // A set of cells in an arena used to implement the whole cell store buffer. 
class ArenaCellSet
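The StoreBuffer changes above thread a per-buffer reason into setAboutToOverflow(): each edge type gains a static FullBufferReason constant, and the generic MonoTypeBuffer::put() path reports T::FullBufferReason when it crosses MaxEntries. A self-contained sketch of that trait-constant-consulted-by-a-generic-container shape, with invented Reason values and stand-in edge types rather than the real SpiderMonkey classes:

```cpp
// Sketch only: Reason values and edge types are invented stand-ins.
#include <cstddef>
#include <cstdio>
#include <vector>

enum class Reason { FULL_VALUE_BUFFER, FULL_CELL_PTR_BUFFER, FULL_SLOT_BUFFER };

struct Owner {
    void setAboutToOverflow(Reason reason) {
        // In the real code this requests a minor GC with the given reason.
        std::printf("about to overflow, reason %d\n", static_cast<int>(reason));
    }
};

// Each edge type names the reason to report when its buffer fills up.
struct ValueEdge   { static constexpr Reason FullBufferReason = Reason::FULL_VALUE_BUFFER; };
struct CellPtrEdge { static constexpr Reason FullBufferReason = Reason::FULL_CELL_PTR_BUFFER; };

template <typename Edge>
struct MonoTypeBuffer {
    std::vector<Edge> stores_;
    static constexpr std::size_t MaxEntries = 8;

    void put(Owner* owner, const Edge& edge) {
        stores_.push_back(edge);
        if (stores_.size() > MaxEntries)
            owner->setAboutToOverflow(Edge::FullBufferReason);  // per-type reason
    }
};

int main() {
    Owner owner;
    MonoTypeBuffer<ValueEdge> values;
    for (int i = 0; i < 10; i++)
        values.put(&owner, ValueEdge{});  // reports FULL_VALUE_BUFFER past the cap
}
```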
--- a/js/src/gc/Zone.cpp +++ b/js/src/gc/Zone.cpp @@ -44,16 +44,17 @@ JS::Zone::Zone(JSRuntime* rt, ZoneGroup* atomCache_(group), externalStringCache_(group), usage(&rt->gc.usage), threshold(), gcDelayBytes(0), propertyTree_(group, this), baseShapes_(group, this), initialShapes_(group, this), + nurseryShapes_(group), data(group, nullptr), isSystem(group, false), #ifdef DEBUG gcLastSweepGroupIndex(group, 0), #endif jitZone_(group, nullptr), gcScheduled_(false), gcPreserveCode_(group, false),
--- a/js/src/gc/Zone.h +++ b/js/src/gc/Zone.h @@ -475,16 +475,23 @@ struct Zone : public JS::shadow::Zone, // Set of initial shapes in the Zone. For certain prototypes -- namely, // those of various builtin classes -- there are two entries: one for a // lookup via TaggedProto, and one for a lookup via JSProtoKey. See // InitialShapeProto. js::ZoneGroupData<js::InitialShapeSet> initialShapes_; public: js::InitialShapeSet& initialShapes() { return initialShapes_.ref(); } + private: + // List of shapes that may contain nursery pointers. + using NurseryShapeVector = js::Vector<js::AccessorShape*, 0, js::SystemAllocPolicy>; + js::ZoneGroupData<NurseryShapeVector> nurseryShapes_; + public: + NurseryShapeVector& nurseryShapes() { return nurseryShapes_.ref(); } + #ifdef JSGC_HASH_TABLE_CHECKS void checkInitialShapesTableAfterMovingGC(); void checkBaseShapeTableAfterMovingGC(); #endif void fixupInitialShapeTable(); void fixupAfterMovingGC(); // Per-zone data for use by an embedder.
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/ion/bug1375404.js @@ -0,0 +1,10 @@ +// |jit-test| error:ReferenceError + +Object.prototype.f = 42; +var T = TypedObject; +var ObjectStruct = new T.StructType({f: T.Object}); +Object.prototype.f = 42; +var o = new ObjectStruct(); +evaluate(` + o.f %= p; +`);
--- a/js/src/jit/CacheIRCompiler.cpp +++ b/js/src/jit/CacheIRCompiler.cpp @@ -66,17 +66,17 @@ CacheRegisterAllocator::useValueRegister popPayload(masm, &loc, reg.scratchReg()); masm.tagValue(loc.payloadType(), reg.scratchReg(), reg); loc.setValueReg(reg); return reg; } case OperandLocation::DoubleReg: { ValueOperand reg = allocateValueRegister(masm); - masm.boxDouble(loc.doubleReg(), reg); + masm.boxDouble(loc.doubleReg(), reg, ScratchDoubleReg); loc.setValueReg(reg); return reg; } case OperandLocation::Uninitialized: break; } @@ -112,17 +112,17 @@ CacheRegisterAllocator::useFixedValueReg MOZ_ASSERT(!currentOpRegs_.has(loc.payloadReg()), "Register shouldn't be in use"); availableRegs_.add(loc.payloadReg()); break; case OperandLocation::PayloadStack: popPayload(masm, &loc, reg.scratchReg()); masm.tagValue(loc.payloadType(), reg.scratchReg(), reg); break; case OperandLocation::DoubleReg: - masm.boxDouble(loc.doubleReg(), reg); + masm.boxDouble(loc.doubleReg(), reg, ScratchDoubleReg); break; case OperandLocation::Uninitialized: MOZ_CRASH(); } loc.setValueReg(reg); return reg; }
--- a/js/src/jit/CodeGenerator.cpp +++ b/js/src/jit/CodeGenerator.cpp @@ -3443,17 +3443,17 @@ CodeGenerator::visitMaybeToDoubleElement Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS), &convert); masm.tagValue(JSVAL_TYPE_INT32, value, out); masm.jump(&done); masm.bind(&convert); masm.convertInt32ToDouble(value, temp); - masm.boxDouble(temp, out); + masm.boxDouble(temp, out, temp); masm.bind(&done); } typedef bool (*CopyElementsForWriteFn)(JSContext*, NativeObject*); static const VMFunction CopyElementsForWriteInfo = FunctionInfo<CopyElementsForWriteFn>(NativeObject::CopyElementsForWrite, "NativeObject::CopyElementsForWrite");
--- a/js/src/jit/IonBuilder.cpp +++ b/js/src/jit/IonBuilder.cpp @@ -3417,17 +3417,18 @@ IonBuilder::powTrySpecialized(bool* emit current->push(output); *emitted = true; return Ok(); } static inline bool SimpleArithOperand(MDefinition* op) { - return !op->mightBeType(MIRType::Object) + return !op->emptyResultTypeSet() + && !op->mightBeType(MIRType::Object) && !op->mightBeType(MIRType::String) && !op->mightBeType(MIRType::Symbol) && !op->mightBeType(MIRType::MagicOptimizedArguments) && !op->mightBeType(MIRType::MagicHole) && !op->mightBeType(MIRType::MagicIsConstructing); } AbortReasonOr<Ok> @@ -7237,17 +7238,16 @@ IonBuilder::ensureDefiniteType(MDefiniti return def; default: { if (def->type() != MIRType::Value) { if (def->type() == MIRType::Int32 && definiteType == MIRType::Double) { replace = MToDouble::New(alloc(), def); break; } - MOZ_ASSERT(def->type() == definiteType); return def; } replace = MUnbox::New(alloc(), def, definiteType, MUnbox::Infallible); break; } } current->add(replace);
--- a/js/src/jit/MacroAssembler.cpp +++ b/js/src/jit/MacroAssembler.cpp @@ -433,35 +433,35 @@ MacroAssembler::loadFromTypedArray(Scala branchTest32(Assembler::Signed, temp, temp, &isDouble); { tagValue(JSVAL_TYPE_INT32, temp, dest); jump(&done); } bind(&isDouble); { convertUInt32ToDouble(temp, ScratchDoubleReg); - boxDouble(ScratchDoubleReg, dest); + boxDouble(ScratchDoubleReg, dest, ScratchDoubleReg); } bind(&done); } else { // Bailout if the value does not fit in an int32. branchTest32(Assembler::Signed, temp, temp, fail); tagValue(JSVAL_TYPE_INT32, temp, dest); } break; case Scalar::Float32: loadFromTypedArray(arrayType, src, AnyRegister(ScratchFloat32Reg), dest.scratchReg(), nullptr); convertFloat32ToDouble(ScratchFloat32Reg, ScratchDoubleReg); - boxDouble(ScratchDoubleReg, dest); + boxDouble(ScratchDoubleReg, dest, ScratchDoubleReg); break; case Scalar::Float64: loadFromTypedArray(arrayType, src, AnyRegister(ScratchDoubleReg), dest.scratchReg(), nullptr); - boxDouble(ScratchDoubleReg, dest); + boxDouble(ScratchDoubleReg, dest, ScratchDoubleReg); break; default: MOZ_CRASH("Invalid typed array type"); } } template void MacroAssembler::loadFromTypedArray(Scalar::Type arrayType, const Address& src, const ValueOperand& dest, bool allowDouble, Register temp, Label* fail); @@ -1912,17 +1912,17 @@ MacroAssembler::convertInt32ValueToDoubl void MacroAssembler::convertInt32ValueToDouble(ValueOperand val) { Label done; branchTestInt32(Assembler::NotEqual, val, &done); unboxInt32(val, val.scratchReg()); convertInt32ToDouble(val.scratchReg(), ScratchDoubleReg); - boxDouble(ScratchDoubleReg, val); + boxDouble(ScratchDoubleReg, val, ScratchDoubleReg); bind(&done); } void MacroAssembler::convertValueToFloatingPoint(ValueOperand value, FloatRegister output, Label* fail, MIRType outputType) { Register tag = splitTagForTest(value);
--- a/js/src/jit/SharedIC.cpp +++ b/js/src/jit/SharedIC.cpp @@ -1084,17 +1084,17 @@ ICBinaryArith_Double::Compiler::generate masm.passABIArg(FloatReg1, MoveOp::DOUBLE); masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, NumberMod), MoveOp::DOUBLE); MOZ_ASSERT(ReturnDoubleReg == FloatReg0); break; default: MOZ_CRASH("Unexpected op"); } - masm.boxDouble(FloatReg0, R0); + masm.boxDouble(FloatReg0, R0, FloatReg0); EmitReturnFromIC(masm); // Failure case - jump to next stub masm.bind(&failure); EmitStubGuardFailure(masm); return true; } @@ -1341,17 +1341,17 @@ ICUnaryArith_Double::Compiler::generateS { Label failure; masm.ensureDouble(R0, FloatReg0, &failure); MOZ_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT); if (op == JSOP_NEG) { masm.negateDouble(FloatReg0); - masm.boxDouble(FloatReg0, R0); + masm.boxDouble(FloatReg0, R0, FloatReg0); } else { // Truncate the double to an int32. Register scratchReg = R1.scratchReg(); Label doneTruncate; Label truncateABICall; masm.branchTruncateDoubleMaybeModUint32(FloatReg0, scratchReg, &truncateABICall); masm.jump(&doneTruncate);
--- a/js/src/jit/arm/MacroAssembler-arm.cpp +++ b/js/src/jit/arm/MacroAssembler-arm.cpp @@ -3100,17 +3100,17 @@ MacroAssemblerARMCompat::unboxValue(cons void MacroAssemblerARMCompat::unboxPrivate(const ValueOperand& src, Register dest) { ma_mov(src.payloadReg(), dest); } void -MacroAssemblerARMCompat::boxDouble(FloatRegister src, const ValueOperand& dest) +MacroAssemblerARMCompat::boxDouble(FloatRegister src, const ValueOperand& dest, FloatRegister) { as_vxfer(dest.payloadReg(), dest.typeReg(), VFPRegister(src), FloatToCore); } void MacroAssemblerARMCompat::boxNonDouble(JSValueType type, Register src, const ValueOperand& dest) { if (src != dest.payloadReg()) ma_mov(src, dest.payloadReg());
--- a/js/src/jit/arm/MacroAssembler-arm.h +++ b/js/src/jit/arm/MacroAssembler-arm.h @@ -775,17 +775,17 @@ class MacroAssemblerARMCompat : public M void unboxValue(const ValueOperand& src, AnyRegister dest); void unboxPrivate(const ValueOperand& src, Register dest); void notBoolean(const ValueOperand& val) { as_eor(val.payloadReg(), val.payloadReg(), Imm8(1)); } // Boxing code. - void boxDouble(FloatRegister src, const ValueOperand& dest); + void boxDouble(FloatRegister src, const ValueOperand& dest, FloatRegister); void boxNonDouble(JSValueType type, Register src, const ValueOperand& dest); // Extended unboxing API. If the payload is already in a register, returns // that register. Otherwise, provides a move to the given scratch register, // and returns that. Register extractObject(const Address& address, Register scratch); Register extractObject(const ValueOperand& value, Register scratch) { return value.payloadReg();
--- a/js/src/jit/arm/SharedIC-arm.cpp +++ b/js/src/jit/arm/SharedIC-arm.cpp @@ -139,17 +139,17 @@ ICBinaryArith_Int32::Compiler::generateS // Move result and box for return. masm.mov(scratchReg, R0.payloadReg()); EmitReturnFromIC(masm); masm.bind(&toUint); ScratchDoubleScope scratchDouble(masm); masm.convertUInt32ToDouble(scratchReg, scratchDouble); - masm.boxDouble(scratchDouble, R0); + masm.boxDouble(scratchDouble, R0, scratchDouble); } else { masm.j(Assembler::LessThan, &failure); // Move result for return. masm.mov(scratchReg, R0.payloadReg()); } break; default: MOZ_CRASH("Unhandled op for BinaryArith_Int32.");
--- a/js/src/jit/arm64/MacroAssembler-arm64.h +++ b/js/src/jit/arm64/MacroAssembler-arm64.h @@ -1299,17 +1299,17 @@ class MacroAssemblerCompat : public vixl void branchNegativeZero(FloatRegister reg, Register scratch, Label* label) { MOZ_CRASH("branchNegativeZero"); } void branchNegativeZeroFloat32(FloatRegister reg, Register scratch, Label* label) { MOZ_CRASH("branchNegativeZeroFloat32"); } - void boxDouble(FloatRegister src, const ValueOperand& dest) { + void boxDouble(FloatRegister src, const ValueOperand& dest, FloatRegister) { Fmov(ARMRegister(dest.valueReg(), 64), ARMFPRegister(src, 64)); } void boxNonDouble(JSValueType type, Register src, const ValueOperand& dest) { boxValue(type, src, dest.valueReg()); } // Note that the |dest| register here may be ScratchReg, so we shouldn't use it. void unboxInt32(const ValueOperand& src, Register dest) {
--- a/js/src/jit/arm64/SharedIC-arm64.cpp +++ b/js/src/jit/arm64/SharedIC-arm64.cpp @@ -139,17 +139,17 @@ ICBinaryArith_Int32::Compiler::generateS // Testing for negative is equivalent to testing bit 31 masm.Tbnz(Wscratch, 31, &toUint); // Move result and box for return. masm.movePayload(Rscratch, R0_); EmitReturnFromIC(masm); masm.bind(&toUint); masm.convertUInt32ToDouble(Rscratch, ScratchDoubleReg); - masm.boxDouble(ScratchDoubleReg, R0); + masm.boxDouble(ScratchDoubleReg, R0, ScratchDoubleReg); } else { // Testing for negative is equivalent to testing bit 31 masm.Tbnz(Wscratch, 31, &failure); // Move result for return. masm.movePayload(Rscratch, R0_); } break; default:
--- a/js/src/jit/none/MacroAssembler-none.h +++ b/js/src/jit/none/MacroAssembler-none.h @@ -355,17 +355,17 @@ class MacroAssemblerNone : public Assemb template <typename T, typename S> void atomicFetchXor16ZeroExtend(const T& value, const S& mem, Register temp, Register output) { MOZ_CRASH(); } template <typename T, typename S> void atomicFetchXor32(const T& value, const S& mem, Register temp, Register output) { MOZ_CRASH(); } template <typename T, typename S> void atomicXor8(const T& value, const S& mem) { MOZ_CRASH(); } template <typename T, typename S> void atomicXor16(const T& value, const S& mem) { MOZ_CRASH(); } template <typename T, typename S> void atomicXor32(const T& value, const S& mem) { MOZ_CRASH(); } Register splitTagForTest(ValueOperand) { MOZ_CRASH(); } - void boxDouble(FloatRegister, ValueOperand) { MOZ_CRASH(); } + void boxDouble(FloatRegister, ValueOperand, FloatRegister) { MOZ_CRASH(); } void boxNonDouble(JSValueType, Register, ValueOperand) { MOZ_CRASH(); } template <typename T> void unboxInt32(T, Register) { MOZ_CRASH(); } template <typename T> void unboxBoolean(T, Register) { MOZ_CRASH(); } template <typename T> void unboxString(T, Register) { MOZ_CRASH(); } template <typename T> void unboxSymbol(T, Register) { MOZ_CRASH(); } template <typename T> void unboxObject(T, Register) { MOZ_CRASH(); } template <typename T> void unboxDouble(T, FloatRegister) { MOZ_CRASH(); } void unboxValue(const ValueOperand&, AnyRegister) { MOZ_CRASH(); }
--- a/js/src/jit/x64/MacroAssembler-x64.h +++ b/js/src/jit/x64/MacroAssembler-x64.h @@ -702,17 +702,17 @@ class MacroAssemblerX64 : public MacroAs emitSet(cond, dest); } void testUndefinedSet(Condition cond, const ValueOperand& value, Register dest) { cond = testUndefined(cond, value); emitSet(cond, dest); } - void boxDouble(FloatRegister src, const ValueOperand& dest) { + void boxDouble(FloatRegister src, const ValueOperand& dest, FloatRegister) { vmovq(src, dest.valueReg()); } void boxNonDouble(JSValueType type, Register src, const ValueOperand& dest) { MOZ_ASSERT(src != dest.valueReg()); boxValue(type, src, dest.valueReg()); } // Note that the |dest| register here may be ScratchReg, so we shouldn't
--- a/js/src/jit/x64/SharedIC-x64.cpp +++ b/js/src/jit/x64/SharedIC-x64.cpp @@ -153,17 +153,17 @@ ICBinaryArith_Int32::Compiler::generateS // Box and return. masm.boxValue(JSVAL_TYPE_INT32, ExtractTemp0, R0.valueReg()); EmitReturnFromIC(masm); masm.bind(&toUint); ScratchDoubleScope scratchDouble(masm); masm.convertUInt32ToDouble(ExtractTemp0, scratchDouble); - masm.boxDouble(scratchDouble, R0); + masm.boxDouble(scratchDouble, R0, scratchDouble); } else { masm.j(Assembler::Signed, &revertRegister); masm.boxValue(JSVAL_TYPE_INT32, ExtractTemp0, R0.valueReg()); } break; default: MOZ_CRASH("Unhandled op in BinaryArith_Int32"); }
--- a/js/src/jit/x86/CodeGenerator-x86.cpp +++ b/js/src/jit/x86/CodeGenerator-x86.cpp @@ -114,17 +114,17 @@ CodeGeneratorX86::visitBoxFloatingPoint( const LAllocation* in = box->getOperand(0); const ValueOperand out = ToOutValue(box); FloatRegister reg = ToFloatRegister(in); if (box->type() == MIRType::Float32) { masm.convertFloat32ToDouble(reg, ScratchFloat32Reg); reg = ScratchFloat32Reg; } - masm.boxDouble(reg, out); + masm.boxDouble(reg, out, reg); } void CodeGeneratorX86::visitUnbox(LUnbox* unbox) { // Note that for unbox, the type and payload indexes are switched on the // inputs. MUnbox* mir = unbox->mir();
--- a/js/src/jit/x86/MacroAssembler-x86.h +++ b/js/src/jit/x86/MacroAssembler-x86.h @@ -657,25 +657,26 @@ class MacroAssemblerX86 : public MacroAs movl(imm.low(), Operand(Address(address.base, address.offset + INT64LOW_OFFSET))); movl(imm.hi(), Operand(Address(address.base, address.offset + INT64HIGH_OFFSET))); } void setStackArg(Register reg, uint32_t arg) { movl(reg, Operand(esp, arg * sizeof(intptr_t))); } - // Note: this function clobbers the source register. - void boxDouble(FloatRegister src, const ValueOperand& dest) { + void boxDouble(FloatRegister src, const ValueOperand& dest, FloatRegister temp) { if (Assembler::HasSSE41()) { vmovd(src, dest.payloadReg()); vpextrd(1, src, dest.typeReg()); } else { vmovd(src, dest.payloadReg()); - vpsrldq(Imm32(4), src, src); - vmovd(src, dest.typeReg()); + if (src != temp) + moveDouble(src, temp); + vpsrldq(Imm32(4), temp, temp); + vmovd(temp, dest.typeReg()); } } void boxNonDouble(JSValueType type, Register src, const ValueOperand& dest) { if (src != dest.payloadReg()) movl(src, dest.payloadReg()); movl(ImmType(type), dest.typeReg()); }
--- a/js/src/jit/x86/SharedIC-x86.cpp +++ b/js/src/jit/x86/SharedIC-x86.cpp @@ -155,17 +155,17 @@ ICBinaryArith_Int32::Compiler::generateS masm.j(Assembler::Signed, &toUint); // Box and return. masm.tagValue(JSVAL_TYPE_INT32, R0.payloadReg(), R0); EmitReturnFromIC(masm); masm.bind(&toUint); masm.convertUInt32ToDouble(R0.payloadReg(), ScratchDoubleReg); - masm.boxDouble(ScratchDoubleReg, R0); + masm.boxDouble(ScratchDoubleReg, R0, ScratchDoubleReg); } else { masm.j(Assembler::Signed, &revertRegister); masm.tagValue(JSVAL_TYPE_INT32, R0.payloadReg(), R0); } break; default: MOZ_CRASH("Unhandled op for BinaryArith_Int32."); }
--- a/js/src/jsgc.cpp +++ b/js/src/jsgc.cpp @@ -6774,17 +6774,17 @@ GCRuntime::gcCycle(bool nonincrementalBy { // Note that the following is allowed to re-enter GC in the finalizer. AutoNotifyGCActivity notify(*this); gcstats::AutoGCSlice agc(stats(), scanZonesBeforeGC(), invocationKind, budget, reason); AutoExposeLiveCrossZoneEdges aelcze(&foundBlackGrayEdges.ref()); - EvictAllNurseries(rt, reason); + minorGC(reason, gcstats::PhaseKind::EVICT_NURSERY_FOR_MAJOR_GC); AutoTraceSession session(rt, JS::HeapState::MajorCollecting); majorGCTriggerReason = JS::gcreason::NO_REASON; interFrameGC = true; number++; if (!isIncrementalGCInProgress()) @@ -6983,26 +6983,25 @@ GCRuntime::collect(bool nonincrementalBy MOZ_ASSERT(!isIncrementalGCInProgress()); break; } /* * Sometimes when we finish a GC we need to immediately start a new one. * This happens in the following cases: * - when we reset the current GC - * - when finalizers drop roots during shutdown (the cleanUpEverything - * case) + * - when finalizers drop roots during shutdown * - when zones that we thought were dead at the start of GC are * not collected (see the large comment in beginMarkPhase) */ repeat = false; if (!isIncrementalGCInProgress()) { if (wasReset) { repeat = true; - } else if (rootsRemoved && cleanUpEverything) { + } else if (rootsRemoved && IsShutdownGC(reason)) { /* Need to re-schedule all zones for GC. */ JS::PrepareForFullGC(rt->activeContextFromOwnThread()); repeat = true; reason = JS::gcreason::ROOTS_REMOVED; } else if (shouldRepeatForDeadZone(reason)) { repeat = true; reason = JS::gcreason::COMPARTMENT_REVIVED; }
--- a/js/src/tests/js1_8_5/extensions/collect-gray.js +++ b/js/src/tests/js1_8_5/extensions/collect-gray.js @@ -63,16 +63,19 @@ assertEq(marks[3], 'gray', 'black map, g // Test edges from map&delegate => key and map&key => value. // // In general, when a&b => x, then if both a and b are black, then x must be // black. If either is gray and the other is marked (gray or black), then x // must be gray (unless otherwise reachable from black.) If neither a nor b is // marked at all, then they will not keep x alive. +if (typeof gczeal !== 'undefined') + gczeal(0); + clearMarkObservers(); // Black map, gray delegate => gray key // wm is in a variable, so is black. wm = new WeakMap(); let key = Object.create(null);
--- a/js/src/vm/Shape-inl.h +++ b/js/src/vm/Shape-inl.h @@ -125,16 +125,73 @@ Shape::new_(JSContext* cx, Handle<StackS inline void Shape::updateBaseShapeAfterMovingGC() { BaseShape* base = base_; if (IsForwarded(base)) base_.unsafeSet(Forwarded(base)); } +static inline void +GetterSetterWriteBarrierPost(AccessorShape* shape) +{ + // If the shape contains any nursery pointers then add it to a vector on the + // zone that we fixup on minor GC. Prevent this vector growing too large + // since we don't tolerate OOM here. + + static const size_t MaxShapeVectorLength = 5000; + + MOZ_ASSERT(shape); + + if (!(shape->hasGetterObject() && IsInsideNursery(shape->getterObject())) && + !(shape->hasSetterObject() && IsInsideNursery(shape->setterObject()))) + { + return; + } + + auto& nurseryShapes = shape->zone()->nurseryShapes(); + + { + AutoEnterOOMUnsafeRegion oomUnsafe; + if (!nurseryShapes.append(shape)) + oomUnsafe.crash("GetterSetterWriteBarrierPost"); + } + + auto& storeBuffer = shape->runtimeFromActiveCooperatingThread()->gc.storeBuffer(); + if (nurseryShapes.length() == 1) { + storeBuffer.putGeneric(NurseryShapesRef(shape->zone())); + } else if (nurseryShapes.length() == MaxShapeVectorLength) { + storeBuffer.setAboutToOverflow(JS::gcreason::FULL_SHAPE_BUFFER); + } +} + +inline +AccessorShape::AccessorShape(const StackShape& other, uint32_t nfixed) + : Shape(other, nfixed), + rawGetter(other.rawGetter), + rawSetter(other.rawSetter) +{ + MOZ_ASSERT(getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE); + GetterSetterWriteBarrierPost(this); +} + +inline void +Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, GCPtrShape* dictp) +{ + if (child.isAccessorShape()) + new (this) AccessorShape(child, nfixed); + else + new (this) Shape(child, nfixed); + this->flags |= IN_DICTIONARY; + + this->listp = nullptr; + if (dictp) + insertIntoDictionary(dictp); +} + template<class ObjectSubclass> /* static */ inline bool EmptyShape::ensureInitialCustomShape(JSContext* cx, Handle<ObjectSubclass*> obj) { static_assert(mozilla::IsBaseOf<JSObject, ObjectSubclass>::value, "ObjectSubclass must be a subclass of JSObject"); // If the provided object has a non-empty shape, it was given the cached
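GetterSetterWriteBarrierPost() above appends any shape whose getter or setter lives in the nursery to a per-zone vector, registers a single generic store-buffer entry (NurseryShapesRef) when the first shape goes in, and flags the store buffer as about to overflow once the vector hits MaxShapeVectorLength so a minor GC drains it before it grows without bound. A minimal stand-alone sketch of that register-once, flush-at-a-cap pattern, with invented types and a reason-less setAboutToOverflow() (the real call passes FULL_SHAPE_BUFFER):

```cpp
// Sketch only: none of these are the SpiderMonkey classes.
#include <cstddef>
#include <cstdio>
#include <functional>
#include <vector>

struct Shape { int id; };

struct Zone {
    std::vector<Shape*> nurseryShapes;
};

class StoreBuffer {
    std::vector<std::function<void()>> genericEntries_;
    bool aboutToOverflow_ = false;
public:
    void putGeneric(std::function<void()> traceFn) { genericEntries_.push_back(std::move(traceFn)); }
    void setAboutToOverflow() { aboutToOverflow_ = true; }  // would request an early minor GC
    void drain() {                                          // runs as part of a minor GC
        for (auto& entry : genericEntries_)
            entry();
        genericEntries_.clear();
        aboutToOverflow_ = false;
    }
};

static const std::size_t MaxShapeVectorLength = 5000;  // same cap as the patch

void GetterSetterPostBarrier(Zone* zone, StoreBuffer& sb, Shape* shape) {
    zone->nurseryShapes.push_back(shape);
    if (zone->nurseryShapes.size() == 1) {
        // One generic entry per zone, not one per shape: the callback walks the
        // whole vector when the minor GC drains the buffer (cf. NurseryShapesRef::trace).
        sb.putGeneric([zone] {
            std::printf("fixing up %zu shapes\n", zone->nurseryShapes.size());
            zone->nurseryShapes.clear();
        });
    } else if (zone->nurseryShapes.size() == MaxShapeVectorLength) {
        sb.setAboutToOverflow();  // cap the vector by forcing an early minor GC
    }
}

int main() {
    Zone zone;
    StoreBuffer sb;
    Shape a{1}, b{2};
    GetterSetterPostBarrier(&zone, sb, &a);
    GetterSetterPostBarrier(&zone, sb, &b);
    sb.drain();  // prints "fixing up 2 shapes"
}
```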
--- a/js/src/vm/Shape.cpp +++ b/js/src/vm/Shape.cpp @@ -1618,16 +1618,25 @@ Shape::fixupAfterMovingGC() { if (inDictionary()) fixupDictionaryShapeAfterMovingGC(); else fixupShapeTreeAfterMovingGC(); } void +NurseryShapesRef::trace(JSTracer* trc) +{ + auto& shapes = zone_->nurseryShapes(); + for (auto shape : shapes) + shape->fixupGetterSetterForBarrier(trc); + shapes.clearAndFree(); +} + +void Shape::fixupGetterSetterForBarrier(JSTracer* trc) { if (!hasGetterValue() && !hasSetterValue()) return; JSObject* priorGetter = asAccessorShape().getterObj; JSObject* priorSetter = asAccessorShape().setterObj; if (!priorGetter && !priorSetter)
--- a/js/src/vm/Shape.h +++ b/js/src/vm/Shape.h @@ -1551,58 +1551,28 @@ Shape::Shape(const StackShape& other, ui #endif MOZ_ASSERT_IF(!isEmptyShape(), AtomIsMarked(zone(), propid())); MOZ_ASSERT_IF(attrs & (JSPROP_GETTER | JSPROP_SETTER), attrs & JSPROP_SHARED); kids.setNull(); } -// This class is used to add a post barrier on the AccessorShape's getter/setter -// objects. It updates the pointers and the shape's entry in the parent's -// KidsHash table. -class ShapeGetterSetterRef : public gc::BufferableRef +// This class is used to update any shapes in a zone that have nursery objects +// as getters/setters. It updates the pointers and the shapes' entries in the +// parents' KidsHash tables. +class NurseryShapesRef : public gc::BufferableRef { - AccessorShape* shape_; + Zone* zone_; public: - explicit ShapeGetterSetterRef(AccessorShape* shape) : shape_(shape) {} - void trace(JSTracer* trc) override { shape_->fixupGetterSetterForBarrier(trc); } + explicit NurseryShapesRef(Zone* zone) : zone_(zone) {} + void trace(JSTracer* trc) override; }; -static inline void -GetterSetterWriteBarrierPost(AccessorShape* shape) -{ - MOZ_ASSERT(shape); - if (shape->hasGetterObject()) { - gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(shape->getterObject())->storeBuffer(); - if (sb) { - sb->putGeneric(ShapeGetterSetterRef(shape)); - return; - } - } - if (shape->hasSetterObject()) { - gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(shape->setterObject())->storeBuffer(); - if (sb) { - sb->putGeneric(ShapeGetterSetterRef(shape)); - return; - } - } -} - -inline -AccessorShape::AccessorShape(const StackShape& other, uint32_t nfixed) - : Shape(other, nfixed), - rawGetter(other.rawGetter), - rawSetter(other.rawSetter) -{ - MOZ_ASSERT(getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE); - GetterSetterWriteBarrierPost(this); -} - inline Shape::Shape(UnownedBaseShape* base, uint32_t nfixed) : base_(base), propid_(JSID_EMPTY), slotInfo(SHAPE_INVALID_SLOT | (nfixed << FIXED_SLOTS_SHIFT)), attrs(JSPROP_SHARED), flags(0), parent(nullptr) @@ -1632,30 +1602,16 @@ Shape::getterObject() const inline JSObject* Shape::setterObject() const { MOZ_ASSERT(hasSetterValue()); return asAccessorShape().setterObj; } -inline void -Shape::initDictionaryShape(const StackShape& child, uint32_t nfixed, GCPtrShape* dictp) -{ - if (child.isAccessorShape()) - new (this) AccessorShape(child, nfixed); - else - new (this) Shape(child, nfixed); - this->flags |= IN_DICTIONARY; - - this->listp = nullptr; - if (dictp) - insertIntoDictionary(dictp); -} - inline Shape* Shape::searchLinear(jsid id) { for (Shape* shape = this; shape; ) { if (shape->propidRef() == id) return shape; shape = shape->parent; }
--- a/layout/reftests/image-rect/reftest.list +++ b/layout/reftests/image-rect/reftest.list @@ -3,14 +3,14 @@ == background-common-usage-pixel.html background-common-usage-ref.html == background-draw-nothing-empty-rect.html background-draw-nothing-ref.html == background-draw-nothing-invalid-syntax.html background-draw-nothing-ref.html == background-draw-nothing-malformed-images.html background-draw-nothing-ref.html == background-monster-rect.html background-monster-rect-ref.html == background-over-size-rect.html background-over-size-rect-ref.html == background-test-parser.html background-test-parser-ref.html fuzzy-if(Android,113,124) == background-with-other-properties.html background-with-other-properties-ref.html -fuzzy-if(Android,16,22) == background-zoom-1.html background-zoom-1-ref.html # Bug 1128229 +fuzzy-if(Android,16,22) fuzzy-if(webrender,16,44) == background-zoom-1.html background-zoom-1-ref.html # Bug 1128229 == background-zoom-2.html background-zoom-2-ref.html == background-zoom-3.html background-zoom-3-ref.html == background-zoom-4.html background-zoom-4-ref.html == dom-api-computed-style.html dom-api-ref.html == dom-api.html dom-api-ref.html
--- a/layout/reftests/invalidation/reftest.list +++ b/layout/reftests/invalidation/reftest.list @@ -45,20 +45,20 @@ pref(layout.animated-image-layers.enable # All the tests marked with random-if(webrender) are specific to "layers" and therefore not really valid with WebRender enabled. # We are marking them random-if so that we ensure they don't crash, but allow any non-crash result. fails-if(webrender) != scroll-inactive-layers.html about:blank fails-if(webrender) != scroll-inactive-layers-2.html about:blank fails-if(webrender) != inactive-layertree-visible-region-1.html about:blank fails-if(webrender) != inactive-layertree-visible-region-2.html about:blank -fails-if(webrender) != transform-floating-point-invalidation.html about:blank -fails-if(webrender) != transform-floating-point-invalidation.html?reverse about:blank -fails-if(webrender) != nudge-to-integer-invalidation.html about:blank -fails-if(webrender) != nudge-to-integer-invalidation.html?reverse about:blank +!= transform-floating-point-invalidation.html about:blank +!= transform-floating-point-invalidation.html?reverse about:blank +!= nudge-to-integer-invalidation.html about:blank +!= nudge-to-integer-invalidation.html?reverse about:blank fails-if(webrender) skip-if(styloVsGecko) != clipped-animated-transform-1.html about:blank # Bug 1352628 (styloVsGecko) random-if(webrender) != paintedlayer-recycling-1.html about:blank random-if(webrender) != paintedlayer-recycling-2.html about:blank pref(layers.single-tile.enabled,false) random-if(webrender) != paintedlayer-recycling-3.html about:blank random-if(webrender) != paintedlayer-recycling-4.html about:blank random-if(webrender) != paintedlayer-recycling-5.html about:blank random-if(webrender) != paintedlayer-recycling-6.html about:blank random-if(webrender) != paintedlayer-recycling-7.html about:blank
--- a/toolkit/components/telemetry/Histograms.json +++ b/toolkit/components/telemetry/Histograms.json @@ -12914,17 +12914,17 @@ "keyed": true, "n_values": 6, "description": "Number of revoke actions on permissions in the control center, keyed by permission id. Values represent the permission type that was revoked. (0=unknown, 1=permanently allowed, 2=permanently blocked, 3=temporarily allowed, 4=temporarily blocked)" }, "JS_AOT_USAGE": { "record_in_processes": ["main", "content"], "alert_emails": ["luke@mozilla.com", "bbouvier@mozilla.com"], "bug_numbers": [1288778], - "expires_in_version": "56", + "expires_in_version": "60", "kind": "enumerated", "n_values": 4, "description": "Counts the number of asm.js vs WebAssembly modules instanciations, at the time modules are getting instanciated." }, "TIME_TO_FIRST_CLICK_MS": { "record_in_processes": ["main", "content"], "alert_emails": ["hkirschner@mozilla.com"], "bug_numbers": [1307675, 1332511],
--- a/toolkit/components/telemetry/TelemetryEnvironment.jsm +++ b/toolkit/components/telemetry/TelemetryEnvironment.jsm @@ -1173,18 +1173,21 @@ EnvironmentCache.prototype = { // Make sure we have a settings section. this._currentEnvironment.settings = this._currentEnvironment.settings || {}; // Update the search engine entry in the current environment. this._currentEnvironment.settings.defaultSearchEngine = this._getDefaultSearchEngine(); this._currentEnvironment.settings.defaultSearchEngineData = Services.search.getDefaultEngineInfo(); // Record the cohort identifier used for search defaults A/B testing. - if (Services.prefs.prefHasUserValue(PREF_SEARCH_COHORT)) - this._currentEnvironment.settings.searchCohort = Services.prefs.getCharPref(PREF_SEARCH_COHORT); + if (Services.prefs.prefHasUserValue(PREF_SEARCH_COHORT)) { + const searchCohort = Services.prefs.getCharPref(PREF_SEARCH_COHORT); + this._currentEnvironment.settings.searchCohort = searchCohort; + TelemetryEnvironment.setExperimentActive("searchCohort", searchCohort); + } }, /** * Update the default search engine value and trigger the environment change. */ _onSearchEngineChange() { this._log.trace("_onSearchEngineChange");
--- a/toolkit/components/telemetry/tests/unit/test_TelemetryEnvironment.js +++ b/toolkit/components/telemetry/tests/unit/test_TelemetryEnvironment.js @@ -1603,20 +1603,37 @@ add_task(async function test_defaultSear data = TelemetryEnvironment.currentEnvironment; checkEnvironmentData(data); Assert.equal(data.settings.defaultSearchEngine, EXPECTED_SEARCH_ENGINE); // Check that by default we are not sending a cohort identifier... Assert.equal(data.settings.searchCohort, undefined); // ... but that if a cohort identifier is set, we send it. + deferred = PromiseUtils.defer(); + TelemetryEnvironment.registerChangeListener("testSearchEngine_pref", deferred.resolve); Services.prefs.setCharPref("browser.search.cohort", "testcohort"); Services.obs.notifyObservers(null, "browser-search-service", "init-complete"); + await deferred.promise; + TelemetryEnvironment.unregisterChangeListener("testSearchEngine_pref"); data = TelemetryEnvironment.currentEnvironment; Assert.equal(data.settings.searchCohort, "testcohort"); + Assert.equal(data.experiments.searchCohort.branch, "testcohort"); + + // Check that when changing the cohort identifier... + deferred = PromiseUtils.defer(); + TelemetryEnvironment.registerChangeListener("testSearchEngine_pref", deferred.resolve); + Services.prefs.setCharPref("browser.search.cohort", "testcohort2"); + Services.obs.notifyObservers(null, "browser-search-service", "init-complete"); + await deferred.promise; + TelemetryEnvironment.unregisterChangeListener("testSearchEngine_pref"); + data = TelemetryEnvironment.currentEnvironment; + // ... the setting and experiment are updated. + Assert.equal(data.settings.searchCohort, "testcohort2"); + Assert.equal(data.experiments.searchCohort.branch, "testcohort2"); }); add_task({ skip_if: () => AppConstants.MOZ_APP_NAME == "thunderbird" }, async function test_delayed_defaultBrowser() { // Skip this test on Thunderbird since it is not a browser, so it cannot // be the default browser. // Make sure we don't have anything already cached for this test.