Bug 875661 - Part 3: Add asserts to thread unsafe getters. (r=billm)
author: Shu-yu Guo <shu@rfrn.org>
Thu, 20 Jun 2013 16:40:53 -0700
changeset 135911 191bed3002c99a623b4a2e85eaaa0ee72631e977
parent 135910 40bbd1174f4660876277ee6143e23249f14d723a
child 135912 66bbf82b9ec237f10d71a116d7dfcc104ba8bdd2
push id: 29872
push user: shu@rfrn.org
push date: Thu, 20 Jun 2013 23:41:13 +0000
treeherder: mozilla-inbound@191bed3002c9 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: billm
bugs: 875661
milestone: 24.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 875661 - Part 3: Add asserts to thread unsafe getters. (r=billm)
js/src/gc/Heap.h
js/src/ion/ParallelFunctions.cpp
js/src/jscntxt.h
js/src/jscntxtinlines.h
js/src/vm/ForkJoin.cpp
js/src/vm/ForkJoin.h
js/src/vm/ObjectImpl-inl.h
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -23,16 +23,19 @@
 struct JSCompartment;
 
 extern "C" {
 struct JSRuntime;
 }
 
 namespace js {
 
+// Defined in vm/ForkJoin.cpp
+extern bool InSequentialOrExclusiveParallelSection();
+
 class FreeOp;
 
 namespace gc {
 
 struct Arena;
 struct ArenaHeader;
 struct Chunk;
 
@@ -82,16 +85,17 @@ static const unsigned FINALIZE_OBJECT_LI
  */
 static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
 
 /*
  * A GC cell is the base class for all GC things.
  */
 struct Cell
 {
+  public:
     inline ArenaHeader *arenaHeader() const;
     inline AllocKind tenuredGetAllocKind() const;
     MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
     MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
 
     inline JSRuntime *runtime() const;
     inline Zone *tenuredZone() const;
@@ -948,16 +952,17 @@ Cell::arenaHeader() const
     uintptr_t addr = address();
     addr &= ~ArenaMask;
     return reinterpret_cast<ArenaHeader *>(addr);
 }
 
 inline JSRuntime *
 Cell::runtime() const
 {
+    JS_ASSERT(InSequentialOrExclusiveParallelSection());
     return chunk()->info.runtime;
 }
 
 AllocKind
 Cell::tenuredGetAllocKind() const
 {
     return arenaHeader()->getAllocKind();
 }
@@ -985,16 +990,17 @@ Cell::unmark(uint32_t color) const
     JS_ASSERT(color != BLACK);
     AssertValidColor(this, color);
     chunk()->bitmap.unmark(this, color);
 }
 
 Zone *
 Cell::tenuredZone() const
 {
+    JS_ASSERT(InSequentialOrExclusiveParallelSection());
     JS_ASSERT(isTenured());
     return arenaHeader()->zone;
 }
 
 #ifdef DEBUG
 bool
 Cell::isAligned() const
 {
--- a/js/src/ion/ParallelFunctions.cpp
+++ b/js/src/ion/ParallelFunctions.cpp
@@ -40,17 +40,17 @@ ion::ParNewGCThing(gc::AllocKind allocKi
 }
 
 // Check that the object was created by the current thread
 // (and hence is writable).
 bool
 ion::ParWriteGuard(ForkJoinSlice *slice, JSObject *object)
 {
     JS_ASSERT(ForkJoinSlice::Current() == slice);
-    return !IsInsideNursery(object->runtime(), object) &&
+    return !IsInsideNursery(slice->runtime(), object) &&
            slice->allocator()->arenas.containsArena(slice->runtime(), object->arenaHeader());
 }
 
 #ifdef DEBUG
 static void
 printTrace(const char *prefix, struct IonLIRTraceData *cached)
 {
     fprintf(stderr, "%s / Block %3u / LIR %3u / Mode %u / LIR %s\n",
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1557,17 +1557,22 @@ struct ThreadSafeContext : js::ContextFr
      */
   protected:
     Allocator *allocator_;
 
   public:
     static size_t offsetOfAllocator() { return offsetof(ThreadSafeContext, allocator_); }
 
     inline Allocator *const allocator();
-    inline AllowGC allowGC();
+
+    /* GC support. */
+    inline AllowGC allowGC() const;
+
+    template <typename T>
+    inline bool isInsideCurrentZone(T thing) const;
 
     void *onOutOfMemory(void *p, size_t nbytes) {
         return runtime_->onOutOfMemory(p, nbytes, isJSContext() ? asJSContext() : NULL);
     }
     inline void updateMallocCounter(size_t nbytes) {
         /* Note: this is racy. */
         runtime_->updateMallocCounter(zone_, nbytes);
     }
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -570,16 +570,23 @@ JSContext::leaveCompartment(JSCompartmen
 inline void
 JSContext::setCompartment(JSCompartment *comp)
 {
     compartment_ = comp;
     zone_ = comp ? comp->zone() : NULL;
     allocator_ = zone_ ? &zone_->allocator : NULL;
 }
 
+template <typename T>
+inline bool
+js::ThreadSafeContext::isInsideCurrentZone(T thing) const
+{
+    return thing->isInsideZone(zone_);
+}
+
 #ifdef JSGC_GENERATIONAL
 inline bool
 js::ThreadSafeContext::hasNursery() const
 {
     return isJSContext();
 }
 
 inline js::Nursery &
@@ -593,14 +600,23 @@ js::ThreadSafeContext::nursery()
 inline js::Allocator *const
 js::ThreadSafeContext::allocator()
 {
     JS_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator == allocator_);
     return allocator_;
 }
 
 inline js::AllowGC
-js::ThreadSafeContext::allowGC()
+js::ThreadSafeContext::allowGC() const
 {
-    return isJSContext() ? CanGC : NoGC;
+    switch (contextKind_) {
+      case Context_JS:
+        return CanGC;
+      case Context_ForkJoin:
+        return NoGC;
+      default:
+        /* Silence warnings. */
+        JS_NOT_REACHED("Bad context kind");
+        return NoGC;
+    }
 }
 
 #endif /* jscntxtinlines_h */
--- a/js/src/vm/ForkJoin.cpp
+++ b/js/src/vm/ForkJoin.cpp
@@ -116,16 +116,22 @@ ParallelBailoutRecord::setCause(Parallel
 void
 ParallelBailoutRecord::addTrace(JSScript *script,
                                 jsbytecode *pc)
 {
     JS_NOT_REACHED("Not THREADSAFE build");
 }
 
 bool
+js::InSequentialOrExclusiveParallelSection()
+{
+    return true;
+}
+
+bool
 js::ParallelTestsShouldPass(JSContext *cx)
 {
     return false;
 }
 
 #endif // !JS_THREADSAFE || !JS_ION
 
 ///////////////////////////////////////////////////////////////////////////
@@ -1646,17 +1652,18 @@ ForkJoinShared::requestZoneGC(JS::Zone *
 ForkJoinSlice::ForkJoinSlice(PerThreadData *perThreadData,
                              uint32_t sliceId, uint32_t numSlices,
                              Allocator *allocator, ForkJoinShared *shared,
                              ParallelBailoutRecord *bailoutRecord)
   : ThreadSafeContext(shared->runtime(), perThreadData, Context_ForkJoin),
     sliceId(sliceId),
     numSlices(numSlices),
     bailoutRecord(bailoutRecord),
-    shared(shared)
+    shared(shared),
+    acquiredContext_(false)
 {
     /*
      * Unsafely set the zone. This is used to track malloc counters and to
      * trigger GCs and is otherwise not thread-safe to access.
      */
     zone_ = shared->zone();
     allocator_ = allocator;
 }
@@ -1671,26 +1678,37 @@ JSRuntime *
 ForkJoinSlice::runtime()
 {
     return shared->runtime();
 }
 
 JSContext *
 ForkJoinSlice::acquireContext()
 {
-    return shared->acquireContext();
+    JSContext *cx = shared->acquireContext();
+    JS_ASSERT(!acquiredContext_);
+    acquiredContext_ = true;
+    return cx;
 }
 
 void
 ForkJoinSlice::releaseContext()
 {
+    JS_ASSERT(acquiredContext_);
+    acquiredContext_ = false;
     return shared->releaseContext();
 }
 
 bool
+ForkJoinSlice::hasAcquiredContext() const
+{
+    return acquiredContext_;
+}
+
+bool
 ForkJoinSlice::check()
 {
     if (runtime()->interrupt)
         return shared->check(*this);
     else
         return true;
 }
 
@@ -2122,16 +2140,22 @@ parallel::SpewBailoutIR(uint32_t bblockI
                         JSScript *script, jsbytecode *pc)
 {
     spewer.spewBailoutIR(bblockId, lirId, lir, mir, script, pc);
 }
 
 #endif // DEBUG
 
 bool
+js::InSequentialOrExclusiveParallelSection()
+{
+    return !InParallelSection() || ForkJoinSlice::Current()->hasAcquiredContext();
+}
+
+bool
 js::ParallelTestsShouldPass(JSContext *cx)
 {
     return ion::IsEnabled(cx) &&
            ion::IsBaselineEnabled(cx) &&
            !ion::js_IonOptions.eagerCompilation &&
            ion::js_IonOptions.baselineUsesBeforeCompile != 0 &&
            cx->runtime()->gcZeal() == 0;
 }
--- a/js/src/vm/ForkJoin.h
+++ b/js/src/vm/ForkJoin.h
@@ -341,16 +341,17 @@ struct ForkJoinSlice : ThreadSafeContext
     bool check();
 
     // Be wary, the runtime is shared between all threads!
     JSRuntime *runtime();
 
     // Acquire and release the JSContext from the runtime.
     JSContext *acquireContext();
     void releaseContext();
+    bool hasAcquiredContext() const;
 
     // Check the current state of parallel execution.
     static inline ForkJoinSlice *Current();
 
     // Initializes the thread-local state.
     static bool InitializeTLS();
 
   private:
@@ -359,16 +360,18 @@ struct ForkJoinSlice : ThreadSafeContext
 
 #if defined(JS_THREADSAFE) && defined(JS_ION)
     // Initialized by InitializeTLS()
     static unsigned ThreadPrivateIndex;
     static bool TLSInitialized;
 #endif
 
     ForkJoinShared *const shared;
+
+    bool acquiredContext_;
 };
 
 // Locks a JSContext for its scope. Be very careful, because locking a
 // JSContext does *not* allow you to safely mutate the data in the
 // JSContext unless you can guarantee that any of the other threads
 // that want to access that data will also acquire the lock, which is
 // generally not the case. For example, the lock is used in the IC
 // code to allow us to atomically patch up the dispatch table, but we
@@ -408,16 +411,18 @@ InParallelSection()
 #ifdef JS_THREADSAFE
     ForkJoinSlice *current = ForkJoinSlice::Current();
     return current != NULL;
 #else
     return false;
 #endif
 }
 
+bool InSequentialOrExclusiveParallelSection();
+
 bool ParallelTestsShouldPass(JSContext *cx);
 
 ///////////////////////////////////////////////////////////////////////////
 // Debug Spew
 
 namespace parallel {
 
 enum ExecutionStatus {
--- a/js/src/vm/ObjectImpl-inl.h
+++ b/js/src/vm/ObjectImpl-inl.h
@@ -350,16 +350,17 @@ inline size_t
 js::ObjectImpl::tenuredSizeOfThis() const
 {
     return js::gc::Arena::thingSize(tenuredGetAllocKind());
 }
 
 JS_ALWAYS_INLINE JS::Zone *
 js::ObjectImpl::zone() const
 {
+    JS_ASSERT(InSequentialOrExclusiveParallelSection());
     return shape_->zone();
 }
 
 JS_ALWAYS_INLINE JS::Zone *
 ZoneOfValue(const JS::Value &value)
 {
     JS_ASSERT(value.isMarkable());
     if (value.isObject())