[INFER] Fix merge botch, infinite loop.
author      Brian Hackett <bhackett1024@gmail.com>
date        Tue, 02 Aug 2011 13:04:21 -0700
changeset   76093 37894495d77ddee74fdc66bd3fc2e30f55ab6741
parent      76092 afb8ed83d8345ef802a30cd72b2d4dfc08e96c78
child       76094 f7f014c31f452a80bb7d5e457e374558d3099fb5
push id     3
push user   felipc@gmail.com
push date   Fri, 30 Sep 2011 20:09:13 +0000
milestone   8.0a1
js/src/jscompartment.h
js/src/jsfriendapi.h
js/src/jsinfer.cpp
js/src/xpconnect/src/xpcjsruntime.cpp
js/src/xpconnect/src/xpcpublic.h
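
The infinite loop named in the summary is the linked-list walk in MarkIteratorUnknownSlow (see the jsinfer.cpp hunk below): the merge dropped the cursor advance, so "while (result)" could spin forever once script->types->dynamicList held an entry whose offset was not uint32(-1). The hunk restores "result = result->next;". A minimal standalone sketch of the corrected pattern, using a hypothetical Entry type in place of js::types::TypeResult:

    // sketch.cpp -- illustrative only; Entry stands in for js::types::TypeResult.
    #include <cstdint>
    #include <cstdio>

    struct Entry {
        uint32_t offset;
        Entry *next;
    };

    // Returns true if an entry carrying the sentinel offset uint32(-1) is present.
    static bool HasSentinel(Entry *head)
    {
        Entry *cur = head;
        while (cur) {
            if (cur->offset == uint32_t(-1))
                return true;
            cur = cur->next;   // the advance the merge dropped; without it the loop never terminates
        }
        return false;
    }

    int main()
    {
        Entry c = { 7, NULL };
        Entry b = { uint32_t(-1), &c };
        Entry a = { 3, &b };
        std::printf("%d\n", HasSentinel(&a) ? 1 : 0);   // prints 1
        return 0;
    }
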
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -409,30 +409,16 @@ struct JS_FRIEND_API(JSCompartment) {
      */
     JSArenaPool                  pool;
     bool                         activeAnalysis;
     bool                         activeInference;
 
     /* Type information about the scripts and objects in this compartment. */
     js::types::TypeCompartment   types;
 
-    /* Data for tracking analysis/inference memory usage. */
-    struct TypeInferenceMemoryStats
-    {
-        int64 scriptMain;
-        int64 scriptSets;
-        int64 objectMain;
-        int64 objectSets;
-        int64 poolMain;
-    };
-
-    void getTypeInferenceMemoryStats(JSContext *cx, TypeInferenceMemoryStats *stats);
-    void getTypeInferenceObjectStats(js::types::TypeObject *object,
-                                     JSCompartment::TypeInferenceMemoryStats *stats);
-
 #ifdef JS_TRACER
   private:
     /*
      * Trace-tree JIT recorder/interpreter state.  It's created lazily because
      * many compartments don't end up needing it.
      */
     js::TraceMonitor             *traceMonitor_;
 #endif
--- a/js/src/jsfriendapi.h
+++ b/js/src/jsfriendapi.h
@@ -61,11 +61,29 @@ extern JS_FRIEND_API(JSBool)
 JS_SplicePrototype(JSContext *cx, JSObject *obj, JSObject *proto);
 
 extern JS_FRIEND_API(JSObject *)
 JS_NewObjectWithUniqueType(JSContext *cx, JSClass *clasp, JSObject *proto, JSObject *parent);
 
 extern JS_FRIEND_API(uint32)
 JS_ObjectCountDynamicSlots(JSObject *obj);
 
+/* Data for tracking analysis/inference memory usage. */
+typedef struct TypeInferenceMemoryStats
+{
+    int64 scriptMain;
+    int64 scriptSets;
+    int64 objectMain;
+    int64 objectSets;
+    int64 poolMain;
+} TypeInferenceMemoryStats;
+
+extern JS_FRIEND_API(void)
+JS_GetTypeInferenceMemoryStats(JSContext *cx, JSCompartment *compartment,
+                               TypeInferenceMemoryStats *stats);
+
+extern JS_FRIEND_API(void)
+JS_GetTypeInferenceObjectStats(/*TypeObject*/ void *object,
+                               TypeInferenceMemoryStats *stats);
+
 JS_END_EXTERN_C
 
 #endif /* jsfriendapi_h___ */
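
The jscompartment.h hunk above drops TypeInferenceMemoryStats and its accessors from JSCompartment; the jsfriendapi.h hunk re-exposes them as friend API so XPConnect can collect the numbers without reaching into JSCompartment internals. A hedged caller sketch (the helper name ReportTypeInferenceMemory and the explicit memset are illustrative, not part of this patch):

    // caller sketch -- builds only inside a SpiderMonkey tree of this era.
    #include <string.h>
    #include "jsapi.h"
    #include "jsfriendapi.h"

    // Hypothetical helper: gather per-compartment type-inference memory counts.
    static void ReportTypeInferenceMemory(JSContext *cx, JSCompartment *compartment)
    {
        TypeInferenceMemoryStats stats;
        memset(&stats, 0, sizeof(stats));   // the API accumulates with +=, so start from zero
        JS_GetTypeInferenceMemoryStats(cx, compartment, &stats);
        // stats.scriptMain, stats.scriptSets, stats.objectMain, stats.objectSets and
        // stats.poolMain now hold byte counts; xpcjsruntime.cpp below consumes them
        // the same way via CompartmentStats::typeInferenceMemory.
    }
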
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -38,16 +38,17 @@
  * ***** END LICENSE BLOCK ***** */
 
 #include "jsapi.h"
 #include "jsautooplen.h"
 #include "jsbit.h"
 #include "jsbool.h"
 #include "jsdate.h"
 #include "jsexn.h"
+#include "jsfriendapi.h"
 #include "jsgc.h"
 #include "jsgcmark.h"
 #include "jsinfer.h"
 #include "jsmath.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsscript.h"
 #include "jscntxt.h"
@@ -4587,16 +4588,17 @@ MarkIteratorUnknownSlow(JSContext *cx)
 
     TypeResult *result = script->types->dynamicList;
     while (result) {
         if (result->offset == uint32(-1)) {
             /* Already know about custom iterators used in this script. */
             JS_ASSERT(result->type.isUnknown());
             return;
         }
+        result = result->next;
     }
 
     InferSpew(ISpewOps, "externalType: customIterator #%u", script->id());
 
     result = cx->new_<TypeResult>(uint32(-1), Type::UnknownType());
     if (!result) {
         cx->compartment->types.setPendingNukeTypes(cx);
         return;
@@ -5529,17 +5531,17 @@ TypeSet::dynamicSize()
 {
     uint32 count = baseObjectCount();
     if (count >= 2)
         return HashSetCapacity(count) * sizeof(TypeObject *);
     return 0;
 }
 
 static void
-GetScriptMemoryStats(JSScript *script, JSCompartment::TypeInferenceMemoryStats *stats)
+GetScriptMemoryStats(JSScript *script, TypeInferenceMemoryStats *stats)
 {
     if (!script->types)
         return;
 
     if (!script->compartment->types.inferenceEnabled) {
         stats->scriptMain += sizeof(TypeScript);
         return;
     }
@@ -5553,30 +5555,34 @@ GetScriptMemoryStats(JSScript *script, J
 
     TypeResult *result = script->types->dynamicList;
     while (result) {
         stats->scriptMain += sizeof(TypeResult);
         result = result->next;
     }
 }
 
-void
-JSCompartment::getTypeInferenceMemoryStats(JSContext *cx, TypeInferenceMemoryStats *stats)
+JS_FRIEND_API(void)
+JS_GetTypeInferenceMemoryStats(JSContext *cx, JSCompartment *compartment,
+                               TypeInferenceMemoryStats *stats)
 {
-    for (JSCList *cursor = scripts.next; cursor != &scripts; cursor = cursor->next) {
+    for (JSCList *cursor = compartment->scripts.next;
+         cursor != &compartment->scripts;
+         cursor = cursor->next) {
         JSScript *script = reinterpret_cast<JSScript *>(cursor);
         GetScriptMemoryStats(script, stats);
     }
 
-    stats->poolMain += ArenaAllocatedSize(pool);
+    stats->poolMain += ArenaAllocatedSize(compartment->pool);
 }
 
-void
-JSCompartment::getTypeInferenceObjectStats(TypeObject *object, TypeInferenceMemoryStats *stats)
+JS_FRIEND_API(void)
+JS_GetTypeInferenceObjectStats(void *object_, TypeInferenceMemoryStats *stats)
 {
+    TypeObject *object = (TypeObject *) object_;
     stats->objectMain += sizeof(TypeObject);
 
     if (object->singleton) {
         /*
          * Properties and TypeSet data for singletons are allocated in the
          * compartment's analysis pool.
          */
         return;
--- a/js/src/xpconnect/src/xpcjsruntime.cpp
+++ b/js/src/xpconnect/src/xpcjsruntime.cpp
@@ -1316,17 +1316,17 @@ CompartmentCallback(JSContext *cx, void 
     curr->mjitCode = GetCompartmentMjitCodeSize(compartment);
     curr->mjitData = GetCompartmentMjitDataSize(compartment);
 #endif
 #ifdef JS_TRACER
     curr->tjitCode = GetCompartmentTjitCodeSize(compartment);
     curr->tjitDataAllocatorsMain = GetCompartmentTjitDataAllocatorsMainSize(compartment);
     curr->tjitDataAllocatorsReserve = GetCompartmentTjitDataAllocatorsReserveSize(compartment);
 #endif
-    compartment->getTypeInferenceMemoryStats(cx, &curr->typeInferenceMemory);
+    JS_GetTypeInferenceMemoryStats(cx, compartment, &curr->typeInferenceMemory);
 }
 
 void
 ArenaCallback(JSContext *cx, void *vdata, js::gc::Arena *arena,
               size_t traceKind, size_t thingSize)
 {
     IterateData *data = static_cast<IterateData *>(vdata);
     data->currCompartmentStats->gcHeapArenaHeaders +=
@@ -1364,17 +1364,17 @@ CellCallback(JSContext *cx, void *vdata,
         curr->gcHeapShapes += thingSize;
         js::Shape *shape = static_cast<js::Shape *>(thing);
         if(shape->hasTable())
             curr->propertyTables += shape->getTable()->sizeOf();
     }
     else if(traceKind == JSTRACE_TYPE_OBJECT)
     {
         js::types::TypeObject *obj = static_cast<js::types::TypeObject *>(thing);
-        obj->compartment()->getTypeInferenceObjectStats(obj, &curr->typeInferenceMemory);
+        JS_GetTypeInferenceObjectStats(obj, &curr->typeInferenceMemory);
     }
     else
     {
         JS_ASSERT(traceKind == JSTRACE_XML);
         curr->gcHeapXml += thingSize;
     }
     // Yes, this is a subtraction:  see ArenaCallback() for details.
     curr->gcHeapArenaUnused -= thingSize;
@@ -1742,50 +1742,55 @@ ReportCompartmentStats(const Compartment
     "Memory used by the trace JIT and held in reserve for the compartment's "
     "VMAllocators in case of OOM.",
                        callback, closure);
 #endif
 
     ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
                                               "type-inference/script-main"),
                        nsIMemoryReporter::KIND_HEAP,
-                       stats->typeInferenceMemory.scriptMain,
+                       stats.typeInferenceMemory.scriptMain,
     "Memory used during type inference to store type sets of variables "
-    "and dynamically observed types.");
+    "and dynamically observed types.",
+                       callback, closure);
 
     ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
                                               "type-inference/script-typesets"),
                        nsIMemoryReporter::KIND_HEAP,
-                       stats->typeInferenceMemory.scriptSets,
+                       stats.typeInferenceMemory.scriptSets,
     "Memory used during type inference to hold the contents of type "
-    "sets associated with scripts.");
+    "sets associated with scripts.",
+                       callback, closure);
 
     ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
                                               "type-inference/object-main"),
                        nsIMemoryReporter::KIND_HEAP,
-                       stats->typeInferenceMemory.objectMain,
+                       stats.typeInferenceMemory.objectMain,
     "Memory used during type inference to store types and possible "
-    "property types of JS objects.");
+    "property types of JS objects.",
+                       callback, closure);
 
     ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
                                               "type-inference/object-typesets"),
                        nsIMemoryReporter::KIND_HEAP,
-                       stats->typeInferenceMemory.objectSets,
+                       stats.typeInferenceMemory.objectSets,
     "Memory used during type inference to hold the contents of type "
-    "sets associated with objects.");
+    "sets associated with objects.",
+                       callback, closure);
 
     /*
      * This is in a different category from the rest of type inference
      * data as this can be large but is volatile and cleared on GC.
      */
     ReportMemoryBytes0(MakeMemoryReporterPath(pathPrefix, stats.name,
                                               "type-inference-pools"),
                        nsIMemoryReporter::KIND_HEAP,
-                       stats->typeInferenceMemory.poolMain,
-    "Memory used during type inference to hold transient analysis information.");
+                       stats.typeInferenceMemory.poolMain,
+    "Memory used during type inference to hold transient analysis information.",
+                       callback, closure);
 }
 
 void
 ReportJSStackSizeForRuntime(JSRuntime *rt, const nsACString &pathPrefix,
                             nsIMemoryMultiReporterCallback *callback,
                             nsISupports *closure)
 {
     PRInt64 stackSize = 0;
--- a/js/src/xpconnect/src/xpcpublic.h
+++ b/js/src/xpconnect/src/xpcpublic.h
@@ -36,16 +36,17 @@
  * the terms of any one of the MPL, the GPL or the LGPL.
  *
  * ***** END LICENSE BLOCK ***** */
 
 #ifndef xpcpublic_h
 #define xpcpublic_h
 
 #include "jsapi.h"
+#include "jsfriendapi.h"
 #include "jsobj.h"
 #include "jsgc.h"
 #include "jspubtd.h"
 
 #include "nsISupports.h"
 #include "nsIPrincipal.h"
 #include "nsWrapperCache.h"
 #include "nsStringGlue.h"
@@ -214,17 +215,17 @@ struct CompartmentStats
     PRInt64 mjitCode;
     PRInt64 mjitData;
 #endif
 #ifdef JS_TRACER
     PRInt64 tjitCode;
     PRInt64 tjitDataAllocatorsMain;
     PRInt64 tjitDataAllocatorsReserve;
 #endif
-    JSCompartment::TypeInferenceMemoryStats typeInferenceMemory;
+    TypeInferenceMemoryStats typeInferenceMemory;
 };
 
 struct IterateData
 {
     IterateData()
     : compartmentStatsVector(), currCompartmentStats(NULL) { }
 
     js::Vector<CompartmentStats, 0, js::SystemAllocPolicy> compartmentStatsVector;