☠☠ backed out by b0ea7f4820e0 ☠☠

author            | Ted Mielczarek <ted.mielczarek@gmail.com>
date              | Fri, 03 Sep 2010 13:19:41 -0400
changeset 243395  | b15dac9231ee8559a268657dbef4a15895b9b9d8
parent 243394     | 5b1402a3d3ca0df8fe25edd7d0bdcd6b99aa3b9d
child 243396      | c6ea21c091e997452df40133ee33839e75bf5ee0
push id           | 28738
push user         | cbook@mozilla.com
push date         | Tue, 12 May 2015 14:11:31 +0000
treeherder        | mozilla-central@bedce1b405a3
reviewers         | gps
bugs              | 528092
milestone         | 40.0a1
first release with    | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without  | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
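
For context on what the patch below parses: dump_syms writes a few header lines at the top of each Breakpad .sym file before the symbol data. A rough illustration (the identifiers and file names here are made up, not taken from this changeset):

    MODULE windows x86 5A9832E5287241C1838ED98914E9B7FF1 xul.pdb
    INFO CODE_ID 4E8C69F0976000 xul.dll

The MODULE line carries the debug identifier (guid) and debug file name already used to lay out the symbol store; the optional INFO CODE_ID line is the one this change starts reading, since it names the binary (code_file) and its code identifier (code_id), which are needed to store the binary alongside the symbols.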
--- a/toolkit/crashreporter/tools/symbolstore.py
+++ b/toolkit/crashreporter/tools/symbolstore.py
@@ -9,17 +9,18 @@
 # then places the resulting symbol file in the proper directory
 # structure in the symbol store path. Accepts multiple files
 # on the command line, so can be called as part of a pipe using
 # find <dir> | xargs symbolstore.pl <dump_syms> <storepath>
 # But really, you might just want to pass it <dir>.
 #
 # Parameters accepted:
 # -c           : Copy debug info files to the same directory structure
-#                as sym files
+#                as sym files. On Windows, this will also copy
+#                binaries into the symbol store.
 # -a "<archs>" : Run dump_syms -a <arch> for each space separated
 #                cpu architecture in <archs> (only on OS X)
 # -s <srcdir>  : Use <srcdir> as the top source directory to
 #                generate relative filenames.
 
 import errno
 import sys
 import platform
@@ -530,17 +531,17 @@ class Dumper:
     def FixFilenameCase(self, file):
         return file
 
     # This is a no-op except on Win32
     def SourceServerIndexing(self, debug_file, guid, sourceFileStream, vcs_root):
         return ""
 
     # subclasses override this if they want to support this
-    def CopyDebug(self, file, debug_file, guid):
+    def CopyDebug(self, file, debug_file, guid, code_file, code_id):
         pass
 
     def Finish(self, stop_pool=True):
         """Wait for the expected number of jobs to be submitted, and then
         wait for the pool to finish processing them. By default, will close
         and clear the pool, but for testcases that need multiple runs,
         pass stop_pool = False."""
         with Dumper.jobs_condition:
@@ -604,16 +605,17 @@ class Dumper:
     def ProcessFilesWork(self, files, arch_num, arch, vcs_root, after, after_arg):
         self.output_pid(sys.stderr, "Worker processing files: %s" % (files,))
 
         # our result is a status, a cleanup function, an argument to that function, and the tuple of files we were called on
         result = { 'status' : False, 'after' : after, 'after_arg' : after_arg, 'files' : files }
 
         sourceFileStream = ''
+        code_id, code_file = None, None
         for file in files:
             # files is a tuple of files, containing fallbacks in case the first file doesn't process successfully
             try:
                 proc = subprocess.Popen([self.dump_syms] + arch.split() + [file],
                                         stdout=subprocess.PIPE)
                 module_line = proc.stdout.next()
                 if module_line.startswith("MODULE"):
                     # MODULE os cpu guid debug_file
@@ -648,32 +650,41 @@ class Dumper:
                         if vcs_root is None:
                             if rootname:
                                 vcs_root = rootname
                         # gather up files with hg for indexing
                         if filename.startswith("hg"):
                             (ver, checkout, source_file, revision) = filename.split(":", 3)
                             sourceFileStream += sourcepath + "*" + source_file + '*' + revision + "\r\n"
                         f.write("FILE %s %s\n" % (index, filename))
+                    elif line.startswith("INFO CODE_ID "):
+                        # INFO CODE_ID code_id code_file
+                        # This gives some info we can use to
+                        # store binaries in the symbol store.
+                        bits = line.rstrip().split(None, 3)
+                        if len(bits) == 4:
+                            code_id, code_file = bits[2:]
+                        f.write(line)
                     else:
                         # pass through all other lines unchanged
                         f.write(line)
                         # we want to return true only if at least one line is not a MODULE or FILE line
                         result['status'] = True
                 f.close()
                 proc.wait()
                 # we output relative paths so callers can get a list of what
                 # was generated
                 self.output(sys.stdout, rel_path)
                 if self.srcsrv and vcs_root:
                     # add source server indexing to the pdb file
                     self.SourceServerIndexing(file, guid, sourceFileStream, vcs_root)
                 # only copy debug the first time if we have multiple architectures
                 if self.copy_debug and arch_num == 0:
-                    self.CopyDebug(file, debug_file, guid)
+                    self.CopyDebug(file, debug_file, guid,
+                                   code_file, code_id)
             except StopIteration:
                 pass
             except Exception as e:
                 self.output(sys.stderr, "Unexpected error: %s" % (str(e),))
                 raise
             if result['status']:
                 # we only need 1 file to work
                 break
@@ -715,36 +726,63 @@ class Dumper_Win32(Dumper):
             if f.lower() == lc_filename:
                 result = os.path.join(path, f)
                 break
 
         # Cache the corrected version to avoid future filesystem hits.
         self.fixedFilenameCaseCache[file] = result
         return result
 
-    def CopyDebug(self, file, debug_file, guid):
+    def CopyDebug(self, file, debug_file, guid, code_file, code_id):
+        def compress(path):
+            compressed_file = path[:-1] + '_'
+            # ignore makecab's output
+            success = subprocess.call(["makecab.exe", "/D",
+                                       "CompressionType=LZX", "/D",
+                                       "CompressionMemory=21",
+                                       path, compressed_file],
+                                      stdout=open("NUL:","w"),
+                                      stderr=subprocess.STDOUT)
+            if success == 0 and os.path.exists(compressed_file):
+                os.unlink(path)
+                return True
+            return False
+
         rel_path = os.path.join(debug_file,
                                 guid,
                                 debug_file).replace("\\", "/")
         full_path = os.path.normpath(os.path.join(self.symbol_path,
                                                   rel_path))
         shutil.copyfile(file, full_path)
-        # try compressing it
-        compressed_file = os.path.splitext(full_path)[0] + ".pd_"
-        # ignore makecab's output
-        success = subprocess.call(["makecab.exe", "/D", "CompressionType=LZX", "/D",
-                                   "CompressionMemory=21",
-                                   full_path, compressed_file],
-                                  stdout=open("NUL:","w"), stderr=subprocess.STDOUT)
-        if success == 0 and os.path.exists(compressed_file):
-            os.unlink(full_path)
-            self.output(sys.stdout, os.path.splitext(rel_path)[0] + ".pd_")
+        if compress(full_path):
+            self.output(sys.stdout, rel_path[:-1] + '_')
         else:
             self.output(sys.stdout, rel_path)
-
+
+        # Copy the binary file as well
+        if code_file and code_id:
+            full_code_path = os.path.join(os.path.dirname(file),
+                                          code_file)
+            if os.path.exists(full_code_path):
+                rel_path = os.path.join(code_file,
+                                        code_id,
+                                        code_file).replace("\\", "/")
+                full_path = os.path.normpath(os.path.join(self.symbol_path,
+                                                          rel_path))
+                try:
+                    os.makedirs(os.path.dirname(full_path))
+                except OSError as e:
+                    if e.errno != errno.EEXIST:
+                        raise
+                shutil.copyfile(full_code_path, full_path)
+                if compress(full_path):
+                    self.output(sys.stdout, rel_path[:-1] + '_')
+                else:
+                    self.output(sys.stdout, rel_path)
+
     def SourceServerIndexing(self, debug_file, guid, sourceFileStream, vcs_root):
         # Creates a .pdb.stream file in the mozilla\objdir to be used for source indexing
         debug_file = os.path.abspath(debug_file)
         streamFilename = debug_file + ".stream"
         stream_output_path = os.path.abspath(streamFilename)
         # Call SourceIndex to create the .stream file
         result = SourceIndex(sourceFileStream, stream_output_path, vcs_root)
 
         if self.copy_debug:
@@ -765,17 +803,17 @@ class Dumper_Linux(Dumper):
         file(1) reports as being ELF files. It expects to find the file
         command in PATH."""
         if not Dumper.ShouldProcess(self, file):
             return False
         if file.endswith(".so") or os.access(file, os.X_OK):
             return self.RunFileCommand(file).startswith("ELF")
         return False
 
-    def CopyDebug(self, file, debug_file, guid):
+    def CopyDebug(self, file, debug_file, guid, code_file, code_id):
         # We want to strip out the debug info, and add a
         # .gnu_debuglink section to the object, so the debugger can
         # actually load our debug info later.
         file_dbg = file + ".dbg"
         if subprocess.call([self.objcopy, '--only-keep-debug', file, file_dbg]) == 0 and \
            subprocess.call([self.objcopy, '--add-gnu-debuglink=%s' % file_dbg, file]) == 0:
             rel_path = os.path.join(debug_file,
                                     guid,
@@ -876,17 +914,17 @@ class Dumper_Mac(Dumper):
             result['status'] = False
             result['files'] = (file, )
             return result
 
         result['status'] = True
         result['files'] = (dsymbundle, file)
         return result
 
-    def CopyDebug(self, file, debug_file, guid):
+    def CopyDebug(self, file, debug_file, guid, code_file, code_id):
         """ProcessFiles has already produced a dSYM bundle, so we should just
         copy that to the destination directory. However, we'll package it
         into a .tar.bz2 because the debug symbols are pretty huge, and
         also because it's a bundle, so it's a directory. |file| here is the
         dSYM bundle, and |debug_file| is the original filename."""
         rel_path = os.path.join(debug_file, guid,
                                 os.path.basename(file) + ".tar.bz2")
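
To make the resulting store layout concrete, here is a minimal standalone sketch (not part of the patch; the helper names and example file names are illustrative) of the path conventions used by CopyDebug above:

    import os

    def debug_store_path(symbol_path, debug_file, guid):
        # Debug info is stored as <debug_file>/<guid>/<debug_file>,
        # e.g. xul.pdb/<GUID>/xul.pdb, matching the existing rel_path logic.
        return os.path.join(symbol_path, debug_file, guid, debug_file)

    def binary_store_path(symbol_path, code_file, code_id):
        # The new Windows behaviour stores the binary as
        # <code_file>/<code_id>/<code_file>, e.g. xul.dll/<CODE_ID>/xul.dll.
        return os.path.join(symbol_path, code_file, code_id, code_file)

    def compressed_name(path):
        # makecab convention used by compress(): the last character of the
        # name is replaced with '_', e.g. xul.pdb -> xul.pd_, xul.dll -> xul.dl_.
        return path[:-1] + '_'

Symbol-server clients look up binaries by their code identifier rather than by the debug guid, which is why the copied binary gets its own code_file/code_id subdirectory instead of reusing the debug-info path.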