Bug 1263073 - partner repacks should not block other uploads r=nthomas a=release DONTBUILD
author: Rail Aliiev <rail@mozilla.com>
Mon, 11 Apr 2016 21:37:59 -0400
changeset 325787 cf019107c664178d735351d3be7a921b225a54e5
parent 325786 def03a4832426d67b714fb6ccfbde9bf11dc55d7
child 325788 80ee357092b5dfc37cec4127c30075ce2ccfc9a3
push id: 1128
push user: jlund@mozilla.com
push date: Wed, 01 Jun 2016 01:31:59 +0000
treeherder: mozilla-release@fe0d30de989d [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: nthomas, release
bugs: 1263073
milestone: 47.0a2
Bug 1263073 - partner repacks should not block other uploads r=nthomas a=release DONTBUILD MozReview-Commit-ID: 69DSyJrB6hI
testing/mozharness/scripts/release/push-candidate-to-releases.py
--- a/testing/mozharness/scripts/release/push-candidate-to-releases.py
+++ b/testing/mozharness/scripts/release/push-candidate-to-releases.py
@@ -145,28 +145,38 @@ class ReleasePusher(BaseScript, Virtuale
                             aws_secret_access_key=self.aws_secret_key)
         self.info("Getting bucket {}".format(self.config["bucket_name"]))
         bucket = conn.get_bucket(self.config["bucket_name"])
 
         # ensure the destination is empty
         self.info("Checking destination {} is empty".format(self._get_releases_prefix()))
         keys = [k for k in bucket.list(prefix=self._get_releases_prefix())]
         if keys:
-            self.fatal("Destination already exists with %s keys, aborting" %
-                       len(keys))
+            self.warning("Destination already exists with %s keys" % len(keys))
 
         def worker(item):
             source, destination = item
 
-            self.info("Copying {} to {}".format(source, destination))
-            return retry(bucket.copy_key,
-                         args=(destination,
-                               self.config["bucket_name"],
-                               source),
-                         sleeptime=5, max_sleeptime=60,
+            def copy_key():
+                dest_key = bucket.get_key(destination)
+                source_key = bucket.get_key(source)
+                if not dest_key:
+                    self.info("Copying {} to {}".format(source, destination))
+                    bucket.copy_key(destination, self.config["bucket_name"],
+                                    source)
+                elif source_key.etag == dest_key.etag:
+                    self.warning(
+                        "{} already exists with the same content ({}), skipping copy".format(
+                            destination, dest_key.etag))
+                else:
+                    self.fatal(
+                        "{} already exists with the different content (src: {}, dest: {}), aborting".format(
+                            destination, source_key.etag, dest_key.etag))
+
+            return retry(copy_key, sleeptime=5, max_sleeptime=60,
                          retry_exceptions=(S3CopyError, S3ResponseError))
 
         def find_release_files():
             candidates_prefix = self._get_candidates_prefix()
             release_prefix = self._get_releases_prefix()
             self.info("Getting key names from candidates")
             for key in bucket.list(prefix=candidates_prefix):
                 keyname = key.name