Bug 1309912 - Add explicit timeout for urllib2.urlopen() instead of relying on global timeout r=dustin
author: Armen Zambrano Gasparnian <armenzg@mozilla.com>
date: Thu, 13 Oct 2016 11:46:51 -0400
changeset 317954 7aa1eb0ca75e95797a4de504e539230f81b99543
parent 317953 6a000381265d312481d4213e1ff66c937391312f
child 317955 61c3fb27d8e97db9b00fd2fe7e0bed5d12e8ad95
push id: 33172
push user: armenzg@mozilla.com
push date: Fri, 14 Oct 2016 11:50:55 +0000
treeherder: autoland@7aa1eb0ca75e
reviewers: dustin
bugs: 1309912
milestone: 52.0a1
first release with: nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without: nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
Bug 1309912 - Add explicit timeout for urllib2.urlopen() instead of relying on global timeout r=dustin

In Mozharness' download-and-extract action we sometimes block indefinitely while trying to read the contents of a URL; this happens mostly on Mac machines. urllib2.urlopen()'s documentation mentions a global default timeout, but it does not seem to kick in, or perhaps it does not do what we expect.

MozReview-Commit-ID: L9IDnDSjgzO
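As context for the fix, here is a minimal Python 2 sketch (not part of the patch; the URL is a placeholder) of the difference between the interpreter-wide default timeout and the explicit per-call timeout this changeset switches to:

    import socket
    import urllib2

    # Global default: only affects sockets created after this call, and
    # any code in the process can change it again underneath you.
    socket.setdefaulttimeout(30)
    urllib2.urlopen('https://example.com/')  # placeholder URL

    # Explicit per-call timeout, as this changeset does: the socket for
    # this request times out after 30s on connect and on each blocking
    # read, so a stalled response.read() raises socket.timeout instead
    # of hanging forever.
    response = urllib2.urlopen('https://example.com/', timeout=30)
    data = response.read()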
testing/mozharness/mozharness/base/script.py
--- a/testing/mozharness/mozharness/base/script.py
+++ b/testing/mozharness/mozharness/base/script.py
@@ -372,27 +372,29 @@ class ScriptMixin(PlatformMixin):
             expected_file_size = os.stat(url.replace('file://', '')).st_size
 
             # In case we're referencing a file without file://
             if parsed_url.scheme == '':
                 url = 'file://%s' % os.path.abspath(url)
                 parsed_url = urlparse.urlparse(url)
 
         request = urllib2.Request(url)
-        # Exceptions to be retried:
-        # Bug 1300663 - HTTPError: HTTP Error 404: Not Found
-        # Bug 1300413 - HTTPError: HTTP Error 500: Internal Server Error
-        # Bug 1300943 - HTTPError: HTTP Error 503: Service Unavailable
-        # Bug 1300953 - URLError: <urlopen error [Errno -2] Name or service not known>
-        # Bug 1301594 - URLError: <urlopen error [Errno 10054] An existing connection was ...
-        # Bug 1301597 - URLError: <urlopen error [Errno 8] _ssl.c:504: EOF occurred in ...
-        # Bug 1301855 - URLError: <urlopen error [Errno 60] Operation timed out>
-        # Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
-        # Bug 1301807 - BadStatusLine: ''
-        response = urllib2.urlopen(request)
+        # When calling fetch_url_into_memory() you should retry when we raise one of these exceptions:
+        # * Bug 1300663 - HTTPError: HTTP Error 404: Not Found
+        # * Bug 1300413 - HTTPError: HTTP Error 500: Internal Server Error
+        # * Bug 1300943 - HTTPError: HTTP Error 503: Service Unavailable
+        # * Bug 1300953 - URLError: <urlopen error [Errno -2] Name or service not known>
+        # * Bug 1301594 - URLError: <urlopen error [Errno 10054] An existing connection was ...
+        # * Bug 1301597 - URLError: <urlopen error [Errno 8] _ssl.c:504: EOF occurred in ...
+        # * Bug 1301855 - URLError: <urlopen error [Errno 60] Operation timed out>
+        # * Bug 1302237 - URLError: <urlopen error [Errno 104] Connection reset by peer>
+        # * Bug 1301807 - BadStatusLine: ''
+        #
+        # Bug 1309912 - Adding a timeout in hopes of solving blocking on response.read() (bug 1300413)
+        response = urllib2.urlopen(request, timeout=30)
 
         if parsed_url.scheme in ('http', 'https'):
             expected_file_size = int(response.headers.get('Content-Length'))
 
         self.info('Http code: {}'.format(response.getcode()))
         for k in sorted(response.headers.keys()):
             if k.lower().startswith('x-amz-') or k in ('Content-Encoding', 'Content-Type', 'via'):
                 self.info('{}: {}'.format(k, response.headers.get(k)))
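The comment block in the patch says callers of fetch_url_into_memory() should retry on the listed exceptions. Mozharness ships its own retry helper for this, so the sketch below is only a hypothetical standalone illustration of that pattern (the names fetch_with_retries, attempts, and sleeptime are made up, not from the source):

    import time
    import urllib2
    from httplib import BadStatusLine

    # HTTPError subclasses URLError, but both are named to mirror the
    # comment in the patch.
    RETRIABLE = (urllib2.HTTPError, urllib2.URLError, BadStatusLine)

    def fetch_with_retries(fetch, attempts=3, sleeptime=30):
        for attempt in range(attempts):
            try:
                return fetch()
            except RETRIABLE:
                if attempt == attempts - 1:
                    raise  # out of attempts; surface the failure
                time.sleep(sleeptime)

    # e.g. retrying the fetch from the patch:
    # content = fetch_with_retries(
    #     lambda: urllib2.urlopen(request, timeout=30).read())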