Bug 1147977 - Add additional timeouts and retries to curl downloads in tc-vcs r=jonasfj
author jlal@mozilla.com
Thu, 26 Mar 2015 14:27:17 -0700
changeset 264758 2f37a253a534c60c820aed589a58f715ea4abd1d
parent 264757 a57ba29a0170b3d9c34b79f8eda782f0b9bae248
child 264759 495e9f1af5c614ab33f5087f93798155fcc52e14
push id 4718
push user raliiev@mozilla.com
push date Mon, 11 May 2015 18:39:53 +0000
treeherder mozilla-beta@c20c4ef55f08
reviewers jonasfj
bugs 1147977
milestone 39.0a1
Bug 1147977 - Add additional timeouts and retries to curl downloads in tc-vcs r=jonasfj
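The get commands in the new config below bound slow downloads with two curl flags: --connect-timeout caps how long curl waits to establish a connection, and --speed-limit aborts a transfer whose throughput stays below the given rate (in bytes per second) for curl's --speed-time window, which defaults to 30 seconds when not set explicitly. A minimal sketch of the resulting behavior, using a hypothetical URL and output path:

  # Give up if no connection is established within 30 seconds; abort the
  # transfer if it averages under 500000 bytes/s for 30 seconds (the default
  # --speed-time window); -L follows redirects, -o writes the body to a file.
  curl --connect-timeout 30 --speed-limit 500000 -L \
       -o /tmp/sources.tar.gz https://example.com/sources/example.tar.gz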
testing/docker/tester/Dockerfile
testing/docker/tester/VERSION
testing/docker/tester/tc-vcs-config.yml
--- a/testing/docker/tester/Dockerfile
+++ b/testing/docker/tester/Dockerfile
@@ -5,16 +5,17 @@ MAINTAINER    Jonas Finnemann Jensen <jo
 COPY           b2g-desktop-config.py         /home/worker/b2g-desktop-config.py
 COPY           dot-config                    /home/worker/.config
 COPY           dot-pulse                     /home/worker/.pulse
 COPY           bin                           /home/worker/bin
 COPY           mozharness_configs            /home/worker/mozharness_configs
 COPY           buildprops.json               /home/worker/buildprops.json
 ADD            https://s3-us-west-2.amazonaws.com/test-caching/packages/linux64-stackwalk /usr/local/bin/linux64-minidump_stackwalk
 ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
+COPY           tc-vcs-config.yml             /etc/taskcluster-vcs.yml
 
 
 # Run test setup script
 RUN chmod u+x /home/worker/bin/buildbot_step
 RUN chmod u+x /usr/local/bin/linux64-minidump_stackwalk
 RUN apt-get install -y python-pip && pip install virtualenv;
 RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
 RUN chown -R worker:worker /home/worker/* /home/worker/.*
--- a/testing/docker/tester/VERSION
+++ b/testing/docker/tester/VERSION
@@ -1,1 +1,1 @@
-0.2.9
+0.2.10
new file mode 100644
--- /dev/null
+++ b/testing/docker/tester/tc-vcs-config.yml
@@ -0,0 +1,40 @@
+# Default configuration used by the tc-vcs tools; these can be overridden by
+# passing the config you wish to use on the command line...
+git: git
+hg: hg
+
+repoCache:
+  # Repo URL to clone when running repo init...
+  repoUrl: https://git.mozilla.org/external/google/gerrit/git-repo.git
+  # Version of repo to utilize...
+  repoRevision: master
+  # The root where all downloaded cache files are stored on the local machine...
+  cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
+  # Name/prefix used as part of the base URL.
+  cacheName: sources/{{name}}.tar.gz
+  # Command used to upload the tarball
+  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
+  # Large HTTP GET requests are often slower using node's built-in HTTP layer,
+  # so we use a subprocess that is responsible for fetching...
+  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
+  # Used to create clone tarball
+  compress: tar -czf {{dest}} {{source}}
+  # All cache URLs use tar + gz; this is the command used to extract the files
+  # downloaded by the "get" command.
+  extract: tar -x -z -C {{dest}} -f {{source}}
+
+cloneCache:
+  # The root where all downloaded cache files are stored on the local machine...
+  cacheDir: '{{env.HOME}}/.tc-vcs/'
+  # Command used to upload the tarball
+  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
+  # Large HTTP GET requests are often slower using node's built-in HTTP layer,
+  # so we use a subprocess that is responsible for fetching...
+  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
+  # Used to create clone tarball
+  compress: tar -czf {{dest}} {{source}}
+  # All cache URLs use tar + gz; this is the command used to extract the files
+  # downloaded by the "get" command.
+  extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
+  # Name/prefix used as part of the base URL.
+  cacheName: clones/{{name}}.tar.gz
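
Note the one difference between the two extract commands: repoCache unpacks the tarball as-is, while cloneCache passes --strip-components 1 so the tarball's top-level directory is dropped and the clone's contents land directly in {{dest}}. A rough round trip under that config, with hypothetical paths standing in for the {{source}} and {{dest}} template values:

  # compress: pack a clone into a gzipped tarball (top-level dir included)
  tar -czf /tmp/example-clone.tar.gz example-clone
  # extract (cloneCache): drop the leading "example-clone/" path component
  # so the repository contents land directly under the destination
  mkdir -p /home/worker/dest
  tar -x -z --strip-components 1 -C /home/worker/dest -f /tmp/example-clone.tar.gz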