taskcluster/docker/desktop1604-test/tc-vcs-config.yml

# Default configuration used by the tc-vcs tools; these can be overridden by
# passing the config you wish to use on the command line...
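# For illustration, a hypothetical invocation that overrides this file (the
# --config flag and exact command form are assumptions about the tc-vcs CLI,
# not confirmed by this file):
#
#   tc-vcs clone --config ./my-tc-vcs-config.yml https://hg.mozilla.org/mozilla-central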
git: git
hg: hg

repoCache:
  # Repo URL to clone when running repo init...
  repoUrl: https://gerrit.googlesource.com/git-repo.git
  # Revision of repo to use...
  repoRevision: master
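  # For illustration, the two settings above roughly correspond to a repo
  # init invocation like the following (the manifest URL is a hypothetical
  # placeholder; --repo-url/--repo-branch are repo's flags for pinning the
  # repo tool itself):
  #
  #   repo init -u <manifest-url> \
  #     --repo-url=https://gerrit.googlesource.com/git-repo.git \
  #     --repo-branch=master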
  # The root where all downloaded cache files are stored on the local machine...
  cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
  # Name/prefix used as part of the base URL.
  cacheName: sources/{{name}}.tar.gz
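  # For example, with {{name}} = git-repo this template expands to
  # sources/git-repo.tar.gz (the name value is illustrative).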
  # Command used to upload the tarball
  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
  # Large HTTP GET requests are often slower through Node's built-in HTTP
  # layer, so we use a subprocess that is responsible for fetching...
  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
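  # Note on the flags above: --connect-timeout 30 gives up if the connection
  # is not established within 30 seconds; --speed-limit 500000 aborts the
  # transfer if it stays below 500000 bytes/second for curl's default
  # --speed-time window of 30 seconds; -L follows redirects.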
  # Used to create the clone tarball
  compress: tar -czf {{dest}} {{source}}
  # All cache URLs use tar + gzip; this is the command used to extract the
  # files downloaded by the "get" command.
  extract: tar -x -z -C {{dest}} -f {{source}}
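  # For illustration, with {{source}} = git-repo and {{dest}} = cache.tar.gz,
  # compress runs "tar -czf cache.tar.gz git-repo"; extract then reverses it
  # with "tar -x -z -C <dest-dir> -f cache.tar.gz" (the values are
  # hypothetical placeholders).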

cloneCache:
  # The root where all downloaded cache files are stored on the local machine...
  cacheDir: '{{env.HOME}}/.tc-vcs/'
  # Command used to upload the tarball
  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
  # Large HTTP GET requests are often slower through Node's built-in HTTP
  # layer, so we use a subprocess that is responsible for fetching...
  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
  # Used to create the clone tarball
  compress: tar -czf {{dest}} {{source}}
  # All cache URLs use tar + gzip; this is the command used to extract the
  # files downloaded by the "get" command.
  extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
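  # Unlike the repoCache extract, this passes --strip-components 1,
  # presumably because clone tarballs wrap the repository in a single
  # top-level directory; stripping it places the contents directly in
  # {{dest}}. For example, a tarball entry such as
  # mozilla-central/.hg/requires would land at {{dest}}/.hg/requires
  # (the directory name is illustrative).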
  # Name/prefix used as part of the base URL.
  cacheName: clones/{{name}}.tar.gz