Bug 1382729: Upload documentation to S3 automatically; r=gps
☠☠ backed out by 1be0c1da0607 ☠ ☠
authorDustin J. Mitchell <dustin@mozilla.com>
Fri, 28 Jul 2017 18:32:04 +0000
changeset 420614 19f92c04608cec275dab73e8acad5141de8a5c44
parent 420613 e1ec4e403b9d6326fe38f51be3422790df8deb22
child 420615 57ff68e8e3fc476bbe5bc73addcca13bb5ce0097
push id7566
push usermtabara@mozilla.com
push dateWed, 02 Aug 2017 08:25:16 +0000
treeherdermozilla-beta@86913f512c3c [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersgps
bugs1382729
milestone56.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1382729: Upload documentation to S3 automatically; r=gps This uses credentials stored in the Taskcluster secret service. The task should only run on mozilla-central to avoid confusion between branches. MozReview-Commit-ID: 31XfTg0sCht
taskcluster/ci/source-test/doc.yml
tools/docs/mach_commands.py
--- a/taskcluster/ci/source-test/doc.yml
+++ b/taskcluster/ci/source-test/doc.yml
@@ -1,15 +1,16 @@
-sphinx:
+doc-generate:
     description: Generate the Sphinx documentation
     platform: lint/opt
     treeherder:
         symbol: tc(Doc)
         kind: test
         tier: 1
+    run-on-projects: [try]
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
         artifacts:
             - type: file
               name: public/docs.tar.gz
               path: /home/worker/checkouts/gecko/docs.tar.gz
@@ -21,8 +22,33 @@ sphinx:
             rm -rf docs-out/html/Mozilla_Source_Tree_Docs/_venv &&
             mv docs-out/html/Mozilla_Source_Tree_Docs docs &&
             tar -czf docs.tar.gz docs
     when:
         files-changed:
             - '**/*.py'
             - '**/*.rst'
             - 'tools/docs/**'
+
+doc-upload:
+    description: Generate and upload the Sphinx documentation
+    platform: lint/opt
+    treeherder:
+        symbol: tc(DocUp)
+        kind: test
+        tier: 3
+    run-on-projects: [mozilla-central]
+    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
+    worker:
+        docker-image: {in-tree: "lint"}
+        max-run-time: 1800
+    run:
+        using: run-task
+        command: >
+            cd /home/worker/checkouts/gecko &&
+            ./mach doc-upload --bucket gecko-docs.mozilla.org --region us-west-2
+    scopes:
+        - secrets:get:project/releng/gecko/build/level-3/gecko-docs-upload
+    when:
+        files-changed:
+            - '**/*.py'
+            - '**/*.rst'
+            - 'tools/docs/**'
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -105,13 +105,76 @@ class Documentation(MachCommandBase):
 
     def _find_doc_dir(self, path):
         search_dirs = ('doc', 'docs')
         for d in search_dirs:
             p = os.path.join(path, d)
             if os.path.isfile(os.path.join(p, 'conf.py')):
                 return p
 
+    @Command('doc-upload', category='devenv',
+        description='Generate and upload documentation from the tree.')
+    @CommandArgument('--bucket', required=True,
+        help='Target S3 bucket.')
+    @CommandArgument('--region', required=True,
+        help='Region containing target S3 bucket.')
+    @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
+        help='Path(s) to documentation to build and upload.')
+    def upload_docs(self, bucket, region, what=None):
+        self._activate_virtualenv()
+        self.virtualenv_manager.install_pip_package('boto3==1.4.4')
+
+        outdir = os.path.join(self.topobjdir, 'docs')
+        self.build_docs(what=what, outdir=outdir, format='html')
+
+        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'), bucket, region)
+
+    def s3_upload(self, root, bucket, region):
+        """Upload the contents of root recursively to S3"""
+        import boto3
+        import mimetypes
+        import requests
+
+        # Get the credentials from the TC secrets service.  Note that these are
+        # only available to level-3 pushes.
+        if 'TASK_ID' in os.environ:
+            print("Using AWS credentials from the secrets service")
+            session = requests.Session()
+            secrets_url = 'http://taskcluster/secrets/repo:hg.mozilla.org/mozilla-central/gecko-docs-upload'
+            res = session.get(secrets_url)
+            res.raise_for_status()
+            secret = res.json()
+            session = boto3.session.Session(
+                aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
+                aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
+                region_name=region)
+        else:
+            print("Trying to use your AWS credentials..")
+            session = boto3.session.Session(region_name=region)
+        s3 = session.client('s3')
+
+        try:
+            old_cwd = os.getcwd()
+            os.chdir(root)
+
+            for dir, dirs, filenames in os.walk('.'):
+                if dir == '.':
+                    # ignore a few things in the root directory
+                    bad = [d for d in dirs if d.startswith('.') or d in ('_venv', '_staging')]
+                    for b in bad:
+                        dirs.remove(b)
+                for filename in filenames:
+                    pathname = os.path.join(dir, filename)[2:]  # strip the leading './'
+                    content_type, content_encoding = mimetypes.guess_type(pathname)
+                    extra_args = {}
+                    if content_type:
+                        extra_args['ContentType'] = content_type
+                    if content_encoding:
+                        extra_args['ContentEncoding'] = content_encoding
+                    print('uploading', pathname)
+                    s3.upload_file(pathname, bucket, pathname, ExtraArgs=extra_args)
+        finally:
+            os.chdir(old_cwd)
 
 def die(msg, exit_code=1):
     msg = '%s: %s' % (sys.argv[0], msg)
     print(msg, file=sys.stderr)
     return exit_code