Backed out changeset 19f92c04608c for breaking try pushes a=bustage
author      Dustin J. Mitchell <dustin@mozilla.com>
date        Mon, 31 Jul 2017 16:04:01 +0000
changeset 420671 1be0c1da06076f85c69cd8a9d244e0164ec544d9
parent 420670 26516ba270816a6cc90f5c42a9b66701369a551f
child 420672 f632eede0f19b8d81ee2bb0de48c2cc996d5906f
push id     7566
push user   mtabara@mozilla.com
push date   Wed, 02 Aug 2017 08:25:16 +0000
treeherder  mozilla-beta@86913f512c3c
reviewers   bustage
milestone   56.0a1
backs out   19f92c04608cec275dab73e8acad5141de8a5c44
Backed out changeset 19f92c04608c for breaking try pushes a=bustage

MozReview-Commit-ID: FowbO5T9aUl
taskcluster/ci/source-test/doc.yml
tools/docs/mach_commands.py
--- a/taskcluster/ci/source-test/doc.yml
+++ b/taskcluster/ci/source-test/doc.yml
@@ -1,16 +1,15 @@
-doc-generate:
+sphinx:
     description: Generate the Sphinx documentation
     platform: lint/opt
     treeherder:
         symbol: tc(Doc)
         kind: test
         tier: 1
-    run-on-projects: [try]
     worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
     worker:
         docker-image: {in-tree: "lint"}
         max-run-time: 1800
         artifacts:
             - type: file
               name: public/docs.tar.gz
               path: /home/worker/checkouts/gecko/docs.tar.gz
@@ -22,33 +21,8 @@ doc-generate:
             rm -rf docs-out/html/Mozilla_Source_Tree_Docs/_venv &&
             mv docs-out/html/Mozilla_Source_Tree_Docs docs &&
             tar -czf docs.tar.gz docs
     when:
         files-changed:
             - '**/*.py'
             - '**/*.rst'
             - 'tools/docs/**'
-
-doc-upload:
-    description: Generate and upload the Sphinx documentation
-    platform: lint/opt
-    treeherder:
-        symbol: tc(DocUp)
-        kind: test
-        tier: 3
-    run-on-projects: [mozilla-central]
-    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
-    worker:
-        docker-image: {in-tree: "lint"}
-        max-run-time: 1800
-    run:
-        using: run-task
-        command: >
-            cd /home/worker/checkouts/gecko &&
-            ./mach doc-upload --bucket gecko-docs.mozilla.org --region us-west-2
-    scopes:
-        - secrets:get:project/releng/gecko/build/level-3/gecko-docs-upload
-    when:
-        files-changed:
-            - '**/*.py'
-            - '**/*.rst'
-            - 'tools/docs/**'
--- a/tools/docs/mach_commands.py
+++ b/tools/docs/mach_commands.py
@@ -105,76 +105,13 @@ class Documentation(MachCommandBase):
 
     def _find_doc_dir(self, path):
         search_dirs = ('doc', 'docs')
         for d in search_dirs:
             p = os.path.join(path, d)
             if os.path.isfile(os.path.join(p, 'conf.py')):
                 return p
 
-    @Command('doc-upload', category='devenv',
-        description='Generate and upload documentation from the tree.')
-    @CommandArgument('--bucket', required=True,
-        help='Target S3 bucket.')
-    @CommandArgument('--region', required=True,
-        help='Region containing target S3 bucket.')
-    @CommandArgument('what', nargs='*', metavar='DIRECTORY [, DIRECTORY]',
-        help='Path(s) to documentation to build and upload.')
-    def upload_docs(self, bucket, region, what=None):
-        self._activate_virtualenv()
-        self.virtualenv_manager.install_pip_package('boto3==1.4.4')
-
-        outdir = os.path.join(self.topobjdir, 'docs')
-        self.build_docs(what=what, outdir=outdir, format='html')
-
-        self.s3_upload(os.path.join(outdir, 'html', 'Mozilla_Source_Tree_Docs'), bucket, region)
-
-    def s3_upload(self, root, bucket, region):
-        """Upload the contents of outdir recursively to S3"""
-        import boto3
-        import mimetypes
-        import requests
-
-        # Get the credentials from the TC secrets service.  Note that these are
-        # only available to level-3 pushes.
-        if 'TASK_ID' in os.environ:
-            print("Using AWS credentials from the secrets service")
-            session = requests.Session()
-            secrets_url = 'http://taskcluster/secrets/repo:hg.mozilla.org/mozilla-central/gecko-docs-upload'
-            res = session.get(secrets_url)
-            res.raise_for_status()
-            secret = res.json()
-            session = boto3.session.Session(
-                aws_access_key_id=secret['AWS_ACCESS_KEY_ID'],
-                aws_secret_access_key=secret['AWS_SECRET_ACCESS_KEY'],
-                region_name=region)
-        else:
-            print("Trying to use your AWS credentials..")
-            session = boto3.session.Session(region_name=region)
-        s3 = session.client('s3')
-
-        try:
-            old_cwd = os.getcwd()
-            os.chdir(root)
-
-            for dir, dirs, filenames in os.walk('.'):
-                if dir == '.':
-                    # ignore a few things in the root directory
-                    bad = [d for d in dirs if d.startswith('.') or d in ('_venv', '_staging')]
-                    for b in bad:
-                        dirs.remove(b)
-                for filename in filenames:
-                    pathname = os.path.join(dir, filename)[2:]  # strip the leading './'
-                    content_type, content_encoding = mimetypes.guess_type(pathname)
-                    extra_args = {}
-                    if content_type:
-                        extra_args['ContentType'] = content_type
-                    if content_encoding:
-                        extra_args['ContentEncoding'] = content_encoding
-                    print('uploading', pathname)
-                    s3.upload_file(pathname, bucket, pathname, ExtraArgs=extra_args)
-        finally:
-            os.chdir(old_cwd)
 
 def die(msg, exit_code=1):
     msg = '%s: %s' % (sys.argv[0], msg)
     print(msg, file=sys.stderr)
     return exit_code