--- a/bin/try_mailer.py
+++ b/bin/try_mailer.py
@@ -216,17 +216,17 @@ if __name__ == '__main__':
# Generate the message
if tm_options.all_emails:
msgdict = makeTryMessage(build, log_url)
elif tm_options.failure:
if result != SUCCESS and result != RETRY:
msgdict = makeTryMessage(build, log_url)
# Send it!
- if msgdict != None:
+ if msgdict is not None:
if options.to_author:
options.to.append(msgdict['author'])
msg = formatMessage(msgdict, options.from_, options.to)
print msg
s = SMTP()
s.connect(options.relayhost)
s.sendmail(options.from_, options.to, msg.as_string())
--- a/changes/hgpoller.py
+++ b/changes/hgpoller.py
@@ -305,17 +305,17 @@ class BaseHgPoller(BasePoller):
# Keep the comments and revision of the last change of this push.
# We're going through the changes in reverse order, so we
# should use the comments and revision of the first change
# in this loop
if c['node'] is None:
c['desc'] = change['desc']
c['node'] = change['node']
- title = change['desc'].split('\n', 1)[0];
+ title = change['desc'].split('\n', 1)[0]
if len(title) > 100:
trim_pos = title.rfind(' ', 0, 100)
if trim_pos == -1:
trim_pos = 100
title = title[:trim_pos]
# The commit titles are stored in a Change property, which
# are limited to 1024 chars in the database (see
# change_properties in buildbot/db/scheme/tables.sql). In
--- a/l10n.py
+++ b/l10n.py
@@ -88,17 +88,17 @@ class L10nMixin(object):
a URL that contains the list of locales
"""
self.branch = branch
self.baseTag = baseTag
if localesURL:
self.localesURL = localesURL
else:
# Make sure that branch is not none when using this path
- assert branch != None
+ assert branch is not None
# revision will be expanded later
self.localesURL = "%s%s/raw-file/%%(revision)s/%s" % \
(repo, branch, localesFile)
# if the user wants to use something different than all locales
# check ParseLocalesFile function to note that we now need a dictionary
# with the locale as the key and a list of platform as the value for
# each key to build a specific locale e.g. locales={'fr':['osx']}
--- a/misc.py
+++ b/misc.py
@@ -69,17 +69,17 @@ def get_l10n_repositories(file, l10nRepo
"""
if not l10nRepoPath.endswith('/'):
l10nRepoPath = l10nRepoPath + '/'
repositories = {}
for localeLine in open(file).readlines():
locale, revision = localeLine.rstrip().split()
if revision == 'FIXME':
raise Exception('Found FIXME in %s for locale "%s"' %
- (file, locale))
+ (file, locale))
locale = urljoin(l10nRepoPath, locale)
repositories[locale] = {
'revision': revision,
'relbranchOverride': relbranch,
'bumpFiles': []
}
return repositories
@@ -93,17 +93,17 @@ def get_locales_from_json(jsonFile, l10n
platformLocales = collections.defaultdict(dict)
file = open(jsonFile)
localesJson = json.load(file)
for locale in localesJson.keys():
revision = localesJson[locale]['revision']
if revision == 'FIXME':
raise Exception('Found FIXME in %s for locale "%s"' %
- (jsonFile, locale))
+ (jsonFile, locale))
localeUrl = urljoin(l10nRepoPath, locale)
l10nRepositories[localeUrl] = {
'revision': revision,
'relbranchOverride': relbranch,
'bumpFiles': []
}
for platform in localesJson[locale]['platforms']:
platformLocales[platform][locale] = localesJson[
@@ -223,17 +223,17 @@ def changeContainsProperties(change, pro
def generateTestBuilderNames(name_prefix, suites_name, suites):
test_builders = []
if isinstance(suites, dict) and "totalChunks" in suites:
totalChunks = suites['totalChunks']
for i in range(totalChunks):
test_builders.append('%s %s-%i' %
- (name_prefix, suites_name, i + 1))
+ (name_prefix, suites_name, i + 1))
else:
test_builders.append('%s %s' % (name_prefix, suites_name))
return test_builders
def _getLastTimeOnBuilder(builder, slavename):
# New builds are at the end of the buildCache, so
@@ -999,17 +999,17 @@ def generateDesktopMozharnessBuilders(na
'product': pf['stage_product'],
}
dep_signing_servers = secrets.get(pf.get('dep_signing_servers'))
nightly_signing_servers = secrets.get(pf.get('nightly_signing_servers'))
# grab the l10n schedulers that nightlies will trigger (if any)
triggered_nightly_schedulers = []
if (config['enable_l10n'] and platform in config['l10n_platforms'] and
- '%s nightly' % pf['base_name'] in l10nNightlyBuilders):
+ '%s nightly' % pf['base_name'] in l10nNightlyBuilders):
triggered_nightly_schedulers = [
l10nNightlyBuilders['%s nightly' % pf['base_name']]['l10n_builder']
]
# if we do a generic dep build
if pf.get('enable_dep', True):
factory = makeMHFactory(config, pf, mh_cfg=mh_cfg,
extra_args=base_extra_args,
@@ -1096,16 +1096,17 @@ def generateDesktopMozharnessBuilders(na
'properties': mh_build_properties.copy(),
}
desktop_mh_builders.append(pgo_builder)
builds_created['done_pgo_build'] = True
# finally let's return which builders we did so we know what's left to do!
return desktop_mh_builders
+
def generateBranchObjects(config, name, secrets=None):
"""name is the name of branch which is usually the last part of the path
to the repository. For example, 'mozilla-central', 'mozilla-aurora', or
'mozilla-1.9.1'.
config is a dictionary containing all of the necessary configuration
information for a branch. The required keys depends greatly on what's
enabled for a branch (unittests, xulrunner, l10n, etc). The best way
to figure out what you need to pass is by looking at existing configs
@@ -1219,17 +1220,17 @@ def generateBranchObjects(config, name,
# Fill the l10n dep dict
if config['enable_l10n'] and platform in config['l10n_platforms'] and \
config['enable_l10n_onchange']:
l10nBuilders[base_name] = {}
l10nBuilders[base_name]['tree'] = config['l10n_tree']
l10nBuilders[base_name]['l10n_builder'] = \
'%s %s %s l10n dep' % (pf['product_name'].capitalize(),
- name, platform)
+ name, platform)
l10nBuilders[base_name]['platform'] = platform
# Check if branch wants nightly builds
if config['enable_nightly']:
if 'enable_nightly' in pf:
do_nightly = pf['enable_nightly']
else:
do_nightly = True
else:
@@ -1253,17 +1254,17 @@ def generateBranchObjects(config, name,
builder = '%s nightly' % base_name
nightlyBuilders.append(builder)
# Fill the l10nNightly dict
if config['enable_l10n'] and platform in config['l10n_platforms']:
l10nNightlyBuilders[builder] = {}
l10nNightlyBuilders[builder]['tree'] = config['l10n_tree']
l10nNightlyBuilders[builder]['l10n_builder'] = \
'%s %s %s l10n nightly' % (pf['product_name'].capitalize(),
- name, platform)
+ name, platform)
l10nNightlyBuilders[builder]['platform'] = platform
if platform in ('linux64',):
if config.get('enable_blocklist_update', False) or \
config.get('enable_hsts_update', False) or \
config.get('enable_hpkp_update', False):
weeklyBuilders.append('%s periodic file update' % base_name)
if pf.get('enable_xulrunner', config['enable_xulrunner']):
xulrunnerNightlyBuilders.append('%s xulrunner nightly' % base_name)
@@ -1455,17 +1456,17 @@ def generateBranchObjects(config, name,
platform = l10nNightlyBuilders[builder]['platform']
branchObjects['schedulers'].append(TriggerableL10n(
name=l10n_builder,
platform=platform,
builderNames=[l10n_builder],
branch=config['repo_path'],
baseTag='default',
localesURL=config.get(
- 'localesURL', None)
+ 'localesURL', None)
))
if weeklyBuilders:
weekly_scheduler = Nightly(
name='weekly-%s' % scheduler_name_prefix,
branch=config['repo_path'],
dayOfWeek=5, # Saturday
hour=[3], minute=[02],
@@ -1489,19 +1490,19 @@ def generateBranchObjects(config, name,
# etc builders
# For now, let's just record when we create desktop builds like
# generic, pgo, and nightly via mozharness and fall back to buildbot
# for other builder factory logic (outside the mozharness_config
# condition)
# NOTE: when we no longer need to fall back for remaining builders,
# we will not need these booleans
builder_tracker = {
- 'done_generic_build': False, # this is the basic pf
- 'done_pgo_build': False, # generic pf + pgo
- 'done_nightly_build': False, # generic pf + nightly
+ 'done_generic_build': False, # this is the basic pf
+ 'done_pgo_build': False, # generic pf + pgo
+ 'done_nightly_build': False, # generic pf + nightly
'done_nonunified_build': False, # generic pf + nonunified
}
if 'mozharness_desktop_build' in pf:
# platform is a desktop pf and is able to do use mozharness
if config.get('desktop_mozharness_builds_enabled'):
# branch has desktop mozharness builds enabled
branchObjects['builders'].extend(
@@ -1794,18 +1795,18 @@ def generateBranchObjects(config, name,
# We have some platforms which need to be built every X hours with PGO.
# These builds are as close to regular dep builds as we can make them,
# other than PGO
# builder_tracker just checks to see if we used
# mozharness to create this builder already. Once we port all
# builders to mozharness we won't need pgo_builder at
# all
if (config['pgo_strategy'] in ('periodic', 'try') and
- platform in config['pgo_platforms'] and not
- builder_tracker['done_pgo_build']):
+ platform in config['pgo_platforms'] and not
+ builder_tracker['done_pgo_build']):
pgo_kwargs = factory_kwargs.copy()
pgo_kwargs["doPostLinkerSize"] = pf.get('enable_post_linker_size', False)
pgo_kwargs['profiledBuild'] = True
pgo_kwargs['stagePlatform'] += '-pgo'
pgo_kwargs['unittestBranch'] = pgoUnittestBranch
pgo_factory = factory_class(**pgo_kwargs)
pgo_builder = {
'name': '%s pgo-build' % pf['base_name'],
@@ -2354,17 +2355,17 @@ def generateBranchObjects(config, name,
branchObjects['builders'].append(mozilla2_l10n_dep_builder)
if config['enable_valgrind'] and \
platform in config['valgrind_platforms']:
valgrind_env = pf['env'].copy()
valgrind_env['REVISION'] = WithProperties("%(revision)s")
mozilla2_valgrind_factory = ScriptFactory(
scriptRepo="%s%s" % (config['hgurl'],
- config['build_tools_repo_path']),
+ config['build_tools_repo_path']),
scriptName='scripts/valgrind/valgrind.sh',
use_mock=pf.get('use_mock'),
mock_target=pf.get('mock_target'),
mock_packages=pf.get('mock_packages'),
mock_copyin_files=pf.get('mock_copyin_files'),
env=valgrind_env,
reboot_command=['python',
'scripts/buildfarm/maintenance/count_and_reboot.py',
@@ -2377,20 +2378,20 @@ def generateBranchObjects(config, name,
'slavenames': pf['slaves'],
'builddir': '%s-%s-valgrind' % (name, platform),
'slavebuilddir': normalizeName('%s-%s-valgrind' % (name, platform), pf['stage_product']),
'factory': mozilla2_valgrind_factory,
'category': name,
'env': valgrind_env,
'nextSlave': _nextAWSSlave_wait_sort,
'properties': {'branch': name,
- 'platform': platform,
- 'stage_platform': stage_platform,
- 'product': pf['stage_product'],
- 'slavebuilddir': normalizeName('%s-%s-valgrind' % (name, platform), pf['stage_product'])},
+ 'platform': platform,
+ 'stage_platform': stage_platform,
+ 'product': pf['stage_product'],
+ 'slavebuilddir': normalizeName('%s-%s-valgrind' % (name, platform), pf['stage_product'])},
}
branchObjects['builders'].append(mozilla2_valgrind_builder)
if platform in ('linux64',):
if config.get('enable_blocklist_update', False) or \
config.get('enable_hsts_update', False) or \
config.get('enable_hpkp_update', False):
periodicFileUpdateBuilder = generatePeriodicFileUpdateBuilder(
@@ -2574,34 +2575,36 @@ def generateTalosBranchObjects(branch, b
'platform': slave_platform,
'stage_platform': stage_platform,
'product': stage_product,
'builddir': builddir,
'slavebuilddir': slavebuilddir,
}
def _makeGenerateMozharnessTalosBuilderArgs(suite, talos_branch, platform,
- factory_kwargs, branch_config, platform_config):
+ factory_kwargs, branch_config, platform_config):
mh_conf = platform_config['mozharness_config']
extra_args = []
if 'android' not in platform:
extra_args = ['--suite', suite,
- '--add-option',
- ','.join(['--webServer', 'localhost']),
- '--branch-name', talos_branch,
- '--system-bits', mh_conf['system_bits'],
- '--cfg', mh_conf['config_file']]
+ '--add-option',
+ ','.join(['--webServer', 'localhost']),
+ '--branch-name', talos_branch,
+ '--system-bits', mh_conf['system_bits'],
+ '--cfg', mh_conf['config_file']]
if factory_kwargs['fetchSymbols']:
extra_args += ['--download-symbols', 'ondemand']
if factory_kwargs["talos_from_source_code"]:
extra_args.append('--use-talos-json')
scriptpath = "scripts/talos_script.py"
else:
- extra_args.extend (['--talos-suite', suite, '--cfg', 'android/android_panda_talos_releng.py', '--branch-name', talos_branch])
+ extra_args.extend(['--talos-suite', suite,
+ '--cfg', 'android/android_panda_talos_releng.py',
+ '--branch-name', talos_branch])
scriptpath = "scripts/android_panda_talos.py"
# add branch config specification if blobber is enabled
if branch_config.get('blob_upload'):
extra_args.extend(['--blob-upload-branch', talos_branch])
args = {
'platform': platform,
'mozharness_repo': branch_config['mozharness_repo'],
'script_path': scriptpath,
@@ -2651,17 +2654,17 @@ def generateTalosBranchObjects(branch, b
'platform': slave_platform,
'stage_platform': stage_platform + '-pgo',
'product': stage_product,
'builddir': builddir,
'slavebuilddir': slavebuilddir,
}
if branch_config.get('mozharness_talos') and not platform_config.get('is_mobile'):
args = _makeGenerateMozharnessTalosBuilderArgs(suite, talosBranch, platform,
- factory_kwargs, branch_config, platform_config)
+ factory_kwargs, branch_config, platform_config)
pgo_factory = generateMozharnessTalosBuilder(**args)
properties['script_repo_revision'] = branch_config['mozharness_tag']
else:
pgo_factory_kwargs = factory_kwargs.copy()
pgo_factory_kwargs['branchName'] = branchName
pgo_factory_kwargs['talosBranch'] = talosBranch
pgo_factory = factory_class(**pgo_factory_kwargs)
@@ -2699,17 +2702,17 @@ def generateTalosBranchObjects(branch, b
for test_type in testTypes:
test_builders = []
pgo_builders = []
triggeredUnittestBuilders = []
pgoUnittestBuilders = []
unittest_suites = "%s_unittest_suites" % test_type
build_dir_prefix = platform_config[slave_platform].get(
- 'build_dir_prefix', slave_platform)
+ 'build_dir_prefix', slave_platform)
if test_type == "debug":
# Debug tests always need to download symbols for
# runtime assertions
pf = branch_config['platforms'][platform]
if pf.get('download_symbols', False) or pf.get('download_symbols_ondemand', True):
# Copy the platform config so we can modify it here
# safely
branch_config['platforms'][platform] = deepcopy(branch_config['platforms'][platform])
@@ -2732,17 +2735,17 @@ def generateTalosBranchObjects(branch, b
if create_pgo_builders and test_type == 'opt':
pgo_builders.extend(generateTestBuilderNames(
'%s %s pgo test' % (platform_name, branch), suites_name, suites))
if type(suites) is dict and suites.has_key('trychooser_suites'):
for s in suites['trychooser_suites']:
builders_with_sets_mapping[s] = suites_name
scheduler_slave_platform_identifier = platform_config[slave_platform].get(
- 'scheduler_slave_platform_identifier', slave_platform)
+ 'scheduler_slave_platform_identifier', slave_platform)
triggeredUnittestBuilders.append(
(
'tests-%s-%s-%s-unittest' % (
branch, scheduler_slave_platform_identifier, test_type),
test_builders, merge_tests))
if create_pgo_builders and test_type == 'opt':
pgoUnittestBuilders.append(
(
@@ -2772,17 +2775,17 @@ def generateTalosBranchObjects(branch, b
test_builder_kwargs['mozharness_repo'] = branch_config['mozharness_repo']
test_builder_kwargs['mozharness_tag'] = branch_config['mozharness_tag']
test_builder_kwargs['mozharness'] = True
# allow mozharness_python to be overridden per test slave platform in case Python
# not installed to a consistent location.
if 'mozharness_config' in platform_config[slave_platform] and \
'mozharness_python' in platform_config[slave_platform]['mozharness_config']:
test_builder_kwargs['mozharness_python'] = \
- platform_config[slave_platform]['mozharness_config']['mozharness_python']
+ platform_config[slave_platform]['mozharness_config']['mozharness_python']
else:
test_builder_kwargs['mozharness_python'] = platform_config['mozharness_config']['mozharness_python']
if suites_name in branch_config['platforms'][platform][slave_platform].get('suite_config', {}):
test_builder_kwargs['mozharness_suite_config'] = deepcopy(branch_config['platforms'][platform][slave_platform]['suite_config'][suites_name])
else:
test_builder_kwargs[
'mozharness_suite_config'] = {}
test_builder_kwargs['mozharness_suite_config']['hg_bin'] = platform_config['mozharness_config']['hg_bin']
@@ -2993,17 +2996,17 @@ def generatePeriodicFileUpdateBuilder(co
extra_args.extend(['--blocklist'])
if config['enable_hsts_update'] is True:
extra_args.extend(['--hsts'])
if config['enable_hpkp_update'] is True:
extra_args.extend(['--hpkp'])
periodic_file_update_factory = ScriptFactory(
"%s%s" % (config['hgurl'],
- config['build_tools_repo_path']),
+ config['build_tools_repo_path']),
'scripts/periodic_file_updates/periodic_file_updates.sh',
interpreter='bash',
extra_args=extra_args,
)
periodic_file_update_builder = {
'name': '%s periodic file update' % base_name,
'slavenames': slaves,
'builddir': '%s-%s-periodicupdate' % (branch_name, platform),
@@ -3212,17 +3215,17 @@ def generateJetpackObjects(config, SLAVE
types.append('debug')
for type_ in types:
if type_ == 'debug':
ftp_url = ftp_url + "-debug"
f = ScriptFactory(
config['scripts_repo'],
'buildfarm/utils/run_jetpack.py',
extra_args=(
- "-p", platform, "-t", jetpack_tarball, "-b", branch,
+ "-p", platform, "-t", jetpack_tarball, "-b", branch,
"-f", ftp_url, "-e", config['platforms'][platform]['ext'],),
interpreter='python',
log_eval_func=rc_eval_func({1: WARNINGS, 2: FAILURE,
4: EXCEPTION, 5: RETRY}),
reboot_command=['python',
'scripts/buildfarm/maintenance/count_and_reboot.py',
'-f', './reboot_count.txt',
'-n', '0',
--- a/process/factory.py
+++ b/process/factory.py
@@ -377,17 +377,17 @@ class MockMixin(object):
target=self.mock_target,
packages=self.mock_packages,
timeout=2700,
))
class TooltoolMixin(object):
def addTooltoolStep(self, **kwargs):
- command=[
+ command = [
'sh',
WithProperties(
'%(toolsdir)s/scripts/tooltool/tooltool_wrapper.sh'),
self.tooltool_manifest_src,
self.tooltool_url_list[0],
self.tooltool_bootstrap,
]
if self.tooltool_script:
@@ -517,17 +517,17 @@ class MozillaBuildFactory(RequestSorting
))
if self.clobberURL is not None:
self.addStep(MozillaClobberer(
name='checking_clobber_times',
branch=self.clobberBranch,
clobber_url=self.clobberURL,
clobberer_path=WithProperties(
- '%(builddir)s/tools/clobberer/clobberer.py'),
+ '%(builddir)s/tools/clobberer/clobberer.py'),
clobberTime=self.clobberTime
))
if self.buildSpace > 0:
command = ['python', 'tools/buildfarm/maintenance/purge_builds.py',
'-s', str(self.buildSpace)]
for i in self.ignore_dirs:
@@ -568,17 +568,17 @@ class MozillaBuildFactory(RequestSorting
command=command,
description=['cleaning', 'old', 'builds'],
descriptionDone=['clean', 'old', 'builds'],
haltOnFailure=True,
workdir='.',
timeout=3600, # One hour, because Windows is slow
extract_fn=parse_purge_builds,
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, purge_error),
+ c, s, purge_error),
env=self.env,
))
if self.use_mock:
self.addStep(MockReset(
target=self.mock_target,
))
self.addStep(MockInit(
@@ -592,20 +592,20 @@ class MozillaBuildFactory(RequestSorting
if 'SCHEDULED REBOOT' in cmd.logs['stdio'].getText():
return True
except:
pass
return False
self.addStep(DisconnectStep(
name='maybe_rebooting',
command=[
- 'python', 'tools/buildfarm/maintenance/count_and_reboot.py',
- '-f', '../reboot_count.txt',
- '-n', str(self.buildsBeforeReboot),
- '-z'],
+ 'python', 'tools/buildfarm/maintenance/count_and_reboot.py',
+ '-f', '../reboot_count.txt',
+ '-n', str(self.buildsBeforeReboot),
+ '-z'],
description=['maybe rebooting'],
force_disconnect=do_disconnect,
warnOnFailure=False,
flunkOnFailure=False,
alwaysRun=True,
workdir='.'
))
@@ -625,18 +625,18 @@ class MozillaBuildFactory(RequestSorting
return '%s://%s/%s' % (proto, hgHost, repoPath)
def getPackageFilename(self, platform, platform_variation):
if 'android-armv6' in self.complete_platform:
packageFilename = '*arm-armv6.apk'
elif 'android-x86' in self.complete_platform:
packageFilename = '*android-i386.apk'
elif 'android' in self.complete_platform:
- packageFilename = '*arm.apk' # the arm.apk is to avoid
- # unsigned/unaligned apks
+ # the arm.apk is to avoid unsigned/unaligned apks
+ packageFilename = '*arm.apk'
elif 'maemo' in self.complete_platform:
packageFilename = '*.linux-*-arm.tar.*'
elif platform.startswith("linux64"):
packageFilename = '*.linux-x86_64*.tar.bz2'
elif platform.startswith("linux"):
packageFilename = '*.linux-i686*.tar.bz2'
elif platform.startswith("macosx"):
packageFilename = '*.dmg'
@@ -1204,17 +1204,17 @@ class MercurialBuildFactory(MozillaBuild
pkg_patterns = []
for product in ('firefox-', 'fennec', 'seamonkey', 'thunderbird'):
pkg_patterns.append('%s/dist/%s*' % (self.mozillaObjdir,
product))
self.addStep(ShellCommand(
name='rm_old_pkg',
command="rm -rf %s %s/dist/install/sea/*.exe " %
- (' '.join(pkg_patterns), self.mozillaObjdir),
+ (' '.join(pkg_patterns), self.mozillaObjdir),
env=self.env,
description=['deleting', 'old', 'package'],
descriptionDone=['delete', 'old', 'package']
))
def addGaiaSourceSteps(self):
if self.gaiaRevisionFile:
def parse_gaia_revision(rc, stdout, stderr):
@@ -1380,28 +1380,28 @@ class MercurialBuildFactory(MozillaBuild
def addBuildInfoSteps(self):
"""Helper function for getting build information into properties.
Looks for self._gotBuildInfo to make sure we only run this set of steps
once."""
if not getattr(self, '_gotBuildInfo', False):
self.addStep(SetProperty(
command=[
'python', 'build%s/config/printconfigsetting.py' % self.mozillaSrcDir,
- 'build/%s/dist/bin/application.ini' % self.mozillaObjdir,
- 'App', 'BuildID'],
+ 'build/%s/dist/bin/application.ini' % self.mozillaObjdir,
+ 'App', 'BuildID'],
property='buildid',
workdir='.',
description=['getting', 'buildid'],
descriptionDone=['got', 'buildid'],
))
self.addStep(SetProperty(
command=[
'python', 'build%s/config/printconfigsetting.py' % self.mozillaSrcDir,
- 'build/%s/dist/bin/application.ini' % self.mozillaObjdir,
- 'App', 'SourceStamp'],
+ 'build/%s/dist/bin/application.ini' % self.mozillaObjdir,
+ 'App', 'SourceStamp'],
property='sourcestamp',
workdir='.',
description=['getting', 'sourcestamp'],
descriptionDone=['got', 'sourcestamp']
))
self._gotBuildInfo = True
def addBuildAnalysisSteps(self):
@@ -1446,17 +1446,17 @@ class MercurialBuildFactory(MozillaBuild
'TinderboxPrint: num_ctors: %(num_ctors:-unknown)s'),
))
def addPostLinkerSizeSteps(self):
# Analyze the linker max vsize
def get_linker_vsize(rc, stdout, stderr):
try:
vsize = int(stdout)
- testresults = [ ('libxul_link', 'libxul_link', vsize, str(vsize)) ]
+ testresults = [('libxul_link', 'libxul_link', vsize, str(vsize))]
return dict(vsize=vsize, testresults=testresults)
except:
return {'testresults': []}
self.addStep(SetProperty(
name='get_linker_vsize',
command=['cat', '%s\\toolkit\\library\\linker-vsize' % self.mozillaObjdir],
extract_fn=get_linker_vsize,
@@ -1549,17 +1549,17 @@ class MercurialBuildFactory(MozillaBuild
del env['MOZ_SIGN_CMD']
if 'mac' in self.platform:
# Need to run this target or else the packaging targets will
# fail.
self.addStep(ShellCommand(
name='postflight_all',
command=self.makeCmd + [
- '-f', 'client.mk', 'postflight_all'],
+ '-f', 'client.mk', 'postflight_all'],
env=env,
haltOnFailure=False,
flunkOnFailure=False,
warnOnFailure=False,
))
pkg_targets = ['package', 'package-tests']
if self.enableInstaller:
pkg_targets.append('installer')
@@ -1587,17 +1587,17 @@ class MercurialBuildFactory(MozillaBuild
workdir='build',
mock=self.use_mock,
target=self.mock_target,
))
if self.l10nCheckTest:
self.addStep(MockCommand(
name='make l10n check pretty',
command=self.makeCmd + [
- 'l10n-check', 'MOZ_PKG_PRETTYNAMES=1'],
+ 'l10n-check', 'MOZ_PKG_PRETTYNAMES=1'],
workdir='build/%s' % self.objdir,
env=env,
haltOnFailure=False,
flunkOnFailure=False,
warnOnFailure=True,
mock=self.use_mock,
target=self.mock_target,
))
@@ -1749,17 +1749,17 @@ class MercurialBuildFactory(MozillaBuild
# packageFilename is undefined (bug 739959)
if self.productName != 'b2g' and self.packageFilename:
self.addFilePropertiesSteps(filename=self.packageFilename,
directory='build/%s/dist' % self.mozillaObjdir,
fileType='package',
haltOnFailure=True)
if self.createSnippet and 'android' not in self.complete_platform:
- self.addCreateUpdateSteps();
+ self.addCreateUpdateSteps()
# Call out to a subclass to do the actual uploading
self.doUpload(uploadMulti=self.multiLocale)
def addCreateSnippetsSteps(self, milestone_extra=''):
if 'android' in self.complete_platform:
cmd = [
'python',
@@ -1839,17 +1839,17 @@ class MercurialBuildFactory(MozillaBuild
'%(toolsdir)s/scripts/updates/balrog-submitter.py'),
'--build-properties', 'buildprops_balrog.json',
'--api-root', self.balrog_api_root,
'--username', self.balrog_username,
'-t', type_, '--verbose',
]
if self.balrog_credentials_file:
credentialsFile = os.path.join(os.getcwd(),
- self.balrog_credentials_file)
+ self.balrog_credentials_file)
target_file_name = os.path.basename(credentialsFile)
cmd.extend(['--credentials-file', target_file_name])
self.addStep(FileDownload(
mastersrc=credentialsFile,
slavedest=target_file_name,
workdir='.',
flunkOnFailure=False,
))
@@ -1984,17 +1984,17 @@ class TryBuildFactory(MercurialBuildFact
build.source.changes) > 0 else "",
))
def doUpload(self, postUploadBuildDir=None, uploadMulti=False):
self.addStep(SetBuildProperty(
name='set_who',
property_name='who',
value=lambda build: str(build.source.changes[0].who) if len(
- build.source.changes) > 0 else "nobody@example.com",
+ build.source.changes) > 0 else "nobody@example.com",
haltOnFailure=True
))
uploadEnv = self.env.copy()
uploadEnv.update({
'UPLOAD_HOST': self.stageServer,
'UPLOAD_USER': self.stageUsername,
'UPLOAD_TO_TEMP': '1',
@@ -2030,17 +2030,17 @@ class TryBuildFactory(MercurialBuildFact
workdir=objdir,
mock=self.use_mock,
target=self.mock_target,
extract_fn=parse_make_upload,
haltOnFailure=True,
description=["upload"],
timeout=40 * 60, # 40 minutes
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, upload_errors),
+ c, s, upload_errors),
locks=[upload_lock.access('counting')],
mock_workdir_prefix=None,
))
talosBranch = "%s-%s-talos" % (self.branchName, self.complete_platform)
sendchange_props = {
'buildid': WithProperties('%(buildid:-)s'),
'builduid': WithProperties('%(builduid:-)s'),
@@ -2132,17 +2132,17 @@ class NightlyBuildFactory(MercurialBuild
def getCompleteMarPatternMatch(self):
marPattern = getPlatformFtpDir(self.platform)
if not marPattern:
return False
marPattern += '.complete.mar'
return marPattern
def previousMarExists(self, step):
- return "previousMarFilename" in step.build.getProperties() and len(step.build.getProperty("previousMarFilename")) > 0;
+ return "previousMarFilename" in step.build.getProperties() and len(step.build.getProperty("previousMarFilename")) > 0
def addCreatePartialUpdateSteps(self, extraArgs=None):
'''This function expects that the following build properties are
already set: buildid, completeMarFilename
'''
# These tools (mar+mbsdiff) should now be built (or downloaded).
mar = '../dist/host/bin/mar'
mbsdiff = '../dist/host/bin/mbsdiff'
@@ -2165,18 +2165,17 @@ class NightlyBuildFactory(MercurialBuild
haltOnFailure=True,
))
self.addStep(MockCommand(
name='unpack_current_mar',
command=['perl',
WithProperties('%(basedir)s/' +
self.absMozillaSrcDir +
'/tools/update-packaging/unwrap_full_update.pl'),
- WithProperties(
- '../dist/update/%(completeMarFilename)s')],
+ WithProperties('../dist/update/%(completeMarFilename)s')],
env=updateEnv,
haltOnFailure=True,
workdir='%s/current' % self.absMozillaObjDir,
mock=self.use_mock,
target=self.mock_target,
))
# The mar file name will be the same from one day to the next,
# *except* when we do a version bump for a release. To cope with
@@ -2187,18 +2186,18 @@ class NightlyBuildFactory(MercurialBuild
self.addStep(SetProperty(
name='get_previous_mar_filename',
description=['get', 'previous', 'mar', 'filename'],
command=['bash', '-c',
WithProperties(
'ssh -l %s -i ~/.ssh/%s %s ' % (self.stageUsername,
self.stageSshKey,
self.stageServer) +
- 'ls -1t %s | grep %s$ | head -n 1' % (self.latestDir,
- marPattern))
+ 'ls -1t %s | grep %s$ | head -n 1' % (self.latestDir,
+ marPattern))
],
extract_fn=marFilenameToProperty(prop_name='previousMarFilename'),
flunkOnFailure=False,
haltOnFailure=False,
warnOnFailure=True
))
previousMarURL = WithProperties('http://%s' % self.stageServer +
'%s' % self.latestDir +
@@ -2491,17 +2490,17 @@ class NightlyBuildFactory(MercurialBuild
command=self.makeCmd + ['-f', 'client.mk', 'upload'],
env=uploadEnv,
workdir='build',
extract_fn=parse_make_upload,
haltOnFailure=True,
description=["upload"],
timeout=60 * 60, # 60 minutes
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, upload_errors),
+ c, s, upload_errors),
locks=[upload_lock.access('counting')],
mock=self.use_mock,
target=self.mock_target,
))
else:
objdir = WithProperties(
'%(basedir)s/' + self.baseWorkDir + '/' + self.objdir)
if self.platform.startswith('win'):
@@ -2707,17 +2706,17 @@ class ReleaseBuildFactory(MercurialBuild
workdir=self.absMozillaObjDir,
haltOnFailure=True,
))
self.addStep(RetryingMockCommand(
name='get_previous_mar',
description=['get', 'previous', 'mar'],
command=[
'wget', '-O', 'previous.mar', '--no-check-certificate',
- previousMarURL],
+ previousMarURL],
mock=self.use_mock,
target=self.mock_target,
workdir='%s/dist' % self.absMozillaObjDir,
haltOnFailure=True,
))
self.addStep(MockCommand(
name='unpack_previous_mar',
description=['unpack', 'previous', 'mar'],
@@ -2883,17 +2882,17 @@ class ReleaseBuildFactory(MercurialBuild
command=self.makeCmd + ['upload'] + upload_vars,
env=uploadEnv,
workdir=objdir,
extract_fn=parse_make_upload,
haltOnFailure=True,
description=['upload'],
timeout=60 * 60, # 60 minutes
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, upload_errors),
+ c, s, upload_errors),
target=self.mock_target,
mock=self.use_mock,
mock_workdir_prefix=None,
))
if self.productName == 'fennec' and not uploadMulti:
cmd = ['scp']
if self.stageSshKey:
@@ -2981,17 +2980,17 @@ class XulrunnerReleaseBuildFactory(Relea
self.addStep(RetryingMockProperty(
command=self.makeCmd + ['-f', 'client.mk', 'upload'],
env=uploadEnv,
workdir='build',
extract_fn=get_url,
haltOnFailure=True,
description=['upload'],
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, upload_errors),
+ c, s, upload_errors),
mock=self.use_mock,
target=self.mock_target,
))
def identToProperties(default_prop=None):
'''Create a method that is used in a SetProperty step to map the
output of make ident to build properties.
@@ -3232,38 +3231,38 @@ class BaseRepackFactory(MozillaBuildFact
name='make_configure',
env=self.env,
command=self.makeCmd + ['-f', 'client.mk', 'configure'],
workdir=self.absSrcDir,
description=['make config'],
haltOnFailure=True,
mock=self.use_mock,
target=self.mock_target,
- )))
+ )))
self.addStep(MockCommand(**self.processCommand(
name='rm_CLOBBER_files',
env=self.env,
- command=['rm','-rf', '%s/CLOBBER' % self.absMozillaObjDir,
- '%s/CLOBBER' % self.absMozillaSrcDir,],
+ command=['rm', '-rf', '%s/CLOBBER' % self.absMozillaObjDir,
+ '%s/CLOBBER' % self.absMozillaSrcDir],
workdir='.',
description=['remove CLOBBER files'],
haltOnFailure=True,
mock=self.use_mock,
target=self.mock_target,
)))
self.addStep(MockCommand(**self.processCommand(
name='compile_nsinstall',
env=self.env,
command=self.makeCmd,
workdir='%s/config' % self.absMozillaObjDir,
description=['compile nsinstall'],
haltOnFailure=True,
mock=self.use_mock,
target=self.mock_target,
- )))
+ )))
def tinderboxPrint(self, propName, propValue):
self.addStep(OutputStep(
name='tinderboxprint_%s' % propName,
data=['TinderboxPrint:',
'%s:' % propName,
propValue]
))
@@ -3281,17 +3280,17 @@ class BaseRepackFactory(MozillaBuildFact
command=self.makeCmd + ['upload',
WithProperties(
'AB_CD=%(locale)s')],
env=self.uploadEnv,
workdir='%s/%s/locales' % (self.absObjDir, self.appName),
haltOnFailure=True,
flunkOnFailure=True,
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, upload_errors),
+ c, s, upload_errors),
locks=[upload_lock.access('counting')],
extract_fn=parse_make_upload,
mock=self.use_mock,
target=self.mock_target,
))
def getPartialInfo(build):
return [{
@@ -3420,17 +3419,17 @@ class BaseRepackFactory(MozillaBuildFact
description='update compare-locales',
workdir='%s/compare-locales' % self.baseWorkDir,
haltOnFailure=True
))
def compareLocales(self):
if self.mergeLocales:
mergeLocaleOptions = ['-m',
- WithProperties('%(basedir)s/' + \
+ WithProperties('%(basedir)s/' +
"%s/merged" % self.baseWorkDir)]
flunkOnFailure = False
haltOnFailure = False
warnOnFailure = True
else:
mergeLocaleOptions = []
flunkOnFailure = True
haltOnFailure = True
@@ -3538,17 +3537,17 @@ class BaseRepackFactory(MozillaBuildFact
target=self.mock_target,
))
prettyEnv['WIN32_INSTALLER_IN'] = WithProperties(
'%(win32_installer_in)s')
self.addStep(MockCommand(
name='repack_installers_pretty',
description=['repack', 'installers', 'pretty'],
command=self.makeCmd + [WithProperties('installers-%(locale)s'),
- WithProperties('LOCALE_MERGEDIR=%(basedir)s/' + \
+ WithProperties('LOCALE_MERGEDIR=%(basedir)s/' +
"%s/merged" % self.baseWorkDir)],
env=prettyEnv,
haltOnFailure=False,
flunkOnFailure=False,
warnOnFailure=True,
workdir='%s/%s/locales' % (self.absObjDir, self.appName),
mock=self.use_mock,
target=self.mock_target,
@@ -3617,17 +3616,17 @@ class NightlyRepackFactory(BaseRepackFac
uploadArgs['to_latest'] = True
self.postUploadCmd = postUploadCmdPrefix(**uploadArgs)
# 2) preparation for updates
if l10nNightlyUpdate and self.nightly:
env.update({'MOZ_MAKE_COMPLETE_MAR': '1',
'DOWNLOAD_BASE_URL': '%s/nightly' % self.downloadBaseURL})
- if not '--enable-update-packaging' in self.extraConfigureArgs:
+ if '--enable-update-packaging' not in self.extraConfigureArgs:
self.extraConfigureArgs += ['--enable-update-packaging']
BaseRepackFactory.__init__(self, env=env, **kwargs)
if l10nNightlyUpdate:
assert ausBaseUploadDir and updatePlatform and downloadBaseURL
assert ausUser and ausSshKey and ausHost
@@ -3666,18 +3665,17 @@ class NightlyRepackFactory(BaseRepackFac
else:
return self.ausFullUploadDir
def updateSources(self):
self.addStep(ShellCommand(
name='update_locale_source',
command=['hg', 'up', '-C', '-r', self.l10nTag],
description='update workdir',
- workdir=WithProperties(
- 'build/l10n/%(locale)s'),
+ workdir=WithProperties('build/l10n/%(locale)s'),
haltOnFailure=True
))
self.addStep(SetProperty(
command=['hg', 'ident', '-i'],
haltOnFailure=True,
property='l10n_revision',
workdir=WithProperties('build/l10n/%(locale)s')
))
@@ -3706,27 +3704,27 @@ class NightlyRepackFactory(BaseRepackFac
Requires that we run make unpack first.
'''
self.addStep(MockCommand(
name='make_unpack',
command=self.makeCmd + ['unpack'],
descriptionDone='unpacked en-US',
haltOnFailure=True,
env=self.env,
- workdir = '%s/%s/locales' % (self.absObjDir,
- self.appName),
+ workdir='%s/%s/locales' % (self.absObjDir,
+ self.appName),
mock=self.use_mock,
target=self.mock_target,
))
self.addStep(MockProperty(
command=self.makeCmd + ['ident'],
haltOnFailure=True,
env=self.env,
- workdir = '%s/%s/locales' % (self.absObjDir,
- self.appName),
+ workdir='%s/%s/locales' % (self.absObjDir,
+ self.appName),
extract_fn=identToProperties(),
mock=self.use_mock,
target=self.mock_target,
))
if self.clientPyConfig:
self.addStep(MockCommand(
name='update_comm_enUS_revision',
command=['hg', 'update', '-C', '-r',
@@ -3822,17 +3820,17 @@ class NightlyRepackFactory(BaseRepackFac
return '.%(locale)s.' + NightlyBuildFactory.getCompleteMarPatternMatch(self)
def doRepack(self):
self.downloadMarTools()
self.addStep(MockCommand(
name='repack_installers',
description=['repack', 'installers'],
command=self.makeCmd + [WithProperties('installers-%(locale)s'),
- WithProperties('LOCALE_MERGEDIR=%(basedir)s/' + \
+ WithProperties('LOCALE_MERGEDIR=%(basedir)s/' +
"%s/merged" % self.baseWorkDir)],
env=self.env,
haltOnFailure=True,
workdir='%s/%s/locales' % (self.absObjDir, self.appName),
mock=self.use_mock,
target=self.mock_target,
))
self.addStep(FindFile(
@@ -4696,17 +4694,17 @@ class MozillaTestFactory(MozillaBuildFac
max_depth=4,
property_name="exepath",
name="find_executable",
))
elif self.platform.startswith('win'):
self.addStep(SetBuildProperty(
property_name="exepath",
value="%s/%s.exe" % (
- self.productName, self.productName),
+ self.productName, self.productName),
))
else:
self.addStep(SetBuildProperty(
property_name="exepath",
value="%s/%s%s" % (self.productName, self.productName,
self.posixBinarySuffix),
))
@@ -4816,17 +4814,17 @@ class MozillaTestFactory(MozillaBuildFac
# but is important on 10.7
self.addStep(ShellCommand(
name="clear_saved_state",
flunkOnFailure=False,
warnOnFailure=False,
haltOnFailure=False,
workdir='/Users/cltbld',
command=['bash', '-c',
- 'rm -rf Library/Saved\ Application\ State/*.savedState']
+ 'rm -rf Library/Saved\ Application\ State/*.savedState']
))
if self.buildsBeforeReboot and self.buildsBeforeReboot > 0:
# This step is to deal with minis running linux that don't reboot properly
# see bug561442
if self.resetHwClock and 'linux' in self.platform:
self.addStep(ShellCommand(
name='set_time',
description=['set', 'time'],
@@ -5000,18 +4998,18 @@ class UnittestPackagedBuildFactory(Mozil
def isVirtualenvSetup(step):
return ("mozmillVirtualenvSetup" in step.build.getProperties() and
len(step.build.getProperty("mozmillVirtualenvSetup")) > 0)
self.addStep(ShellCommand(
name='setup virtualenv',
command=[
- 'python', 'resources/installmozmill.py',
- MOZMILL_VIRTUALENV_DIR, '../mozbase'],
+ 'python', 'resources/installmozmill.py',
+ MOZMILL_VIRTUALENV_DIR, '../mozbase'],
doStepIf=isVirtualenvSetup,
flunkOnFailure=True,
haltOnFailure=True,
workdir='build/mozmill'
))
bindir = 'Scripts' if self.platform.startswith(
'win') else 'bin'
@@ -5038,16 +5036,17 @@ class UnittestPackagedBuildFactory(Mozil
doStepIf=lambda step: not isVirtualenvSetup(step),
env=mozmill_env,
workdir='build/mozmill',
)
if self.platform.startswith('macosx64'):
self.addStep(resolution_step())
+
class TalosFactory(RequestSortingBuildFactory):
extName = 'addon.xpi'
"""Create working talos build factory"""
def __init__(self, OS, supportUrlBase, envName, buildBranch, branchName,
configOptions, talosCmd, customManifest=None, customTalos=None,
workdirBase=None, fetchSymbols=False, plugins=None, pagesets=[],
remoteTests=False, productName="firefox", remoteExtras=None,
talosAddOns=[], releaseTester=False, credentialsFile=None,
@@ -5138,30 +5137,30 @@ class TalosFactory(RequestSortingBuildFa
def addInfoSteps(self):
if self.remoteTests:
self.addStep(ShellCommand(
name="set_shutdown_flag",
description="Setting the shutdown flag",
command=['touch', '../shutdown.stamp'],
workdir='.',
- ))
+ ))
self.addStep(SetProperty(
command=['bash', '-c', 'echo $SUT_IP'],
property='sut_ip'
))
def addCleanupSteps(self):
self.addStep(ShellCommand(
- name='cleanup',
- workdir=self.workdirBase,
- description="Cleanup",
- command='nohup rm -rf *',
- env=self.env)
- )
+ name='cleanup',
+ workdir=self.workdirBase,
+ description="Cleanup",
+ command='nohup rm -rf *',
+ env=self.env)
+ )
self.addStep(ShellCommand(
name='create talos dir',
workdir=self.workdirBase,
description="talos dir creation",
command='mkdir talos',
env=self.env)
)
if not self.remoteTests:
@@ -5306,27 +5305,27 @@ class TalosFactory(RequestSortingBuildFa
workdir=os.path.join(self.workdirBase, "talos"),
flunkOnFailure=False,
))
if self.customManifest:
self.addStep(FileDownload(
mastersrc=self.customManifest,
slavedest="tp3.manifest",
workdir=os.path.join(
- self.workdirBase, "talos/page_load_test"),
+ self.workdirBase, "talos/page_load_test"),
haltOnFailure=True,
))
if self.customTalos is None and not self.remoteTests:
self.addStep(DownloadFile(
url=WithProperties("%(repo_path)s/raw-file/%(revision)s/testing/talos/talos_from_code.py"),
workdir=self.workdirBase,
haltOnFailure=True,
wget_args=['--progress=dot:mega',
- '--no-check-certificate'],
+ '--no-check-certificate'],
log_eval_func=lambda c, s: regex_log_evaluator(
c, s, talos_hgweb_errors),
))
self.addStep(ShellCommand(
name='download files specified in talos.json',
command=[self.pythonWithJson(
self.OS), 'talos_from_code.py',
'--talos-json-url',
@@ -5364,41 +5363,41 @@ class TalosFactory(RequestSortingBuildFa
workdir=self.workdirBase,
))
if self.talos_from_source_code:
self.addStep(RetryingShellCommand(
name='get_talos_from_code_py',
description="Downloading talos_from_code.py",
command=['wget', '--no-check-certificate',
WithProperties(
- "%(repo_path)s/raw-file/%(revision)s/testing/talos/talos_from_code.py")],
+ "%(repo_path)s/raw-file/%(revision)s/testing/talos/talos_from_code.py")],
workdir=self.workdirBase,
haltOnFailure=True,
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, talos_hgweb_errors),
+ c, s, talos_hgweb_errors),
))
self.addStep(RetryingShellCommand(
name='run_talos_from_code_py',
description="Running talos_from_code.py",
command=[self.pythonWithJson(
self.OS), 'talos_from_code.py',
'--talos-json-url',
WithProperties(
- '%(repo_path)s/raw-file/%(revision)s/testing/talos/talos.json')],
+ '%(repo_path)s/raw-file/%(revision)s/testing/talos/talos.json')],
workdir=self.workdirBase,
haltOnFailure=True,
log_eval_func=lambda c, s: regex_log_evaluator(
- c, s, talos_hgweb_errors),
+ c, s, talos_hgweb_errors),
))
else:
self.addStep(RetryingShellCommand(
name='get_talos_zip',
command=[
- 'wget', '-O', 'talos.zip', '--no-check-certificate',
- 'http://talos-bundles.pvt.build.mozilla.org/zips/talos.mobile.old.zip'],
+ 'wget', '-O', 'talos.zip', '--no-check-certificate',
+ 'http://talos-bundles.pvt.build.mozilla.org/zips/talos.mobile.old.zip'],
workdir=self.workdirBase,
haltOnFailure=True,
))
self.addStep(UnpackFile(
filename='talos.zip',
workdir=self.workdirBase,
haltOnFailure=True,
description="Unpack talos.zip",
@@ -5431,41 +5430,41 @@ class TalosFactory(RequestSortingBuildFa
))
def addPluginInstallSteps(self):
if self.plugins:
# 64 bit
if self.OS in ('w764', 'ubuntu64_hw'):
self.addStep(DownloadFile(
url=WithProperties(
- "%s/%s" % (self.supportUrlBase, self.plugins['64'])),
+ "%s/%s" % (self.supportUrlBase, self.plugins['64'])),
workdir=os.path.join(
- self.workdirBase, "talos/base_profile"),
+ self.workdirBase, "talos/base_profile"),
haltOnFailure=True,
))
self.addStep(UnpackFile(
filename=os.path.basename(self.plugins['64']),
workdir=os.path.join(
- self.workdirBase, "talos/base_profile"),
+ self.workdirBase, "talos/base_profile"),
haltOnFailure=True,
))
def addPagesetInstallSteps(self):
for pageset in self.pagesets:
self.addStep(DownloadFile(
url=WithProperties(
"%s/%s" % (self.supportUrlBase, pageset)),
workdir=os.path.join(
- self.workdirBase, "talos/page_load_test"),
+ self.workdirBase, "talos/page_load_test"),
haltOnFailure=True,
))
self.addStep(UnpackFile(
filename=os.path.basename(pageset),
workdir=os.path.join(
- self.workdirBase, "talos/page_load_test"),
+ self.workdirBase, "talos/page_load_test"),
haltOnFailure=True,
))
def addDownloadSymbolsStep(self):
def get_symbols_url(build):
suffixes = ('.tar.bz2', '.dmg', '.zip', '.apk')
buildURL = build.getProperty('fileURL')
@@ -5545,17 +5544,17 @@ class TalosFactory(RequestSortingBuildFa
flunkOnFailure=False,
warnOnFailure=False,
alwaysRun=True,
workdir=self.workdirBase,
description="Reboot Device",
timeout=60 * 30,
command=[
'python', '-u', '/builds/sut_tools/reboot.py',
- WithProperties("%(sut_ip)s"),
+ WithProperties("%(sut_ip)s"),
],
env=self.env,
log_eval_func=lambda c, s: SUCCESS,
))
else:
self.addStep(DisconnectStep(
name='reboot',
flunkOnFailure=False,
@@ -5715,18 +5714,17 @@ class PartnerRepackFactory(ReleaseFactor
command=['bash', '-c',
'ssh -oIdentityFile=~/.ssh/%s %s@%s touch %s/%s/%s'
% (self.stageSshKey, self.stageUsername,
self.stagingServer, self.candidatesDir,
self.partnerUploadDir, 'partner_build_%s' % platform),
],
workdir='%s/scripts/repacked_builds/%s/build%s' % (self.partnersRepackDir,
self.version,
- str(
- self.buildNumber)),
+ str(self.buildNumber)),
description=['upload', 'partner', 'status'],
haltOnFailure=True
))
def rc_eval_func(exit_statuses):
def eval_func(cmd, step):
rc = cmd.rc
@@ -5880,18 +5878,18 @@ class ScriptFactory(RequestSortingBuildF
mastersrc=os.path.join(os.getcwd(), 'BuildSlaves.py'),
slavedest='oauth.txt',
workdir='.',
flunkOnFailure=False,
))
self.addStep(OutputStep(
name='tinderboxprint_script_revlink',
data=WithProperties(
- 'TinderboxPrint: %s_revlink: %s/rev/%%(script_repo_revision)s' % \
- (scriptRepo.split('/')[-1], scriptRepo)),
+ 'TinderboxPrint: %s_revlink: %s/rev/%%(script_repo_revision)s' %
+ (scriptRepo.split('/')[-1], scriptRepo)),
))
self.runScript()
self.addCleanupSteps()
self.reboot()
def addCleanupSteps(self):
# remove oauth.txt file, we don't wanna to leave keys lying around
if self.use_credentials_file:
--- a/process/release.py
+++ b/process/release.py
@@ -63,22 +63,22 @@ def generateReleaseBranchObjects(release
tools_repo_path = releaseConfig.get('build_tools_repo_path',
branchConfig['build_tools_repo_path'])
tools_repo = '%s%s' % (branchConfig['hgurl'], tools_repo_path)
config_repo = '%s%s' % (branchConfig['hgurl'],
branchConfig['config_repo_path'])
mozharness_repo_path = releaseConfig.get('mozharness_repo_path',
branchConfig['mozharness_repo_path'])
mozharness_repo = '%s%s' % (branchConfig['hgurl'], mozharness_repo_path)
- clobberer_url = releaseConfig.get(
- 'base_clobber_url', branchConfig['base_clobber_url'])
- balrog_api_root=releaseConfig.get('balrog_api_root',
- branchConfig.get('balrog_api_root', None))
- balrog_username=releaseConfig.get('balrog_username',
- branchConfig.get('balrog_username', None))
+ clobberer_url = releaseConfig.get('base_clobber_url',
+ branchConfig['base_clobber_url'])
+ balrog_api_root = releaseConfig.get('balrog_api_root',
+ branchConfig.get('balrog_api_root', None))
+ balrog_username = releaseConfig.get('balrog_username',
+ branchConfig.get('balrog_username', None))
branchConfigFile = getRealpath('localconfig.py')
unix_slaves = []
mock_slaves = []
all_slaves = []
for p in branchConfig['platforms']:
if p == 'b2g':
continue
@@ -314,17 +314,19 @@ def generateReleaseBranchObjects(release
updates_upstream_builders = []
post_signing_builders = []
post_update_builders = []
deliverables_builders = []
xr_deliverables_builders = []
post_deliverables_builders = []
post_antivirus_builders = []
email_message_id = getMessageId()
- ##### Builders
+
+ # Builders #
+
builder_env = {
'BUILDBOT_CONFIGS': '%s%s' % (branchConfig['hgurl'],
branchConfig['config_repo_path']),
'BUILDBOTCUSTOM': '%s%s' % (branchConfig['hgurl'],
branchConfig['buildbotcustom_repo_path']),
'CLOBBERER_URL': clobberer_url,
}
@@ -612,17 +614,17 @@ def generateReleaseBranchObjects(release
if platform in releaseConfig['l10nPlatforms']:
triggeredSchedulers = [builderPrefix('%s_repack' % platform)]
else:
triggeredSchedulers = None
multiLocaleConfig = releaseConfig.get(
'multilocale_config', {}).get('platforms', {}).get(platform)
mozharnessMultiOptions = releaseConfig.get(
'multilocale_config', {}).get('multilocaleOptions')
- balrog_credentials_file=releaseConfig.get('balrog_credentials_file',
+ balrog_credentials_file = releaseConfig.get('balrog_credentials_file',
branchConfig.get('balrog_credentials_file', None))
# Turn pymake on by default for Windows, and off by default for
# other platforms.
if 'win' in platform:
enable_pymake = pf.get('enable_pymake', True)
else:
enable_pymake = pf.get('enable_pymake', False)
build_factory = ReleaseBuildFactory(
@@ -747,17 +749,17 @@ def generateReleaseBranchObjects(release
])
if releaseConfig.get('enablePartialMarsAtBuildTime', True):
extra_args.append('--generate-partials')
if releaseConfig.get('l10nUsePymake', True) and \
platform in ('win32', 'win64'):
extra_args.append('--use-pymake')
if pf.get('tooltool_l10n_manifest_src'):
extra_args.extend(['--tooltool-manifest',
- pf.get('tooltool_l10n_manifest_src')])
+ pf.get('tooltool_l10n_manifest_src')])
if pf.get('tooltool_script'):
for script in pf['tooltool_script']:
extra_args.extend(['--tooltool-script', script])
for url in branchConfig['tooltool_url_list']:
extra_args.extend(['--tooltool-url', url])
if balrog_api_root:
extra_args.extend([
"--balrog-api-root", balrog_api_root,
@@ -773,17 +775,19 @@ def generateReleaseBranchObjects(release
extra_args=extra_args,
use_mock=use_mock(platform),
mock_target=pf.get('mock_target'),
mock_packages=pf.get('mock_packages'),
mock_copyin_files=pf.get('mock_copyin_files'),
use_credentials_file=True,
copy_properties=['buildid'],
)
- # TODO: how to make this work with balrog, where we need 4 properties set (but webstatus only allows for 3). can we avoid the need for script_repo_revision or release_tag?
+ # TODO: how to make this work with balrog, where we need 4 properties
+ # set (but webstatus only allows for 3).
+ # Can we avoid the need for script_repo_revision or release_tag?
builders.append({
'name': builderPrefix("standalone_repack", platform),
'slavenames': pf.get('l10n_slaves', pf['slaves']),
'category': builderPrefix(''),
'builddir': builderPrefix("standalone_repack", platform),
'slavebuilddir': normalizeName(builderPrefix(
'standalone_repack', platform), releaseConfig['productName']),
'factory': standalone_factory,
@@ -795,18 +799,17 @@ def generateReleaseBranchObjects(release
"standalone_repack", platform), releaseConfig['productName']),
'platform': platform,
'branch': 'release-%s' % sourceRepoInfo['name'],
'release_config': releaseConfigFile,
}
})
for n, builderName in l10nBuilders(platform).iteritems():
- builddir = builderPrefix('%s_repack' % platform) + \
- '_' + str(n)
+ builddir = builderPrefix('%s_repack' % platform) + '_' + str(n)
properties = {
'builddir': builddir,
'slavebuilddir': normalizeName(builddir, releaseConfig['productName']),
'release_config': releaseConfigFile,
'platform': platform,
'branch': 'release-%s' % sourceRepoInfo['name'],
'chunkTotal': int(l10nChunks),
'chunkNum': int(n),
@@ -881,18 +884,18 @@ def generateReleaseBranchObjects(release
'properties': properties,
})
builders.append(makeDummyBuilder(
name=builderPrefix('repack_complete', platform),
slaves=all_slaves,
category=builderPrefix(''),
properties={
- 'platform': platform,
- 'branch': 'release-%s' % sourceRepoInfo['name'],
+ 'platform': platform,
+ 'branch': 'release-%s' % sourceRepoInfo['name'],
},
))
updates_upstream_builders.append(
builderPrefix('repack_complete', platform))
deliverables_builders.append(
builderPrefix('repack_complete', platform))
if platform in releaseConfig['unittestPlatforms']:
@@ -1135,17 +1138,17 @@ def generateReleaseBranchObjects(release
if releaseConfig.get('verifyConfigs') and \
not releaseConfig.get('skip_updates'):
pf = branchConfig['platforms']['linux']
try:
moz_repo_path = releaseConfig[
'sourceRepositories']['mozilla']['path']
except KeyError:
moz_repo_path = sourceRepoInfo['path']
- balrog_credentials_file=releaseConfig.get('balrog_credentials_file',
+ balrog_credentials_file = releaseConfig.get('balrog_credentials_file',
branchConfig.get('balrog_credentials_file', None))
updates_factory = ReleaseUpdatesFactory(
hgHost=branchConfig['hghost'],
repoPath=sourceRepoInfo['path'],
buildToolsRepoPath=tools_repo_path,
configRepoPath=branchConfig['config_repo_path'],
patcherConfig=releaseConfig['patcherConfig'],
verifyConfigs=releaseConfig['verifyConfigs'],
@@ -1349,18 +1352,18 @@ def generateReleaseBranchObjects(release
post_deliverables_builders.append(builderPrefix('check_permissions'))
if not releaseConfig.get('disableVirusCheck'):
antivirus_factory = ScriptFactory(
scriptRepo=tools_repo,
script_timeout=3 * 60 * 60,
scriptName='scripts/release/stage-tasks.sh',
extra_args=['antivirus',
- '--ssh-user', branchConfig['stage_username'],
- '--ssh-key', branchConfig['stage_ssh_key'],
+ '--ssh-user', branchConfig['stage_username'],
+ '--ssh-key', branchConfig['stage_ssh_key'],
],
)
builders.append({
'name': builderPrefix('antivirus'),
'slavenames': unix_slaves,
'category': builderPrefix(''),
'builddir': builderPrefix('antivirus'),
@@ -1620,17 +1623,17 @@ def generateReleaseBranchObjects(release
'properties': {
'slavebuilddir': normalizeName(builderPrefix('bncr_sub'), releaseConfig['productName']),
'release_config': releaseConfigFile,
'platform': None,
'branch': 'release-%s' % sourceRepoInfo['name'],
}
})
- ##### Change sources and Schedulers
+ # Change sources and Schedulers #
reset_schedulers_scheduler = Scheduler(
name=builderPrefix(
'%s_reset_schedulers' % releaseConfig['productName']),
branch=sourceRepoInfo['path'],
treeStableTimer=None,
builderNames=[builderPrefix(
'%s_reset_schedulers' % releaseConfig['productName'])],
@@ -1773,18 +1776,18 @@ def generateReleaseBranchObjects(release
if releaseConfig.get('enableAutomaticPushToMirrors') and \
releaseConfig.get('verifyConfigs'):
if releaseConfig.get('disableVirusCheck'):
post_update_builders.append(builderPrefix('%s_push_to_mirrors' % releaseConfig['productName']))
else:
post_antivirus_builders.append(builderPrefix('%s_push_to_mirrors' % releaseConfig['productName']))
if releaseConfig.get('enableAutomaticPushToMirrors') and \
- hasPlatformSubstring(releaseConfig['enUSPlatforms'], 'android'):
- post_deliverables_builders.append(builderPrefix('%s_push_to_mirrors' % releaseConfig['productName']))
+ hasPlatformSubstring(releaseConfig['enUSPlatforms'], 'android'):
+ post_deliverables_builders.append(builderPrefix('%s_push_to_mirrors' % releaseConfig['productName']))
if not hasPlatformSubstring(releaseConfig['enUSPlatforms'], 'android'):
schedulers.append(AggregatingScheduler(
name=builderPrefix(
'%s_signing_done' % releaseConfig['productName']),
branch=sourceRepoInfo['path'],
upstreamBuilders=updates_upstream_builders,
builderNames=post_signing_builders,
@@ -1828,17 +1831,17 @@ def generateReleaseBranchObjects(release
if releaseConfig['doPartnerRepacks'] and \
not hasPlatformSubstring(releaseConfig['enUSPlatforms'], 'android'):
# TODO: revisit this once we have android partner repacks
for platform in releaseConfig.get('partnerRepackPlatforms',
releaseConfig['l10nPlatforms']):
schedulers.append(AggregatingScheduler(
name=builderPrefix(
'%s_l10n_done' % releaseConfig['productName'],
- platform),
+ platform),
branch=sourceRepoInfo['path'],
upstreamBuilders=[builderPrefix('repack_complete', platform)],
builderNames=[builderPrefix('partner_repack', platform)],
))
upstream_builders = [builderPrefix('%s_push_to_mirrors' % releaseConfig['productName'])]
if releaseConfig.get('verifyConfigs'):
upstream_builders.append(builderPrefix('updates'))
if not releaseConfig.get('disableBouncerEntries'):
@@ -1908,57 +1911,57 @@ def generateReleaseBranchObjects(release
for recipient in releaseConfig['ImportantRecipients']:
if hasPlatformSubstring(releaseConfig['enUSPlatforms'], 'android'):
# send a message when android signing is complete
status.append(ChangeNotifier(
fromaddr="release@mozilla.com",
relayhost="mail.build.mozilla.org",
sendToInterestedUsers=False,
extraRecipients=[recipient],
- extraHeaders={'In-Reply-To':
- email_message_id, 'References': email_message_id},
+ extraHeaders={'In-Reply-To': email_message_id,
+ 'References': email_message_id},
branches=[builderPrefix('android_post_signing')],
messageFormatter=createReleaseChangeMessage,
changeIsImportant=lambda c:
changeContainsProperties(c, dict(who=enUS_signed_apk_url))
))
# send the nice(passing) release messages
status.append(MailNotifier(
fromaddr='release@mozilla.com',
sendToInterestedUsers=False,
extraRecipients=releaseConfig['ImportantRecipients'],
extraHeaders={'In-Reply-To': email_message_id,
- 'References': email_message_id},
+ 'References': email_message_id},
mode='passing',
builders=important_builders,
relayhost='mail.build.mozilla.org',
messageFormatter=createReleaseMessage,
))
# send all release messages
status.append(MailNotifier(
fromaddr='release@mozilla.com',
sendToInterestedUsers=False,
extraRecipients=releaseConfig['AllRecipients'],
extraHeaders={'In-Reply-To': email_message_id,
- 'References': email_message_id},
+ 'References': email_message_id},
mode='all',
builders=[b['name'] for b in builders + test_builders],
relayhost='mail.build.mozilla.org',
messageFormatter=createReleaseMessage,
))
if releaseConfig.get('AVVendorsRecipients'):
status.append(MailNotifier(
fromaddr='release@mozilla.com',
sendToInterestedUsers=False,
extraRecipients=releaseConfig['AVVendorsRecipients'],
- extraHeaders={'In-Reply-To':
- email_message_id, 'References': email_message_id},
+ extraHeaders={'In-Reply-To': email_message_id,
+ 'References': email_message_id},
mode='passing',
builders=[builderPrefix('updates')],
relayhost='mail.build.mozilla.org',
messageFormatter=createReleaseAVVendorsMessage,
))
builders.extend(test_builders)
@@ -1976,9 +1979,8 @@ def generateReleaseBranchObjects(release
props['product'] = releaseConfig['productName'].capitalize()
return {
"builders": builders,
"status": status,
"change_source": change_source,
"schedulers": schedulers,
}
-
--- a/status/db/model.py
+++ b/status/db/model.py
@@ -23,40 +23,34 @@ def connect(url, drop_all=False, **kwarg
log.warn("DBMSG: Warning, dropping all tables")
Base.metadata.drop_all()
Base.metadata.create_all()
global Session
Session = sqlalchemy.orm.sessionmaker(bind=Base.metadata.bind)
return Session
file_changes = Table('file_changes', Base.metadata,
- Column(
- 'file_id', Integer, ForeignKey('files.id'), nullable=False,
- index=True),
+ Column('file_id', Integer, ForeignKey('files.id'),
+ nullable=False, index=True),
Column('change_id', Integer, ForeignKey('changes.id'),
nullable=False, index=True),
)
build_properties = Table('build_properties', Base.metadata,
- Column(
- 'property_id', Integer, ForeignKey(
- 'properties.id'),
- nullable=False, index=True),
+ Column('property_id', Integer, ForeignKey('properties.id'),
+ nullable=False, index=True),
Column('build_id', Integer, ForeignKey('builds.id'),
nullable=False, index=True),
)
request_properties = Table('request_properties', Base.metadata,
- Column(
- 'property_id', Integer, ForeignKey(
- 'properties.id'),
- nullable=False, index=True),
- Column(
- 'request_id', Integer, ForeignKey('requests.id'),
- nullable=False, index=True),
+ Column('property_id', Integer, ForeignKey('properties.id'),
+ nullable=False, index=True),
+ Column('request_id', Integer, ForeignKey('requests.id'),
+ nullable=False, index=True),
)
# TODO: track ordering?
build_requests = Table('build_requests', Base.metadata,
Column('build_id', Integer, ForeignKey('builds.id'),
nullable=False, index=True),
Column('request_id', Integer, ForeignKey('requests.id'),
nullable=False, index=True),
@@ -135,17 +129,19 @@ class Property(Base):
@classmethod
def fromBBProperties(cls, session, props):
"""Return a list of Property objects that reflect a buildbot Properties
object."""
names = [unicode(p[0]) for p in props.asList()]
values = [p[1] for p in props.asList()]
sources = [unicode(p[2]) for p in props.asList()]
- all = session.query(cls).filter(cls.name.in_(names)).filter(sqlalchemy.or_(cls.value.in_(values), cls.value == None)).filter(cls.source.in_(sources)).all()
+ all = (session.query(cls).filter(cls.name.in_(names))
+ .filter(sqlalchemy.or_(cls.value.in_(values), cls.value.is_(None)))
+ .filter(cls.source.in_(sources)).all())
retval = []
for prop in all:
if prop.name in props and props[prop.name] == prop.value and \
props.getPropertySource(prop.name) == prop.source:
retval.append(prop)
new_props = set(names) - set([p.name for p in retval])
@@ -259,19 +255,19 @@ class Builder(Base):
name = unicode(name)
b = session.query(
cls).filter_by(name=name, master_id=master_id).first()
if not b:
b = cls(name=name, master_id=master_id)
session.add(b)
return b
-Builder.slaves = relation(BuilderSlave, primaryjoin=
- and_(BuilderSlave.builder_id == Builder.id,
- BuilderSlave.removed == None))
+Builder.slaves = relation(BuilderSlave,
+ primaryjoin=and_(BuilderSlave.builder_id == Builder.id,
+ BuilderSlave.removed.is_(None)))
class Change(Base):
__tablename__ = "changes"
id = Column(Integer, primary_key=True)
number = Column(Integer, nullable=False)
branch = Column(Unicode(50), nullable=True)
revision = Column(Unicode(50), nullable=True)
--- a/status/errors.py
+++ b/status/errors.py
@@ -1,11 +1,12 @@
import re
-from buildbot.status.builder import EXCEPTION, FAILURE, RETRY, WARNINGS
+from buildbot.status.builder import FAILURE, RETRY, WARNINGS
+
def re_compile(s):
return re.compile(s)
global_errors = ((re_compile("No space left on device"), RETRY),
# Bug 1018531: only RETRY with "Remote Device Error" if *not* preceded by "Caught Exception: "
(re_compile("(?<!INFO - Caught Exception: )Remote Device Error"), RETRY),
(re_compile("devicemanager.DMError"), RETRY),
--- a/status/generators.py
+++ b/status/generators.py
@@ -45,20 +45,20 @@ def getSensibleCommitTitle(titles):
"""
Returns the first non-trychooser title with unnecessary cruft removed.
"""
for title in titles:
# Remove trychooser syntax.
title = re.sub(r'\btry: .*', '', title)
# Remove MQ cruft.
- title = re.sub(r'^(imported patch|\[mq\]:) ', '', title);
+ title = re.sub(r'^(imported patch|\[mq\]:) ', '', title)
# Remove review, feedback, etc. annotations.
title = re.sub(r'\b(r|sr|f|a)[=\?].*', '', title)
# Remove trailing punctuation and whitespace.
- title = re.sub(r'[;,\-\. ]+$', '', title).strip();
+ title = re.sub(r'[;,\-\. ]+$', '', title).strip()
if title:
return title
return titles[0]
--- a/status/mail.py
+++ b/status/mail.py
@@ -70,17 +70,17 @@ class ChangeNotifier(base.StatusReceiver
assert isinstance(r, str)
assert mail.VALID_EMAIL.search(
r) # require full email addresses, not User names
self.extraRecipients = extraRecipients
else:
self.extraRecipients = []
# you should either limit on branches or categories, not both
- assert not (self.branches != None and self.categories != None)
+ assert not (self.branches is not None and self.categories is not None)
def setServiceParent(self, parent):
"""
@type parent: L{buildbot.master.BuildMaster}
"""
base.StatusReceiverMultiService.setServiceParent(self, parent)
self.setup()
@@ -191,17 +191,17 @@ class ChangeNotifier(base.StatusReceiver
# if we're sending to interested users move the extra's to the CC
# list so they can tell if they are also interested in the change
# unless there are no interested users
if self.sendToInterestedUsers and len(recipients):
extra_recips = self.extraRecipients[:]
extra_recips.sort()
m['CC'] = ", ".join(extra_recips)
else:
- [recipients.add(r) for r in self.extraRecipients[:]]
+ [recipients.add(r2) for r2 in self.extraRecipients[:]]
rlist = list(recipients)
rlist.sort()
m['To'] = ", ".join(rlist)
# The extras weren't part of the TO list so add them now
if self.sendToInterestedUsers:
for r in self.extraRecipients:
--- a/status/pulse.py
+++ b/status/pulse.py
@@ -130,17 +130,17 @@ class PulseStatus(StatusPush):
e['master_incarnation'] = \
self.status.botmaster.master_incarnation
# Transform time tuples to standard pulse time format
to_write.append(e)
end = time.time()
log.msg("Pulse %s: Processed %i events (%i heartbeats) "
"in %.2f seconds" %
- (hexid(self), count, heartbeats, (end - start)))
+ (hexid(self), count, heartbeats, (end - start)))
try:
self.queuedir.add(json.dumps(to_write))
except:
# Try again later?
self.queue.insertBackChunk(events)
log.err()
# If we still have more stuff, send it in a bit
@@ -177,17 +177,17 @@ class PulseStatus(StatusPush):
# OH NOES!
try:
log.msg("Pulse %s: heartbeat" % (hexid(self),))
self.push("heartbeat")
except:
log.msg("Pulse %s: failed to send heartbeat" % (hexid(self),))
log.err()
- ### Events we publish
+ # Events we publish #
def buildStarted(self, builderName, build):
builderName = escape(self._translateBuilderName(builderName))
self.push("build.%s.%i.started" % (builderName, build.number),
build=build)
return self
def buildFinished(self, builderName, build, results):
@@ -213,56 +213,56 @@ class PulseStatus(StatusPush):
def requestCancelled(self, builder, request):
builderName = escape(self._translateBuilderName(builder.name))
self.push("request.%s.cancelled" % builderName, request=request)
def stepStarted(self, build, step):
builderName = escape(self._translateBuilderName(build.builder.name))
self.push("build.%s.%i.step.%s.started" %
- (builderName, build.number, escape(step.name)),
+ (builderName, build.number, escape(step.name)),
properties=build.getProperties().asList(),
step=step)
# If logging is enabled, return ourself to subscribe to log events for
# this step
if self.send_logs:
return self
def stepFinished(self, build, step, results):
builderName = escape(self._translateBuilderName(build.builder.name))
self.push("build.%s.%i.step.%s.finished" %
- (builderName, build.number, escape(step.name)),
+ (builderName, build.number, escape(step.name)),
properties=build.getProperties().asList(),
step=step,
results=results)
- ### Optional logging events
+ # Optional logging events #
def logStarted(self, build, step, log):
builderName = escape(self._translateBuilderName(build.builder.name))
self.push("build.%s.%i.step.%s.log.%s.started" %
- (builderName, build.number, escape(step.name), log.name))
+ (builderName, build.number, escape(step.name), log.name))
return self
def logChunk(self, build, step, log, channel, text):
# TODO: Strip out bad UTF-8 characters
builderName = escape(self._translateBuilderName(build.builder.name))
self.push("build.%s.%i.step.%s.log.%s.chunk" %
- (builderName, build.number, escape(step.name), log.name),
+ (builderName, build.number, escape(step.name), log.name),
channel=channel,
text=text,
)
def logFinished(self, build, step, log):
builderName = escape(self._translateBuilderName(build.builder.name))
self.push("build.%s.%i.step.%s.log.%s.finished" %
- (builderName, build.number, escape(step.name), log.name))
+ (builderName, build.number, escape(step.name), log.name))
return self
- ### Events we ignore
+ # Events we ignore #
def buildsetSubmitted(self, buildset):
pass
def builderChangedState(self, builderName, state):
pass
def builderRemoved(self, builderName):
--- a/status/queued_command.py
+++ b/status/queued_command.py
@@ -16,17 +16,17 @@ class QueuedCommandHandler(base.StatusRe
base.StatusReceiverMultiService.__init__(self)
self.command = command
self.queuedir = queuedir
self.categories = categories
self.builders = builders
# you should either limit on builders or categories, not both
- if self.builders != None and self.categories != None:
+ if self.builders is not None and self.categories is not None:
twlog.err("Please specify only builders to ignore or categories to include")
raise ValueError("Please specify only builders or categories")
self.watched = []
def startService(self):
base.StatusReceiverMultiService.startService(self)
self.master_status = self.parent.getStatus()
@@ -35,17 +35,17 @@ class QueuedCommandHandler(base.StatusRe
def stopService(self):
self.master_status.unsubscribe(self)
for w in self.watched:
w.unsubscribe(self)
base.StatusReceiverMultiService.stopService(self)
def builderAdded(self, name, builder):
# only subscribe to builders we are interested in
- if self.categories != None and builder.category not in self.categories:
+ if self.categories is not None and builder.category not in self.categories:
return None
self.watched.append(builder)
return self # subscribe to this builder
def buildStarted(self, builderName, build):
pass
--- a/steps/unittest.py
+++ b/steps/unittest.py
@@ -41,39 +41,39 @@ def emphasizeFailureText(text):
# Expected values for leaked: False, no leak; True, leaked; None, report
# failure.
def summaryText(passCount, failCount, knownFailCount=None,
crashed=False, leaked=False):
# Format the tests counts.
if passCount < 0 or failCount < 0 or \
- (knownFailCount != None and knownFailCount < 0):
+ (knownFailCount is not None and knownFailCount < 0):
# Explicit failure case.
summary = emphasizeFailureText("T-FAIL")
elif passCount == 0 and failCount == 0 and \
- (knownFailCount == None or knownFailCount == 0):
+ (knownFailCount is None or knownFailCount == 0):
# Implicit failure case.
summary = emphasizeFailureText("T-FAIL")
else:
# Handle failCount.
failCountStr = str(failCount)
if failCount > 0:
failCountStr = emphasizeFailureText(failCountStr)
# Format the counts.
summary = "%d/%s" % (passCount, failCountStr)
- if knownFailCount != None:
+ if knownFailCount is not None:
summary += "/%d" % knownFailCount
# Format the crash status.
if crashed:
summary += " %s" % emphasizeFailureText("CRASH")
# Format the leak status.
- if leaked != False:
+ if leaked is not False:
summary += " %s" % emphasizeFailureText(
(leaked and "LEAK") or "L-FAIL")
return summary
# otherIdent can be None if the test suite does not have this feature (yet).
@@ -615,16 +615,17 @@ class RemoteMochitestStep(MochitestMixin
self.command.extend(['--test-path', testPath])
if testManifest:
self.command.extend(['--run-only-tests', testManifest])
if symbols_path:
self.command.append(
WithProperties("--symbols-path=%s" % symbols_path))
self.command.extend(self.getChunkOptions(totalChunks, thisChunk))
+
class RemoteMochitestBrowserChromeStep(RemoteMochitestStep):
def __init__(self, **kwargs):
self.super_class = RemoteMochitestStep
RemoteMochitestStep.__init__(self, **kwargs)
def createSummary(self, log):
self.addCompleteLog(
'summary', summarizeLogRemoteMochitest(self.name, log))
--- a/test/test_misc_nextslaves.py
+++ b/test/test_misc_nextslaves.py
@@ -103,17 +103,16 @@ class TestNextAWSSlave(unittest.TestCase
self.assertEquals("slave-ec2",
f(self.builder, ondemand).slave.slavename)
self.assertEquals(_get_pending.called, 0)
# Spot instances should be preferred
self.assertEquals("slave-spot-001",
f(self.builder, spot + ondemand).slave.slavename)
-
def test_nextAWSSlave_AWS_wait(self):
"""Test that we'll wait up to aws_wait for inhouse instances to become
available"""
f = _nextAWSSlave(aws_wait=60)
inhouse, ondemand, spot = _classifyAWSSlaves(self.slaves)
# We need to mock out _get_pending so that we don't have to create a db
# for these tests
with mock.patch.object(buildbotcustom.misc, "_get_pending") as \
@@ -170,17 +169,17 @@ class TestGetPending(unittest.TestCase):
basedir = "test_misc_nextslaves"
def setUp(self):
if os.path.exists(self.basedir):
shutil.rmtree(self.basedir)
os.makedirs(self.basedir)
spec = dbspec.DBSpec.from_url("sqlite:///state.sqlite", self.basedir)
# For testing against mysql, uncomment this
- #spec = dbspec.DBSpec.from_url("mysql://buildbot@localhost/buildbot_schedulers", self.basedir)
+ # spec = dbspec.DBSpec.from_url("mysql://buildbot@localhost/buildbot_schedulers", self.basedir)
manager = DBSchemaManager(spec, self.basedir)
manager.upgrade()
self.dbc = connector.DBConnector(spec)
self.dbc.start()
def tearDown(self):
self.dbc.stop()
--- a/test/test_status.py
+++ b/test/test_status.py
@@ -15,12 +15,13 @@ SENSIBLE_TITLE_TESTCASES = [
['test imported patch test.patch', ['test imported patch test.patch']],
['Bug 1 - Test', ['Bug 1 - Test; r=me']],
['Bug 1 - Test', ['Bug 1 - Test. r?me f=you']],
['Bug 1', [' Bug 1;,.- ']],
]
+
class TestGenerator(unittest.TestCase):
def testGetSensibleCommitTitle(self):
for case in SENSIBLE_TITLE_TESTCASES:
self.assertEquals(getSensibleCommitTitle(case[1]), case[0])
--- a/test/test_test_order.py
+++ b/test/test_test_order.py
@@ -292,19 +292,18 @@ class TestTestOrder(unittest.TestCase):
# Rebuild r1
def addRebuild(t):
c3 = Change(
who='me!', branch='b1', revision='r1', files=['http://path/to/build'],
comments='really important', properties={'buildid': '20110214000001'}, when=6)
self.dbc.addChangeToDatabase(c3)
ss = SourceStamp(branch='b1', changes=[c3], revision='r1')
- ss1 = ss.getAbsoluteSourceStamp('r1')
ssid = self.dbc.get_sourcestampid(ss, t)
- bsid = self.dbc.create_buildset(
+ self.dbc.create_buildset(
ssid, "rebuild", Properties(), ["b1"], t)
d.addCallback(lambda ign: self.dbc.runInteractionNow(addRebuild))
builder = self.makeBuilder(RequestSortingBuildFactory)
# Check that we have three build requests
def checkRequests(ign):
--- a/test/test_try_parser.py
+++ b/test/test_try_parser.py
@@ -219,24 +219,16 @@ class TestTryParser(unittest.TestCase):
self.customBuilders = TryParser(
tm, VALID_BUILDER_B2G_NAMES, BUILDER_PRETTY_B2G_NAMES)
builders = ['b2g_try_emulator build']
builders = self.filterBuilders(['emulator'],
pretties=BUILDER_PRETTY_B2G_NAMES,
valid=VALID_BUILDER_B2G_NAMES)
self.assertEquals(sorted(self.customBuilders), sorted(builders))
- def test_AllPlatformsBoth(self):
- tm = 'try: -b od -p all'
- self.customBuilders = TryParser(
- tm, VALID_BUILDER_NAMES, BUILDER_PRETTY_NAMES)
- builders = [b for b in BUILDER_PRETTY_NAMES.values(
- ) if 'nondefault' not in b]
- self.assertEqual(sorted(self.customBuilders), sorted(builders))
-
def test_FullPlatformsBoth(self):
tm = 'try: -b od -p full'
self.customBuilders = TryParser(
tm, VALID_BUILDER_NAMES, BUILDER_PRETTY_NAMES)
builders = VALID_BUILDER_NAMES
self.assertEqual(sorted(self.customBuilders), sorted(builders))
def test_FullPlatformsOpt(self):
@@ -516,18 +508,18 @@ class TestTryParser(unittest.TestCase):
builders = [
b for b in self.baselineBuilders if '5.1' in b or 'crash' in b]
self.assertEqual(sorted(self.customBuilders), sorted(builders))
def test_bug875252(self):
tm = 'try: -b do -p win32 -u crashtest[5.1,Windows XP]'
self.customBuilders = TryParser(tm, VALID_TESTER_NAMES, TESTER_PRETTY_NAMES, None, UNITTEST_SUITES)
builders = [b for b in self.baselineBuilders
- if 'crashtest' in b
- and ('5.1' in b or 'Windows XP' in b)]
+ if 'crashtest' in b
+ and ('5.1' in b or 'Windows XP' in b)]
self.assertEqual(sorted(self.customBuilders), sorted(builders))
def test_HiddenCharactersAndOldSyntax(self):
tm = 'attributes\ntry: -b o -p linux64 -m none -u reftest -t none'
self.customBuilders = TryParser(tm, VALID_BUILDER_NAMES, BUILDER_PRETTY_NAMES, None, UNITTEST_SUITES)
builders = [BUILDER_PRETTY_NAMES['linux64']]
self.assertEqual(sorted(self.customBuilders), sorted(builders))