backed out by | 4c9ab7d45852
author | Ricky Stewart <rstewart@mozilla.com>
date | Wed, 21 Oct 2020 21:29:30 +0000
changeset 553925 | 94ec15429e21553241fb9addd2c15478068e72b6
parent 553924 | 7558c8821a074b6f7c1e7d9314976e6b66176e5c
child 553926 | 2d480000f34ebdb1b02bf671dafd89b5a1a228f7
push id | 37885
push user | apavel@mozilla.com
push date | Thu, 22 Oct 2020 14:54:09 +0000
treeherder | mozilla-central@5684c9b12b5e
reviewers | sylvestre, perftest-reviewers, geckoview-reviewers, agi
bugs | 1672023
milestone | 84.0a1
first release with | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without | nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
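Every hunk in the five file diffs below is a pure reformat: no logic changes, only rewrapping, quote normalization, and literal normalization, consistent with running the Black formatter over the tree (bug 1672023). As a hedged sketch, a reformat of this kind can be reproduced with the black package; the directory list here is illustrative, not the actual invocation from the bug:

```python
# Illustrative only: re-running Black over the touched directories.
# Assumes the `black` package is installed; the paths are examples,
# not the exact command used for this changeset.
import subprocess

subprocess.run(
    ["python", "-m", "black", "browser/", "build/", "config/", "js/"],
    check=True,
)
```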
--- a/browser/components/migration/tests/marionette/test_refresh_firefox.py +++ b/browser/components/migration/tests/marionette/test_refresh_firefox.py @@ -36,75 +36,87 @@ class TestFirefoxRefresh(MarionetteTestC _formHistoryValue = "special-pumpkin-value" _formAutofillAvailable = False _formAutofillAddressGuid = None _expectedURLs = ["about:robots", "about:mozilla"] def savePassword(self): - self.runCode(""" + self.runCode( + """ let myLogin = new global.LoginInfo( "test.marionette.mozilla.com", "http://test.marionette.mozilla.com/some/form/", null, arguments[0], arguments[1], "username", "password" ); Services.logins.addLogin(myLogin) - """, script_args=(self._username, self._password)) + """, + script_args=(self._username, self._password), + ) def createBookmarkInMenu(self): - error = self.runAsyncCode(""" + error = self.runAsyncCode( + """ // let url = arguments[0]; // let title = arguments[1]; // let resolve = arguments[arguments.length - 1]; let [url, title, resolve] = arguments; PlacesUtils.bookmarks.insert({ parentGuid: PlacesUtils.bookmarks.menuGuid, url, title }).then(() => resolve(false), resolve); - """, script_args=(self._bookmarkURL, self._bookmarkText)) + """, + script_args=(self._bookmarkURL, self._bookmarkText), + ) if error: print(error) def createBookmarksOnToolbar(self): - error = self.runAsyncCode(""" + error = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; let children = []; for (let i = 1; i <= 5; i++) { children.push({url: `about:rights?p=${i}`, title: `Bookmark ${i}`}); } PlacesUtils.bookmarks.insertTree({ guid: PlacesUtils.bookmarks.toolbarGuid, children }).then(() => resolve(false), resolve); - """) + """ + ) if error: print(error) def createHistory(self): - error = self.runAsyncCode(""" + error = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; PlacesUtils.history.insert({ url: arguments[0], title: arguments[1], visits: [{ date: new Date(Date.now() - 5000), referrer: "about:mozilla" }] }).then(() => resolve(false), ex => resolve("Unexpected error in adding visit: " + ex)); - """, script_args=(self._historyURL, self._historyTitle)) + """, + script_args=(self._historyURL, self._historyTitle), + ) if error: print(error) def createFormHistory(self): - error = self.runAsyncCode(""" + error = self.runAsyncCode( + """ let updateDefinition = { op: "add", fieldname: arguments[0], value: arguments[1], firstUsed: (Date.now() - 5000) * 1000, }; let finished = false; let resolve = arguments[arguments.length - 1]; @@ -114,24 +126,27 @@ class TestFirefoxRefresh(MarionetteTestC resolve(error); }, handleCompletion() { if (!finished) { resolve(false); } } }); - """, script_args=(self._formHistoryFieldName, self._formHistoryValue)) + """, + script_args=(self._formHistoryFieldName, self._formHistoryValue), + ) if error: print(error) def createFormAutofill(self): if not self._formAutofillAvailable: return - self._formAutofillAddressGuid = self.runAsyncCode(""" + self._formAutofillAddressGuid = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; const TEST_ADDRESS_1 = { "given-name": "John", "additional-name": "R.", "family-name": "Smith", organization: "World Wide Web Consortium", "street-address": "32 Vassar Street\\\nMIT Room 32-G524", "address-level2": "Cambridge", @@ -139,29 +154,39 @@ class TestFirefoxRefresh(MarionetteTestC "postal-code": "02139", country: "US", tel: "+15195555555", email: "user@example.com", }; return global.formAutofillStorage.initialize().then(() => { return 
global.formAutofillStorage.addresses.add(TEST_ADDRESS_1); }).then(resolve); - """) + """ + ) def createCookie(self): - self.runCode(""" + self.runCode( + """ // Expire in 15 minutes: let expireTime = Math.floor(Date.now() / 1000) + 15 * 60; Services.cookies.add(arguments[0], arguments[1], arguments[2], arguments[3], true, false, false, expireTime, {}, Ci.nsICookie.SAMESITE_NONE, Ci.nsICookie.SCHEME_UNSET); - """, script_args=(self._cookieHost, self._cookiePath, self._cookieName, self._cookieValue)) + """, + script_args=( + self._cookieHost, + self._cookiePath, + self._cookieName, + self._cookieValue, + ), + ) def createSession(self): - self.runAsyncCode(""" + self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; const COMPLETE_STATE = Ci.nsIWebProgressListener.STATE_STOP + Ci.nsIWebProgressListener.STATE_IS_NETWORK; let {TabStateFlusher} = Cu.import("resource:///modules/sessionstore/TabStateFlusher.jsm", {}); let expectedURLs = Array.from(arguments[0]) gBrowser.addTabsProgressListener({ onStateChange(browser, webprogress, request, flags, status) { try { @@ -188,197 +213,234 @@ class TestFirefoxRefresh(MarionetteTestC } // Close any other tabs that might be open: let allTabs = Array.from(gBrowser.tabs); for (let tab of allTabs) { if (!expectedTabs.has(tab)) { gBrowser.removeTab(tab); } } - """, script_args=(self._expectedURLs,)) # NOQA: E501 + """, # NOQA: E501 + script_args=(self._expectedURLs,), + ) def createFxa(self): # This script will write an entry to the login manager and create # a signedInUser.json in the profile dir. - self.runAsyncCode(""" + self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; Cu.import("resource://gre/modules/FxAccountsStorage.jsm"); let storage = new FxAccountsStorageManager(); let data = {email: "test@test.com", uid: "uid", keyFetchToken: "top-secret"}; storage.initialize(data); storage.finalize().then(resolve); - """) + """ + ) def createSync(self): # This script will write the canonical preference which indicates a user # is signed into sync. - self.marionette.execute_script(""" + self.marionette.execute_script( + """ Services.prefs.setStringPref("services.sync.username", "test@test.com"); - """) + """ + ) def checkPassword(self): - loginInfo = self.marionette.execute_script(""" + loginInfo = self.marionette.execute_script( + """ let ary = Services.logins.findLogins( "test.marionette.mozilla.com", "http://test.marionette.mozilla.com/some/form/", null, {}); return ary.length ? ary : {username: "null", password: "null"}; - """) + """ + ) self.assertEqual(len(loginInfo), 1) - self.assertEqual(loginInfo[0]['username'], self._username) - self.assertEqual(loginInfo[0]['password'], self._password) + self.assertEqual(loginInfo[0]["username"], self._username) + self.assertEqual(loginInfo[0]["password"], self._password) - loginCount = self.marionette.execute_script(""" + loginCount = self.marionette.execute_script( + """ return Services.logins.getAllLogins().length; - """) + """ + ) # Note that we expect 2 logins - one from us, one from sync. self.assertEqual(loginCount, 2, "No other logins are present") def checkBookmarkInMenu(self): - titleInBookmarks = self.runAsyncCode(""" + titleInBookmarks = self.runAsyncCode( + """ let [url, resolve] = arguments; PlacesUtils.bookmarks.fetch({url}).then( bookmark => resolve(bookmark ? 
bookmark.title : ""), ex => resolve(ex) ); - """, script_args=(self._bookmarkURL,)) + """, + script_args=(self._bookmarkURL,), + ) self.assertEqual(titleInBookmarks, self._bookmarkText) def checkBookmarkToolbarVisibility(self): - toolbarVisible = self.marionette.execute_script(""" + toolbarVisible = self.marionette.execute_script( + """ const BROWSER_DOCURL = AppConstants.BROWSER_CHROME_URL; return Services.xulStore.getValue(BROWSER_DOCURL, "PersonalToolbar", "collapsed"); - """) + """ + ) if toolbarVisible == "": toolbarVisible = "false" self.assertEqual(toolbarVisible, "false") def checkHistory(self): - historyResult = self.runAsyncCode(""" + historyResult = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; PlacesUtils.history.fetch(arguments[0]).then(pageInfo => { if (!pageInfo) { resolve("No visits found"); } else { resolve(pageInfo); } }).catch(e => { resolve("Unexpected error in fetching page: " + e); }); - """, script_args=(self._historyURL,)) + """, + script_args=(self._historyURL,), + ) if type(historyResult) == str: self.fail(historyResult) return - self.assertEqual(historyResult['title'], self._historyTitle) + self.assertEqual(historyResult["title"], self._historyTitle) def checkFormHistory(self): - formFieldResults = self.runAsyncCode(""" + formFieldResults = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; let results = []; global.FormHistory.search(["value"], {fieldname: arguments[0]}, { handleError(error) { results = error; }, handleResult(result) { results.push(result); }, handleCompletion() { resolve(results); }, }); - """, script_args=(self._formHistoryFieldName,)) + """, + script_args=(self._formHistoryFieldName,), + ) if type(formFieldResults) == str: self.fail(formFieldResults) return formFieldResultCount = len(formFieldResults) - self.assertEqual(formFieldResultCount, 1, - "Should have exactly 1 entry for this field, got %d" % - formFieldResultCount) + self.assertEqual( + formFieldResultCount, + 1, + "Should have exactly 1 entry for this field, got %d" % formFieldResultCount, + ) if formFieldResultCount == 1: - self.assertEqual( - formFieldResults[0]['value'], self._formHistoryValue) + self.assertEqual(formFieldResults[0]["value"], self._formHistoryValue) - formHistoryCount = self.runAsyncCode(""" + formHistoryCount = self.runAsyncCode( + """ let [resolve] = arguments; let count; let callbacks = { handleResult: rv => count = rv, handleCompletion() { resolve(count); }, }; global.FormHistory.count({}, callbacks); - """) - self.assertEqual(formHistoryCount, 1, - "There should be only 1 entry in the form history") + """ + ) + self.assertEqual( + formHistoryCount, 1, "There should be only 1 entry in the form history" + ) def checkFormAutofill(self): if not self._formAutofillAvailable: return - formAutofillResults = self.runAsyncCode(""" + formAutofillResults = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1]; return global.formAutofillStorage.initialize().then(() => { return global.formAutofillStorage.addresses.getAll() }).then(resolve); - """,) + """, + ) if type(formAutofillResults) == str: self.fail(formAutofillResults) return formAutofillAddressCount = len(formAutofillResults) - self.assertEqual(formAutofillAddressCount, 1, - "Should have exactly 1 saved address, got %d" % formAutofillAddressCount) + self.assertEqual( + formAutofillAddressCount, + 1, + "Should have exactly 1 saved address, got %d" % formAutofillAddressCount, + ) if formAutofillAddressCount == 1: self.assertEqual( - 
formAutofillResults[0]['guid'], self._formAutofillAddressGuid) + formAutofillResults[0]["guid"], self._formAutofillAddressGuid + ) def checkCookie(self): - cookieInfo = self.runCode(""" + cookieInfo = self.runCode( + """ try { let cookies = Services.cookies.getCookiesFromHost(arguments[0], {}); let cookie = null; for (let hostCookie of cookies) { // getCookiesFromHost returns any cookie from the BASE host. if (hostCookie.rawHost != arguments[0]) continue; if (cookie != null) { return "more than 1 cookie! That shouldn't happen!"; } cookie = hostCookie; } return {path: cookie.path, name: cookie.name, value: cookie.value}; } catch (ex) { return "got exception trying to fetch cookie: " + ex; } - """, script_args=(self._cookieHost,)) + """, + script_args=(self._cookieHost,), + ) if not isinstance(cookieInfo, dict): self.fail(cookieInfo) return - self.assertEqual(cookieInfo['path'], self._cookiePath) - self.assertEqual(cookieInfo['value'], self._cookieValue) - self.assertEqual(cookieInfo['name'], self._cookieName) + self.assertEqual(cookieInfo["path"], self._cookiePath) + self.assertEqual(cookieInfo["value"], self._cookieValue) + self.assertEqual(cookieInfo["name"], self._cookieName) def checkSession(self): - tabURIs = self.runCode(""" + tabURIs = self.runCode( + """ return [... gBrowser.browsers].map(b => b.currentURI && b.currentURI.spec) - """) + """ + ) self.assertSequenceEqual(tabURIs, ["about:welcomeback"]) # Dismiss modal dialog if any. This is mainly to dismiss the check for # default browser dialog if it shows up. try: alert = self.marionette.switch_to_alert() alert.dismiss() except NoAlertPresentException: pass - tabURIs = self.runAsyncCode(""" + tabURIs = self.runAsyncCode( + """ let resolve = arguments[arguments.length - 1] let mm = gBrowser.selectedBrowser.messageManager; let {TabStateFlusher} = Cu.import("resource:///modules/sessionstore/TabStateFlusher.jsm", {}); window.addEventListener("SSWindowStateReady", function testSSPostReset() { window.removeEventListener("SSWindowStateReady", testSSPostReset, false); Promise.all(gBrowser.browsers.map(b => TabStateFlusher.flush(b))).then(function() { resolve([... 
gBrowser.browsers].map(b => b.currentURI && b.currentURI.spec)); @@ -391,46 +453,51 @@ class TestFirefoxRefresh(MarionetteTestC } else { content.window.addEventListener("load", function(event) { content.document.getElementById("errorTryAgain").click(); }, { once: true }); } }; mm.loadFrameScript("data:application/javascript,(" + fs.toString() + ")()", true); - """) # NOQA: E501 + """ # NOQA: E501 + ) self.assertSequenceEqual(tabURIs, self._expectedURLs) def checkFxA(self): - result = self.runAsyncCode(""" + result = self.runAsyncCode( + """ Cu.import("resource://gre/modules/FxAccountsStorage.jsm"); let resolve = arguments[arguments.length - 1]; let storage = new FxAccountsStorageManager(); let result = {}; storage.initialize(); storage.getAccountData().then(data => { result.accountData = data; return storage.finalize(); }).then(() => { resolve(result); }).catch(err => { resolve(err.toString()); }); - """) + """ + ) if type(result) != dict: self.fail(result) return self.assertEqual(result["accountData"]["email"], "test@test.com") self.assertEqual(result["accountData"]["uid"], "uid") self.assertEqual(result["accountData"]["keyFetchToken"], "top-secret") def checkSync(self, expect_sync_user): - pref_value = self.marionette.execute_script(""" + pref_value = self.marionette.execute_script( + """ return Services.prefs.getStringPref("services.sync.username", null); - """) + """ + ) expected_value = "test@test.com" if expect_sync_user else None self.assertEqual(pref_value, expected_value) def checkProfile(self, has_migrated=False, expect_sync_user=True): self.checkPassword() self.checkBookmarkInMenu() self.checkHistory() self.checkFormHistory() @@ -451,45 +518,45 @@ class TestFirefoxRefresh(MarionetteTestC self.createFormAutofill() self.createCookie() self.createSession() self.createFxa() self.createSync() def setUpScriptData(self): self.marionette.set_context(self.marionette.CONTEXT_CHROME) - self.runCode(""" + self.runCode( + """ window.global = {}; global.LoginInfo = Components.Constructor("@mozilla.org/login-manager/loginInfo;1", "nsILoginInfo", "init"); global.profSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(Ci.nsIToolkitProfileService); global.Preferences = Cu.import("resource://gre/modules/Preferences.jsm", {}).Preferences; global.FormHistory = Cu.import("resource://gre/modules/FormHistory.jsm", {}).FormHistory; - """) # NOQA: E501 - self._formAutofillAvailable = self.runCode(""" + """ # NOQA: E501 + ) + self._formAutofillAvailable = self.runCode( + """ try { global.formAutofillStorage = Cu.import("resource://formautofill/FormAutofillStorage.jsm", {}).formAutofillStorage; } catch(e) { return false; } return true; - """) # NOQA: E501 + """ # NOQA: E501 + ) def runCode(self, script, *args, **kwargs): - return self.marionette.execute_script(script, - new_sandbox=False, - sandbox=self._sandbox, - *args, - **kwargs) + return self.marionette.execute_script( + script, new_sandbox=False, sandbox=self._sandbox, *args, **kwargs + ) def runAsyncCode(self, script, *args, **kwargs): - return self.marionette.execute_async_script(script, - new_sandbox=False, - sandbox=self._sandbox, - *args, - **kwargs) + return self.marionette.execute_async_script( + script, new_sandbox=False, sandbox=self._sandbox, *args, **kwargs + ) def setUp(self): MarionetteTestCase.setUp(self) self.setUpScriptData() self.cleanups = [] def tearDown(self): @@ -506,34 +573,40 @@ class TestFirefoxRefresh(MarionetteTestC import mozfile for cleanup in self.cleanups: if cleanup.desktop_backup_path: 
mozfile.remove(cleanup.desktop_backup_path) if cleanup.reset_profile_path: # Remove ourselves from profiles.ini - self.runCode(""" + self.runCode( + """ let name = arguments[0]; let profile = global.profSvc.getProfileByName(name); profile.remove(false) global.profSvc.flush(); - """, script_args=(cleanup.profile_name_to_remove,)) + """, + script_args=(cleanup.profile_name_to_remove,), + ) # Remove the local profile dir if it's not the same as the profile dir: - different_path = cleanup.reset_profile_local_path != cleanup.reset_profile_path + different_path = ( + cleanup.reset_profile_local_path != cleanup.reset_profile_path + ) if cleanup.reset_profile_local_path and different_path: mozfile.remove(cleanup.reset_profile_local_path) # And delete all the files. mozfile.remove(cleanup.reset_profile_path) def doReset(self): profileName = "marionette-test-profile-" + str(int(time.time() * 1000)) cleanup = PendingCleanup(profileName) - self.runCode(""" + self.runCode( + """ // Ensure the current (temporary) profile is in profiles.ini: let profD = Services.dirsvc.get("ProfD", Ci.nsIFile); let profileName = arguments[1]; let myProfile = global.profSvc.createProfile(profD, profileName); global.profSvc.flush() // Now add the reset parameters: let env = Cc["@mozilla.org/process/environment;1"].getService(Ci.nsIEnvironment); @@ -541,51 +614,66 @@ class TestFirefoxRefresh(MarionetteTestC prefsToKeep.push("datareporting.policy.dataSubmissionPolicyBypassNotification"); let prefObj = {}; for (let pref of prefsToKeep) { prefObj[pref] = global.Preferences.get(pref); } env.set("MOZ_MARIONETTE_PREF_STATE_ACROSS_RESTARTS", JSON.stringify(prefObj)); env.set("MOZ_RESET_PROFILE_RESTART", "1"); env.set("XRE_PROFILE_PATH", arguments[0]); - """, script_args=(self.marionette.instance.profile.profile, profileName,)) + """, + script_args=( + self.marionette.instance.profile.profile, + profileName, + ), + ) - profileLeafName = os.path.basename(os.path.normpath( - self.marionette.instance.profile.profile)) + profileLeafName = os.path.basename( + os.path.normpath(self.marionette.instance.profile.profile) + ) # Now restart the browser to get it reset: self.marionette.restart(clean=False, in_app=True) self.setUpScriptData() # Determine the new profile path (we'll need to remove it when we're done) - [cleanup.reset_profile_path, cleanup.reset_profile_local_path] = self.runCode(""" + [cleanup.reset_profile_path, cleanup.reset_profile_local_path] = self.runCode( + """ let profD = Services.dirsvc.get("ProfD", Ci.nsIFile); let localD = Services.dirsvc.get("ProfLD", Ci.nsIFile); return [profD.path, localD.path]; - """) + """ + ) # Determine the backup path - cleanup.desktop_backup_path = self.runCode(""" + cleanup.desktop_backup_path = self.runCode( + """ let container; try { container = Services.dirsvc.get("Desk", Ci.nsIFile); } catch (ex) { container = Services.dirsvc.get("Home", Ci.nsIFile); } let bundle = Services.strings.createBundle("chrome://mozapps/locale/profile/profileSelection.properties"); let dirName = bundle.formatStringFromName("resetBackupDirectory", [Services.appinfo.name]); container.append(dirName); container.append(arguments[0]); return container.path; - """, script_args=(profileLeafName,)) # NOQA: E501 + """, # NOQA: E501 + script_args=(profileLeafName,), + ) - self.assertTrue(os.path.isdir(cleanup.reset_profile_path), - "Reset profile path should be present") - self.assertTrue(os.path.isdir(cleanup.desktop_backup_path), - "Backup profile path should be present") + self.assertTrue( + 
os.path.isdir(cleanup.reset_profile_path), + "Reset profile path should be present", + ) + self.assertTrue( + os.path.isdir(cleanup.desktop_backup_path), + "Backup profile path should be present", + ) self.assertIn(cleanup.profile_name_to_remove, cleanup.reset_profile_path) return cleanup def testResetEverything(self): self.createProfileData() self.checkProfile(expect_sync_user=True)
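The test_refresh_firefox.py hunks above only rewrap existing marionette_driver calls: each inline script literal moves onto its own argument line and the script_args tuples gain trailing commas, with the script bodies untouched. A minimal sketch of that call shape, assuming a local Marionette session (the host, port, and script here are illustrative, not taken from the test):

```python
# Sketch of the post-reformat call layout, not part of the test itself.
from marionette_driver.marionette import Marionette

client = Marionette(host="127.0.0.1", port=2828)  # illustrative session
client.start_session()

title = client.execute_script(
    """
    return document.title;
    """,
    new_sandbox=False,  # the test's runCode() helper passes the same keyword
)
```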
--- a/build/pgo/genpgocert.py +++ b/build/pgo/genpgocert.py @@ -18,17 +18,17 @@ import sys from mozbuild.base import MozbuildObject, BinaryNotFoundException from mozfile import NamedTemporaryFile, TemporaryDirectory from mozprofile.permissions import ServerLocations from distutils.spawn import find_executable dbFiles = [ re.compile("^cert[0-9]+\.db$"), re.compile("^key[0-9]+\.db$"), - re.compile("^secmod\.db$") + re.compile("^secmod\.db$"), ] def unlinkDbFiles(path): for root, dirs, files in os.walk(path): for name in files: for dbFile in dbFiles: if dbFile.match(name) and os.path.exists(os.path.join(root, name)): @@ -48,162 +48,207 @@ def runUtil(util, args, inputdata=None, env = os.environ.copy() if mozinfo.os == "linux": pathvar = "LD_LIBRARY_PATH" app_path = os.path.dirname(util) if pathvar in env: env[pathvar] = "%s%s%s" % (app_path, os.pathsep, env[pathvar]) else: env[pathvar] = app_path - proc = subprocess.Popen([util] + args, env=env, - stdin=subprocess.PIPE if inputdata else None, - stdout=outputstream, - universal_newlines=True) + proc = subprocess.Popen( + [util] + args, + env=env, + stdin=subprocess.PIPE if inputdata else None, + stdout=outputstream, + universal_newlines=True, + ) proc.communicate(inputdata) return proc.returncode def createRandomFile(randomFile): for count in xrange(0, 2048): randomFile.write(chr(random.randint(0, 255))) def writeCertspecForServerLocations(fd): - locations = ServerLocations(os.path.join(build.topsrcdir, - "build", "pgo", - "server-locations.txt")) + locations = ServerLocations( + os.path.join(build.topsrcdir, "build", "pgo", "server-locations.txt") + ) SAN = [] - for loc in [i for i in iter(locations) if i.scheme == "https" and "nocert" not in i.options]: + for loc in [ + i for i in iter(locations) if i.scheme == "https" and "nocert" not in i.options + ]: customCertOption = False customCertRE = re.compile("^cert=(?:\w+)") for _ in [i for i in loc.options if customCertRE.match(i)]: customCertOption = True break if "ipV4Address" in loc.options: loc.host = "ip4:" + loc.host if not customCertOption: SAN.append(loc.host) - fd.write("issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization\n") # NOQA: E501 + fd.write( + "issuer:printableString/CN=Temporary Certificate Authority/O=Mozilla Testing/OU=Profile Guided Optimization\n" # NOQA: E501 + ) fd.write("subject:{}\n".format(SAN[0])) fd.write("extension:subjectAlternativeName:{}\n".format(",".join(SAN))) def constructCertDatabase(build, srcDir): try: certutil = build.get_binary_path(what="certutil") pk12util = build.get_binary_path(what="pk12util") except BinaryNotFoundException as e: - print('{}\n\n{}\n'.format(e, e.help())) + print("{}\n\n{}\n".format(e, e.help())) return 1 openssl = find_executable("openssl") - pycert = os.path.join(build.topsrcdir, "security", "manager", "ssl", "tests", - "unit", "pycert.py") - pykey = os.path.join(build.topsrcdir, "security", "manager", "ssl", "tests", - "unit", "pykey.py") + pycert = os.path.join( + build.topsrcdir, "security", "manager", "ssl", "tests", "unit", "pycert.py" + ) + pykey = os.path.join( + build.topsrcdir, "security", "manager", "ssl", "tests", "unit", "pykey.py" + ) with NamedTemporaryFile(mode="wt+") as pwfile, TemporaryDirectory() as pemfolder: pwfile.write("\n") pwfile.flush() if dbFilesExist(srcDir): # Make sure all DB files from src are really deleted unlinkDbFiles(srcDir) # Copy all .certspec and .keyspec files to a temporary directory for root, dirs, files in os.walk(srcDir): - for spec 
in [i for i in files if i.endswith(".certspec") or i.endswith(".keyspec")]: - shutil.copyfile(os.path.join(root, spec), - os.path.join(pemfolder, spec)) + for spec in [ + i for i in files if i.endswith(".certspec") or i.endswith(".keyspec") + ]: + shutil.copyfile(os.path.join(root, spec), os.path.join(pemfolder, spec)) # Write a certspec for the "server-locations.txt" file to that temporary directory pgoserver_certspec = os.path.join(pemfolder, "pgoserver.certspec") if os.path.exists(pgoserver_certspec): raise Exception( - "{} already exists, which isn't allowed".format(pgoserver_certspec)) + "{} already exists, which isn't allowed".format(pgoserver_certspec) + ) with open(pgoserver_certspec, "w") as fd: writeCertspecForServerLocations(fd) # Generate certs for all certspecs for root, dirs, files in os.walk(pemfolder): for certspec in [i for i in files if i.endswith(".certspec")]: name = certspec.split(".certspec")[0] pem = os.path.join(pemfolder, "{}.cert.pem".format(name)) print("Generating public certificate {} (pem={})".format(name, pem)) with open(os.path.join(root, certspec), "r") as certspec_file: certspec_data = certspec_file.read() with open(pem, "w") as pem_file: status = runUtil( - pycert, [], inputdata=certspec_data, outputstream=pem_file) + pycert, [], inputdata=certspec_data, outputstream=pem_file + ) if status: return status - status = runUtil(certutil, [ - "-A", "-n", name, "-t", "P,,", "-i", pem, - "-d", srcDir, "-f", pwfile.name - ]) + status = runUtil( + certutil, + [ + "-A", + "-n", + name, + "-t", + "P,,", + "-i", + pem, + "-d", + srcDir, + "-f", + pwfile.name, + ], + ) if status: return status for keyspec in [i for i in files if i.endswith(".keyspec")]: parts = keyspec.split(".") name = parts[0] key_type = parts[1] if key_type not in ["ca", "client", "server"]: - raise Exception("{}: keyspec filenames must be of the form XXX.client.keyspec " - "or XXX.ca.keyspec (key_type={})".format( - keyspec, key_type)) + raise Exception( + "{}: keyspec filenames must be of the form XXX.client.keyspec " + "or XXX.ca.keyspec (key_type={})".format(keyspec, key_type) + ) key_pem = os.path.join(pemfolder, "{}.key.pem".format(name)) print("Generating private key {} (pem={})".format(name, key_pem)) with open(os.path.join(root, keyspec), "r") as keyspec_file: keyspec_data = keyspec_file.read() with open(key_pem, "w") as pem_file: status = runUtil( - pykey, [], inputdata=keyspec_data, outputstream=pem_file) + pykey, [], inputdata=keyspec_data, outputstream=pem_file + ) if status: return status cert_pem = os.path.join(pemfolder, "{}.cert.pem".format(name)) if not os.path.exists(cert_pem): - raise Exception("There has to be a corresponding certificate named {} for " - "the keyspec {}".format( - cert_pem, keyspec)) + raise Exception( + "There has to be a corresponding certificate named {} for " + "the keyspec {}".format(cert_pem, keyspec) + ) p12 = os.path.join(pemfolder, "{}.key.p12".format(name)) - print("Converting private key {} to PKCS12 (p12={})".format( - key_pem, p12)) - status = runUtil(openssl, ["pkcs12", "-export", "-inkey", key_pem, "-in", - cert_pem, "-name", name, "-out", p12, "-passout", - "file:"+pwfile.name]) + print( + "Converting private key {} to PKCS12 (p12={})".format(key_pem, p12) + ) + status = runUtil( + openssl, + [ + "pkcs12", + "-export", + "-inkey", + key_pem, + "-in", + cert_pem, + "-name", + name, + "-out", + p12, + "-passout", + "file:" + pwfile.name, + ], + ) if status: return status print("Importing private key {} to database".format(key_pem)) status = 
runUtil( - pk12util, ["-i", p12, "-d", srcDir, "-w", pwfile.name, "-k", pwfile.name]) + pk12util, + ["-i", p12, "-d", srcDir, "-w", pwfile.name, "-k", pwfile.name], + ) if status: return status if key_type == "ca": - shutil.copyfile(cert_pem, os.path.join( - srcDir, "{}.ca".format(name))) + shutil.copyfile( + cert_pem, os.path.join(srcDir, "{}.ca".format(name)) + ) elif key_type == "client": - shutil.copyfile(p12, os.path.join( - srcDir, "{}.client".format(name))) + shutil.copyfile(p12, os.path.join(srcDir, "{}.client".format(name))) elif key_type == "server": pass # Nothing to do for server keys else: raise Exception( - "State error: Unknown keyspec key_type: {}".format(key_type)) + "State error: Unknown keyspec key_type: {}".format(key_type) + ) return 0 build = MozbuildObject.from_environment() certdir = os.path.join(build.topsrcdir, "build", "pgo", "certs") certificateStatus = constructCertDatabase(build, certdir) if certificateStatus:
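The genpgocert.py hunks show the other dominant pattern in this patch: long argument lists, such as the certutil and openssl invocations, are exploded one element per line with a trailing comma once the call exceeds the line limit. A simplified sketch of the runUtil() shape after the reformat (the LD_LIBRARY_PATH handling from the real helper is omitted):

```python
# Simplified sketch of runUtil() post-reformat; env setup omitted.
import subprocess

def run_tool(util, args, inputdata=None, outputstream=None):
    proc = subprocess.Popen(
        [util] + args,
        stdin=subprocess.PIPE if inputdata else None,
        stdout=outputstream,
        universal_newlines=True,
    )
    proc.communicate(inputdata)
    return proc.returncode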
--- a/config/check_macroassembler_style.py +++ b/config/check_macroassembler_style.py @@ -23,321 +23,320 @@ from __future__ import absolute_import from __future__ import print_function import difflib import os import re import sys -architecture_independent = set(['generic']) -all_unsupported_architectures_names = set(['mips32', 'mips64', 'mips_shared']) -all_architecture_names = set(['x86', 'x64', 'arm', 'arm64']) -all_shared_architecture_names = set(['x86_shared', 'arm', 'arm64']) +architecture_independent = set(["generic"]) +all_unsupported_architectures_names = set(["mips32", "mips64", "mips_shared"]) +all_architecture_names = set(["x86", "x64", "arm", "arm64"]) +all_shared_architecture_names = set(["x86_shared", "arm", "arm64"]) reBeforeArg = "(?<=[(,\s])" reArgType = "(?P<type>[\w\s:*&]+)" reArgName = "(?P<name>\s\w+)" reArgDefault = "(?P<default>(?:\s=[^,)]+)?)" reAfterArg = "(?=[,)])" -reMatchArg = re.compile(reBeforeArg + reArgType + - reArgName + reArgDefault + reAfterArg) +reMatchArg = re.compile(reBeforeArg + reArgType + reArgName + reArgDefault + reAfterArg) def get_normalized_signatures(signature, fileAnnot=None): # Remove static - signature = signature.replace('static', '') + signature = signature.replace("static", "") # Remove semicolon. - signature = signature.replace(';', ' ') + signature = signature.replace(";", " ") # Normalize spaces. - signature = re.sub(r'\s+', ' ', signature).strip() + signature = re.sub(r"\s+", " ", signature).strip() # Remove new-line induced spaces after opening braces. - signature = re.sub(r'\(\s+', '(', signature).strip() + signature = re.sub(r"\(\s+", "(", signature).strip() # Match arguments, and keep only the type. - signature = reMatchArg.sub('\g<type>', signature) + signature = reMatchArg.sub("\g<type>", signature) # Remove class name - signature = signature.replace('MacroAssembler::', '') + signature = signature.replace("MacroAssembler::", "") # Extract list of architectures - archs = ['generic'] + archs = ["generic"] if fileAnnot: - archs = [fileAnnot['arch']] + archs = [fileAnnot["arch"]] - if 'DEFINED_ON(' in signature: + if "DEFINED_ON(" in signature: archs = re.sub( - r'.*DEFINED_ON\((?P<archs>[^()]*)\).*', '\g<archs>', signature).split(',') + r".*DEFINED_ON\((?P<archs>[^()]*)\).*", "\g<archs>", signature + ).split(",") archs = [a.strip() for a in archs] - signature = re.sub(r'\s+DEFINED_ON\([^()]*\)', '', signature) + signature = re.sub(r"\s+DEFINED_ON\([^()]*\)", "", signature) - elif 'PER_ARCH' in signature: + elif "PER_ARCH" in signature: archs = all_architecture_names - signature = re.sub(r'\s+PER_ARCH', '', signature) + signature = re.sub(r"\s+PER_ARCH", "", signature) - elif 'PER_SHARED_ARCH' in signature: + elif "PER_SHARED_ARCH" in signature: archs = all_shared_architecture_names - signature = re.sub(r'\s+PER_SHARED_ARCH', '', signature) + signature = re.sub(r"\s+PER_SHARED_ARCH", "", signature) - elif 'OOL_IN_HEADER' in signature: - assert archs == ['generic'] - signature = re.sub(r'\s+OOL_IN_HEADER', '', signature) + elif "OOL_IN_HEADER" in signature: + assert archs == ["generic"] + signature = re.sub(r"\s+OOL_IN_HEADER", "", signature) else: # No signature annotation, the list of architectures remains unchanged. 
pass # Extract inline annotation inline = False if fileAnnot: - inline = fileAnnot['inline'] + inline = fileAnnot["inline"] - if 'inline ' in signature: - signature = re.sub(r'inline\s+', '', signature) + if "inline " in signature: + signature = re.sub(r"inline\s+", "", signature) inline = True - inlinePrefx = '' + inlinePrefx = "" if inline: - inlinePrefx = 'inline ' - signatures = [ - {'arch': a, 'sig': inlinePrefx + signature} - for a in archs - ] + inlinePrefx = "inline " + signatures = [{"arch": a, "sig": inlinePrefx + signature} for a in archs] return signatures -file_suffixes = set([ - a.replace('_', '-') for a in - all_architecture_names.union(all_shared_architecture_names) - .union(all_unsupported_architectures_names) -]) +file_suffixes = set( + [ + a.replace("_", "-") + for a in all_architecture_names.union(all_shared_architecture_names).union( + all_unsupported_architectures_names + ) + ] +) def get_file_annotation(filename): origFilename = filename - filename = filename.split('/')[-1] + filename = filename.split("/")[-1] inline = False - if filename.endswith('.cpp'): - filename = filename[:-len('.cpp')] - elif filename.endswith('-inl.h'): + if filename.endswith(".cpp"): + filename = filename[: -len(".cpp")] + elif filename.endswith("-inl.h"): inline = True - filename = filename[:-len('-inl.h')] - elif filename.endswith('.h'): + filename = filename[: -len("-inl.h")] + elif filename.endswith(".h"): # This allows the definitions block in MacroAssembler.h to be # style-checked. inline = True - filename = filename[:-len('.h')] + filename = filename[: -len(".h")] else: - raise Exception('unknown file name', origFilename) + raise Exception("unknown file name", origFilename) - arch = 'generic' + arch = "generic" for suffix in file_suffixes: - if filename == 'MacroAssembler-' + suffix: + if filename == "MacroAssembler-" + suffix: arch = suffix break - return { - 'inline': inline, - 'arch': arch.replace('-', '_') - } + return {"inline": inline, "arch": arch.replace("-", "_")} def get_macroassembler_definitions(filename): try: fileAnnot = get_file_annotation(filename) except Exception: return [] style_section = False - lines = '' + lines = "" signatures = [] with open(filename) as f: for line in f: - if '//{{{ check_macroassembler_style' in line: + if "//{{{ check_macroassembler_style" in line: if style_section: - raise 'check_macroassembler_style section already opened.' + raise "check_macroassembler_style section already opened." style_section = True braces_depth = 0 - elif '//}}} check_macroassembler_style' in line: + elif "//}}} check_macroassembler_style" in line: style_section = False if not style_section: continue # Ignore preprocessor directives. - if line.startswith('#'): + if line.startswith("#"): continue # Remove comments from the processed line. - line = re.sub(r'//.*', '', line) + line = re.sub(r"//.*", "", line) # Locate and count curly braces. - open_curly_brace = line.find('{') + open_curly_brace = line.find("{") was_braces_depth = braces_depth - braces_depth = braces_depth + line.count('{') - line.count('}') + braces_depth = braces_depth + line.count("{") - line.count("}") # Raise an error if the check_macroassembler_style macro is used # across namespaces / classes scopes. if braces_depth < 0: - raise 'check_macroassembler_style annotations are not well scoped.' + raise "check_macroassembler_style annotations are not well scoped." 
# If the current line contains an opening curly brace, check if # this line combines with the previous one can be identified as a # MacroAssembler function signature. if open_curly_brace != -1 and was_braces_depth == 0: lines = lines + line[:open_curly_brace] - if 'MacroAssembler::' in lines: - signatures.extend( - get_normalized_signatures(lines, fileAnnot)) - lines = '' + if "MacroAssembler::" in lines: + signatures.extend(get_normalized_signatures(lines, fileAnnot)) + lines = "" continue # We do not aggregate any lines if we are scanning lines which are # in-between a set of curly braces. if braces_depth > 0: continue if was_braces_depth != 0: - line = line[line.rfind('}') + 1:] + line = line[line.rfind("}") + 1 :] # This logic is used to remove template instantiation, static # variable definitions and function declaration from the next # function definition. - last_semi_colon = line.rfind(';') + last_semi_colon = line.rfind(";") if last_semi_colon != -1: - lines = '' - line = line[last_semi_colon + 1:] + lines = "" + line = line[last_semi_colon + 1 :] # Aggregate lines of non-braced text, which corresponds to the space # where we are expecting to find function definitions. lines = lines + line return signatures def get_macroassembler_declaration(filename): style_section = False - lines = '' + lines = "" signatures = [] with open(filename) as f: for line in f: - if '//{{{ check_macroassembler_decl_style' in line: + if "//{{{ check_macroassembler_decl_style" in line: style_section = True - elif '//}}} check_macroassembler_decl_style' in line: + elif "//}}} check_macroassembler_decl_style" in line: style_section = False if not style_section: continue # Ignore preprocessor directives. - if line.startswith('#'): + if line.startswith("#"): continue - line = re.sub(r'//.*', '', line) - if len(line.strip()) == 0 or 'public:' in line or 'private:' in line: - lines = '' + line = re.sub(r"//.*", "", line) + if len(line.strip()) == 0 or "public:" in line or "private:" in line: + lines = "" continue lines = lines + line # Continue until we have a complete declaration - if ';' not in lines: + if ";" not in lines: continue # Skip member declarations: which are lines ending with a # semi-colon without any list of arguments. - if ')' not in lines: - lines = '' + if ")" not in lines: + lines = "" continue signatures.extend(get_normalized_signatures(lines)) - lines = '' + lines = "" return signatures def append_signatures(d, sigs): for s in sigs: - if s['sig'] not in d: - d[s['sig']] = [] - d[s['sig']].append(s['arch']) + if s["sig"] not in d: + d[s["sig"]] = [] + d[s["sig"]].append(s["arch"]) return d def generate_file_content(signatures): output = [] for s in sorted(signatures.keys()): archs = set(sorted(signatures[s])) archs -= all_unsupported_architectures_names if len(archs.symmetric_difference(architecture_independent)) == 0: - output.append(s + ';\n') - if s.startswith('inline'): + output.append(s + ";\n") + if s.startswith("inline"): # TODO, bug 1432600: This is mistaken for OOL_IN_HEADER # functions. (Such annotation is already removed by the time # this function sees the signature here.) 
- output.append(' is defined in MacroAssembler-inl.h\n') + output.append(" is defined in MacroAssembler-inl.h\n") else: - output.append(' is defined in MacroAssembler.cpp\n') + output.append(" is defined in MacroAssembler.cpp\n") else: if len(archs.symmetric_difference(all_architecture_names)) == 0: - output.append(s + ' PER_ARCH;\n') + output.append(s + " PER_ARCH;\n") elif len(archs.symmetric_difference(all_shared_architecture_names)) == 0: - output.append(s + ' PER_SHARED_ARCH;\n') + output.append(s + " PER_SHARED_ARCH;\n") else: - output.append( - s + ' DEFINED_ON(' + ', '.join(sorted(archs)) + ');\n') + output.append(s + " DEFINED_ON(" + ", ".join(sorted(archs)) + ");\n") for a in sorted(archs): - a = a.replace('_', '-') - masm = '%s/MacroAssembler-%s' % (a, a) - if s.startswith('inline'): - output.append(' is defined in %s-inl.h\n' % masm) + a = a.replace("_", "-") + masm = "%s/MacroAssembler-%s" % (a, a) + if s.startswith("inline"): + output.append(" is defined in %s-inl.h\n" % masm) else: - output.append(' is defined in %s.cpp\n' % masm) + output.append(" is defined in %s.cpp\n" % masm) return output def check_style(): # We read from the header file the signature of each function. - decls = dict() # type: dict(signature => ['x86', 'x64']) + decls = dict() # type: dict(signature => ['x86', 'x64']) # We infer from each file the signature of each MacroAssembler function. - defs = dict() # type: dict(signature => ['x86', 'x64']) + defs = dict() # type: dict(signature => ['x86', 'x64']) - root_dir = os.path.join('js', 'src', 'jit') + root_dir = os.path.join("js", "src", "jit") for dirpath, dirnames, filenames in os.walk(root_dir): for filename in filenames: - if 'MacroAssembler' not in filename: + if "MacroAssembler" not in filename: continue - filepath = os.path.join(dirpath, filename).replace('\\', '/') + filepath = os.path.join(dirpath, filename).replace("\\", "/") - if filepath.endswith('MacroAssembler.h'): + if filepath.endswith("MacroAssembler.h"): decls = append_signatures( - decls, get_macroassembler_declaration(filepath)) - defs = append_signatures( - defs, get_macroassembler_definitions(filepath)) + decls, get_macroassembler_declaration(filepath) + ) + defs = append_signatures(defs, get_macroassembler_definitions(filepath)) if not decls or not defs: raise Exception("Did not find any definitions or declarations") # Compare declarations and definitions output. - difflines = difflib.unified_diff(generate_file_content(decls), - generate_file_content(defs), - fromfile='check_macroassembler_style.py declared syntax', - tofile='check_macroassembler_style.py found definitions') + difflines = difflib.unified_diff( + generate_file_content(decls), + generate_file_content(defs), + fromfile="check_macroassembler_style.py declared syntax", + tofile="check_macroassembler_style.py found definitions", + ) ok = True for diffline in difflines: ok = False - print(diffline, end='') + print(diffline, end="") return ok def main(): ok = check_style() if ok: - print('TEST-PASS | check_macroassembler_style.py | ok') + print("TEST-PASS | check_macroassembler_style.py | ok") else: - print('TEST-UNEXPECTED-FAIL | check_macroassembler_style.py | actual output does not match expected output; diff is above') # noqa: E501 + print( + "TEST-UNEXPECTED-FAIL | check_macroassembler_style.py | actual output does not match expected output; diff is above" # noqa: E501 + ) sys.exit(0 if ok else 1) -if __name__ == '__main__': +if __name__ == "__main__": main()
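Almost all of the check_macroassembler_style.py churn is string-quote normalization: Black rewrites single-quoted literals to double quotes wherever that introduces no extra escaping. Assuming the black package, the rewrite is reproducible programmatically:

```python
# Reproducing the quote normalization seen above (assumes `black`).
import black

src = "archs = set(['x86', 'x64', 'arm', 'arm64'])\n"
print(black.format_str(src, mode=black.FileMode()), end="")
# Output: archs = set(["x86", "x64", "arm", "arm64"])
```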
--- a/js/src/devtools/rootAnalysis/t/testlib.py +++ b/js/src/devtools/rootAnalysis/t/testlib.py @@ -3,93 +3,106 @@ import os import re import subprocess from sixgill import Body from collections import defaultdict, namedtuple scriptdir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) -HazardSummary = namedtuple('HazardSummary', [ - 'function', - 'variable', - 'type', - 'GCFunction', - 'location']) +HazardSummary = namedtuple( + "HazardSummary", ["function", "variable", "type", "GCFunction", "location"] +) -Callgraph = namedtuple('Callgraph', [ - 'functionNames', - 'nameToId', - 'mangledToUnmangled', - 'unmangledToMangled', - 'calleesOf', - 'callersOf', - 'tags', - 'calleeGraph', - 'callerGraph']) +Callgraph = namedtuple( + "Callgraph", + [ + "functionNames", + "nameToId", + "mangledToUnmangled", + "unmangledToMangled", + "calleesOf", + "callersOf", + "tags", + "calleeGraph", + "callerGraph", + ], +) def equal(got, expected): if got != expected: print("Got '%s', expected '%s'" % (got, expected)) def extract_unmangled(func): - return func.split('$')[-1] + return func.split("$")[-1] class Test(object): def __init__(self, indir, outdir, cfg, verbose=0): self.indir = indir self.outdir = outdir self.cfg = cfg self.verbose = verbose def infile(self, path): return os.path.join(self.indir, path) def binpath(self, prog): return os.path.join(self.cfg.sixgill_bin, prog) - def compile(self, source, options=''): + def compile(self, source, options=""): env = os.environ - env['CCACHE_DISABLE'] = '1' + env["CCACHE_DISABLE"] = "1" cmd = "{CXX} -c {source} -O3 -std=c++11 -fplugin={sixgill} -fplugin-arg-xgill-mangle=1 {options}".format( # NOQA: E501 source=self.infile(source), - CXX=self.cfg.cxx, sixgill=self.cfg.sixgill_plugin, - options=options) + CXX=self.cfg.cxx, + sixgill=self.cfg.sixgill_plugin, + options=options, + ) if self.cfg.verbose: print("Running %s" % cmd) subprocess.check_call(["sh", "-c", cmd]) def load_db_entry(self, dbname, pattern): - '''Look up an entry from an XDB database file, 'pattern' may be an exact - matching string, or an re pattern object matching a single entry.''' + """Look up an entry from an XDB database file, 'pattern' may be an exact + matching string, or an re pattern object matching a single entry.""" - if hasattr(pattern, 'match'): - output = subprocess.check_output([self.binpath("xdbkeys"), dbname + ".xdb"], - universal_newlines=True) + if hasattr(pattern, "match"): + output = subprocess.check_output( + [self.binpath("xdbkeys"), dbname + ".xdb"], universal_newlines=True + ) matches = list(filter(lambda _: re.search(pattern, _), output.splitlines())) if len(matches) == 0: raise Exception("entry not found") if len(matches) > 1: raise Exception("multiple entries found") pattern = matches[0] - output = subprocess.check_output([self.binpath("xdbfind"), "-json", dbname + ".xdb", - pattern], - universal_newlines=True) + output = subprocess.check_output( + [self.binpath("xdbfind"), "-json", dbname + ".xdb", pattern], + universal_newlines=True, + ) return json.loads(output) def run_analysis_script(self, phase, upto=None): - open("defaults.py", "w").write('''\ + open("defaults.py", "w").write( + """\ analysis_scriptdir = '{scriptdir}' sixgill_bin = '{bindir}' -'''.format(scriptdir=scriptdir, bindir=self.cfg.sixgill_bin)) - cmd = [os.path.join(scriptdir, "analyze.py"), '-v' if self.verbose else '-q', phase] +""".format( + scriptdir=scriptdir, bindir=self.cfg.sixgill_bin + ) + ) + cmd = [ + os.path.join(scriptdir, "analyze.py"), + "-v" if self.verbose else "-q", + 
phase, + ] if upto: cmd += ["--upto", upto] cmd.append("--source=%s" % self.indir) cmd.append("--objdir=%s" % self.outdir) cmd.append("--js=%s" % self.cfg.js) if self.cfg.verbose: cmd.append("--verbose") print("Running " + " ".join(cmd)) @@ -102,40 +115,46 @@ sixgill_bin = '{bindir}' self.run_analysis_script("gcTypes") def load_text_file(self, filename, extract=lambda l: l): fullpath = os.path.join(self.outdir, filename) values = (extract(line.strip()) for line in open(fullpath, "r")) return list(filter(lambda _: _ is not None, values)) def load_suppressed_functions(self): - return set(self.load_text_file("limitedFunctions.lst", extract=lambda l: l.split(' ')[1])) + return set( + self.load_text_file( + "limitedFunctions.lst", extract=lambda l: l.split(" ")[1] + ) + ) def load_gcTypes(self): def grab_type(line): - m = re.match(r'^(GC\w+): (.*)', line) + m = re.match(r"^(GC\w+): (.*)", line) if m: - return (m.group(1) + 's', m.group(2)) + return (m.group(1) + "s", m.group(2)) return None gctypes = defaultdict(list) - for collection, typename in self.load_text_file('gcTypes.txt', extract=grab_type): + for collection, typename in self.load_text_file( + "gcTypes.txt", extract=grab_type + ): gctypes[collection].append(typename) return gctypes def load_typeInfo(self, filename="typeInfo.txt"): with open(os.path.join(self.outdir, filename)) as fh: return json.load(fh) def load_gcFunctions(self): - return self.load_text_file('gcFunctions.lst', extract=extract_unmangled) + return self.load_text_file("gcFunctions.lst", extract=extract_unmangled) def load_callgraph(self): data = Callgraph( - functionNames=['dummy'], + functionNames=["dummy"], nameToId={}, mangledToUnmangled={}, unmangledToMangled={}, calleesOf=defaultdict(list), callersOf=defaultdict(list), tags=defaultdict(set), calleeGraph=defaultdict(dict), callerGraph=defaultdict(dict), @@ -147,64 +166,66 @@ sixgill_bin = '{bindir}' def add_call(caller, callee, limit): data.calleesOf[caller].append(callee) data.callersOf[callee].append(caller) data.calleeGraph[caller][callee] = True data.callerGraph[callee][caller] = True def process(line): - if line.startswith('#'): + if line.startswith("#"): name = line.split(" ", 1)[1] data.nameToId[name] = len(data.functionNames) data.functionNames.append(name) return - if line.startswith('='): - m = re.match(r'^= (\d+) (.*)', line) + if line.startswith("="): + m = re.match(r"^= (\d+) (.*)", line) mangled = data.functionNames[int(m.group(1))] unmangled = m.group(2) data.nameToId[unmangled] = id data.mangledToUnmangled[mangled] = unmangled data.unmangledToMangled[unmangled] = mangled return limit = 0 - m = re.match(r'^\w (?:/(\d+))? ', line) + m = re.match(r"^\w (?:/(\d+))? ", line) if m: limit = int(m[1]) - tokens = line.split(' ') - if tokens[0] in ('D', 'R'): + tokens = line.split(" ") + if tokens[0] in ("D", "R"): _, caller, callee = tokens add_call(lookup(caller), lookup(callee), limit) - elif tokens[0] == 'T': - data.tags[tokens[1]].add(line.split(' ', 2)[2]) - elif tokens[0] in ('F', 'V'): - m = re.match(r'^[FV] (\d+) (\d+) CLASS (.*?) FIELD (.*)', line) + elif tokens[0] == "T": + data.tags[tokens[1]].add(line.split(" ", 2)[2]) + elif tokens[0] in ("F", "V"): + m = re.match(r"^[FV] (\d+) (\d+) CLASS (.*?) 
FIELD (.*)", line) caller, callee, csu, field = m.groups() add_call(lookup(caller), lookup(callee), limit) - elif tokens[0] == 'I': - m = re.match(r'^I (\d+) VARIABLE ([^\,]*)', line) + elif tokens[0] == "I": + m = re.match(r"^I (\d+) VARIABLE ([^\,]*)", line) pass - self.load_text_file('callgraph.txt', extract=process) + self.load_text_file("callgraph.txt", extract=process) return data def load_hazards(self): def grab_hazard(line): m = re.match( - r"Function '(.*?)' has unrooted '(.*?)' of type '(.*?)' live across GC call '(.*?)' at (.*)", line) # NOQA: E501 + r"Function '(.*?)' has unrooted '(.*?)' of type '(.*?)' live across GC call '(.*?)' at (.*)", # NOQA: E501 + line, + ) if m: info = list(m.groups()) info[0] = info[0].split("$")[-1] info[3] = info[3].split("$")[-1] return HazardSummary(*info) return None - return self.load_text_file('rootingHazards.txt', extract=grab_hazard) + return self.load_text_file("rootingHazards.txt", extract=grab_hazard) def process_body(self, body): return Body(body) def process_bodies(self, bodies): return [self.process_body(b) for b in bodies]
--- a/js/src/util/make_unicode.py +++ b/js/src/util/make_unicode.py @@ -30,63 +30,66 @@ from contextlib import closing from functools import partial from itertools import chain, tee from operator import is_not, itemgetter from zipfile import ZipFile if sys.version_info.major == 2: from itertools import ifilter as filter, imap as map, izip_longest as zip_longest from urllib2 import urlopen + range = xrange else: from itertools import zip_longest from urllib.request import urlopen class codepoint_dict(dict): def name(self, code_point): (_, _, name, alias) = self[code_point] - return '{}{}'.format(name, (' (' + alias + ')' if alias else '')) + return "{}{}".format(name, (" (" + alias + ")" if alias else "")) def full_name(self, code_point): (_, _, name, alias) = self[code_point] - return 'U+{:04X} {}{}'.format(code_point, name, (' (' + alias + ')' if alias else '')) + return "U+{:04X} {}{}".format( + code_point, name, (" (" + alias + ")" if alias else "") + ) # ECMAScript 2016 # §11.2 White Space whitespace = [ # python doesn't support using control character names :( 0x9, # CHARACTER TABULATION - 0xb, # LINE TABULATION - 0xc, # FORM FEED - ord(u'\N{SPACE}'), - ord(u'\N{NO-BREAK SPACE}'), - ord(u'\N{ZERO WIDTH NO-BREAK SPACE}'), # also BOM + 0xB, # LINE TABULATION + 0xC, # FORM FEED + ord("\N{SPACE}"), + ord("\N{NO-BREAK SPACE}"), + ord("\N{ZERO WIDTH NO-BREAK SPACE}"), # also BOM ] # §11.3 Line Terminators line_terminator = [ - 0xa, # LINE FEED - 0xd, # CARRIAGE RETURN - ord(u'\N{LINE SEPARATOR}'), - ord(u'\N{PARAGRAPH SEPARATOR}'), + 0xA, # LINE FEED + 0xD, # CARRIAGE RETURN + ord("\N{LINE SEPARATOR}"), + ord("\N{PARAGRAPH SEPARATOR}"), ] # These are also part of IdentifierPart §11.6 Names and Keywords compatibility_identifier_part = [ - ord(u'\N{ZERO WIDTH NON-JOINER}'), - ord(u'\N{ZERO WIDTH JOINER}'), + ord("\N{ZERO WIDTH NON-JOINER}"), + ord("\N{ZERO WIDTH JOINER}"), ] FLAG_SPACE = 1 << 0 FLAG_UNICODE_ID_START = 1 << 1 FLAG_UNICODE_ID_CONTINUE_ONLY = 1 << 2 -MAX_BMP = 0xffff +MAX_BMP = 0xFFFF public_domain = """ /* * Any copyright is dedicated to the Public Domain. 
* http://creativecommons.org/licenses/publicdomain/ */ """ @@ -104,94 +107,93 @@ warning_message = """\ unicode_version_message = """\ /* Unicode version: {0} */ """ def read_unicode_data(unicode_data): """ - If you want to understand how this wonderful file format works checkout - Unicode Standard Annex #44 - Unicode Character Database - http://www.unicode.org/reports/tr44/ + If you want to understand how this wonderful file format works checkout + Unicode Standard Annex #44 - Unicode Character Database + http://www.unicode.org/reports/tr44/ """ - reader = csv.reader(unicode_data, delimiter=str(';')) + reader = csv.reader(unicode_data, delimiter=str(";")) while True: row = next(reader, None) if row is None: return name = row[1] # We need to expand the UAX #44 4.2.3 Code Point Range - if name.startswith('<') and name.endswith('First>'): + if name.startswith("<") and name.endswith("First>"): next_row = next(reader) for i in range(int(row[0], 16), int(next_row[0], 16) + 1): row[0] = i row[1] = name[1:-8] yield row else: row[0] = int(row[0], 16) yield row def read_case_folding(case_folding): """ - File format is: - <code>; <status>; <mapping>; # <name> + File format is: + <code>; <status>; <mapping>; # <name> """ for line in case_folding: - if line == '\n' or line.startswith('#'): + if line == "\n" or line.startswith("#"): continue - row = line.split('; ') - if row[1] in ['F', 'T']: + row = line.split("; ") + if row[1] in ["F", "T"]: continue - assert row[1] in ['C', 'S'],\ - "expect either (C)ommon or (S)imple case foldings" + assert row[1] in ["C", "S"], "expect either (C)ommon or (S)imple case foldings" code = int(row[0], 16) mapping = int(row[2], 16) yield (code, mapping) def read_derived_core_properties(derived_core_properties): for line in derived_core_properties: - if line == '\n' or line.startswith('#'): + if line == "\n" or line.startswith("#"): continue - row = line.split('#')[0].split(';') + row = line.split("#")[0].split(";") char_range = row[0].strip() char_property = row[1].strip() - if '..' not in char_range: + if ".." not in char_range: yield (int(char_range, 16), char_property) else: - [start, end] = char_range.split('..') + [start, end] = char_range.split("..") for char in range(int(start, 16), int(end, 16) + 1): yield (char, char_property) def read_special_casing(special_casing): # Format: # <code>; <lower>; <title>; <upper>; (<condition_list>;)? # <comment> for line in special_casing: - if line == '\n' or line.startswith('#'): + if line == "\n" or line.startswith("#"): continue - row = line.split('#')[0].split(';') + row = line.split("#")[0].split(";") code = int(row[0].strip(), 16) lower = row[1].strip() - lower = [int(c, 16) for c in lower.split(' ')] if lower else [] + lower = [int(c, 16) for c in lower.split(" ")] if lower else [] upper = row[3].strip() - upper = [int(c, 16) for c in upper.split(' ')] if upper else [] + upper = [int(c, 16) for c in upper.split(" ")] if upper else [] languages = [] contexts = [] condition = row[4].strip() if condition: - for cond in condition.split(' '): + for cond in condition.split(" "): if cond[0].islower(): languages.append(cond) else: contexts.append(cond) pass yield (code, lower, upper, languages, contexts) @@ -220,62 +222,72 @@ def make_non_bmp_convert_macro(out_file, # Find continuous range in convert_map. 
convert_list = [] entry = None for code in sorted(convert_map.keys()): lead, trail = utf16_encode(code) converted = convert_map[code] diff = converted - code - if (entry and code == entry['code'] + entry['length'] and - diff == entry['diff'] and lead == entry['lead']): - entry['length'] += 1 + if ( + entry + and code == entry["code"] + entry["length"] + and diff == entry["diff"] + and lead == entry["lead"] + ): + entry["length"] += 1 continue entry = { - 'code': code, - 'diff': diff, - 'length': 1, - 'lead': lead, - 'trail': trail, + "code": code, + "diff": diff, + "length": 1, + "lead": lead, + "trail": trail, } convert_list.append(entry) # Generate macro call for each range. lines = [] comment = [] for entry in convert_list: - from_code = entry['code'] - to_code = entry['code'] + entry['length'] - 1 - diff = entry['diff'] + from_code = entry["code"] + to_code = entry["code"] + entry["length"] - 1 + diff = entry["diff"] - lead = entry['lead'] - from_trail = entry['trail'] - to_trail = entry['trail'] + entry['length'] - 1 + lead = entry["lead"] + from_trail = entry["trail"] + to_trail = entry["trail"] + entry["length"] - 1 - lines.append(' MACRO(0x{:x}, 0x{:x}, 0x{:x}, 0x{:x}, 0x{:x}, {:d})'.format( - from_code, to_code, lead, from_trail, to_trail, diff)) - comment.append('// {} .. {}'.format(codepoint_table.full_name(from_code), - codepoint_table.full_name(to_code))) + lines.append( + " MACRO(0x{:x}, 0x{:x}, 0x{:x}, 0x{:x}, 0x{:x}, {:d})".format( + from_code, to_code, lead, from_trail, to_trail, diff + ) + ) + comment.append( + "// {} .. {}".format( + codepoint_table.full_name(from_code), codepoint_table.full_name(to_code) + ) + ) - out_file.write('\n'.join(comment)) - out_file.write('\n') - out_file.write('#define FOR_EACH_NON_BMP_{}(MACRO) \\\n'.format(name)) - out_file.write(' \\\n'.join(lines)) - out_file.write('\n') + out_file.write("\n".join(comment)) + out_file.write("\n") + out_file.write("#define FOR_EACH_NON_BMP_{}(MACRO) \\\n".format(name)) + out_file.write(" \\\n".join(lines)) + out_file.write("\n") def process_derived_core_properties(derived_core_properties): id_start = set() id_continue = set() for (char, prop) in read_derived_core_properties(derived_core_properties): - if prop == 'ID_Start': + if prop == "ID_Start": id_start.add(char) - if prop == 'ID_Continue': + if prop == "ID_Continue": id_continue.add(char) return (id_start, id_continue) def process_unicode_data(unicode_data, derived_core_properties): dummy = (0, 0, 0) table = [dummy] @@ -313,31 +325,31 @@ def process_unicode_data(unicode_data, d codepoint_table[code] = (upper, lower, name, alias) if code > MAX_BMP: if code != lower: non_bmp_lower_map[code] = lower if code != upper: non_bmp_upper_map[code] = upper - if category == 'Zs': + if category == "Zs": non_bmp_space_set[code] = 1 test_space_table.append(code) if code in id_start: non_bmp_id_start_set[code] = 1 if code in id_continue: non_bmp_id_cont_set[code] = 1 continue assert lower <= MAX_BMP and upper <= MAX_BMP flags = 0 # we combine whitespace and lineterminators because in pratice we don't need them separated - if category == 'Zs' or code in whitespace or code in line_terminator: + if category == "Zs" or code in whitespace or code in line_terminator: flags |= FLAG_SPACE test_space_table.append(code) # §11.6 (IdentifierStart) if code in id_start: flags |= FLAG_UNICODE_ID_START # §11.6 (IdentifierPart) @@ -345,34 +357,38 @@ def process_unicode_data(unicode_data, d flags |= FLAG_UNICODE_ID_CONTINUE_ONLY up_d = upper - code low_d = lower - code assert up_d > 
-65535 and up_d < 65535 assert low_d > -65535 and low_d < 65535 - upper = up_d & 0xffff - lower = low_d & 0xffff + upper = up_d & 0xFFFF + lower = low_d & 0xFFFF item = (upper, lower, flags) i = cache.get(item) if i is None: assert item not in table cache[item] = i = len(table) table.append(item) index[code] = i return ( - table, index, - non_bmp_lower_map, non_bmp_upper_map, + table, + index, + non_bmp_lower_map, + non_bmp_upper_map, non_bmp_space_set, - non_bmp_id_start_set, non_bmp_id_cont_set, - codepoint_table, test_space_table, + non_bmp_id_start_set, + non_bmp_id_cont_set, + codepoint_table, + test_space_table, ) def process_case_folding(case_folding): folding_map = {} rev_folding_map = {} folding_dummy = (0,) folding_table = [folding_dummy] @@ -414,30 +430,27 @@ def process_case_folding(case_folding): if code > MAX_BMP: continue folding_d = folding - code assert folding_d > -65535 and folding_d < 65535 - folding = folding_d & 0xffff + folding = folding_d & 0xFFFF item = (folding,) i = folding_cache.get(item) if i is None: assert item not in folding_table folding_cache[item] = i = len(folding_table) folding_table.append(item) folding_index[code] = i - return ( - folding_table, folding_index, - folding_tests - ) + return (folding_table, folding_index, folding_tests) def process_special_casing(special_casing, table, index): # Unconditional special casing. unconditional_tolower = {} unconditional_toupper = {} # Conditional special casing, language independent. @@ -445,22 +458,26 @@ def process_special_casing(special_casin conditional_toupper = {} # Conditional special casing, language dependent. lang_conditional_tolower = {} lang_conditional_toupper = {} def caseInfo(code): (upper, lower, flags) = table[index[code]] - return ((code + lower) & 0xffff, (code + upper) & 0xffff) + return ((code + lower) & 0xFFFF, (code + upper) & 0xFFFF) - for (code, lower, upper, languages, contexts) in read_special_casing(special_casing): - assert code <= MAX_BMP, 'Unexpected character outside of BMP: %s' % code - assert len(languages) <= 1, 'Expected zero or one language ids: %s' % languages - assert len(contexts) <= 1, 'Expected zero or one casing contexts: %s' % languages + for (code, lower, upper, languages, contexts) in read_special_casing( + special_casing + ): + assert code <= MAX_BMP, "Unexpected character outside of BMP: %s" % code + assert len(languages) <= 1, "Expected zero or one language ids: %s" % languages + assert len(contexts) <= 1, ( + "Expected zero or one casing contexts: %s" % languages + ) (default_lower, default_upper) = caseInfo(code) special_lower = len(lower) != 1 or lower[0] != default_lower special_upper = len(upper) != 1 or upper[0] != default_upper # Invariant: If |code| has casing per UnicodeData.txt, then it also has # casing rules in SpecialCasing.txt. assert code == default_lower or len(lower) != 1 or code != lower[0] @@ -494,20 +511,20 @@ def process_special_casing(special_casin (lower, _) = caseInfo(code) return lower def upperCase(code): (_, upper) = caseInfo(code) return upper def ascii(char_dict): - return (ch for ch in char_dict.keys() if ch <= 0x7f) + return (ch for ch in char_dict.keys() if ch <= 0x7F) def latin1(char_dict): - return (ch for ch in char_dict.keys() if ch <= 0xff) + return (ch for ch in char_dict.keys() if ch <= 0xFF) def is_empty(iterable): return not any(True for _ in iterable) def is_equals(iter1, iter2): return all(x == y for (x, y) in zip_longest(iter1, iter2)) # Ensure no ASCII characters have special case mappings. 
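The caseInfo helper defined earlier in this hunk undoes the 16-bit delta packing used when the table was built: each entry stores mapped - code masked to 16 bits, and adding it back modulo 0x10000 recovers the mapping, even for negative deltas. A small self-contained sketch (function names are illustrative, not from the script):

def pack_delta(code, mapped):
    d = mapped - code
    assert -0xFFFF < d < 0xFFFF
    return d & 0xFFFF  # negative deltas wrap to two's-complement form

def unpack(code, delta):
    # Adding the wrapped delta and masking again recovers the mapping.
    return (code + delta) & 0xFFFF

assert unpack(0x0041, pack_delta(0x0041, 0x0061)) == 0x0061  # 'A' -> 'a', +0x20
assert unpack(0x00E0, pack_delta(0x00E0, 0x00C0)) == 0x00C0  # 'à' -> 'À', -0x20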
@@ -538,185 +555,240 @@ def process_special_casing(special_casin # Verify U+0130 and U+03A3 have simple lower case mappings. assert all(ch != lowerCase(ch) for ch in [0x0130, 0x03A3]) # Ensure Azeri, Lithuanian, and Turkish are the only languages with conditional case mappings. assert is_equals(["az", "lt", "tr"], sorted(lang_conditional_tolower.keys())) assert is_equals(["az", "lt", "tr"], sorted(lang_conditional_toupper.keys())) # Maximum case mapping length is three characters. - assert max(map(len, chain( - unconditional_tolower.values(), - unconditional_toupper.values(), - map(itemgetter(0), conditional_tolower.values()), - map(itemgetter(0), conditional_toupper.values()), - map(itemgetter(0), chain.from_iterable(d.values() - for d in lang_conditional_tolower.values())), - map(itemgetter(0), chain.from_iterable(d.values() - for d in lang_conditional_toupper.values())), - ))) <= 3 + assert ( + max( + map( + len, + chain( + unconditional_tolower.values(), + unconditional_toupper.values(), + map(itemgetter(0), conditional_tolower.values()), + map(itemgetter(0), conditional_toupper.values()), + map( + itemgetter(0), + chain.from_iterable( + d.values() for d in lang_conditional_tolower.values() + ), + ), + map( + itemgetter(0), + chain.from_iterable( + d.values() for d in lang_conditional_toupper.values() + ), + ), + ), + ) + ) + <= 3 + ) # Ensure all case mapping contexts are known (see Unicode 9.0, §3.13 Default Case Algorithms). - assert set([ - 'After_I', 'After_Soft_Dotted', 'Final_Sigma', 'More_Above', 'Not_Before_Dot', - ]).issuperset(set(filter(partial(is_not, None), chain( - map(itemgetter(1), conditional_tolower.values()), - map(itemgetter(1), conditional_toupper.values()), - map(itemgetter(1), chain.from_iterable(d.values() - for d in lang_conditional_tolower.values())), - map(itemgetter(1), chain.from_iterable(d.values() - for d in lang_conditional_toupper.values())), - )))) + assert set( + [ + "After_I", + "After_Soft_Dotted", + "Final_Sigma", + "More_Above", + "Not_Before_Dot", + ] + ).issuperset( + set( + filter( + partial(is_not, None), + chain( + map(itemgetter(1), conditional_tolower.values()), + map(itemgetter(1), conditional_toupper.values()), + map( + itemgetter(1), + chain.from_iterable( + d.values() for d in lang_conditional_tolower.values() + ), + ), + map( + itemgetter(1), + chain.from_iterable( + d.values() for d in lang_conditional_toupper.values() + ), + ), + ), + ) + ) + ) # Special casing for U+00DF (LATIN SMALL LETTER SHARP S). - assert upperCase(0x00DF) == 0x00DF and unconditional_toupper[0x00DF] == [0x0053, 0x0053] + assert upperCase(0x00DF) == 0x00DF and unconditional_toupper[0x00DF] == [ + 0x0053, + 0x0053, + ] # Special casing for U+0130 (LATIN CAPITAL LETTER I WITH DOT ABOVE). assert unconditional_tolower[0x0130] == [0x0069, 0x0307] # Special casing for U+03A3 (GREEK CAPITAL LETTER SIGMA). 
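The Final_Sigma rule asserted just below is context-dependent: U+03A3 GREEK CAPITAL LETTER SIGMA lowercases to U+03C3 in isolation but to U+03C2 (the final form) at the end of a word. CPython (3.3+) implements this context rule from Unicode §3.13 in str.lower(), which makes it a handy cross-check:

assert "\u03A3".lower() == "\u03C3"              # lone sigma: no Final_Sigma
assert "\u039F\u03A3".lower() == "\u03BF\u03C2"  # "ΟΣ" -> "ος", final form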
- assert lowerCase(0x03A3) == 0x03C3 and conditional_tolower[0x03A3] == ([0x03C2], 'Final_Sigma') + assert lowerCase(0x03A3) == 0x03C3 and conditional_tolower[0x03A3] == ( + [0x03C2], + "Final_Sigma", + ) return (unconditional_tolower, unconditional_toupper) -def make_non_bmp_file(version, - non_bmp_lower_map, non_bmp_upper_map, - codepoint_table): - file_name = 'UnicodeNonBMP.h' - with io.open(file_name, mode='w', encoding='utf-8') as non_bmp_file: +def make_non_bmp_file(version, non_bmp_lower_map, non_bmp_upper_map, codepoint_table): + file_name = "UnicodeNonBMP.h" + with io.open(file_name, mode="w", encoding="utf-8") as non_bmp_file: non_bmp_file.write(mpl_license) - non_bmp_file.write('\n') + non_bmp_file.write("\n") non_bmp_file.write(warning_message) non_bmp_file.write(unicode_version_message.format(version)) - non_bmp_file.write(""" + non_bmp_file.write( + """ #ifndef util_UnicodeNonBMP_h #define util_UnicodeNonBMP_h // |MACRO| receives the following arguments // MACRO(FROM, TO, LEAD, TRAIL_FROM, TRAIL_TO, DIFF) // FROM: code point where the range starts // TO: code point where the range ends // LEAD: common lead surrogate of FROM and TO // TRAIL_FROM: trail surrogate of FROM // TRAIL_FROM: trail surrogate of TO // DIFF: the difference between the code point in the range and // converted code point -""") +""" + ) - make_non_bmp_convert_macro(non_bmp_file, 'LOWERCASE', non_bmp_lower_map, codepoint_table) - non_bmp_file.write('\n') - make_non_bmp_convert_macro(non_bmp_file, 'UPPERCASE', non_bmp_upper_map, codepoint_table) + make_non_bmp_convert_macro( + non_bmp_file, "LOWERCASE", non_bmp_lower_map, codepoint_table + ) + non_bmp_file.write("\n") + make_non_bmp_convert_macro( + non_bmp_file, "UPPERCASE", non_bmp_upper_map, codepoint_table + ) - non_bmp_file.write(""" + non_bmp_file.write( + """ #endif /* util_UnicodeNonBMP_h */ -""") +""" + ) def write_special_casing_methods(unconditional_toupper, codepoint_table, println): def hexlit(n): """ Returns C++ hex-literal for |n|. """ - return '0x{:04X}'.format(n) + return "0x{:04X}".format(n) def describe_range(ranges, depth): - indent = depth * ' ' + indent = depth * " " for (start, end) in ranges: if start == end: - println(indent, '// {}'.format(codepoint_table.full_name(start))) + println(indent, "// {}".format(codepoint_table.full_name(start))) else: - println(indent, '// {} .. {}'.format(codepoint_table.full_name(start), - codepoint_table.full_name(end))) + println( + indent, + "// {} .. {}".format( + codepoint_table.full_name(start), codepoint_table.full_name(end) + ), + ) def out_range(start, end): """ Tests if the input character isn't a member of the set {x | start <= x <= end}. """ - if (start == end): - return 'ch != {}'.format(hexlit(start)) - return 'ch < {} || ch > {}'.format(hexlit(start), hexlit(end)) + if start == end: + return "ch != {}".format(hexlit(start)) + return "ch < {} || ch > {}".format(hexlit(start), hexlit(end)) def in_range(start, end, parenthesize=False): """ Tests if the input character is in the set {x | start <= x <= end}. 
""" - if (start == end): - return 'ch == {}'.format(hexlit(start)) - (left, right) = ('(', ')') if parenthesize else ('', '') - return '{}ch >= {} && ch <= {}{}'.format(left, hexlit(start), hexlit(end), right) + if start == end: + return "ch == {}".format(hexlit(start)) + (left, right) = ("(", ")") if parenthesize else ("", "") + return "{}ch >= {} && ch <= {}{}".format( + left, hexlit(start), hexlit(end), right + ) def in_any_range(ranges, spaces): """ Tests if the input character is included in any of the given ranges. """ lines = [[]] for (start, end) in ranges: expr = in_range(start, end, parenthesize=True) - line = ' || '.join(lines[-1] + [expr]) - if len(line) < (100 - len(spaces) - len(' ||')): + line = " || ".join(lines[-1] + [expr]) + if len(line) < (100 - len(spaces) - len(" ||")): lines[-1].append(expr) else: lines.append([expr]) - return ' ||\n{}'.format(spaces).join(' || '.join(t) for t in lines) + return " ||\n{}".format(spaces).join(" || ".join(t) for t in lines) def write_range_accept(parent_list, child_list, depth): """ Accepts the input character if it matches any code unit in |child_list|. """ (min_parent, max_parent) = (parent_list[0], parent_list[-1]) (min_child, max_child) = (child_list[0], child_list[-1]) assert min_child >= min_parent assert max_child <= max_parent - indent = depth * ' ' + indent = depth * " " child_ranges = list(int_ranges(child_list)) has_successor = max_child != max_parent # If |child_list| is a contiguous list of code units, emit a simple # range check: |min_child <= input <= max_child|. if len(child_ranges) == 1: describe_range(child_ranges, depth) if has_successor: - println(indent, 'if (ch <= {}) {{'.format(hexlit(max_child))) - println(indent, ' return ch >= {};'.format(hexlit(min_child))) - println(indent, '}') + println(indent, "if (ch <= {}) {{".format(hexlit(max_child))) + println(indent, " return ch >= {};".format(hexlit(min_child))) + println(indent, "}") else: - println(indent, 'return {};'.format(in_range(min_child, max_child))) + println(indent, "return {};".format(in_range(min_child, max_child))) return # Otherwise create a disjunction over the subranges in |child_ranges|. if not has_successor: - spaces = indent + len('return ') * ' ' + spaces = indent + len("return ") * " " else: - spaces = indent + len(' return ') * ' ' + spaces = indent + len(" return ") * " " range_test_expr = in_any_range(child_ranges, spaces) if min_child != min_parent: - println(indent, 'if (ch < {}) {{'.format(hexlit(min_child))) - println(indent, ' return false;') - println(indent, '}') + println(indent, "if (ch < {}) {{".format(hexlit(min_child))) + println(indent, " return false;") + println(indent, "}") # If there's no successor block, we can omit the |input <= max_child| check, # because it was already checked when we emitted the parent range test. if not has_successor: describe_range(child_ranges, depth) - println(indent, 'return {};'.format(range_test_expr)) + println(indent, "return {};".format(range_test_expr)) else: - println(indent, 'if (ch <= {}) {{'.format(hexlit(max_child))) + println(indent, "if (ch <= {}) {{".format(hexlit(max_child))) describe_range(child_ranges, depth + 1) - println(indent, ' return {};'.format(range_test_expr)) - println(indent, '}') + println(indent, " return {};".format(range_test_expr)) + println(indent, "}") def write_ChangesWhenUpperCasedSpecialCasing(): """ Checks if the input has a special upper case mapping. 
""" - println('bool') - println('js::unicode::ChangesWhenUpperCasedSpecialCasing(char16_t ch)') - println('{') + println("bool") + println("js::unicode::ChangesWhenUpperCasedSpecialCasing(char16_t ch)") + println("{") assert unconditional_toupper, "|unconditional_toupper| is not empty" # Sorted list of code units with special upper case mappings. code_list = sorted(unconditional_toupper.keys()) # Fail-fast if the input character isn't a special casing character. - println(' if ({}) {{'.format(out_range(code_list[0], code_list[-1]))) - println(' return false;') - println(' }') + println(" if ({}) {{".format(out_range(code_list[0], code_list[-1]))) + println(" return false;") + println(" }") for i in range(0, 16): # Check if the input characters is in the range: # |start_point <= input < end_point|. start_point = i << 12 end_point = (i + 1) << 12 matches = [cu for cu in code_list if start_point <= cu < end_point] @@ -731,271 +803,325 @@ def write_special_casing_methods(uncondi continue # Otherwise split into further subranges. # Only enter the if-block if the input is less-or-equals to the # largest value in the current range. is_last_block = matches[-1] == code_list[-1] if not is_last_block: - println(' if (ch <= {}) {{'.format(hexlit(matches[-1]))) + println(" if (ch <= {}) {{".format(hexlit(matches[-1]))) else: - println(' if (ch < {}) {{'.format(hexlit(matches[0]))) - println(' return false;') - println(' }') + println(" if (ch < {}) {{".format(hexlit(matches[0]))) + println(" return false;") + println(" }") for j in range(0, 16): inner_start = start_point + (j << 8) inner_end = start_point + ((j + 1) << 8) inner_matches = [cu for cu in matches if inner_start <= cu < inner_end] if inner_matches: d = 1 if is_last_block else 2 write_range_accept(matches, inner_matches, depth=d) if not is_last_block: - println(' }') + println(" }") - println('}') + println("}") def write_LengthUpperCaseSpecialCasing(): """ Slow case: Special casing character was found, returns its mapping length. """ - println('size_t') - println('js::unicode::LengthUpperCaseSpecialCasing(char16_t ch)') - println('{') + println("size_t") + println("js::unicode::LengthUpperCaseSpecialCasing(char16_t ch)") + println("{") - println(' switch(ch) {') - for (code, converted) in sorted(unconditional_toupper.items(), key=itemgetter(0)): - println(' case {}: return {}; // {}'.format(hexlit(code), len(converted), - codepoint_table.name(code))) - println(' }') - println('') + println(" switch(ch) {") + for (code, converted) in sorted( + unconditional_toupper.items(), key=itemgetter(0) + ): + println( + " case {}: return {}; // {}".format( + hexlit(code), len(converted), codepoint_table.name(code) + ) + ) + println(" }") + println("") println(' MOZ_ASSERT_UNREACHABLE("Bad character input.");') - println(' return 0;') + println(" return 0;") - println('}') + println("}") def write_AppendUpperCaseSpecialCasing(): """ Slow case: Special casing character was found, append its mapping characters. 
""" - println('void') - println('js::unicode::AppendUpperCaseSpecialCasing(char16_t ch, char16_t* elements, size_t* index)') # NOQA: E501 - println('{') + println("void") + println( + "js::unicode::AppendUpperCaseSpecialCasing(char16_t ch, char16_t* elements, size_t* index)" # NOQA: E501 + ) + println("{") - println(' switch(ch) {') - for (code, converted) in sorted(unconditional_toupper.items(), key=itemgetter(0)): - println(' case {}: // {}'.format(hexlit(code), codepoint_table.name(code))) + println(" switch(ch) {") + for (code, converted) in sorted( + unconditional_toupper.items(), key=itemgetter(0) + ): + println( + " case {}: // {}".format(hexlit(code), codepoint_table.name(code)) + ) for ch in converted: - println(' elements[(*index)++] = {}; // {}' - .format(hexlit(ch), - codepoint_table.name(ch))) - println(' return;') - println(' }') - println('') + println( + " elements[(*index)++] = {}; // {}".format( + hexlit(ch), codepoint_table.name(ch) + ) + ) + println(" return;") + println(" }") + println("") println(' MOZ_ASSERT_UNREACHABLE("Bad character input.");') - println('}') + println("}") write_ChangesWhenUpperCasedSpecialCasing() - println('') + println("") write_LengthUpperCaseSpecialCasing() - println('') + println("") write_AppendUpperCaseSpecialCasing() def write_ascii_lookup_tables(table, index, write, println): def is_id_compat(code): - return code == ord(u'\N{DOLLAR SIGN}') or code == ord(u'\N{LOW LINE}') + return code == ord("\N{DOLLAR SIGN}") or code == ord("\N{LOW LINE}") def is_id_start(code): (upper, lower, flags) = table[index[code]] return (flags & FLAG_UNICODE_ID_START) or is_id_compat(code) def is_id_continue(code): (upper, lower, flags) = table[index[code]] return (flags & FLAG_UNICODE_ID_CONTINUE_ONLY) or is_id_start(code) def is_space(code): (upper, lower, flags) = table[index[code]] return flags & FLAG_SPACE def write_entries(name, predicate): - println('const bool unicode::{}[] = {{'.format(name)) + println("const bool unicode::{}[] = {{".format(name)) header = "".join("{0: <6}".format(x) for x in range(0, 10)).rstrip() - println('/* {} */'.format(header)) + println("/* {} */".format(header)) for i in range(0, 13): - write('/* {0: >2} */'.format(i)) + write("/* {0: >2} */".format(i)) for j in range(0, 10): code = i * 10 + j - if (code <= 0x7f): - write(' {},'.format('true' if predicate(code) else '____')) - println('') - println('};') + if code <= 0x7F: + write(" {},".format("true" if predicate(code) else "____")) + println("") + println("};") - println('') - println('#define ____ false') + println("") + println("#define ____ false") - println(""" + println( + """ /* * Identifier start chars: * - 36: $ * - 65..90: A..Z * - 95: _ * - 97..122: a..z - */""") - write_entries('js_isidstart', is_id_start) + */""" + ) + write_entries("js_isidstart", is_id_start) - println(""" + println( + """ /* * Identifier chars: * - 36: $ * - 48..57: 0..9 * - 65..90: A..Z * - 95: _ * - 97..122: a..z - */""") - write_entries('js_isident', is_id_continue) + */""" + ) + write_entries("js_isident", is_id_continue) - println(""" -/* Whitespace chars: '\\t', '\\n', '\\v', '\\f', '\\r', ' '. */""") - write_entries('js_isspace', is_space) + println( + """ +/* Whitespace chars: '\\t', '\\n', '\\v', '\\f', '\\r', ' '. 
*/""" + ) + write_entries("js_isspace", is_space) - println('') - println('#undef ____') + println("") + println("#undef ____") def write_latin1_lookup_tables(table, index, write, println): def case_info(code): assert 0 <= code and code <= MAX_BMP (upper, lower, flags) = table[index[code]] - return ((code + upper) & 0xffff, (code + lower) & 0xffff, flags) + return ((code + upper) & 0xFFFF, (code + lower) & 0xFFFF, flags) def toLowerCase(code): (_, lower, _) = case_info(code) - assert lower <= 0xff, "lower-case of Latin-1 is always Latin-1" + assert lower <= 0xFF, "lower-case of Latin-1 is always Latin-1" return lower def write_entries(name, mapper): - println('const JS::Latin1Char unicode::{}[] = {{'.format(name)) + println("const JS::Latin1Char unicode::{}[] = {{".format(name)) header = "".join("{0: <6}".format(x) for x in range(0, 16)).rstrip() - println('/* {} */'.format(header)) + println("/* {} */".format(header)) for i in range(0, 16): - write('/* {0: >2} */'.format(i)) + write("/* {0: >2} */".format(i)) for j in range(0, 16): code = i * 16 + j - if (code <= 0xff): - write(' 0x{:02X},'.format(mapper(code))) - println('') - println('};') + if code <= 0xFF: + write(" 0x{:02X},".format(mapper(code))) + println("") + println("};") - println('') - write_entries('latin1ToLowerCaseTable', toLowerCase) + println("") + write_entries("latin1ToLowerCaseTable", toLowerCase) -def make_bmp_mapping_test(version, codepoint_table, unconditional_tolower, unconditional_toupper): +def make_bmp_mapping_test( + version, codepoint_table, unconditional_tolower, unconditional_toupper +): def unicodeEsc(n): - return '\\u{:04X}'.format(n) + return "\\u{:04X}".format(n) - file_name = '../tests/non262/String/string-upper-lower-mapping.js' - with io.open(file_name, mode='w', encoding='utf-8') as output: - write = partial(print, file=output, sep='', end='') - println = partial(print, file=output, sep='', end='\n') + file_name = "../tests/non262/String/string-upper-lower-mapping.js" + with io.open(file_name, mode="w", encoding="utf-8") as output: + write = partial(print, file=output, sep="", end="") + println = partial(print, file=output, sep="", end="\n") write(warning_message) write(unicode_version_message.format(version)) write(public_domain) - println('var mapping = [') + println("var mapping = [") for code in range(0, MAX_BMP + 1): entry = codepoint_table.get(code) if entry: (upper, lower, _, _) = entry - upper = unconditional_toupper[code] if code in unconditional_toupper else [upper] - lower = unconditional_tolower[code] if code in unconditional_tolower else [lower] - println(' ["{}", "{}"], /* {} */'.format("".join(map(unicodeEsc, upper)), - "".join(map(unicodeEsc, lower)), - codepoint_table.name(code))) + upper = ( + unconditional_toupper[code] + if code in unconditional_toupper + else [upper] + ) + lower = ( + unconditional_tolower[code] + if code in unconditional_tolower + else [lower] + ) + println( + ' ["{}", "{}"], /* {} */'.format( + "".join(map(unicodeEsc, upper)), + "".join(map(unicodeEsc, lower)), + codepoint_table.name(code), + ) + ) else: println(' ["{0}", "{0}"],'.format(unicodeEsc(code))) - println('];') - write(""" + println("];") + write( + """ assertEq(mapping.length, 0x10000); for (var i = 0; i <= 0xffff; i++) { var char = String.fromCharCode(i); var info = mapping[i]; assertEq(char.toUpperCase(), info[0]); assertEq(char.toLowerCase(), info[1]); } if (typeof reportCompare === "function") reportCompare(true, true); -""") +""" + ) -def make_non_bmp_mapping_test(version, non_bmp_upper_map, 
non_bmp_lower_map, codepoint_table): - file_name = '../tests/non262/String/string-code-point-upper-lower-mapping.js' - with io.open(file_name, mode='w', encoding='utf-8') as test_non_bmp_mapping: +def make_non_bmp_mapping_test( + version, non_bmp_upper_map, non_bmp_lower_map, codepoint_table +): + file_name = "../tests/non262/String/string-code-point-upper-lower-mapping.js" + with io.open(file_name, mode="w", encoding="utf-8") as test_non_bmp_mapping: test_non_bmp_mapping.write(warning_message) test_non_bmp_mapping.write(unicode_version_message.format(version)) test_non_bmp_mapping.write(public_domain) for code in sorted(non_bmp_upper_map.keys()): - test_non_bmp_mapping.write("""\ + test_non_bmp_mapping.write( + """\ assertEq(String.fromCodePoint(0x{:04X}).toUpperCase().codePointAt(0), 0x{:04X}); // {}, {} -""".format(code, non_bmp_upper_map[code], - codepoint_table.name(code), codepoint_table.name(non_bmp_upper_map[code]))) +""".format( + code, + non_bmp_upper_map[code], + codepoint_table.name(code), + codepoint_table.name(non_bmp_upper_map[code]), + ) + ) for code in sorted(non_bmp_lower_map.keys()): - test_non_bmp_mapping.write("""\ + test_non_bmp_mapping.write( + """\ assertEq(String.fromCodePoint(0x{:04X}).toLowerCase().codePointAt(0), 0x{:04X}); // {}, {} -""".format(code, non_bmp_lower_map[code], - codepoint_table.name(code), codepoint_table.name(non_bmp_lower_map[code]))) +""".format( + code, + non_bmp_lower_map[code], + codepoint_table.name(code), + codepoint_table.name(non_bmp_lower_map[code]), + ) + ) - test_non_bmp_mapping.write(""" + test_non_bmp_mapping.write( + """ if (typeof reportCompare === "function") reportCompare(true, true); -""") +""" + ) def make_space_test(version, test_space_table, codepoint_table): def hex_and_name(c): - return ' 0x{:04X} /* {} */'.format(c, codepoint_table.name(c)) + return " 0x{:04X} /* {} */".format(c, codepoint_table.name(c)) - file_name = '../tests/non262/String/string-space-trim.js' - with io.open(file_name, mode='w', encoding='utf-8') as test_space: + file_name = "../tests/non262/String/string-space-trim.js" + with io.open(file_name, mode="w", encoding="utf-8") as test_space: test_space.write(warning_message) test_space.write(unicode_version_message.format(version)) test_space.write(public_domain) - test_space.write('var onlySpace = String.fromCharCode(\n') - test_space.write(',\n'.join(map(hex_and_name, test_space_table))) - test_space.write('\n);\n') - test_space.write(""" + test_space.write("var onlySpace = String.fromCharCode(\n") + test_space.write(",\n".join(map(hex_and_name, test_space_table))) + test_space.write("\n);\n") + test_space.write( + """ assertEq(onlySpace.trim(), ""); assertEq((onlySpace + 'aaaa').trim(), 'aaaa'); assertEq(('aaaa' + onlySpace).trim(), 'aaaa'); assertEq((onlySpace + 'aaaa' + onlySpace).trim(), 'aaaa'); if (typeof reportCompare === "function") reportCompare(true, true); -""") +""" + ) def make_regexp_space_test(version, test_space_table, codepoint_table): def hex_and_name(c): - return ' 0x{:04X} /* {} */'.format(c, codepoint_table.name(c)) + return " 0x{:04X} /* {} */".format(c, codepoint_table.name(c)) - file_name = '../tests/non262/RegExp/character-class-escape-s.js' - with io.open(file_name, mode='w', encoding='utf-8') as test_space: + file_name = "../tests/non262/RegExp/character-class-escape-s.js" + with io.open(file_name, mode="w", encoding="utf-8") as test_space: test_space.write(warning_message) test_space.write(unicode_version_message.format(version)) test_space.write(public_domain) - 
test_space.write('var onlySpace = String.fromCodePoint(\n') - test_space.write(',\n'.join(map(hex_and_name, test_space_table))) - test_space.write('\n);\n') - test_space.write(""" + test_space.write("var onlySpace = String.fromCodePoint(\n") + test_space.write(",\n".join(map(hex_and_name, test_space_table))) + test_space.write("\n);\n") + test_space.write( + """ assertEq(/^\s+$/.exec(onlySpace) !== null, true); assertEq(/^[\s]+$/.exec(onlySpace) !== null, true); assertEq(/^[^\s]+$/.exec(onlySpace) === null, true); assertEq(/^\S+$/.exec(onlySpace) === null, true); assertEq(/^[\S]+$/.exec(onlySpace) === null, true); assertEq(/^[^\S]+$/.exec(onlySpace) !== null, true); @@ -1005,59 +1131,72 @@ assertEq(/^[\s]+$/u.exec(onlySpace) !== assertEq(/^[^\s]+$/u.exec(onlySpace) === null, true); assertEq(/^\S+$/u.exec(onlySpace) === null, true); assertEq(/^[\S]+$/u.exec(onlySpace) === null, true); assertEq(/^[^\S]+$/u.exec(onlySpace) !== null, true); if (typeof reportCompare === "function") reportCompare(true, true); -""") +""" + ) def make_icase_test(version, folding_tests, codepoint_table): def char_hex(c): - return '0x{:04X}'.format(c) + return "0x{:04X}".format(c) - file_name = '../tests/non262/RegExp/unicode-ignoreCase.js' - with io.open(file_name, mode='w', encoding='utf-8') as test_icase: + file_name = "../tests/non262/RegExp/unicode-ignoreCase.js" + with io.open(file_name, mode="w", encoding="utf-8") as test_icase: test_icase.write(warning_message) test_icase.write(unicode_version_message.format(version)) test_icase.write(public_domain) - test_icase.write(""" + test_icase.write( + """ var BUGNUMBER = 1135377; var summary = "Implement RegExp unicode flag -- ignoreCase flag."; print(BUGNUMBER + ": " + summary); function test(code, ...equivs) { var codeRe = new RegExp(String.fromCodePoint(code) + "+", "iu"); var ans = String.fromCodePoint(code) + equivs.map(c => String.fromCodePoint(c)).join(""); assertEqArray(codeRe.exec("<" + ans + ">"), [ans]); codeRe = new RegExp("[" + String.fromCodePoint(code) + "]+", "iu"); assertEqArray(codeRe.exec("<" + ans + ">"), [ans]); } -""") +""" + ) for args in folding_tests: - test_icase.write('test({}); // {}\n'.format(', '.join(map(char_hex, args)), - ', '.join(map(codepoint_table.name, - args)))) - test_icase.write(""" + test_icase.write( + "test({}); // {}\n".format( + ", ".join(map(char_hex, args)), + ", ".join(map(codepoint_table.name, args)), + ) + ) + test_icase.write( + """ if (typeof reportCompare === "function") reportCompare(true, true); -""") +""" + ) -def make_unicode_file(version, - table, index, - folding_table, folding_index, - non_bmp_space_set, - non_bmp_id_start_set, non_bmp_id_cont_set, - unconditional_toupper, - codepoint_table): +def make_unicode_file( + version, + table, + index, + folding_table, + folding_index, + non_bmp_space_set, + non_bmp_id_start_set, + non_bmp_id_cont_set, + unconditional_toupper, + codepoint_table, +): index1, index2, shift = splitbins(index) # Don't forget to update CharInfo in Unicode.h if you need to change this assert shift == 6 folding_index1, folding_index2, folding_shift = splitbins(folding_index) # Don't forget to update CaseFoldInfo in Unicode.h if you need to change this @@ -1072,17 +1211,19 @@ def make_unicode_file(version, assert test == table[idx] # verify correctness for char in folding_index: test = folding_table[folding_index[char]] idx = folding_index1[char >> folding_shift] - idx = folding_index2[(idx << folding_shift) + (char & ((1 << folding_shift) - 1))] + idx = folding_index2[ + (idx << 
folding_shift) + (char & ((1 << folding_shift) - 1)) + ] assert test == folding_table[idx] comment = """ /* * So how does indexing work? * First let's have a look at a char16_t, 16-bits: * [................] @@ -1124,112 +1265,127 @@ def make_unicode_file(version, * push index >> shift to index1 * * increase shift * stop if you found the best shift */ """ def dump(data, name, println): - println('const uint8_t unicode::{}[] = {{'.format(name)) + println("const uint8_t unicode::{}[] = {{".format(name)) - line = pad = ' ' * 4 + line = pad = " " * 4 lines = [] for entry in data: assert entry < 256 s = str(entry) s = s.rjust(3) if len(line + s) + 5 > 99: lines.append(line.rstrip()) - line = pad + s + ', ' + line = pad + s + ", " else: - line = line + s + ', ' + line = line + s + ", " lines.append(line.rstrip()) - println('\n'.join(lines)) - println('};') + println("\n".join(lines)) + println("};") def write_table(data_type, name, tbl, idx1_name, idx1, idx2_name, idx2, println): - println('const {} unicode::{}[] = {{'.format(data_type, name)) + println("const {} unicode::{}[] = {{".format(data_type, name)) for d in tbl: - println(' {{ {} }},'.format(', '.join(str(e) for e in d))) - println('};') - println('') + println(" {{ {} }},".format(", ".join(str(e) for e in d))) + println("};") + println("") dump(idx1, idx1_name, println) - println('') + println("") dump(idx2, idx2_name, println) - println('') + println("") def write_supplemental_identifier_method(name, group_set, println): - println('bool') - println('js::unicode::{}(uint32_t codePoint)'.format(name)) - println('{') + println("bool") + println("js::unicode::{}(uint32_t codePoint)".format(name)) + println("{") for (from_code, to_code) in int_ranges(group_set.keys()): - println(' if (codePoint >= 0x{:X} && codePoint <= 0x{:X}) {{ // {} .. {}' - .format(from_code, - to_code, - codepoint_table.name(from_code), - codepoint_table.name(to_code))) - println(' return true;') - println(' }') - println(' return false;') - println('}') - println('') + println( + " if (codePoint >= 0x{:X} && codePoint <= 0x{:X}) {{ // {} .. 
{}".format( + from_code, + to_code, + codepoint_table.name(from_code), + codepoint_table.name(to_code), + ) + ) + println(" return true;") + println(" }") + println(" return false;") + println("}") + println("") - file_name = 'Unicode.cpp' - with io.open(file_name, 'w', encoding='utf-8') as data_file: - write = partial(print, file=data_file, sep='', end='') - println = partial(print, file=data_file, sep='', end='\n') + file_name = "Unicode.cpp" + with io.open(file_name, "w", encoding="utf-8") as data_file: + write = partial(print, file=data_file, sep="", end="") + println = partial(print, file=data_file, sep="", end="\n") write(warning_message) write(unicode_version_message.format(version)) write(public_domain) println('#include "util/Unicode.h"') - println('') - println('using namespace js;') - println('using namespace js::unicode;') + println("") + println("using namespace js;") + println("using namespace js::unicode;") write(comment) - write_table('CharacterInfo', - 'js_charinfo', table, - 'index1', index1, - 'index2', index2, - println) + write_table( + "CharacterInfo", + "js_charinfo", + table, + "index1", + index1, + "index2", + index2, + println, + ) - write_table('FoldingInfo', - 'js_foldinfo', folding_table, - 'folding_index1', folding_index1, - 'folding_index2', folding_index2, - println) + write_table( + "FoldingInfo", + "js_foldinfo", + folding_table, + "folding_index1", + folding_index1, + "folding_index2", + folding_index2, + println, + ) # If the following assert fails, it means space character is added to # non-BMP area. In that case the following code should be uncommented # and the corresponding code should be added to frontend. (At least # unicode::IsSpace will require updating to handle this.) assert len(non_bmp_space_set.keys()) == 0 - write_supplemental_identifier_method('IsIdentifierStartNonBMP', non_bmp_id_start_set, - println) + write_supplemental_identifier_method( + "IsIdentifierStartNonBMP", non_bmp_id_start_set, println + ) - write_supplemental_identifier_method('IsIdentifierPartNonBMP', non_bmp_id_cont_set, - println) + write_supplemental_identifier_method( + "IsIdentifierPartNonBMP", non_bmp_id_cont_set, println + ) write_special_casing_methods(unconditional_toupper, codepoint_table, println) write_ascii_lookup_tables(table, index, write, println) write_latin1_lookup_tables(table, index, write, println) def getsize(data): """ return smallest possible integer size for the given array """ maxdata = max(data) - assert maxdata < 2**32 + assert maxdata < 2 ** 32 if maxdata < 256: return 1 elif maxdata < 65536: return 2 else: return 4 @@ -1241,157 +1397,176 @@ def splitbins(t): many of the ints are the same. t1 and t2 are lists of ints, and shift is an int, chosen to minimize the combined size of t1 and t2 (in C code), and where for each i in range(len(t)), t[i] == t2[(t1[i >> shift] << shift) + (i & mask)] where mask is a bitmask isolating the last "shift" bits. 
""" def dump(t1, t2, shift, bytes): - print("%d+%d bins at shift %d; %d bytes" % ( - len(t1), len(t2), shift, bytes), file=sys.stderr) - print("Size of original table:", len(t)*getsize(t), - "bytes", file=sys.stderr) - n = len(t)-1 # last valid index - maxshift = 0 # the most we can shift n and still have something left + print( + "%d+%d bins at shift %d; %d bytes" % (len(t1), len(t2), shift, bytes), + file=sys.stderr, + ) + print("Size of original table:", len(t) * getsize(t), "bytes", file=sys.stderr) + + n = len(t) - 1 # last valid index + maxshift = 0 # the most we can shift n and still have something left if n > 0: while n >> 1: n >>= 1 maxshift += 1 del n bytes = sys.maxsize # smallest total size so far - t = tuple(t) # so slices can be dict keys + t = tuple(t) # so slices can be dict keys for shift in range(maxshift + 1): t1 = [] t2 = [] - size = 2**shift + size = 2 ** shift bincache = {} for i in range(0, len(t), size): - bin = t[i:i + size] + bin = t[i : i + size] index = bincache.get(bin) if index is None: index = len(t2) bincache[bin] = index t2.extend(bin) t1.append(index >> shift) # determine memory size b = len(t1) * getsize(t1) + len(t2) * getsize(t2) if b < bytes: best = t1, t2, shift bytes = b t1, t2, shift = best - print("Best:", end=' ', file=sys.stderr) + print("Best:", end=" ", file=sys.stderr) dump(t1, t2, shift, bytes) # exhaustively verify that the decomposition is correct - mask = 2**shift - 1 + mask = 2 ** shift - 1 for i in range(len(t)): assert t[i] == t2[(t1[i >> shift] << shift) + (i & mask)] return best def update_unicode(args): base_path = os.getcwd() version = args.version if version is not None: - baseurl = 'https://unicode.org/Public' - if version == 'UNIDATA': - url = '%s/%s' % (baseurl, version) + baseurl = "https://unicode.org/Public" + if version == "UNIDATA": + url = "%s/%s" % (baseurl, version) else: - url = '%s/%s/ucd' % (baseurl, version) + url = "%s/%s/ucd" % (baseurl, version) - print('Arguments:') + print("Arguments:") if version is not None: - print('\tVersion: %s' % version) - print('\tDownload url: %s' % url) + print("\tVersion: %s" % version) + print("\tDownload url: %s" % url) - request_url = '{}/UCD.zip'.format(url) + request_url = "{}/UCD.zip".format(url) with closing(urlopen(request_url)) as downloaded_file: downloaded_data = io.BytesIO(downloaded_file.read()) with ZipFile(downloaded_data) as zip_file: - for fname in ['UnicodeData.txt', - 'CaseFolding.txt', - 'DerivedCoreProperties.txt', - 'SpecialCasing.txt']: + for fname in [ + "UnicodeData.txt", + "CaseFolding.txt", + "DerivedCoreProperties.txt", + "SpecialCasing.txt", + ]: zip_file.extract(fname, path=base_path) else: - print('\tUsing local files.') - print('\tAlways make sure you have the newest Unicode files!') - print('') + print("\tUsing local files.") + print("\tAlways make sure you have the newest Unicode files!") + print("") def version_from_file(f, fname): pat_version = re.compile(r"# %s-(?P<version>\d+\.\d+\.\d+).txt" % fname) return pat_version.match(f.readline()).group("version") - with io.open(os.path.join(base_path, 'UnicodeData.txt'), - 'r', encoding='utf-8') as unicode_data, \ - io.open(os.path.join(base_path, 'CaseFolding.txt'), - 'r', encoding='utf-8') as case_folding, \ - io.open(os.path.join(base_path, 'DerivedCoreProperties.txt'), - 'r', encoding='utf-8') as derived_core_properties, \ - io.open(os.path.join(base_path, 'SpecialCasing.txt'), - 'r', encoding='utf-8') as special_casing: - unicode_version = version_from_file(derived_core_properties, 
'DerivedCoreProperties') + with io.open( + os.path.join(base_path, "UnicodeData.txt"), "r", encoding="utf-8" + ) as unicode_data, io.open( + os.path.join(base_path, "CaseFolding.txt"), "r", encoding="utf-8" + ) as case_folding, io.open( + os.path.join(base_path, "DerivedCoreProperties.txt"), "r", encoding="utf-8" + ) as derived_core_properties, io.open( + os.path.join(base_path, "SpecialCasing.txt"), "r", encoding="utf-8" + ) as special_casing: + unicode_version = version_from_file( + derived_core_properties, "DerivedCoreProperties" + ) - print('Processing...') - ( - table, index, - non_bmp_lower_map, non_bmp_upper_map, - non_bmp_space_set, - non_bmp_id_start_set, non_bmp_id_cont_set, - codepoint_table, test_space_table - ) = process_unicode_data(unicode_data, derived_core_properties) + print("Processing...") ( - folding_table, folding_index, - folding_tests - ) = process_case_folding(case_folding) - ( - unconditional_tolower, unconditional_toupper - ) = process_special_casing(special_casing, table, index) + table, + index, + non_bmp_lower_map, + non_bmp_upper_map, + non_bmp_space_set, + non_bmp_id_start_set, + non_bmp_id_cont_set, + codepoint_table, + test_space_table, + ) = process_unicode_data(unicode_data, derived_core_properties) + (folding_table, folding_index, folding_tests) = process_case_folding( + case_folding + ) + (unconditional_tolower, unconditional_toupper) = process_special_casing( + special_casing, table, index + ) - print('Generating...') - make_unicode_file(unicode_version, - table, index, - folding_table, folding_index, - non_bmp_space_set, - non_bmp_id_start_set, non_bmp_id_cont_set, - unconditional_toupper, - codepoint_table) - make_non_bmp_file(unicode_version, - non_bmp_lower_map, non_bmp_upper_map, - codepoint_table) + print("Generating...") + make_unicode_file( + unicode_version, + table, + index, + folding_table, + folding_index, + non_bmp_space_set, + non_bmp_id_start_set, + non_bmp_id_cont_set, + unconditional_toupper, + codepoint_table, + ) + make_non_bmp_file( + unicode_version, non_bmp_lower_map, non_bmp_upper_map, codepoint_table + ) - make_bmp_mapping_test(unicode_version, - codepoint_table, unconditional_tolower, unconditional_toupper) - make_non_bmp_mapping_test(unicode_version, non_bmp_upper_map, - non_bmp_lower_map, codepoint_table) + make_bmp_mapping_test( + unicode_version, codepoint_table, unconditional_tolower, unconditional_toupper + ) + make_non_bmp_mapping_test( + unicode_version, non_bmp_upper_map, non_bmp_lower_map, codepoint_table + ) make_space_test(unicode_version, test_space_table, codepoint_table) make_regexp_space_test(unicode_version, test_space_table, codepoint_table) make_icase_test(unicode_version, folding_tests, codepoint_table) -if __name__ == '__main__': +if __name__ == "__main__": import argparse # This script must be run from js/src/util to work correctly. - if '/'.join(os.path.normpath(os.getcwd()).split(os.sep)[-3:]) != 'js/src/util': - raise RuntimeError('%s must be run from js/src/util' % sys.argv[0]) + if "/".join(os.path.normpath(os.getcwd()).split(os.sep)[-3:]) != "js/src/util": + raise RuntimeError("%s must be run from js/src/util" % sys.argv[0]) - parser = argparse.ArgumentParser(description='Update Unicode data.') + parser = argparse.ArgumentParser(description="Update Unicode data.") - parser.add_argument('--version', - help='Optional Unicode version number. If specified, downloads the\ + parser.add_argument( + "--version", + help='Optional Unicode version number. 
If specified, downloads the\ selected version from <https://unicode.org/Public>. If not specified\ uses the existing local files to generate the Unicode data. The\ number must match a published Unicode version, e.g. use\ "--version=8.0.0" to download Unicode 8 files. Alternatively use\ - "--version=UNIDATA" to download the latest published version.') + "--version=UNIDATA" to download the latest published version.', + ) parser.set_defaults(func=update_unicode) args = parser.parse_args() args.func(args)
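splitbins above exhaustively tries every shift and keeps the cheapest two-level split; the invariant it verifies at the end, t[i] == t2[(t1[i >> shift] << shift) + (i & mask)], is easiest to see on a hand-built example. A sketch with toy data and a fixed shift (the real function also searches for the best shift):

t = [7, 7, 7, 7, 1, 2, 3, 4, 7, 7, 7, 7]
shift = 2
size = 1 << shift
bincache, t1, t2 = {}, [], []
for i in range(0, len(t), size):
    blk = tuple(t[i:i + size])
    if blk not in bincache:          # identical blocks are stored once
        bincache[blk] = len(t2) >> shift
        t2.extend(blk)
    t1.append(bincache[blk])
mask = size - 1
assert all(t[i] == t2[(t1[i >> shift] << shift) + (i & mask)]
           for i in range(len(t)))
print(t1, t2)  # [0, 1, 0] [7, 7, 7, 7, 1, 2, 3, 4]

The repeated all-7 block is deduplicated, which is where the space savings for the sparse Unicode tables come from.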
--- a/mobile/android/mach_commands.py +++ b/mobile/android/mach_commands.py @@ -46,249 +46,361 @@ def REMOVED(cls): See https://developer.mozilla.org/en-US/docs/Simple_Firefox_for_Android_build#Developing_Firefox_for_Android_in_Android_Studio_or_IDEA_IntelliJ. # NOQA: E501 """ return False @CommandProvider class MachCommands(MachCommandBase): - @Command('android', category='devenv', - description='Run Android-specific commands.', - conditions=[conditions.is_android]) + @Command( + "android", + category="devenv", + description="Run Android-specific commands.", + conditions=[conditions.is_android], + ) def android(self): pass - @SubCommand('android', 'assemble-app', - """Assemble Firefox for Android. - See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""") # NOQA: E501 - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", + "assemble-app", + """Assemble Firefox for Android. + See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""", # NOQA: E501 + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_assemble_app(self, args): - ret = self.gradle(self.substs['GRADLE_ANDROID_APP_TASKS'] + - ['-x', 'lint'] + args, verbose=True) + ret = self.gradle( + self.substs["GRADLE_ANDROID_APP_TASKS"] + ["-x", "lint"] + args, + verbose=True, + ) return ret - @SubCommand('android', 'generate-sdk-bindings', - """Generate SDK bindings used when building GeckoView.""") - @CommandArgument('inputs', nargs='+', help='config files, ' - 'like [/path/to/ClassName-classes.txt]+') - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", + "generate-sdk-bindings", + """Generate SDK bindings used when building GeckoView.""", + ) + @CommandArgument( + "inputs", + nargs="+", + help="config files, " "like [/path/to/ClassName-classes.txt]+", + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_generate_sdk_bindings(self, inputs, args): import itertools def stem(input): # Turn "/path/to/ClassName-classes.txt" into "ClassName". 
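stem() and the itertools.chain call below flatten each config path and its derived class name into one alternating list, which Gradle then receives as a single semicolon-joined project property. A quick illustration (the file paths here are made up):

import itertools
import os

def stem(path):
    # "/path/to/ClassName-classes.txt" -> "ClassName"
    return os.path.basename(path).rsplit("-classes.txt", 1)[0]

inputs = ["/cfg/GeckoSession-classes.txt", "/cfg/WebExtension-classes.txt"]
flat = list(itertools.chain(*((p, stem(p)) for p in inputs)))
print("-Pgenerate_sdk_bindings_args=" + ";".join(flat))
# -Pgenerate_sdk_bindings_args=/cfg/GeckoSession-classes.txt;GeckoSession;\
#   /cfg/WebExtension-classes.txt;WebExtension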
- return os.path.basename(input).rsplit('-classes.txt', 1)[0] + return os.path.basename(input).rsplit("-classes.txt", 1)[0] - bindings_inputs = list(itertools.chain(*((input, stem(input)) for input in inputs))) - bindings_args = '-Pgenerate_sdk_bindings_args={}'.format(';'.join(bindings_inputs)) + bindings_inputs = list( + itertools.chain(*((input, stem(input)) for input in inputs)) + ) + bindings_args = "-Pgenerate_sdk_bindings_args={}".format( + ";".join(bindings_inputs) + ) ret = self.gradle( - self.substs['GRADLE_ANDROID_GENERATE_SDK_BINDINGS_TASKS'] + [bindings_args] + args, - verbose=True) + self.substs["GRADLE_ANDROID_GENERATE_SDK_BINDINGS_TASKS"] + + [bindings_args] + + args, + verbose=True, + ) return ret - @SubCommand('android', 'generate-generated-jni-wrappers', - """Generate GeckoView JNI wrappers used when building GeckoView.""") - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", + "generate-generated-jni-wrappers", + """Generate GeckoView JNI wrappers used when building GeckoView.""", + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_generate_generated_jni_wrappers(self, args): ret = self.gradle( - self.substs['GRADLE_ANDROID_GENERATE_GENERATED_JNI_WRAPPERS_TASKS'] + args, - verbose=True) + self.substs["GRADLE_ANDROID_GENERATE_GENERATED_JNI_WRAPPERS_TASKS"] + args, + verbose=True, + ) return ret - @SubCommand('android', 'api-lint', - """Run Android api-lint. -REMOVED/DEPRECATED: Use 'mach lint --linter android-api-lint'.""") + @SubCommand( + "android", + "api-lint", + """Run Android api-lint. +REMOVED/DEPRECATED: Use 'mach lint --linter android-api-lint'.""", + ) def android_apilint_REMOVED(self): print(LINT_DEPRECATION_MESSAGE) return 1 - @SubCommand('android', 'test', - """Run Android test. -REMOVED/DEPRECATED: Use 'mach lint --linter android-test'.""") + @SubCommand( + "android", + "test", + """Run Android test. +REMOVED/DEPRECATED: Use 'mach lint --linter android-test'.""", + ) def android_test_REMOVED(self): print(LINT_DEPRECATION_MESSAGE) return 1 - @SubCommand('android', 'lint', - """Run Android lint. -REMOVED/DEPRECATED: Use 'mach lint --linter android-lint'.""") + @SubCommand( + "android", + "lint", + """Run Android lint. +REMOVED/DEPRECATED: Use 'mach lint --linter android-lint'.""", + ) def android_lint_REMOVED(self): print(LINT_DEPRECATION_MESSAGE) return 1 - @SubCommand('android', 'checkstyle', - """Run Android checkstyle. -REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""") + @SubCommand( + "android", + "checkstyle", + """Run Android checkstyle. +REMOVED/DEPRECATED: Use 'mach lint --linter android-checkstyle'.""", + ) def android_checkstyle_REMOVED(self): print(LINT_DEPRECATION_MESSAGE) return 1 - @SubCommand('android', 'gradle-dependencies', - """Collect Android Gradle dependencies. - See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""") # NOQA: E501 - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", + "gradle-dependencies", + """Collect Android Gradle dependencies. + See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""", # NOQA: E501 + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_gradle_dependencies(self, args): # We don't want to gate producing dependency archives on clean # lint or checkstyle, particularly because toolchain versions # can change the outputs for those processes. 
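Hence the call below passes Gradle's --continue flag (attempt every requested task instead of stopping at the first failure) and returns 0 regardless of the outcome. Outside mach, the equivalent would be roughly the following sketch (the task name is invented for illustration):

import subprocess

# "--continue" makes Gradle run as many tasks as possible even after a
# failure; check=False mirrors the command's decision to ignore the exit
# status rather than abort on flaky lint or checkstyle output.
subprocess.run(["./gradlew", "downloadDependencies", "--continue"], check=False)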
- self.gradle(self.substs['GRADLE_ANDROID_DEPENDENCIES_TASKS'] + - ["--continue"] + args, verbose=True) + self.gradle( + self.substs["GRADLE_ANDROID_DEPENDENCIES_TASKS"] + ["--continue"] + args, + verbose=True, + ) return 0 - @SubCommand('android', 'archive-geckoview', - """Create GeckoView archives. - See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""") # NOQA: E501 - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", + "archive-geckoview", + """Create GeckoView archives. + See http://firefox-source-docs.mozilla.org/build/buildsystem/toolchains.html#firefox-for-android-with-gradle""", # NOQA: E501 + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_archive_geckoview(self, args): ret = self.gradle( - self.substs['GRADLE_ANDROID_ARCHIVE_GECKOVIEW_TASKS'] + args, - verbose=True) + self.substs["GRADLE_ANDROID_ARCHIVE_GECKOVIEW_TASKS"] + args, verbose=True + ) return ret - @SubCommand('android', 'build-geckoview_example', - """Build geckoview_example """) - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand("android", "build-geckoview_example", """Build geckoview_example """) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_build_geckoview_example(self, args): - self.gradle(self.substs['GRADLE_ANDROID_BUILD_GECKOVIEW_EXAMPLE_TASKS'] + args, - verbose=True) + self.gradle( + self.substs["GRADLE_ANDROID_BUILD_GECKOVIEW_EXAMPLE_TASKS"] + args, + verbose=True, + ) - print('Execute `mach android install-geckoview_example` ' - 'to push the geckoview_example and test APKs to a device.') + print( + "Execute `mach android install-geckoview_example` " + "to push the geckoview_example and test APKs to a device." + ) return 0 - @SubCommand('android', 'install-geckoview_example', - """Install geckoview_example """) - @CommandArgument('args', nargs=argparse.REMAINDER) + @SubCommand( + "android", "install-geckoview_example", """Install geckoview_example """ + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def android_install_geckoview_example(self, args): - self.gradle(self.substs['GRADLE_ANDROID_INSTALL_GECKOVIEW_EXAMPLE_TASKS'] + args, - verbose=True) + self.gradle( + self.substs["GRADLE_ANDROID_INSTALL_GECKOVIEW_EXAMPLE_TASKS"] + args, + verbose=True, + ) - print('Execute `mach android build-geckoview_example` ' - 'to just build the geckoview_example and test APKs.') + print( + "Execute `mach android build-geckoview_example` " + "to just build the geckoview_example and test APKs." 
+ ) return 0 - @SubCommand('android', 'geckoview-docs', - """Create GeckoView javadoc and optionally upload to Github""") - @CommandArgument('--archive', action='store_true', - help='Generate a javadoc archive.') - @CommandArgument('--upload', metavar='USER/REPO', - help='Upload geckoview documentation to Github, ' - 'using the specified USER/REPO.') - @CommandArgument('--upload-branch', metavar='BRANCH[/PATH]', - default='gh-pages', - help='Use the specified branch/path for documentation commits.') - @CommandArgument('--javadoc-path', metavar='/PATH', - default='javadoc', - help='Use the specified path for javadoc commits.') - @CommandArgument('--upload-message', metavar='MSG', - default='GeckoView docs upload', - help='Use the specified message for commits.') - def android_geckoview_docs(self, archive, upload, upload_branch, javadoc_path, - upload_message): + @SubCommand( + "android", + "geckoview-docs", + """Create GeckoView javadoc and optionally upload to Github""", + ) + @CommandArgument( + "--archive", action="store_true", help="Generate a javadoc archive." + ) + @CommandArgument( + "--upload", + metavar="USER/REPO", + help="Upload geckoview documentation to Github, " + "using the specified USER/REPO.", + ) + @CommandArgument( + "--upload-branch", + metavar="BRANCH[/PATH]", + default="gh-pages", + help="Use the specified branch/path for documentation commits.", + ) + @CommandArgument( + "--javadoc-path", + metavar="/PATH", + default="javadoc", + help="Use the specified path for javadoc commits.", + ) + @CommandArgument( + "--upload-message", + metavar="MSG", + default="GeckoView docs upload", + help="Use the specified message for commits.", + ) + def android_geckoview_docs( + self, archive, upload, upload_branch, javadoc_path, upload_message + ): - tasks = (self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS'] if archive or upload - else self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS']) + tasks = ( + self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS"] + if archive or upload + else self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS"] + ) ret = self.gradle(tasks, verbose=True) if ret or not upload: return ret # Upload to Github. fmt = { - 'level': os.environ.get('MOZ_SCM_LEVEL', '0'), - 'project': os.environ.get('MH_BRANCH', 'unknown'), - 'revision': os.environ.get('GECKO_HEAD_REV', 'tip'), + "level": os.environ.get("MOZ_SCM_LEVEL", "0"), + "project": os.environ.get("MH_BRANCH", "unknown"), + "revision": os.environ.get("GECKO_HEAD_REV", "tip"), } env = {} # In order to push to GitHub from TaskCluster, we store a private key # in the TaskCluster secrets store in the format {"content": "<KEY>"}, # and the corresponding public key as a writable deploy key for the # destination repo on GitHub. - secret = os.environ.get('GECKOVIEW_DOCS_UPLOAD_SECRET', '').format(**fmt) + secret = os.environ.get("GECKOVIEW_DOCS_UPLOAD_SECRET", "").format(**fmt) if secret: # Set up a private key from the secrets store if applicable. import requests - req = requests.get('http://taskcluster/secrets/v1/secret/' + secret) + + req = requests.get("http://taskcluster/secrets/v1/secret/" + secret) req.raise_for_status() - keyfile = mozpath.abspath('gv-docs-upload-key') - with open(keyfile, 'w') as f: + keyfile = mozpath.abspath("gv-docs-upload-key") + with open(keyfile, "w") as f: os.chmod(keyfile, 0o600) - f.write(req.json()['secret']['content']) + f.write(req.json()["secret"]["content"]) # Turn off strict host key checking so ssh does not complain about # unknown github.com host. 
We're not pushing anything sensitive, so # it's okay to not check GitHub's host keys. - env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile + env["GIT_SSH_COMMAND"] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile # Clone remote repo. branch = upload_branch.format(**fmt) - repo_url = 'git@github.com:%s.git' % upload - repo_path = mozpath.abspath('gv-docs-repo') - self.run_process(['git', 'clone', '--branch', upload_branch, '--depth', '1', - repo_url, repo_path], append_env=env, pass_thru=True) - env['GIT_DIR'] = mozpath.join(repo_path, '.git') - env['GIT_WORK_TREE'] = repo_path - env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot' - env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = 'nobody@mozilla.com' + repo_url = "git@github.com:%s.git" % upload + repo_path = mozpath.abspath("gv-docs-repo") + self.run_process( + [ + "git", + "clone", + "--branch", + upload_branch, + "--depth", + "1", + repo_url, + repo_path, + ], + append_env=env, + pass_thru=True, + ) + env["GIT_DIR"] = mozpath.join(repo_path, ".git") + env["GIT_WORK_TREE"] = repo_path + env["GIT_AUTHOR_NAME"] = env["GIT_COMMITTER_NAME"] = "GeckoView Docs Bot" + env["GIT_AUTHOR_EMAIL"] = env["GIT_COMMITTER_EMAIL"] = "nobody@mozilla.com" # Copy over user documentation. import mozfile # Extract new javadoc to specified directory inside repo. - src_tar = mozpath.join(self.topobjdir, 'gradle', 'build', 'mobile', 'android', - 'geckoview', 'libs', 'geckoview-javadoc.jar') + src_tar = mozpath.join( + self.topobjdir, + "gradle", + "build", + "mobile", + "android", + "geckoview", + "libs", + "geckoview-javadoc.jar", + ) dst_path = mozpath.join(repo_path, javadoc_path.format(**fmt)) mozfile.remove(dst_path) mozfile.extract_zip(src_tar, dst_path) # Commit and push. - self.run_process(['git', 'add', '--all'], append_env=env, pass_thru=True) - if self.run_process(['git', 'diff', '--cached', '--quiet'], - append_env=env, pass_thru=True, ensure_exit_code=False) != 0: + self.run_process(["git", "add", "--all"], append_env=env, pass_thru=True) + if ( + self.run_process( + ["git", "diff", "--cached", "--quiet"], + append_env=env, + pass_thru=True, + ensure_exit_code=False, + ) + != 0 + ): # We have something to commit. 
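The staged-changes test below relies on git diff --cached --quiet, which exits 0 when the index matches HEAD and 1 when something is staged; committing only on a nonzero status avoids git commit failing on an empty tree. As a standalone sketch of the same pattern:

import subprocess

def commit_if_staged(message, env=None):
    subprocess.run(["git", "add", "--all"], env=env, check=True)
    # Exit status 0 means nothing is staged; 1 means there are staged
    # changes worth committing.
    staged = subprocess.run(["git", "diff", "--cached", "--quiet"], env=env)
    if staged.returncode != 0:
        subprocess.run(["git", "commit", "--message", message],
                       env=env, check=True)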
- self.run_process(['git', 'commit', - '--message', upload_message.format(**fmt)], - append_env=env, pass_thru=True) - self.run_process(['git', 'push', 'origin', branch], - append_env=env, pass_thru=True) + self.run_process( + ["git", "commit", "--message", upload_message.format(**fmt)], + append_env=env, + pass_thru=True, + ) + self.run_process( + ["git", "push", "origin", branch], append_env=env, pass_thru=True + ) mozfile.remove(repo_path) if secret: mozfile.remove(keyfile) return 0 - @Command('gradle', category='devenv', - description='Run gradle.', - conditions=[conditions.is_android]) - @CommandArgument('-v', '--verbose', action='store_true', - help='Verbose output for what commands the build is running.') - @CommandArgument('args', nargs=argparse.REMAINDER) + @Command( + "gradle", + category="devenv", + description="Run gradle.", + conditions=[conditions.is_android], + ) + @CommandArgument( + "-v", + "--verbose", + action="store_true", + help="Verbose output for what commands the build is running.", + ) + @CommandArgument("args", nargs=argparse.REMAINDER) def gradle(self, args, verbose=False): if not verbose: # Avoid logging the command self.log_manager.terminal_handler.setLevel(logging.CRITICAL) # In automation, JAVA_HOME is set via mozconfig, which needs # to be specially handled in each mach command. This turns # $JAVA_HOME/bin/java into $JAVA_HOME. - java_home = os.path.dirname(os.path.dirname(self.substs['JAVA'])) + java_home = os.path.dirname(os.path.dirname(self.substs["JAVA"])) - gradle_flags = self.substs.get('GRADLE_FLAGS', '') or \ - os.environ.get('GRADLE_FLAGS', '') + gradle_flags = self.substs.get("GRADLE_FLAGS", "") or os.environ.get( + "GRADLE_FLAGS", "" + ) gradle_flags = shell_split(gradle_flags) # We force the Gradle JVM to run with the UTF-8 encoding, since we # filter strings.xml, which is really UTF-8; the ellipsis character is # replaced with ??? in some encodings (including ASCII). It's not yet # possible to filter with encodings in Gradle # (https://github.com/gradle/gradle/pull/520) and it's challenging to # do our filtering with Gradle's Ant support. Moreover, all of the @@ -299,124 +411,175 @@ REMOVED/DEPRECATED: Use 'mach lint --lin # It's not even enough to set the encoding just for Gradle; it # needs to be for JVMs spawned by Gradle as well. This # happens during the maven deployment generating the GeckoView # documents; this works around "error: unmappable character # for encoding ASCII" in exoplayer2. See # https://discuss.gradle.org/t/unmappable-character-for-encoding-ascii-when-building-a-utf-8-project/10692/11 # NOQA: E501 # and especially https://stackoverflow.com/a/21755671. - if self.substs.get('MOZ_AUTOMATION'): - gradle_flags += ['--console=plain'] + if self.substs.get("MOZ_AUTOMATION"): + gradle_flags += ["--console=plain"] env = os.environ.copy() - env.update({ - 'GRADLE_OPTS': '-Dfile.encoding=utf-8', - 'JAVA_HOME': java_home, - 'JAVA_TOOL_OPTIONS': '-Dfile.encoding=utf-8', - }) + env.update( + { + "GRADLE_OPTS": "-Dfile.encoding=utf-8", + "JAVA_HOME": java_home, + "JAVA_TOOL_OPTIONS": "-Dfile.encoding=utf-8", + } + ) # Set ANDROID_SDK_ROOT if --with-android-sdk was set. 
# See https://bugzilla.mozilla.org/show_bug.cgi?id=1576471 - android_sdk_root = self.substs.get('ANDROID_SDK_ROOT', '') + android_sdk_root = self.substs.get("ANDROID_SDK_ROOT", "") if android_sdk_root: - env['ANDROID_SDK_ROOT'] = android_sdk_root + env["ANDROID_SDK_ROOT"] = android_sdk_root return self.run_process( - [self.substs['GRADLE']] + gradle_flags + args, + [self.substs["GRADLE"]] + gradle_flags + args, explicit_env=env, pass_thru=True, # Allow user to run gradle interactively. ensure_exit_code=False, # Don't throw on non-zero exit code. - cwd=mozpath.join(self.topsrcdir)) + cwd=mozpath.join(self.topsrcdir), + ) - @Command('gradle-install', category='devenv', - conditions=[REMOVED]) + @Command("gradle-install", category="devenv", conditions=[REMOVED]) def gradle_install_REMOVED(self): pass @CommandProvider class AndroidEmulatorCommands(MachCommandBase): """ - Run the Android emulator with one of the AVDs used in the Mozilla - automated test environment. If necessary, the AVD is fetched from - the tooltool server and installed. + Run the Android emulator with one of the AVDs used in the Mozilla + automated test environment. If necessary, the AVD is fetched from + the tooltool server and installed. """ - @Command('android-emulator', category='devenv', - conditions=[], - description='Run the Android emulator with an AVD from test automation. ' - 'Environment variable MOZ_EMULATOR_COMMAND_ARGS, if present, will ' - 'over-ride the command line arguments used to launch the emulator.') - @CommandArgument('--version', metavar='VERSION', - choices=['arm-4.3', 'x86-7.0'], - help='Specify which AVD to run in emulator. ' - 'One of "arm-4.3" (Android 4.3 supporting armv7 binaries), or ' - '"x86-7.0" (Android 7.0 supporting x86 or x86_64 binaries, ' - 'recommended for most applications). ' - 'By default, "arm-4.3" will be used if the current build environment ' - 'architecture is arm; otherwise "x86-7.0".') - @CommandArgument('--wait', action='store_true', - help='Wait for emulator to be closed.') - @CommandArgument('--force-update', action='store_true', - help='Update AVD definition even when AVD is already installed.') - @CommandArgument('--gpu', - help='Over-ride the emulator -gpu argument.') - @CommandArgument('--verbose', action='store_true', - help='Log informative status messages.') - def emulator(self, version, wait=False, force_update=False, gpu=None, verbose=False): + + @Command( + "android-emulator", + category="devenv", + conditions=[], + description="Run the Android emulator with an AVD from test automation. " + "Environment variable MOZ_EMULATOR_COMMAND_ARGS, if present, will " + "over-ride the command line arguments used to launch the emulator.", + ) + @CommandArgument( + "--version", + metavar="VERSION", + choices=["arm-4.3", "x86-7.0"], + help="Specify which AVD to run in emulator. " + 'One of "arm-4.3" (Android 4.3 supporting armv7 binaries), or ' + '"x86-7.0" (Android 7.0 supporting x86 or x86_64 binaries, ' + "recommended for most applications). " + 'By default, "arm-4.3" will be used if the current build environment ' + 'architecture is arm; otherwise "x86-7.0".', + ) + @CommandArgument( + "--wait", action="store_true", help="Wait for emulator to be closed." + ) + @CommandArgument( + "--force-update", + action="store_true", + help="Update AVD definition even when AVD is already installed.", + ) + @CommandArgument("--gpu", help="Over-ride the emulator -gpu argument.") + @CommandArgument( + "--verbose", action="store_true", help="Log informative status messages." 
+ ) + def emulator( + self, version, wait=False, force_update=False, gpu=None, verbose=False + ): from mozrunner.devices.android_device import AndroidEmulator - emulator = AndroidEmulator(version, verbose, substs=self.substs, - device_serial='emulator-5554') + emulator = AndroidEmulator( + version, verbose, substs=self.substs, device_serial="emulator-5554" + ) if emulator.is_running(): # It is possible to run multiple emulators simultaneously, but: # - if more than one emulator is using the same avd, errors may # occur due to locked resources; # - additional parameters must be specified when running tests, # to select a specific device. # To avoid these complications, allow just one emulator at a time. - self.log(logging.ERROR, "emulator", {}, - "An Android emulator is already running.\n" - "Close the existing emulator and re-run this command.") + self.log( + logging.ERROR, + "emulator", + {}, + "An Android emulator is already running.\n" + "Close the existing emulator and re-run this command.", + ) return 1 if not emulator.is_available(): - self.log(logging.WARN, "emulator", {}, - "Emulator binary not found.\n" - "Install the Android SDK and make sure 'emulator' is in your PATH.") + self.log( + logging.WARN, + "emulator", + {}, + "Emulator binary not found.\n" + "Install the Android SDK and make sure 'emulator' is in your PATH.", + ) return 2 if not emulator.check_avd(force_update): - self.log(logging.INFO, "emulator", {}, - "Fetching and installing AVD. This may take a few minutes...") + self.log( + logging.INFO, + "emulator", + {}, + "Fetching and installing AVD. This may take a few minutes...", + ) emulator.update_avd(force_update) - self.log(logging.INFO, "emulator", {}, - "Starting Android emulator running %s..." % - emulator.get_avd_description()) + self.log( + logging.INFO, + "emulator", + {}, + "Starting Android emulator running %s..." % emulator.get_avd_description(), + ) emulator.start(gpu) if emulator.wait_for_start(): - self.log(logging.INFO, "emulator", {}, - "Android emulator is running.") + self.log(logging.INFO, "emulator", {}, "Android emulator is running.") else: # This is unusual but the emulator may still function. - self.log(logging.WARN, "emulator", {}, - "Unable to verify that emulator is running.") + self.log( + logging.WARN, + "emulator", + {}, + "Unable to verify that emulator is running.", + ) if conditions.is_android(self): - self.log(logging.INFO, "emulator", {}, - "Use 'mach install' to install or update Firefox on your emulator.") + self.log( + logging.INFO, + "emulator", + {}, + "Use 'mach install' to install or update Firefox on your emulator.", + ) else: - self.log(logging.WARN, "emulator", {}, - "No Firefox for Android build detected.\n" - "Switch to a Firefox for Android build context or use 'mach bootstrap'\n" - "to setup an Android build environment.") + self.log( + logging.WARN, + "emulator", + {}, + "No Firefox for Android build detected.\n" + "Switch to a Firefox for Android build context or use 'mach bootstrap'\n" + "to setup an Android build environment.", + ) if wait: - self.log(logging.INFO, "emulator", {}, - "Waiting for Android emulator to close...") + self.log( + logging.INFO, "emulator", {}, "Waiting for Android emulator to close..." + ) rc = emulator.wait() if rc is not None: - self.log(logging.INFO, "emulator", {}, - "Android emulator completed with return code %d." % rc) + self.log( + logging.INFO, + "emulator", + {}, + "Android emulator completed with return code %d." 
% rc, + ) else: - self.log(logging.WARN, "emulator", {}, - "Unable to retrieve Android emulator return code.") + self.log( + logging.WARN, + "emulator", + {}, + "Unable to retrieve Android emulator return code.", + ) return 0
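The docs-upload hunk above drives git entirely through the environment (GIT_SSH_COMMAND for the deploy key, then GIT_DIR and GIT_WORK_TREE so no chdir is needed). A minimal standalone sketch of that pattern, outside mach; the repo URL, branch, key path, and commit message below are placeholders, not the values the command uses:

    import os
    import subprocess

    def push_docs(repo_url="git@github.com:example/docs.git",
                  branch="main", keyfile="/tmp/deploy_key"):
        env = os.environ.copy()
        # Deploy key plus no host-key prompt, as in the mach command above.
        env["GIT_SSH_COMMAND"] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile
        repo_path = os.path.abspath("docs-repo")
        subprocess.check_call(
            ["git", "clone", "--branch", branch, "--depth", "1", repo_url, repo_path],
            env=env,
        )
        # Point every later git invocation at the clone via the environment.
        env["GIT_DIR"] = os.path.join(repo_path, ".git")
        env["GIT_WORK_TREE"] = repo_path
        subprocess.check_call(["git", "add", "--all"], env=env)
        # `git diff --cached --quiet` exits non-zero when something is staged.
        if subprocess.call(["git", "diff", "--cached", "--quiet"], env=env) != 0:
            subprocess.check_call(["git", "commit", "--message", "Update docs"], env=env)
            subprocess.check_call(["git", "push", "origin", branch], env=env)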
--- a/python/mozbuild/mozbuild/mach_commands.py
+++ b/python/mozbuild/mozbuild/mach_commands.py
@@ -33,166 +33,214 @@ from mozbuild.base import (
     MachCommandBase,
     MachCommandConditions as conditions,
     MozbuildObject,
 )

 from mozbuild.util import MOZBUILD_METRICS_PATH

 here = os.path.abspath(os.path.dirname(__file__))

-EXCESSIVE_SWAP_MESSAGE = '''
+EXCESSIVE_SWAP_MESSAGE = """
 ===================
 PERFORMANCE WARNING

 Your machine experienced a lot of swap activity during the build. This is
 possibly a sign that your machine doesn't have enough physical memory or
 not enough available memory to perform the build. It's also possible some
 other system activity during the build is to blame.

 If you feel this message is not appropriate for your machine configuration,
 please file a Firefox Build System :: General bug at
 https://bugzilla.mozilla.org/enter_bug.cgi?product=Firefox%20Build%20System&component=General
 and tell us about your machine and build configuration so we can adjust the
 warning heuristic.
 ===================
-'''
+"""


 class StoreDebugParamsAndWarnAction(argparse.Action):
     def __call__(self, parser, namespace, values, option_string=None):
-        sys.stderr.write('The --debugparams argument is deprecated. Please ' +
-                         'use --debugger-args instead.\n\n')
+        sys.stderr.write(
+            "The --debugparams argument is deprecated. Please "
+            + "use --debugger-args instead.\n\n"
+        )
         setattr(namespace, self.dest, values)


 @CommandProvider
 class Watch(MachCommandBase):
     """Interface to watch and re-build the tree."""

-    @Command('watch', category='post-build', description='Watch and re-build the tree.',
-             conditions=[conditions.is_firefox])
-    @CommandArgument('-v', '--verbose', action='store_true',
-                     help='Verbose output for what commands the watcher is running.')
+    @Command(
+        "watch",
+        category="post-build",
+        description="Watch and re-build the tree.",
+        conditions=[conditions.is_firefox],
+    )
+    @CommandArgument(
+        "-v",
+        "--verbose",
+        action="store_true",
+        help="Verbose output for what commands the watcher is running.",
+    )
     def watch(self, verbose=False):
         """Watch and re-build the source tree."""
         if not conditions.is_artifact_build(self):
-            print('mach watch requires an artifact build. See '
-                  'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build')  # noqa
+            print(
+                "mach watch requires an artifact build. See "
+                "https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Simple_Firefox_build"  # noqa
+            )
             return 1

-        if not self.substs.get('WATCHMAN', None):
-            print('mach watch requires watchman to be installed. See '
-                  'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')  # noqa
+        if not self.substs.get("WATCHMAN", None):
+            print(
+                "mach watch requires watchman to be installed. See "
+                "https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching"  # noqa
+            )
             return 1

         self.activate_virtualenv()
         try:
-            self.virtualenv_manager.install_pip_package('pywatchman==1.4.1')
+            self.virtualenv_manager.install_pip_package("pywatchman==1.4.1")
         except Exception:
-            print('Could not install pywatchman from pip. See '
-                  'https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching')  # noqa
+            print(
+                "Could not install pywatchman from pip. See "
+                "https://developer.mozilla.org/docs/Mozilla/Developer_guide/Build_Instructions/Incremental_builds_with_filesystem_watching"  # noqa
+            )
             return 1

         from mozbuild.faster_daemon import Daemon
+
         daemon = Daemon(self.config_environment)

         try:
             return daemon.watch()
         except KeyboardInterrupt:
             # Suppress ugly stack trace when user hits Ctrl-C.
             sys.exit(3)


 @CommandProvider
 class CargoProvider(MachCommandBase):
     """Invoke cargo in useful ways."""

-    @Command('cargo', category='build',
-             description='Invoke cargo in useful ways.')
+    @Command("cargo", category="build", description="Invoke cargo in useful ways.")
     def cargo(self):
-        self._sub_mach(['help', 'cargo'])
+        self._sub_mach(["help", "cargo"])
         return 1

-    @SubCommand('cargo', 'check',
-                description='Run `cargo check` on a given crate. Defaults to gkrust.')
-    @CommandArgument('--all-crates', default=None, action='store_true',
-                     help='Check all of the crates in the tree.')
-    @CommandArgument('crates', default=None, nargs='*', help='The crate name(s) to check.')
-    @CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
-                     help='Run the tests in parallel using multiple processes.')
-    @CommandArgument('-v', '--verbose', action='store_true',
-                     help='Verbose output.')
+    @SubCommand(
+        "cargo",
+        "check",
+        description="Run `cargo check` on a given crate. Defaults to gkrust.",
+    )
+    @CommandArgument(
+        "--all-crates",
+        default=None,
+        action="store_true",
+        help="Check all of the crates in the tree.",
+    )
+    @CommandArgument(
+        "crates", default=None, nargs="*", help="The crate name(s) to check."
+    )
+    @CommandArgument(
+        "--jobs",
+        "-j",
+        default="1",
+        nargs="?",
+        metavar="jobs",
+        type=int,
+        help="Run the tests in parallel using multiple processes.",
+    )
+    @CommandArgument("-v", "--verbose", action="store_true", help="Verbose output.")
     def check(self, all_crates=None, crates=None, jobs=0, verbose=False):
         # XXX duplication with `mach vendor rust`
         crates_and_roots = {
-            'gkrust': 'toolkit/library/rust',
-            'gkrust-gtest': 'toolkit/library/gtest/rust',
-            'js': 'js/rust',
-            'mozjs_sys': 'js/src',
-            'baldrdash': 'js/src/wasm/cranelift',
-            'geckodriver': 'testing/geckodriver',
+            "gkrust": "toolkit/library/rust",
+            "gkrust-gtest": "toolkit/library/gtest/rust",
+            "js": "js/rust",
+            "mozjs_sys": "js/src",
+            "baldrdash": "js/src/wasm/cranelift",
+            "geckodriver": "testing/geckodriver",
         }

         if all_crates:
             crates = crates_and_roots.keys()
         elif crates is None or crates == []:
-            crates = ['gkrust']
+            crates = ["gkrust"]

         for crate in crates:
             root = crates_and_roots.get(crate, None)
             if not root:
-                print('Cannot locate crate %s. Please check your spelling or '
-                      'add the crate information to the list.' % crate)
+                print(
+                    "Cannot locate crate %s. Please check your spelling or "
+                    "add the crate information to the list." % crate
+                )
                 return 1

             check_targets = [
-                'force-cargo-library-check',
-                'force-cargo-host-library-check',
-                'force-cargo-program-check',
-                'force-cargo-host-program-check',
+                "force-cargo-library-check",
+                "force-cargo-host-library-check",
+                "force-cargo-program-check",
+                "force-cargo-host-program-check",
             ]

-            ret = self._run_make(srcdir=False, directory=root,
-                                 ensure_exit_code=0, silent=not verbose,
-                                 print_directory=False, target=check_targets,
-                                 num_jobs=jobs)
+            ret = self._run_make(
+                srcdir=False,
+                directory=root,
+                ensure_exit_code=0,
+                silent=not verbose,
+                print_directory=False,
+                target=check_targets,
+                num_jobs=jobs,
+            )
             if ret != 0:
                 return ret

         return 0


 @CommandProvider
 class Doctor(MachCommandBase):
     """Provide commands for diagnosing common build environment problems"""
-    @Command('doctor', category='devenv',
-             description='')
-    @CommandArgument('--fix', default=None, action='store_true',
-                     help='Attempt to fix found problems.')
+
+    @Command("doctor", category="devenv", description="")
+    @CommandArgument(
+        "--fix",
+        default=None,
+        action="store_true",
+        help="Attempt to fix found problems.",
+    )
     def doctor(self, fix=None):
         self.activate_virtualenv()
         from mozbuild.doctor import Doctor
+
         doctor = Doctor(self.topsrcdir, self.topobjdir, fix)
         return doctor.check_all()


 @CommandProvider(metrics_path=MOZBUILD_METRICS_PATH)
 class Clobber(MachCommandBase):
     NO_AUTO_LOG = True
-    CLOBBER_CHOICES = set(['objdir', 'python', 'gradle'])
+    CLOBBER_CHOICES = set(["objdir", "python", "gradle"])

-    @Command('clobber', category='build',
-             description='Clobber the tree (delete the object directory).')
-    @CommandArgument('what', default=['objdir', 'python'], nargs='*',
-                     help='Target to clobber, must be one of {{{}}} (default '
-                     'objdir and python).'.format(
-                         ', '.join(CLOBBER_CHOICES)))
-    @CommandArgument('--full', action='store_true',
-                     help='Perform a full clobber')
+    @Command(
+        "clobber",
+        category="build",
+        description="Clobber the tree (delete the object directory).",
+    )
+    @CommandArgument(
+        "what",
+        default=["objdir", "python"],
+        nargs="*",
+        help="Target to clobber, must be one of {{{}}} (default "
+        "objdir and python).".format(", ".join(CLOBBER_CHOICES)),
+    )
+    @CommandArgument("--full", action="store_true", help="Perform a full clobber")
     def clobber(self, what, full=False):
         """Clean up the source and object directories.

         Performing builds and running various commands generate various files.

         Sometimes it is necessary to clean up these files in order to make
         things work again. This command can be used to perform that cleanup.

@@ -212,109 +260,146 @@ class Clobber(MachCommandBase):
         The `gradle` target will remove the "gradle" subdirectory of the
         object directory.

         By default, the command clobbers the `objdir` and `python` targets.
         """
         what = set(what)
         invalid = what - self.CLOBBER_CHOICES
         if invalid:
-            print('Unknown clobber target(s): {}'.format(', '.join(invalid)))
+            print("Unknown clobber target(s): {}".format(", ".join(invalid)))
             return 1

         ret = 0
-        if 'objdir' in what:
+        if "objdir" in what:
             from mozbuild.controller.clobber import Clobberer
+
             try:
-                Clobberer(self.topsrcdir, self.topobjdir, self.substs).remove_objdir(full)
+                Clobberer(self.topsrcdir, self.topobjdir, self.substs).remove_objdir(
+                    full
+                )
             except OSError as e:
-                if sys.platform.startswith('win'):
+                if sys.platform.startswith("win"):
                     if isinstance(e, WindowsError) and e.winerror in (5, 32):
-                        self.log(logging.ERROR, 'file_access_error', {'error': e},
-                                 "Could not clobber because a file was in use. If the "
-                                 "application is running, try closing it. {error}")
+                        self.log(
+                            logging.ERROR,
+                            "file_access_error",
+                            {"error": e},
+                            "Could not clobber because a file was in use. If the "
+                            "application is running, try closing it. {error}",
+                        )
                         return 1
                 raise

-        if 'python' in what:
+        if "python" in what:
             if conditions.is_hg(self):
-                cmd = ['hg', '--config', 'extensions.purge=', 'purge', '--all',
-                       '-I', 'glob:**.py[cdo]', '-I', 'glob:**/__pycache__',
-                       '-I', 'path:third_party/python/']
+                cmd = [
+                    "hg",
+                    "--config",
+                    "extensions.purge=",
+                    "purge",
+                    "--all",
+                    "-I",
+                    "glob:**.py[cdo]",
+                    "-I",
+                    "glob:**/__pycache__",
+                    "-I",
+                    "path:third_party/python/",
+                ]
             elif conditions.is_git(self):
-                cmd = ['git', 'clean', '-d', '-f', '-x', '*.py[cdo]', '*/__pycache__/*',
-                       'third_party/python/']
+                cmd = [
+                    "git",
+                    "clean",
+                    "-d",
+                    "-f",
+                    "-x",
+                    "*.py[cdo]",
+                    "*/__pycache__/*",
+                    "third_party/python/",
+                ]
             else:
                 # We don't know what is tracked/untracked if we don't have VCS.
                 # So we can't clean python/ and third_party/python/.
-                cmd = ['find', '.', '-type', 'f', '-name', '*.py[cdo]',
-                       '-delete']
+                cmd = ["find", ".", "-type", "f", "-name", "*.py[cdo]", "-delete"]
                 subprocess.call(cmd, cwd=self.topsrcdir)
-                cmd = ['find', '.', '-type', 'd', '-name', '__pycache__',
-                       '-empty', '-delete']
+                cmd = [
+                    "find",
+                    ".",
+                    "-type",
+                    "d",
+                    "-name",
+                    "__pycache__",
+                    "-empty",
+                    "-delete",
+                ]
             ret = subprocess.call(cmd, cwd=self.topsrcdir)

-        if 'gradle' in what:
-            shutil.rmtree(mozpath.join(self.topobjdir, 'gradle'))
+        if "gradle" in what:
+            shutil.rmtree(mozpath.join(self.topobjdir, "gradle"))

         return ret

     @property
     def substs(self):
         try:
             return super(Clobber, self).substs
         except BuildEnvironmentNotFoundException:
             return {}


 @CommandProvider
 class Logs(MachCommandBase):
     """Provide commands to read mach logs."""
+
     NO_AUTO_LOG = True

-    @Command('show-log', category='post-build',
-             description='Display mach logs')
-    @CommandArgument('log_file', nargs='?', type=argparse.FileType('rb'),
-                     help='Filename to read log data from. Defaults to the log of the last '
-                          'mach command.')
+    @Command("show-log", category="post-build", description="Display mach logs")
+    @CommandArgument(
+        "log_file",
+        nargs="?",
+        type=argparse.FileType("rb"),
+        help="Filename to read log data from. Defaults to the log of the last "
+        "mach command.",
+    )
     def show_log(self, log_file=None):
         if not log_file:
-            path = self._get_state_filename('last_log.json')
-            log_file = open(path, 'rb')
+            path = self._get_state_filename("last_log.json")
+            log_file = open(path, "rb")

         if os.isatty(sys.stdout.fileno()):
             env = dict(os.environ)
-            if 'LESS' not in env:
+            if "LESS" not in env:
                 # Sensible default flags if none have been set in the user
                 # environment.
-                env[b'LESS'] = b'FRX'
-            less = subprocess.Popen(['less'], stdin=subprocess.PIPE, env=env)
+                env[b"LESS"] = b"FRX"
+            less = subprocess.Popen(["less"], stdin=subprocess.PIPE, env=env)
             # Various objects already have a reference to sys.stdout, so we
             # can't just change it, we need to change the file descriptor under
             # it to redirect to less's input.
             # First keep a copy of the sys.stdout file descriptor.
             output_fd = os.dup(sys.stdout.fileno())
             os.dup2(less.stdin.fileno(), sys.stdout.fileno())

         startTime = 0
         for line in log_file:
             created, action, params = json.loads(line)
             if not startTime:
                 startTime = created
-                self.log_manager.terminal_handler.formatter.start_time = \
-                    created
-            if 'line' in params:
-                record = logging.makeLogRecord({
-                    'created': created,
-                    'name': self._logger.name,
-                    'levelno': logging.INFO,
-                    'msg': '{line}',
-                    'params': params,
-                    'action': action,
-                })
+                self.log_manager.terminal_handler.formatter.start_time = created
+            if "line" in params:
+                record = logging.makeLogRecord(
+                    {
+                        "created": created,
+                        "name": self._logger.name,
+                        "levelno": logging.INFO,
+                        "msg": "{line}",
+                        "params": params,
+                        "action": action,
+                    }
+                )
                 self._logger.handle(record)

         if self.log_manager.terminal:
             # Close less's input so that it knows that we're done sending data.
             less.stdin.close()
             # Since the less's input file descriptor is now also the stdout
             # file descriptor, we still actually have a non-closed system file
             # descriptor for less's input. Replacing sys.stdout's file
@@ -325,236 +410,343 @@ class Logs(MachCommandBase):


 @CommandProvider
 class Warnings(MachCommandBase):
     """Provide commands for inspecting warnings."""

     @property
     def database_path(self):
-        return self._get_state_filename('warnings.json')
+        return self._get_state_filename("warnings.json")

     @property
     def database(self):
         from mozbuild.compilation.warnings import WarningsDatabase

         path = self.database_path

         database = WarningsDatabase()

         if os.path.exists(path):
             database.load_from_file(path)

         return database

-    @Command('warnings-summary', category='post-build',
-             description='Show a summary of compiler warnings.')
-    @CommandArgument('-C', '--directory', default=None,
-                     help='Change to a subdirectory of the build directory first.')
-    @CommandArgument('report', default=None, nargs='?',
-                     help='Warnings report to display. If not defined, show the most '
-                          'recent report.')
+    @Command(
+        "warnings-summary",
+        category="post-build",
+        description="Show a summary of compiler warnings.",
+    )
+    @CommandArgument(
+        "-C",
+        "--directory",
+        default=None,
+        help="Change to a subdirectory of the build directory first.",
+    )
+    @CommandArgument(
+        "report",
+        default=None,
+        nargs="?",
+        help="Warnings report to display. If not defined, show the most "
+        "recent report.",
+    )
     def summary(self, directory=None, report=None):
         database = self.database

         if directory:
             dirpath = self.join_ensure_dir(self.topsrcdir, directory)
             if not dirpath:
                 return 1
         else:
             dirpath = None

         type_counts = database.type_counts(dirpath)
-        sorted_counts = sorted(type_counts.items(),
-                               key=operator.itemgetter(1))
+        sorted_counts = sorted(type_counts.items(), key=operator.itemgetter(1))

         total = 0
         for k, v in sorted_counts:
-            print('%d\t%s' % (v, k))
+            print("%d\t%s" % (v, k))
             total += v

-        print('%d\tTotal' % total)
+        print("%d\tTotal" % total)

-    @Command('warnings-list', category='post-build',
-             description='Show a list of compiler warnings.')
-    @CommandArgument('-C', '--directory', default=None,
-                     help='Change to a subdirectory of the build directory first.')
-    @CommandArgument('--flags', default=None, nargs='+',
-                     help='Which warnings flags to match.')
-    @CommandArgument('report', default=None, nargs='?',
-                     help='Warnings report to display. If not defined, show the most '
-                          'recent report.')
+    @Command(
+        "warnings-list",
+        category="post-build",
+        description="Show a list of compiler warnings.",
+    )
+    @CommandArgument(
+        "-C",
+        "--directory",
+        default=None,
+        help="Change to a subdirectory of the build directory first.",
+    )
+    @CommandArgument(
+        "--flags", default=None, nargs="+", help="Which warnings flags to match."
+    )
+    @CommandArgument(
+        "report",
+        default=None,
+        nargs="?",
+        help="Warnings report to display. If not defined, show the most "
+        "recent report.",
+    )
     def list(self, directory=None, flags=None, report=None):
         database = self.database

         by_name = sorted(database.warnings)

         topsrcdir = mozpath.normpath(self.topsrcdir)

         if directory:
             directory = mozpath.normsep(directory)
             dirpath = self.join_ensure_dir(topsrcdir, directory)
             if not dirpath:
                 return 1

         if flags:
             # Flatten lists of flags.
-            flags = set(itertools.chain(*[flaglist.split(',') for flaglist in flags]))
+            flags = set(itertools.chain(*[flaglist.split(",") for flaglist in flags]))

         for warning in by_name:
-            filename = mozpath.normsep(warning['filename'])
+            filename = mozpath.normsep(warning["filename"])

             if filename.startswith(topsrcdir):
-                filename = filename[len(topsrcdir) + 1:]
+                filename = filename[len(topsrcdir) + 1 :]

             if directory and not filename.startswith(directory):
                 continue

-            if flags and warning['flag'] not in flags:
+            if flags and warning["flag"] not in flags:
                 continue

-            if warning['column'] is not None:
-                print('%s:%d:%d [%s] %s' % (
-                    filename, warning['line'], warning['column'],
-                    warning['flag'], warning['message']))
+            if warning["column"] is not None:
+                print(
+                    "%s:%d:%d [%s] %s"
+                    % (
+                        filename,
+                        warning["line"],
+                        warning["column"],
+                        warning["flag"],
+                        warning["message"],
+                    )
+                )
             else:
-                print('%s:%d [%s] %s' % (filename, warning['line'],
-                                         warning['flag'], warning['message']))
+                print(
+                    "%s:%d [%s] %s"
+                    % (filename, warning["line"], warning["flag"], warning["message"])
+                )

     def join_ensure_dir(self, dir1, dir2):
         dir1 = mozpath.normpath(dir1)
         dir2 = mozpath.normsep(dir2)
         joined_path = mozpath.join(dir1, dir2)
         if os.path.isdir(joined_path):
             return joined_path
-        print('Specified directory not found.')
+        print("Specified directory not found.")
         return None


 @CommandProvider
 class GTestCommands(MachCommandBase):
-    @Command('gtest', category='testing',
-             description='Run GTest unit tests (C++ tests).')
-    @CommandArgument('gtest_filter', default=b"*", nargs='?', metavar='gtest_filter',
-                     help="test_filter is a ':'-separated list of wildcard patterns "
-                          "(called the positive patterns), optionally followed by a '-' "
-                          "and another ':'-separated pattern list (called the negative patterns).")
-    @CommandArgument('--jobs', '-j', default='1', nargs='?', metavar='jobs', type=int,
-                     help='Run the tests in parallel using multiple processes.')
-    @CommandArgument('--tbpl-parser', '-t', action='store_true',
-                     help='Output test results in a format that can be parsed by TBPL.')
-    @CommandArgument('--shuffle', '-s', action='store_true',
-                     help='Randomize the execution order of tests.')
-    @CommandArgument('--enable-webrender', action='store_true',
-                     default=False, dest='enable_webrender',
-                     help='Enable the WebRender compositor in Gecko.')
-    @CommandArgumentGroup('Android')
-    @CommandArgument('--package',
-                     default='org.mozilla.geckoview.test',
-                     group='Android',
-                     help='Package name of test app.')
-    @CommandArgument('--adbpath',
-                     dest='adb_path',
-                     group='Android',
-                     help='Path to adb binary.')
-    @CommandArgument('--deviceSerial',
-                     dest='device_serial',
-                     group='Android',
-                     help="adb serial number of remote device. "
-                          "Required when more than one device is connected to the host. "
-                          "Use 'adb devices' to see connected devices.")
-    @CommandArgument('--remoteTestRoot',
-                     dest='remote_test_root',
-                     group='Android',
-                     help='Remote directory to use as test root '
-                          '(eg. /data/local/tmp/test_root).')
-    @CommandArgument('--libxul',
-                     dest='libxul_path',
-                     group='Android',
-                     help='Path to gtest libxul.so.')
-    @CommandArgument('--no-install', action='store_true',
-                     default=False,
-                     group='Android',
-                     help='Skip the installation of the APK.')
-    @CommandArgumentGroup('debugging')
-    @CommandArgument('--debug', action='store_true', group='debugging',
-                     help='Enable the debugger. Not specifying a --debugger option will result in '
-                          'the default debugger being used.')
-    @CommandArgument('--debugger', default=None, type=str, group='debugging',
-                     help='Name of debugger to use.')
-    @CommandArgument('--debugger-args', default=None, metavar='params', type=str,
-                     group='debugging',
-                     help='Command-line arguments to pass to the debugger itself; '
-                          'split as the Bourne shell would.')
-    def gtest(self, shuffle, jobs, gtest_filter, tbpl_parser, enable_webrender,
-              package, adb_path, device_serial, remote_test_root, libxul_path, no_install,
-              debug, debugger, debugger_args):
+    @Command(
+        "gtest", category="testing", description="Run GTest unit tests (C++ tests)."
+    )
+    @CommandArgument(
+        "gtest_filter",
+        default=b"*",
+        nargs="?",
+        metavar="gtest_filter",
+        help="test_filter is a ':'-separated list of wildcard patterns "
+        "(called the positive patterns), optionally followed by a '-' "
+        "and another ':'-separated pattern list (called the negative patterns).",
+    )
+    @CommandArgument(
+        "--jobs",
+        "-j",
+        default="1",
+        nargs="?",
+        metavar="jobs",
+        type=int,
+        help="Run the tests in parallel using multiple processes.",
+    )
+    @CommandArgument(
+        "--tbpl-parser",
+        "-t",
+        action="store_true",
+        help="Output test results in a format that can be parsed by TBPL.",
+    )
+    @CommandArgument(
+        "--shuffle",
+        "-s",
+        action="store_true",
+        help="Randomize the execution order of tests.",
+    )
+    @CommandArgument(
+        "--enable-webrender",
+        action="store_true",
+        default=False,
+        dest="enable_webrender",
+        help="Enable the WebRender compositor in Gecko.",
+    )
+    @CommandArgumentGroup("Android")
+    @CommandArgument(
+        "--package",
+        default="org.mozilla.geckoview.test",
+        group="Android",
+        help="Package name of test app.",
+    )
+    @CommandArgument(
+        "--adbpath", dest="adb_path", group="Android", help="Path to adb binary."
+    )
+    @CommandArgument(
+        "--deviceSerial",
+        dest="device_serial",
+        group="Android",
+        help="adb serial number of remote device. "
+        "Required when more than one device is connected to the host. "
+        "Use 'adb devices' to see connected devices.",
+    )
+    @CommandArgument(
+        "--remoteTestRoot",
+        dest="remote_test_root",
+        group="Android",
+        help="Remote directory to use as test root " "(eg. /data/local/tmp/test_root).",
+    )
+    @CommandArgument(
+        "--libxul", dest="libxul_path", group="Android", help="Path to gtest libxul.so."
+    )
+    @CommandArgument(
+        "--no-install",
+        action="store_true",
+        default=False,
+        group="Android",
+        help="Skip the installation of the APK.",
+    )
+    @CommandArgumentGroup("debugging")
+    @CommandArgument(
+        "--debug",
+        action="store_true",
+        group="debugging",
+        help="Enable the debugger. Not specifying a --debugger option will result in "
+        "the default debugger being used.",
+    )
+    @CommandArgument(
+        "--debugger",
+        default=None,
+        type=str,
+        group="debugging",
+        help="Name of debugger to use.",
+    )
+    @CommandArgument(
+        "--debugger-args",
+        default=None,
+        metavar="params",
+        type=str,
+        group="debugging",
+        help="Command-line arguments to pass to the debugger itself; "
+        "split as the Bourne shell would.",
+    )
+    def gtest(
+        self,
+        shuffle,
+        jobs,
+        gtest_filter,
+        tbpl_parser,
+        enable_webrender,
+        package,
+        adb_path,
+        device_serial,
+        remote_test_root,
+        libxul_path,
+        no_install,
+        debug,
+        debugger,
+        debugger_args,
+    ):
         # We lazy build gtest because it's slow to link
         try:
             self.config_environment
         except Exception:
             print("Please run |./mach build| before |./mach gtest|.")
             return 1

-        res = self._mach_context.commands.dispatch('build', self._mach_context,
-                                                   what=['recurse_gtest'])
+        res = self._mach_context.commands.dispatch(
+            "build", self._mach_context, what=["recurse_gtest"]
+        )
         if res:
             print("Could not build xul-gtest")
             return res

-        if self.substs.get('MOZ_WIDGET_TOOLKIT') == 'cocoa':
-            self._run_make(directory='browser/app', target='repackage',
-                           ensure_exit_code=True)
+        if self.substs.get("MOZ_WIDGET_TOOLKIT") == "cocoa":
+            self._run_make(
+                directory="browser/app", target="repackage", ensure_exit_code=True
+            )

-        cwd = os.path.join(self.topobjdir, '_tests', 'gtest')
+        cwd = os.path.join(self.topobjdir, "_tests", "gtest")

         if not os.path.isdir(cwd):
             os.makedirs(cwd)

         if conditions.is_android(self):
             if jobs != 1:
                 print("--jobs is not supported on Android and will be ignored")
             if debug or debugger or debugger_args:
-                print("--debug options are not supported on Android and will be ignored")
+                print(
+                    "--debug options are not supported on Android and will be ignored"
+                )
             from mozrunner.devices.android_device import InstallIntent
-            return self.android_gtest(cwd, shuffle, gtest_filter,
-                                      package, adb_path, device_serial,
-                                      remote_test_root, libxul_path,
-                                      enable_webrender,
-                                      InstallIntent.NO if no_install else InstallIntent.YES)
+
+            return self.android_gtest(
+                cwd,
+                shuffle,
+                gtest_filter,
+                package,
+                adb_path,
+                device_serial,
+                remote_test_root,
+                libxul_path,
+                enable_webrender,
+                InstallIntent.NO if no_install else InstallIntent.YES,
+            )

-        if package or adb_path or device_serial or remote_test_root or libxul_path or no_install:
+        if (
+            package
+            or adb_path
+            or device_serial
+            or remote_test_root
+            or libxul_path
+            or no_install
+        ):
             print("One or more Android-only options will be ignored")

-        app_path = self.get_binary_path('app')
-        args = [app_path, '-unittest', '--gtest_death_test_style=threadsafe']
+        app_path = self.get_binary_path("app")
+        args = [app_path, "-unittest", "--gtest_death_test_style=threadsafe"]

-        if sys.platform.startswith('win') and \
-           'MOZ_LAUNCHER_PROCESS' in self.defines:
-            args.append('--wait-for-browser')
+        if sys.platform.startswith("win") and "MOZ_LAUNCHER_PROCESS" in self.defines:
+            args.append("--wait-for-browser")

         if debug or debugger or debugger_args:
             args = self.prepend_debugger_args(args, debugger, debugger_args)
             if not args:
                 return 1

         # Use GTest environment variable to control test execution
         # For details see:
         # https://code.google.com/p/googletest/wiki/AdvancedGuide#Running_Test_Programs:_Advanced_Options
-        gtest_env = {b'GTEST_FILTER': gtest_filter}
+        gtest_env = {b"GTEST_FILTER": gtest_filter}

         # Note: we must normalize the path here so that gtest on Windows sees
         # a MOZ_GMP_PATH which has only Windows dir seperators, because
         # nsIFile cannot open the paths with non-Windows dir seperators.
         xre_path = os.path.join(os.path.normpath(self.topobjdir), "dist", "bin")
         gtest_env["MOZ_XRE_DIR"] = xre_path
         gtest_env["MOZ_GMP_PATH"] = os.pathsep.join(
-            os.path.join(xre_path, p, "1.0")
-            for p in ('gmp-fake', 'gmp-fakeopenh264')
+            os.path.join(xre_path, p, "1.0") for p in ("gmp-fake", "gmp-fakeopenh264")
         )

         gtest_env[b"MOZ_RUN_GTEST"] = b"True"

         if shuffle:
             gtest_env[b"GTEST_SHUFFLE"] = b"True"

         if tbpl_parser:
@@ -562,301 +754,468 @@ class GTestCommands(MachCommandBase):
         if enable_webrender:
             gtest_env[b"MOZ_WEBRENDER"] = b"1"
             gtest_env[b"MOZ_ACCELERATED"] = b"1"
         else:
             gtest_env[b"MOZ_WEBRENDER"] = b"0"

         if jobs == 1:
-            return self.run_process(args=args,
-                                    append_env=gtest_env,
-                                    cwd=cwd,
-                                    ensure_exit_code=False,
-                                    pass_thru=True)
+            return self.run_process(
+                args=args,
+                append_env=gtest_env,
+                cwd=cwd,
+                ensure_exit_code=False,
+                pass_thru=True,
+            )

         from mozprocess import ProcessHandlerMixin
         import functools

         def handle_line(job_id, line):
             # Prepend the jobId
-            line = '[%d] %s' % (job_id + 1, line.strip())
-            self.log(logging.INFO, "GTest", {'line': line}, '{line}')
+            line = "[%d] %s" % (job_id + 1, line.strip())
+            self.log(logging.INFO, "GTest", {"line": line}, "{line}")

         gtest_env["GTEST_TOTAL_SHARDS"] = str(jobs)
         processes = {}
         for i in range(0, jobs):
             gtest_env["GTEST_SHARD_INDEX"] = str(i)
-            processes[i] = ProcessHandlerMixin([app_path, "-unittest"],
-                                               cwd=cwd,
-                                               env=gtest_env,
-                                               processOutputLine=[
-                                                   functools.partial(handle_line, i)],
-                                               universal_newlines=True)
+            processes[i] = ProcessHandlerMixin(
+                [app_path, "-unittest"],
+                cwd=cwd,
+                env=gtest_env,
+                processOutputLine=[functools.partial(handle_line, i)],
+                universal_newlines=True,
+            )
             processes[i].run()

         exit_code = 0
         for process in processes.values():
             status = process.wait()
             if status:
                 exit_code = status

         # Clamp error code to 255 to prevent overflowing multiple of
         # 256 into 0
         if exit_code > 255:
             exit_code = 255

         return exit_code

-    def android_gtest(self, test_dir, shuffle, gtest_filter,
-                      package, adb_path, device_serial, remote_test_root, libxul_path,
-                      enable_webrender, install):
+    def android_gtest(
+        self,
+        test_dir,
+        shuffle,
+        gtest_filter,
+        package,
+        adb_path,
+        device_serial,
+        remote_test_root,
+        libxul_path,
+        enable_webrender,
+        install,
+    ):
         # setup logging for mozrunner
         from mozlog.commandline import setup_logging
-        format_args = {'level': self._mach_context.settings['test']['level']}
-        default_format = self._mach_context.settings['test']['format']
-        setup_logging('mach-gtest', {}, {default_format: sys.stdout}, format_args)
+
+        format_args = {"level": self._mach_context.settings["test"]["level"]}
+        default_format = self._mach_context.settings["test"]["format"]
+        setup_logging("mach-gtest", {}, {default_format: sys.stdout}, format_args)

         # ensure that a device is available and test app is installed
-        from mozrunner.devices.android_device import (verify_android_device, get_adb_path)
-        verify_android_device(self, install=install, app=package, device_serial=device_serial)
+        from mozrunner.devices.android_device import verify_android_device, get_adb_path
+
+        verify_android_device(
+            self, install=install, app=package, device_serial=device_serial
+        )

         if not adb_path:
             adb_path = get_adb_path(self)
         if not libxul_path:
-            libxul_path = os.path.join(self.topobjdir, "dist", "bin", "gtest", "libxul.so")
+            libxul_path = os.path.join(
+                self.topobjdir, "dist", "bin", "gtest", "libxul.so"
+            )

         # run gtest via remotegtests.py
         exit_code = 0
         import imp
-        path = os.path.join('testing', 'gtest', 'remotegtests.py')
-        with open(path, 'r') as fh:
-            imp.load_module('remotegtests', fh, path,
-                            ('.py', 'r', imp.PY_SOURCE))
+
+        path = os.path.join("testing", "gtest", "remotegtests.py")
+        with open(path, "r") as fh:
+            imp.load_module("remotegtests", fh, path, (".py", "r", imp.PY_SOURCE))
         import remotegtests
+
         tester = remotegtests.RemoteGTests()
-        if not tester.run_gtest(test_dir, shuffle, gtest_filter, package, adb_path, device_serial,
-                                remote_test_root, libxul_path, None, enable_webrender):
+        if not tester.run_gtest(
+            test_dir,
+            shuffle,
+            gtest_filter,
+            package,
+            adb_path,
+            device_serial,
+            remote_test_root,
+            libxul_path,
+            None,
+            enable_webrender,
+        ):
             exit_code = 1
         tester.cleanup()

         return exit_code

     def prepend_debugger_args(self, args, debugger, debugger_args):
-        '''
+        """
         Given an array with program arguments, prepend arguments to run it under a
         debugger.

         :param args: The executable and arguments used to run the process normally.
         :param debugger: The debugger to use, or empty to use the default debugger.
         :param debugger_args: Any additional parameters to pass to the debugger.
-        '''
+        """
         import mozdebug

         if not debugger:
             # No debugger name was provided. Look for the default ones on
             # current OS.
-            debugger = mozdebug.get_default_debugger_name(mozdebug.DebuggerSearch.KeepLooking)
+            debugger = mozdebug.get_default_debugger_name(
+                mozdebug.DebuggerSearch.KeepLooking
+            )

         if debugger:
             debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args)

         if not debugger or not debuggerInfo:
             print("Could not find a suitable debugger in your PATH.")
             return None

         # Parameters come from the CLI. We need to convert them before
         # their use.
         if debugger_args:
             from mozbuild import shellutil
+
             try:
                 debugger_args = shellutil.split(debugger_args)
             except shellutil.MetaCharacterException as e:
-                print("The --debugger_args you passed require a real shell to parse them.")
+                print(
+                    "The --debugger_args you passed require a real shell to parse them."
+                )
                 print("(We can't handle the %r character.)" % e.char)
                 return None

         # Prepend the debugger args.
         args = [debuggerInfo.path] + debuggerInfo.args + args
         return args


 @CommandProvider
 class Package(MachCommandBase):
     """Package the built product for distribution."""

-    @Command('package', category='post-build',
-             description='Package the built product for distribution as an APK, DMG, etc.')
-    @CommandArgument('-v', '--verbose', action='store_true',
-                     help='Verbose output for what commands the packaging process is running.')
+    @Command(
+        "package",
+        category="post-build",
+        description="Package the built product for distribution as an APK, DMG, etc.",
+    )
+    @CommandArgument(
+        "-v",
+        "--verbose",
+        action="store_true",
+        help="Verbose output for what commands the packaging process is running.",
+    )
     def package(self, verbose=False):
-        ret = self._run_make(directory=".", target='package',
-                             silent=not verbose, ensure_exit_code=False)
+        ret = self._run_make(
+            directory=".", target="package", silent=not verbose, ensure_exit_code=False
+        )
         if ret == 0:
-            self.notify('Packaging complete')
+            self.notify("Packaging complete")
         return ret


 def _get_android_install_parser():
     parser = argparse.ArgumentParser()
-    parser.add_argument('--app', default='org.mozilla.geckoview_example',
-                        help='Android package to install '
-                             '(default: org.mozilla.geckoview_example)')
-    parser.add_argument('--verbose', '-v', action='store_true',
-                        help='Print verbose output when installing.')
+    parser.add_argument(
+        "--app",
+        default="org.mozilla.geckoview_example",
+        help="Android package to install " "(default: org.mozilla.geckoview_example)",
+    )
+    parser.add_argument(
+        "--verbose",
+        "-v",
+        action="store_true",
+        help="Print verbose output when installing.",
+    )
     return parser


 def setup_install_parser():
     build = MozbuildObject.from_environment(cwd=here)
     if conditions.is_android(build):
         return _get_android_install_parser()
     return argparse.ArgumentParser()


 @CommandProvider
 class Install(MachCommandBase):
     """Install a package."""

-    @Command('install', category='post-build',
-             conditions=[conditions.has_build],
-             parser=setup_install_parser,
-             description='Install the package on the machine (or device in the case of Android).')
+    @Command(
+        "install",
+        category="post-build",
+        conditions=[conditions.has_build],
+        parser=setup_install_parser,
+        description="Install the package on the machine (or device in the case of Android).",
+    )
     def install(self, **kwargs):
         if conditions.is_android(self):
-            from mozrunner.devices.android_device import (verify_android_device, InstallIntent)
+            from mozrunner.devices.android_device import (
+                verify_android_device,
+                InstallIntent,
+            )
+
             ret = verify_android_device(self, install=InstallIntent.YES, **kwargs) == 0
         else:
-            ret = self._run_make(directory=".", target='install', ensure_exit_code=False)
+            ret = self._run_make(
+                directory=".", target="install", ensure_exit_code=False
+            )

         if ret == 0:
-            self.notify('Install complete')
+            self.notify("Install complete")
         return ret


 @SettingsProvider
-class RunSettings():
+class RunSettings:
     config_settings = [
-        ('runprefs.*', 'string', """
+        (
+            "runprefs.*",
+            "string",
+            """
 Pass a pref into Firefox when using `mach run`, of the form `foo.bar=value`.
 Prefs will automatically be cast into the appropriate type. Integers can be
 single quoted to force them to be strings.
-""".strip()),
+""".strip(),
+        ),
     ]


 def _get_android_run_parser():
     parser = argparse.ArgumentParser()
-    parser.add_argument('--app', default='org.mozilla.geckoview_example',
-                        help='Android package to run '
-                             '(default: org.mozilla.geckoview_example)')
-    parser.add_argument('--intent', default='android.intent.action.VIEW',
-                        help='Android intent action to launch with '
-                             '(default: android.intent.action.VIEW)')
-    parser.add_argument('--setenv', dest='env', action='append', default=[],
-                        help='Set target environment variable, like FOO=BAR')
-    parser.add_argument('--profile', '-P', default=None,
-                        help='Path to Gecko profile, like /path/to/host/profile '
-                             'or /path/to/target/profile')
-    parser.add_argument('--url', default=None,
-                        help='URL to open')
-    parser.add_argument('--no-install', action='store_true', default=False,
-                        help='Do not try to install application on device before running '
-                             '(default: False)')
-    parser.add_argument('--no-wait', action='store_true', default=False,
-                        help='Do not wait for application to start before returning '
-                             '(default: False)')
-    parser.add_argument('--fail-if-running', action='store_true', default=False,
-                        help='Fail if application is already running (default: False)')
-    parser.add_argument('--restart', action='store_true', default=False,
-                        help='Stop the application if it is already running (default: False)')
+    parser.add_argument(
+        "--app",
+        default="org.mozilla.geckoview_example",
+        help="Android package to run " "(default: org.mozilla.geckoview_example)",
+    )
+    parser.add_argument(
+        "--intent",
+        default="android.intent.action.VIEW",
+        help="Android intent action to launch with "
+        "(default: android.intent.action.VIEW)",
+    )
+    parser.add_argument(
+        "--setenv",
+        dest="env",
+        action="append",
+        default=[],
+        help="Set target environment variable, like FOO=BAR",
+    )
+    parser.add_argument(
+        "--profile",
+        "-P",
+        default=None,
+        help="Path to Gecko profile, like /path/to/host/profile "
+        "or /path/to/target/profile",
+    )
+    parser.add_argument("--url", default=None, help="URL to open")
+    parser.add_argument(
+        "--no-install",
+        action="store_true",
+        default=False,
+        help="Do not try to install application on device before running "
+        "(default: False)",
+    )
+    parser.add_argument(
+        "--no-wait",
+        action="store_true",
+        default=False,
+        help="Do not wait for application to start before returning "
+        "(default: False)",
+    )
+    parser.add_argument(
+        "--fail-if-running",
+        action="store_true",
+        default=False,
+        help="Fail if application is already running (default: False)",
+    )
+    parser.add_argument(
+        "--restart",
+        action="store_true",
+        default=False,
+        help="Stop the application if it is already running (default: False)",
+    )
     return parser


 def _get_jsshell_run_parser():
     parser = argparse.ArgumentParser()
-    group = parser.add_argument_group('the compiled program')
-    group.add_argument('params', nargs='...', default=[],
-                       help='Command-line arguments to be passed through to the program. Not '
-                            'specifying a --profile or -P option will result in a temporary profile '
-                            'being used.')
+    group = parser.add_argument_group("the compiled program")
+    group.add_argument(
+        "params",
+        nargs="...",
+        default=[],
+        help="Command-line arguments to be passed through to the program. Not "
+        "specifying a --profile or -P option will result in a temporary profile "
+        "being used.",
+    )

-    group = parser.add_argument_group('debugging')
-    group.add_argument('--debug', action='store_true',
-                       help='Enable the debugger. Not specifying a --debugger option will result '
-                            'in the default debugger being used.')
-    group.add_argument('--debugger', default=None, type=str,
-                       help='Name of debugger to use.')
-    group.add_argument('--debugger-args', default=None, metavar='params', type=str,
-                       help='Command-line arguments to pass to the debugger itself; '
-                            'split as the Bourne shell would.')
-    group.add_argument('--debugparams', action=StoreDebugParamsAndWarnAction,
-                       default=None, type=str, dest='debugger_args',
-                       help=argparse.SUPPRESS)
+    group = parser.add_argument_group("debugging")
+    group.add_argument(
+        "--debug",
+        action="store_true",
+        help="Enable the debugger. Not specifying a --debugger option will result "
+        "in the default debugger being used.",
+    )
+    group.add_argument(
+        "--debugger", default=None, type=str, help="Name of debugger to use."
+    )
+    group.add_argument(
+        "--debugger-args",
+        default=None,
+        metavar="params",
+        type=str,
+        help="Command-line arguments to pass to the debugger itself; "
+        "split as the Bourne shell would.",
+    )
+    group.add_argument(
+        "--debugparams",
+        action=StoreDebugParamsAndWarnAction,
+        default=None,
+        type=str,
+        dest="debugger_args",
+        help=argparse.SUPPRESS,
+    )

     return parser


 def _get_desktop_run_parser():
     parser = argparse.ArgumentParser()
-    group = parser.add_argument_group('the compiled program')
-    group.add_argument('params', nargs='...', default=[],
-                       help='Command-line arguments to be passed through to the program. Not '
-                            'specifying a --profile or -P option will result in a temporary profile '
-                            'being used.')
-    group.add_argument('--packaged', action='store_true',
-                       help='Run a packaged build.')
-    group.add_argument('--remote', '-r', action='store_true',
-                       help='Do not pass the --no-remote argument by default.')
-    group.add_argument('--background', '-b', action='store_true',
-                       help='Do not pass the --foreground argument by default on Mac.')
-    group.add_argument('--noprofile', '-n', action='store_true',
-                       help='Do not pass the --profile argument by default.')
-    group.add_argument('--disable-e10s', action='store_true',
-                       help='Run the program with electrolysis disabled.')
-    group.add_argument('--enable-crash-reporter', action='store_true',
-                       help='Run the program with the crash reporter enabled.')
-    group.add_argument('--enable-fission', action='store_true',
-                       help='Run the program with fission (site isolation) enabled.')
-    group.add_argument('--setpref', action='append', default=[],
-                       help='Set the specified pref before starting the program. Can be set '
-                            'multiple times. Prefs can also be set in ~/.mozbuild/machrc in the '
-                            '[runprefs] section - see `./mach settings` for more information.')
-    group.add_argument('--temp-profile', action='store_true',
-                       help='Run the program using a new temporary profile created inside '
-                            'the objdir.')
-    group.add_argument('--macos-open', action='store_true',
-                       help="On macOS, run the program using the open(1) command. Per open(1), "
-                            "the browser is launched \"just as if you had double-clicked the file's "
-                            "icon\". The browser can not be launched under a debugger with this "
-                            "option.")
+    group = parser.add_argument_group("the compiled program")
+    group.add_argument(
+        "params",
+        nargs="...",
+        default=[],
+        help="Command-line arguments to be passed through to the program. Not "
+        "specifying a --profile or -P option will result in a temporary profile "
+        "being used.",
+    )
+    group.add_argument("--packaged", action="store_true", help="Run a packaged build.")
+    group.add_argument(
+        "--remote",
+        "-r",
+        action="store_true",
+        help="Do not pass the --no-remote argument by default.",
+    )
+    group.add_argument(
+        "--background",
+        "-b",
+        action="store_true",
+        help="Do not pass the --foreground argument by default on Mac.",
+    )
+    group.add_argument(
+        "--noprofile",
+        "-n",
+        action="store_true",
+        help="Do not pass the --profile argument by default.",
+    )
+    group.add_argument(
+        "--disable-e10s",
+        action="store_true",
+        help="Run the program with electrolysis disabled.",
+    )
+    group.add_argument(
+        "--enable-crash-reporter",
+        action="store_true",
+        help="Run the program with the crash reporter enabled.",
+    )
+    group.add_argument(
+        "--enable-fission",
+        action="store_true",
+        help="Run the program with fission (site isolation) enabled.",
+    )
+    group.add_argument(
+        "--setpref",
+        action="append",
+        default=[],
+        help="Set the specified pref before starting the program. Can be set "
+        "multiple times. Prefs can also be set in ~/.mozbuild/machrc in the "
+        "[runprefs] section - see `./mach settings` for more information.",
+    )
+    group.add_argument(
+        "--temp-profile",
+        action="store_true",
+        help="Run the program using a new temporary profile created inside "
+        "the objdir.",
+    )
+    group.add_argument(
+        "--macos-open",
+        action="store_true",
+        help="On macOS, run the program using the open(1) command. Per open(1), "
+        "the browser is launched \"just as if you had double-clicked the file's "
+        'icon". The browser can not be launched under a debugger with this '
+        "option.",
+    )

-    group = parser.add_argument_group('debugging')
-    group.add_argument('--debug', action='store_true',
-                       help='Enable the debugger. Not specifying a --debugger option will result '
-                            'in the default debugger being used.')
-    group.add_argument('--debugger', default=None, type=str,
-                       help='Name of debugger to use.')
-    group.add_argument('--debugger-args', default=None, metavar='params', type=str,
-                       help='Command-line arguments to pass to the debugger itself; '
-                            'split as the Bourne shell would.')
-    group.add_argument('--debugparams', action=StoreDebugParamsAndWarnAction,
-                       default=None, type=str, dest='debugger_args',
-                       help=argparse.SUPPRESS)
+    group = parser.add_argument_group("debugging")
+    group.add_argument(
+        "--debug",
+        action="store_true",
+        help="Enable the debugger. Not specifying a --debugger option will result "
+        "in the default debugger being used.",
+    )
+    group.add_argument(
+        "--debugger", default=None, type=str, help="Name of debugger to use."
+    )
+    group.add_argument(
+        "--debugger-args",
+        default=None,
+        metavar="params",
+        type=str,
+        help="Command-line arguments to pass to the debugger itself; "
+        "split as the Bourne shell would.",
+    )
+    group.add_argument(
+        "--debugparams",
+        action=StoreDebugParamsAndWarnAction,
+        default=None,
+        type=str,
+        dest="debugger_args",
+        help=argparse.SUPPRESS,
+    )

-    group = parser.add_argument_group('DMD')
-    group.add_argument('--dmd', action='store_true',
-                       help='Enable DMD. The following arguments have no effect without this.')
-    group.add_argument('--mode', choices=['live', 'dark-matter', 'cumulative', 'scan'],
-                       help='Profiling mode. The default is \'dark-matter\'.')
-    group.add_argument('--stacks', choices=['partial', 'full'],
-                       help='Allocation stack trace coverage. The default is \'partial\'.')
-    group.add_argument('--show-dump-stats', action='store_true',
-                       help='Show stats when doing dumps.')
+    group = parser.add_argument_group("DMD")
+    group.add_argument(
+        "--dmd",
+        action="store_true",
+        help="Enable DMD. The following arguments have no effect without this.",
+    )
+    group.add_argument(
+        "--mode",
+        choices=["live", "dark-matter", "cumulative", "scan"],
+        help="Profiling mode. The default is 'dark-matter'.",
+    )
+    group.add_argument(
+        "--stacks",
+        choices=["partial", "full"],
+        help="Allocation stack trace coverage. The default is 'partial'.",
+    )
+    group.add_argument(
+        "--show-dump-stats", action="store_true", help="Show stats when doing dumps."
+    )

     return parser


 def setup_run_parser():
     build = MozbuildObject.from_environment(cwd=here)
     if conditions.is_android(build):
         return _get_android_run_parser()
@@ -864,713 +1223,898 @@ def setup_run_parser():
         return _get_jsshell_run_parser()
     return _get_desktop_run_parser()


 @CommandProvider
 class RunProgram(MachCommandBase):
     """Run the compiled program."""

-    @Command('run', category='post-build',
-             conditions=[conditions.has_build_or_shell],
-             parser=setup_run_parser,
-             description='Run the compiled program, possibly under a debugger or DMD.')
+    @Command(
+        "run",
+        category="post-build",
+        conditions=[conditions.has_build_or_shell],
+        parser=setup_run_parser,
+        description="Run the compiled program, possibly under a debugger or DMD.",
+    )
     def run(self, **kwargs):
         if conditions.is_android(self):
             return self._run_android(**kwargs)
         if conditions.is_jsshell(self):
             return self._run_jsshell(**kwargs)
         return self._run_desktop(**kwargs)

-    def _run_android(self, app='org.mozilla.geckoview_example', intent=None, env=[], profile=None,
-                     url=None, no_install=None, no_wait=None, fail_if_running=None, restart=None):
-        from mozrunner.devices.android_device import (verify_android_device,
-                                                      _get_device,
-                                                      InstallIntent)
+    def _run_android(
+        self,
+        app="org.mozilla.geckoview_example",
+        intent=None,
+        env=[],
+        profile=None,
+        url=None,
+        no_install=None,
+        no_wait=None,
+        fail_if_running=None,
+        restart=None,
+    ):
+        from mozrunner.devices.android_device import (
+            verify_android_device,
+            _get_device,
+            InstallIntent,
+        )
         from six.moves import shlex_quote

-        if app == 'org.mozilla.geckoview_example':
-            activity_name = 'org.mozilla.geckoview_example.GeckoViewActivity'
-        elif app == 'org.mozilla.geckoview.test':
-            activity_name = 'org.mozilla.geckoview.test.TestRunnerActivity'
-        elif 'fennec' in app or 'firefox' in app:
-            activity_name = 'org.mozilla.gecko.BrowserApp'
+        if app == "org.mozilla.geckoview_example":
+            activity_name = "org.mozilla.geckoview_example.GeckoViewActivity"
+        elif app == "org.mozilla.geckoview.test":
+            activity_name = "org.mozilla.geckoview.test.TestRunnerActivity"
+        elif "fennec" in app or "firefox" in app:
+            activity_name = "org.mozilla.gecko.BrowserApp"
         else:
-            raise RuntimeError('Application not recognized: {}'.format(app))
+            raise RuntimeError("Application not recognized: {}".format(app))

         # `verify_android_device` respects `DEVICE_SERIAL` if it is set and sets it otherwise.
- verify_android_device(self, app=app, - install=InstallIntent.NO if no_install else InstallIntent.YES) - device_serial = os.environ.get('DEVICE_SERIAL') + verify_android_device( + self, app=app, install=InstallIntent.NO if no_install else InstallIntent.YES + ) + device_serial = os.environ.get("DEVICE_SERIAL") if not device_serial: - print('No ADB devices connected.') + print("No ADB devices connected.") return 1 device = _get_device(self.substs, device_serial=device_serial) args = [] if profile: if os.path.isdir(profile): host_profile = profile # Always /data/local/tmp, rather than `device.test_root`, because GeckoView only # takes its configuration file from /data/local/tmp, and we want to follow suit. - target_profile = '/data/local/tmp/{}-profile'.format(app) + target_profile = "/data/local/tmp/{}-profile".format(app) device.rm(target_profile, recursive=True, force=True) device.push(host_profile, target_profile) - self.log(logging.INFO, "run", - {'host_profile': host_profile, 'target_profile': target_profile}, - 'Pushed profile from host "{host_profile}" to target "{target_profile}"') + self.log( + logging.INFO, + "run", + {"host_profile": host_profile, "target_profile": target_profile}, + 'Pushed profile from host "{host_profile}" to target "{target_profile}"', + ) else: target_profile = profile - self.log(logging.INFO, "run", - {'target_profile': target_profile}, - 'Using profile from target "{target_profile}"') + self.log( + logging.INFO, + "run", + {"target_profile": target_profile}, + 'Using profile from target "{target_profile}"', + ) - args = ['--profile', shlex_quote(target_profile)] + args = ["--profile", shlex_quote(target_profile)] extras = {} for i, e in enumerate(env): - extras['env{}'.format(i)] = e + extras["env{}".format(i)] = e if args: - extras['args'] = " ".join(args) - extras['use_multiprocess'] = True # Only GVE and TRA process this extra. + extras["args"] = " ".join(args) if env or args: restart = True if restart: fail_if_running = False - self.log(logging.INFO, "run", - {'app': app}, - 'Stopping {app} to ensure clean restart.') + self.log( + logging.INFO, + "run", + {"app": app}, + "Stopping {app} to ensure clean restart.", + ) device.stop_application(app) # We'd prefer to log the actual `am start ...` command, but it's not trivial to wire the # device's logger to mach's logger. 
- self.log(logging.INFO, "run", - {'app': app, 'activity_name': activity_name}, - 'Starting {app}/{activity_name}.') + self.log( + logging.INFO, + "run", + {"app": app, "activity_name": activity_name}, + "Starting {app}/{activity_name}.", + ) device.launch_application( app_name=app, activity_name=activity_name, intent=intent, extras=extras, url=url, wait=not no_wait, - fail_if_running=fail_if_running) + fail_if_running=fail_if_running, + ) return 0 def _run_jsshell(self, params, debug, debugger, debugger_args): try: - binpath = self.get_binary_path('app') + binpath = self.get_binary_path("app") except BinaryNotFoundException as e: - self.log(logging.ERROR, 'run', - {'error': str(e)}, - 'ERROR: {error}') - self.log(logging.INFO, 'run', - {'help': e.help()}, - '{help}') + self.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}") + self.log(logging.INFO, "run", {"help": e.help()}, "{help}") return 1 args = [binpath] if params: args.extend(params) extra_env = { - 'RUST_BACKTRACE': 'full', + "RUST_BACKTRACE": "full", } if debug or debugger or debugger_args: - if 'INSIDE_EMACS' in os.environ: + if "INSIDE_EMACS" in os.environ: self.log_manager.terminal_handler.setLevel(logging.WARNING) import mozdebug + if not debugger: # No debugger name was provided. Look for the default ones on # current OS. - debugger = mozdebug.get_default_debugger_name(mozdebug.DebuggerSearch.KeepLooking) + debugger = mozdebug.get_default_debugger_name( + mozdebug.DebuggerSearch.KeepLooking + ) if debugger: self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args) if not debugger or not self.debuggerInfo: print("Could not find a suitable debugger in your PATH.") return 1 # Parameters come from the CLI. We need to convert them before # their use. if debugger_args: from mozbuild import shellutil + try: debugger_args = shellutil.split(debugger_args) except shellutil.MetaCharacterException as e: - print("The --debugger-args you passed require a real shell to parse them.") + print( + "The --debugger-args you passed require a real shell to parse them." + ) print("(We can't handle the %r character.)" % e.char) return 1 # Prepend the debugger args. args = [self.debuggerInfo.path] + self.debuggerInfo.args + args - return self.run_process(args=args, ensure_exit_code=False, - pass_thru=True, append_env=extra_env) + return self.run_process( + args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env + ) - def _run_desktop(self, params, packaged, remote, background, noprofile, - disable_e10s, enable_crash_reporter, enable_fission, setpref, - temp_profile, macos_open, debug, debugger, debugger_args, dmd, - mode, stacks, show_dump_stats): + def _run_desktop( + self, + params, + packaged, + remote, + background, + noprofile, + disable_e10s, + enable_crash_reporter, + enable_fission, + setpref, + temp_profile, + macos_open, + debug, + debugger, + debugger_args, + dmd, + mode, + stacks, + show_dump_stats, + ): from mozprofile import Profile, Preferences try: if packaged: - binpath = self.get_binary_path(where='staged-package') + binpath = self.get_binary_path(where="staged-package") else: - binpath = self.get_binary_path('app') + binpath = self.get_binary_path("app") except BinaryNotFoundException as e: - self.log(logging.ERROR, 'run', - {'error': str(e)}, - 'ERROR: {error}') + self.log(logging.ERROR, "run", {"error": str(e)}, "ERROR: {error}") if packaged: - self.log(logging.INFO, 'run', - {'help': "It looks like your build isn\'t packaged. 
" - "You can run |./mach package| to package it."}, - '{help}') + self.log( + logging.INFO, + "run", + { + "help": "It looks like your build isn't packaged. " + "You can run |./mach package| to package it." + }, + "{help}", + ) else: - self.log(logging.INFO, 'run', - {'help': e.help()}, - '{help}') + self.log(logging.INFO, "run", {"help": e.help()}, "{help}") return 1 args = [] if macos_open: if debug: - print("The browser can not be launched in the debugger " - "when using the macOS open command.") + print( + "The browser can not be launched in the debugger " + "when using the macOS open command." + ) return 1 try: - m = re.search(r'^.+\.app', binpath) + m = re.search(r"^.+\.app", binpath) apppath = m.group(0) - args = ['open', apppath, '--args'] + args = ["open", apppath, "--args"] except Exception as e: - print("Couldn't get the .app path from the binary path. " - "The macOS open option can only be used on macOS") + print( + "Couldn't get the .app path from the binary path. " + "The macOS open option can only be used on macOS" + ) print(e) return 1 else: args = [binpath] if params: args.extend(params) if not remote: - args.append('-no-remote') + args.append("-no-remote") - if not background and sys.platform == 'darwin': - args.append('-foreground') + if not background and sys.platform == "darwin": + args.append("-foreground") - if sys.platform.startswith('win') and \ - 'MOZ_LAUNCHER_PROCESS' in self.defines: - args.append('-wait-for-browser') + if sys.platform.startswith("win") and "MOZ_LAUNCHER_PROCESS" in self.defines: + args.append("-wait-for-browser") - no_profile_option_given = \ - all(p not in params for p in ['-profile', '--profile', '-P']) + no_profile_option_given = all( + p not in params for p in ["-profile", "--profile", "-P"] + ) if no_profile_option_given and not noprofile: prefs = { - 'browser.aboutConfig.showWarning': False, - 'browser.shell.checkDefaultBrowser': False, - 'general.warnOnAboutConfig': False, + "browser.aboutConfig.showWarning": False, + "browser.shell.checkDefaultBrowser": False, + "general.warnOnAboutConfig": False, } prefs.update(self._mach_context.settings.runprefs) - prefs.update([p.split('=', 1) for p in setpref]) + prefs.update([p.split("=", 1) for p in setpref]) for pref in prefs: prefs[pref] = Preferences.cast(prefs[pref]) - tmpdir = os.path.join(self.topobjdir, 'tmp') + tmpdir = os.path.join(self.topobjdir, "tmp") if not os.path.exists(tmpdir): os.makedirs(tmpdir) - if (temp_profile): - path = tempfile.mkdtemp(dir=tmpdir, prefix='profile-') + if temp_profile: + path = tempfile.mkdtemp(dir=tmpdir, prefix="profile-") else: - path = os.path.join(tmpdir, 'profile-default') + path = os.path.join(tmpdir, "profile-default") profile = Profile(path, preferences=prefs) - args.append('-profile') + args.append("-profile") args.append(profile.profile) if not no_profile_option_given and setpref: print("setpref is only supported if a profile is not specified") return 1 if not no_profile_option_given: # The profile name may be non-ascii, but come from the # commandline as str, so convert here with a better guess at # an encoding than the default. 
- encoding = (sys.getfilesystemencoding() or - sys.getdefaultencoding()) - args = [unicode(a, encoding) if not isinstance(a, unicode) else a - for a in args] + encoding = sys.getfilesystemencoding() or sys.getdefaultencoding() + args = [ + unicode(a, encoding) if not isinstance(a, unicode) else a + for a in args + ] some_debugging_option = debug or debugger or debugger_args # By default, because Firefox is a GUI app, on Windows it will not # 'create' a console to which stdout/stderr is printed. This means # printf/dump debugging is invisible. We default to adding the # -attach-console argument to fix this. We avoid this if we're launched # under a debugger (which can do its own picking up of stdout/stderr). # We also check for both the -console and -attach-console flags: # -console causes Firefox to create a separate window; # -attach-console just ends us up with output that gets relayed via mach. # We shouldn't override the user using -console. For more info, see # https://bugzilla.mozilla.org/show_bug.cgi?id=1257155 - if sys.platform.startswith('win') and not some_debugging_option and \ - '-console' not in args and '--console' not in args and \ - '-attach-console' not in args and '--attach-console' not in args: - args.append('-attach-console') + if ( + sys.platform.startswith("win") + and not some_debugging_option + and "-console" not in args + and "--console" not in args + and "-attach-console" not in args + and "--attach-console" not in args + ): + args.append("-attach-console") extra_env = { - 'MOZ_DEVELOPER_REPO_DIR': self.topsrcdir, - 'MOZ_DEVELOPER_OBJ_DIR': self.topobjdir, - 'RUST_BACKTRACE': 'full', + "MOZ_DEVELOPER_REPO_DIR": self.topsrcdir, + "MOZ_DEVELOPER_OBJ_DIR": self.topobjdir, + "RUST_BACKTRACE": "full", } if not enable_crash_reporter: - extra_env['MOZ_CRASHREPORTER_DISABLE'] = '1' + extra_env["MOZ_CRASHREPORTER_DISABLE"] = "1" else: - extra_env['MOZ_CRASHREPORTER'] = '1' + extra_env["MOZ_CRASHREPORTER"] = "1" if disable_e10s: - version_file = os.path.join(self.topsrcdir, 'browser', 'config', 'version.txt') - f = open(version_file, 'r') - extra_env['MOZ_FORCE_DISABLE_E10S'] = f.read().strip() + version_file = os.path.join( + self.topsrcdir, "browser", "config", "version.txt" + ) + f = open(version_file, "r") + extra_env["MOZ_FORCE_DISABLE_E10S"] = f.read().strip() if enable_fission: - extra_env['MOZ_FORCE_ENABLE_FISSION'] = '1' + extra_env["MOZ_FORCE_ENABLE_FISSION"] = "1" if some_debugging_option: - if 'INSIDE_EMACS' in os.environ: + if "INSIDE_EMACS" in os.environ: self.log_manager.terminal_handler.setLevel(logging.WARNING) import mozdebug + if not debugger: # No debugger name was provided. Look for the default ones on # current OS. - debugger = mozdebug.get_default_debugger_name(mozdebug.DebuggerSearch.KeepLooking) + debugger = mozdebug.get_default_debugger_name( + mozdebug.DebuggerSearch.KeepLooking + ) if debugger: self.debuggerInfo = mozdebug.get_debugger_info(debugger, debugger_args) if not debugger or not self.debuggerInfo: print("Could not find a suitable debugger in your PATH.") return 1 # Parameters come from the CLI. We need to convert them before # their use. if debugger_args: from mozbuild import shellutil + try: debugger_args = shellutil.split(debugger_args) except shellutil.MetaCharacterException as e: - print("The --debugger-args you passed require a real shell to parse them.") + print( + "The --debugger-args you passed require a real shell to parse them." + ) print("(We can't handle the %r character.)" % e.char) return 1 # Prepend the debugger args. 
args = [self.debuggerInfo.path] + self.debuggerInfo.args + args if dmd: dmd_params = [] if mode: - dmd_params.append('--mode=' + mode) + dmd_params.append("--mode=" + mode) if stacks: - dmd_params.append('--stacks=' + stacks) + dmd_params.append("--stacks=" + stacks) if show_dump_stats: - dmd_params.append('--show-dump-stats=yes') + dmd_params.append("--show-dump-stats=yes") if dmd_params: - extra_env['DMD'] = ' '.join(dmd_params) + extra_env["DMD"] = " ".join(dmd_params) else: - extra_env['DMD'] = '1' + extra_env["DMD"] = "1" - return self.run_process(args=args, ensure_exit_code=False, - pass_thru=True, append_env=extra_env) + return self.run_process( + args=args, ensure_exit_code=False, pass_thru=True, append_env=extra_env + ) @CommandProvider class Buildsymbols(MachCommandBase): """Produce a package of debug symbols suitable for use with Breakpad.""" - @Command('buildsymbols', category='post-build', - description='Produce a package of Breakpad-format symbols.') + @Command( + "buildsymbols", + category="post-build", + description="Produce a package of Breakpad-format symbols.", + ) def buildsymbols(self): - return self._run_make(directory=".", target='buildsymbols', ensure_exit_code=False) + return self._run_make( + directory=".", target="buildsymbols", ensure_exit_code=False + ) @CommandProvider class MachDebug(MachCommandBase): - @Command('environment', category='build-dev', - description='Show info about the mach and build environment.') - @CommandArgument('--format', default='pretty', - choices=['pretty', 'json'], - help='Print data in the given format.') - @CommandArgument('--output', '-o', type=str, - help='Output to the given file.') - @CommandArgument('--verbose', '-v', action='store_true', - help='Print verbose output.') + @Command( + "environment", + category="build-dev", + description="Show info about the mach and build environment.", + ) + @CommandArgument( + "--format", + default="pretty", + choices=["pretty", "json"], + help="Print data in the given format.", + ) + @CommandArgument("--output", "-o", type=str, help="Output to the given file.") + @CommandArgument( + "--verbose", "-v", action="store_true", help="Print verbose output." + ) def environment(self, format, output=None, verbose=False): - func = getattr(self, '_environment_%s' % format.replace('.', '_')) + func = getattr(self, "_environment_%s" % format.replace(".", "_")) if output: # We want to preserve mtimes if the output file already exists # and the content hasn't changed. 
from mozbuild.util import FileAvoidWrite + with FileAvoidWrite(output) as out: return func(out, verbose) return func(sys.stdout, verbose) def _environment_pretty(self, out, verbose): state_dir = self._mach_context.state_dir import platform - print('platform:\n\t%s' % platform.platform(), file=out) - print('python version:\n\t%s' % sys.version, file=out) - print('python prefix:\n\t%s' % sys.prefix, file=out) - print('mach cwd:\n\t%s' % self._mach_context.cwd, file=out) - print('os cwd:\n\t%s' % os.getcwd(), file=out) - print('mach directory:\n\t%s' % self._mach_context.topdir, file=out) - print('state directory:\n\t%s' % state_dir, file=out) - print('object directory:\n\t%s' % self.topobjdir, file=out) + print("platform:\n\t%s" % platform.platform(), file=out) + print("python version:\n\t%s" % sys.version, file=out) + print("python prefix:\n\t%s" % sys.prefix, file=out) + print("mach cwd:\n\t%s" % self._mach_context.cwd, file=out) + print("os cwd:\n\t%s" % os.getcwd(), file=out) + print("mach directory:\n\t%s" % self._mach_context.topdir, file=out) + print("state directory:\n\t%s" % state_dir, file=out) + + print("object directory:\n\t%s" % self.topobjdir, file=out) - if self.mozconfig['path']: - print('mozconfig path:\n\t%s' % self.mozconfig['path'], file=out) - if self.mozconfig['configure_args']: - print('mozconfig configure args:', file=out) - for arg in self.mozconfig['configure_args']: - print('\t%s' % arg, file=out) + if self.mozconfig["path"]: + print("mozconfig path:\n\t%s" % self.mozconfig["path"], file=out) + if self.mozconfig["configure_args"]: + print("mozconfig configure args:", file=out) + for arg in self.mozconfig["configure_args"]: + print("\t%s" % arg, file=out) - if self.mozconfig['make_extra']: - print('mozconfig extra make args:', file=out) - for arg in self.mozconfig['make_extra']: - print('\t%s' % arg, file=out) + if self.mozconfig["make_extra"]: + print("mozconfig extra make args:", file=out) + for arg in self.mozconfig["make_extra"]: + print("\t%s" % arg, file=out) - if self.mozconfig['make_flags']: - print('mozconfig make flags:', file=out) - for arg in self.mozconfig['make_flags']: - print('\t%s' % arg, file=out) + if self.mozconfig["make_flags"]: + print("mozconfig make flags:", file=out) + for arg in self.mozconfig["make_flags"]: + print("\t%s" % arg, file=out) config = None try: config = self.config_environment except Exception: pass if config: - print('config topsrcdir:\n\t%s' % config.topsrcdir, file=out) - print('config topobjdir:\n\t%s' % config.topobjdir, file=out) + print("config topsrcdir:\n\t%s" % config.topsrcdir, file=out) + print("config topobjdir:\n\t%s" % config.topobjdir, file=out) if verbose: - print('config substitutions:', file=out) + print("config substitutions:", file=out) for k in sorted(config.substs): - print('\t%s: %s' % (k, config.substs[k]), file=out) + print("\t%s: %s" % (k, config.substs[k]), file=out) - print('config defines:', file=out) + print("config defines:", file=out) for k in sorted(config.defines): - print('\t%s' % k, file=out) + print("\t%s" % k, file=out) def _environment_json(self, out, verbose): import json class EnvironmentEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, MozbuildObject): result = { - 'topsrcdir': obj.topsrcdir, - 'topobjdir': obj.topobjdir, - 'mozconfig': obj.mozconfig, + "topsrcdir": obj.topsrcdir, + "topobjdir": obj.topobjdir, + "mozconfig": obj.mozconfig, } if verbose: - result['substs'] = obj.substs - result['defines'] = obj.defines + result["substs"] = obj.substs + 
result["defines"] = obj.defines return result elif isinstance(obj, set): return list(obj) return json.JSONEncoder.default(self, obj) + json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out) @CommandProvider class Repackage(MachCommandBase): - '''Repackages artifacts into different formats. + """Repackages artifacts into different formats. This is generally used after packages are signed by the signing scriptworkers in order to bundle things up into shippable formats, such as a .dmg on OSX or an installer exe on Windows. - ''' - @Command('repackage', category='misc', - description='Repackage artifacts into different formats.') + """ + + @Command( + "repackage", + category="misc", + description="Repackage artifacts into different formats.", + ) def repackage(self): print("Usage: ./mach repackage [dmg|installer|mar] [args...]") - @SubCommand('repackage', 'dmg', - description='Repackage a tar file into a .dmg for OSX') - @CommandArgument('--input', '-i', type=str, required=True, - help='Input filename') - @CommandArgument('--output', '-o', type=str, required=True, - help='Output filename') + @SubCommand( + "repackage", "dmg", description="Repackage a tar file into a .dmg for OSX" + ) + @CommandArgument("--input", "-i", type=str, required=True, help="Input filename") + @CommandArgument("--output", "-o", type=str, required=True, help="Output filename") def repackage_dmg(self, input, output): if not os.path.exists(input): - print('Input file does not exist: %s' % input) + print("Input file does not exist: %s" % input) return 1 - if not os.path.exists(os.path.join(self.topobjdir, 'config.status')): - print('config.status not found. Please run |mach configure| ' - 'prior to |mach repackage|.') + if not os.path.exists(os.path.join(self.topobjdir, "config.status")): + print( + "config.status not found. Please run |mach configure| " + "prior to |mach repackage|." 
+ ) return 1 from mozbuild.repackaging.dmg import repackage_dmg + repackage_dmg(input, output) - @SubCommand('repackage', 'installer', - description='Repackage into a Windows installer exe') - @CommandArgument('--tag', type=str, required=True, - help='The .tag file used to build the installer') - @CommandArgument('--setupexe', type=str, required=True, - help='setup.exe file inside the installer') - @CommandArgument('--package', type=str, required=False, - help='Optional package .zip for building a full installer') - @CommandArgument('--output', '-o', type=str, required=True, - help='Output filename') - @CommandArgument('--package-name', type=str, required=False, - help='Name of the package being rebuilt') - @CommandArgument('--sfx-stub', type=str, required=True, - help='Path to the self-extraction stub.') - @CommandArgument('--use-upx', required=False, action='store_true', - help='Run UPX on the self-extraction stub.') - def repackage_installer(self, tag, setupexe, package, output, package_name, sfx_stub, use_upx): + @SubCommand( + "repackage", "installer", description="Repackage into a Windows installer exe" + ) + @CommandArgument( + "--tag", + type=str, + required=True, + help="The .tag file used to build the installer", + ) + @CommandArgument( + "--setupexe", + type=str, + required=True, + help="setup.exe file inside the installer", + ) + @CommandArgument( + "--package", + type=str, + required=False, + help="Optional package .zip for building a full installer", + ) + @CommandArgument("--output", "-o", type=str, required=True, help="Output filename") + @CommandArgument( + "--package-name", + type=str, + required=False, + help="Name of the package being rebuilt", + ) + @CommandArgument( + "--sfx-stub", type=str, required=True, help="Path to the self-extraction stub." 
+    )
+    @CommandArgument(
+        "--use-upx",
+        required=False,
+        action="store_true",
+        help="Run UPX on the self-extraction stub.",
+    )
+    def repackage_installer(
+        self, tag, setupexe, package, output, package_name, sfx_stub, use_upx
+    ):
         from mozbuild.repackaging.installer import repackage_installer
+
         repackage_installer(
             topsrcdir=self.topsrcdir,
             tag=tag,
             setupexe=setupexe,
             package=package,
             output=output,
             package_name=package_name,
             sfx_stub=sfx_stub,
             use_upx=use_upx,
         )
 
-    @SubCommand('repackage', 'msi',
-                description='Repackage into an MSI')
-    @CommandArgument('--wsx', type=str, required=True,
-                     help='The wsx file used to build the installer')
-    @CommandArgument('--version', type=str, required=True,
-                     help='The Firefox version used to create the installer')
-    @CommandArgument('--locale', type=str, required=True,
-                     help='The locale of the installer')
-    @CommandArgument('--arch', type=str, required=True,
-                     help='The architecture you are building.')
-    @CommandArgument('--setupexe', type=str, required=True,
-                     help='setup.exe installer')
-    @CommandArgument('--candle', type=str, required=False,
-                     help='location of candle binary')
-    @CommandArgument('--light', type=str, required=False,
-                     help='location of light binary')
-    @CommandArgument('--output', '-o', type=str, required=True,
-                     help='Output filename')
-    def repackage_msi(self, wsx, version, locale, arch, setupexe, candle, light, output):
+    @SubCommand("repackage", "msi", description="Repackage into an MSI")
+    @CommandArgument(
+        "--wsx",
+        type=str,
+        required=True,
+        help="The wsx file used to build the installer",
+    )
+    @CommandArgument(
+        "--version",
+        type=str,
+        required=True,
+        help="The Firefox version used to create the installer",
+    )
+    @CommandArgument(
+        "--locale", type=str, required=True, help="The locale of the installer"
+    )
+    @CommandArgument(
+        "--arch", type=str, required=True, help="The architecture you are building."
+    )
+    @CommandArgument("--setupexe", type=str, required=True, help="setup.exe installer")
+    @CommandArgument(
+        "--candle", type=str, required=False, help="location of candle binary"
+    )
+    @CommandArgument(
+        "--light", type=str, required=False, help="location of light binary"
+    )
+    @CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
+    def repackage_msi(
+        self, wsx, version, locale, arch, setupexe, candle, light, output
+    ):
         from mozbuild.repackaging.msi import repackage_msi
+
         repackage_msi(
             topsrcdir=self.topsrcdir,
             wsx=wsx,
             version=version,
             locale=locale,
             arch=arch,
             setupexe=setupexe,
             candle=candle,
             light=light,
             output=output,
         )
 
-    @SubCommand('repackage', 'mar',
-                description='Repackage into complete MAR file')
-    @CommandArgument('--input', '-i', type=str, required=True,
-                     help='Input filename')
-    @CommandArgument('--mar', type=str, required=True,
-                     help='Mar binary path')
-    @CommandArgument('--output', '-o', type=str, required=True,
-                     help='Output filename')
-    @CommandArgument('--arch', type=str, required=True,
-                     help='The architecture you are building.')
-    @CommandArgument('--mar-channel-id', type=str,
-                     help='Mar channel id')
+    @SubCommand("repackage", "mar", description="Repackage into complete MAR file")
+    @CommandArgument("--input", "-i", type=str, required=True, help="Input filename")
+    @CommandArgument("--mar", type=str, required=True, help="Mar binary path")
+    @CommandArgument("--output", "-o", type=str, required=True, help="Output filename")
+    @CommandArgument(
+        "--arch", type=str, required=True, help="The architecture you are building."
+ ) + @CommandArgument("--mar-channel-id", type=str, help="Mar channel id") def repackage_mar(self, input, mar, output, arch, mar_channel_id): from mozbuild.repackaging.mar import repackage_mar + repackage_mar( self.topsrcdir, input, mar, output, arch=arch, mar_channel_id=mar_channel_id, ) @SettingsProvider -class TelemetrySettings(): +class TelemetrySettings: config_settings = [ - ('build.telemetry', 'boolean', """ + ( + "build.telemetry", + "boolean", + """ Enable submission of build system telemetry. - """.strip(), False), + """.strip(), + False, + ), ] @CommandProvider class L10NCommands(MachCommandBase): - @Command('package-multi-locale', category='post-build', - description='Package a multi-locale version of the built product ' - 'for distribution as an APK, DMG, etc.') - @CommandArgument('--locales', metavar='LOCALES', nargs='+', - required=True, - help='List of locales to package, including "en-US"') - @CommandArgument('--verbose', action='store_true', - help='Log informative status messages.') + @Command( + "package-multi-locale", + category="post-build", + description="Package a multi-locale version of the built product " + "for distribution as an APK, DMG, etc.", + ) + @CommandArgument( + "--locales", + metavar="LOCALES", + nargs="+", + required=True, + help='List of locales to package, including "en-US"', + ) + @CommandArgument( + "--verbose", action="store_true", help="Log informative status messages." + ) def package_l10n(self, verbose=False, locales=[]): - if 'RecursiveMake' not in self.substs['BUILD_BACKENDS']: - print('Artifact builds do not support localization. ' - 'If you know what you are doing, you can use:\n' - 'ac_add_options --disable-compile-environment\n' - 'export BUILD_BACKENDS=FasterMake,RecursiveMake\n' - 'in your mozconfig.') + if "RecursiveMake" not in self.substs["BUILD_BACKENDS"]: + print( + "Artifact builds do not support localization. " + "If you know what you are doing, you can use:\n" + "ac_add_options --disable-compile-environment\n" + "export BUILD_BACKENDS=FasterMake,RecursiveMake\n" + "in your mozconfig." + ) return 1 - if 'en-US' not in locales: - self.log(logging.WARN, 'package-multi-locale', {'locales': locales}, - 'List of locales does not include default locale "en-US": ' - '{locales}; adding "en-US"') - locales.append('en-US') + if "en-US" not in locales: + self.log( + logging.WARN, + "package-multi-locale", + {"locales": locales}, + 'List of locales does not include default locale "en-US": ' + '{locales}; adding "en-US"', + ) + locales.append("en-US") locales = list(sorted(locales)) append_env = { # We are only (re-)packaging, we don't want to (re-)build # anything inside Gradle. 
- 'GRADLE_INVOKED_WITHIN_MACH_BUILD': '1', - 'MOZ_CHROME_MULTILOCALE': ' '.join(locales), + "GRADLE_INVOKED_WITHIN_MACH_BUILD": "1", + "MOZ_CHROME_MULTILOCALE": " ".join(locales), } for locale in locales: - if locale == 'en-US': - self.log(logging.INFO, 'package-multi-locale', {'locale': locale}, - 'Skipping default locale {locale}') + if locale == "en-US": + self.log( + logging.INFO, + "package-multi-locale", + {"locale": locale}, + "Skipping default locale {locale}", + ) continue - self.log(logging.INFO, 'package-multi-locale', {'locale': locale}, - 'Processing chrome Gecko resources for locale {locale}') + self.log( + logging.INFO, + "package-multi-locale", + {"locale": locale}, + "Processing chrome Gecko resources for locale {locale}", + ) self.run_process( - [mozpath.join(self.topsrcdir, 'mach'), 'build', 'chrome-{}'.format(locale)], + [ + mozpath.join(self.topsrcdir, "mach"), + "build", + "chrome-{}".format(locale), + ], append_env=append_env, pass_thru=True, ensure_exit_code=True, - cwd=mozpath.join(self.topsrcdir)) + cwd=mozpath.join(self.topsrcdir), + ) - if self.substs['MOZ_BUILD_APP'] == 'mobile/android': - self.log(logging.INFO, 'package-multi-locale', {}, - 'Invoking `mach android assemble-app`') + if self.substs["MOZ_BUILD_APP"] == "mobile/android": + self.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking `mach android assemble-app`", + ) self.run_process( - [mozpath.join(self.topsrcdir, 'mach'), 'android', 'assemble-app'], + [mozpath.join(self.topsrcdir, "mach"), "android", "assemble-app"], append_env=append_env, pass_thru=True, ensure_exit_code=True, - cwd=mozpath.join(self.topsrcdir)) + cwd=mozpath.join(self.topsrcdir), + ) - self.log(logging.INFO, 'package-multi-locale', {}, - 'Invoking multi-locale `mach package`') + self.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking multi-locale `mach package`", + ) self._run_make( directory=self.topobjdir, - target=['package', 'AB_CD=multi'], + target=["package", "AB_CD=multi"], append_env=append_env, pass_thru=True, - ensure_exit_code=True) + ensure_exit_code=True, + ) - if self.substs['MOZ_BUILD_APP'] == 'mobile/android': - self.log(logging.INFO, 'package-multi-locale', {}, - 'Invoking `mach android archive-geckoview`') + if self.substs["MOZ_BUILD_APP"] == "mobile/android": + self.log( + logging.INFO, + "package-multi-locale", + {}, + "Invoking `mach android archive-geckoview`", + ) self.run_process( - [mozpath.join(self.topsrcdir, 'mach'), 'android', - 'archive-geckoview'], + [mozpath.join(self.topsrcdir, "mach"), "android", "archive-geckoview"], append_env=append_env, pass_thru=True, ensure_exit_code=True, - cwd=mozpath.join(self.topsrcdir)) + cwd=mozpath.join(self.topsrcdir), + ) return 0 @CommandProvider class CreateMachEnvironment(MachCommandBase): """Create the mach virtualenvs.""" - @Command('create-mach-environment', category='devenv', - description=( - 'Create the `mach` virtualenvs. If executed with python3 (the ' - 'default when entering from `mach`), create both a python3 ' - 'and python2.7 virtualenv. If executed with python2, only ' - 'create the python2.7 virtualenv.')) + @Command( + "create-mach-environment", + category="devenv", + description=( + "Create the `mach` virtualenvs. If executed with python3 (the " + "default when entering from `mach`), create both a python3 " + "and python2.7 virtualenv. If executed with python2, only " + "create the python2.7 virtualenv." 
+ ), + ) @CommandArgument( - '-f', '--force', action='store_true', - help=('Force re-creating the virtualenv even if it is already ' - 'up-to-date.')) + "-f", + "--force", + action="store_true", + help=("Force re-creating the virtualenv even if it is already " "up-to-date."), + ) def create_mach_environment(self, force=False): from mozboot.util import get_mach_virtualenv_root from mozbuild.pythonutil import find_python2_executable from mozbuild.virtualenv import VirtualenvManager from six import PY2 virtualenv_path = get_mach_virtualenv_root(py2=PY2) if sys.executable.startswith(virtualenv_path): - print('You can only create a mach environment with the system ' - 'Python. Re-run this `mach` command with the system Python.', - file=sys.stderr) + print( + "You can only create a mach environment with the system " + "Python. Re-run this `mach` command with the system Python.", + file=sys.stderr, + ) return 1 manager = VirtualenvManager( - self.topsrcdir, virtualenv_path, sys.stdout, - os.path.join(self.topsrcdir, 'build', - 'mach_virtualenv_packages.txt'), - populate_local_paths=False) + self.topsrcdir, + virtualenv_path, + sys.stdout, + os.path.join(self.topsrcdir, "build", "mach_virtualenv_packages.txt"), + populate_local_paths=False, + ) if manager.up_to_date(sys.executable) and not force: - print('virtualenv at %s is already up to date.' % virtualenv_path) + print("virtualenv at %s is already up to date." % virtualenv_path) else: manager.build(sys.executable) - manager.install_pip_package('zstandard>=0.9.0,<=0.13.0') + manager.install_pip_package("zstandard>=0.9.0,<=0.13.0") if not PY2: # This can fail on some platforms. See # https://bugzilla.mozilla.org/show_bug.cgi?id=1660120 try: - manager.install_pip_package('glean_sdk~=32.3.1') + manager.install_pip_package("glean_sdk~=32.3.1") except subprocess.CalledProcessError: - print('Could not install glean_sdk, so telemetry will not be ' - 'collected. Continuing.') - print('Python 3 mach environment created.') + print( + "Could not install glean_sdk, so telemetry will not be " + "collected. Continuing." + ) + print("Python 3 mach environment created.") python2, _ = find_python2_executable() if not python2: - print('WARNING! Could not find a Python 2 executable to create ' - 'a Python 2 virtualenv', file=sys.stderr) + print( + "WARNING! Could not find a Python 2 executable to create " + "a Python 2 virtualenv", + file=sys.stderr, + ) return 0 args = [ - python2, os.path.join(self.topsrcdir, 'mach'), - 'create-mach-environment' + python2, + os.path.join(self.topsrcdir, "mach"), + "create-mach-environment", ] if force: - args.append('-f') + args.append("-f") ret = subprocess.call(args) if ret: - print('WARNING! Failed to create a Python 2 mach environment.', - file=sys.stderr) + print( + "WARNING! Failed to create a Python 2 mach environment.", + file=sys.stderr, + ) else: - print('Python 2 mach environment created.') + print("Python 2 mach environment created.")
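
For reference, a minimal standalone sketch of how the reformatted `_run_desktop` above folds the new DMD argument group into the `DMD` environment variable. The helper name `build_dmd_env` is hypothetical; in mach itself this logic lives inline in `_run_desktop`:

    def build_dmd_env(mode=None, stacks=None, show_dump_stats=False):
        # Mirrors the dmd_params logic above: each CLI flag becomes a DMD
        # sub-option, and a bare "1" enables DMD with its defaults.
        dmd_params = []
        if mode:
            dmd_params.append("--mode=" + mode)  # live|dark-matter|cumulative|scan
        if stacks:
            dmd_params.append("--stacks=" + stacks)  # partial|full
        if show_dump_stats:
            dmd_params.append("--show-dump-stats=yes")
        return {"DMD": " ".join(dmd_params) if dmd_params else "1"}

    # `./mach run --dmd` alone exports DMD=1; adding flags builds the option string:
    assert build_dmd_env() == {"DMD": "1"}
    assert build_dmd_env(mode="live", stacks="full") == {"DMD": "--mode=live --stacks=full"}
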
--- a/python/mozbuild/mozbuild/telemetry.py +++ b/python/mozbuild/mozbuild/telemetry.py @@ -1,18 +1,18 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import division, absolute_import, print_function, unicode_literals -''' +""" This file contains a voluptuous schema definition for build system telemetry, and functions to fill an instance of that schema for a single mach invocation. -''' +""" import json import os import math import platform import pprint import sys from datetime import datetime @@ -26,341 +26,394 @@ from voluptuous import ( Schema, ) from voluptuous.validators import Datetime import mozpack.path as mozpath from .base import BuildEnvironmentNotFoundException from .configure.constants import CompilerType -schema = Schema({ - Required('client_id', description='A UUID to uniquely identify a client'): Any(*string_types), - Required('time', description='Time at which this event happened'): Datetime(), - Required('command', description='The mach command that was invoked'): Any(*string_types), - Required('argv', description=( - 'Full mach commandline. ' + - 'If the commandline contains ' + - 'absolute paths they will be sanitized.')): [Any(*string_types)], - Required('success', description='true if the command succeeded'): bool, - Optional('exception', description=( - 'If a Python exception was encountered during the execution ' + - 'of the command, this value contains the result of calling `repr` ' + - 'on the exception object.')): Any(*string_types), - Optional('file_types_changed', description=( - 'This array contains a list of objects with {ext, count} properties giving the count ' + - 'of files changed since the last invocation grouped by file type')): [ +schema = Schema( + { + Required("client_id", description="A UUID to uniquely identify a client"): Any( + *string_types + ), + Required("time", description="Time at which this event happened"): Datetime(), + Required("command", description="The mach command that was invoked"): Any( + *string_types + ), + Required( + "argv", + description=( + "Full mach commandline. " + + "If the commandline contains " + + "absolute paths they will be sanitized." + ), + ): [Any(*string_types)], + Required("success", description="true if the command succeeded"): bool, + Optional( + "exception", + description=( + "If a Python exception was encountered during the execution " + + "of the command, this value contains the result of calling `repr` " + + "on the exception object." 
+ ), + ): Any(*string_types), + Optional( + "file_types_changed", + description=( + "This array contains a list of objects with {ext, count} properties giving the " + + "count of files changed since the last invocation grouped by file type" + ), + ): [ { - Required('ext', description='File extension'): Any(*string_types), - Required('count', description='Count of changed files with this extension'): int, + Required("ext", description="File extension"): Any(*string_types), + Required( + "count", description="Count of changed files with this extension" + ): int, } ], - Required('duration_ms', description='Command duration in milliseconds'): int, - Required('build_opts', description='Selected build options'): { - Optional('compiler', description='The compiler type in use (CC_TYPE)'): - Any(*CompilerType.POSSIBLE_VALUES), - Optional('artifact', description='true if --enable-artifact-builds'): bool, - Optional('debug', description='true if build is debug (--enable-debug)'): bool, - Optional('opt', description='true if build is optimized (--enable-optimize)'): bool, - Optional('ccache', description='true if ccache is in use (--with-ccache)'): bool, - Optional('sccache', description='true if ccache in use is sccache'): bool, - Optional('icecream', description='true if icecream in use'): bool, - }, - Optional('build_attrs', description='Attributes characterizing a build'): { - Optional('cpu_percent', description='cpu utilization observed during a build'): int, - Optional('clobber', description='true if the build was a clobber/full build'): bool, - }, - Required('system'): { - # We don't need perfect granularity here. - Required('os', description='Operating system'): Any('windows', 'macos', 'linux', 'other'), - Optional('cpu_brand', description='CPU brand string from CPUID'): Any(*string_types), - Optional('logical_cores', description='Number of logical CPU cores present'): int, - Optional('physical_cores', description='Number of physical CPU cores present'): int, - Optional('memory_gb', description='System memory in GB'): int, - Optional('drive_is_ssd', - description='true if the source directory is on a solid-state disk'): bool, - Optional('virtual_machine', - description='true if the OS appears to be running in a virtual machine'): bool, - }, -}) + Required("duration_ms", description="Command duration in milliseconds"): int, + Required("build_opts", description="Selected build options"): { + Optional("compiler", description="The compiler type in use (CC_TYPE)"): Any( + *CompilerType.POSSIBLE_VALUES + ), + Optional("artifact", description="true if --enable-artifact-builds"): bool, + Optional( + "debug", description="true if build is debug (--enable-debug)" + ): bool, + Optional( + "opt", description="true if build is optimized (--enable-optimize)" + ): bool, + Optional( + "ccache", description="true if ccache is in use (--with-ccache)" + ): bool, + Optional("sccache", description="true if ccache in use is sccache"): bool, + Optional("icecream", description="true if icecream in use"): bool, + }, + Optional("build_attrs", description="Attributes characterizing a build"): { + Optional( + "cpu_percent", description="cpu utilization observed during a build" + ): int, + Optional( + "clobber", description="true if the build was a clobber/full build" + ): bool, + }, + Required("system"): { + # We don't need perfect granularity here. 
+ Required("os", description="Operating system"): Any( + "windows", "macos", "linux", "other" + ), + Optional("cpu_brand", description="CPU brand string from CPUID"): Any( + *string_types + ), + Optional( + "logical_cores", description="Number of logical CPU cores present" + ): int, + Optional( + "physical_cores", description="Number of physical CPU cores present" + ): int, + Optional("memory_gb", description="System memory in GB"): int, + Optional( + "drive_is_ssd", + description="true if the source directory is on a solid-state disk", + ): bool, + Optional( + "virtual_machine", + description="true if the OS appears to be running in a virtual machine", + ): bool, + }, + } +) def get_client_id(state_dir): - ''' + """ Get a client id, which is a UUID, from a file in the state directory. If the file doesn't exist, generate a UUID and save it to a file. - ''' - path = os.path.join(state_dir, 'telemetry_client_id.json') + """ + path = os.path.join(state_dir, "telemetry_client_id.json") if os.path.exists(path): - with open(path, 'r') as f: - return json.load(f)['client_id'] + with open(path, "r") as f: + return json.load(f)["client_id"] import uuid + # uuid4 is random, other uuid types may include identifiers from the local system. client_id = str(uuid.uuid4()) if PY3: - file_mode = 'w' + file_mode = "w" else: - file_mode = 'wb' + file_mode = "wb" with open(path, file_mode) as f: - json.dump({'client_id': client_id}, f) + json.dump({"client_id": client_id}, f) return client_id def cpu_brand_linux(): - ''' + """ Read the CPU brand string out of /proc/cpuinfo on Linux. - ''' - with open('/proc/cpuinfo', 'r') as f: + """ + with open("/proc/cpuinfo", "r") as f: for line in f: - if line.startswith('model name'): - _, brand = line.split(': ', 1) + if line.startswith("model name"): + _, brand = line.split(": ", 1) return brand.rstrip() # not found? return None def cpu_brand_windows(): - ''' + """ Read the CPU brand string from the registry on Windows. - ''' + """ try: import _winreg except ImportError: import winreg as _winreg try: - h = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, - r'HARDWARE\DESCRIPTION\System\CentralProcessor\0') - (brand, ty) = _winreg.QueryValueEx(h, 'ProcessorNameString') + h = _winreg.OpenKey( + _winreg.HKEY_LOCAL_MACHINE, + r"HARDWARE\DESCRIPTION\System\CentralProcessor\0", + ) + (brand, ty) = _winreg.QueryValueEx(h, "ProcessorNameString") if ty == _winreg.REG_SZ: return brand except WindowsError: pass return None def cpu_brand_mac(): - ''' + """ Get the CPU brand string via sysctl on macos. - ''' + """ import ctypes import ctypes.util libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c")) # First, find the required buffer size. bufsize = ctypes.c_size_t(0) - result = libc.sysctlbyname(b'machdep.cpu.brand_string', None, ctypes.byref(bufsize), - None, 0) + result = libc.sysctlbyname( + b"machdep.cpu.brand_string", None, ctypes.byref(bufsize), None, 0 + ) if result != 0: return None bufsize.value += 1 buf = ctypes.create_string_buffer(bufsize.value) # Now actually get the value. - result = libc.sysctlbyname(b'machdep.cpu.brand_string', buf, ctypes.byref(bufsize), None, 0) + result = libc.sysctlbyname( + b"machdep.cpu.brand_string", buf, ctypes.byref(bufsize), None, 0 + ) if result != 0: return None return buf.value.decode() def get_cpu_brand(): - ''' + """ Get the CPU brand string as returned by CPUID. 
- ''' + """ return { - 'Linux': cpu_brand_linux, - 'Windows': cpu_brand_windows, - 'Darwin': cpu_brand_mac, + "Linux": cpu_brand_linux, + "Windows": cpu_brand_windows, + "Darwin": cpu_brand_mac, }.get(platform.system(), lambda: None)() def get_os_name(): return { - 'Linux': 'linux', - 'Windows': 'windows', - 'Darwin': 'macos', - }.get(platform.system(), 'other') + "Linux": "linux", + "Windows": "windows", + "Darwin": "macos", + }.get(platform.system(), "other") def get_psutil_stats(): - '''Return whether psutil exists and its associated stats. + """Return whether psutil exists and its associated stats. @returns (bool, int, int, int) whether psutil exists, the logical CPU count, physical CPU count, and total number of bytes of memory. - ''' + """ try: import psutil return ( True, psutil.cpu_count(), psutil.cpu_count(logical=False), - psutil.virtual_memory().total) + psutil.virtual_memory().total, + ) except ImportError: return False, None, None, None def get_system_info(): - ''' + """ Gather info to fill the `system` keys in the schema. - ''' + """ # Normalize OS names a bit, and bucket non-tier-1 platforms into "other". has_psutil, logical_cores, physical_cores, memory_total = get_psutil_stats() info = { - 'os': get_os_name(), + "os": get_os_name(), } if has_psutil: # `total` on Linux is gathered from /proc/meminfo's `MemTotal`, which is the # total amount of physical memory minus some kernel usage, so round up to the # nearest GB to get a sensible answer. - info['memory_gb'] = int(math.ceil(float(memory_total) / (1024 * 1024 * 1024))) - info['logical_cores'] = logical_cores + info["memory_gb"] = int(math.ceil(float(memory_total) / (1024 * 1024 * 1024))) + info["logical_cores"] = logical_cores if physical_cores is not None: - info['physical_cores'] = physical_cores + info["physical_cores"] = physical_cores cpu_brand = get_cpu_brand() if cpu_brand is not None: - info['cpu_brand'] = cpu_brand + info["cpu_brand"] = cpu_brand # TODO: drive_is_ssd, virtual_machine: https://bugzilla.mozilla.org/show_bug.cgi?id=1481613 return info def get_build_opts(substs): - ''' + """ Translate selected items from `substs` into `build_opts` keys in the schema. - ''' + """ try: opts = { - k: ty(substs.get(s, None)) for (k, s, ty) in ( + k: ty(substs.get(s, None)) + for (k, s, ty) in ( # Selected substitutions. - ('artifact', 'MOZ_ARTIFACT_BUILDS', bool), - ('debug', 'MOZ_DEBUG', bool), - ('opt', 'MOZ_OPTIMIZE', bool), - ('ccache', 'CCACHE', bool), - ('sccache', 'MOZ_USING_SCCACHE', bool), + ("artifact", "MOZ_ARTIFACT_BUILDS", bool), + ("debug", "MOZ_DEBUG", bool), + ("opt", "MOZ_OPTIMIZE", bool), + ("ccache", "CCACHE", bool), + ("sccache", "MOZ_USING_SCCACHE", bool), ) } - compiler = substs.get('CC_TYPE', None) + compiler = substs.get("CC_TYPE", None) if compiler: - opts['compiler'] = str(compiler) - if substs.get('CXX_IS_ICECREAM', None): - opts['icecream'] = True + opts["compiler"] = str(compiler) + if substs.get("CXX_IS_ICECREAM", None): + opts["icecream"] = True return opts except BuildEnvironmentNotFoundException: return {} def get_build_attrs(attrs): - ''' + """ Extracts clobber and cpu usage info from command attributes. 
- ''' + """ res = {} - clobber = attrs.get('clobber') + clobber = attrs.get("clobber") if clobber: - res['clobber'] = clobber - usage = attrs.get('usage') + res["clobber"] = clobber + usage = attrs.get("usage") if usage: - cpu_percent = usage.get('cpu_percent') + cpu_percent = usage.get("cpu_percent") if cpu_percent: - res['cpu_percent'] = int(round(cpu_percent)) + res["cpu_percent"] = int(round(cpu_percent)) return res def filter_args(command, argv, instance): - ''' + """ Given the full list of command-line arguments, remove anything up to and including `command`, and attempt to filter absolute pathnames out of any arguments after that. - ''' + """ # Each key is a pathname and the values are replacement sigils paths = { - instance.topsrcdir: '$topsrcdir/', - instance.topobjdir: '$topobjdir/', - mozpath.normpath(os.path.expanduser('~')): '$HOME/', + instance.topsrcdir: "$topsrcdir/", + instance.topobjdir: "$topobjdir/", + mozpath.normpath(os.path.expanduser("~")): "$HOME/", # This might override one of the existing entries, that's OK. # We don't use a sigil here because we treat all arguments as potentially relative # paths, so we'd like to get them back as they were specified. - mozpath.normpath(os.getcwd()): '', + mozpath.normpath(os.getcwd()): "", } args = list(argv) while args: a = args.pop(0) if a == command: break def filter_path(p): p = mozpath.abspath(p) base = mozpath.basedir(p, paths.keys()) if base: return paths[base] + mozpath.relpath(p, base) # Best-effort. - return '<path omitted>' + return "<path omitted>" + return [filter_path(arg) for arg in args] -def gather_telemetry(command, success, start_time, end_time, mach_context, - instance, command_attrs): - ''' +def gather_telemetry( + command, success, start_time, end_time, mach_context, instance, command_attrs +): + """ Gather telemetry about the build and the user's system and pass it to the telemetry handler to be stored for later submission. Any absolute paths on the command line will be made relative to a relevant base path or replaced with a placeholder to avoid including paths from developer's machines. - ''' + """ try: substs = instance.substs except BuildEnvironmentNotFoundException: substs = {} data = { - 'client_id': get_client_id(mach_context.state_dir), + "client_id": get_client_id(mach_context.state_dir), # Get an rfc3339 datetime string. - 'time': datetime.utcfromtimestamp(start_time).strftime('%Y-%m-%dT%H:%M:%S.%fZ'), - 'command': command, - 'argv': filter_args(command, sys.argv, instance), - 'success': success, + "time": datetime.utcfromtimestamp(start_time).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + "command": command, + "argv": filter_args(command, sys.argv, instance), + "success": success, # TODO: use a monotonic clock: https://bugzilla.mozilla.org/show_bug.cgi?id=1481624 - 'duration_ms': int((end_time - start_time) * 1000), - 'build_opts': get_build_opts(substs), - 'build_attrs': get_build_attrs(command_attrs), - 'system': get_system_info(), + "duration_ms": int((end_time - start_time) * 1000), + "build_opts": get_build_opts(substs), + "build_attrs": get_build_attrs(command_attrs), + "system": get_system_info(), # TODO: exception: https://bugzilla.mozilla.org/show_bug.cgi?id=1481617 # TODO: file_types_changed: https://bugzilla.mozilla.org/show_bug.cgi?id=1481774 } try: # Validate against the schema. 
schema(data) return data except MultipleInvalid as exc: - msg = ['Build telemetry is invalid:'] + msg = ["Build telemetry is invalid:"] for error in exc.errors: msg.append(str(error)) - print('\n'.join(msg) + '\n' + pprint.pformat(data)) + print("\n".join(msg) + "\n" + pprint.pformat(data)) return None def verify_statedir(statedir): - ''' + """ Verifies the statedir is structured correctly. Returns the outgoing, submitted and log paths. Requires presence of the following directories; will raise if absent: - statedir/telemetry - statedir/telemetry/outgoing Creates the following directories and files if absent (first submission): - statedir/telemetry/submitted - ''' + """ - telemetry_dir = os.path.join(statedir, 'telemetry') - outgoing = os.path.join(telemetry_dir, 'outgoing') - submitted = os.path.join(telemetry_dir, 'submitted') - telemetry_log = os.path.join(telemetry_dir, 'telemetry.log') + telemetry_dir = os.path.join(statedir, "telemetry") + outgoing = os.path.join(telemetry_dir, "outgoing") + submitted = os.path.join(telemetry_dir, "submitted") + telemetry_log = os.path.join(telemetry_dir, "telemetry.log") if not os.path.isdir(telemetry_dir): - raise Exception('{} does not exist'.format(telemetry_dir)) + raise Exception("{} does not exist".format(telemetry_dir)) if not os.path.isdir(outgoing): - raise Exception('{} does not exist'.format(outgoing)) + raise Exception("{} does not exist".format(outgoing)) if not os.path.isdir(submitted): os.mkdir(submitted) return outgoing, submitted, telemetry_log
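
For reference, a minimal sketch of the validation step `gather_telemetry` performs above, using the `schema` object defined in this file. It assumes voluptuous is installed, and every field value below is illustrative (the UUID and timings are made up):

    from voluptuous import MultipleInvalid

    payload = {
        "client_id": "00000000-0000-4000-8000-000000000000",  # made-up UUID
        "time": "2020-10-21T21:29:30.000000Z",  # rfc3339, as Datetime() expects
        "command": "build",
        "argv": ["build"],
        "success": True,
        "duration_ms": 123456,
        "build_opts": {"opt": True, "debug": False},
        "system": {"os": "linux"},
    }

    try:
        schema(payload)  # raises MultipleInvalid on any missing or mistyped key
    except MultipleInvalid as exc:
        for error in exc.errors:
            print(error)
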
--- a/python/mozbuild/mozbuild/test/backend/test_build.py +++ b/python/mozbuild/mozbuild/test/backend/test_build.py @@ -21,217 +21,225 @@ from mozbuild.base import MozbuildObject from mozbuild.frontend.emitter import TreeMetadataEmitter from mozbuild.frontend.reader import BuildReader from mozbuild.util import ensureParentDir from mozpack.files import FileFinder from tempfile import mkdtemp BASE_SUBSTS = [ - ('PYTHON', mozpath.normsep(sys.executable)), - ('PYTHON3', mozpath.normsep(sys.executable)), - ('MOZ_UI_LOCALE', 'en-US'), + ("PYTHON", mozpath.normsep(sys.executable)), + ("PYTHON3", mozpath.normsep(sys.executable)), + ("MOZ_UI_LOCALE", "en-US"), ] class TestBuild(unittest.TestCase): def setUp(self): self._old_env = dict(os.environ) - os.environ.pop('MOZCONFIG', None) - os.environ.pop('MOZ_OBJDIR', None) - os.environ.pop('MOZ_PGO', None) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) + os.environ.pop("MOZ_PGO", None) def tearDown(self): os.environ.clear() os.environ.update(self._old_env) @contextmanager def do_test_backend(self, *backends, **kwargs): # Create the objdir in the srcdir to ensure that they share # the same drive on Windows. topobjdir = mkdtemp(dir=buildconfig.topsrcdir) try: - config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir, - **kwargs) + config = ConfigEnvironment(buildconfig.topsrcdir, topobjdir, **kwargs) reader = BuildReader(config) emitter = TreeMetadataEmitter(config) - moz_build = mozpath.join(config.topsrcdir, 'test.mozbuild') - definitions = list(emitter.emit( - reader.read_mozbuild(moz_build, config))) + moz_build = mozpath.join(config.topsrcdir, "test.mozbuild") + definitions = list(emitter.emit(reader.read_mozbuild(moz_build, config))) for backend in backends: backend(config).consume(definitions) yield config except Exception: raise finally: - if not os.environ.get('MOZ_NO_CLEANUP'): + if not os.environ.get("MOZ_NO_CLEANUP"): shutil.rmtree(topobjdir) @contextmanager def line_handler(self): lines = [] def handle_make_line(line): lines.append(line) try: yield handle_make_line except Exception: - print('\n'.join(lines)) + print("\n".join(lines)) raise - if os.environ.get('MOZ_VERBOSE_MAKE'): - print('\n'.join(lines)) + if os.environ.get("MOZ_VERBOSE_MAKE"): + print("\n".join(lines)) def test_recursive_make(self): substs = list(BASE_SUBSTS) - with self.do_test_backend(RecursiveMakeBackend, - substs=substs) as config: - build = MozbuildObject(config.topsrcdir, None, None, - config.topobjdir) + with self.do_test_backend(RecursiveMakeBackend, substs=substs) as config: + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) overrides = [ - 'install_manifest_depends=', - 'MOZ_JAR_MAKER_FILE_FORMAT=flat', - 'TEST_MOZBUILD=1', + "install_manifest_depends=", + "MOZ_JAR_MAKER_FILE_FORMAT=flat", + "TEST_MOZBUILD=1", ] with self.line_handler() as handle_make_line: - build._run_make(directory=config.topobjdir, target=overrides, - silent=False, line_handler=handle_make_line) + build._run_make( + directory=config.topobjdir, + target=overrides, + silent=False, + line_handler=handle_make_line, + ) self.validate(config) def test_faster_recursive_make(self): substs = list(BASE_SUBSTS) + [ - ('BUILD_BACKENDS', 'FasterMake+RecursiveMake'), + ("BUILD_BACKENDS", "FasterMake+RecursiveMake"), ] - with self.do_test_backend(get_backend_class( - 'FasterMake+RecursiveMake'), substs=substs) as config: - buildid = mozpath.join(config.topobjdir, 'config', 'buildid') + with self.do_test_backend( + 
get_backend_class("FasterMake+RecursiveMake"), substs=substs + ) as config: + buildid = mozpath.join(config.topobjdir, "config", "buildid") ensureParentDir(buildid) - with open(buildid, 'w') as fh: - fh.write('20100101012345\n') + with open(buildid, "w") as fh: + fh.write("20100101012345\n") - build = MozbuildObject(config.topsrcdir, None, None, - config.topobjdir) + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) overrides = [ - 'install_manifest_depends=', - 'MOZ_JAR_MAKER_FILE_FORMAT=flat', - 'TEST_MOZBUILD=1', + "install_manifest_depends=", + "MOZ_JAR_MAKER_FILE_FORMAT=flat", + "TEST_MOZBUILD=1", ] with self.line_handler() as handle_make_line: - build._run_make(directory=config.topobjdir, target=overrides, - silent=False, line_handler=handle_make_line) + build._run_make( + directory=config.topobjdir, + target=overrides, + silent=False, + line_handler=handle_make_line, + ) self.validate(config) def test_faster_make(self): substs = list(BASE_SUBSTS) + [ - ('MOZ_BUILD_APP', 'dummy_app'), - ('MOZ_WIDGET_TOOLKIT', 'dummy_widget'), + ("MOZ_BUILD_APP", "dummy_app"), + ("MOZ_WIDGET_TOOLKIT", "dummy_widget"), ] - with self.do_test_backend(RecursiveMakeBackend, FasterMakeBackend, - substs=substs) as config: - buildid = mozpath.join(config.topobjdir, 'config', 'buildid') + with self.do_test_backend( + RecursiveMakeBackend, FasterMakeBackend, substs=substs + ) as config: + buildid = mozpath.join(config.topobjdir, "config", "buildid") ensureParentDir(buildid) - with open(buildid, 'w') as fh: - fh.write('20100101012345\n') + with open(buildid, "w") as fh: + fh.write("20100101012345\n") - build = MozbuildObject(config.topsrcdir, None, None, - config.topobjdir) + build = MozbuildObject(config.topsrcdir, None, None, config.topobjdir) overrides = [ - 'TEST_MOZBUILD=1', + "TEST_MOZBUILD=1", ] with self.line_handler() as handle_make_line: - build._run_make(directory=mozpath.join(config.topobjdir, - 'faster'), - target=overrides, silent=False, - line_handler=handle_make_line) + build._run_make( + directory=mozpath.join(config.topobjdir, "faster"), + target=overrides, + silent=False, + line_handler=handle_make_line, + ) self.validate(config) def validate(self, config): self.maxDiff = None - test_path = mozpath.join('$SRCDIR', 'python', 'mozbuild', 'mozbuild', - 'test', 'backend', 'data', 'build') + test_path = mozpath.join( + "$SRCDIR", + "python", + "mozbuild", + "mozbuild", + "test", + "backend", + "data", + "build", + ) result = { p: six.ensure_text(f.open().read()) - for p, f in FileFinder(mozpath.join(config.topobjdir, 'dist')) + for p, f in FileFinder(mozpath.join(config.topobjdir, "dist")) } self.assertTrue(len(result)) - self.assertEqual(result, { - 'bin/baz.ini': 'baz.ini: FOO is foo\n', - 'bin/child/bar.ini': 'bar.ini\n', - 'bin/child2/foo.css': 'foo.css: FOO is foo\n', - 'bin/child2/qux.ini': 'qux.ini: BAR is not defined\n', - 'bin/chrome.manifest': - 'manifest chrome/foo.manifest\n' - 'manifest components/components.manifest\n', - 'bin/chrome/foo.manifest': - 'content bar foo/child/\n' - 'content foo foo/\n' - 'override chrome://foo/bar.svg#hello ' - 'chrome://bar/bar.svg#hello\n', - 'bin/chrome/foo/bar.js': 'bar.js\n', - 'bin/chrome/foo/child/baz.jsm': - '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path), - 'bin/chrome/foo/child/hoge.js': - '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' % (test_path), - 'bin/chrome/foo/foo.css': 'foo.css: FOO is foo\n', - 'bin/chrome/foo/foo.js': 'foo.js\n', - 'bin/chrome/foo/qux.js': 'bar.js\n', - 'bin/components/bar.js': - 
'//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' % (test_path), - 'bin/components/components.manifest': - 'component {foo} foo.js\ncomponent {bar} bar.js\n', - 'bin/components/foo.js': 'foo.js\n', - 'bin/defaults/pref/prefs.js': 'prefs.js\n', - 'bin/foo.ini': 'foo.ini\n', - 'bin/modules/baz.jsm': - '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path), - 'bin/modules/child/bar.jsm': 'bar.jsm\n', - 'bin/modules/child2/qux.jsm': - '//@line 4 "%s/qux.jsm"\nqux.jsm: BAR is not defined\n' + self.assertEqual( + result, + { + "bin/baz.ini": "baz.ini: FOO is foo\n", + "bin/child/bar.ini": "bar.ini\n", + "bin/child2/foo.css": "foo.css: FOO is foo\n", + "bin/child2/qux.ini": "qux.ini: BAR is not defined\n", + "bin/chrome.manifest": "manifest chrome/foo.manifest\n" + "manifest components/components.manifest\n", + "bin/chrome/foo.manifest": "content bar foo/child/\n" + "content foo foo/\n" + "override chrome://foo/bar.svg#hello " + "chrome://bar/bar.svg#hello\n", + "bin/chrome/foo/bar.js": "bar.js\n", + "bin/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' + % (test_path), + "bin/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' + % (test_path), + "bin/chrome/foo/foo.css": "foo.css: FOO is foo\n", + "bin/chrome/foo/foo.js": "foo.js\n", + "bin/chrome/foo/qux.js": "bar.js\n", + "bin/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is foo\n' + % (test_path), + "bin/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501 + "bin/components/foo.js": "foo.js\n", + "bin/defaults/pref/prefs.js": "prefs.js\n", + "bin/foo.ini": "foo.ini\n", + "bin/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is foo\n' + % (test_path), + "bin/modules/child/bar.jsm": "bar.jsm\n", + "bin/modules/child2/qux.jsm": '//@line 4 "%s/qux.jsm"\nqux.jsm: BAR is not defined\n' # NOQA: E501 % (test_path), - 'bin/modules/foo.jsm': 'foo.jsm\n', - 'bin/res/resource': 'resource\n', - 'bin/res/child/resource2': 'resource2\n', - - 'bin/app/baz.ini': 'baz.ini: FOO is bar\n', - 'bin/app/child/bar.ini': 'bar.ini\n', - 'bin/app/child2/qux.ini': 'qux.ini: BAR is defined\n', - 'bin/app/chrome.manifest': - 'manifest chrome/foo.manifest\n' - 'manifest components/components.manifest\n', - 'bin/app/chrome/foo.manifest': - 'content bar foo/child/\n' - 'content foo foo/\n' - 'override chrome://foo/bar.svg#hello ' - 'chrome://bar/bar.svg#hello\n', - 'bin/app/chrome/foo/bar.js': 'bar.js\n', - 'bin/app/chrome/foo/child/baz.jsm': - '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path), - 'bin/app/chrome/foo/child/hoge.js': - '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' % (test_path), - 'bin/app/chrome/foo/foo.css': 'foo.css: FOO is bar\n', - 'bin/app/chrome/foo/foo.js': 'foo.js\n', - 'bin/app/chrome/foo/qux.js': 'bar.js\n', - 'bin/app/components/bar.js': - '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' % (test_path), - 'bin/app/components/components.manifest': - 'component {foo} foo.js\ncomponent {bar} bar.js\n', - 'bin/app/components/foo.js': 'foo.js\n', - 'bin/app/defaults/preferences/prefs.js': 'prefs.js\n', - 'bin/app/foo.css': 'foo.css: FOO is bar\n', - 'bin/app/foo.ini': 'foo.ini\n', - 'bin/app/modules/baz.jsm': - '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path), - 'bin/app/modules/child/bar.jsm': 'bar.jsm\n', - 'bin/app/modules/child2/qux.jsm': - '//@line 2 "%s/qux.jsm"\nqux.jsm: BAR is defined\n' + "bin/modules/foo.jsm": "foo.jsm\n", + "bin/res/resource": "resource\n", + "bin/res/child/resource2": "resource2\n", + 
"bin/app/baz.ini": "baz.ini: FOO is bar\n", + "bin/app/child/bar.ini": "bar.ini\n", + "bin/app/child2/qux.ini": "qux.ini: BAR is defined\n", + "bin/app/chrome.manifest": "manifest chrome/foo.manifest\n" + "manifest components/components.manifest\n", + "bin/app/chrome/foo.manifest": "content bar foo/child/\n" + "content foo foo/\n" + "override chrome://foo/bar.svg#hello " + "chrome://bar/bar.svg#hello\n", + "bin/app/chrome/foo/bar.js": "bar.js\n", + "bin/app/chrome/foo/child/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' + % (test_path), + "bin/app/chrome/foo/child/hoge.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' % (test_path), - 'bin/app/modules/foo.jsm': 'foo.jsm\n', - }) + "bin/app/chrome/foo/foo.css": "foo.css: FOO is bar\n", + "bin/app/chrome/foo/foo.js": "foo.js\n", + "bin/app/chrome/foo/qux.js": "bar.js\n", + "bin/app/components/bar.js": '//@line 2 "%s/bar.js"\nbar.js: FOO is bar\n' + % (test_path), + "bin/app/components/components.manifest": "component {foo} foo.js\ncomponent {bar} bar.js\n", # NOQA: E501 + "bin/app/components/foo.js": "foo.js\n", + "bin/app/defaults/preferences/prefs.js": "prefs.js\n", + "bin/app/foo.css": "foo.css: FOO is bar\n", + "bin/app/foo.ini": "foo.ini\n", + "bin/app/modules/baz.jsm": '//@line 2 "%s/baz.jsm"\nbaz.jsm: FOO is bar\n' + % (test_path), + "bin/app/modules/child/bar.jsm": "bar.jsm\n", + "bin/app/modules/child2/qux.jsm": '//@line 2 "%s/qux.jsm"\nqux.jsm: BAR is defined\n' # NOQA: E501 + % (test_path), + "bin/app/modules/foo.jsm": "foo.jsm\n", + }, + ) -if __name__ == '__main__': +if __name__ == "__main__": main()
--- a/python/mozbuild/mozbuild/test/backend/test_recursivemake.py +++ b/python/mozbuild/mozbuild/test/backend/test_recursivemake.py @@ -26,640 +26,732 @@ from mozbuild.frontend.reader import Bui from mozbuild.test.backend.common import BackendTester import mozpack.path as mozpath class TestRecursiveMakeTraversal(unittest.TestCase): def test_traversal(self): traversal = RecursiveMakeTraversal() - traversal.add('', dirs=['A', 'B', 'C']) - traversal.add('', dirs=['D']) - traversal.add('A') - traversal.add('B', dirs=['E', 'F']) - traversal.add('C', dirs=['G', 'H']) - traversal.add('D', dirs=['I', 'K']) - traversal.add('D', dirs=['J', 'L']) - traversal.add('E') - traversal.add('F') - traversal.add('G') - traversal.add('H') - traversal.add('I', dirs=['M', 'N']) - traversal.add('J', dirs=['O', 'P']) - traversal.add('K', dirs=['Q', 'R']) - traversal.add('L', dirs=['S']) - traversal.add('M') - traversal.add('N', dirs=['T']) - traversal.add('O') - traversal.add('P', dirs=['U']) - traversal.add('Q') - traversal.add('R', dirs=['V']) - traversal.add('S', dirs=['W']) - traversal.add('T') - traversal.add('U') - traversal.add('V') - traversal.add('W', dirs=['X']) - traversal.add('X') + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("", dirs=["D"]) + traversal.add("A") + traversal.add("B", dirs=["E", "F"]) + traversal.add("C", dirs=["G", "H"]) + traversal.add("D", dirs=["I", "K"]) + traversal.add("D", dirs=["J", "L"]) + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I", dirs=["M", "N"]) + traversal.add("J", dirs=["O", "P"]) + traversal.add("K", dirs=["Q", "R"]) + traversal.add("L", dirs=["S"]) + traversal.add("M") + traversal.add("N", dirs=["T"]) + traversal.add("O") + traversal.add("P", dirs=["U"]) + traversal.add("Q") + traversal.add("R", dirs=["V"]) + traversal.add("S", dirs=["W"]) + traversal.add("T") + traversal.add("U") + traversal.add("V") + traversal.add("W", dirs=["X"]) + traversal.add("X") - parallels = set(('G', 'H', 'I', 'J', 'O', 'P', 'Q', 'R', 'U')) + parallels = set(("G", "H", "I", "J", "O", "P", "Q", "R", "U")) def filter(current, subdirs): - return (current, [d for d in subdirs.dirs if d in parallels], - [d for d in subdirs.dirs if d not in parallels]) + return ( + current, + [d for d in subdirs.dirs if d in parallels], + [d for d in subdirs.dirs if d not in parallels], + ) start, deps = traversal.compute_dependencies(filter) - self.assertEqual(start, ('X',)) + self.assertEqual(start, ("X",)) self.maxDiff = None - self.assertEqual(deps, { - 'A': ('',), - 'B': ('A',), - 'C': ('F',), - 'D': ('G', 'H'), - 'E': ('B',), - 'F': ('E',), - 'G': ('C',), - 'H': ('C',), - 'I': ('D',), - 'J': ('D',), - 'K': ('T', 'O', 'U'), - 'L': ('Q', 'V'), - 'M': ('I',), - 'N': ('M',), - 'O': ('J',), - 'P': ('J',), - 'Q': ('K',), - 'R': ('K',), - 'S': ('L',), - 'T': ('N',), - 'U': ('P',), - 'V': ('R',), - 'W': ('S',), - 'X': ('W',), - }) + self.assertEqual( + deps, + { + "A": ("",), + "B": ("A",), + "C": ("F",), + "D": ("G", "H"), + "E": ("B",), + "F": ("E",), + "G": ("C",), + "H": ("C",), + "I": ("D",), + "J": ("D",), + "K": ("T", "O", "U"), + "L": ("Q", "V"), + "M": ("I",), + "N": ("M",), + "O": ("J",), + "P": ("J",), + "Q": ("K",), + "R": ("K",), + "S": ("L",), + "T": ("N",), + "U": ("P",), + "V": ("R",), + "W": ("S",), + "X": ("W",), + }, + ) - self.assertEqual(list(traversal.traverse('', filter)), - ['', 'A', 'B', 'E', 'F', 'C', 'G', 'H', 'D', 'I', - 'M', 'N', 'T', 'J', 'O', 'P', 'U', 'K', 'Q', 'R', - 'V', 'L', 'S', 'W', 'X']) + self.assertEqual( + 
list(traversal.traverse("", filter)), + [ + "", + "A", + "B", + "E", + "F", + "C", + "G", + "H", + "D", + "I", + "M", + "N", + "T", + "J", + "O", + "P", + "U", + "K", + "Q", + "R", + "V", + "L", + "S", + "W", + "X", + ], + ) - self.assertEqual(list(traversal.traverse('C', filter)), - ['C', 'G', 'H']) + self.assertEqual(list(traversal.traverse("C", filter)), ["C", "G", "H"]) def test_traversal_2(self): traversal = RecursiveMakeTraversal() - traversal.add('', dirs=['A', 'B', 'C']) - traversal.add('A') - traversal.add('B', dirs=['D', 'E', 'F']) - traversal.add('C', dirs=['G', 'H', 'I']) - traversal.add('D') - traversal.add('E') - traversal.add('F') - traversal.add('G') - traversal.add('H') - traversal.add('I') + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") start, deps = traversal.compute_dependencies() - self.assertEqual(start, ('I',)) - self.assertEqual(deps, { - 'A': ('',), - 'B': ('A',), - 'C': ('F',), - 'D': ('B',), - 'E': ('D',), - 'F': ('E',), - 'G': ('C',), - 'H': ('G',), - 'I': ('H',), - }) + self.assertEqual(start, ("I",)) + self.assertEqual( + deps, + { + "A": ("",), + "B": ("A",), + "C": ("F",), + "D": ("B",), + "E": ("D",), + "F": ("E",), + "G": ("C",), + "H": ("G",), + "I": ("H",), + }, + ) def test_traversal_filter(self): traversal = RecursiveMakeTraversal() - traversal.add('', dirs=['A', 'B', 'C']) - traversal.add('A') - traversal.add('B', dirs=['D', 'E', 'F']) - traversal.add('C', dirs=['G', 'H', 'I']) - traversal.add('D') - traversal.add('E') - traversal.add('F') - traversal.add('G') - traversal.add('H') - traversal.add('I') + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") def filter(current, subdirs): - if current == 'B': + if current == "B": current = None return current, [], subdirs.dirs start, deps = traversal.compute_dependencies(filter) - self.assertEqual(start, ('I',)) - self.assertEqual(deps, { - 'A': ('',), - 'C': ('F',), - 'D': ('A',), - 'E': ('D',), - 'F': ('E',), - 'G': ('C',), - 'H': ('G',), - 'I': ('H',), - }) + self.assertEqual(start, ("I",)) + self.assertEqual( + deps, + { + "A": ("",), + "C": ("F",), + "D": ("A",), + "E": ("D",), + "F": ("E",), + "G": ("C",), + "H": ("G",), + "I": ("H",), + }, + ) def test_traversal_parallel(self): traversal = RecursiveMakeTraversal() - traversal.add('', dirs=['A', 'B', 'C']) - traversal.add('A') - traversal.add('B', dirs=['D', 'E', 'F']) - traversal.add('C', dirs=['G', 'H', 'I']) - traversal.add('D') - traversal.add('E') - traversal.add('F') - traversal.add('G') - traversal.add('H') - traversal.add('I') - traversal.add('J') + traversal.add("", dirs=["A", "B", "C"]) + traversal.add("A") + traversal.add("B", dirs=["D", "E", "F"]) + traversal.add("C", dirs=["G", "H", "I"]) + traversal.add("D") + traversal.add("E") + traversal.add("F") + traversal.add("G") + traversal.add("H") + traversal.add("I") + traversal.add("J") def filter(current, subdirs): return current, subdirs.dirs, [] start, deps = traversal.compute_dependencies(filter) - self.assertEqual(start, ('A', 'D', 'E', 'F', 'G', 'H', 'I', 'J')) - self.assertEqual(deps, { - 'A': ('',), - 'B': ('',), - 'C': ('',), - 'D': ('B',), - 
'E': ('B',), - 'F': ('B',), - 'G': ('C',), - 'H': ('C',), - 'I': ('C',), - 'J': ('',), - }) + self.assertEqual(start, ("A", "D", "E", "F", "G", "H", "I", "J")) + self.assertEqual( + deps, + { + "A": ("",), + "B": ("",), + "C": ("",), + "D": ("B",), + "E": ("B",), + "F": ("B",), + "G": ("C",), + "H": ("C",), + "I": ("C",), + "J": ("",), + }, + ) class TestRecursiveMakeBackend(BackendTester): def test_basic(self): """Ensure the RecursiveMakeBackend works without error.""" - env = self._consume('stub0', RecursiveMakeBackend) - self.assertTrue(os.path.exists(mozpath.join(env.topobjdir, - 'backend.RecursiveMakeBackend'))) - self.assertTrue(os.path.exists(mozpath.join(env.topobjdir, - 'backend.RecursiveMakeBackend.in'))) + env = self._consume("stub0", RecursiveMakeBackend) + self.assertTrue( + os.path.exists(mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend")) + ) + self.assertTrue( + os.path.exists( + mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend.in") + ) + ) def test_output_files(self): """Ensure proper files are generated.""" - env = self._consume('stub0', RecursiveMakeBackend) + env = self._consume("stub0", RecursiveMakeBackend) - expected = ['', 'dir1', 'dir2'] + expected = ["", "dir1", "dir2"] for d in expected: - out_makefile = mozpath.join(env.topobjdir, d, 'Makefile') - out_backend = mozpath.join(env.topobjdir, d, 'backend.mk') + out_makefile = mozpath.join(env.topobjdir, d, "Makefile") + out_backend = mozpath.join(env.topobjdir, d, "backend.mk") self.assertTrue(os.path.exists(out_makefile)) self.assertTrue(os.path.exists(out_backend)) def test_makefile_conversion(self): """Ensure Makefile.in is converted properly.""" - env = self._consume('stub0', RecursiveMakeBackend) + env = self._consume("stub0", RecursiveMakeBackend) - p = mozpath.join(env.topobjdir, 'Makefile') + p = mozpath.join(env.topobjdir, "Makefile") - lines = [l.strip() for l in open(p, 'rt').readlines()[1:] if not l.startswith('#')] - self.assertEqual(lines, [ - 'DEPTH := .', - 'topobjdir := %s' % env.topobjdir, - 'topsrcdir := %s' % env.topsrcdir, - 'srcdir := %s' % env.topsrcdir, - 'srcdir_rel := %s' % mozpath.relpath(env.topsrcdir, env.topobjdir), - 'relativesrcdir := .', - 'include $(DEPTH)/config/autoconf.mk', - '', - 'FOO := foo', - '', - 'include $(topsrcdir)/config/recurse.mk', - ]) + lines = [ + l.strip() for l in open(p, "rt").readlines()[1:] if not l.startswith("#") + ] + self.assertEqual( + lines, + [ + "DEPTH := .", + "topobjdir := %s" % env.topobjdir, + "topsrcdir := %s" % env.topsrcdir, + "srcdir := %s" % env.topsrcdir, + "srcdir_rel := %s" % mozpath.relpath(env.topsrcdir, env.topobjdir), + "relativesrcdir := .", + "include $(DEPTH)/config/autoconf.mk", + "", + "FOO := foo", + "", + "include $(topsrcdir)/config/recurse.mk", + ], + ) def test_missing_makefile_in(self): """Ensure missing Makefile.in results in Makefile creation.""" - env = self._consume('stub0', RecursiveMakeBackend) + env = self._consume("stub0", RecursiveMakeBackend) - p = mozpath.join(env.topobjdir, 'dir2', 'Makefile') + p = mozpath.join(env.topobjdir, "dir2", "Makefile") self.assertTrue(os.path.exists(p)) - lines = [l.strip() for l in open(p, 'rt').readlines()] + lines = [l.strip() for l in open(p, "rt").readlines()] self.assertEqual(len(lines), 10) - self.assertTrue(lines[0].startswith('# THIS FILE WAS AUTOMATICALLY')) + self.assertTrue(lines[0].startswith("# THIS FILE WAS AUTOMATICALLY")) def test_backend_mk(self): """Ensure backend.mk file is written out properly.""" - env = self._consume('stub0', 
RecursiveMakeBackend) + env = self._consume("stub0", RecursiveMakeBackend) - p = mozpath.join(env.topobjdir, 'backend.mk') + p = mozpath.join(env.topobjdir, "backend.mk") - lines = [l.strip() for l in open(p, 'rt').readlines()[2:]] - self.assertEqual(lines, [ - 'DIRS := dir1 dir2', - ]) + lines = [l.strip() for l in open(p, "rt").readlines()[2:]] + self.assertEqual( + lines, + [ + "DIRS := dir1 dir2", + ], + ) # Make env.substs writable to add ENABLE_TESTS env.substs = dict(env.substs) - env.substs['ENABLE_TESTS'] = '1' - self._consume('stub0', RecursiveMakeBackend, env=env) - p = mozpath.join(env.topobjdir, 'backend.mk') + env.substs["ENABLE_TESTS"] = "1" + self._consume("stub0", RecursiveMakeBackend, env=env) + p = mozpath.join(env.topobjdir, "backend.mk") - lines = [l.strip() for l in open(p, 'rt').readlines()[2:]] - self.assertEqual(lines, [ - 'DIRS := dir1 dir2 dir3', - ]) + lines = [l.strip() for l in open(p, "rt").readlines()[2:]] + self.assertEqual( + lines, + [ + "DIRS := dir1 dir2 dir3", + ], + ) def test_mtime_no_change(self): """Ensure mtime is not updated if file content does not change.""" - env = self._consume('stub0', RecursiveMakeBackend) + env = self._consume("stub0", RecursiveMakeBackend) - makefile_path = mozpath.join(env.topobjdir, 'Makefile') - backend_path = mozpath.join(env.topobjdir, 'backend.mk') + makefile_path = mozpath.join(env.topobjdir, "Makefile") + backend_path = mozpath.join(env.topobjdir, "backend.mk") makefile_mtime = os.path.getmtime(makefile_path) backend_mtime = os.path.getmtime(backend_path) reader = BuildReader(env) emitter = TreeMetadataEmitter(env) backend = RecursiveMakeBackend(env) backend.consume(emitter.emit(reader.read_topsrcdir())) self.assertEqual(os.path.getmtime(makefile_path), makefile_mtime) self.assertEqual(os.path.getmtime(backend_path), backend_mtime) def test_substitute_config_files(self): """Ensure substituted config files are produced.""" - env = self._consume('substitute_config_files', RecursiveMakeBackend) + env = self._consume("substitute_config_files", RecursiveMakeBackend) - p = mozpath.join(env.topobjdir, 'foo') + p = mozpath.join(env.topobjdir, "foo") self.assertTrue(os.path.exists(p)) - lines = [l.strip() for l in open(p, 'rt').readlines()] - self.assertEqual(lines, [ - 'TEST = foo', - ]) + lines = [l.strip() for l in open(p, "rt").readlines()] + self.assertEqual( + lines, + [ + "TEST = foo", + ], + ) def test_install_substitute_config_files(self): """Ensure we recurse into the dirs that install substituted config files.""" - env = self._consume('install_substitute_config_files', RecursiveMakeBackend) + env = self._consume("install_substitute_config_files", RecursiveMakeBackend) - root_deps_path = mozpath.join(env.topobjdir, 'root-deps.mk') - lines = [l.strip() for l in open(root_deps_path, 'rt').readlines()] + root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk") + lines = [l.strip() for l in open(root_deps_path, "rt").readlines()] # Make sure we actually recurse into the sub directory during export to # install the subst file. 
- self.assertTrue(any(l == 'recurse_export: sub/export' for l in lines)) + self.assertTrue(any(l == "recurse_export: sub/export" for l in lines)) def test_variable_passthru(self): """Ensure variable passthru is written out correctly.""" - env = self._consume('variable_passthru', RecursiveMakeBackend) + env = self._consume("variable_passthru", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = { - 'RCFILE': [ - 'RCFILE := $(srcdir)/foo.rc', + "RCFILE": [ + "RCFILE := $(srcdir)/foo.rc", ], - 'RCINCLUDE': [ - 'RCINCLUDE := $(srcdir)/bar.rc', + "RCINCLUDE": [ + "RCINCLUDE := $(srcdir)/bar.rc", ], - 'WIN32_EXE_LDFLAGS': [ - 'WIN32_EXE_LDFLAGS += -subsystem:console', + "WIN32_EXE_LDFLAGS": [ + "WIN32_EXE_LDFLAGS += -subsystem:console", ], } for var, val in expected.items(): # print("test_variable_passthru[%s]" % (var)) found = [str for str in lines if str.startswith(var)] self.assertEqual(found, val) def test_sources(self): """Ensure SOURCES, HOST_SOURCES and WASM_SOURCES are handled properly.""" - env = self._consume('sources', RecursiveMakeBackend) + env = self._consume("sources", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = { - 'ASFILES': [ - 'ASFILES += $(srcdir)/bar.s', - 'ASFILES += $(srcdir)/foo.asm', + "ASFILES": [ + "ASFILES += $(srcdir)/bar.s", + "ASFILES += $(srcdir)/foo.asm", ], - 'CMMSRCS': [ - 'CMMSRCS += $(srcdir)/bar.mm', - 'CMMSRCS += $(srcdir)/foo.mm', + "CMMSRCS": [ + "CMMSRCS += $(srcdir)/bar.mm", + "CMMSRCS += $(srcdir)/foo.mm", ], - 'CSRCS': [ - 'CSRCS += $(srcdir)/bar.c', - 'CSRCS += $(srcdir)/foo.c', + "CSRCS": [ + "CSRCS += $(srcdir)/bar.c", + "CSRCS += $(srcdir)/foo.c", ], - 'HOST_CPPSRCS': [ - 'HOST_CPPSRCS += $(srcdir)/bar.cpp', - 'HOST_CPPSRCS += $(srcdir)/foo.cpp', + "HOST_CPPSRCS": [ + "HOST_CPPSRCS += $(srcdir)/bar.cpp", + "HOST_CPPSRCS += $(srcdir)/foo.cpp", ], - 'HOST_CSRCS': [ - 'HOST_CSRCS += $(srcdir)/bar.c', - 'HOST_CSRCS += $(srcdir)/foo.c', + "HOST_CSRCS": [ + "HOST_CSRCS += $(srcdir)/bar.c", + "HOST_CSRCS += $(srcdir)/foo.c", ], - 'SSRCS': [ - 'SSRCS += $(srcdir)/baz.S', - 'SSRCS += $(srcdir)/foo.S', + "SSRCS": [ + "SSRCS += $(srcdir)/baz.S", + "SSRCS += $(srcdir)/foo.S", ], - 'WASM_CSRCS': [ - 'WASM_CSRCS += $(srcdir)/bar.c', - ('WASM_CSRCS += $(srcdir)/third_party/rust/rlbox_lucet_sandbox/' - 'c_src/lucet_sandbox_wrapper.c'), + "WASM_CSRCS": [ + "WASM_CSRCS += $(srcdir)/bar.c", + ( + "WASM_CSRCS += $(srcdir)/third_party/rust/rlbox_lucet_sandbox/" + "c_src/lucet_sandbox_wrapper.c" + ), ], - 'WASM_CPPSRCS': [ - 'WASM_CPPSRCS += $(srcdir)/bar.cpp', + "WASM_CPPSRCS": [ + "WASM_CPPSRCS += $(srcdir)/bar.cpp", ], } for var, val in expected.items(): found = [str for str in lines if str.startswith(var)] self.assertEqual(found, val) def test_exports(self): """Ensure EXPORTS is handled properly.""" - env = self._consume('exports', RecursiveMakeBackend) + env = self._consume("exports", RecursiveMakeBackend) # EXPORTS files should appear in the dist_include install manifest. 
- m = InstallManifest(path=mozpath.join(env.topobjdir, - '_build_manifests', 'install', 'dist_include')) + m = InstallManifest( + path=mozpath.join( + env.topobjdir, "_build_manifests", "install", "dist_include" + ) + ) self.assertEqual(len(m), 7) - self.assertIn('foo.h', m) - self.assertIn('mozilla/mozilla1.h', m) - self.assertIn('mozilla/dom/dom2.h', m) + self.assertIn("foo.h", m) + self.assertIn("mozilla/mozilla1.h", m) + self.assertIn("mozilla/dom/dom2.h", m) def test_generated_files(self): """Ensure GENERATED_FILES is handled properly.""" - env = self._consume('generated-files', RecursiveMakeBackend) + env = self._consume("generated-files", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub', - 'bar.c: $(MDDEPDIR)/bar.c.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp', - '$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py' % env.topsrcdir, - '$(REPORT_BUILD)', - '$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'EXPORT_TARGETS += $(MDDEPDIR)/foo.h.stub', - 'foo.h: $(MDDEPDIR)/foo.h.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp', - '$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir), - '$(REPORT_BUILD)', - '$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa - '@$(TOUCH) $@', - '', + "include $(topsrcdir)/config/AB_rCD.mk", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub", + "bar.c: $(MDDEPDIR)/bar.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp", + "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py" % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "EXPORT_TARGETS += $(MDDEPDIR)/foo.h.stub", + "foo.h: $(MDDEPDIR)/foo.h.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.h.pp", + "$(MDDEPDIR)/foo.h.stub: %s/generate-foo.py $(srcdir)/foo-data" + % (env.topsrcdir), + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-foo.py main foo.h $(MDDEPDIR)/foo.h.pp $(MDDEPDIR)/foo.h.stub $(srcdir)/foo-data)" # noqa + % (env.topsrcdir), + "@$(TOUCH) $@", + "", ] self.maxDiff = None self.assertEqual(lines, expected) def test_generated_files_force(self): """Ensure GENERATED_FILES with .force is handled properly.""" - env = self._consume('generated-files-force', RecursiveMakeBackend) + env = self._consume("generated-files-force", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub', - 'bar.c: $(MDDEPDIR)/bar.c.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp', - '$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE' % env.topsrcdir, - '$(REPORT_BUILD)', - '$(call py_action,file_generate,%s/generate-bar.py baz bar.c 
$(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'PRE_COMPILE_TARGETS += $(MDDEPDIR)/foo.c.stub', - 'foo.c: $(MDDEPDIR)/foo.c.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp', - '$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data' % (env.topsrcdir), - '$(REPORT_BUILD)', - '$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)' % (env.topsrcdir), # noqa - '@$(TOUCH) $@', - '', + "include $(topsrcdir)/config/AB_rCD.mk", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/bar.c.stub", + "bar.c: $(MDDEPDIR)/bar.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar.c.pp", + "$(MDDEPDIR)/bar.c.stub: %s/generate-bar.py FORCE" % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-bar.py baz bar.c $(MDDEPDIR)/bar.c.pp $(MDDEPDIR)/bar.c.stub)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "PRE_COMPILE_TARGETS += $(MDDEPDIR)/foo.c.stub", + "foo.c: $(MDDEPDIR)/foo.c.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.c.pp", + "$(MDDEPDIR)/foo.c.stub: %s/generate-foo.py $(srcdir)/foo-data" + % (env.topsrcdir), + "$(REPORT_BUILD)", + "$(call py_action,file_generate,%s/generate-foo.py main foo.c $(MDDEPDIR)/foo.c.pp $(MDDEPDIR)/foo.c.stub $(srcdir)/foo-data)" # noqa + % (env.topsrcdir), + "@$(TOUCH) $@", + "", ] self.maxDiff = None self.assertEqual(lines, expected) def test_localized_generated_files(self): """Ensure LOCALIZED_GENERATED_FILES is handled properly.""" - env = self._consume('localized-generated-files', RecursiveMakeBackend) + env = self._consume("localized-generated-files", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub', - 'foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp', - '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'LOCALIZED_FILES_0_FILES += foo.xyz', - 'LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/', - 'LOCALIZED_FILES_0_TARGET := misc', - 'INSTALL_TARGETS += LOCALIZED_FILES_0', + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub", + "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp", + "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "LOCALIZED_FILES_0_FILES += foo.xyz", + "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/", + "LOCALIZED_FILES_0_TARGET := misc", + "INSTALL_TARGETS += LOCALIZED_FILES_0", ] self.maxDiff = 
None self.assertEqual(lines, expected) def test_localized_generated_files_force(self): """Ensure LOCALIZED_GENERATED_FILES with .force is handled properly.""" - env = self._consume('localized-generated-files-force', RecursiveMakeBackend) + env = self._consume("localized-generated-files-force", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub', - 'foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp', - '$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'MISC_TARGETS += $(MDDEPDIR)/abc.xyz.stub', - 'abc.xyz: $(MDDEPDIR)/abc.xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp', - '$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo.xyz.stub", + "foo.xyz: $(MDDEPDIR)/foo.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo.xyz.pp", + "$(MDDEPDIR)/foo.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo.xyz $(MDDEPDIR)/foo.xyz.pp $(MDDEPDIR)/foo.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "MISC_TARGETS += $(MDDEPDIR)/abc.xyz.stub", + "abc.xyz: $(MDDEPDIR)/abc.xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/abc.xyz.pp", + "$(MDDEPDIR)/abc.xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input FORCE" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main abc.xyz $(MDDEPDIR)/abc.xyz.pp $(MDDEPDIR)/abc.xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", ] self.maxDiff = None self.assertEqual(lines, expected) def test_localized_generated_files_AB_CD(self): """Ensure LOCALIZED_GENERATED_FILES is handled properly when {AB_CD} and {AB_rCD} are used.""" - env = self._consume('localized-generated-files-AB_CD', RecursiveMakeBackend) + env = self._consume("localized-generated-files-AB_CD", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, 
"rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'MISC_TARGETS += $(MDDEPDIR)/foo$(AB_CD).xyz.stub', - 'foo$(AB_CD).xyz: $(MDDEPDIR)/foo$(AB_CD).xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp', - '$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp', - '$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', - 'zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;', - 'EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp', - '$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)' % env.topsrcdir, # noqa - '$(REPORT_BUILD)', - '$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)' % env.topsrcdir, # noqa - '@$(TOUCH) $@', - '', + "include $(topsrcdir)/config/AB_rCD.mk", + "MISC_TARGETS += $(MDDEPDIR)/foo$(AB_CD).xyz.stub", + "foo$(AB_CD).xyz: $(MDDEPDIR)/foo$(AB_CD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/foo$(AB_CD).xyz.pp", + "$(MDDEPDIR)/foo$(AB_CD).xyz.stub: %s/generate-foo.py $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main foo$(AB_CD).xyz $(MDDEPDIR)/foo$(AB_CD).xyz.pp $(MDDEPDIR)/foo$(AB_CD).xyz.stub $(call MERGE_FILE,localized-input) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "bar$(AB_rCD).xyz: $(MDDEPDIR)/bar$(AB_rCD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/bar$(AB_rCD).xyz.pp", + "$(MDDEPDIR)/bar$(AB_rCD).xyz.stub: %s/generate-foo.py $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main bar$(AB_rCD).xyz $(MDDEPDIR)/bar$(AB_rCD).xyz.pp $(MDDEPDIR)/bar$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,inner/locales) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", + "zot$(AB_rCD).xyz: $(MDDEPDIR)/zot$(AB_rCD).xyz.stub ;", + "EXTRA_MDDEPEND_FILES += $(MDDEPDIR)/zot$(AB_rCD).xyz.pp", + "$(MDDEPDIR)/zot$(AB_rCD).xyz.stub: %s/generate-foo.py $(call 
MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input $(if $(IS_LANGUAGE_REPACK),FORCE)" # noqa + % env.topsrcdir, + "$(REPORT_BUILD)", + "$(call py_action,file_generate,--locale=$(AB_CD) %s/generate-foo.py main zot$(AB_rCD).xyz $(MDDEPDIR)/zot$(AB_rCD).xyz.pp $(MDDEPDIR)/zot$(AB_rCD).xyz.stub $(call MERGE_RELATIVE_FILE,localized-input,locales) $(srcdir)/non-localized-input)" # noqa + % env.topsrcdir, + "@$(TOUCH) $@", + "", ] self.maxDiff = None self.assertEqual(lines, expected) def test_exports_generated(self): """Ensure EXPORTS that are listed in GENERATED_FILES are handled properly.""" - env = self._consume('exports-generated', RecursiveMakeBackend) + env = self._consume("exports-generated", RecursiveMakeBackend) # EXPORTS files should appear in the dist_include install manifest. - m = InstallManifest(path=mozpath.join(env.topobjdir, - '_build_manifests', 'install', 'dist_include')) + m = InstallManifest( + path=mozpath.join( + env.topobjdir, "_build_manifests", "install", "dist_include" + ) + ) self.assertEqual(len(m), 8) - self.assertIn('foo.h', m) - self.assertIn('mozilla/mozilla1.h', m) - self.assertIn('mozilla/dom/dom1.h', m) - self.assertIn('gfx/gfx.h', m) - self.assertIn('bar.h', m) - self.assertIn('mozilla/mozilla2.h', m) - self.assertIn('mozilla/dom/dom2.h', m) - self.assertIn('mozilla/dom/dom3.h', m) + self.assertIn("foo.h", m) + self.assertIn("mozilla/mozilla1.h", m) + self.assertIn("mozilla/dom/dom1.h", m) + self.assertIn("gfx/gfx.h", m) + self.assertIn("bar.h", m) + self.assertIn("mozilla/mozilla2.h", m) + self.assertIn("mozilla/dom/dom2.h", m) + self.assertIn("mozilla/dom/dom3.h", m) # EXPORTS files that are also GENERATED_FILES should be handled as # INSTALL_TARGETS. - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'include $(topsrcdir)/config/AB_rCD.mk', - 'dist_include_FILES += bar.h', - 'dist_include_DEST := $(DEPTH)/dist/include/', - 'dist_include_TARGET := export', - 'INSTALL_TARGETS += dist_include', - 'dist_include_mozilla_FILES += mozilla2.h', - 'dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla', - 'dist_include_mozilla_TARGET := export', - 'INSTALL_TARGETS += dist_include_mozilla', - 'dist_include_mozilla_dom_FILES += dom2.h', - 'dist_include_mozilla_dom_FILES += dom3.h', - 'dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom', - 'dist_include_mozilla_dom_TARGET := export', - 'INSTALL_TARGETS += dist_include_mozilla_dom', + "include $(topsrcdir)/config/AB_rCD.mk", + "dist_include_FILES += bar.h", + "dist_include_DEST := $(DEPTH)/dist/include/", + "dist_include_TARGET := export", + "INSTALL_TARGETS += dist_include", + "dist_include_mozilla_FILES += mozilla2.h", + "dist_include_mozilla_DEST := $(DEPTH)/dist/include/mozilla", + "dist_include_mozilla_TARGET := export", + "INSTALL_TARGETS += dist_include_mozilla", + "dist_include_mozilla_dom_FILES += dom2.h", + "dist_include_mozilla_dom_FILES += dom3.h", + "dist_include_mozilla_dom_DEST := $(DEPTH)/dist/include/mozilla/dom", + "dist_include_mozilla_dom_TARGET := export", + "INSTALL_TARGETS += dist_include_mozilla_dom", ] self.maxDiff = None self.assertEqual(lines, expected) def test_resources(self): """Ensure RESOURCE_FILES is handled properly.""" - env = self._consume('resources', RecursiveMakeBackend) + env = self._consume("resources", 
RecursiveMakeBackend) # RESOURCE_FILES should appear in the dist_bin install manifest. - m = InstallManifest(path=os.path.join(env.topobjdir, - '_build_manifests', 'install', 'dist_bin')) + m = InstallManifest( + path=os.path.join(env.topobjdir, "_build_manifests", "install", "dist_bin") + ) self.assertEqual(len(m), 10) - self.assertIn('res/foo.res', m) - self.assertIn('res/fonts/font1.ttf', m) - self.assertIn('res/fonts/desktop/desktop2.ttf', m) + self.assertIn("res/foo.res", m) + self.assertIn("res/fonts/font1.ttf", m) + self.assertIn("res/fonts/desktop/desktop2.ttf", m) - self.assertIn('res/bar.res.in', m) - self.assertIn('res/tests/test.manifest', m) - self.assertIn('res/tests/extra.manifest', m) + self.assertIn("res/bar.res.in", m) + self.assertIn("res/tests/test.manifest", m) + self.assertIn("res/tests/extra.manifest", m) def test_test_manifests_files_written(self): """Ensure test manifests get turned into files.""" - env = self._consume('test-manifests-written', RecursiveMakeBackend) + env = self._consume("test-manifests-written", RecursiveMakeBackend) - tests_dir = mozpath.join(env.topobjdir, '_tests') - m_master = mozpath.join(tests_dir, 'testing', 'mochitest', 'tests', 'mochitest.ini') - x_master = mozpath.join(tests_dir, 'xpcshell', 'xpcshell.ini') + tests_dir = mozpath.join(env.topobjdir, "_tests") + m_master = mozpath.join( + tests_dir, "testing", "mochitest", "tests", "mochitest.ini" + ) + x_master = mozpath.join(tests_dir, "xpcshell", "xpcshell.ini") self.assertTrue(os.path.exists(m_master)) self.assertTrue(os.path.exists(x_master)) - lines = [l.strip() for l in open(x_master, 'rt').readlines()] - self.assertEqual(lines, [ - '# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.', - '', - '[include:dir1/xpcshell.ini]', - '[include:xpcshell.ini]', - ]) + lines = [l.strip() for l in open(x_master, "rt").readlines()] + self.assertEqual( + lines, + [ + "# THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.", + "", + "[include:dir1/xpcshell.ini]", + "[include:xpcshell.ini]", + ], + ) def test_test_manifest_pattern_matches_recorded(self): """Pattern matches in test manifests' support-files should be recorded.""" - env = self._consume('test-manifests-written', RecursiveMakeBackend) - m = InstallManifest(path=mozpath.join(env.topobjdir, - '_build_manifests', 'install', '_test_files')) + env = self._consume("test-manifests-written", RecursiveMakeBackend) + m = InstallManifest( + path=mozpath.join( + env.topobjdir, "_build_manifests", "install", "_test_files" + ) + ) # This is not the most robust test in the world, but it gets the job # done. - entries = [e for e in m._dests.keys() if '**' in e] + entries = [e for e in m._dests.keys() if "**" in e] self.assertEqual(len(entries), 1) - self.assertIn('support/**', entries[0]) + self.assertIn("support/**", entries[0]) def test_test_manifest_deffered_installs_written(self): """Shared support files are written to their own data file by the backend.""" - env = self._consume('test-manifest-shared-support', RecursiveMakeBackend) + env = self._consume("test-manifest-shared-support", RecursiveMakeBackend) # First, read the generated for ini manifest contents. - test_files_manifest = mozpath.join(env.topobjdir, - '_build_manifests', - 'install', - '_test_files') + test_files_manifest = mozpath.join( + env.topobjdir, "_build_manifests", "install", "_test_files" + ) m = InstallManifest(path=test_files_manifest) # Then, synthesize one from the test-installs.pkl file. This should # allow us to re-create a subset of the above. 
- env = self._consume('test-manifest-shared-support', TestManifestBackend) - test_installs_path = mozpath.join(env.topobjdir, 'test-installs.pkl') + env = self._consume("test-manifest-shared-support", TestManifestBackend) + test_installs_path = mozpath.join(env.topobjdir, "test-installs.pkl") - with open(test_installs_path, 'rb') as fh: + with open(test_installs_path, "rb") as fh: test_installs = pickle.load(fh) - self.assertEqual(set(test_installs.keys()), - set(['child/test_sub.js', - 'child/data/**', - 'child/another-file.sjs'])) + self.assertEqual( + set(test_installs.keys()), + set(["child/test_sub.js", "child/data/**", "child/another-file.sjs"]), + ) for key in test_installs.keys(): self.assertIn(key, test_installs) synthesized_manifest = InstallManifest() for item, installs in test_installs.items(): for install_info in installs: if len(install_info) == 3: synthesized_manifest.add_pattern_link(*install_info) @@ -668,518 +760,556 @@ class TestRecursiveMakeBackend(BackendTe self.assertEqual(len(synthesized_manifest), 3) for item, info in synthesized_manifest._dests.items(): self.assertIn(item, m) self.assertEqual(info, m._dests[item]) def test_xpidl_generation(self): """Ensure xpidl files and directories are written out.""" - env = self._consume('xpidl', RecursiveMakeBackend) + env = self._consume("xpidl", RecursiveMakeBackend) # Install manifests should contain entries. - install_dir = mozpath.join(env.topobjdir, '_build_manifests', - 'install') - self.assertTrue(os.path.isfile(mozpath.join(install_dir, 'xpidl'))) + install_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + self.assertTrue(os.path.isfile(mozpath.join(install_dir, "xpidl"))) - m = InstallManifest(path=mozpath.join(install_dir, 'xpidl')) - self.assertIn('.deps/my_module.pp', m) + m = InstallManifest(path=mozpath.join(install_dir, "xpidl")) + self.assertIn(".deps/my_module.pp", m) - m = InstallManifest(path=mozpath.join(install_dir, 'xpidl')) - self.assertIn('my_module.xpt', m) + m = InstallManifest(path=mozpath.join(install_dir, "xpidl")) + self.assertIn("my_module.xpt", m) - m = InstallManifest(path=mozpath.join(install_dir, 'dist_include')) - self.assertIn('foo.h', m) + m = InstallManifest(path=mozpath.join(install_dir, "dist_include")) + self.assertIn("foo.h", m) - p = mozpath.join(env.topobjdir, 'config/makefiles/xpidl') + p = mozpath.join(env.topobjdir, "config/makefiles/xpidl") self.assertTrue(os.path.isdir(p)) - self.assertTrue(os.path.isfile(mozpath.join(p, 'Makefile'))) + self.assertTrue(os.path.isfile(mozpath.join(p, "Makefile"))) def test_test_support_files_tracked(self): - env = self._consume('test-support-binaries-tracked', RecursiveMakeBackend) - m = InstallManifest(path=mozpath.join(env.topobjdir, - '_build_manifests', 'install', '_tests')) + env = self._consume("test-support-binaries-tracked", RecursiveMakeBackend) + m = InstallManifest( + path=mozpath.join(env.topobjdir, "_build_manifests", "install", "_tests") + ) self.assertEqual(len(m), 4) - self.assertIn('xpcshell/tests/mozbuildtest/test-library.dll', m) - self.assertIn('xpcshell/tests/mozbuildtest/test-one.exe', m) - self.assertIn('xpcshell/tests/mozbuildtest/test-two.exe', m) - self.assertIn('xpcshell/tests/mozbuildtest/host-test-library.dll', m) + self.assertIn("xpcshell/tests/mozbuildtest/test-library.dll", m) + self.assertIn("xpcshell/tests/mozbuildtest/test-one.exe", m) + self.assertIn("xpcshell/tests/mozbuildtest/test-two.exe", m) + self.assertIn("xpcshell/tests/mozbuildtest/host-test-library.dll", m) def 
test_old_install_manifest_deleted(self): # Simulate an install manifest from a previous backend version. Ensure # it is deleted. - env = self._get_environment('stub0') - purge_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install') - manifest_path = mozpath.join(purge_dir, 'old_manifest') + env = self._get_environment("stub0") + purge_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") + manifest_path = mozpath.join(purge_dir, "old_manifest") os.makedirs(purge_dir) m = InstallManifest() m.write(path=manifest_path) - with open(mozpath.join( - env.topobjdir, 'backend.RecursiveMakeBackend'), 'w') as f: - f.write('%s\n' % manifest_path) + with open( + mozpath.join(env.topobjdir, "backend.RecursiveMakeBackend"), "w" + ) as f: + f.write("%s\n" % manifest_path) self.assertTrue(os.path.exists(manifest_path)) - self._consume('stub0', RecursiveMakeBackend, env) + self._consume("stub0", RecursiveMakeBackend, env) self.assertFalse(os.path.exists(manifest_path)) def test_install_manifests_written(self): - env, objs = self._emit('stub0') + env, objs = self._emit("stub0") backend = RecursiveMakeBackend(env) m = InstallManifest() - backend._install_manifests['testing'] = m - m.add_link(__file__, 'self') + backend._install_manifests["testing"] = m + m.add_link(__file__, "self") backend.consume(objs) - man_dir = mozpath.join(env.topobjdir, '_build_manifests', 'install') + man_dir = mozpath.join(env.topobjdir, "_build_manifests", "install") self.assertTrue(os.path.isdir(man_dir)) - expected = ['testing'] + expected = ["testing"] for e in expected: full = mozpath.join(man_dir, e) self.assertTrue(os.path.exists(full)) m2 = InstallManifest(path=full) self.assertEqual(m, m2) def test_ipdl_sources(self): """Test that PREPROCESSED_IPDL_SOURCES and IPDL_SOURCES are written to ipdlsrcs.mk correctly.""" - env = self._get_environment('ipdl_sources') + env = self._get_environment("ipdl_sources") # Make substs writable so we can set the value of IPDL_ROOT to reflect # the correct objdir. 
env.substs = dict(env.substs) - env.substs['IPDL_ROOT'] = env.topobjdir + env.substs["IPDL_ROOT"] = env.topobjdir - self._consume('ipdl_sources', RecursiveMakeBackend, env) + self._consume("ipdl_sources", RecursiveMakeBackend, env) - manifest_path = mozpath.join(env.topobjdir, 'ipdlsrcs.mk') - lines = [l.strip() for l in open(manifest_path, 'rt').readlines()] + manifest_path = mozpath.join(env.topobjdir, "ipdlsrcs.mk") + lines = [l.strip() for l in open(manifest_path, "rt").readlines()] # Handle Windows paths correctly - topsrcdir = env.topsrcdir.replace(os.sep, '/') + topsrcdir = env.topsrcdir.replace(os.sep, "/") expected = [ - "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" % tuple([topsrcdir] * 4), # noqa + "ALL_IPDLSRCS := bar1.ipdl foo1.ipdl %s/bar/bar.ipdl %s/bar/bar2.ipdlh %s/foo/foo.ipdl %s/foo/foo2.ipdlh" # noqa + % tuple([topsrcdir] * 4), "CPPSRCS := UnifiedProtocols0.cpp", "IPDLDIRS := %s %s/bar %s/foo" % (env.topobjdir, topsrcdir, topsrcdir), ] - found = [str for str in lines if str.startswith(('ALL_IPDLSRCS', - 'CPPSRCS', - 'IPDLDIRS'))] + found = [ + str + for str in lines + if str.startswith(("ALL_IPDLSRCS", "CPPSRCS", "IPDLDIRS")) + ] self.assertEqual(found, expected) def test_defines(self): """Test that DEFINES are written to backend.mk correctly.""" - env = self._consume('defines', RecursiveMakeBackend) + env = self._consume("defines", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] - var = 'DEFINES' + var = "DEFINES" defines = [val for val in lines if val.startswith(var)] - expected = ['DEFINES += -DFOO \'-DBAZ="ab\'\\\'\'cd"\' -UQUX -DBAR=7 -DVALUE=xyz'] + expected = ["DEFINES += -DFOO '-DBAZ=\"ab'\\''cd\"' -UQUX -DBAR=7 -DVALUE=xyz"] self.assertEqual(defines, expected) def test_local_includes(self): """Test that LOCAL_INCLUDES are written to backend.mk correctly.""" - env = self._consume('local_includes', RecursiveMakeBackend) + env = self._consume("local_includes", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'LOCAL_INCLUDES += -I$(srcdir)/bar/baz', - 'LOCAL_INCLUDES += -I$(srcdir)/foo', + "LOCAL_INCLUDES += -I$(srcdir)/bar/baz", + "LOCAL_INCLUDES += -I$(srcdir)/foo", ] - found = [str for str in lines if str.startswith('LOCAL_INCLUDES')] + found = [str for str in lines if str.startswith("LOCAL_INCLUDES")] self.assertEqual(found, expected) def test_generated_includes(self): """Test that GENERATED_INCLUDES are written to backend.mk correctly.""" - env = self._consume('generated_includes', RecursiveMakeBackend) + env = self._consume("generated_includes", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]] expected = [ - 'LOCAL_INCLUDES += -I$(CURDIR)/bar/baz', - 'LOCAL_INCLUDES += -I$(CURDIR)/foo', + "LOCAL_INCLUDES += -I$(CURDIR)/bar/baz", + "LOCAL_INCLUDES += -I$(CURDIR)/foo", ] - found = [str for str 
in lines if str.startswith('LOCAL_INCLUDES')] + found = [str for str in lines if str.startswith("LOCAL_INCLUDES")] self.assertEqual(found, expected) def test_rust_library(self): """Test that a Rust library is written to backend.mk correctly.""" - env = self._consume('rust-library', RecursiveMakeBackend) + env = self._consume("rust-library", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:] - # Strip out computed flags, they're a PITA to test. - if not l.startswith('COMPUTED_')] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] expected = [ - 'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a' % env.topobjdir, # noqa - 'CARGO_FILE := $(srcdir)/Cargo.toml', - 'CARGO_TARGET_DIR := %s' % env.topobjdir, + "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libtest_library.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, ] self.assertEqual(lines, expected) def test_host_rust_library(self): """Test that a Rust library is written to backend.mk correctly.""" - env = self._consume('host-rust-library', RecursiveMakeBackend) + env = self._consume("host-rust-library", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:] - # Strip out computed flags, they're a PITA to test. - if not l.startswith('COMPUTED_')] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] expected = [ - 'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir, # noqa - 'CARGO_FILE := $(srcdir)/Cargo.toml', - 'CARGO_TARGET_DIR := %s' % env.topobjdir, + "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, ] self.assertEqual(lines, expected) def test_host_rust_library_with_features(self): """Test that a host Rust library with features is written to backend.mk correctly.""" - env = self._consume('host-rust-library-features', RecursiveMakeBackend) + env = self._consume("host-rust-library-features", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:] - # Strip out computed flags, they're a PITA to test. - if not l.startswith('COMPUTED_')] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. 
+ if not l.startswith("COMPUTED_") + ] expected = [ - 'HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a' % env.topobjdir, # noqa - 'CARGO_FILE := $(srcdir)/Cargo.toml', - 'CARGO_TARGET_DIR := %s' % env.topobjdir, - 'HOST_RUST_LIBRARY_FEATURES := musthave cantlivewithout', + "HOST_RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libhostrusttool.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + "HOST_RUST_LIBRARY_FEATURES := musthave cantlivewithout", ] self.assertEqual(lines, expected) def test_rust_library_with_features(self): """Test that a Rust library with features is written to backend.mk correctly.""" - env = self._consume('rust-library-features', RecursiveMakeBackend) + env = self._consume("rust-library-features", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:] - # Strip out computed flags, they're a PITA to test. - if not l.startswith('COMPUTED_')] + backend_path = mozpath.join(env.topobjdir, "backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. + if not l.startswith("COMPUTED_") + ] expected = [ - 'RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a' % env.topobjdir, # noqa - 'CARGO_FILE := $(srcdir)/Cargo.toml', - 'CARGO_TARGET_DIR := %s' % env.topobjdir, - 'RUST_LIBRARY_FEATURES := musthave cantlivewithout', + "RUST_LIBRARY_FILE := %s/x86_64-unknown-linux-gnu/release/libfeature_library.a" + % env.topobjdir, # noqa + "CARGO_FILE := $(srcdir)/Cargo.toml", + "CARGO_TARGET_DIR := %s" % env.topobjdir, + "RUST_LIBRARY_FEATURES := musthave cantlivewithout", ] self.assertEqual(lines, expected) def test_rust_programs(self): """Test that {HOST_,}RUST_PROGRAMS are written to backend.mk correctly.""" - env = self._consume('rust-programs', RecursiveMakeBackend) + env = self._consume("rust-programs", RecursiveMakeBackend) - backend_path = mozpath.join(env.topobjdir, 'code/backend.mk') - lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:] - # Strip out computed flags, they're a PITA to test. - if not l.startswith('COMPUTED_')] + backend_path = mozpath.join(env.topobjdir, "code/backend.mk") + lines = [ + l.strip() + for l in open(backend_path, "rt").readlines()[2:] + # Strip out computed flags, they're a PITA to test. 
     def test_rust_programs(self):
         """Test that {HOST_,}RUST_PROGRAMS are written to backend.mk correctly."""
-        env = self._consume('rust-programs', RecursiveMakeBackend)
+        env = self._consume("rust-programs", RecursiveMakeBackend)

-        backend_path = mozpath.join(env.topobjdir, 'code/backend.mk')
-        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]
-                 # Strip out computed flags, they're a PITA to test.
-                 if not l.startswith('COMPUTED_')]
+        backend_path = mozpath.join(env.topobjdir, "code/backend.mk")
+        lines = [
+            l.strip()
+            for l in open(backend_path, "rt").readlines()[2:]
+            # Strip out computed flags, they're a PITA to test.
+            if not l.startswith("COMPUTED_")
+        ]

         expected = [
-            'CARGO_FILE := %s/code/Cargo.toml' % env.topsrcdir,
-            'CARGO_TARGET_DIR := .',
-            'RUST_PROGRAMS += i686-pc-windows-msvc/release/target.exe',
-            'RUST_CARGO_PROGRAMS += target',
-            'HOST_RUST_PROGRAMS += i686-pc-windows-msvc/release/host.exe',
-            'HOST_RUST_CARGO_PROGRAMS += host',
+            "CARGO_FILE := %s/code/Cargo.toml" % env.topsrcdir,
+            "CARGO_TARGET_DIR := .",
+            "RUST_PROGRAMS += i686-pc-windows-msvc/release/target.exe",
+            "RUST_CARGO_PROGRAMS += target",
+            "HOST_RUST_PROGRAMS += i686-pc-windows-msvc/release/host.exe",
+            "HOST_RUST_CARGO_PROGRAMS += host",
         ]

         self.assertEqual(lines, expected)

-        root_deps_path = mozpath.join(env.topobjdir, 'root-deps.mk')
-        lines = [l.strip() for l in open(root_deps_path, 'rt').readlines()]
+        root_deps_path = mozpath.join(env.topobjdir, "root-deps.mk")
+        lines = [l.strip() for l in open(root_deps_path, "rt").readlines()]

-        self.assertTrue(any(l == 'recurse_compile: code/host code/target' for l in lines))
+        self.assertTrue(
+            any(l == "recurse_compile: code/host code/target" for l in lines)
+        )
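Since the root-deps.mk lines are stripped before the scan, the any() comparison is just a membership test; an equivalent, and arguably clearer, assertion would be:

    self.assertIn("recurse_compile: code/host code/target", lines)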
     def test_final_target(self):
         """Test that FINAL_TARGET is written to backend.mk correctly."""
-        env = self._consume('final_target', RecursiveMakeBackend)
+        env = self._consume("final_target", RecursiveMakeBackend)

         final_target_rule = "FINAL_TARGET = $(if $(XPI_NAME),$(DIST)/xpi-stage/$(XPI_NAME),$(DIST)/bin)$(DIST_SUBDIR:%=/%)"  # noqa
         expected = dict()
         expected[env.topobjdir] = []
-        expected[mozpath.join(env.topobjdir, 'both')] = [
-            'XPI_NAME = mycrazyxpi',
-            'DIST_SUBDIR = asubdir',
-            final_target_rule
+        expected[mozpath.join(env.topobjdir, "both")] = [
+            "XPI_NAME = mycrazyxpi",
+            "DIST_SUBDIR = asubdir",
+            final_target_rule,
         ]
-        expected[mozpath.join(env.topobjdir, 'dist-subdir')] = [
-            'DIST_SUBDIR = asubdir',
-            final_target_rule
+        expected[mozpath.join(env.topobjdir, "dist-subdir")] = [
+            "DIST_SUBDIR = asubdir",
+            final_target_rule,
         ]
-        expected[mozpath.join(env.topobjdir, 'xpi-name')] = [
-            'XPI_NAME = mycrazyxpi',
-            final_target_rule
+        expected[mozpath.join(env.topobjdir, "xpi-name")] = [
+            "XPI_NAME = mycrazyxpi",
+            final_target_rule,
         ]
-        expected[mozpath.join(env.topobjdir, 'final-target')] = [
-            'FINAL_TARGET = $(DEPTH)/random-final-target'
+        expected[mozpath.join(env.topobjdir, "final-target")] = [
+            "FINAL_TARGET = $(DEPTH)/random-final-target"
         ]

         for key, expected_rules in six.iteritems(expected):
-            backend_path = mozpath.join(key, 'backend.mk')
-            lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
-            found = [str for str in lines if
-                     str.startswith('FINAL_TARGET') or str.startswith('XPI_NAME') or
-                     str.startswith('DIST_SUBDIR')]
+            backend_path = mozpath.join(key, "backend.mk")
+            lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]
+            found = [
+                str
+                for str in lines
+                if str.startswith("FINAL_TARGET")
+                or str.startswith("XPI_NAME")
+                or str.startswith("DIST_SUBDIR")
+            ]
             self.assertEqual(found, expected_rules)

     def test_final_target_pp_files(self):
         """Test that FINAL_TARGET_PP_FILES is written to backend.mk correctly."""
-        env = self._consume('dist-files', RecursiveMakeBackend)
+        env = self._consume("dist-files", RecursiveMakeBackend)

-        backend_path = mozpath.join(env.topobjdir, 'backend.mk')
-        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+        backend_path = mozpath.join(env.topobjdir, "backend.mk")
+        lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]

         expected = [
-            'DIST_FILES_0 += $(srcdir)/install.rdf',
-            'DIST_FILES_0 += $(srcdir)/main.js',
-            'DIST_FILES_0_PATH := $(DEPTH)/dist/bin/',
-            'DIST_FILES_0_TARGET := misc',
-            'PP_TARGETS += DIST_FILES_0',
+            "DIST_FILES_0 += $(srcdir)/install.rdf",
+            "DIST_FILES_0 += $(srcdir)/main.js",
+            "DIST_FILES_0_PATH := $(DEPTH)/dist/bin/",
+            "DIST_FILES_0_TARGET := misc",
+            "PP_TARGETS += DIST_FILES_0",
         ]

-        found = [str for str in lines if 'DIST_FILES' in str]
+        found = [str for str in lines if "DIST_FILES" in str]
         self.assertEqual(found, expected)

     def test_localized_files(self):
         """Test that LOCALIZED_FILES is written to backend.mk correctly."""
-        env = self._consume('localized-files', RecursiveMakeBackend)
+        env = self._consume("localized-files", RecursiveMakeBackend)

-        backend_path = mozpath.join(env.topobjdir, 'backend.mk')
-        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+        backend_path = mozpath.join(env.topobjdir, "backend.mk")
+        lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]

         expected = [
-            'LOCALIZED_FILES_0_FILES += $(wildcard $(LOCALE_SRCDIR)/abc/*.abc)',
-            'LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,bar.ini)',
-            'LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,foo.js)',
-            'LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/',
-            'LOCALIZED_FILES_0_TARGET := misc',
-            'INSTALL_TARGETS += LOCALIZED_FILES_0',
+            "LOCALIZED_FILES_0_FILES += $(wildcard $(LOCALE_SRCDIR)/abc/*.abc)",
+            "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,bar.ini)",
+            "LOCALIZED_FILES_0_FILES += $(call MERGE_FILE,foo.js)",
+            "LOCALIZED_FILES_0_DEST = $(FINAL_TARGET)/",
+            "LOCALIZED_FILES_0_TARGET := misc",
+            "INSTALL_TARGETS += LOCALIZED_FILES_0",
         ]

-        found = [str for str in lines if 'LOCALIZED_FILES' in str]
+        found = [str for str in lines if "LOCALIZED_FILES" in str]
         self.assertEqual(found, expected)

     def test_localized_pp_files(self):
         """Test that LOCALIZED_PP_FILES is written to backend.mk correctly."""
-        env = self._consume('localized-pp-files', RecursiveMakeBackend)
+        env = self._consume("localized-pp-files", RecursiveMakeBackend)

-        backend_path = mozpath.join(env.topobjdir, 'backend.mk')
-        lines = [l.strip() for l in open(backend_path, 'rt').readlines()[2:]]
+        backend_path = mozpath.join(env.topobjdir, "backend.mk")
+        lines = [l.strip() for l in open(backend_path, "rt").readlines()[2:]]

         expected = [
-            'LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,bar.ini)',
-            'LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,foo.js)',
-            'LOCALIZED_PP_FILES_0_PATH = $(FINAL_TARGET)/',
-            'LOCALIZED_PP_FILES_0_TARGET := misc',
-            'LOCALIZED_PP_FILES_0_FLAGS := --silence-missing-directive-warnings',
-            'PP_TARGETS += LOCALIZED_PP_FILES_0',
+            "LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,bar.ini)",
+            "LOCALIZED_PP_FILES_0 += $(call MERGE_FILE,foo.js)",
+            "LOCALIZED_PP_FILES_0_PATH = $(FINAL_TARGET)/",
+            "LOCALIZED_PP_FILES_0_TARGET := misc",
+            "LOCALIZED_PP_FILES_0_FLAGS := --silence-missing-directive-warnings",
+            "PP_TARGETS += LOCALIZED_PP_FILES_0",
         ]

-        found = [str for str in lines if 'LOCALIZED_PP_FILES' in str]
+        found = [str for str in lines if "LOCALIZED_PP_FILES" in str]
         self.assertEqual(found, expected)
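The LOCALIZED_FILES expectations above encode two translation paths: wildcard entries become $(wildcard $(LOCALE_SRCDIR)/...) while plain files go through $(call MERGE_FILE,...). A sketch of the kind of moz.build fixture that produces them (hypothetical, mirroring the expected lines rather than quoting the in-tree data):

    # moz.build (sketch): locale-relative sources for LOCALIZED_FILES.
    LOCALIZED_FILES += [
        'en-US/abc/*.abc',
        'en-US/bar.ini',
        'en-US/foo.js',
    ]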
"""Test that C-only binary artifacts are marked as such.""" - env = self._consume('prog-lib-c-only', RecursiveMakeBackend) + env = self._consume("prog-lib-c-only", RecursiveMakeBackend) # PROGRAM C-onlyness. - with open(os.path.join(env.topobjdir, 'c-program', 'backend.mk'), 'r') as fh: + with open(os.path.join(env.topobjdir, "c-program", "backend.mk"), "r") as fh: lines = fh.readlines() lines = [line.rstrip() for line in lines] - self.assertIn('PROG_IS_C_ONLY_c_test_program := 1', lines) + self.assertIn("PROG_IS_C_ONLY_c_test_program := 1", lines) - with open(os.path.join(env.topobjdir, 'cxx-program', 'backend.mk'), 'r') as fh: + with open(os.path.join(env.topobjdir, "cxx-program", "backend.mk"), "r") as fh: lines = fh.readlines() lines = [line.rstrip() for line in lines] # Test for only the absence of the variable, not the precise # form of the variable assignment. for line in lines: - self.assertNotIn('PROG_IS_C_ONLY_cxx_test_program', line) + self.assertNotIn("PROG_IS_C_ONLY_cxx_test_program", line) # SIMPLE_PROGRAMS C-onlyness. - with open(os.path.join(env.topobjdir, 'c-simple-programs', 'backend.mk'), 'r') as fh: + with open( + os.path.join(env.topobjdir, "c-simple-programs", "backend.mk"), "r" + ) as fh: lines = fh.readlines() lines = [line.rstrip() for lin