author | arthur.iakab <aiakab@mozilla.com> |
Wed, 11 Apr 2018 01:17:20 +0300 | |
changeset 412773 | 9ad2b8aabfae6a1c47599fc66d781d5a2a3aa38a |
parent 412772 | 8b752dbedbc270b1adeb1c50d572acf61f27fa42 (current diff) |
parent 412663 | 0528a414c2a86dad0623779abde5301d37337934 (diff) |
child 412774 | 7709fe9d13c7f16371995ea48dea6d08dbfaeb64 |
push id | 33818 |
push user | apavel@mozilla.com |
push date | Wed, 11 Apr 2018 14:36:40 +0000 |
treeherder | mozilla-central@cfe6399e142c [default view] [failures only] |
perfherder | [talos] [build metrics] [platform microbench] (compared to previous push) |
milestone | 61.0a1 |
first release with | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
last release without | nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
|
--- a/Cargo.lock +++ b/Cargo.lock @@ -1551,16 +1551,24 @@ version = "0.0.1" name = "proc-macro2" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] +name = "proc-macro2" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "procedural-masquerade" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pulse" version = "0.2.0" dependencies = [ @@ -1589,16 +1597,24 @@ source = "registry+https://github.com/ru name = "quote" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] +name = "quote" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "rand" version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1942,40 +1958,32 @@ dependencies = [ "size_of_test 0.0.1", "smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "style 0.0.1", "style_traits 0.0.1", ] [[package]] name = "syn" -version = "0.11.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", - "synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = 
"syn" version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "synom" -version = "0.11.2" +name = "syn" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "synstructure" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2163,21 +2171,16 @@ source = "registry+https://github.com/ru [[package]] name = "unicode-width" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unicode-xid" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicode-xid" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "unreachable" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ @@ -2422,18 +2425,18 @@ dependencies = [ "xpcom 0.1.0", ] [[package]] name = "xpcom_macros" version = "0.1.0" dependencies = [ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", 
+ "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "yaml-rust" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2596,20 +2599,22 @@ dependencies = [ "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2" "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903" "checksum plane-split 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "69c557e11e3a1533bc969fa596e5011e1d9f76dd61cd102ef942c9f8654b17a2" "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0" "checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" "checksum proc-macro2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d1cb7aaaa4bf022ec2b14ff2f2ba1643a22f3cee88df014a85e14b392282c61d" +"checksum proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118" "checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260" "checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" "checksum quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408" +"checksum quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0ff51282f28dc1b53fd154298feaa2e77c5ea0dba68e1fd8b03b72fbe13d2a" "checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd" "checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d" "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8" "checksum redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "ab105df655884ede59d45b7070c8a65002d921461ee813a024558ca16030eea0" "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" "checksum ron 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "da06feaa07f69125ab9ddc769b11de29090122170b402547f64b86fe16ebc399" "checksum runloop 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d79b4b604167921892e84afbbaad9d5ad74e091bf6c511d9dbfb0593f09fabd" @@ -2630,19 +2635,18 @@ dependencies = [ "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23" "checksum smallbitvec 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = 
"79b776f00dfe01df905fa3b2eaa1659522e99e3fc4a7b1334171622205c4bdcf" "checksum smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44db0ecb22921ef790d17ae13a3f6d15784183ff5f2a01aa32098c7498d2b4b9" "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" "checksum string_cache 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39cb4173bcbd1319da31faa5468a7e3870683d7a237150b0b0aaafd546f6ad12" "checksum string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7" "checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc" "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" -"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" "checksum syn 0.12.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9e1c669ed757c0ebd04337f6a5bb972d05e0c08fe2540dd3ee3dd9e4daf1604c" -"checksum synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "27e31aa4b09b9f4cb12dff3c30ba503e17b1a624413d764d32dab76e3920e5bc" +"checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59" "checksum synstructure 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "010366096045d8250555904c58da03377289e7f4b2ce7a5b1027e2b532f41000" "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6" "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = 
"fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209" "checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83" "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693" "checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963" "checksum thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf947d192a9be60ef5131cc7a4648886ba89d712f16700ebbf80c8a69d05d48f" @@ -2654,17 +2658,16 @@ dependencies = [ "checksum traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079" "checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" "checksum uluru 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "519130f0ea964ba540a9d8af1373738c2226f1d465eda07e61db29feb5479db9" "checksum unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" "checksum unicode-normalization 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f" "checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3" "checksum unicode-width 0.1.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" -"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" "checksum url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f808aadd8cfec6ef90e4a14eb46f24511824d1ac596b9682703c87056c8678b7" "checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" "checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f" "checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b" "checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
--- a/browser/base/content/tabbrowser.js +++ b/browser/base/content/tabbrowser.js @@ -1474,16 +1474,23 @@ window._gBrowser = { // == 1 false YES // == 1 true NO // > 1 false/true NO var multiple = aURIs.length > 1; var owner = multiple || aLoadInBackground ? null : this.selectedTab; var firstTabAdded = null; var targetTabIndex = -1; + // When bulk opening tabs, such as from a bookmark folder, we want to insertAfterCurrent + // if necessary, but we also will set the bulkOrderedOpen flag so that the bookmarks + // open in the same order they are in the folder. + if (multiple && aNewIndex < 0 && Services.prefs.getBoolPref("browser.tabs.insertAfterCurrent")) { + aNewIndex = this.selectedTab._tPos + 1; + } + if (aReplace) { let browser; if (aTargetTab) { browser = this.getBrowserForTab(aTargetTab); targetTabIndex = aTargetTab._tPos; } else { browser = this.selectedBrowser; targetTabIndex = this.tabContainer.selectedIndex; @@ -1506,31 +1513,33 @@ window._gBrowser = { } else { firstTabAdded = this.addTab(aURIs[0], { ownerTab: owner, skipAnimation: multiple, allowThirdPartyFixup: aAllowThirdPartyFixup, postData: aPostDatas[0], userContextId: aUserContextId, triggeringPrincipal: aTriggeringPrincipal, + bulkOrderedOpen: multiple, }); if (aNewIndex !== -1) { this.moveTabTo(firstTabAdded, aNewIndex); targetTabIndex = firstTabAdded._tPos; } } let tabNum = targetTabIndex; for (let i = 1; i < aURIs.length; ++i) { let tab = this.addTab(aURIs[i], { skipAnimation: true, allowThirdPartyFixup: aAllowThirdPartyFixup, postData: aPostDatas[i], userContextId: aUserContextId, triggeringPrincipal: aTriggeringPrincipal, + bulkOrderedOpen: true, }); if (targetTabIndex !== -1) this.moveTabTo(tab, ++tabNum); } if (firstTabAdded && !aLoadInBackground) { this.selectedTab = firstTabAdded; }
--- a/browser/base/content/test/tabs/browser_new_tab_insert_position.js +++ b/browser/base/content/test/tabs/browser_new_tab_insert_position.js @@ -39,40 +39,44 @@ function promiseRemoveThenUndoCloseTab(t // Compare the current browser tab order against the session state ordering, they should always match. function verifyTabState(state) { let newStateTabs = JSON.parse(state).windows[0].tabs; for (let i = 0; i < gBrowser.tabs.length; i++) { is(gBrowser.tabs[i].linkedBrowser.currentURI.spec, newStateTabs[i].entries[0].url, `tab pos ${i} matched ${gBrowser.tabs[i].linkedBrowser.currentURI.spec}`); } } +const bulkLoad = ["http://mochi.test:8888/#5", "http://mochi.test:8888/#6", + "http://mochi.test:8888/#7", "http://mochi.test:8888/#8"]; + +const sessData = { + windows: [{ + tabs: [ + {entries: [{url: "http://mochi.test:8888/#0", triggeringPrincipal_base64}]}, + {entries: [{url: "http://mochi.test:8888/#1", triggeringPrincipal_base64}]}, + {entries: [{url: "http://mochi.test:8888/#3", triggeringPrincipal_base64}]}, + {entries: [{url: "http://mochi.test:8888/#4", triggeringPrincipal_base64}]}, + ], + }], +}; +const urlbarURL = "http://example.com/#urlbar"; + async function doTest(aInsertRelatedAfterCurrent, aInsertAfterCurrent) { const kDescription = "(aInsertRelatedAfterCurrent=" + aInsertRelatedAfterCurrent + ", aInsertAfterCurrent=" + aInsertAfterCurrent + "): "; await SpecialPowers.pushPrefEnv({set: [ ["browser.tabs.opentabfor.middleclick", true], ["browser.tabs.loadBookmarksInBackground", false], ["browser.tabs.insertRelatedAfterCurrent", aInsertRelatedAfterCurrent], ["browser.tabs.insertAfterCurrent", aInsertAfterCurrent], ]}); let oldState = SessionStore.getBrowserState(); - let sessData = { - windows: [{ - tabs: [ - {entries: [{url: "http://mochi.test:8888/#0", triggeringPrincipal_base64}]}, - {entries: [{url: "http://mochi.test:8888/#1", triggeringPrincipal_base64}]}, - {entries: [{url: "http://mochi.test:8888/#3", triggeringPrincipal_base64}]}, - {entries: 
[{url: "http://mochi.test:8888/#4", triggeringPrincipal_base64}]}, - ], - }], - }; - await promiseBrowserStateRestored(sessData); // Create a *opener* tab page which has a link to "example.com". let pageURL = getRootDirectory(gTestPath).replace("chrome://mochitests/content", "http://example.com"); pageURL = `${pageURL}file_new_tab_page.html`; let openerTab = await BrowserTestUtils.openNewForegroundTab(gBrowser, pageURL); const openerTabIndex = 1; gBrowser.moveTabTo(openerTab, openerTabIndex); @@ -95,17 +99,16 @@ async function doTest(aInsertRelatedAfte is(openTab.owner, openerTab, "tab owner is set correctly"); } is(openTab.openerTab, openerTab, "opener tab is set"); // Open an unrelated tab from the URL bar and test its position. openTabIndex = aInsertAfterCurrent ? openerTabIndex + 1 : gBrowser.tabs.length; openTabDescription = aInsertAfterCurrent ? "immediately to the right" : "at rightmost"; - const urlbarURL = "http://example.com/#urlbar"; gURLBar.focus(); gURLBar.select(); newTabPromise = BrowserTestUtils.waitForNewTab(gBrowser, urlbarURL, true); EventUtils.sendString(urlbarURL); EventUtils.synthesizeKey("KEY_Alt", { altKey: true, code: "AltLeft", type: "keydown" }); EventUtils.synthesizeKey("KEY_Enter", { altKey: true, code: "Enter" }); EventUtils.synthesizeKey("KEY_Alt", { altKey: false, code: "AltLeft", type: "keyup" }); let unrelatedTab = await newTabPromise; @@ -140,16 +143,28 @@ async function doTest(aInsertRelatedAfte // Remove the tab at the end, then undo. It should reappear where it was. await promiseRemoveThenUndoCloseTab(gBrowser.tabs[gBrowser.tabs.length - 1]); verifyTabState(newState); // Remove a tab in the middle, then undo. It should reappear where it was. 
await promiseRemoveThenUndoCloseTab(gBrowser.tabs[2]); verifyTabState(newState); + // Bug 1442679 - Test bulk opening with loadTabs loads the tabs in order + + let loadPromises = Promise.all(bulkLoad.map(url => BrowserTestUtils.waitForNewTab(gBrowser, url, false, true))); + // loadTabs will insertAfterCurrent + let nextTab = aInsertAfterCurrent ? gBrowser.selectedTab._tPos + 1 : gBrowser.tabs.length; + + gBrowser.loadTabs(bulkLoad, true); + await loadPromises; + for (let i = nextTab, j = 0; j < bulkLoad.length; i++, j++) { + is(gBrowser.tabs[i].linkedBrowser.currentURI.spec, bulkLoad[j], `bulkLoad tab pos ${i} matched`); + } + // Now we want to test that positioning remains correct after a session restore. // Restore pre-test state so we can restore and test tab ordering. await promiseBrowserStateRestored(oldState); // Restore test state and verify it is as it was. await promiseBrowserStateRestored(newState); verifyTabState(newState);
--- a/browser/components/places/content/controller.js +++ b/browser/components/places/content/controller.js @@ -171,17 +171,17 @@ PlacesController.prototype = { case "placesCmd_new:separator": return this._canInsert() && !PlacesUtils.asQuery(this._view.result.root).queryOptions.excludeItems && this._view.result.sortingMode == Ci.nsINavHistoryQueryOptions.SORT_BY_NONE; case "placesCmd_show:info": { let selectedNode = this._view.selectedNode; return selectedNode && (PlacesUtils.nodeIsTagQuery(selectedNode) || - PlacesUtils.getConcreteItemId(selectedNode) != -1); + PlacesUtils.nodeIsBookmark(selectedNode)); } case "placesCmd_reload": { // Livemark containers let selectedNode = this._view.selectedNode; return selectedNode && this.hasCachedLivemarkInfo(selectedNode); } case "placesCmd_sortBy:name": { let selectedNode = this._view.selectedNode;
--- a/browser/components/places/tests/browser/browser_bookmarkProperties_readOnlyRoot.js +++ b/browser/components/places/tests/browser/browser_bookmarkProperties_readOnlyRoot.js @@ -1,21 +1,21 @@ "use strict"; -add_task(async function() { - info("Bug 479348 - Properties on a root should be read-only."); - +add_task(async function test_dialog() { + info("Bug 479348 - Properties dialog on a root should be read-only."); await withSidebarTree("bookmarks", async function(tree) { tree.selectItems([PlacesUtils.bookmarks.unfiledGuid]); - Assert.ok(tree.controller.isCommandEnabled("placesCmd_show:info"), - "'placesCmd_show:info' on current selected node is enabled"); + Assert.ok(!tree.controller.isCommandEnabled("placesCmd_show:info"), + "'placesCmd_show:info' on current selected node is disabled"); await withBookmarksDialog( true, function openDialog() { + // Even if the cmd is disabled, we can execute it regardless. tree.controller.doCommand("placesCmd_show:info"); }, async function test(dialogWin) { // Check that the dialog is read-only. 
Assert.ok(dialogWin.gEditItemOverlay.readOnly, "Dialog is read-only"); // Check that accept button is disabled let acceptButton = dialogWin.document.documentElement.getButton("accept"); Assert.ok(acceptButton.disabled, "Accept button is disabled"); @@ -24,8 +24,30 @@ add_task(async function() { let namepicker = dialogWin.document.getElementById("editBMPanel_namePicker"); Assert.ok(namepicker.readOnly, "Name field is read-only"); Assert.equal(namepicker.value, PlacesUtils.getString("OtherBookmarksFolderTitle"), "Node title is correct"); } ); }); }); + +add_task(async function test_library() { + info("Bug 479348 - Library info pane on a root should be read-only."); + let library = await promiseLibrary("UnfiledBookmarks"); + registerCleanupFunction(async function() { + await promiseLibraryClosed(library); + }); + let PlacesOrganizer = library.PlacesOrganizer; + let tree = PlacesOrganizer._places; + tree.focus(); + Assert.ok(!tree.controller.isCommandEnabled("placesCmd_show:info"), + "'placesCmd_show:info' on current selected node is disabled"); + + // Check that the pane is read-only. + Assert.ok(library.gEditItemOverlay.readOnly, "Info pane is read-only"); + + // Check that name picker is read only + let namepicker = library.document.getElementById("editBMPanel_namePicker"); + Assert.ok(namepicker.readOnly, "Name field is read-only"); + Assert.equal(namepicker.value, + PlacesUtils.getString("OtherBookmarksFolderTitle"), "Node title is correct"); +});
--- a/browser/components/places/tests/browser/head.js +++ b/browser/components/places/tests/browser/head.js @@ -204,17 +204,17 @@ var withBookmarksDialog = async function skipOverlayWait = false) { let closed = false; let dialogPromise = new Promise(resolve => { Services.ww.registerNotification(function winObserver(subject, topic, data) { if (topic == "domwindowopened") { let win = subject.QueryInterface(Ci.nsIDOMWindow); win.addEventListener("load", function() { ok(win.location.href.startsWith(dialogUrl), - "The bookmark properties dialog is open"); + "The bookmark properties dialog is open: " + win.location.href); // This is needed for the overlay. waitForFocus(() => { resolve(win); }, win); }, {once: true}); } else if (topic == "domwindowclosed") { Services.ww.unregisterNotification(winObserver); closed = true; @@ -250,29 +250,24 @@ var withBookmarksDialog = async function let closePromise = () => Promise.resolve(); if (closeFn) { closePromise = closeFn(dialogWin); } try { await taskFn(dialogWin); } finally { - if (!closed && !autoCancel) { - // Give the dialog a little time to close itself in the manually closing - // case. - await BrowserTestUtils.waitForCondition(() => closed, - "The test should have closed the dialog!"); - } - if (!closed) { + if (!closed && autoCancel) { info("withBookmarksDialog: canceling the dialog"); - doc.documentElement.cancelDialog(); - await closePromise; } + // Give the dialog a little time to close itself. + await BrowserTestUtils.waitForCondition(() => closed, + "The dialog should be closed!"); } }; /** * Opens the contextual menu on the element pointed by the given selector. * * @param selector * Valid selector syntax
--- a/browser/components/privatebrowsing/test/browser/browser_privatebrowsing_favicon.js +++ b/browser/components/privatebrowsing/test/browser/browser_privatebrowsing_favicon.js @@ -1,16 +1,14 @@ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // This test make sure that the favicon of the private browsing is isolated. -const CC = Components.Constructor; - const TEST_SITE = "http://mochi.test:8888"; const TEST_CACHE_SITE = "http://www.example.com"; const TEST_DIRECTORY = "/browser/browser/components/privatebrowsing/test/browser/"; const TEST_PAGE = TEST_SITE + TEST_DIRECTORY + "file_favicon.html"; const TEST_CACHE_PAGE = TEST_CACHE_SITE + TEST_DIRECTORY + "file_favicon.html"; const FAVICON_URI = TEST_SITE + TEST_DIRECTORY + "file_favicon.png"; const FAVICON_CACHE_URI = TEST_CACHE_SITE + TEST_DIRECTORY + "file_favicon.png"; @@ -179,31 +177,27 @@ async function openTab(aBrowser, aURL) { aBrowser.selectedTab = tab; tab.ownerGlobal.focus(); let browser = aBrowser.getBrowserForTab(tab); await BrowserTestUtils.browserLoaded(browser); return {tab, browser}; } -// A clean up function to prevent affecting other tests. -registerCleanupFunction(() => { - // Clear all cookies. +registerCleanupFunction(async () => { Services.cookies.removeAll(); - - // Clear all image caches and network caches. clearAllImageCaches(); - Services.cache2.clear(); + await PlacesUtils.history.clear(); + await PlacesUtils.bookmarks.eraseEverything(); }); add_task(async function test_favicon_privateBrowsing() { // Clear all image caches before running the test. clearAllImageCaches(); - // Clear all favicons in Places. await clearAllPlacesFavicons(); // Create a private browsing window. 
let privateWindow = await BrowserTestUtils.openNewBrowserWindow({ private: true }); let pageURI = makeURI(TEST_PAGE); // Generate two random cookies for non-private window and private window @@ -216,44 +210,50 @@ add_task(async function test_favicon_pri await assignCookies(privateWindow.gBrowser, TEST_SITE, cookies[0]); // Open a tab in non-private window and add a cookie into it. await assignCookies(gBrowser, TEST_SITE, cookies[1]); // Add the observer earlier in case we don't capture events in time. let promiseObserveFavicon = observeFavicon(true, cookies[0], pageURI); + // The page must be bookmarked for favicon requests to go through in PB mode. + await PlacesUtils.bookmarks.insert({ + parentGuid: PlacesUtils.bookmarks.unfiledGuid, + url: TEST_PAGE + }); + // Open a tab for the private window. let tabInfo = await openTab(privateWindow.gBrowser, TEST_PAGE); - // Waiting until favicon requests are all made. + info("Waiting until favicon requests are all made in private window."); await promiseObserveFavicon; // Close the tab. BrowserTestUtils.removeTab(tabInfo.tab); // FIXME: We need to wait for the next event tick here to avoid observing // the previous tab info in the next step (bug 1446725). await new Promise(executeSoon); // Add the observer earlier in case we don't capture events in time. promiseObserveFavicon = observeFavicon(false, cookies[1], pageURI); // Open a tab for the non-private window. tabInfo = await openTab(gBrowser, TEST_PAGE); - // Waiting until favicon requests are all made. + info("Waiting until favicon requests are all made in non-private window."); await promiseObserveFavicon; // Close the tab. BrowserTestUtils.removeTab(tabInfo.tab); await BrowserTestUtils.closeWindow(privateWindow); }); add_task(async function test_favicon_cache_privateBrowsing() { - // Clear all image cahces and network cache before running the test. + // Clear all image caches and network cache before running the test. 
clearAllImageCaches(); Services.cache2.clear(); // Clear all favicons in Places. await clearAllPlacesFavicons(); // Add an observer for making sure the favicon has been loaded and cached. @@ -269,16 +269,22 @@ add_task(async function test_favicon_cac // Check that the favicon response has come from the network and it has the // correct privateBrowsingId. is(response.topic, "http-on-examine-response", "The favicon image should be loaded through network."); is(response.privateBrowsingId, 0, "We should observe the network response for the non-private tab."); // Create a private browsing window. let privateWindow = await BrowserTestUtils.openNewBrowserWindow({ private: true }); + // The page must be bookmarked for favicon requests to go through in PB mode. + await PlacesUtils.bookmarks.insert({ + parentGuid: PlacesUtils.bookmarks.unfiledGuid, + url: TEST_CACHE_PAGE + }); + // Open a tab for the private window. let tabInfoPrivate = await openTab(privateWindow.gBrowser, TEST_CACHE_PAGE); // Wait for the favicon response of the private tab. response = await waitOnFaviconResponse(FAVICON_CACHE_URI); // Make sure the favicon is loaded through the network and its privateBrowsingId is correct. is(response.topic, "http-on-examine-response", "The favicon image should be loaded through the network again.");
--- a/devtools/client/inspector/markup/test/browser_markup_accessibility_semantics.js +++ b/devtools/client/inspector/markup/test/browser_markup_accessibility_semantics.js @@ -4,33 +4,33 @@ "use strict"; // Test that inspector markup view has all expected ARIA properties set and // updated. const TOP_CONTAINER_LEVEL = 3; -add_task(function* () { - let {inspector} = yield openInspectorForURL(` +add_task(async function() { + let {inspector} = await openInspectorForURL(` data:text/html;charset=utf-8, <h1>foo</h1> <span>bar</span> <ul> <li></li> </ul>`); let markup = inspector.markup; let doc = markup.doc; let win = doc.defaultView; let rootElt = markup.getContainer(markup._rootNode).elt; - let bodyContainer = yield getContainerForSelector("body", inspector); - let spanContainer = yield getContainerForSelector("span", inspector); - let headerContainer = yield getContainerForSelector("h1", inspector); - let listContainer = yield getContainerForSelector("ul", inspector); + let bodyContainer = await getContainerForSelector("body", inspector); + let spanContainer = await getContainerForSelector("span", inspector); + let headerContainer = await getContainerForSelector("h1", inspector); + let listContainer = await getContainerForSelector("ul", inspector); // Focus on the tree element. 
rootElt.focus(); // Test tree related semantics is(rootElt.getAttribute("role"), "tree", "Root container should have tree semantics"); is(rootElt.getAttribute("aria-dropeffect"), "none", @@ -66,33 +66,33 @@ add_task(function* () { ok(!spanContainer.tagLine.hasAttribute("aria-expanded"), "Non expandable tree items should not have aria-expanded attribute"); ok(!headerContainer.tagLine.hasAttribute("aria-expanded"), "Non expandable tree items should not have aria-expanded attribute"); is(listContainer.tagLine.getAttribute("aria-expanded"), "false", "Closed tree item should have aria-expanded unset"); info("Selecting and expanding list container"); - yield selectNode("ul", inspector); + await selectNode("ul", inspector); EventUtils.synthesizeKey("VK_RIGHT", {}, win); - yield waitForMultipleChildrenUpdates(inspector); + await waitForMultipleChildrenUpdates(inspector); is(rootElt.getAttribute("aria-activedescendant"), listContainer.tagLine.getAttribute("id"), "Active descendant should not be set to list container tagLine"); is(listContainer.tagLine.getAttribute("aria-expanded"), "true", "Open tree item should have aria-expanded set"); - let listItemContainer = yield getContainerForSelector("li", inspector); + let listItemContainer = await getContainerForSelector("li", inspector); is(listItemContainer.tagLine.getAttribute("aria-level"), TOP_CONTAINER_LEVEL + 1, "Grand child container tagLine should have nested level up to date"); is(listItemContainer.children.getAttribute("role"), "presentation", "Container with no children should have its children element ignored by " + "accessibility"); info("Collapsing list container"); EventUtils.synthesizeKey("VK_LEFT", {}, win); - yield waitForMultipleChildrenUpdates(inspector); + await waitForMultipleChildrenUpdates(inspector); is(listContainer.tagLine.getAttribute("aria-expanded"), "false", "Closed tree item should have aria-expanded unset"); });
--- a/dom/plugins/test/crashtests/crashtests.list +++ b/dom/plugins/test/crashtests/crashtests.list @@ -4,11 +4,11 @@ HTTP load 110650-1.html skip-if(!haveTestPlugin) HTTP script 539897-1.html asserts-if(winWidget&&browserIsRemote,0-1) skip-if(!haveTestPlugin) HTTP script 540114-1.html HTTP load 570884.html # This test relies on the reading of screenX/Y forcing a round trip to # the X server, which is a bad assumption for <browser remote>. # Plugin arch is going to change anyway with OOP content so skipping # this test for now is OK. skip-if(!haveTestPlugin||http.platform!="X11") HTTP load 598862.html -skip-if(Android||(webrender&&winWidget)) HTTP load 626602-1.html # bug 908363; bug 1322815 for webrender +skip-if(Android) HTTP load 626602-1.html # bug 908363 HTTP load 752340.html HTTP load 843086.xhtml
--- a/gfx/layers/apz/public/APZSampler.h +++ b/gfx/layers/apz/public/APZSampler.h @@ -70,22 +70,22 @@ public: void MarkAsyncTransformAppliedToContent(const LayerMetricsWrapper& aLayer); bool HasUnusedAsyncTransform(const LayerMetricsWrapper& aLayer); /** * This can be used to assert that the current thread is the * sampler thread (which samples the async transform). * This does nothing if thread assertions are disabled. */ - void AssertOnSamplerThread(); + void AssertOnSamplerThread() const; /** * Returns true if currently on the APZSampler's "sampler thread". */ - bool IsSamplerThread(); + bool IsSamplerThread() const; protected: virtual ~APZSampler(); private: RefPtr<APZCTreeManager> mApz; };
--- a/gfx/layers/apz/public/APZUpdater.h +++ b/gfx/layers/apz/public/APZUpdater.h @@ -2,22 +2,34 @@ /* vim: set ts=8 sts=2 et sw=2 tw=80: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #ifndef mozilla_layers_APZUpdater_h #define mozilla_layers_APZUpdater_h +#include <deque> +#include <unordered_map> + +#include "base/platform_thread.h" // for PlatformThreadId #include "LayersTypes.h" #include "mozilla/layers/APZTestData.h" +#include "mozilla/layers/WebRenderScrollData.h" +#include "mozilla/StaticMutex.h" +#include "mozilla/webrender/WebRenderTypes.h" #include "nsThreadUtils.h" #include "Units.h" namespace mozilla { + +namespace wr { +struct WrWindowId; +} // namespace wr + namespace layers { class APZCTreeManager; class FocusTarget; class Layer; class WebRenderScrollData; /** @@ -29,77 +41,189 @@ class WebRenderScrollData; */ class APZUpdater { NS_INLINE_DECL_THREADSAFE_REFCOUNTING(APZUpdater) public: explicit APZUpdater(const RefPtr<APZCTreeManager>& aApz); bool HasTreeManager(const RefPtr<APZCTreeManager>& aApz); + void SetWebRenderWindowId(const wr::WindowId& aWindowId); - void ClearTree(); + /** + * This function is invoked from rust on the scene builder thread when it + * is created. It effectively tells the APZUpdater "the current thread is + * the updater thread for this window id" and allows APZUpdater to remember + * which thread it is. 
+ */ + static void SetUpdaterThread(const wr::WrWindowId& aWindowId); + static void PrepareForSceneSwap(const wr::WrWindowId& aWindowId); + static void CompleteSceneSwap(const wr::WrWindowId& aWindowId, + wr::WrPipelineInfo* aInfo); + static void ProcessPendingTasks(const wr::WrWindowId& aWindowId); + + void ClearTree(LayersId aRootLayersId); void UpdateFocusState(LayersId aRootLayerTreeId, LayersId aOriginatingLayersId, const FocusTarget& aFocusTarget); void UpdateHitTestingTree(LayersId aRootLayerTreeId, Layer* aRoot, bool aIsFirstPaint, LayersId aOriginatingLayersId, uint32_t aPaintSequenceNumber); - void UpdateHitTestingTree(LayersId aRootLayerTreeId, - const WebRenderScrollData& aScrollData, - bool aIsFirstPaint, - LayersId aOriginatingLayersId, - uint32_t aPaintSequenceNumber); + /** + * This should be called (in the WR-enabled case) when the compositor receives + * a new WebRenderScrollData for a layers id. The |aScrollData| parameter is + * the scroll data for |aOriginatingLayersId| and |aEpoch| is the corresponding + * epoch for the transaction that transferred the scroll data. This function + * will store the new scroll data and update the focus state and hit-testing + * tree. + */ + void UpdateScrollDataAndTreeState(LayersId aRootLayerTreeId, + LayersId aOriginatingLayersId, + const wr::Epoch& aEpoch, + WebRenderScrollData&& aScrollData); void NotifyLayerTreeAdopted(LayersId aLayersId, const RefPtr<APZUpdater>& aOldUpdater); void NotifyLayerTreeRemoved(LayersId aLayersId); bool GetAPZTestData(LayersId aLayersId, APZTestData* aOutData); void SetTestAsyncScrollOffset(LayersId aLayersId, const FrameMetrics::ViewID& aScrollId, const CSSPoint& aOffset); void SetTestAsyncZoom(LayersId aLayersId, const FrameMetrics::ViewID& aScrollId, const LayerToParentLayerScale& aZoom); + // This can only be called on the updater thread. 
+ const WebRenderScrollData* GetScrollData(LayersId aLayersId) const; + /** * This can be used to assert that the current thread is the * updater thread (which samples the async transform). * This does nothing if thread assertions are disabled. */ - void AssertOnUpdaterThread(); + void AssertOnUpdaterThread() const; /** * Runs the given task on the APZ "updater thread" for this APZUpdater. If * this function is called from the updater thread itself then the task is * run immediately without getting queued. + * The layers id argument should be the id of the layer tree that is + * requesting this task to be run. Conceptually each layer tree has a separate + * task queue, so that if one layer tree is blocked waiting for a scene build + * then tasks for the other layer trees can still be processed. */ - void RunOnUpdaterThread(already_AddRefed<Runnable> aTask); + void RunOnUpdaterThread(LayersId aLayersId, already_AddRefed<Runnable> aTask); /** * Returns true if currently on the APZUpdater's "updater thread". */ - bool IsUpdaterThread(); + bool IsUpdaterThread() const; /** * Dispatches the given task to the APZ "controller thread", but does it *from* * the updater thread. That is, if the thread on which this function is called * is not the updater thread, the task is first dispatched to the updater thread. * When the updater thread runs it (or if this is called directly on the updater * thread), that is when the task gets dispatched to the controller thread. * The controller thread then actually runs the task. + * The layers id argument should be the id of the layer tree that is + * requesting this task to be run; in most cases this will probably just be + * the root layers id of the compositor. 
 */
-  void RunOnControllerThread(already_AddRefed<Runnable> aTask);
+  void RunOnControllerThread(LayersId aLayersId, already_AddRefed<Runnable> aTask);
 
 protected:
   virtual ~APZUpdater();
 
+  bool UsingWebRenderUpdaterThread() const;
+  static already_AddRefed<APZUpdater> GetUpdater(const wr::WrWindowId& aWindowId);
+
+  void ProcessQueue();
+
 private:
   RefPtr<APZCTreeManager> mApz;
+
+  // Map from layers id to WebRenderScrollData. This can only be touched on
+  // the updater thread.
+  std::unordered_map<LayersId,
+                     WebRenderScrollData,
+                     LayersId::HashFn> mScrollData;
+
+  // Stores epoch state for a particular layers id. This structure is only
+  // accessed on the updater thread.
+  struct EpochState {
+    // The epoch for the most recent scroll data sent from the content side.
+    wr::Epoch mRequired;
+    // The epoch for the most recent scene built and swapped in on the WR side.
+    Maybe<wr::Epoch> mBuilt;
+    // True if and only if the layers id is the root layers id for the compositor.
+    bool mIsRoot;
+
+    EpochState();
+
+    // Whether or not the state for this layers id is such that it blocks
+    // processing of tasks for the layer tree. This happens if the root layers
+    // id or a "visible" layers id has scroll data for an epoch newer than what
+    // has been built. A "visible" layers id is one that is attached to the full
+    // layer tree (i.e. there is a chain of reflayer items from the root layer
+    // tree to the relevant layer subtree). This is not always the case; for
+    // instance a content process may send the compositor layers for a document
+    // before the chrome has attached the remote iframe to the root document.
+    // Since WR only builds pipelines for "visible" layers ids, |mBuilt| being
+    // populated means that the layers id is "visible".
+    bool IsBlocked() const;
+  };
+
+  // Map from layers id to epoch state.
+  // This data structure can only be touched on the updater thread.
+ std::unordered_map<LayersId, + EpochState, + LayersId::HashFn> mEpochData; + + // Used to manage the mapping from a WR window id to APZUpdater. These are only + // used if WebRender is enabled. Both sWindowIdMap and mWindowId should only + // be used while holding the sWindowIdLock. + static StaticMutex sWindowIdLock; + static std::unordered_map<uint64_t, APZUpdater*> sWindowIdMap; + Maybe<wr::WrWindowId> mWindowId; + + // If WebRender and async scene building are enabled, this holds the thread id + // of the scene builder thread (which is the updater thread) for the + // compositor associated with this APZUpdater instance. It may be populated + // even if async scene building is not enabled, but in that case we don't + // care about the contents. + // This is written to once during init and never cleared, and so reading it + // from multiple threads during normal operation (after initialization) + // without locking should be fine. + Maybe<PlatformThreadId> mUpdaterThreadId; +#ifdef DEBUG + // This flag is used to ensure that we don't ever try to do updater-thread + // stuff before the updater thread has been properly initialized. + mutable bool mUpdaterThreadQueried; +#endif + + // Helper struct that pairs each queued runnable with the layers id that it + // is associated with. This allows us to easily implement the conceptual + // separation of mUpdaterQueue into independent queues per layers id. + struct QueuedTask { + LayersId mLayersId; + RefPtr<Runnable> mRunnable; + }; + + // Lock used to protect mUpdaterQueue + Mutex mQueueLock; + // Holds a queue of tasks to be run on the updater thread, + // when the updater thread is a WebRender thread, since it won't have a + // message loop we can dispatch to. Note that although this is a single queue + // it is conceptually separated into multiple ones, one per layers id. Tasks + // for a given layers id will always run in FIFO order, but there is no + // guaranteed ordering for tasks with different layers ids. 
+ std::deque<QueuedTask> mUpdaterQueue; }; } // namespace layers } // namespace mozilla #endif // mozilla_layers_APZUpdater_h
--- a/gfx/layers/apz/src/APZCTreeManager.cpp +++ b/gfx/layers/apz/src/APZCTreeManager.cpp @@ -514,25 +514,24 @@ APZCTreeManager::UpdateHitTestingTree(La LayerMetricsWrapper root(aRoot); UpdateHitTestingTreeImpl(aRootLayerTreeId, root, aIsFirstPaint, aOriginatingLayersId, aPaintSequenceNumber); } void APZCTreeManager::UpdateHitTestingTree(LayersId aRootLayerTreeId, - const WebRenderScrollData& aScrollData, + const WebRenderScrollDataWrapper& aScrollWrapper, bool aIsFirstPaint, LayersId aOriginatingLayersId, uint32_t aPaintSequenceNumber) { AssertOnUpdaterThread(); - WebRenderScrollDataWrapper wrapper(&aScrollData); - UpdateHitTestingTreeImpl(aRootLayerTreeId, wrapper, aIsFirstPaint, + UpdateHitTestingTreeImpl(aRootLayerTreeId, aScrollWrapper, aIsFirstPaint, aOriginatingLayersId, aPaintSequenceNumber); } bool APZCTreeManager::PushStateToWR(wr::TransactionBuilder& aTxn, const TimeStamp& aSampleTime, nsTArray<wr::WrTransformProperty>& aTransformArray) { @@ -1972,16 +1971,17 @@ APZCTreeManager::UpdateZoomConstraints(c { if (!GetUpdater()->IsUpdaterThread()) { // This can happen if we're in the UI process and got a call directly from // nsBaseWidget (as opposed to over PAPZCTreeManager). We want this function // to run on the updater thread, so bounce it over. 
MOZ_ASSERT(XRE_IsParentProcess()); GetUpdater()->RunOnUpdaterThread( + aGuid.mLayersId, NewRunnableMethod<ScrollableLayerGuid, Maybe<ZoomConstraints>>( "APZCTreeManager::UpdateZoomConstraints", this, &APZCTreeManager::UpdateZoomConstraints, aGuid, aConstraints)); return; } @@ -3237,16 +3237,30 @@ APZCTreeManager::GetUpdater() const void APZCTreeManager::AssertOnUpdaterThread() { GetUpdater()->AssertOnUpdaterThread(); } void +APZCTreeManager::LockTree() +{ + AssertOnUpdaterThread(); + mTreeLock.Lock(); +} + +void +APZCTreeManager::UnlockTree() +{ + AssertOnUpdaterThread(); + mTreeLock.Unlock(); +} + +void APZCTreeManager::SetDPI(float aDpiValue) { APZThreadUtils::AssertOnControllerThread(); mDPI = aDpiValue; } float APZCTreeManager::GetDPI() const
--- a/gfx/layers/apz/src/APZCTreeManager.h +++ b/gfx/layers/apz/src/APZCTreeManager.h @@ -52,17 +52,17 @@ class OverscrollHandoffChain; struct OverscrollHandoffState; class FocusTarget; struct FlingHandoffState; struct ScrollableLayerGuidHash; class LayerMetricsWrapper; class InputQueue; class GeckoContentController; class HitTestingTreeNode; -class WebRenderScrollData; +class WebRenderScrollDataWrapper; struct AncestorTransform; struct ScrollThumbData; /** * ****************** NOTE ON LOCK ORDERING IN APZ ************************** * * There are two main kinds of locks used by APZ: APZCTreeManager::mTreeLock * ("the tree lock") and AsyncPanZoomController::mRecursiveMutex ("APZC locks"). @@ -184,17 +184,17 @@ public: /** * Same as the above UpdateHitTestingTree, except slightly modified to take * the scrolling data passed over PWebRenderBridge instead of the raw layer * tree. This version is used when WebRender is enabled because we don't have * shadow layers in that scenario. */ void UpdateHitTestingTree(LayersId aRootLayerTreeId, - const WebRenderScrollData& aScrollData, + const WebRenderScrollDataWrapper& aScrollWrapper, bool aIsFirstPaint, LayersId aOriginatingLayersId, uint32_t aPaintSequenceNumber); /** * Called when webrender is enabled, from the sampler thread. This function * walks through the tree of APZC instances and tells webrender about the * async scroll position. It also advances APZ animations to the specified @@ -528,23 +528,35 @@ public: bool aScrollbarIsDescendant, AsyncTransformComponentMatrix* aOutClipTransform); // Assert that the current thread is the sampler thread for this APZCTM. void AssertOnSamplerThread(); // Assert that the current thread is the updater thread for this APZCTM. void AssertOnUpdaterThread(); + // Returns a pointer to the WebRenderAPI for the root layers id this APZCTreeManager + // is for. This might be null (for example, if WebRender is not enabled). 
+ already_AddRefed<wr::WebRenderAPI> GetWebRenderAPI() const; + protected: // Protected destructor, to discourage deletion outside of Release(): virtual ~APZCTreeManager(); APZSampler* GetSampler() const; APZUpdater* GetUpdater() const; + // We need to allow APZUpdater to lock and unlock this tree during a WR + // scene swap. We do this using private helpers to avoid exposing these + // functions to the world. +private: + friend class APZUpdater; + void LockTree(); + void UnlockTree(); + // Protected hooks for gtests subclass virtual AsyncPanZoomController* NewAPZCInstance(LayersId aLayersId, GeckoContentController* aController); public: // Public hooks for gtests subclass virtual TimeStamp GetFrameTime(); public: @@ -682,20 +694,16 @@ private: const AsyncPanZoomController* apzc); void NotifyScrollbarDragRejected(const ScrollableLayerGuid& aGuid) const; void NotifyAutoscrollRejected(const ScrollableLayerGuid& aGuid) const; // Requires the caller to hold mTreeLock. LayerToParentLayerMatrix4x4 ComputeTransformForNode(const HitTestingTreeNode* aNode) const; - // Returns a pointer to the WebRenderAPI for the root layers id this APZCTreeManager - // is for. This might be null (for example, if WebRender is not enabled). - already_AddRefed<wr::WebRenderAPI> GetWebRenderAPI() const; - // Returns a pointer to the GeckoContentController for the given layers id. already_AddRefed<GeckoContentController> GetContentController(LayersId aLayersId) const; protected: /* The input queue where input events are held until we know enough to * figure out where they're going. Protected so gtests can access it. */ RefPtr<InputQueue> mInputQueue;
--- a/gfx/layers/apz/src/APZSampler.cpp +++ b/gfx/layers/apz/src/APZSampler.cpp @@ -141,23 +141,23 @@ APZSampler::HasUnusedAsyncTransform(cons AsyncPanZoomController* apzc = aLayer.GetApzc(); return apzc && !apzc->GetAsyncTransformAppliedToContent() && !AsyncTransformComponentMatrix(apzc->GetCurrentAsyncTransform(AsyncPanZoomController::eForCompositing)).IsIdentity(); } void -APZSampler::AssertOnSamplerThread() +APZSampler::AssertOnSamplerThread() const { if (APZThreadUtils::GetThreadAssertionsEnabled()) { MOZ_ASSERT(IsSamplerThread()); } } bool -APZSampler::IsSamplerThread() +APZSampler::IsSamplerThread() const { return CompositorThreadHolder::IsInCompositorThread(); } } // namespace layers } // namespace mozilla
--- a/gfx/layers/apz/src/APZUpdater.cpp +++ b/gfx/layers/apz/src/APZUpdater.cpp @@ -7,56 +7,167 @@ #include "mozilla/layers/APZUpdater.h" #include "APZCTreeManager.h" #include "AsyncPanZoomController.h" #include "base/task.h" #include "mozilla/layers/APZThreadUtils.h" #include "mozilla/layers/CompositorThread.h" #include "mozilla/layers/SynchronousTask.h" -#include "mozilla/layers/WebRenderScrollData.h" +#include "mozilla/layers/WebRenderScrollDataWrapper.h" +#include "mozilla/webrender/WebRenderAPI.h" namespace mozilla { namespace layers { +StaticMutex APZUpdater::sWindowIdLock; +std::unordered_map<uint64_t, APZUpdater*> APZUpdater::sWindowIdMap; + + APZUpdater::APZUpdater(const RefPtr<APZCTreeManager>& aApz) : mApz(aApz) +#ifdef DEBUG + , mUpdaterThreadQueried(false) +#endif + , mQueueLock("APZUpdater::QueueLock") { MOZ_ASSERT(aApz); mApz->SetUpdater(this); } APZUpdater::~APZUpdater() { mApz->SetUpdater(nullptr); + + StaticMutexAutoLock lock(sWindowIdLock); + if (mWindowId) { + // Ensure that ClearTree was called and the task got run + MOZ_ASSERT(sWindowIdMap.find(wr::AsUint64(*mWindowId)) == sWindowIdMap.end()); + } } bool APZUpdater::HasTreeManager(const RefPtr<APZCTreeManager>& aApz) { return aApz.get() == mApz.get(); } void -APZUpdater::ClearTree() +APZUpdater::SetWebRenderWindowId(const wr::WindowId& aWindowId) +{ + StaticMutexAutoLock lock(sWindowIdLock); + MOZ_ASSERT(!mWindowId); + mWindowId = Some(aWindowId); + sWindowIdMap[wr::AsUint64(aWindowId)] = this; +} + +/*static*/ void +APZUpdater::SetUpdaterThread(const wr::WrWindowId& aWindowId) +{ + if (RefPtr<APZUpdater> updater = GetUpdater(aWindowId)) { + // Ensure nobody tried to use the updater thread before this point. 
+ MOZ_ASSERT(!updater->mUpdaterThreadQueried); + updater->mUpdaterThreadId = Some(PlatformThread::CurrentId()); + } +} + +/*static*/ void +APZUpdater::PrepareForSceneSwap(const wr::WrWindowId& aWindowId) +{ + if (RefPtr<APZUpdater> updater = GetUpdater(aWindowId)) { + updater->mApz->LockTree(); + } +} + +/*static*/ void +APZUpdater::CompleteSceneSwap(const wr::WrWindowId& aWindowId, + wr::WrPipelineInfo* aInfo) +{ + RefPtr<APZUpdater> updater = GetUpdater(aWindowId); + if (!updater) { + // This should only happen in cases where PrepareForSceneSwap also got a + // null updater. No updater-thread tasks get run between PrepareForSceneSwap + // and this function, so there is no opportunity for the updater mapping + // to have gotten removed from sWindowIdMap in between the two calls. + return; + } + + wr::WrPipelineId pipeline; + wr::WrEpoch epoch; + while (wr_pipeline_info_next_removed_pipeline(aInfo, &pipeline)) { + LayersId layersId = wr::AsLayersId(pipeline); + updater->mEpochData.erase(layersId); + } + // Reset the built info for all pipelines, then put it back for the ones + // that got built in this scene swap. + for (auto& i : updater->mEpochData) { + i.second.mBuilt = Nothing(); + } + while (wr_pipeline_info_next_epoch(aInfo, &pipeline, &epoch)) { + LayersId layersId = wr::AsLayersId(pipeline); + updater->mEpochData[layersId].mBuilt = Some(epoch); + } + wr_pipeline_info_delete(aInfo); + + // Run any tasks that got unblocked, then unlock the tree. The order is + // important because we want to run all the tasks up to and including the + // UpdateHitTestingTree calls corresponding to the built epochs, and we + // want to run those before we release the lock (i.e. atomically with the + // scene swap). This ensures that any hit-tests always encounter a consistent + // state between the APZ tree and the built scene in WR. 
+ // + // While we could add additional information to the queued tasks to figure + // out the minimal set of tasks we want to run here, it's easier and harmless + // to just run all the queued and now-unblocked tasks inside the lock. + // + // Note that the ProcessQueue here might remove the window id -> APZUpdater + // mapping from sWindowIdMap, but we still unlock the tree successfully to + // leave things in a good state. + updater->ProcessQueue(); + + updater->mApz->UnlockTree(); +} + +/*static*/ void +APZUpdater::ProcessPendingTasks(const wr::WrWindowId& aWindowId) +{ + if (RefPtr<APZUpdater> updater = GetUpdater(aWindowId)) { + updater->ProcessQueue(); + } +} + +void +APZUpdater::ClearTree(LayersId aRootLayersId) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - RunOnUpdaterThread(NewRunnableMethod( - "APZUpdater::ClearTree", - mApz, - &APZCTreeManager::ClearTree)); + RefPtr<APZUpdater> self = this; + RunOnUpdaterThread(aRootLayersId, NS_NewRunnableFunction( + "APZUpdater::ClearTree", + [=]() { + self->mApz->ClearTree(); + + // Once ClearTree is called on the APZCTreeManager, we are in a shutdown + // phase. After this point it's ok if WebRender cannot get a hold of the + // updater via the window id, and it's a good point to remove the mapping + // and avoid leaving a dangling pointer to this object. 
+ StaticMutexAutoLock lock(sWindowIdLock); + if (self->mWindowId) { + sWindowIdMap.erase(wr::AsUint64(*(self->mWindowId))); + } + } + )); } void APZUpdater::UpdateFocusState(LayersId aRootLayerTreeId, LayersId aOriginatingLayersId, const FocusTarget& aFocusTarget) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - RunOnUpdaterThread(NewRunnableMethod<LayersId, LayersId, FocusTarget>( + RunOnUpdaterThread(aOriginatingLayersId, NewRunnableMethod<LayersId, LayersId, FocusTarget>( "APZUpdater::UpdateFocusState", mApz, &APZCTreeManager::UpdateFocusState, aRootLayerTreeId, aOriginatingLayersId, aFocusTarget)); } @@ -69,79 +180,94 @@ APZUpdater::UpdateHitTestingTree(LayersI { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); AssertOnUpdaterThread(); mApz->UpdateHitTestingTree(aRootLayerTreeId, aRoot, aIsFirstPaint, aOriginatingLayersId, aPaintSequenceNumber); } void -APZUpdater::UpdateHitTestingTree(LayersId aRootLayerTreeId, - const WebRenderScrollData& aScrollData, - bool aIsFirstPaint, - LayersId aOriginatingLayersId, - uint32_t aPaintSequenceNumber) +APZUpdater::UpdateScrollDataAndTreeState(LayersId aRootLayerTreeId, + LayersId aOriginatingLayersId, + const wr::Epoch& aEpoch, + WebRenderScrollData&& aScrollData) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - // use the local variable to resolve the function overload. 
- auto func = static_cast<void (APZCTreeManager::*)(LayersId, - const WebRenderScrollData&, - bool, - LayersId, - uint32_t)> - (&APZCTreeManager::UpdateHitTestingTree); - RunOnUpdaterThread(NewRunnableMethod<LayersId, - WebRenderScrollData, - bool, - LayersId, - uint32_t>( - "APZUpdater::UpdateHitTestingTree", - mApz, - func, - aRootLayerTreeId, - aScrollData, - aIsFirstPaint, - aOriginatingLayersId, - aPaintSequenceNumber)); + RefPtr<APZUpdater> self = this; + // Insert an epoch requirement update into the queue, so that + // tasks inserted into the queue after this point only get executed + // once the epoch requirement is satisfied. In particular, the + // UpdateHitTestingTree call below needs to wait until the epoch requirement + // is satisfied, which is why it is a separate task in the queue. + RunOnUpdaterThread(aOriginatingLayersId, NS_NewRunnableFunction( + "APZUpdater::UpdateEpochRequirement", + [=]() { + if (aRootLayerTreeId == aOriginatingLayersId) { + self->mEpochData[aOriginatingLayersId].mIsRoot = true; + } + self->mEpochData[aOriginatingLayersId].mRequired = aEpoch; + } + )); + RunOnUpdaterThread(aOriginatingLayersId, NS_NewRunnableFunction( + "APZUpdater::UpdateHitTestingTree", + [=,aScrollData=Move(aScrollData)]() { + self->mApz->UpdateFocusState(aRootLayerTreeId, + aOriginatingLayersId, aScrollData.GetFocusTarget()); + + self->mScrollData[aOriginatingLayersId] = aScrollData; + auto root = self->mScrollData.find(aRootLayerTreeId); + if (root == self->mScrollData.end()) { + return; + } + self->mApz->UpdateHitTestingTree(aRootLayerTreeId, + WebRenderScrollDataWrapper(*self, &(root->second)), + aScrollData.IsFirstPaint(), aOriginatingLayersId, + aScrollData.GetPaintSequenceNumber()); + } + )); } void APZUpdater::NotifyLayerTreeAdopted(LayersId aLayersId, const RefPtr<APZUpdater>& aOldUpdater) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - RunOnUpdaterThread(NewRunnableMethod<LayersId, RefPtr<APZCTreeManager>>( + 
RunOnUpdaterThread(aLayersId, NewRunnableMethod<LayersId, RefPtr<APZCTreeManager>>( "APZUpdater::NotifyLayerTreeAdopted", mApz, &APZCTreeManager::NotifyLayerTreeAdopted, aLayersId, aOldUpdater ? aOldUpdater->mApz : nullptr)); } void APZUpdater::NotifyLayerTreeRemoved(LayersId aLayersId) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - RunOnUpdaterThread(NewRunnableMethod<LayersId>( - "APZUpdater::NotifyLayerTreeRemoved", - mApz, - &APZCTreeManager::NotifyLayerTreeRemoved, - aLayersId)); + RefPtr<APZUpdater> self = this; + RunOnUpdaterThread(aLayersId, NS_NewRunnableFunction( + "APZUpdater::NotifyLayerTreeRemoved", + [=]() { + self->mEpochData.erase(aLayersId); + self->mScrollData.erase(aLayersId); + self->mApz->NotifyLayerTreeRemoved(aLayersId); + } + )); } bool APZUpdater::GetAPZTestData(LayersId aLayersId, APZTestData* aOutData) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); RefPtr<APZCTreeManager> apz = mApz; bool ret = false; SynchronousTask waiter("APZUpdater::GetAPZTestData"); - RunOnUpdaterThread(NS_NewRunnableFunction( + RunOnUpdaterThread(aLayersId, NS_NewRunnableFunction( "APZUpdater::GetAPZTestData", [&]() { AutoCompleteTask notifier(&waiter); ret = apz->GetAPZTestData(aLayersId, aOutData); } )); // Wait until the task posted above has run and populated aOutData and ret @@ -152,17 +278,17 @@ APZUpdater::GetAPZTestData(LayersId aLay void APZUpdater::SetTestAsyncScrollOffset(LayersId aLayersId, const FrameMetrics::ViewID& aScrollId, const CSSPoint& aOffset) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); RefPtr<APZCTreeManager> apz = mApz; - RunOnUpdaterThread(NS_NewRunnableFunction( + RunOnUpdaterThread(aLayersId, NS_NewRunnableFunction( "APZUpdater::SetTestAsyncScrollOffset", [=]() { RefPtr<AsyncPanZoomController> apzc = apz->GetTargetAPZC(aLayersId, aScrollId); if (apzc) { apzc->SetTestAsyncScrollOffset(aOffset); } else { NS_WARNING("Unable to find APZC in SetTestAsyncScrollOffset"); } @@ -172,67 +298,256 @@ 
APZUpdater::SetTestAsyncScrollOffset(Lay void APZUpdater::SetTestAsyncZoom(LayersId aLayersId, const FrameMetrics::ViewID& aScrollId, const LayerToParentLayerScale& aZoom) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); RefPtr<APZCTreeManager> apz = mApz; - RunOnUpdaterThread(NS_NewRunnableFunction( + RunOnUpdaterThread(aLayersId, NS_NewRunnableFunction( "APZUpdater::SetTestAsyncZoom", [=]() { RefPtr<AsyncPanZoomController> apzc = apz->GetTargetAPZC(aLayersId, aScrollId); if (apzc) { apzc->SetTestAsyncZoom(aZoom); } else { NS_WARNING("Unable to find APZC in SetTestAsyncZoom"); } } )); } +const WebRenderScrollData* +APZUpdater::GetScrollData(LayersId aLayersId) const +{ + AssertOnUpdaterThread(); + auto it = mScrollData.find(aLayersId); + return (it == mScrollData.end() ? nullptr : &(it->second)); +} + void -APZUpdater::AssertOnUpdaterThread() +APZUpdater::AssertOnUpdaterThread() const { if (APZThreadUtils::GetThreadAssertionsEnabled()) { MOZ_ASSERT(IsUpdaterThread()); } } void -APZUpdater::RunOnUpdaterThread(already_AddRefed<Runnable> aTask) +APZUpdater::RunOnUpdaterThread(LayersId aLayersId, already_AddRefed<Runnable> aTask) { RefPtr<Runnable> task = aTask; - MessageLoop* loop = CompositorThreadHolder::Loop(); - if (!loop) { - // Could happen during startup - NS_WARNING("Dropping task posted to updater thread"); + if (IsUpdaterThread()) { + task->Run(); return; } - if (IsUpdaterThread()) { - task->Run(); + if (UsingWebRenderUpdaterThread()) { + // If the updater thread is a WebRender thread, and we're not on it + // right now, save the task in the queue. We will run tasks from the queue + // during the callback from the updater thread, which we trigger by the + // call to WakeSceneBuilder. 
+ + { // scope lock + MutexAutoLock lock(mQueueLock); + mUpdaterQueue.push_back(QueuedTask { aLayersId, task }); + } + RefPtr<wr::WebRenderAPI> api = mApz->GetWebRenderAPI(); + if (api) { + api->WakeSceneBuilder(); + } else { + // Not sure if this can happen, but it might be possible. If it does, + // the task is in the queue, but if we didn't get a WebRenderAPI it + // might never run, or it might run later if we manage to get a + // WebRenderAPI later. For now let's just emit a warning, this can + // probably be upgraded to an assert later. + NS_WARNING("Possibly dropping task posted to updater thread"); + } + return; + } + + if (MessageLoop* loop = CompositorThreadHolder::Loop()) { + loop->PostTask(task.forget()); } else { - loop->PostTask(task.forget()); + // Could happen during startup + NS_WARNING("Dropping task posted to updater thread"); } } bool -APZUpdater::IsUpdaterThread() +APZUpdater::IsUpdaterThread() const { + if (UsingWebRenderUpdaterThread()) { + return PlatformThread::CurrentId() == *mUpdaterThreadId; + } return CompositorThreadHolder::IsInCompositorThread(); } void -APZUpdater::RunOnControllerThread(already_AddRefed<Runnable> aTask) +APZUpdater::RunOnControllerThread(LayersId aLayersId, already_AddRefed<Runnable> aTask) { MOZ_ASSERT(CompositorThreadHolder::IsInCompositorThread()); - RunOnUpdaterThread(NewRunnableFunction( + RunOnUpdaterThread(aLayersId, NewRunnableFunction( "APZUpdater::RunOnControllerThread", &APZThreadUtils::RunOnControllerThread, Move(aTask))); } +bool +APZUpdater::UsingWebRenderUpdaterThread() const +{ + if (!gfxPrefs::WebRenderAsyncSceneBuild()) { + return false; + } + // If mUpdaterThreadId is not set at the point that this is called, then + // that means that either (a) WebRender is not enabled for the compositor + // to which this APZUpdater is attached or (b) we are attempting to do + // something updater-related before WebRender is up and running. 
In case + // (a) falling back to the compositor thread is correct, and in case (b) + // we should stop doing the updater-related thing so early. We catch this + // case by setting the mUpdaterThreadQueried flag and asserting on WR + // initialization. +#ifdef DEBUG + mUpdaterThreadQueried = true; +#endif + return mUpdaterThreadId.isSome(); +} + +/*static*/ already_AddRefed<APZUpdater> +APZUpdater::GetUpdater(const wr::WrWindowId& aWindowId) +{ + RefPtr<APZUpdater> updater; + StaticMutexAutoLock lock(sWindowIdLock); + auto it = sWindowIdMap.find(wr::AsUint64(aWindowId)); + if (it != sWindowIdMap.end()) { + updater = it->second; + } + return updater.forget(); +} + +void +APZUpdater::ProcessQueue() +{ + { // scope lock to check for emptiness + MutexAutoLock lock(mQueueLock); + if (mUpdaterQueue.empty()) { + return; + } + } + + std::deque<QueuedTask> blockedTasks; + while (true) { + QueuedTask task; + + { // scope lock to extract a task + MutexAutoLock lock(mQueueLock); + if (mUpdaterQueue.empty()) { + // If we're done processing mUpdaterQueue, swap the tasks that are + // still blocked back in and finish + std::swap(mUpdaterQueue, blockedTasks); + break; + } + task = mUpdaterQueue.front(); + mUpdaterQueue.pop_front(); + } + + // We check the task to see if it is blocked. Note that while this + // ProcessQueue function is executing, a particular layers is cannot go + // from blocked to unblocked, because only CompleteSceneSwap can unblock + // a layers id, and that also runs on the updater thread. If somehow + // a layers id gets unblocked while we're processing the queue, then it + // might result in tasks getting executed out of order. 
+ + auto it = mEpochData.find(task.mLayersId); + if (it != mEpochData.end() && it->second.IsBlocked()) { + // If this task is blocked, put it into the blockedTasks queue that + // we will replace mUpdaterQueue with + blockedTasks.push_back(task); + } else { + // Run and discard the task + task.mRunnable->Run(); + } + } +} + +APZUpdater::EpochState::EpochState() + : mRequired{0} + , mIsRoot(false) +{ +} + +bool +APZUpdater::EpochState::IsBlocked() const +{ + // The root is a special case because we basically assume it is "visible" + // even before it is built for the first time. This is because building the + // scene automatically makes it visible, and we need to make sure the APZ + // scroll data gets applied atomically with that happening. + // + // Layer subtrees on the other hand do not automatically become visible upon + // being built, because there must be a another layer tree update to change + // the visibility (i.e. an ancestor layer tree update that adds the necessary + // reflayer to complete the chain of reflayers). + // + // So in the case of non-visible subtrees, we know that no hit-test will + // actually end up hitting that subtree either before or after the scene swap, + // because the subtree will remain non-visible. That in turns means that we + // can apply the APZ scroll data for that subtree epoch before the scene is + // built, because it's not going to get used anyway. And that means we don't + // need to block the queue for non-visible subtrees. Which is a good thing, + // because in practice it seems like we often have non-visible subtrees sent + // to the compositor from content. 
+ if (mIsRoot && !mBuilt) { + return true; + } + return mBuilt && (*mBuilt < mRequired); +} + } // namespace layers } // namespace mozilla + +// Rust callback implementations + +void +apz_register_updater(mozilla::wr::WrWindowId aWindowId) +{ + mozilla::layers::APZUpdater::SetUpdaterThread(aWindowId); +} + +void +apz_pre_scene_swap(mozilla::wr::WrWindowId aWindowId) +{ + // This should never get called unless async scene building is enabled. + MOZ_ASSERT(gfxPrefs::WebRenderAsyncSceneBuild()); + mozilla::layers::APZUpdater::PrepareForSceneSwap(aWindowId); +} + +void +apz_post_scene_swap(mozilla::wr::WrWindowId aWindowId, + mozilla::wr::WrPipelineInfo* aInfo) +{ + // This should never get called unless async scene building is enabled. + MOZ_ASSERT(gfxPrefs::WebRenderAsyncSceneBuild()); + mozilla::layers::APZUpdater::CompleteSceneSwap(aWindowId, aInfo); +} + +void +apz_run_updater(mozilla::wr::WrWindowId aWindowId) +{ + // This should never get called unless async scene building is enabled. + MOZ_ASSERT(gfxPrefs::WebRenderAsyncSceneBuild()); + mozilla::layers::APZUpdater::ProcessPendingTasks(aWindowId); +} + +void +apz_deregister_updater(mozilla::wr::WrWindowId aWindowId) +{ + // Run anything that's still left. Note that this function gets called even + // if async scene building is off, but in that case we don't want to do + // anything (because the updater thread will be the compositor thread, and + // this will be called on the scene builder thread). + if (gfxPrefs::WebRenderAsyncSceneBuild()) { + mozilla::layers::APZUpdater::ProcessPendingTasks(aWindowId); + } +}
--- a/gfx/layers/ipc/APZCTreeManagerParent.cpp +++ b/gfx/layers/ipc/APZCTreeManagerParent.cpp @@ -38,17 +38,17 @@ APZCTreeManagerParent::ChildAdopted(RefP MOZ_ASSERT(aAPZUpdater->HasTreeManager(aAPZCTreeManager)); mTreeManager = Move(aAPZCTreeManager); mUpdater = Move(aAPZUpdater); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvSetKeyboardMap(const KeyboardMap& aKeyboardMap) { - mUpdater->RunOnControllerThread(NewRunnableMethod<KeyboardMap>( + mUpdater->RunOnControllerThread(mLayersId, NewRunnableMethod<KeyboardMap>( "layers::IAPZCTreeManager::SetKeyboardMap", mTreeManager, &IAPZCTreeManager::SetKeyboardMap, aKeyboardMap)); return IPC_OK(); } @@ -60,30 +60,31 @@ APZCTreeManagerParent::RecvZoomToRect( { if (aGuid.mLayersId != mLayersId) { // Guard against bad data from hijacked child processes NS_ERROR("Unexpected layers id in RecvZoomToRect; dropping message..."); return IPC_FAIL_NO_REASON(this); } mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<ScrollableLayerGuid, CSSRect, uint32_t>( "layers::IAPZCTreeManager::ZoomToRect", mTreeManager, &IAPZCTreeManager::ZoomToRect, aGuid, aRect, aFlags)); return IPC_OK(); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvContentReceivedInputBlock( const uint64_t& aInputBlockId, const bool& aPreventDefault) { - mUpdater->RunOnControllerThread(NewRunnableMethod<uint64_t, bool>( + mUpdater->RunOnControllerThread(mLayersId, NewRunnableMethod<uint64_t, bool>( "layers::IAPZCTreeManager::ContentReceivedInputBlock", mTreeManager, &IAPZCTreeManager::ContentReceivedInputBlock, aInputBlockId, aPreventDefault)); return IPC_OK(); } @@ -96,16 +97,17 @@ APZCTreeManagerParent::RecvSetTargetAPZC for (size_t i = 0; i < aTargets.Length(); i++) { if (aTargets[i].mLayersId != mLayersId) { // Guard against bad data from hijacked child processes NS_ERROR("Unexpected layers id in RecvSetTargetAPZC; dropping message..."); return IPC_FAIL_NO_REASON(this); } } mUpdater->RunOnControllerThread( + mLayersId, 
NewRunnableMethod<uint64_t, StoreCopyPassByRRef<nsTArray<ScrollableLayerGuid>>>( "layers::IAPZCTreeManager::SetTargetAPZC", mTreeManager, &IAPZCTreeManager::SetTargetAPZC, aInputBlockId, aTargets)); @@ -125,30 +127,31 @@ APZCTreeManagerParent::RecvUpdateZoomCon mTreeManager->UpdateZoomConstraints(aGuid, aConstraints); return IPC_OK(); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvSetDPI(const float& aDpiValue) { - mUpdater->RunOnControllerThread(NewRunnableMethod<float>( + mUpdater->RunOnControllerThread(mLayersId, NewRunnableMethod<float>( "layers::IAPZCTreeManager::SetDPI", mTreeManager, &IAPZCTreeManager::SetDPI, aDpiValue)); return IPC_OK(); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvSetAllowedTouchBehavior( const uint64_t& aInputBlockId, nsTArray<TouchBehaviorFlags>&& aValues) { mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<uint64_t, StoreCopyPassByRRef<nsTArray<TouchBehaviorFlags>>>( "layers::IAPZCTreeManager::SetAllowedTouchBehavior", mTreeManager, &IAPZCTreeManager::SetAllowedTouchBehavior, aInputBlockId, Move(aValues))); @@ -162,16 +165,17 @@ APZCTreeManagerParent::RecvStartScrollba { if (aGuid.mLayersId != mLayersId) { // Guard against bad data from hijacked child processes NS_ERROR("Unexpected layers id in RecvStartScrollbarDrag; dropping message..."); return IPC_FAIL_NO_REASON(this); } mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<ScrollableLayerGuid, AsyncDragMetrics>( "layers::IAPZCTreeManager::StartScrollbarDrag", mTreeManager, &IAPZCTreeManager::StartScrollbarDrag, aGuid, aDragMetrics)); return IPC_OK(); @@ -185,44 +189,47 @@ APZCTreeManagerParent::RecvStartAutoscro // Unlike RecvStartScrollbarDrag(), this message comes from the parent // process (via nsBaseWidget::mAPZC) rather than from the child process // (via TabChild::mApzcTreeManager), so there is no need to check the // layers id against mLayersId (and in any case, it wouldn't match, because // mLayersId stores the parent process's layers 
id, while nsBaseWidget is // sending the child process's layers id). mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<ScrollableLayerGuid, ScreenPoint>( "layers::IAPZCTreeManager::StartAutoscroll", mTreeManager, &IAPZCTreeManager::StartAutoscroll, aGuid, aAnchorLocation)); return IPC_OK(); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvStopAutoscroll(const ScrollableLayerGuid& aGuid) { // See RecvStartAutoscroll() for why we don't check the layers id. mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<ScrollableLayerGuid>( "layers::IAPZCTreeManager::StopAutoscroll", mTreeManager, &IAPZCTreeManager::StopAutoscroll, aGuid)); return IPC_OK(); } mozilla::ipc::IPCResult APZCTreeManagerParent::RecvSetLongTapEnabled(const bool& aLongTapEnabled) { mUpdater->RunOnControllerThread( + mLayersId, NewRunnableMethod<bool>( "layers::IAPZCTreeManager::SetLongTapEnabled", mTreeManager, &IAPZCTreeManager::SetLongTapEnabled, aLongTapEnabled)); return IPC_OK(); }
--- a/gfx/layers/ipc/CompositorBridgeParent.cpp +++ b/gfx/layers/ipc/CompositorBridgeParent.cpp @@ -635,17 +635,17 @@ CompositorBridgeParent::ActorDestroy(Act RemoveCompositor(mCompositorBridgeID); mCompositionManager = nullptr; MOZ_ASSERT((mApzSampler != nullptr) == (mApzcTreeManager != nullptr)); MOZ_ASSERT((mApzUpdater != nullptr) == (mApzcTreeManager != nullptr)); if (mApzUpdater) { mApzSampler = nullptr; - mApzUpdater->ClearTree(); + mApzUpdater->ClearTree(mRootLayerTreeID); mApzUpdater = nullptr; mApzcTreeManager = nullptr; } { // scope lock MonitorAutoLock lock(*sIndirectLayerTreesLock); sIndirectLayerTrees.erase(mRootLayerTreeID); } @@ -1761,17 +1761,24 @@ CompositorBridgeParent::AllocPWebRenderB #endif MOZ_ASSERT(wr::AsLayersId(aPipelineId) == mRootLayerTreeID); MOZ_ASSERT(!mWrBridge); MOZ_ASSERT(!mCompositor); MOZ_ASSERT(!mCompositorScheduler); MOZ_ASSERT(mWidget); RefPtr<widget::CompositorWidget> widget = mWidget; - RefPtr<wr::WebRenderAPI> api = wr::WebRenderAPI::Create(this, Move(widget), aSize); + wr::WrWindowId windowId = wr::NewWindowId(); + if (mApzUpdater) { + // If APZ is enabled, we need to register the APZ updater with the window id + // before the updater thread is created in WebRenderAPI::Create, so + // that the callback from the updater thread can find the right APZUpdater. 
+ mApzUpdater->SetWebRenderWindowId(windowId); + } + RefPtr<wr::WebRenderAPI> api = wr::WebRenderAPI::Create(this, Move(widget), windowId, aSize); if (!api) { mWrBridge = WebRenderBridgeParent::CreateDestroyed(aPipelineId); mWrBridge.get()->AddRef(); // IPDL reference *aIdNamespace = mWrBridge->GetIdNamespace(); *aTextureFactoryIdentifier = TextureFactoryIdentifier(LayersBackend::LAYERS_NONE); return mWrBridge; } mAsyncImageManager = new AsyncImagePipelineManager(api->Clone()); @@ -1823,28 +1830,32 @@ Maybe<TimeStamp> CompositorBridgeParent::GetTestingTimeStamp() const { return mTestTime; } void EraseLayerState(LayersId aId) { - MonitorAutoLock lock(*sIndirectLayerTreesLock); + RefPtr<APZUpdater> apz; - auto iter = sIndirectLayerTrees.find(aId); - if (iter != sIndirectLayerTrees.end()) { - CompositorBridgeParent* parent = iter->second.mParent; - if (parent) { - if (RefPtr<APZUpdater> apz = parent->GetAPZUpdater()) { - apz->NotifyLayerTreeRemoved(aId); + { // scope lock + MonitorAutoLock lock(*sIndirectLayerTreesLock); + auto iter = sIndirectLayerTrees.find(aId); + if (iter != sIndirectLayerTrees.end()) { + CompositorBridgeParent* parent = iter->second.mParent; + if (parent) { + apz = parent->GetAPZUpdater(); } + sIndirectLayerTrees.erase(iter); } + } - sIndirectLayerTrees.erase(iter); + if (apz) { + apz->NotifyLayerTreeRemoved(aId); } } /*static*/ void CompositorBridgeParent::DeallocateLayerTreeId(LayersId aId) { MOZ_ASSERT(NS_IsMainThread()); // Here main thread notifies compositor to remove an element from
--- a/gfx/layers/ipc/CrossProcessCompositorBridgeParent.cpp +++ b/gfx/layers/ipc/CrossProcessCompositorBridgeParent.cpp @@ -128,19 +128,20 @@ CrossProcessCompositorBridgeParent::Allo CompositorBridgeParent::LayerTreeState& state = sIndirectLayerTrees[aLayersId]; // If the widget has shutdown its compositor, we may not have had a chance yet // to unmap our layers id, and we could get here without a parent compositor. // In this case return an empty APZCTM. if (!state.mParent) { // Note: we immediately call ClearTree since otherwise the APZCTM will // retain a reference to itself, through the checkerboard observer. - RefPtr<APZCTreeManager> temp = new APZCTreeManager(LayersId{0}); + LayersId dummyId{0}; + RefPtr<APZCTreeManager> temp = new APZCTreeManager(dummyId); RefPtr<APZUpdater> tempUpdater = new APZUpdater(temp); - tempUpdater->ClearTree(); + tempUpdater->ClearTree(dummyId); return new APZCTreeManagerParent(aLayersId, temp, tempUpdater); } state.mParent->AllocateAPZCTreeManagerParent(lock, aLayersId, state); return state.mApzcTreeManagerParent; } bool CrossProcessCompositorBridgeParent::DeallocPAPZCTreeManagerParent(PAPZCTreeManagerParent* aActor)
--- a/gfx/layers/wr/AsyncImagePipelineManager.cpp +++ b/gfx/layers/wr/AsyncImagePipelineManager.cpp @@ -25,17 +25,17 @@ AsyncImagePipelineManager::AsyncImagePip , mFilter(wr::ImageRendering::Auto) , mMixBlendMode(wr::MixBlendMode::Normal) {} AsyncImagePipelineManager::AsyncImagePipelineManager(already_AddRefed<wr::WebRenderAPI>&& aApi) : mApi(aApi) , mIdNamespace(mApi->GetNamespace()) , mResourceId(0) - , mAsyncImageEpoch(0) + , mAsyncImageEpoch{0} , mWillGenerateFrame(false) , mDestroyed(false) { MOZ_COUNT_CTOR(AsyncImagePipelineManager); } AsyncImagePipelineManager::~AsyncImagePipelineManager() { @@ -124,23 +124,23 @@ AsyncImagePipelineManager::RemoveAsyncIm { if (mDestroyed) { return; } uint64_t id = wr::AsUint64(aPipelineId); if (auto entry = mAsyncImagePipelines.Lookup(id)) { AsyncImagePipeline* holder = entry.Data(); - ++mAsyncImageEpoch; // Update webrender epoch - aTxn.ClearDisplayList(wr::NewEpoch(mAsyncImageEpoch), aPipelineId); + wr::Epoch epoch = GetNextImageEpoch(); + aTxn.ClearDisplayList(epoch, aPipelineId); for (wr::ImageKey key : holder->mKeys) { aTxn.DeleteImage(key); } entry.Remove(); - RemovePipeline(aPipelineId, wr::NewEpoch(mAsyncImageEpoch)); + RemovePipeline(aPipelineId, epoch); } } void AsyncImagePipelineManager::UpdateAsyncImagePipeline(const wr::PipelineId& aPipelineId, const LayoutDeviceRect& aScBounds, const gfx::Matrix4x4& aScTransform, const gfx::MaybeIntSize& aScaleToSize, @@ -266,18 +266,17 @@ AsyncImagePipelineManager::UpdateWithout void AsyncImagePipelineManager::ApplyAsyncImages() { if (mDestroyed || mAsyncImagePipelines.Count() == 0) { return; } - ++mAsyncImageEpoch; // Update webrender epoch - wr::Epoch epoch = wr::NewEpoch(mAsyncImageEpoch); + wr::Epoch epoch = GetNextImageEpoch(); // TODO: We can improve upon this by using two transactions: one for everything that // doesn't change the display list (in other words does not cause the scene to be // re-built), and one for the rest. 
This way, if an async pipeline needs to re-build // its display list, other async pipelines can still be rendered while the scene is // building. wr::TransactionBuilder txn; @@ -411,10 +410,17 @@ AsyncImagePipelineManager::PipelineRemov // Remove Pipeline entry.Remove(); } // If mDestroyedEpoch contains nothing it means we reused the same pipeline id (probably because // we moved the tab to another window). In this case we need to keep the holder. } } +wr::Epoch +AsyncImagePipelineManager::GetNextImageEpoch() +{ + mAsyncImageEpoch.mHandle++; + return mAsyncImageEpoch; +} + } // namespace layers } // namespace mozilla
--- a/gfx/layers/wr/AsyncImagePipelineManager.h +++ b/gfx/layers/wr/AsyncImagePipelineManager.h @@ -92,16 +92,17 @@ public: aNotifications->AppendElements(Move(mImageCompositeNotifications)); } void SetWillGenerateFrame(); bool GetAndResetWillGenerateFrame(); private: + wr::Epoch GetNextImageEpoch(); uint32_t GetNextResourceId() { return ++mResourceId; } wr::IdNamespace GetNamespace() { return mIdNamespace; } wr::ImageKey GenerateImageKey() { wr::ImageKey key; key.mNamespace = GetNamespace(); key.mHandle = GetNextResourceId(); return key; @@ -166,17 +167,17 @@ private: TextureHost::ResourceUpdateOp); RefPtr<wr::WebRenderAPI> mApi; wr::IdNamespace mIdNamespace; uint32_t mResourceId; nsClassHashtable<nsUint64HashKey, PipelineTexturesHolder> mPipelineTexturesHolders; nsClassHashtable<nsUint64HashKey, AsyncImagePipeline> mAsyncImagePipelines; - uint32_t mAsyncImageEpoch; + wr::Epoch mAsyncImageEpoch; bool mWillGenerateFrame; bool mDestroyed; // Render time for the current composition. TimeStamp mCompositionTime; // When nonnull, during rendering, some compositable indicated that it will // change its rendering at this time. In order not to miss it, we composite
--- a/gfx/layers/wr/WebRenderBridgeParent.cpp +++ b/gfx/layers/wr/WebRenderBridgeParent.cpp @@ -168,17 +168,17 @@ WebRenderBridgeParent::WebRenderBridgePa , mPipelineId(aPipelineId) , mWidget(aWidget) , mApi(aApi) , mAsyncImageManager(aImageMgr) , mCompositorScheduler(aScheduler) , mAnimStorage(aAnimStorage) , mChildLayerObserverEpoch(0) , mParentLayerObserverEpoch(0) - , mWrEpoch(0) + , mWrEpoch{0} , mIdNamespace(aApi->GetNamespace()) , mPaused(false) , mDestroyed(false) , mForceRendering(false) , mReceivedDisplayList(false) { MOZ_ASSERT(mAsyncImageManager); MOZ_ASSERT(mAnimStorage); @@ -189,17 +189,17 @@ WebRenderBridgeParent::WebRenderBridgePa } } WebRenderBridgeParent::WebRenderBridgeParent(const wr::PipelineId& aPipelineId) : mCompositorBridge(nullptr) , mPipelineId(aPipelineId) , mChildLayerObserverEpoch(0) , mParentLayerObserverEpoch(0) - , mWrEpoch(0) + , mWrEpoch{0} , mIdNamespace{0} , mPaused(false) , mDestroyed(true) , mForceRendering(false) , mReceivedDisplayList(false) { } @@ -501,35 +501,39 @@ WebRenderBridgeParent::GetRootCompositor CompositorBridgeParent::GetIndirectShadowTree(GetLayersId()); if (!lts) { return nullptr; } return lts->mParent; } void -WebRenderBridgeParent::UpdateAPZ(bool aUpdateHitTestingTree) +WebRenderBridgeParent::UpdateAPZFocusState(const FocusTarget& aFocus) { CompositorBridgeParent* cbp = GetRootCompositorBridgeParent(); if (!cbp) { return; } LayersId rootLayersId = cbp->RootLayerTreeId(); - RefPtr<WebRenderBridgeParent> rootWrbp = cbp->GetWebRenderBridgeParent(); - if (!rootWrbp) { + if (RefPtr<APZUpdater> apz = cbp->GetAPZUpdater()) { + apz->UpdateFocusState(rootLayersId, GetLayersId(), aFocus); + } +} + +void +WebRenderBridgeParent::UpdateAPZScrollData(const wr::Epoch& aEpoch, + WebRenderScrollData&& aData) +{ + CompositorBridgeParent* cbp = GetRootCompositorBridgeParent(); + if (!cbp) { return; } + LayersId rootLayersId = cbp->RootLayerTreeId(); if (RefPtr<APZUpdater> apz = cbp->GetAPZUpdater()) { - 
apz->UpdateFocusState(rootLayersId, GetLayersId(), - mScrollData.GetFocusTarget()); - if (aUpdateHitTestingTree) { - apz->UpdateHitTestingTree(rootLayersId, rootWrbp->GetScrollData(), - mScrollData.IsFirstPaint(), GetLayersId(), - mScrollData.GetPaintSequenceNumber()); - } + apz->UpdateScrollDataAndTreeState(rootLayersId, GetLayersId(), aEpoch, Move(aData)); } } bool WebRenderBridgeParent::PushAPZStateToWR(wr::TransactionBuilder& aTxn, nsTArray<wr::WrTransformProperty>& aTransformArray) { CompositorBridgeParent* cbp = GetRootCompositorBridgeParent(); @@ -545,23 +549,16 @@ WebRenderBridgeParent::PushAPZStateToWR( if (frameInterval != TimeDuration::Forever()) { animationTime += frameInterval; } return apz->PushStateToWR(aTxn, animationTime, aTransformArray); } return false; } -const WebRenderScrollData& -WebRenderBridgeParent::GetScrollData() const -{ - MOZ_ASSERT(mozilla::layers::CompositorThreadHolder::IsInCompositorThread()); - return mScrollData; -} - mozilla::ipc::IPCResult WebRenderBridgeParent::RecvSetDisplayList(const gfx::IntSize& aSize, InfallibleTArray<WebRenderParentCommand>&& aCommands, InfallibleTArray<OpDestroy>&& aToDestroy, const uint64_t& aFwdTransactionId, const uint64_t& aTransactionId, const wr::LayoutSize& aContentSize, ipc::ByteBuf&& dl, @@ -583,59 +580,65 @@ WebRenderBridgeParent::RecvSetDisplayLis AUTO_PROFILER_TRACING("Paint", "SetDisplayList"); UpdateFwdTransactionId(aFwdTransactionId); // This ensures that destroy operations are always processed. It is not safe // to early-return from RecvDPEnd without doing so. 
AutoWebRenderBridgeParentAsyncMessageSender autoAsyncMessageSender(this, &aToDestroy); - uint32_t wrEpoch = GetNextWrEpoch(); + wr::Epoch wrEpoch = GetNextWrEpoch(); mAsyncImageManager->SetCompositionTime(TimeStamp::Now()); ProcessWebRenderParentCommands(aCommands); wr::TransactionBuilder txn; if (!UpdateResources(aResourceUpdates, aSmallShmems, aLargeShmems, txn)) { return IPC_FAIL(this, "Failed to deserialize resource updates"); } mReceivedDisplayList = true; + // aScrollData is moved into this function but that is not reflected by the + // function signature due to the way the IPDL generator works. We remove the + // const so that we can move this structure all the way to the desired + // destination. + // Also note that this needs to happen before the display list transaction is + // sent to WebRender, so that the UpdateHitTestingTree call is guaranteed to + // be in the updater queue at the time that the scene swap completes. + UpdateAPZScrollData(wrEpoch, Move(const_cast<WebRenderScrollData&>(aScrollData))); + wr::Vec<uint8_t> dlData(Move(dl)); // If id namespaces do not match, it means the command is obsolete, probably // because the tab just moved to a new window. // In that case do not send the commands to webrender. 
if (mIdNamespace == aIdNamespace) { if (mWidget) { LayoutDeviceIntSize widgetSize = mWidget->GetClientSize(); LayoutDeviceIntRect docRect(LayoutDeviceIntPoint(), widgetSize); txn.SetWindowParameters(widgetSize, docRect); } gfx::Color clearColor(0.f, 0.f, 0.f, 0.f); - txn.SetDisplayList(clearColor, wr::NewEpoch(wrEpoch), LayerSize(aSize.width, aSize.height), + txn.SetDisplayList(clearColor, wrEpoch, LayerSize(aSize.width, aSize.height), mPipelineId, aContentSize, dlDesc, dlData); mApi->SendTransaction(txn); ScheduleGenerateFrame(); if (ShouldParentObserveEpoch()) { mCompositorBridge->ObserveLayerUpdate(GetLayersId(), GetChildLayerObserverEpoch(), true); } } HoldPendingTransactionId(wrEpoch, aTransactionId, aTxnStartTime, aFwdTime); - mScrollData = aScrollData; - UpdateAPZ(true); - if (mIdNamespace != aIdNamespace) { // Pretend we composited since someone is wating for this event, // though DisplayList was not pushed to webrender. TimeStamp now = TimeStamp::Now(); mCompositorBridge->DidComposite(GetLayersId(), now, now); } wr::IpcResourceUpdateQueue::ReleaseShmems(this, aSmallShmems); @@ -668,48 +671,46 @@ WebRenderBridgeParent::RecvEmptyTransact AutoWebRenderBridgeParentAsyncMessageSender autoAsyncMessageSender(this, &aToDestroy); if (!aCommands.IsEmpty()) { mAsyncImageManager->SetCompositionTime(TimeStamp::Now()); ProcessWebRenderParentCommands(aCommands); ScheduleGenerateFrame(); } - mScrollData.SetFocusTarget(aFocusTarget); - UpdateAPZ(false); + UpdateAPZFocusState(aFocusTarget); if (!aCommands.IsEmpty()) { wr::TransactionBuilder txn; - uint32_t wrEpoch = GetNextWrEpoch(); - txn.UpdateEpoch(mPipelineId, wr::NewEpoch(wrEpoch)); + wr::Epoch wrEpoch = GetNextWrEpoch(); + txn.UpdateEpoch(mPipelineId, wrEpoch); mApi->SendTransaction(txn); HoldPendingTransactionId(wrEpoch, aTransactionId, aTxnStartTime, aFwdTime); } else { bool sendDidComposite = false; if (mPendingTransactionIds.empty()) { sendDidComposite = true; } - HoldPendingTransactionId(mWrEpoch, aTransactionId, 
aTxnStartTime, aFwdTime); + HoldPendingTransactionId(WrEpoch(), aTransactionId, aTxnStartTime, aFwdTime); // If WebRenderBridgeParent does not have pending DidComposites, // send DidComposite now. if (sendDidComposite) { TimeStamp now = TimeStamp::Now(); mCompositorBridge->DidComposite(GetLayersId(), now, now); } } return IPC_OK(); } mozilla::ipc::IPCResult WebRenderBridgeParent::RecvSetFocusTarget(const FocusTarget& aFocusTarget) { - mScrollData.SetFocusTarget(aFocusTarget); - UpdateAPZ(false); + UpdateAPZFocusState(aFocusTarget); return IPC_OK(); } mozilla::ipc::IPCResult WebRenderBridgeParent::RecvParentCommands(nsTArray<WebRenderParentCommand>&& aCommands) { if (mDestroyed) { return IPC_OK(); @@ -940,17 +941,17 @@ WebRenderBridgeParent::RecvClearCachedRe { if (mDestroyed) { return IPC_OK(); } mCompositorBridge->ObserveLayerUpdate(GetLayersId(), GetChildLayerObserverEpoch(), false); // Clear resources wr::TransactionBuilder txn; - txn.ClearDisplayList(wr::NewEpoch(GetNextWrEpoch()), mPipelineId); + txn.ClearDisplayList(GetNextWrEpoch(), mPipelineId); mApi->SendTransaction(txn); // Schedule generate frame to clean up Pipeline ScheduleGenerateFrame(); // Remove animations. 
for (std::unordered_set<uint64_t>::iterator iter = mActiveAnimations.begin(); iter != mActiveAnimations.end(); iter++) { mAnimStorage->ClearById(*iter); } mActiveAnimations.clear(); @@ -1236,23 +1237,23 @@ WebRenderBridgeParent::CompositeToTarget } txn.GenerateFrame(); mApi->SendTransaction(txn); } void -WebRenderBridgeParent::HoldPendingTransactionId(uint32_t aWrEpoch, +WebRenderBridgeParent::HoldPendingTransactionId(const wr::Epoch& aWrEpoch, uint64_t aTransactionId, const TimeStamp& aTxnStartTime, const TimeStamp& aFwdTime) { MOZ_ASSERT(aTransactionId > LastPendingTransactionId()); - mPendingTransactionIds.push(PendingTransactionId(wr::NewEpoch(aWrEpoch), aTransactionId, aTxnStartTime, aFwdTime)); + mPendingTransactionIds.push(PendingTransactionId(aWrEpoch, aTransactionId, aTxnStartTime, aFwdTime)); } uint64_t WebRenderBridgeParent::LastPendingTransactionId() { uint64_t id = 0; if (!mPendingTransactionIds.empty()) { id = mPendingTransactionIds.back().mId; @@ -1366,20 +1367,20 @@ WebRenderBridgeParent::Resume() void WebRenderBridgeParent::ClearResources() { if (!mApi) { return; } - uint32_t wrEpoch = GetNextWrEpoch(); + wr::Epoch wrEpoch = GetNextWrEpoch(); wr::TransactionBuilder txn; - txn.ClearDisplayList(wr::NewEpoch(wrEpoch), mPipelineId); + txn.ClearDisplayList(wrEpoch, mPipelineId); mReceivedDisplayList = false; // Schedule generate frame to clean up Pipeline ScheduleGenerateFrame(); // WrFontKeys and WrImageKeys are deleted during WebRenderAPI destruction. 
for (auto iter = mExternalImageIds.Iter(); !iter.Done(); iter.Next()) { iter.Data()->ClearWrBridge(); } @@ -1387,17 +1388,17 @@ WebRenderBridgeParent::ClearResources() for (auto iter = mAsyncCompositables.Iter(); !iter.Done(); iter.Next()) { wr::PipelineId pipelineId = wr::AsPipelineId(iter.Key()); RefPtr<WebRenderImageHost> host = iter.Data(); host->ClearWrBridge(); mAsyncImageManager->RemoveAsyncImagePipeline(pipelineId, txn); } mAsyncCompositables.Clear(); - mAsyncImageManager->RemovePipeline(mPipelineId, wr::NewEpoch(wrEpoch)); + mAsyncImageManager->RemovePipeline(mPipelineId, wrEpoch); txn.RemovePipeline(mPipelineId); mApi->SendTransaction(txn); for (std::unordered_set<uint64_t>::iterator iter = mActiveAnimations.begin(); iter != mActiveAnimations.end(); iter++) { mAnimStorage->ClearById(*iter); } mActiveAnimations.clear(); @@ -1494,21 +1495,22 @@ WebRenderBridgeParent::GetTextureFactory mApi->GetMaxTextureSize(), mApi->GetUseANGLE(), false, false, false, mApi->GetSyncHandle()); } -uint32_t +wr::Epoch WebRenderBridgeParent::GetNextWrEpoch() { - MOZ_RELEASE_ASSERT(mWrEpoch != UINT32_MAX); - return ++mWrEpoch; + MOZ_RELEASE_ASSERT(mWrEpoch.mHandle != UINT32_MAX); + mWrEpoch.mHandle++; + return mWrEpoch; } void WebRenderBridgeParent::ExtractImageCompositeNotifications(nsTArray<ImageCompositeNotificationInfo>* aNotifications) { MOZ_ASSERT(mWidget); if (mDestroyed) { return;
--- a/gfx/layers/wr/WebRenderBridgeParent.h +++ b/gfx/layers/wr/WebRenderBridgeParent.h @@ -54,17 +54,17 @@ public: RefPtr<wr::WebRenderAPI>&& aApi, RefPtr<AsyncImagePipelineManager>&& aImageMgr, RefPtr<CompositorAnimationStorage>&& aAnimStorage); static WebRenderBridgeParent* CreateDestroyed(const wr::PipelineId& aPipelineId); wr::PipelineId PipelineId() { return mPipelineId; } already_AddRefed<wr::WebRenderAPI> GetWebRenderAPI() { return do_AddRef(mApi); } - wr::Epoch WrEpoch() { return wr::NewEpoch(mWrEpoch); } + wr::Epoch WrEpoch() const { return mWrEpoch; } AsyncImagePipelineManager* AsyncImageManager() { return mAsyncImageManager; } CompositorVsyncScheduler* CompositorScheduler() { return mCompositorScheduler.get(); } mozilla::ipc::IPCResult RecvNewCompositable(const CompositableHandle& aHandle, const TextureInfo& aInfo) override; mozilla::ipc::IPCResult RecvReleaseCompositable(const CompositableHandle& aHandle) override; mozilla::ipc::IPCResult RecvCreate(const gfx::IntSize& aSize) override; @@ -148,36 +148,33 @@ public: // CompositableParentManager bool IsSameProcess() const override; base::ProcessId GetChildProcessId() override; void NotifyNotUsed(PTextureParent* aTexture, uint64_t aTransactionId) override; void SendAsyncMessage(const InfallibleTArray<AsyncParentMessageData>& aMessage) override; void SendPendingAsyncMessages() override; void SetAboutToSendAsyncMessages() override; - void HoldPendingTransactionId(uint32_t aWrEpoch, + void HoldPendingTransactionId(const wr::Epoch& aWrEpoch, uint64_t aTransactionId, const TimeStamp& aTxnStartTime, const TimeStamp& aFwdTime); uint64_t LastPendingTransactionId(); uint64_t FlushPendingTransactionIds(); uint64_t FlushTransactionIdsForEpoch(const wr::Epoch& aEpoch, const TimeStamp& aEndTime); TextureFactoryIdentifier GetTextureFactoryIdentifier(); void ExtractImageCompositeNotifications(nsTArray<ImageCompositeNotificationInfo>* aNotifications); wr::IdNamespace GetIdNamespace() { return mIdNamespace; } - void 
UpdateAPZ(bool aUpdateHitTestingTree); - const WebRenderScrollData& GetScrollData() const; - void FlushRendering(); void FlushRenderingAsync(); /** * Schedule generating WebRender frame definitely at next composite timing. * * WebRenderBridgeParent uses composite timing to check if there is an update * to AsyncImagePipelines. If there is no update, WebRenderBridgeParent skips @@ -193,16 +190,19 @@ public: wr::WebRenderAPI* aApi, AsyncImagePipelineManager* aImageMgr, CompositorAnimationStorage* aAnimStorage); private: explicit WebRenderBridgeParent(const wr::PipelineId& aPipelineId); virtual ~WebRenderBridgeParent(); + void UpdateAPZFocusState(const FocusTarget& aFocus); + void UpdateAPZScrollData(const wr::Epoch& aEpoch, WebRenderScrollData&& aData); + bool UpdateResources(const nsTArray<OpUpdateResource>& aResourceUpdates, const nsTArray<RefCountedShmem>& aSmallShmems, const nsTArray<ipc::Shmem>& aLargeShmems, wr::TransactionBuilder& aUpdates); bool AddExternalImage(wr::ExternalImageId aExtId, wr::ImageKey aKey, wr::TransactionBuilder& aResources); LayersId GetLayersId() const; @@ -221,21 +221,21 @@ private: // Have APZ push the async scroll state to WR. Returns true if an APZ // animation is in effect and we need to schedule another composition. // If scrollbars need their transforms updated, the provided aTransformArray // is populated with the property update details. 
bool PushAPZStateToWR(wr::TransactionBuilder& aTxn, nsTArray<wr::WrTransformProperty>& aTransformArray); - uint32_t GetNextWrEpoch(); + wr::Epoch GetNextWrEpoch(); private: struct PendingTransactionId { - PendingTransactionId(wr::Epoch aEpoch, uint64_t aId, const TimeStamp& aTxnStartTime, const TimeStamp& aFwdTime) + PendingTransactionId(const wr::Epoch& aEpoch, uint64_t aId, const TimeStamp& aTxnStartTime, const TimeStamp& aFwdTime) : mEpoch(aEpoch) , mId(aId) , mTxnStartTime(aTxnStartTime) , mFwdTime(aFwdTime) {} wr::Epoch mEpoch; uint64_t mId; TimeStamp mTxnStartTime; @@ -259,24 +259,21 @@ private: // These fields keep track of the latest layer observer epoch values in the child and the // parent. mChildLayerObserverEpoch is the latest epoch value received from the child. // mParentLayerObserverEpoch is the latest epoch value that we have told TabParent about // (via ObserveLayerUpdate). uint64_t mChildLayerObserverEpoch; uint64_t mParentLayerObserverEpoch; std::queue<PendingTransactionId> mPendingTransactionIds; - uint32_t mWrEpoch; + wr::Epoch mWrEpoch; wr::IdNamespace mIdNamespace; bool mPaused; bool mDestroyed; bool mForceRendering; bool mReceivedDisplayList; - - // Can only be accessed on the compositor thread. - WebRenderScrollData mScrollData; }; } // namespace layers } // namespace mozilla #endif // mozilla_layers_WebRenderBridgeParent_h
--- a/gfx/layers/wr/WebRenderLayerManager.cpp +++ b/gfx/layers/wr/WebRenderLayerManager.cpp @@ -186,16 +186,19 @@ WebRenderLayerManager::EndEmptyTransacti // because we need to repaint the window overlay which we only currently // support in a full transaction. // XXX If we end up hitting this branch a lot we can probably optimize it // by just sending an updated window overlay image instead of rebuilding // the entire WR display list. return false; } + // Since we don't do repeat transactions right now, just set the time + mAnimationReadyTime = TimeStamp::Now(); + // With the WebRenderLayerManager we reject attempts to set most kind of // "pending data" for empty transactions. Any place that attempts to update // transforms or scroll offset, for example, will get failure return values // back, and will fall back to a full transaction. Therefore the only piece // of "pending" information we need to send in an empty transaction are the // APZ focus state and canvases's CompositableOperations. if (aFlags & EndTransactionFlags::END_NO_COMPOSITE &&
--- a/gfx/layers/wr/WebRenderScrollDataWrapper.h +++ b/gfx/layers/wr/WebRenderScrollDataWrapper.h @@ -39,18 +39,20 @@ namespace layers { * This class being declared a MOZ_STACK_CLASS should help with that. * * Refer to LayerMetricsWrapper.h for actual documentation on the exposed API. */ class MOZ_STACK_CLASS WebRenderScrollDataWrapper { public: // Basic constructor for external callers. Starts the walker at the root of // the tree. - explicit WebRenderScrollDataWrapper(const WebRenderScrollData* aData = nullptr) - : mData(aData) + explicit WebRenderScrollDataWrapper(const APZUpdater& aUpdater, + const WebRenderScrollData* aData = nullptr) + : mUpdater(&aUpdater) + , mData(aData) , mLayerIndex(0) , mContainingSubtreeLastIndex(0) , mLayer(nullptr) , mMetadataIndex(0) { if (!mData) { return; } @@ -70,20 +72,22 @@ public: mMetadataIndex--; } } private: // Internal constructor for walking from one WebRenderLayerScrollData to // another. In this case we need to recompute the mMetadataIndex to be the // "topmost" scroll metadata on the new layer. - WebRenderScrollDataWrapper(const WebRenderScrollData* aData, + WebRenderScrollDataWrapper(const APZUpdater* aUpdater, + const WebRenderScrollData* aData, size_t aLayerIndex, size_t aContainingSubtreeLastIndex) - : mData(aData) + : mUpdater(aUpdater) + , mData(aData) , mLayerIndex(aLayerIndex) , mContainingSubtreeLastIndex(aContainingSubtreeLastIndex) , mLayer(nullptr) , mMetadataIndex(0) { MOZ_ASSERT(mData); mLayer = mData->GetLayerData(mLayerIndex); MOZ_ASSERT(mLayer); @@ -93,22 +97,24 @@ private: mMetadataIndex = mLayer->GetScrollMetadataCount(); if (mMetadataIndex > 0) { mMetadataIndex--; } } // Internal constructor for walking from one metadata to another metadata on // the same WebRenderLayerScrollData. 
- WebRenderScrollDataWrapper(const WebRenderScrollData* aData, + WebRenderScrollDataWrapper(const APZUpdater* aUpdater, + const WebRenderScrollData* aData, size_t aLayerIndex, size_t aContainingSubtreeLastIndex, const WebRenderLayerScrollData* aLayer, uint32_t aMetadataIndex) - : mData(aData) + : mUpdater(aUpdater) + , mData(aData) , mLayerIndex(aLayerIndex) , mContainingSubtreeLastIndex(aContainingSubtreeLastIndex) , mLayer(aLayer) , mMetadataIndex(aMetadataIndex) { MOZ_ASSERT(mData); MOZ_ASSERT(mLayer); MOZ_ASSERT(mLayer == mData->GetLayerData(mLayerIndex)); @@ -129,66 +135,63 @@ public: WebRenderScrollDataWrapper GetLastChild() const { MOZ_ASSERT(IsValid()); if (!AtBottomLayer()) { // If we're still walking around in the virtual container layers created // by the ScrollMetadata array, we just need to update the metadata index // and that's it. - return WebRenderScrollDataWrapper(mData, mLayerIndex, + return WebRenderScrollDataWrapper(mUpdater, mData, mLayerIndex, mContainingSubtreeLastIndex, mLayer, mMetadataIndex - 1); } // Otherwise, we need to walk to a different WebRenderLayerScrollData in // mData. // Since mData contains the layer in depth-first, last-to-first order, // the index after mLayerIndex must be mLayerIndex's last child, if it // has any children (indicated by GetDescendantCount() > 0). Furthermore // we compute the first index outside the subtree rooted at this node // (in |subtreeLastIndex|) and pass that in to the child wrapper to use as // its mContainingSubtreeLastIndex. if (mLayer->GetDescendantCount() > 0) { size_t prevSiblingIndex = mLayerIndex + 1 + mLayer->GetDescendantCount(); size_t subtreeLastIndex = std::min(mContainingSubtreeLastIndex, prevSiblingIndex); - return WebRenderScrollDataWrapper(mData, mLayerIndex + 1, subtreeLastIndex); + return WebRenderScrollDataWrapper(mUpdater, mData, mLayerIndex + 1, subtreeLastIndex); } // We've run out of descendants. But! 
If the original layer was a RefLayer, // then it connects to another layer tree and we need to traverse that too. // So return a WebRenderScrollDataWrapper for the root of the child layer // tree. if (mLayer->GetReferentId()) { - CompositorBridgeParent::LayerTreeState* lts = - CompositorBridgeParent::GetIndirectShadowTree(mLayer->GetReferentId().value()); - if (lts && lts->mWrBridge) { - return WebRenderScrollDataWrapper(&(lts->mWrBridge->GetScrollData())); - } + return WebRenderScrollDataWrapper(*mUpdater, + mUpdater->GetScrollData(*mLayer->GetReferentId())); } - return WebRenderScrollDataWrapper(); + return WebRenderScrollDataWrapper(*mUpdater); } WebRenderScrollDataWrapper GetPrevSibling() const { MOZ_ASSERT(IsValid()); if (!AtTopLayer()) { // The virtual container layers don't have siblings - return WebRenderScrollDataWrapper(); + return WebRenderScrollDataWrapper(*mUpdater); } // Skip past the descendants to get to the previous sibling. However, we // might be at the last sibling already. size_t prevSiblingIndex = mLayerIndex + 1 + mLayer->GetDescendantCount(); if (prevSiblingIndex < mContainingSubtreeLastIndex) { - return WebRenderScrollDataWrapper(mData, prevSiblingIndex, mContainingSubtreeLastIndex); + return WebRenderScrollDataWrapper(mUpdater, mData, prevSiblingIndex, mContainingSubtreeLastIndex); } - return WebRenderScrollDataWrapper(); + return WebRenderScrollDataWrapper(*mUpdater); } const ScrollMetadata& Metadata() const { MOZ_ASSERT(IsValid()); if (mMetadataIndex >= mLayer->GetScrollMetadataCount()) { return *ScrollMetadata::sNullMetadata; @@ -335,16 +338,17 @@ private: } bool AtTopLayer() const { return mLayer->GetScrollMetadataCount() == 0 || mMetadataIndex == mLayer->GetScrollMetadataCount() - 1; } private: + const APZUpdater* mUpdater; const WebRenderScrollData* mData; // The index (in mData->mLayerScrollData) of the WebRenderLayerScrollData this // wrapper is pointing to. 
size_t mLayerIndex; // The upper bound on the set of valid indices inside the subtree rooted at // the parent of this "layer". That is, any layer index |i| in the range // mLayerIndex <= i < mContainingSubtreeLastIndex is guaranteed to point to // a layer that is a descendant of "parent", where "parent" is the parent
--- a/gfx/thebes/gfxPrefs.h +++ b/gfx/thebes/gfxPrefs.h @@ -502,16 +502,17 @@ private: DECL_GFX_PREF(Once, "gfx.use-iosurface-textures", UseIOSurfaceTextures, bool, false); DECL_GFX_PREF(Once, "gfx.use-mutex-on-present", UseMutexOnPresent, bool, false); DECL_GFX_PREF(Once, "gfx.use-surfacetexture-textures", UseSurfaceTextureTextures, bool, false); DECL_GFX_PREF(Live, "gfx.vsync.collect-scroll-transforms", CollectScrollTransforms, bool, false); DECL_GFX_PREF(Once, "gfx.vsync.compositor.unobserve-count", CompositorUnobserveCount, int32_t, 10); DECL_GFX_PREF(Once, "gfx.webrender.all", WebRenderAll, bool, false); + DECL_GFX_PREF(Once, "gfx.webrender.async-scene-build", WebRenderAsyncSceneBuild, bool, false); DECL_GFX_PREF(Once, "gfx.webrender.enabled", WebRenderEnabledDoNotUseDirectly, bool, false); DECL_OVERRIDE_PREF(Live, "gfx.webrender.blob-images", WebRenderBlobImages, gfxPrefs::WebRenderAll()); DECL_GFX_PREF(Live, "gfx.webrender.blob.invalidation", WebRenderBlobInvalidation, bool, false); DECL_GFX_PREF(Live, "gfx.webrender.highlight-painted-layers",WebRenderHighlightPaintedLayers, bool, false); DECL_GFX_PREF(Live, "gfx.webrender.hit-test", WebRenderHitTest, bool, true); // Use vsync events generated by hardware DECL_GFX_PREF(Once, "gfx.work-around-driver-bugs", WorkAroundDriverBugs, bool, true);
--- a/gfx/webrender_bindings/WebRenderAPI.cpp +++ b/gfx/webrender_bindings/WebRenderAPI.cpp @@ -1,16 +1,18 @@ /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* vim: set ts=8 sts=2 et sw=2 tw=80: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "WebRenderAPI.h" + #include "DisplayItemClipChain.h" +#include "gfxPrefs.h" #include "LayersLogging.h" #include "mozilla/webrender/RendererOGL.h" #include "mozilla/gfx/gfxVars.h" #include "mozilla/layers/CompositorThread.h" #include "mozilla/webrender/RenderCompositor.h" #include "mozilla/widget/CompositorWidget.h" #include "mozilla/layers/SynchronousTask.h" @@ -129,18 +131,25 @@ public: private: layers::SynchronousTask* mTask; }; TransactionBuilder::TransactionBuilder() { - mTxn = wr_transaction_new(); - mResourceUpdates = wr_resource_updates_new(); + // We need the if statement to avoid miscompilation on windows, see + // bug 1449982 comment 22. 
+ if (gfxPrefs::WebRenderAsyncSceneBuild()) { + mTxn = wr_transaction_new(true); + mResourceUpdates = wr_resource_updates_new(); + } else { + mResourceUpdates = wr_resource_updates_new(); + mTxn = wr_transaction_new(false); + } } TransactionBuilder::~TransactionBuilder() { wr_transaction_delete(mTxn); wr_resource_updates_delete(mResourceUpdates); } @@ -246,49 +255,47 @@ WebRenderAPI::InitExternalLogHandler() /*static*/ void WebRenderAPI::ShutdownExternalLogHandler() { mozilla::wr::wr_shutdown_external_log_handler(); } /*static*/ already_AddRefed<WebRenderAPI> -WebRenderAPI::Create(layers::CompositorBridgeParentBase* aBridge, +WebRenderAPI::Create(layers::CompositorBridgeParent* aBridge, RefPtr<widget::CompositorWidget>&& aWidget, + const wr::WrWindowId& aWindowId, LayoutDeviceIntSize aSize) { MOZ_ASSERT(aBridge); MOZ_ASSERT(aWidget); static_assert(sizeof(size_t) == sizeof(uintptr_t), "The FFI bindings assume size_t is the same size as uintptr_t!"); - static uint64_t sNextId = 1; - auto id = NewWindowId(sNextId++); - wr::DocumentHandle* docHandle = nullptr; uint32_t maxTextureSize = 0; bool useANGLE = false; layers::SyncHandle syncHandle = 0; // Dispatch a synchronous task because the DocumentHandle object needs to be created // on the render thread. If need be we could delay waiting on this task until // the next time we need to access the DocumentHandle object. 
layers::SynchronousTask task("Create Renderer"); auto event = MakeUnique<NewRenderer>(&docHandle, aBridge, &maxTextureSize, &useANGLE, Move(aWidget), &task, aSize, &syncHandle); - RenderThread::Get()->RunEvent(id, Move(event)); + RenderThread::Get()->RunEvent(aWindowId, Move(event)); task.Wait(); if (!docHandle) { return nullptr; } - return RefPtr<WebRenderAPI>(new WebRenderAPI(docHandle, id, maxTextureSize, useANGLE, syncHandle)).forget(); + return RefPtr<WebRenderAPI>(new WebRenderAPI(docHandle, aWindowId, maxTextureSize, useANGLE, syncHandle)).forget(); } already_AddRefed<WebRenderAPI> WebRenderAPI::Clone() { wr::DocumentHandle* docHandle = nullptr; wr_api_clone(mDocHandle, &docHandle); @@ -495,16 +502,22 @@ WebRenderAPI::Resume() // implies that all frame data have been processed when the renderer runs this event. RunOnRenderThread(Move(event)); task.Wait(); return result; } void +WebRenderAPI::WakeSceneBuilder() +{ + wr_api_wake_scene_builder(mDocHandle); +} + +void WebRenderAPI::WaitFlushed() { class WaitFlushedEvent : public RendererEvent { public: explicit WaitFlushedEvent(layers::SynchronousTask* aTask) : mTask(aTask) {
--- a/gfx/webrender_bindings/WebRenderAPI.h +++ b/gfx/webrender_bindings/WebRenderAPI.h @@ -25,17 +25,17 @@ namespace mozilla { struct DisplayItemClipChain; namespace widget { class CompositorWidget; } namespace layers { -class CompositorBridgeParentBase; +class CompositorBridgeParent; class WebRenderBridgeParent; } namespace wr { class DisplayListBuilder; class RendererOGL; class RendererEvent; @@ -147,18 +147,19 @@ protected: }; class WebRenderAPI { NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebRenderAPI); public: /// This can be called on the compositor thread only. - static already_AddRefed<WebRenderAPI> Create(layers::CompositorBridgeParentBase* aBridge, + static already_AddRefed<WebRenderAPI> Create(layers::CompositorBridgeParent* aBridge, RefPtr<widget::CompositorWidget>&& aWidget, + const wr::WrWindowId& aWindowId, LayoutDeviceIntSize aSize); already_AddRefed<WebRenderAPI> CreateDocument(LayoutDeviceIntSize aSize, int8_t aLayerIndex); // Redirect the WR's log to gfxCriticalError/Note. static void InitExternalLogHandler(); static void ShutdownExternalLogHandler(); @@ -177,16 +178,18 @@ public: void RunOnRenderThread(UniquePtr<RendererEvent> aEvent); void Readback(gfx::IntSize aSize, uint8_t *aBuffer, uint32_t aBufferSize); void Pause(); bool Resume(); + void WakeSceneBuilder(); + wr::WrIdNamespace GetNamespace(); uint32_t GetMaxTextureSize() const { return mMaxTextureSize; } bool GetUseANGLE() const { return mUseANGLE; } layers::SyncHandle GetSyncHandle() const { return mSyncHandle; } void Capture(); protected:
--- a/gfx/webrender_bindings/WebRenderTypes.cpp +++ b/gfx/webrender_bindings/WebRenderTypes.cpp @@ -6,16 +6,26 @@ #include "WebRenderTypes.h" #include "mozilla/ipc/ByteBuf.h" namespace mozilla { namespace wr { +WindowId +NewWindowId() +{ + static uint64_t sNextId = 1; + + WindowId id; + id.mHandle = sNextId++; + return id; +} + void Assign_WrVecU8(wr::WrVecU8& aVec, mozilla::ipc::ByteBuf&& aOther) { aVec.data = aOther.mData; aVec.length = aOther.mLen; aVec.capacity = aOther.mCapacity; aOther.mData = nullptr; aOther.mLen = 0;
--- a/gfx/webrender_bindings/WebRenderTypes.h +++ b/gfx/webrender_bindings/WebRenderTypes.h @@ -39,27 +39,18 @@ typedef wr::WrExternalImageId ExternalIm typedef wr::WrDebugFlags DebugFlags; typedef mozilla::Maybe<mozilla::wr::WrImageMask> MaybeImageMask; typedef Maybe<ExternalImageId> MaybeExternalImageId; typedef Maybe<FontInstanceOptions> MaybeFontInstanceOptions; typedef Maybe<FontInstancePlatformOptions> MaybeFontInstancePlatformOptions; -inline WindowId NewWindowId(uint64_t aId) { - WindowId id; - id.mHandle = aId; - return id; -} - -inline Epoch NewEpoch(uint32_t aEpoch) { - Epoch e; - e.mHandle = aEpoch; - return e; -} +/* Generate a brand new window id and return it. */ +WindowId NewWindowId(); inline DebugFlags NewDebugFlags(uint32_t aFlags) { DebugFlags flags; flags.mBits = aFlags; return flags; } inline Maybe<wr::ImageFormat>
--- a/gfx/webrender_bindings/src/bindings.rs +++ b/gfx/webrender_bindings/src/bindings.rs @@ -7,16 +7,17 @@ use std::sync::Arc; use std::os::raw::{c_void, c_char, c_float}; use gleam::gl; use webrender::api::*; use webrender::{ReadPixelsFormat, Renderer, RendererOptions, ThreadListener}; use webrender::{ExternalImage, ExternalImageHandler, ExternalImageSource}; use webrender::DebugFlags; use webrender::{ApiRecordingReceiver, BinaryRecorder}; +use webrender::{PipelineInfo, SceneBuilderHooks}; use webrender::{ProgramCache, UploadMethod, VertexUsageHint}; use thread_profiler::register_thread_with_profiler; use moz2d_renderer::Moz2dImageRenderer; use app_units::Au; use rayon; use euclid::SideOffsets2D; use log; @@ -596,26 +597,29 @@ pub unsafe extern "C" fn wr_renderer_del // let renderer go out of scope and get dropped } pub struct WrPipelineInfo { epochs: Vec<(WrPipelineId, WrEpoch)>, removed_pipelines: Vec<PipelineId>, } -#[no_mangle] -pub unsafe extern "C" fn wr_renderer_flush_pipeline_info(renderer: &mut Renderer) -> *mut WrPipelineInfo { - let info = renderer.flush_pipeline_info(); - let pipeline_epochs = Box::new( +impl WrPipelineInfo { + fn new(info: PipelineInfo) -> Self { WrPipelineInfo { epochs: info.epochs.into_iter().collect(), removed_pipelines: info.removed_pipelines, } - ); - return Box::into_raw(pipeline_epochs); + } +} + +#[no_mangle] +pub unsafe extern "C" fn wr_renderer_flush_pipeline_info(renderer: &mut Renderer) -> *mut WrPipelineInfo { + let info = renderer.flush_pipeline_info(); + Box::into_raw(Box::new(WrPipelineInfo::new(info))) } #[no_mangle] pub unsafe extern "C" fn wr_pipeline_info_next_epoch( info: &mut WrPipelineInfo, out_pipeline: &mut WrPipelineId, out_epoch: &mut WrEpoch ) -> bool { @@ -640,16 +644,62 @@ pub unsafe extern "C" fn wr_pipeline_inf } /// cbindgen:postfix=WR_DESTRUCTOR_SAFE_FUNC #[no_mangle] pub unsafe extern "C" fn wr_pipeline_info_delete(info: *mut WrPipelineInfo) { Box::from_raw(info); } +#[allow(improper_ctypes)] // 
this is needed so that rustc doesn't complain about passing the *mut WrPipelineInfo to an extern function +extern "C" { + fn apz_register_updater(window_id: WrWindowId); + fn apz_pre_scene_swap(window_id: WrWindowId); + // This function takes ownership of the pipeline_info and is responsible for + // freeing it via wr_pipeline_info_delete. + fn apz_post_scene_swap(window_id: WrWindowId, pipeline_info: *mut WrPipelineInfo); + fn apz_run_updater(window_id: WrWindowId); + fn apz_deregister_updater(window_id: WrWindowId); +} + +struct APZCallbacks { + window_id: WrWindowId, +} + +impl APZCallbacks { + pub fn new(window_id: WrWindowId) -> Self { + APZCallbacks { + window_id, + } + } +} + +impl SceneBuilderHooks for APZCallbacks { + fn register(&self) { + unsafe { apz_register_updater(self.window_id) } + } + + fn pre_scene_swap(&self) { + unsafe { apz_pre_scene_swap(self.window_id) } + } + + fn post_scene_swap(&self, info: PipelineInfo) { + let info = Box::into_raw(Box::new(WrPipelineInfo::new(info))); + unsafe { apz_post_scene_swap(self.window_id, info) } + } + + fn poke(&self) { + unsafe { apz_run_updater(self.window_id) } + } + + fn deregister(&self) { + unsafe { apz_deregister_updater(self.window_id) } + } +} + extern "C" { fn gecko_profiler_register_thread(name: *const ::std::os::raw::c_char); fn gecko_profiler_unregister_thread(); } struct GeckoProfilerThreadListener {} impl GeckoProfilerThreadListener { @@ -779,16 +829,17 @@ pub extern "C" fn wr_window_new(window_i match CStr::from_ptr(override_charptr).to_str() { Ok(override_str) => Some(PathBuf::from(override_str)), _ => None } } }, renderer_id: Some(window_id.0), upload_method, + scene_builder_hooks: Some(Box::new(APZCallbacks::new(window_id))), ..Default::default() }; let notifier = Box::new(CppNotifier { window_id: window_id, }); let (renderer, sender) = match Renderer::new(gl, notifier, opts) { Ok((renderer, sender)) => (renderer, sender), @@ -858,18 +909,27 @@ pub unsafe extern "C" fn wr_api_delete(d /// 
cbindgen:postfix=WR_DESTRUCTOR_SAFE_FUNC #[no_mangle] pub unsafe extern "C" fn wr_api_shut_down(dh: &mut DocumentHandle) { dh.api.shut_down(); } #[no_mangle] -pub extern "C" fn wr_transaction_new() -> *mut Transaction { - Box::into_raw(Box::new(Transaction::new())) +pub extern "C" fn wr_transaction_new(do_async: bool) -> *mut Transaction { + let mut transaction = Transaction::new(); + // Ensure that we either use async scene building or not based on the + // gecko pref, regardless of what the default is. We can remove this once + // the scene builder thread is enabled everywhere and working well. + if do_async { + transaction.use_scene_builder_thread(); + } else { + transaction.skip_scene_builder(); + } + Box::into_raw(Box::new(transaction)) } /// cbindgen:postfix=WR_DESTRUCTOR_SAFE_FUNC #[no_mangle] pub extern "C" fn wr_transaction_delete(txn: *mut Transaction) { unsafe { let _ = Box::from_raw(txn); } } @@ -1313,16 +1373,21 @@ pub extern "C" fn wr_resource_updates_de } } #[no_mangle] pub unsafe extern "C" fn wr_api_get_namespace(dh: &mut DocumentHandle) -> WrIdNamespace { dh.api.get_namespace_id() } +#[no_mangle] +pub unsafe extern "C" fn wr_api_wake_scene_builder(dh: &mut DocumentHandle) { + dh.api.wake_scene_builder(); +} + // RenderThread WIP notes: // In order to separate the compositor thread (or ipc receiver) and the render // thread, some of the logic below needs to be rewritten. In particular // the WrWindowState and Notifier implementations aren't designed to work with // a separate render thread. // As part of that I am moving the bindings closer to WebRender's API boundary, // and moving more of the logic in C++ land. // This work is tracked by bug 1328602.
--- a/gfx/webrender_bindings/webrender_ffi.h +++ b/gfx/webrender_bindings/webrender_ffi.h @@ -80,19 +80,28 @@ struct FontInstanceFlags { FONT_SMOOTHING = 1 << 16, FORCE_AUTOHINT = 1 << 16, NO_AUTOHINT = 1 << 17, VERTICAL_LAYOUT = 1 << 18 }; }; +struct WrWindowId; +struct WrPipelineInfo; + } // namespace wr } // namespace mozilla +void apz_register_updater(mozilla::wr::WrWindowId aWindowId); +void apz_pre_scene_swap(mozilla::wr::WrWindowId aWindowId); +void apz_post_scene_swap(mozilla::wr::WrWindowId aWindowId, mozilla::wr::WrPipelineInfo* aInfo); +void apz_run_updater(mozilla::wr::WrWindowId aWindowId); +void apz_deregister_updater(mozilla::wr::WrWindowId aWindowId); + } // extern "C" // Some useful defines to stub out webrender binding functions for when we // build gecko without webrender. We try to tell the compiler these functions // are unreachable in that case, but VC++ emits a warning if it finds any // unreachable functions invoked from destructors. That warning gets turned into // an error and causes the build to fail. So for wr_* functions called by // destructors in C++ classes, use WR_DESTRUCTOR_SAFE_FUNC instead, which omits
--- a/gfx/webrender_bindings/webrender_ffi_generated.h +++ b/gfx/webrender_bindings/webrender_ffi_generated.h @@ -310,16 +310,30 @@ struct FontKey { }; using WrFontKey = FontKey; using VecU8 = Vec<uint8_t>; using ArcVecU8 = Arc<VecU8>; +struct WrWindowId { + uint64_t mHandle; + + bool operator==(const WrWindowId& aOther) const { + return mHandle == aOther.mHandle; + } + bool operator<(const WrWindowId& aOther) const { + return mHandle < aOther.mHandle; + } + bool operator<=(const WrWindowId& aOther) const { + return mHandle <= aOther.mHandle; + } +}; + template<typename T, typename U> struct TypedSize2D { T width; T height; bool operator==(const TypedSize2D& aOther) const { return width == aOther.width && height == aOther.height; @@ -740,30 +754,16 @@ struct MutByteSlice { uintptr_t len; bool operator==(const MutByteSlice& aOther) const { return buffer == aOther.buffer && len == aOther.len; } }; -struct WrWindowId { - uint64_t mHandle; - - bool operator==(const WrWindowId& aOther) const { - return mHandle == aOther.mHandle; - } - bool operator<(const WrWindowId& aOther) const { - return mHandle < aOther.mHandle; - } - bool operator<=(const WrWindowId& aOther) const { - return mHandle <= aOther.mHandle; - } -}; - struct Epoch { uint32_t mHandle; bool operator==(const Epoch& aOther) const { return mHandle == aOther.mHandle; } bool operator<(const Epoch& aOther) const { return mHandle < aOther.mHandle; @@ -942,16 +942,27 @@ extern void AddFontData(WrFontKey aKey, const ArcVecU8 *aVec); extern void AddNativeFontHandle(WrFontKey aKey, void *aHandle, uint32_t aIndex); extern void DeleteFontData(WrFontKey aKey); +extern void apz_deregister_updater(WrWindowId aWindowId); + +extern void apz_post_scene_swap(WrWindowId aWindowId, + WrPipelineInfo *aPipelineInfo); + +extern void apz_pre_scene_swap(WrWindowId aWindowId); + +extern void apz_register_updater(WrWindowId aWindowId); + +extern void apz_run_updater(WrWindowId aWindowId); + extern void gecko_printf_stderr_output(const 
char *aMsg); extern void gecko_profiler_register_thread(const char *aName); extern void gecko_profiler_unregister_thread(); extern void gfx_critical_error(const char *aMsg); @@ -1030,16 +1041,20 @@ void wr_api_send_transaction(DocumentHan Transaction *aTransaction) WR_FUNC; WR_INLINE void wr_api_shut_down(DocumentHandle *aDh) WR_DESTRUCTOR_SAFE_FUNC; WR_INLINE +void wr_api_wake_scene_builder(DocumentHandle *aDh) +WR_FUNC; + +WR_INLINE void wr_clear_item_tag(WrState *aState) WR_FUNC; WR_INLINE void wr_dec_ref_arc(const VecU8 *aArc) WR_DESTRUCTOR_SAFE_FUNC; WR_INLINE @@ -1575,17 +1590,17 @@ WR_INLINE void wr_transaction_generate_frame(Transaction *aTxn) WR_FUNC; WR_INLINE bool wr_transaction_is_empty(const Transaction *aTxn) WR_FUNC; WR_INLINE -Transaction *wr_transaction_new() +Transaction *wr_transaction_new(bool aDoAsync) WR_FUNC; WR_INLINE void wr_transaction_remove_pipeline(Transaction *aTxn, WrPipelineId aPipelineId) WR_FUNC; WR_INLINE
--- a/layout/tools/reftest/reftest.jsm +++ b/layout/tools/reftest/reftest.jsm @@ -363,16 +363,19 @@ function ReadTests() { } if (testList) { logger.debug("Reading test objects from: " + testList); let promise = OS.File.read(testList).then(function onSuccess(array) { let decoder = new TextDecoder(); g.urls = JSON.parse(decoder.decode(array)).map(CreateUrls); StartTests(); + }).catch(function onFailure(e) { + logger.error("Failed to load test objects: " + e); + DoneTests(); }); } else if (manifests) { // Parse reftest manifests // XXX There is a race condition in the manifest parsing code which // sometimes shows up on Android jsreftests (bug 1416125). It seems // adding/removing log statements can change its frequency. logger.debug("Reading " + manifests.length + " manifests"); manifests = JSON.parse(manifests);
new file mode 100644 --- /dev/null +++ b/python/docs/index.rst @@ -0,0 +1,48 @@ +================================= +Using third-party Python packages +================================= + +When using third-party Python packages, there are two options: + +#. Install/use a vendored version of the package. +#. Install the package from a package index, such as PyPI or our internal + mirror. + +Vendoring Python packages +========================= + +If the Python package is to be used in the building of Firefox itself, then we +**MUST** use a vendored version. This ensures that to build Firefox we only +require a checkout of the source, and do not depend on a package index. This +ensures that building Firefox is deterministic and dependable, avoids packages +from changing out from under us, and means we’re not affected when 3rd party +services are offline. We don't want a DoS against PyPI or a random package +maintainer removing an old tarball to delay a Firefox chemspill. + +Where possible, the following policy applies to **ALL** vendored packages: + +* Vendored libraries **SHOULD NOT** be modified except as required to + successfully vendor them. +* Vendored libraries **SHOULD** be released copies of libraries available on + PyPI. + +Using a Python package index +============================ + +If the Python package is not used in the building of Firefox then it can be +installed from a package index. Some tasks are not permitted to use external +resources, and for those we can publish packages to an internal PyPI mirror. +See `how to upload to internal PyPI <https://wiki.mozilla.org/ReleaseEngineering/How_To/Upload_to_internal_Pypi>`_ +for more details. If you are not restricted, you can install packages from PyPI +or another package index. + +All packages installed from a package index **MUST** specify hashes to ensure +compatibility and protect against remote tampering. 
Hash-checking mode can be +forced on when using ``pip`` by specifying the ``--require-hashes`` +command-line option. See `hash-checking mode <https://pip.pypa.io/en/stable/reference/pip_install/#hash-checking-mode>`_ for +more details. + +Note that when using a Python package index there is a risk that the service +could be unavailable, or packages may be updated or even pulled without notice. +These issues are less likely with our internal PyPI mirror, but still possible. +If this is undesirable, then consider vendoring the package.
--- a/python/moz.build +++ b/python/moz.build @@ -43,16 +43,18 @@ SPHINX_PYTHON_PACKAGE_DIRS += [ 'mozversioncontrol/mozversioncontrol', ] with Files('**.py'): SCHEDULES.inclusive += ['docs'] SPHINX_TREES['mach'] = 'mach/docs' +SPHINX_TREES['python'] = 'docs' + with Files('mach/docs/**'): SCHEDULES.exclusive = ['docs'] PYTHON_UNITTEST_MANIFESTS += [ 'mach/mach/test/python.ini', 'mozbuild/dumbmake/test/python.ini', 'mozlint/test/python.ini', 'mozterm/test/python.ini',
--- a/testing/mozbase/mozfile/mozfile/mozfile.py +++ b/testing/mozbase/mozfile/mozfile/mozfile.py @@ -247,39 +247,21 @@ def depth(directory): while True: directory, remainder = os.path.split(directory) level += 1 if not remainder: break return level -# ASCII delimeters -ascii_delimeters = { - 'vertical_line': '|', - 'item_marker': '+', - 'last_child': '\\' -} - -# unicode delimiters -unicode_delimeters = { - 'vertical_line': '│', - 'item_marker': '├', - 'last_child': '└' -} - - -def tree(directory, - item_marker=unicode_delimeters['item_marker'], - vertical_line=unicode_delimeters['vertical_line'], - last_child=unicode_delimeters['last_child'], - sort_key=lambda x: x.lower()): - """ - display tree directory structure for `directory` - """ +def tree(directory, sort_key=lambda x: x.lower()): + """Display tree directory structure for `directory`.""" + vertical_line = u'│' + item_marker = u'├' + last_child = u'└' retval = [] indent = [] last = {} top = depth(directory) for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
--- a/testing/mozbase/mozfile/tests/manifest.ini +++ b/testing/mozbase/mozfile/tests/manifest.ini @@ -1,8 +1,9 @@ [DEFAULT] subsuite = mozbase, os == "linux" [test_extract.py] [test_load.py] [test_move_remove.py] [test_tempdir.py] [test_tempfile.py] +[test_tree.py] [test_url.py]
new file mode 100644 --- /dev/null +++ b/testing/mozbase/mozfile/tests/test_tree.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# coding=UTF-8 + +from __future__ import absolute_import + +import os +import shutil +import tempfile +import unittest + +import mozunit + +from mozfile import tree + + +class TestTree(unittest.TestCase): + """Test the tree function.""" + + def test_unicode_paths(self): + """Test creating tree structure from a Unicode path.""" + try: + tmpdir = tempfile.mkdtemp(suffix=u'tmp🍪') + os.mkdir(os.path.join(tmpdir, u'dir🍪')) + with open(os.path.join(tmpdir, u'file🍪'), 'w') as f: + f.write('foo') + + self.assertEqual(u'{}\n├file🍪\n└dir🍪'.format(tmpdir), tree(tmpdir)) + finally: + shutil.rmtree(tmpdir) + + +if __name__ == '__main__': + mozunit.main()
--- a/testing/mozbase/mozprofile/mozprofile/profile.py +++ b/testing/mozbase/mozprofile/mozprofile/profile.py @@ -353,17 +353,21 @@ class Profile(object): if return_parts: return parts retval = '%s\n' % ('\n\n'.join(['[%s]: %s' % (key, value) for key, value in parts])) return retval - __str__ = summary + def __str__(self): + return unicode(self).encode('utf-8') + + def __unicode__(self): + return self.summary() class FirefoxProfile(Profile): """Specialized Profile subclass for Firefox""" preferences = { # Don't automatically update the application 'app.update.enabled': False, # Don't restore the last open set of tabs if the browser has crashed
--- a/testing/mozbase/mozprofile/tests/test_profile_view.py +++ b/testing/mozbase/mozprofile/tests/test_profile_view.py @@ -39,23 +39,25 @@ class TestProfilePrint(unittest.TestCase self.assertEqual(set(parts.keys()), keys) self.assertEqual(pref_string, parts['user.js'].strip()) except BaseException: raise finally: mozfile.rmtree(tempdir) - def test_strcast(self): - """ - test casting to a string - """ + def test_str_cast(self): + """Test casting to a string.""" + profile = mozprofile.Profile() + self.assertEqual(str(profile), profile.summary().encode("utf-8")) + def test_unicode_cast(self): + """Test casting to a unicode string.""" profile = mozprofile.Profile() - self.assertEqual(str(profile), profile.summary()) + self.assertEqual(unicode(profile), profile.summary()) def test_profile_diff(self): profile1 = mozprofile.Profile() profile2 = mozprofile.Profile(preferences=dict(foo='bar')) # diff a profile against itself; no difference self.assertEqual([], mozprofile.diff(profile1, profile1))
deleted file mode 100644 --- a/testing/web-platform/meta/css/css-shapes/shape-outside/shape-image/gradients/shape-outside-linear-gradient-005.html.ini +++ /dev/null @@ -1,3 +0,0 @@ -[shape-outside-linear-gradient-005.html] - disabled: - if os == "mac": // https://bugzilla.mozilla.org/show_bug.cgi?id=1451123
deleted file mode 100644 --- a/testing/web-platform/meta/css/css-shapes/shape-outside/shape-image/gradients/shape-outside-linear-gradient-006.html.ini +++ /dev/null @@ -1,3 +0,0 @@ -[shape-outside-linear-gradient-006.html] - disabled: - if os == "mac": // https://bugzilla.mozilla.org/show_bug.cgi?id=1451123
deleted file mode 100644 --- a/testing/web-platform/meta/css/css-shapes/shape-outside/shape-image/gradients/shape-outside-linear-gradient-007.html.ini +++ /dev/null @@ -1,3 +0,0 @@ -[shape-outside-linear-gradient-007.html] - disabled: - if os == "mac": // https://bugzilla.mozilla.org/show_bug.cgi?id=1451123
deleted file mode 100644 --- a/testing/web-platform/meta/css/css-shapes/shape-outside/shape-image/gradients/shape-outside-linear-gradient-008.html.ini +++ /dev/null @@ -1,3 +0,0 @@ -[shape-outside-linear-gradient-008.html] - disabled: - if os == "mac": // https://bugzilla.mozilla.org/show_bug.cgi?id=1451123
copy from third_party/rust/proc-macro2/.cargo-checksum.json copy to third_party/rust/proc-macro2-0.2.2/.cargo-checksum.json
copy from third_party/rust/proc-macro2/.travis.yml copy to third_party/rust/proc-macro2-0.2.2/.travis.yml
copy from third_party/rust/proc-macro2/Cargo.toml copy to third_party/rust/proc-macro2-0.2.2/Cargo.toml
rename from third_party/rust/syn-0.11.11/LICENSE-APACHE rename to third_party/rust/proc-macro2-0.2.2/LICENSE-APACHE
new file mode 100644 --- /dev/null +++ b/third_party/rust/proc-macro2-0.2.2/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 Alex Crichton + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE.
copy from third_party/rust/proc-macro2/README.md copy to third_party/rust/proc-macro2-0.2.2/README.md
copy from third_party/rust/proc-macro2/src/lib.rs copy to third_party/rust/proc-macro2-0.2.2/src/lib.rs
rename from third_party/rust/proc-macro2/src/macros.rs rename to third_party/rust/proc-macro2-0.2.2/src/macros.rs
copy from third_party/rust/proc-macro2/src/stable.rs copy to third_party/rust/proc-macro2-0.2.2/src/stable.rs
copy from third_party/rust/proc-macro2/src/strnom.rs copy to third_party/rust/proc-macro2-0.2.2/src/strnom.rs
copy from third_party/rust/proc-macro2/src/unstable.rs copy to third_party/rust/proc-macro2-0.2.2/src/unstable.rs
copy from third_party/rust/proc-macro2/tests/test.rs copy to third_party/rust/proc-macro2-0.2.2/tests/test.rs
--- a/third_party/rust/proc-macro2/.cargo-checksum.json +++ b/third_party/rust/proc-macro2/.cargo-checksum.json @@ -1,1 +1,1 @@ -{"files":{".travis.yml":"e455a0ed5c3dd056d31f4c7be088bc94f21cab6595a23f2f015b1efc0ac2b55c","Cargo.toml":"0b700f1e7b8ba76ce4678d36b6906d38455e88f51085ea9f120d6ca63f13d5d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"adf2e822923878c2ebf4a0a782898c598fc6f57a3af905b85d57fc716c836687","src/lib.rs":"fbae25504264b185d877fb8784d4d88333ea34a7cbeddca3277dc8421f179933","src/macros.rs":"414505e520b8d705b4ce5a64ec2e82d6d1af0b88567454169486a668fbc1e9c8","src/stable.rs":"6363c4c0ef989c2ec81aa75be71c69a103d45a1de439f3f3bcb6806d8a78a172","src/strnom.rs":"1baded8543a9930798fb16092fe51e9074591902e327e0f94eb1c908a6370de9","src/unstable.rs":"110d27103e37427b3d1dcb45b6ba9dc9f5641a255766a43d5db0f4fd10a341ed","tests/test.rs":"9e75d5289abc1dc58c1df00ae051d8c3cd2c0d7830cca5ad689007c05acffe26"},"package":"d1cb7aaaa4bf022ec2b14ff2f2ba1643a22f3cee88df014a85e14b392282c61d"} \ No newline at end of file 
+{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"6ed5d7b9bf8805abd76f9e2a9be99b98e2cb70d9b97980b8aa09b6082d26a94d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"e99fedcb4b410c626fe1a3ab722c8b4f98baed2c64c2dff28c4eb62da354f2e2","src/stable.rs":"fd8d86f7542d211030056a7cdcc58b86131180d54f461910a4a067269eee9d4a","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"a8229931093cd6b39f759c60ef097e59bc43c98f1b0e5eea06ecc8d5d0879853"},"package":"49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"} \ No newline at end of file
--- a/third_party/rust/proc-macro2/.travis.yml +++ b/third_party/rust/proc-macro2/.travis.yml @@ -7,16 +7,17 @@ matrix: - rust: stable - rust: beta - rust: nightly before_script: - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH script: - cargo test - cargo build --features nightly + - cargo build --no-default-features - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps after_success: - travis-cargo --only nightly doc-upload script: - cargo test
--- a/third_party/rust/proc-macro2/Cargo.toml +++ b/third_party/rust/proc-macro2/Cargo.toml @@ -7,25 +7,27 @@ # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] name = "proc-macro2" -version = "0.2.2" +version = "0.3.6" authors = ["Alex Crichton <alex@alexcrichton.com>"] description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n" homepage = "https://github.com/alexcrichton/proc-macro2" documentation = "https://docs.rs/proc-macro2" readme = "README.md" keywords = ["macros"] license = "MIT/Apache-2.0" repository = "https://github.com/alexcrichton/proc-macro2" [lib] doctest = false [dependencies.unicode-xid] version = "0.1" [features] -nightly = [] +default = ["proc-macro"] +nightly = ["proc-macro"] +proc-macro = []
--- a/third_party/rust/proc-macro2/README.md +++ b/third_party/rust/proc-macro2/README.md @@ -22,17 +22,17 @@ the real `proc_macro` crate once the sup ## Usage This crate by default compiles on the stable version of the compiler. It only uses the stable surface area of the `proc_macro` crate upstream in the compiler itself. Usage is done via: ```toml [dependencies] -proc-macro2 = "0.2" +proc-macro2 = "0.3" ``` followed by ```rust extern crate proc_macro; extern crate proc_macro2; @@ -52,17 +52,17 @@ If you'd like you can enable the `nightl cause it to compile against the **unstable and nightly-only** features of the `proc_macro` crate. This in turn requires a nightly compiler. This should help preserve span information, however, coming in from the compiler itself. You can enable this feature via: ```toml [dependencies] -proc-macro2 = { version = "0.2", features = ["nightly"] } +proc-macro2 = { version = "0.3", features = ["nightly"] } ``` ## Unstable Features `proc-macro2` supports exporting some methods from `proc_macro` which are currently highly unstable, and may not be stabilized in the first pass of `proc_macro` stabilizations. These features are not exported by default. Minor
--- a/third_party/rust/proc-macro2/src/lib.rs +++ b/third_party/rust/proc-macro2/src/lib.rs @@ -1,113 +1,135 @@ -//! A "shim crate" intended to multiplex the `proc_macro` API on to stable Rust. +//! A "shim crate" intended to multiplex the [`proc_macro`] API on to stable +//! Rust. //! //! Procedural macros in Rust operate over the upstream -//! `proc_macro::TokenStream` type. This type currently is quite conservative -//! and exposed no internal implementation details. Nightly compilers, however, -//! contain a much richer interface. This richer interface allows fine-grained -//! inspection of the token stream which avoids stringification/re-lexing and -//! also preserves span information. +//! [`proc_macro::TokenStream`][ts] type. This type currently is quite +//! conservative and exposed no internal implementation details. Nightly +//! compilers, however, contain a much richer interface. This richer interface +//! allows fine-grained inspection of the token stream which avoids +//! stringification/re-lexing and also preserves span information. //! -//! The upcoming APIs added to `proc_macro` upstream are the foundation for +//! The upcoming APIs added to [`proc_macro`] upstream are the foundation for //! productive procedural macros in the ecosystem. To help prepare the ecosystem //! for using them this crate serves to both compile on stable and nightly and //! mirrors the API-to-be. The intention is that procedural macros which switch //! to use this crate will be trivially able to switch to the upstream //! `proc_macro` crate once its API stabilizes. //! //! In the meantime this crate also has a `nightly` Cargo feature which -//! enables it to reimplement itself with the unstable API of `proc_macro`. +//! enables it to reimplement itself with the unstable API of [`proc_macro`]. //! This'll allow immediate usage of the beneficial upstream API, particularly //! around preserving span information. +//! +//! 
[`proc_macro`]: https://doc.rust-lang.org/proc_macro/ +//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html // Proc-macro2 types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.2")] - +#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.6")] #![cfg_attr(feature = "nightly", feature(proc_macro))] +#[cfg(feature = "proc-macro")] extern crate proc_macro; #[cfg(not(feature = "nightly"))] extern crate unicode_xid; use std::fmt; +use std::iter::FromIterator; +use std::marker; +use std::rc::Rc; use std::str::FromStr; -use std::iter::FromIterator; #[macro_use] #[cfg(not(feature = "nightly"))] mod strnom; #[path = "stable.rs"] #[cfg(not(feature = "nightly"))] mod imp; #[path = "unstable.rs"] #[cfg(feature = "nightly")] mod imp; -#[macro_use] -mod macros; +#[derive(Clone)] +pub struct TokenStream { + inner: imp::TokenStream, + _marker: marker::PhantomData<Rc<()>>, +} + +pub struct LexError { + inner: imp::LexError, + _marker: marker::PhantomData<Rc<()>>, +} -#[derive(Clone)] -pub struct TokenStream(imp::TokenStream); +impl TokenStream { + fn _new(inner: imp::TokenStream) -> TokenStream { + TokenStream { + inner: inner, + _marker: marker::PhantomData, + } + } -pub struct LexError(imp::LexError); + pub fn empty() -> TokenStream { + TokenStream::_new(imp::TokenStream::empty()) + } + + pub fn is_empty(&self) -> bool { + self.inner.is_empty() + } +} impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result<TokenStream, LexError> { - match src.parse() { - Ok(e) => Ok(TokenStream(e)), - Err(e) => Err(LexError(e)), - } + let e = src.parse().map_err(|e| LexError { + inner: e, + _marker: marker::PhantomData, + })?; + Ok(TokenStream::_new(e)) } } +#[cfg(feature = "proc-macro")] impl From<proc_macro::TokenStream> for TokenStream { fn from(inner: proc_macro::TokenStream) -> TokenStream { - TokenStream(inner.into()) + TokenStream::_new(inner.into()) } } +#[cfg(feature = "proc-macro")] 
impl From<TokenStream> for proc_macro::TokenStream { fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.0.into() + inner.inner.into() } } -impl From<TokenTree> for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(tree.into()) - } -} - -impl<T: Into<TokenStream>> FromIterator<T> for TokenStream { - fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self { - TokenStream(streams.into_iter().map(|t| t.into().0).collect()) +impl FromIterator<TokenTree> for TokenStream { + fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self { + TokenStream::_new(streams.into_iter().collect()) } } -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - TokenTreeIter(self.0.into_iter()) +impl fmt::Display for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } } -impl TokenStream { - pub fn empty() -> TokenStream { - TokenStream(imp::TokenStream::empty()) +impl fmt::Debug for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } +} - pub fn is_empty(&self) -> bool { - self.0.is_empty() +impl fmt::Debug for LexError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } } // Returned by reference, so we can't easily wrap it. 
#[cfg(procmacro2_semver_exempt)] pub use imp::FileName; #[cfg(procmacro2_semver_exempt)] @@ -142,196 +164,427 @@ impl fmt::Debug for SourceFile { #[cfg(procmacro2_semver_exempt)] pub struct LineColumn { pub line: usize, pub column: usize, } #[derive(Copy, Clone)] -pub struct Span(imp::Span); +pub struct Span { + inner: imp::Span, + _marker: marker::PhantomData<Rc<()>>, +} impl Span { - pub fn call_site() -> Span { - Span(imp::Span::call_site()) + fn _new(inner: imp::Span) -> Span { + Span { + inner: inner, + _marker: marker::PhantomData, + } } + pub fn call_site() -> Span { + Span::_new(imp::Span::call_site()) + } + + #[cfg(procmacro2_semver_exempt)] pub fn def_site() -> Span { - Span(imp::Span::def_site()) + Span::_new(imp::Span::def_site()) } /// Creates a new span with the same line/column information as `self` but /// that resolves symbols as though it were at `other`. + #[cfg(procmacro2_semver_exempt)] pub fn resolved_at(&self, other: Span) -> Span { - Span(self.0.resolved_at(other.0)) + Span::_new(self.inner.resolved_at(other.inner)) } /// Creates a new span with the same name resolution behavior as `self` but /// with the line/column information of `other`. + #[cfg(procmacro2_semver_exempt)] pub fn located_at(&self, other: Span) -> Span { - Span(self.0.located_at(other.0)) + Span::_new(self.inner.located_at(other.inner)) } /// This method is only available when the `"nightly"` feature is enabled. 
- #[cfg(feature = "nightly")] + #[cfg(all(feature = "nightly", feature = "proc-macro"))] pub fn unstable(self) -> proc_macro::Span { - self.0.unstable() + self.inner.unstable() } #[cfg(procmacro2_semver_exempt)] pub fn source_file(&self) -> SourceFile { - SourceFile(self.0.source_file()) + SourceFile(self.inner.source_file()) } #[cfg(procmacro2_semver_exempt)] pub fn start(&self) -> LineColumn { - let imp::LineColumn{ line, column } = self.0.start(); - LineColumn { line: line, column: column } + let imp::LineColumn { line, column } = self.inner.start(); + LineColumn { + line: line, + column: column, + } } #[cfg(procmacro2_semver_exempt)] pub fn end(&self) -> LineColumn { - let imp::LineColumn{ line, column } = self.0.end(); - LineColumn { line: line, column: column } + let imp::LineColumn { line, column } = self.inner.end(); + LineColumn { + line: line, + column: column, + } } #[cfg(procmacro2_semver_exempt)] pub fn join(&self, other: Span) -> Option<Span> { - self.0.join(other.0).map(Span) + self.inner.join(other.inner).map(Span::_new) + } + + #[cfg(procmacro2_semver_exempt)] + pub fn eq(&self, other: &Span) -> bool { + self.inner.eq(&other.inner) + } +} + +impl fmt::Debug for Span { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } } #[derive(Clone, Debug)] -pub struct TokenTree { - pub span: Span, - pub kind: TokenNode, +pub enum TokenTree { + Group(Group), + Term(Term), + Op(Op), + Literal(Literal), } -impl From<TokenNode> for TokenTree { - fn from(kind: TokenNode) -> TokenTree { - TokenTree { span: Span::def_site(), kind: kind } +impl TokenTree { + pub fn span(&self) -> Span { + match *self { + TokenTree::Group(ref t) => t.span(), + TokenTree::Term(ref t) => t.span(), + TokenTree::Op(ref t) => t.span(), + TokenTree::Literal(ref t) => t.span(), + } + } + + pub fn set_span(&mut self, span: Span) { + match *self { + TokenTree::Group(ref mut t) => t.set_span(span), + TokenTree::Term(ref mut t) => t.set_span(span), + TokenTree::Op(ref 
mut t) => t.set_span(span), + TokenTree::Literal(ref mut t) => t.set_span(span), + } + } +} + +impl From<Group> for TokenTree { + fn from(g: Group) -> TokenTree { + TokenTree::Group(g) + } +} + +impl From<Term> for TokenTree { + fn from(g: Term) -> TokenTree { + TokenTree::Term(g) + } +} + +impl From<Op> for TokenTree { + fn from(g: Op) -> TokenTree { + TokenTree::Op(g) + } +} + +impl From<Literal> for TokenTree { + fn from(g: Literal) -> TokenTree { + TokenTree::Literal(g) } } impl fmt::Display for TokenTree { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - TokenStream::from(self.clone()).fmt(f) + match *self { + TokenTree::Group(ref t) => t.fmt(f), + TokenTree::Term(ref t) => t.fmt(f), + TokenTree::Op(ref t) => t.fmt(f), + TokenTree::Literal(ref t) => t.fmt(f), + } } } #[derive(Clone, Debug)] -pub enum TokenNode { - Group(Delimiter, TokenStream), - Term(Term), - Op(char, Spacing), - Literal(Literal), +pub struct Group { + delimiter: Delimiter, + stream: TokenStream, + span: Span, } #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Delimiter { Parenthesis, Brace, Bracket, None, } -#[derive(Copy, Clone)] -pub struct Term(imp::Term); +impl Group { + pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { + Group { + delimiter: delimiter, + stream: stream, + span: Span::call_site(), + } + } -impl Term { - pub fn intern(string: &str) -> Term { - Term(imp::Term::intern(string)) + pub fn delimiter(&self) -> Delimiter { + self.delimiter + } + + pub fn stream(&self) -> TokenStream { + self.stream.clone() } - pub fn as_str(&self) -> &str { - self.0.as_str() + pub fn span(&self) -> Span { + self.span + } + + pub fn set_span(&mut self, span: Span) { + self.span = span; } } +impl fmt::Display for Group { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.stream.fmt(f) + } +} + +#[derive(Copy, Clone, Debug)] +pub struct Op { + op: char, + spacing: Spacing, + span: Span, +} + #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Spacing { 
Alone, Joint, } +impl Op { + pub fn new(op: char, spacing: Spacing) -> Op { + Op { + op: op, + spacing: spacing, + span: Span::call_site(), + } + } + + pub fn op(&self) -> char { + self.op + } + + pub fn spacing(&self) -> Spacing { + self.spacing + } + + pub fn span(&self) -> Span { + self.span + } + + pub fn set_span(&mut self, span: Span) { + self.span = span; + } +} + +impl fmt::Display for Op { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.op.fmt(f) + } +} + +#[derive(Copy, Clone)] +pub struct Term { + inner: imp::Term, + _marker: marker::PhantomData<Rc<()>>, +} + +impl Term { + fn _new(inner: imp::Term) -> Term { + Term { + inner: inner, + _marker: marker::PhantomData, + } + } + + pub fn new(string: &str, span: Span) -> Term { + Term::_new(imp::Term::new(string, span.inner)) + } + + pub fn as_str(&self) -> &str { + self.inner.as_str() + } + + pub fn span(&self) -> Span { + Span::_new(self.inner.span()) + } + + pub fn set_span(&mut self, span: Span) { + self.inner.set_span(span.inner); + } +} + +impl fmt::Display for Term { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.as_str().fmt(f) + } +} + +impl fmt::Debug for Term { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } +} + #[derive(Clone)] -pub struct Literal(imp::Literal); +pub struct Literal { + inner: imp::Literal, + _marker: marker::PhantomData<Rc<()>>, +} macro_rules! int_literals { - ($($kind:ident,)*) => ($( - pub fn $kind(n: $kind) -> Literal { - Literal(n.into()) + ($($name:ident => $kind:ident,)*) => ($( + pub fn $name(n: $kind) -> Literal { + Literal::_new(imp::Literal::$name(n)) } )*) } impl Literal { - pub fn integer(s: i64) -> Literal { - Literal(imp::Literal::integer(s)) + fn _new(inner: imp::Literal) -> Literal { + Literal { + inner: inner, + _marker: marker::PhantomData, + } } int_literals! 
{ - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, + u8_suffixed => u8, + u16_suffixed => u16, + u32_suffixed => u32, + u64_suffixed => u64, + usize_suffixed => usize, + i8_suffixed => i8, + i16_suffixed => i16, + i32_suffixed => i32, + i64_suffixed => i64, + isize_suffixed => isize, + + u8_unsuffixed => u8, + u16_unsuffixed => u16, + u32_unsuffixed => u32, + u64_unsuffixed => u64, + usize_unsuffixed => usize, + i8_unsuffixed => i8, + i16_unsuffixed => i16, + i32_unsuffixed => i32, + i64_unsuffixed => i64, + isize_unsuffixed => isize, } - pub fn float(f: f64) -> Literal { - Literal(imp::Literal::float(f)) + pub fn f64_unsuffixed(f: f64) -> Literal { + assert!(f.is_finite()); + Literal::_new(imp::Literal::f64_unsuffixed(f)) } - pub fn f64(f: f64) -> Literal { - Literal(f.into()) + pub fn f64_suffixed(f: f64) -> Literal { + assert!(f.is_finite()); + Literal::_new(imp::Literal::f64_suffixed(f)) } - pub fn f32(f: f32) -> Literal { - Literal(f.into()) + pub fn f32_unsuffixed(f: f32) -> Literal { + assert!(f.is_finite()); + Literal::_new(imp::Literal::f32_unsuffixed(f)) + } + + pub fn f32_suffixed(f: f32) -> Literal { + assert!(f.is_finite()); + Literal::_new(imp::Literal::f32_suffixed(f)) } pub fn string(string: &str) -> Literal { - Literal(string.into()) + Literal::_new(imp::Literal::string(string)) } pub fn character(ch: char) -> Literal { - Literal(ch.into()) + Literal::_new(imp::Literal::character(ch)) } pub fn byte_string(s: &[u8]) -> Literal { - Literal(imp::Literal::byte_string(s)) + Literal::_new(imp::Literal::byte_string(s)) } - // ======================================================================= - // Not present upstream in proc_macro yet - - pub fn byte_char(b: u8) -> Literal { - Literal(imp::Literal::byte_char(b)) + pub fn span(&self) -> Span { + Span::_new(self.inner.span()) } - pub fn doccomment(s: &str) -> Literal { - Literal(imp::Literal::doccomment(s)) + pub fn set_span(&mut self, span: Span) { + self.inner.set_span(span.inner); } +} - pub 
fn raw_string(s: &str, pounds: usize) -> Literal { - Literal(imp::Literal::raw_string(s, pounds)) +impl fmt::Debug for Literal { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } +} - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - Literal(imp::Literal::raw_byte_string(s, pounds)) +impl fmt::Display for Literal { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) } } -pub struct TokenTreeIter(imp::TokenTreeIter); +pub mod token_stream { + use std::fmt; + use std::marker; + use std::rc::Rc; + + pub use TokenStream; + use TokenTree; + use imp; + + pub struct IntoIter { + inner: imp::TokenTreeIter, + _marker: marker::PhantomData<Rc<()>>, + } + + impl Iterator for IntoIter { + type Item = TokenTree; -impl Iterator for TokenTreeIter { - type Item = TokenTree; + fn next(&mut self) -> Option<TokenTree> { + self.inner.next() + } + } + + impl fmt::Debug for IntoIter { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.inner.fmt(f) + } + } - fn next(&mut self) -> Option<TokenTree> { - self.0.next() + impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = IntoIter; + + fn into_iter(self) -> IntoIter { + IntoIter { + inner: self.inner.into_iter(), + _marker: marker::PhantomData, + } + } } } - -forward_fmt!(Debug for LexError); -forward_fmt!(Debug for Literal); -forward_fmt!(Debug for Span); -forward_fmt!(Debug for Term); -forward_fmt!(Debug for TokenTreeIter); -forward_fmt!(Debug for TokenStream); -forward_fmt!(Display for Literal); -forward_fmt!(Display for TokenStream);
--- a/third_party/rust/proc-macro2/src/stable.rs +++ b/third_party/rust/proc-macro2/src/stable.rs @@ -1,26 +1,25 @@ -use std::ascii; +#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))] + use std::borrow::Borrow; use std::cell::RefCell; #[cfg(procmacro2_semver_exempt)] use std::cmp; use std::collections::HashMap; use std::fmt; use std::iter; -use std::marker::PhantomData; use std::rc::Rc; use std::str::FromStr; use std::vec; -use proc_macro; +use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult}; use unicode_xid::UnicodeXID; -use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break}; -use {TokenTree, TokenNode, Delimiter, Spacing}; +use {Delimiter, Group, Op, Spacing, TokenTree}; #[derive(Clone, Debug)] pub struct TokenStream { inner: Vec<TokenTree>, } #[derive(Debug)] pub struct LexError; @@ -46,110 +45,109 @@ fn get_cursor(src: &str) -> Cursor { rest: src, off: span.lo, } }) } #[cfg(not(procmacro2_semver_exempt))] fn get_cursor(src: &str) -> Cursor { - Cursor { - rest: src, - } + Cursor { rest: src } } impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result<TokenStream, LexError> { // Create a dummy file & add it to the codemap let cursor = get_cursor(src); match token_stream(cursor) { Ok((input, output)) => { if skip_whitespace(input).len() != 0 { Err(LexError) } else { - Ok(output.0) + Ok(output.inner) } } Err(LexError) => Err(LexError), } } } impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut joint = false; for (i, tt) in self.inner.iter().enumerate() { if i != 0 && !joint { write!(f, " ")?; } joint = false; - match tt.kind { - TokenNode::Group(delim, ref stream) => { - let (start, end) = match delim { + match *tt { + TokenTree::Group(ref tt) => { + let (start, end) = match tt.delimiter() { Delimiter::Parenthesis => ("(", ")"), Delimiter::Brace => ("{", "}"), Delimiter::Bracket => ("[", "]"), Delimiter::None => ("", 
""), }; - if stream.0.inner.len() == 0 { + if tt.stream().inner.inner.len() == 0 { write!(f, "{} {}", start, end)? } else { - write!(f, "{} {} {}", start, stream, end)? + write!(f, "{} {} {}", start, tt.stream(), end)? } } - TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?, - TokenNode::Op(ch, ref op) => { - write!(f, "{}", ch)?; - match *op { + TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?, + TokenTree::Op(ref tt) => { + write!(f, "{}", tt.op())?; + match tt.spacing() { Spacing::Alone => {} Spacing::Joint => joint = true, } } - TokenNode::Literal(ref literal) => { - write!(f, "{}", literal)?; - // handle comments - if (literal.0).0.starts_with("/") { - write!(f, "\n")?; - } - } + TokenTree::Literal(ref tt) => write!(f, "{}", tt)?, } } Ok(()) } } -impl From<proc_macro::TokenStream> for TokenStream { - fn from(inner: proc_macro::TokenStream) -> TokenStream { - inner.to_string().parse().expect("compiler token stream parse failed") +#[cfg(feature = "proc-macro")] +impl From<::proc_macro::TokenStream> for TokenStream { + fn from(inner: ::proc_macro::TokenStream) -> TokenStream { + inner + .to_string() + .parse() + .expect("compiler token stream parse failed") } } -impl From<TokenStream> for proc_macro::TokenStream { - fn from(inner: TokenStream) -> proc_macro::TokenStream { - inner.to_string().parse().expect("failed to parse to compiler tokens") +#[cfg(feature = "proc-macro")] +impl From<TokenStream> for ::proc_macro::TokenStream { + fn from(inner: TokenStream) -> ::proc_macro::TokenStream { + inner + .to_string() + .parse() + .expect("failed to parse to compiler tokens") } } - impl From<TokenTree> for TokenStream { fn from(tree: TokenTree) -> TokenStream { TokenStream { inner: vec![tree] } } } -impl iter::FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self { +impl iter::FromIterator<TokenTree> for TokenStream { + fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self { let 
mut v = Vec::new(); - for stream in streams.into_iter() { - v.extend(stream.inner); + for token in streams.into_iter() { + v.push(token); } TokenStream { inner: v } } } pub type TokenTreeIter = vec::IntoIter<TokenTree>; @@ -234,36 +232,39 @@ struct FileInfo { name: String, span: Span, lines: Vec<usize>, } #[cfg(procmacro2_semver_exempt)] impl FileInfo { fn offset_line_column(&self, offset: usize) -> LineColumn { - assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 })); + assert!(self.span_within(Span { + lo: offset as u32, + hi: offset as u32 + })); let offset = offset - self.span.lo as usize; match self.lines.binary_search(&offset) { Ok(found) => LineColumn { line: found + 1, - column: 0 + column: 0, }, Err(idx) => LineColumn { line: idx, - column: offset - self.lines[idx - 1] + column: offset - self.lines[idx - 1], }, } } fn span_within(&self, span: Span) -> bool { span.lo >= self.span.lo && span.hi <= self.span.hi } } -/// Computes the offsets of each line in the given source string. +/// Computes the offsets of each line in the given source string. #[cfg(procmacro2_semver_exempt)] fn lines_offsets(s: &str) -> Vec<usize> { let mut lines = vec![0]; let mut prev = 0; while let Some(len) = s[prev..].find('\n') { prev += len + 1; lines.push(prev); } @@ -284,17 +285,20 @@ impl Codemap { // with a dummy file. self.files.last().unwrap().span.hi + 1 } fn add_file(&mut self, name: &str, src: &str) -> Span { let lines = lines_offsets(src); let lo = self.next_start_pos(); // XXX(nika): Shouild we bother doing a checked cast or checked add here? 
- let span = Span { lo: lo, hi: lo + (src.len() as u32) }; + let span = Span { + lo: lo, + hi: lo + (src.len() as u32), + }; self.files.push(FileInfo { name: name.to_owned(), span: span, lines: lines, }); span @@ -305,17 +309,17 @@ impl Codemap { if file.span_within(span) { return file; } } panic!("Invalid span with no related FileInfo!"); } } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Span { #[cfg(procmacro2_semver_exempt)] lo: u32, #[cfg(procmacro2_semver_exempt)] hi: u32, } impl Span { @@ -387,37 +391,81 @@ impl Span { }) }) } } #[derive(Copy, Clone)] pub struct Term { intern: usize, - not_send_sync: PhantomData<*const ()>, + span: Span, } thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new())); impl Term { - pub fn intern(string: &str) -> Term { + pub fn new(string: &str, span: Span) -> Term { + validate_term(string); + Term { intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)), - not_send_sync: PhantomData, + span: span, } } pub fn as_str(&self) -> &str { SYMBOLS.with(|interner| { let interner = interner.borrow(); let s = interner.get(self.intern); - unsafe { - &*(s as *const str) + unsafe { &*(s as *const str) } + }) + } + + pub fn span(&self) -> Span { + self.span + } + + pub fn set_span(&mut self, span: Span) { + self.span = span; + } +} + +fn validate_term(string: &str) { + let validate = if string.starts_with('\'') { + &string[1..] + } else if string.starts_with("r#") { + &string[2..] 
+ } else { + string + }; + + if validate.is_empty() { + panic!("Term is not allowed to be empty; use Option<Term>"); + } + + if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') { + panic!("Term cannot be a number; use Literal instead"); + } + + fn xid_ok(string: &str) -> bool { + let mut chars = string.chars(); + let first = chars.next().unwrap(); + if !(UnicodeXID::is_xid_start(first) || first == '_') { + return false; + } + for ch in chars { + if !UnicodeXID::is_xid_continue(ch) { + return false; } - }) + } + true + } + + if !xid_ok(validate) { + panic!("{:?} is not a valid Term", string); } } impl fmt::Debug for Term { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Term").field(&self.as_str()).finish() } } @@ -439,272 +487,297 @@ impl Borrow<str> for MyRc { impl Interner { fn new() -> Interner { Interner { string_to_index: HashMap::new(), index_to_string: Vec::new(), } } - fn intern(&mut self, s: &str) -> usize { + fn intern(&mut self, s: &str) -> usize { if let Some(&idx) = self.string_to_index.get(s) { - return idx + return idx; } let s = Rc::new(s.to_string()); self.index_to_string.push(s.clone()); - self.string_to_index.insert(MyRc(s), self.index_to_string.len() - 1); + self.string_to_index + .insert(MyRc(s), self.index_to_string.len() - 1); self.index_to_string.len() - 1 } - fn get(&self, idx: usize) -> &str { - &self.index_to_string[idx] - } + fn get(&self, idx: usize) -> &str { + &self.index_to_string[idx] + } } #[derive(Clone, Debug)] -pub struct Literal(String); +pub struct Literal { + text: String, + span: Span, +} + +macro_rules! suffixed_numbers { + ($($name:ident => $kind:ident,)*) => ($( + pub fn $name(n: $kind) -> Literal { + Literal::_new(format!(concat!("{}", stringify!($kind)), n)) + } + )*) +} + +macro_rules! 
unsuffixed_numbers { + ($($name:ident => $kind:ident,)*) => ($( + pub fn $name(n: $kind) -> Literal { + Literal::_new(n.to_string()) + } + )*) +} impl Literal { - pub fn byte_char(byte: u8) -> Literal { - match byte { - 0 => Literal(format!("b'\\0'")), - b'\"' => Literal(format!("b'\"'")), - n => { - let mut escaped = "b'".to_string(); - escaped.extend(ascii::escape_default(n).map(|c| c as char)); - escaped.push('\''); - Literal(escaped) - } + fn _new(text: String) -> Literal { + Literal { + text: text, + span: Span::call_site(), } } + suffixed_numbers! { + u8_suffixed => u8, + u16_suffixed => u16, + u32_suffixed => u32, + u64_suffixed => u64, + usize_suffixed => usize, + i8_suffixed => i8, + i16_suffixed => i16, + i32_suffixed => i32, + i64_suffixed => i64, + isize_suffixed => isize, + + f32_suffixed => f32, + f64_suffixed => f64, + } + + unsuffixed_numbers! { + u8_unsuffixed => u8, + u16_unsuffixed => u16, + u32_unsuffixed => u32, + u64_unsuffixed => u64, + usize_unsuffixed => usize, + i8_unsuffixed => i8, + i16_unsuffixed => i16, + i32_unsuffixed => i32, + i64_unsuffixed => i64, + isize_unsuffixed => isize, + } + + pub fn f32_unsuffixed(f: f32) -> Literal { + let mut s = f.to_string(); + if !s.contains(".") { + s.push_str(".0"); + } + Literal::_new(s) + } + + pub fn f64_unsuffixed(f: f64) -> Literal { + let mut s = f.to_string(); + if !s.contains(".") { + s.push_str(".0"); + } + Literal::_new(s) + } + + pub fn string(t: &str) -> Literal { + let mut s = t.chars() + .flat_map(|c| c.escape_default()) + .collect::<String>(); + s.push('"'); + s.insert(0, '"'); + Literal::_new(s) + } + + pub fn character(t: char) -> Literal { + Literal::_new(format!("'{}'", t.escape_default().collect::<String>())) + } + pub fn byte_string(bytes: &[u8]) -> Literal { let mut escaped = "b\"".to_string(); for b in bytes { match *b { b'\0' => escaped.push_str(r"\0"), b'\t' => escaped.push_str(r"\t"), b'\n' => escaped.push_str(r"\n"), b'\r' => escaped.push_str(r"\r"), b'"' => 
escaped.push_str("\\\""), b'\\' => escaped.push_str("\\\\"), - b'\x20' ... b'\x7E' => escaped.push(*b as char), + b'\x20'...b'\x7E' => escaped.push(*b as char), _ => escaped.push_str(&format!("\\x{:02X}", b)), } } escaped.push('"'); - Literal(escaped) - } - - pub fn doccomment(s: &str) -> Literal { - Literal(s.to_string()) - } - - pub fn float(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {}", n); - } - let mut s = n.to_string(); - if !s.contains('.') { - s += ".0"; - } - Literal(s) + Literal::_new(escaped) } - pub fn integer(s: i64) -> Literal { - Literal(s.to_string()) + pub fn span(&self) -> Span { + self.span } - pub fn raw_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("r"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(ret) - } - - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("br"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(ret) + pub fn set_span(&mut self, span: Span) { + self.span = span; } } impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -macro_rules! ints { - ($($t:ty,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(format!(concat!("{}", stringify!($t)), t)) - } - } - )*} -} - -ints! { - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, -} - -macro_rules! floats { - ($($t:ty,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - assert!(!t.is_nan()); - assert!(!t.is_infinite()); - Literal(format!(concat!("{}", stringify!($t)), t)) - } - } - )*} -} - -floats! 
{ - f32, f64, -} - -impl<'a> From<&'a str> for Literal { - fn from(t: &'a str) -> Literal { - let mut s = t.chars().flat_map(|c| c.escape_default()).collect::<String>(); - s.push('"'); - s.insert(0, '"'); - Literal(s) + self.text.fmt(f) } } -impl From<char> for Literal { - fn from(t: char) -> Literal { - Literal(format!("'{}'", t.escape_default().collect::<String>())) +fn token_stream(mut input: Cursor) -> PResult<::TokenStream> { + let mut trees = Vec::new(); + loop { + let input_no_ws = skip_whitespace(input); + if input_no_ws.rest.len() == 0 { + break + } + if let Ok((a, tokens)) = doc_comment(input_no_ws) { + input = a; + trees.extend(tokens); + continue + } + + let (a, tt) = match token_tree(input_no_ws) { + Ok(p) => p, + Err(_) => break, + }; + trees.push(tt); + input = a; } + Ok((input, ::TokenStream::_new(TokenStream { inner: trees }))) } -named!(token_stream -> ::TokenStream, map!( - many0!(token_tree), - |trees| ::TokenStream(TokenStream { inner: trees }) -)); - #[cfg(not(procmacro2_semver_exempt))] -fn token_tree(input: Cursor) -> PResult<TokenTree> { - let (input, kind) = token_kind(input)?; - Ok((input, TokenTree { - span: ::Span(Span {}), - kind: kind, - })) +fn spanned<'a, T>( + input: Cursor<'a>, + f: fn(Cursor<'a>) -> PResult<'a, T>, +) -> PResult<'a, (T, ::Span)> { + let (a, b) = f(skip_whitespace(input))?; + Ok((a, ((b, ::Span::_new(Span { }))))) } #[cfg(procmacro2_semver_exempt)] -fn token_tree(input: Cursor) -> PResult<TokenTree> { +fn spanned<'a, T>( + input: Cursor<'a>, + f: fn(Cursor<'a>) -> PResult<'a, T>, +) -> PResult<'a, (T, ::Span)> { let input = skip_whitespace(input); let lo = input.off; - let (input, kind) = token_kind(input)?; - let hi = input.off; - Ok((input, TokenTree { - span: ::Span(Span { - lo: lo, - hi: hi, - }), - kind: kind, - })) + let (a, b) = f(input)?; + let hi = a.off; + let span = ::Span::_new(Span { lo: lo, hi: hi }); + Ok((a, (b, span))) } -named!(token_kind -> TokenNode, alt!( - map!(delimited, |(d, s)| 
TokenNode::Group(d, s)) +fn token_tree(input: Cursor) -> PResult<TokenTree> { + let (rest, (mut tt, span)) = spanned(input, token_kind)?; + tt.set_span(span); + Ok((rest, tt)) +} + +named!(token_kind -> TokenTree, alt!( + map!(group, TokenTree::Group) | - map!(literal, TokenNode::Literal) // must be before symbol + map!(literal, TokenTree::Literal) // must be before symbol | symbol | - map!(op, |(op, kind)| TokenNode::Op(op, kind)) + map!(op, TokenTree::Op) )); -named!(delimited -> (Delimiter, ::TokenStream), alt!( +named!(group -> Group, alt!( delimited!( punct!("("), token_stream, punct!(")") - ) => { |ts| (Delimiter::Parenthesis, ts) } + ) => { |ts| Group::new(Delimiter::Parenthesis, ts) } | delimited!( punct!("["), token_stream, punct!("]") - ) => { |ts| (Delimiter::Bracket, ts) } + ) => { |ts| Group::new(Delimiter::Bracket, ts) } | delimited!( punct!("{"), token_stream, punct!("}") - ) => { |ts| (Delimiter::Brace, ts) } + ) => { |ts| Group::new(Delimiter::Brace, ts) } )); -fn symbol(mut input: Cursor) -> PResult<TokenNode> { +fn symbol(mut input: Cursor) -> PResult<TokenTree> { input = skip_whitespace(input); let mut chars = input.char_indices(); let lifetime = input.starts_with("'"); if lifetime { chars.next(); } + let raw = !lifetime && input.starts_with("r#"); + if raw { + chars.next(); + chars.next(); + } + match chars.next() { Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {} _ => return Err(LexError), } let mut end = input.len(); for (i, ch) in chars { if !UnicodeXID::is_xid_continue(ch) { end = i; break; } } - if lifetime && &input.rest[..end] != "'static" && KEYWORDS.contains(&&input.rest[1..end]) { + let a = &input.rest[..end]; + if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) { Err(LexError) + } else if a == "_" { + Ok((input.advance(end), Op::new('_', Spacing::Alone).into())) } else { - let a = &input.rest[..end]; - if a == "_" { - Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone))) - } else { - 
Ok((input.advance(end), TokenNode::Term(::Term::intern(a)))) - } + Ok(( + input.advance(end), + ::Term::new(a, ::Span::call_site()).into(), + )) } } // From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs static KEYWORDS: &'static [&'static str] = &[ - "abstract", "alignof", "as", "become", "box", "break", "const", "continue", - "crate", "do", "else", "enum", "extern", "false", "final", "fn", "for", - "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", - "offsetof", "override", "priv", "proc", "pub", "pure", "ref", "return", - "self", "Self", "sizeof", "static", "struct", "super", "trait", "true", - "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", - "yield", + "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do", + "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", + "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure", + "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true", + "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield", ]; fn literal(input: Cursor) -> PResult<::Literal> { let input_no_ws = skip_whitespace(input); match literal_nocapture(input_no_ws) { Ok((a, ())) => { let start = input.len() - input_no_ws.len(); let len = input_no_ws.len() - a.len(); let end = start + len; - Ok((a, ::Literal(Literal(input.rest[start..end].to_string())))) + Ok(( + a, + ::Literal::_new(Literal::_new(input.rest[start..end].to_string())), + )) } Err(LexError) => Err(LexError), } } named!(literal_nocapture -> (), alt!( string | @@ -712,18 +785,16 @@ named!(literal_nocapture -> (), alt!( | byte | character | float | int - | - doc_comment )); named!(string -> (), alt!( quoted_string | preceded!( punct!("r"), raw_string @@ -745,47 +816,40 @@ fn cooked_string(input: Cursor) -> PResu } '\r' => { if let Some((_, '\n')) = 
chars.next() { // ... } else { break; } } - '\\' => { - match chars.next() { - Some((_, 'x')) => { - if !backslash_x_char(&mut chars) { - break + '\\' => match chars.next() { + Some((_, 'x')) => { + if !backslash_x_char(&mut chars) { + break; + } + } + Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\')) + | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {} + Some((_, 'u')) => { + if !backslash_u(&mut chars) { + break; + } + } + Some((_, '\n')) | Some((_, '\r')) => { + while let Some(&(_, ch)) = chars.peek() { + if ch.is_whitespace() { + chars.next(); + } else { + break; } } - Some((_, 'n')) | - Some((_, 'r')) | - Some((_, 't')) | - Some((_, '\\')) | - Some((_, '\'')) | - Some((_, '"')) | - Some((_, '0')) => {} - Some((_, 'u')) => { - if !backslash_u(&mut chars) { - break - } - } - Some((_, '\n')) | Some((_, '\r')) => { - while let Some(&(_, ch)) = chars.peek() { - if ch.is_whitespace() { - chars.next(); - } else { - break; - } - } - } - _ => break, } - } + _ => break, + }, _ch => {} } } Err(LexError) } named!(byte_string -> (), alt!( delimited!( @@ -809,45 +873,37 @@ fn cooked_byte_string(mut input: Cursor) } b'\r' => { if let Some((_, b'\n')) = bytes.next() { // ... 
} else { break; } } - b'\\' => { - match bytes.next() { - Some((_, b'x')) => { - if !backslash_x_byte(&mut bytes) { - break + b'\\' => match bytes.next() { + Some((_, b'x')) => { + if !backslash_x_byte(&mut bytes) { + break; + } + } + Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\')) + | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {} + Some((newline, b'\n')) | Some((newline, b'\r')) => { + let rest = input.advance(newline + 1); + for (offset, ch) in rest.char_indices() { + if !ch.is_whitespace() { + input = rest.advance(offset); + bytes = input.bytes().enumerate(); + continue 'outer; } } - Some((_, b'n')) | - Some((_, b'r')) | - Some((_, b't')) | - Some((_, b'\\')) | - Some((_, b'0')) | - Some((_, b'\'')) | - Some((_, b'"')) => {} - Some((newline, b'\n')) | - Some((newline, b'\r')) => { - let rest = input.advance(newline + 1); - for (offset, ch) in rest.char_indices() { - if !ch.is_whitespace() { - input = rest.advance(offset); - bytes = input.bytes().enumerate(); - continue 'outer; - } - } - break; - } - _ => break, + break; } - } + _ => break, + }, b if b < 0x80 => {} _ => break, } } Err(LexError) } fn raw_string(input: Cursor) -> PResult<()> { @@ -862,17 +918,17 @@ fn raw_string(input: Cursor) -> PResult< '#' => {} _ => return Err(LexError), } } for (byte_offset, ch) in chars { match ch { '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => { let rest = input.advance(byte_offset + 1 + n); - return Ok((rest, ())) + return Ok((rest, ())); } '\r' => {} _ => {} } } Err(LexError) } @@ -882,29 +938,22 @@ named!(byte -> (), do_parse!( cooked_byte >> tag!("'") >> (()) )); fn cooked_byte(input: Cursor) -> PResult<()> { let mut bytes = input.bytes().enumerate(); let ok = match bytes.next().map(|(_, b)| b) { - Some(b'\\') => { - match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes), - Some(b'n') | - Some(b'r') | - Some(b't') | - Some(b'\\') | - Some(b'0') | - Some(b'\'') | - Some(b'"') => true, 
- _ => false, - } - } + Some(b'\\') => match bytes.next().map(|(_, b)| b) { + Some(b'x') => backslash_x_byte(&mut bytes), + Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'') + | Some(b'"') => true, + _ => false, + }, b => b.is_some(), }; if ok { match bytes.next() { Some((offset, _)) => { if input.chars().as_str().is_char_boundary(offset) { Ok((input.advance(offset), ())) } else { @@ -923,30 +972,24 @@ named!(character -> (), do_parse!( cooked_char >> tag!("'") >> (()) )); fn cooked_char(input: Cursor) -> PResult<()> { let mut chars = input.char_indices(); let ok = match chars.next().map(|(_, ch)| ch) { - Some('\\') => { - match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars), - Some('u') => backslash_u(&mut chars), - Some('n') | - Some('r') | - Some('t') | - Some('\\') | - Some('0') | - Some('\'') | - Some('"') => true, - _ => false, + Some('\\') => match chars.next().map(|(_, ch)| ch) { + Some('x') => backslash_x_char(&mut chars), + Some('u') => backslash_u(&mut chars), + Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => { + true } - } + _ => false, + }, ch => ch.is_some(), }; if ok { match chars.next() { Some((idx, _)) => Ok((input.advance(idx), ())), None => Ok((input.advance(input.len()), ())), } } else { @@ -962,33 +1005,36 @@ macro_rules! 
next_ch { _ => return false, }, None => return false } }; } fn backslash_x_char<I>(chars: &mut I) -> bool - where I: Iterator<Item = (usize, char)> +where + I: Iterator<Item = (usize, char)>, { next_ch!(chars @ '0'...'7'); next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F'); true } fn backslash_x_byte<I>(chars: &mut I) -> bool - where I: Iterator<Item = (usize, u8)> +where + I: Iterator<Item = (usize, u8)>, { next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F'); next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F'); true } fn backslash_u<I>(chars: &mut I) -> bool - where I: Iterator<Item = (usize, char)> +where + I: Iterator<Item = (usize, char)>, { next_ch!(chars @ '{'); next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F'); loop { let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}'); if c == '}' { return true; } @@ -1021,19 +1067,21 @@ fn float_digits(input: Cursor) -> PResul chars.next(); len += 1; } '.' => { if has_dot { break; } chars.next(); - if chars.peek() - .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch)) - .unwrap_or(false) { + if chars + .peek() + .map(|&ch| ch == '.' 
|| UnicodeXID::is_xid_start(ch)) + .unwrap_or(false) + { return Err(LexError); } len += 1; has_dot = true; } 'e' | 'E' => { chars.next(); len += 1; @@ -1078,28 +1126,17 @@ fn float_digits(input: Cursor) -> PResul } Ok((input.advance(len), ())) } fn int(input: Cursor) -> PResult<()> { let (rest, ()) = digits(input)?; for suffix in &[ - "isize", - "i8", - "i16", - "i32", - "i64", - "i128", - "usize", - "u8", - "u16", - "u32", - "u64", - "u128", + "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128" ] { if rest.starts_with(suffix) { return word_break(rest.advance(suffix.len())); } } word_break(rest) } @@ -1141,25 +1178,25 @@ fn digits(mut input: Cursor) -> PResult< } if empty { Err(LexError) } else { Ok((input.advance(len), ())) } } -fn op(input: Cursor) -> PResult<(char, Spacing)> { +fn op(input: Cursor) -> PResult<Op> { let input = skip_whitespace(input); match op_char(input) { Ok((rest, ch)) => { let kind = match op_char(rest) { Ok(_) => Spacing::Joint, Err(LexError) => Spacing::Alone, }; - Ok((rest, (ch, kind))) + Ok((rest, Op::new(ch, kind))) } Err(LexError) => Err(LexError), } } fn op_char(input: Cursor) -> PResult<char> { let mut chars = input.chars(); let first = match chars.next() { @@ -1171,36 +1208,58 @@ fn op_char(input: Cursor) -> PResult<cha let recognized = "~!@#$%^&*-=+|;:,<.>/?"; if recognized.contains(first) { Ok((input.advance(first.len_utf8()), first)) } else { Err(LexError) } } -named!(doc_comment -> (), alt!( +fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> { + let mut trees = Vec::new(); + let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?; + trees.push(TokenTree::Op(Op::new('#', Spacing::Alone))); + if inner { + trees.push(Op::new('!', Spacing::Alone).into()); + } + let mut stream = vec![ + TokenTree::Term(::Term::new("doc", span)), + TokenTree::Op(Op::new('=', Spacing::Alone)), + TokenTree::Literal(::Literal::string(comment)), + ]; + for tt in stream.iter_mut() { + 
tt.set_span(span); + } + trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into()); + for tt in trees.iter_mut() { + tt.set_span(span); + } + Ok((rest, trees)) +} + +named!(doc_comment_contents -> (&str, bool), alt!( do_parse!( punct!("//!") >> - take_until!("\n") >> - (()) + s: take_until_newline_or_eof!() >> + ((s, true)) ) | do_parse!( option!(whitespace) >> peek!(tag!("/*!")) >> - block_comment >> - (()) + s: block_comment >> + ((s, true)) ) | do_parse!( punct!("///") >> not!(tag!("/")) >> - take_until!("\n") >> - (()) + s: take_until_newline_or_eof!() >> + ((s, false)) ) | do_parse!( option!(whitespace) >> peek!(tuple!(tag!("/**"), not!(tag!("*")))) >> - block_comment >> - (()) + s: block_comment >> + ((s, false)) ) ));
--- a/third_party/rust/proc-macro2/src/strnom.rs +++ b/third_party/rust/proc-macro2/src/strnom.rs @@ -1,11 +1,11 @@ //! Adapted from [`nom`](https://github.com/Geal/nom). -use std::str::{Chars, CharIndices, Bytes}; +use std::str::{Bytes, CharIndices, Chars}; use unicode_xid::UnicodeXID; use imp::LexError; #[derive(Copy, Clone, Eq, PartialEq)] pub struct Cursor<'a> { pub rest: &'a str, @@ -68,28 +68,30 @@ pub fn whitespace(input: Cursor) -> PRes return Err(LexError); } let bytes = input.as_bytes(); let mut i = 0; while i < bytes.len() { let s = input.advance(i); if bytes[i] == b'/' { - if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) && - !s.starts_with("//!") { + if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) + && !s.starts_with("//!") + { if let Some(len) = s.find('\n') { i += len + 1; continue; } break; } else if s.starts_with("/**/") { i += 4; - continue - } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) && - !s.starts_with("/*!") { + continue; + } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) + && !s.starts_with("/*!") + { let (_, com) = block_comment(s)?; i += com.len(); continue; } } match bytes[i] { b' ' | 0x09...0x0d => { i += 1; @@ -99,21 +101,17 @@ pub fn whitespace(input: Cursor) -> PRes _ => { let ch = s.chars().next().unwrap(); if is_whitespace(ch) { i += ch.len_utf8(); continue; } } } - return if i > 0 { - Ok((s, ())) - } else { - Err(LexError) - }; + return if i > 0 { Ok((s, ())) } else { Err(LexError) }; } Ok((input.advance(input.len()), ())) } pub fn block_comment(input: Cursor) -> PResult<&str> { if !input.starts_with("/*") { return Err(LexError); } @@ -258,44 +256,24 @@ macro_rules! option { ($i:expr, $f:expr) => { match $f($i) { Ok((i, o)) => Ok((i, Some(o))), Err(LexError) => Ok(($i, None)), } }; } -macro_rules! take_until { - ($i:expr, $substr:expr) => {{ - if $substr.len() > $i.len() { - Err(LexError) +macro_rules! 
take_until_newline_or_eof { + ($i:expr,) => {{ + if $i.len() == 0 { + Ok(($i, "")) } else { - let substr_vec: Vec<char> = $substr.chars().collect(); - let mut window: Vec<char> = vec![]; - let mut offset = $i.len(); - let mut parsed = false; - for (o, c) in $i.char_indices() { - window.push(c); - if window.len() > substr_vec.len() { - window.remove(0); - } - if window == substr_vec { - parsed = true; - window.pop(); - let window_len: usize = window.iter() - .map(|x| x.len_utf8()) - .fold(0, |x, y| x + y); - offset = o - window_len; - break; - } - } - if parsed { - Ok(($i.advance(offset), &$i.rest[..offset])) - } else { - Err(LexError) + match $i.find('\n') { + Some(i) => Ok(($i.advance(i), &$i.rest[..i])), + None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])), } } }}; } macro_rules! tuple { ($i:expr, $($rest:tt)*) => { tuple_parser!($i, (), $($rest)*) @@ -406,42 +384,8 @@ macro_rules! map { Ok((i, o)) => Ok((i, call!(o, $g))) } }; ($i:expr, $f:expr, $g:expr) => { map!($i, call!($f), $g) }; } - -macro_rules! many0 { - ($i:expr, $f:expr) => {{ - let ret; - let mut res = ::std::vec::Vec::new(); - let mut input = $i; - - loop { - if input.is_empty() { - ret = Ok((input, res)); - break; - } - - match $f(input) { - Err(LexError) => { - ret = Ok((input, res)); - break; - } - Ok((i, o)) => { - // loop trip must always consume (otherwise infinite loops) - if i.len() == input.len() { - ret = Err(LexError); - break; - } - - res.push(o); - input = i; - } - } - } - - ret - }}; -}
--- a/third_party/rust/proc-macro2/src/unstable.rs +++ b/third_party/rust/proc-macro2/src/unstable.rs @@ -1,16 +1,17 @@ -use std::ascii; +#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))] + use std::fmt; use std::iter; use std::str::FromStr; use proc_macro; -use {TokenTree, TokenNode, Delimiter, Spacing}; +use {Delimiter, Group, Op, Spacing, TokenTree}; #[derive(Clone)] pub struct TokenStream(proc_macro::TokenStream); pub struct LexError(proc_macro::LexError); impl TokenStream { pub fn empty() -> TokenStream { @@ -44,185 +45,181 @@ impl From<proc_macro::TokenStream> for T impl From<TokenStream> for proc_macro::TokenStream { fn from(inner: TokenStream) -> proc_macro::TokenStream { inner.0 } } impl From<TokenTree> for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(proc_macro::TokenTree { - span: (tree.span.0).0, - kind: match tree.kind { - TokenNode::Group(delim, s) => { - let delim = match delim { - Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, - Delimiter::Bracket => proc_macro::Delimiter::Bracket, - Delimiter::Brace => proc_macro::Delimiter::Brace, - Delimiter::None => proc_macro::Delimiter::None, - }; - proc_macro::TokenNode::Group(delim, (s.0).0) - } - TokenNode::Op(ch, kind) => { - let kind = match kind { - Spacing::Joint => proc_macro::Spacing::Joint, - Spacing::Alone => proc_macro::Spacing::Alone, - }; - proc_macro::TokenNode::Op(ch, kind) - } - TokenNode::Term(s) => { - proc_macro::TokenNode::Term((s.0).0) - } - TokenNode::Literal(l) => { - proc_macro::TokenNode::Literal((l.0).0) - } - }, - }.into()) + fn from(token: TokenTree) -> TokenStream { + let tt: proc_macro::TokenTree = match token { + TokenTree::Group(tt) => { + let delim = match tt.delimiter() { + Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, + Delimiter::Bracket => proc_macro::Delimiter::Bracket, + Delimiter::Brace => proc_macro::Delimiter::Brace, + Delimiter::None => proc_macro::Delimiter::None, + }; + let span = tt.span(); + 
let mut group = proc_macro::Group::new(delim, tt.stream.inner.0); + group.set_span(span.inner.0); + group.into() + } + TokenTree::Op(tt) => { + let spacing = match tt.spacing() { + Spacing::Joint => proc_macro::Spacing::Joint, + Spacing::Alone => proc_macro::Spacing::Alone, + }; + let mut op = proc_macro::Op::new(tt.op(), spacing); + op.set_span(tt.span().inner.0); + op.into() + } + TokenTree::Term(tt) => tt.inner.term.into(), + TokenTree::Literal(tt) => tt.inner.lit.into(), + }; + TokenStream(tt.into()) } } -impl iter::FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self { - let streams = streams.into_iter().map(|s| s.0); +impl iter::FromIterator<TokenTree> for TokenStream { + fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self { + let streams = streams.into_iter().map(TokenStream::from) + .flat_map(|t| t.0); TokenStream(streams.collect::<proc_macro::TokenStream>()) } } impl fmt::Debug for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } impl fmt::Debug for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } -pub struct TokenTreeIter(proc_macro::TokenTreeIter); +pub struct TokenTreeIter(proc_macro::token_stream::IntoIter); impl IntoIterator for TokenStream { type Item = TokenTree; type IntoIter = TokenTreeIter; fn into_iter(self) -> TokenTreeIter { TokenTreeIter(self.0.into_iter()) } } impl Iterator for TokenTreeIter { type Item = TokenTree; fn next(&mut self) -> Option<TokenTree> { - let token = match self.0.next() { - Some(n) => n, - None => return None, - }; - Some(TokenTree { - span: ::Span(Span(token.span)), - kind: match token.kind { - proc_macro::TokenNode::Group(delim, s) => { - let delim = match delim { - proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, - proc_macro::Delimiter::Bracket => Delimiter::Bracket, - proc_macro::Delimiter::Brace => Delimiter::Brace, - proc_macro::Delimiter::None => 
Delimiter::None, - }; - TokenNode::Group(delim, ::TokenStream(TokenStream(s))) - } - proc_macro::TokenNode::Op(ch, kind) => { - let kind = match kind { - proc_macro::Spacing::Joint => Spacing::Joint, - proc_macro::Spacing::Alone => Spacing::Alone, - }; - TokenNode::Op(ch, kind) - } - proc_macro::TokenNode::Term(s) => { - TokenNode::Term(::Term(Term(s))) - } - proc_macro::TokenNode::Literal(l) => { - TokenNode::Literal(::Literal(Literal(l))) - } - }, + let token = self.0.next()?; + Some(match token { + proc_macro::TokenTree::Group(tt) => { + let delim = match tt.delimiter() { + proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, + proc_macro::Delimiter::Bracket => Delimiter::Bracket, + proc_macro::Delimiter::Brace => Delimiter::Brace, + proc_macro::Delimiter::None => Delimiter::None, + }; + let stream = ::TokenStream::_new(TokenStream(tt.stream())); + let mut g = Group::new(delim, stream); + g.set_span(::Span::_new(Span(tt.span()))); + g.into() + } + proc_macro::TokenTree::Op(tt) => { + let spacing = match tt.spacing() { + proc_macro::Spacing::Joint => Spacing::Joint, + proc_macro::Spacing::Alone => Spacing::Alone, + }; + let mut o = Op::new(tt.op(), spacing); + o.set_span(::Span::_new(Span(tt.span()))); + o.into() + } + proc_macro::TokenTree::Term(s) => { + ::Term::_new(Term { + term: s, + }).into() + } + proc_macro::TokenTree::Literal(l) => { + ::Literal::_new(Literal { + lit: l, + }).into() + } }) } fn size_hint(&self) -> (usize, Option<usize>) { self.0.size_hint() } } impl fmt::Debug for TokenTreeIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("TokenTreeIter").finish() } } -#[cfg(procmacro2_semver_exempt)] #[derive(Clone, PartialEq, Eq)] pub struct FileName(String); -#[cfg(procmacro2_semver_exempt)] impl fmt::Display for FileName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } // NOTE: We have to generate our own filename object here because we can't wrap // the one provided by proc_macro. 
-#[cfg(procmacro2_semver_exempt)] #[derive(Clone, PartialEq, Eq)] pub struct SourceFile(proc_macro::SourceFile, FileName); -#[cfg(procmacro2_semver_exempt)] impl SourceFile { fn new(sf: proc_macro::SourceFile) -> Self { let filename = FileName(sf.path().to_string()); SourceFile(sf, filename) } /// Get the path to this source file as a string. pub fn path(&self) -> &FileName { &self.1 } pub fn is_real(&self) -> bool { self.0.is_real() } } -#[cfg(procmacro2_semver_exempt)] impl AsRef<FileName> for SourceFile { fn as_ref(&self) -> &FileName { self.path() } } -#[cfg(procmacro2_semver_exempt)] impl fmt::Debug for SourceFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } -#[cfg(procmacro2_semver_exempt)] pub struct LineColumn { pub line: usize, pub column: usize, } #[derive(Copy, Clone)] pub struct Span(proc_macro::Span); impl From<proc_macro::Span> for ::Span { fn from(proc_span: proc_macro::Span) -> ::Span { - ::Span(Span(proc_span)) + ::Span::_new(Span(proc_span)) } } impl Span { pub fn call_site() -> Span { Span(proc_macro::Span::call_site()) } @@ -237,170 +234,166 @@ impl Span { pub fn located_at(&self, other: Span) -> Span { Span(self.0.located_at(other.0)) } pub fn unstable(self) -> proc_macro::Span { self.0 } - #[cfg(procmacro2_semver_exempt)] pub fn source_file(&self) -> SourceFile { SourceFile::new(self.0.source_file()) } - #[cfg(procmacro2_semver_exempt)] pub fn start(&self) -> LineColumn { - let proc_macro::LineColumn{ line, column } = self.0.start(); + let proc_macro::LineColumn { line, column } = self.0.start(); LineColumn { line, column } } - #[cfg(procmacro2_semver_exempt)] pub fn end(&self) -> LineColumn { - let proc_macro::LineColumn{ line, column } = self.0.end(); + let proc_macro::LineColumn { line, column } = self.0.end(); LineColumn { line, column } } - #[cfg(procmacro2_semver_exempt)] pub fn join(&self, other: Span) -> Option<Span> { self.0.join(other.0).map(Span) } + + pub fn eq(&self, other: &Span) -> bool { + 
self.0.eq(&other.0) + } } impl fmt::Debug for Span { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } #[derive(Copy, Clone)] -pub struct Term(proc_macro::Term); +pub struct Term { + term: proc_macro::Term, +} impl Term { - pub fn intern(string: &str) -> Term { - Term(proc_macro::Term::intern(string)) + pub fn new(string: &str, span: Span) -> Term { + Term { + term: proc_macro::Term::new(string, span.0), + } } pub fn as_str(&self) -> &str { - self.0.as_str() + self.term.as_str() + } + + pub fn span(&self) -> Span { + Span(self.term.span()) + } + + pub fn set_span(&mut self, span: Span) { + self.term.set_span(span.0); } } impl fmt::Debug for Term { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) + self.term.fmt(f) } } #[derive(Clone)] -pub struct Literal(proc_macro::Literal); +pub struct Literal { + lit: proc_macro::Literal, +} + +macro_rules! suffixed_numbers { + ($($name:ident => $kind:ident,)*) => ($( + pub fn $name(n: $kind) -> Literal { + Literal::_new(proc_macro::Literal::$name(n)) + } + )*) +} + +macro_rules! unsuffixed_integers { + ($($name:ident => $kind:ident,)*) => ($( + pub fn $name(n: $kind) -> Literal { + Literal::_new(proc_macro::Literal::$name(n)) + } + )*) +} impl Literal { - pub fn byte_char(byte: u8) -> Literal { - match byte { - 0 => Literal(to_literal("b'\\0'")), - b'\"' => Literal(to_literal("b'\"'")), - n => { - let mut escaped = "b'".to_string(); - escaped.extend(ascii::escape_default(n).map(|c| c as char)); - escaped.push('\''); - Literal(to_literal(&escaped)) - } + fn _new(lit: proc_macro::Literal) -> Literal { + Literal { + lit, } } + suffixed_numbers! { + u8_suffixed => u8, + u16_suffixed => u16, + u32_suffixed => u32, + u64_suffixed => u64, + usize_suffixed => usize, + i8_suffixed => i8, + i16_suffixed => i16, + i32_suffixed => i32, + i64_suffixed => i64, + isize_suffixed => isize, + + f32_suffixed => f32, + f64_suffixed => f64, + } + + unsuffixed_integers! 
{ + u8_unsuffixed => u8, + u16_unsuffixed => u16, + u32_unsuffixed => u32, + u64_unsuffixed => u64, + usize_unsuffixed => usize, + i8_unsuffixed => i8, + i16_unsuffixed => i16, + i32_unsuffixed => i32, + i64_unsuffixed => i64, + isize_unsuffixed => isize, + } + + pub fn f32_unsuffixed(f: f32) -> Literal { + Literal::_new(proc_macro::Literal::f32_unsuffixed(f)) + } + + pub fn f64_unsuffixed(f: f64) -> Literal { + Literal::_new(proc_macro::Literal::f64_unsuffixed(f)) + } + + + pub fn string(t: &str) -> Literal { + Literal::_new(proc_macro::Literal::string(t)) + } + + pub fn character(t: char) -> Literal { + Literal::_new(proc_macro::Literal::character(t)) + } + pub fn byte_string(bytes: &[u8]) -> Literal { - Literal(proc_macro::Literal::byte_string(bytes)) - } - - pub fn doccomment(s: &str) -> Literal { - Literal(to_literal(s)) - } - - pub fn float(s: f64) -> Literal { - Literal(proc_macro::Literal::float(s)) - } - - pub fn integer(s: i64) -> Literal { - Literal(proc_macro::Literal::integer(s.into())) + Literal::_new(proc_macro::Literal::byte_string(bytes)) } - pub fn raw_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("r"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(to_literal(&ret)) + pub fn span(&self) -> Span { + Span(self.lit.span()) } - pub fn raw_byte_string(s: &str, pounds: usize) -> Literal { - let mut ret = format!("br"); - ret.extend((0..pounds).map(|_| "#")); - ret.push('"'); - ret.push_str(s); - ret.push('"'); - ret.extend((0..pounds).map(|_| "#")); - Literal(to_literal(&ret)) + pub fn set_span(&mut self, span: Span) { + self.lit.set_span(span.0); } } impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) + self.lit.fmt(f) } } impl fmt::Debug for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -fn to_literal(s: &str) -> proc_macro::Literal { - let 
stream = s.parse::<proc_macro::TokenStream>().unwrap(); - match stream.into_iter().next().unwrap().kind { - proc_macro::TokenNode::Literal(l) => l, - _ => unreachable!(), + self.lit.fmt(f) } } - -macro_rules! ints { - ($($t:ident,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(proc_macro::Literal::$t(t)) - } - } - )*} -} - -ints! { - u8, u16, u32, u64, usize, - i8, i16, i32, i64, isize, -} - -macro_rules! floats { - ($($t:ident,)*) => {$( - impl From<$t> for Literal { - fn from(t: $t) -> Literal { - Literal(proc_macro::Literal::$t(t)) - } - } - )*} -} - -floats! { - f32, f64, -} - -impl<'a> From<&'a str> for Literal { - fn from(t: &'a str) -> Literal { - Literal(proc_macro::Literal::string(t)) - } -} - -impl From<char> for Literal { - fn from(t: char) -> Literal { - Literal(proc_macro::Literal::character(t)) - } -}
--- a/third_party/rust/proc-macro2/tests/test.rs +++ b/third_party/rust/proc-macro2/tests/test.rs @@ -1,65 +1,125 @@ extern crate proc_macro2; -use std::str; +use std::str::{self, FromStr}; + +use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree}; -use proc_macro2::{Term, Literal, TokenStream}; +#[test] +fn terms() { + assert_eq!(Term::new("String", Span::call_site()).as_str(), "String"); + assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn"); + assert_eq!(Term::new("_", Span::call_site()).as_str(), "_"); +} + +#[test] +fn raw_terms() { + assert_eq!(Term::new("r#String", Span::call_site()).as_str(), "r#String"); + assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn"); + assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_"); +} -#[cfg(procmacro2_semver_exempt)] -use proc_macro2::TokenNode; +#[test] +fn lifetimes() { + assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a"); + assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static"); + assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_"); +} -#[cfg(procmacro2_semver_exempt)] -#[cfg(not(feature = "nightly"))] -use proc_macro2::Span; +#[test] +#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")] +fn term_empty() { + Term::new("", Span::call_site()); +} + +#[test] +#[should_panic(expected = "Term cannot be a number; use Literal instead")] +fn term_number() { + Term::new("255", Span::call_site()); +} #[test] -fn symbols() { - assert_eq!(Term::intern("foo").as_str(), "foo"); - assert_eq!(Term::intern("bar").as_str(), "bar"); +#[should_panic(expected = "\"a#\" is not a valid Term")] +fn term_invalid() { + Term::new("a#", Span::call_site()); +} + +#[test] +#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")] +fn raw_term_empty() { + Term::new("r#", Span::call_site()); +} + +#[test] +#[should_panic(expected = "Term cannot be a number; use Literal instead")] +fn raw_term_number() { + Term::new("r#255", 
Span::call_site()); +} + +#[test] +#[should_panic(expected = "\"r#a#\" is not a valid Term")] +fn raw_term_invalid() { + Term::new("r#a#", Span::call_site()); +} + +#[test] +#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")] +fn lifetime_empty() { + Term::new("'", Span::call_site()); +} + +#[test] +#[should_panic(expected = "Term cannot be a number; use Literal instead")] +fn lifetime_number() { + Term::new("'255", Span::call_site()); +} + +#[test] +#[should_panic(expected = r#""\'a#" is not a valid Term"#)] +fn lifetime_invalid() { + Term::new("'a#", Span::call_site()); } #[test] fn literals() { assert_eq!(Literal::string("foo").to_string(), "\"foo\""); assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); - assert_eq!(Literal::float(10.0).to_string(), "10.0"); + assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0"); } #[test] fn roundtrip() { fn roundtrip(p: &str) { println!("parse: {}", p); let s = p.parse::<TokenStream>().unwrap().to_string(); println!("first: {}", s); let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string(); assert_eq!(s, s2); } roundtrip("a"); roundtrip("<<"); roundtrip("<<="); - roundtrip(" - /// a - wut - "); - roundtrip(" + roundtrip( + " 1 1.0 1f32 2f64 1usize 4isize 4e10 1_000 1_0i32 8u8 9 0 0xffffffffffffffffffffffffffffffff - "); + ", + ); roundtrip("'a"); roundtrip("'static"); roundtrip("'\\u{10__FFFF}'"); roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); } #[test] fn fail() { @@ -68,64 +128,73 @@ fn fail() { panic!("should have failed to parse: {}", p); } } fail("1x"); fail("1u80"); fail("1f320"); fail("' static"); fail("'mut"); + fail("r#1"); + fail("r#_"); } #[cfg(procmacro2_semver_exempt)] #[test] fn span_test() { + use proc_macro2::TokenTree; + fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { let ts = p.parse::<TokenStream>().unwrap(); check_spans_internal(ts, &mut lines); } - fn check_spans_internal( - ts: TokenStream, - lines: &mut &[(usize, usize, usize, 
usize)], - ) { + fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) { for i in ts { if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { *lines = rest; - let start = i.span.start(); + let start = i.span().start(); assert_eq!(start.line, sline, "sline did not match for {}", i); assert_eq!(start.column, scol, "scol did not match for {}", i); - let end = i.span.end(); + let end = i.span().end(); assert_eq!(end.line, eline, "eline did not match for {}", i); assert_eq!(end.column, ecol, "ecol did not match for {}", i); - match i.kind { - TokenNode::Group(_, stream) => - check_spans_internal(stream, lines), + match i { + TokenTree::Group(ref g) => { + check_spans_internal(g.stream().clone(), lines); + } _ => {} } } } } - check_spans("\ + check_spans( + "\ /// This is a document comment testing 123 { testing 234 -}", &[ - (1, 0, 1, 30), - (2, 0, 2, 7), - (2, 8, 2, 11), - (3, 0, 5, 1), - (4, 2, 4, 9), - (4, 10, 4, 13), -]); +}", + &[ + (1, 0, 1, 30), // # + (1, 0, 1, 30), // [ ... ] + (1, 0, 1, 30), // doc + (1, 0, 1, 30), // = + (1, 0, 1, 30), // "This is..." + (2, 0, 2, 7), // testing + (2, 8, 2, 11), // 123 + (3, 0, 5, 1), // { ... 
} + (4, 2, 4, 9), // testing + (4, 10, 4, 13), // 234 + ], + ); } #[cfg(procmacro2_semver_exempt)] #[cfg(not(feature = "nightly"))] #[test] fn default_span() { let start = Span::call_site().start(); assert_eq!(start.line, 1); @@ -136,44 +205,100 @@ fn default_span() { let source_file = Span::call_site().source_file(); assert_eq!(source_file.path().to_string(), "<unspecified>"); assert!(!source_file.is_real()); } #[cfg(procmacro2_semver_exempt)] #[test] fn span_join() { - let source1 = - "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>(); - let source2 = - "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>(); + let source1 = "aaa\nbbb" + .parse::<TokenStream>() + .unwrap() + .into_iter() + .collect::<Vec<_>>(); + let source2 = "ccc\nddd" + .parse::<TokenStream>() + .unwrap() + .into_iter() + .collect::<Vec<_>>(); - assert!(source1[0].span.source_file() != source2[0].span.source_file()); - assert_eq!(source1[0].span.source_file(), source1[1].span.source_file()); + assert!(source1[0].span().source_file() != source2[0].span().source_file()); + assert_eq!( + source1[0].span().source_file(), + source1[1].span().source_file() + ); - let joined1 = source1[0].span.join(source1[1].span); - let joined2 = source1[0].span.join(source2[0].span); + let joined1 = source1[0].span().join(source1[1].span()); + let joined2 = source1[0].span().join(source2[0].span()); assert!(joined1.is_some()); assert!(joined2.is_none()); let start = joined1.unwrap().start(); let end = joined1.unwrap().end(); assert_eq!(start.line, 1); assert_eq!(start.column, 0); assert_eq!(end.line, 2); assert_eq!(end.column, 3); - assert_eq!(joined1.unwrap().source_file(), source1[0].span.source_file()); + assert_eq!( + joined1.unwrap().source_file(), + source1[0].span().source_file() + ); } #[test] fn no_panic() { let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); assert!(s.parse::<proc_macro2::TokenStream>().is_err()); } #[test] -fn tricky_doc_commaent() 
{ +fn tricky_doc_comment() { let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap(); let tokens = stream.into_iter().collect::<Vec<_>>(); assert!(tokens.is_empty(), "not empty -- {:?}", tokens); + + let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap(); + let tokens = stream.into_iter().collect::<Vec<_>>(); + assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens); + match tokens[0] { + proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'), + _ => panic!("wrong token {:?}", tokens[0]), + } + let mut tokens = match tokens[1] { + proc_macro2::TokenTree::Group(ref tt) => { + assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket); + tt.stream().into_iter() + } + _ => panic!("wrong token {:?}", tokens[0]), + }; + + match tokens.next().unwrap() { + proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"), + t => panic!("wrong token {:?}", t), + } + match tokens.next().unwrap() { + proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='), + t => panic!("wrong token {:?}", t), + } + match tokens.next().unwrap() { + proc_macro2::TokenTree::Literal(ref tt) => { + assert_eq!(tt.to_string(), "\" doc\""); + } + t => panic!("wrong token {:?}", t), + } + assert!(tokens.next().is_none()); + + let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap(); + let tokens = stream.into_iter().collect::<Vec<_>>(); + assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens); } +#[test] +fn raw_identifier() { + let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter(); + match tts.next().unwrap() { + TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()), + wrong => panic!("wrong token {:?}", wrong), + } + assert!(tts.next().is_none()); +}
copy from third_party/rust/quote/.cargo-checksum.json copy to third_party/rust/quote-0.4.2/.cargo-checksum.json
rename from third_party/rust/synom/LICENSE-APACHE rename to third_party/rust/quote-0.4.2/LICENSE-APACHE
rename from third_party/rust/syn-0.11.11/LICENSE-MIT rename to third_party/rust/quote-0.4.2/LICENSE-MIT
copy from third_party/rust/quote/src/to_tokens.rs copy to third_party/rust/quote-0.4.2/src/to_tokens.rs
--- a/third_party/rust/quote/.cargo-checksum.json +++ b/third_party/rust/quote/.cargo-checksum.json @@ -1,1 +1,1 @@ -{"files":{"Cargo.toml":"c8e98953df6fdcc4bdf6a1b7d970c214e8f5eb0f21da327d1c0916735303cd3a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"f1812dcc3e666d6bebca97f3739058e1bd8de1a2542c9a8cb258d0a259bd59e1","src/lib.rs":"b63a044edeff7ae12d0733e0a7fe64babf9b593b624fa753639ad3f340f24031","src/to_tokens.rs":"3b7fe0934ce2d9c23d9851ec624349cfa6e9d5cd9ed31c67f25cecce50dc218f","src/tokens.rs":"963474535197c1a79bf60af570470e7a89dce43546ee3186920197fdb40bdd9b","tests/test.rs":"8db237707035f55af7c7ef82c2c3892a048411963dccd019da0148bacae8c3d2"},"package":"1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"} \ No newline at end of file +{"files":{"Cargo.toml":"693459089a22ff7249a6bc2e6e9a7fd0b2413a67d91872b4d635159f6da0f998","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"bdb5b5375e8cd37b75b4e0269b8fa9fb22776df9762c1df11ec88eb4cd2dc097","src/lib.rs":"05bc9cac79ba5e0084876e37b49178e7147b351786552e464d2beafd1ee84243","src/to_tokens.rs":"77287ca901b02f988b208f5138dc70bea03473cca37e3014f901320a34e8974b","src/tokens.rs":"a4939fc092d6466d5a2e75474886152e880586b12e057c0d7bf7b3f22428b2de","tests/test.rs":"35bac59a637a8dc3919df51bfa0957b6f964f408cc63c7a81a3e759ab8557f55"},"package":"7b0ff51282f28dc1b53fd154298feaa2e77c5ea0dba68e1fd8b03b72fbe13d2a"} \ No newline at end of file
--- a/third_party/rust/quote/Cargo.toml +++ b/third_party/rust/quote/Cargo.toml @@ -7,18 +7,24 @@ # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] name = "quote" -version = "0.4.2" +version = "0.5.1" authors = ["David Tolnay <dtolnay@gmail.com>"] include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] description = "Quasi-quoting macro quote!(...)" documentation = "https://docs.rs/quote/" +readme = "README.md" keywords = ["syn"] license = "MIT/Apache-2.0" repository = "https://github.com/dtolnay/quote" [dependencies.proc-macro2] -version = "0.2" +version = "0.3" +default-features = false + +[features] +default = ["proc-macro"] +proc-macro = ["proc-macro2/proc-macro"]
--- a/third_party/rust/quote/README.md +++ b/third_party/rust/quote/README.md @@ -3,17 +3,17 @@ Rust Quasi-Quoting [](https://travis-ci.org/dtolnay/quote) [](https://crates.io/crates/quote) [](https://docs.rs/quote/) This crate provides the [`quote!`] macro for turning Rust syntax tree data structures into tokens of source code. -[`quote!`]: https://docs.rs/quote/0.4/quote/macro.quote.html +[`quote!`]: https://docs.rs/quote/0.5/quote/macro.quote.html Procedural macros in Rust receive a stream of tokens as input, execute arbitrary Rust code to determine how to manipulate those tokens, and produce a stream of tokens to hand back to the compiler to compile into the caller's crate. Quasi-quoting is a solution to one piece of that -- producing tokens to return to the compiler. The idea of quasi-quoting is that we write *code* that we treat as *data*. @@ -28,40 +28,40 @@ This crate is motivated by the procedura general-purpose Rust quasi-quoting library and is not specific to procedural macros. *Version requirement: Quote supports any compiler version back to Rust's very first support for procedural macros in Rust 1.15.0.* ```toml [dependencies] -quote = "0.4" +quote = "0.5" ``` ```rust #[macro_use] extern crate quote; ``` ## Syntax The quote crate provides a [`quote!`] macro within which you can write Rust code that gets packaged into a [`quote::Tokens`] and can be treated as data. You should think of `Tokens` as representing a fragment of Rust source code. Call `to_string()` on a `Tokens` to get back the fragment of source code as a string, or call `into()` to stream them as a `TokenStream` back to the compiler in a procedural macro. -[`quote::Tokens`]: https://docs.rs/quote/0.4/quote/struct.Tokens.html +[`quote::Tokens`]: https://docs.rs/quote/0.5/quote/struct.Tokens.html Within the `quote!` macro, interpolation is done with `#var`. Any type implementing the [`quote::ToTokens`] trait can be interpolated. 
This includes most Rust primitive types as well as most of the syntax tree types from [`syn`]. -[`quote::ToTokens`]: https://docs.rs/quote/0.4/quote/trait.ToTokens.html +[`quote::ToTokens`]: https://docs.rs/quote/0.5/quote/trait.ToTokens.html [`syn`]: https://github.com/dtolnay/syn ```rust let tokens = quote! { struct SerializeWith #generics #where_clause { value: &'a #field_ty, phantom: ::std::marker::PhantomData<#item_ty>, } @@ -104,17 +104,17 @@ Any interpolated tokens preserve the `Sp `ToTokens` implementation. Tokens that originate within a `quote!` invocation are spanned with [`Span::def_site()`]. [`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site A different span can be provided explicitly through the [`quote_spanned!`] macro. -[`quote_spanned!`]: https://docs.rs/quote/0.4/quote/macro.quote_spanned.html +[`quote_spanned!`]: https://docs.rs/quote/0.5/quote/macro.quote_spanned.html ### Recursion limit The `quote!` macro relies on deep recursion so some large invocations may fail with "recursion limit reached" when you compile. If it fails, bump up the recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even higher limit may be necessary for especially large invocations. You don't need this unless the compiler tells you that you need it.
--- a/third_party/rust/quote/src/lib.rs +++ b/third_party/rust/quote/src/lib.rs @@ -21,17 +21,17 @@ //! general-purpose Rust quasi-quoting library and is not specific to procedural //! macros. //! //! *Version requirement: Quote supports any compiler version back to Rust's //! very first support for procedural macros in Rust 1.15.0.* //! //! ```toml //! [dependencies] -//! quote = "0.4" +//! quote = "0.5" //! ``` //! //! ``` //! #[macro_use] //! extern crate quote; //! # //! # fn main() {} //! ``` @@ -86,20 +86,21 @@ //! ## Recursion limit //! //! The `quote!` macro relies on deep recursion so some large invocations may //! fail with "recursion limit reached" when you compile. If it fails, bump up //! the recursion limit by adding `#![recursion_limit = "128"]` to your crate. //! An even higher limit may be necessary for especially large invocations. // Quote types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/quote/0.4.2")] +#![doc(html_root_url = "https://docs.rs/quote/0.5.1")] +#[cfg(feature = "proc-macro")] +extern crate proc_macro; extern crate proc_macro2; -extern crate proc_macro; mod tokens; pub use tokens::Tokens; mod to_tokens; pub use to_tokens::ToTokens; // Not public API. @@ -107,28 +108,20 @@ pub use to_tokens::ToTokens; pub mod __rt { // Not public API. pub use proc_macro2::*; // Not public API. pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) { let s: TokenStream = s.parse().expect("invalid token stream"); tokens.append_all(s.into_iter().map(|mut t| { - t.span = span; + t.set_span(span); t })); } - - // Not public API. - pub fn append_kind(tokens: &mut ::Tokens, span: Span, kind: TokenNode) { - tokens.append(TokenTree { - span: span, - kind: kind, - }) - } } /// The whole point. /// /// Performs variable interpolation against the input and produces it as /// [`Tokens`]. For returning tokens to the compiler in a procedural macro, use /// `into()` to build a `TokenStream`. 
/// @@ -153,28 +146,31 @@ pub mod __rt { /// - `#(#var),*` — the character before the asterisk is used as a separator /// - `#( struct #var; )*` — the repetition can contain other tokens /// - `#( #k => println!("{}", #v), )*` — even multiple interpolations /// /// # Hygiene /// /// Any interpolated tokens preserve the `Span` information provided by their /// `ToTokens` implementation. Tokens that originate within the `quote!` -/// invocation are spanned with [`Span::def_site()`]. +/// invocation are spanned with [`Span::call_site()`]. /// -/// [`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site +/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.call_site /// /// A different span can be provided through the [`quote_spanned!`] macro. /// /// [`quote_spanned!`]: macro.quote_spanned.html /// /// # Example /// /// ``` +/// # #[cfg(feature = "proc-macro")] /// extern crate proc_macro; +/// # #[cfg(not(feature = "proc-macro"))] +/// # extern crate proc_macro2 as proc_macro; /// /// #[macro_use] /// extern crate quote; /// /// use proc_macro::TokenStream; /// /// # const IGNORE_TOKENS: &'static str = stringify! { /// #[proc_macro_derive(HeapSize)] @@ -201,17 +197,17 @@ pub mod __rt { /// // Hand the output tokens back to the compiler. /// expanded.into() /// } /// # /// # fn main() {} /// ``` #[macro_export] macro_rules! quote { - ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::def_site()=> $($tt)*)); + ($($tt:tt)*) => (quote_spanned!($crate::__rt::Span::call_site()=> $($tt)*)); } /// Same as `quote!`, but applies a given span to all tokens originating within /// the macro invocation. /// /// # Syntax /// /// A span expression of type [`Span`], followed by `=>`, followed by the tokens @@ -239,16 +235,22 @@ macro_rules! quote { /// /// // On multiple lines, place the span at the top and use braces. /// let tokens = quote_spanned! 
{span=> /// Box::into_raw(Box::new(#init)) /// }; /// # } /// ``` /// +/// The lack of space before the `=>` should look jarring to Rust programmers +/// and this is intentional. The formatting is designed to be visibly +/// off-balance and draw the eye a particular way, due to the span expression +/// being evaluated in the context of the procedural macro and the remaining +/// tokens being evaluated in the generated code. +/// /// # Hygiene /// /// Any interpolated tokens preserve the `Span` information provided by their /// `ToTokens` implementation. Tokens that originate within the `quote_spanned!` /// invocation are spanned with the given span argument. /// /// # Example /// @@ -275,19 +277,19 @@ macro_rules! quote { /// # } /// # /// # impl ToTokens for Type { /// # fn to_tokens(&self, _tokens: &mut Tokens) {} /// # } /// # /// # fn main() { /// # let ty = Type; -/// # let def_site = Span::def_site(); +/// # let call_site = Span::call_site(); /// # -/// let ty_span = ty.span().resolved_at(def_site); +/// let ty_span = ty.span(); /// let assert_sync = quote_spanned! {ty_span=> /// struct _AssertSync where #ty: Sync; /// }; /// # } /// ``` /// /// If the assertion fails, the user will see an error like the following. The /// input span of their type is hightlighted in the error. @@ -444,57 +446,65 @@ macro_rules! 
quote_each_token { } quote_each_token!($tokens $span $($inner)*); } quote_each_token!($tokens $span $($rest)*); }; ($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => { quote_each_token!($tokens $span #); - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( + $tokens.append({ + let mut g = $crate::__rt::Group::new( $crate::__rt::Delimiter::Bracket, - quote_spanned!($span=> $($inner)*).into() - )); + quote_spanned!($span=> $($inner)*).into(), + ); + g.set_span($span); + g + }); quote_each_token!($tokens $span $($rest)*); }; ($tokens:ident $span:ident # $first:ident $($rest:tt)*) => { $crate::ToTokens::to_tokens(&$first, &mut $tokens); quote_each_token!($tokens $span $($rest)*); }; ($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( + $tokens.append({ + let mut g = $crate::__rt::Group::new( $crate::__rt::Delimiter::Parenthesis, - quote_spanned!($span=> $($first)*).into() - )); + quote_spanned!($span=> $($first)*).into(), + ); + g.set_span($span); + g + }); quote_each_token!($tokens $span $($rest)*); }; ($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( + $tokens.append({ + let mut g = $crate::__rt::Group::new( $crate::__rt::Delimiter::Bracket, - quote_spanned!($span=> $($first)*).into() - )); + quote_spanned!($span=> $($first)*).into(), + ); + g.set_span($span); + g + }); quote_each_token!($tokens $span $($rest)*); }; ($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => { - $crate::__rt::append_kind(&mut $tokens, - $span, - $crate::__rt::TokenNode::Group( + $tokens.append({ + let mut g = $crate::__rt::Group::new( $crate::__rt::Delimiter::Brace, - quote_spanned!($span=> $($first)*).into() - )); + quote_spanned!($span=> $($first)*).into(), + ); + g.set_span($span); + g + }); quote_each_token!($tokens $span $($rest)*); }; 
($tokens:ident $span:ident $first:tt $($rest:tt)*) => { // TODO: this seems slow... special case some `:tt` arguments? $crate::__rt::parse(&mut $tokens, $span, stringify!($first)); quote_each_token!($tokens $span $($rest)*); };
--- a/third_party/rust/quote/src/to_tokens.rs +++ b/third_party/rust/quote/src/to_tokens.rs @@ -1,55 +1,42 @@ use super::Tokens; use std::borrow::Cow; -use proc_macro2::{Literal, Span, Term, TokenNode, TokenTree, TokenStream}; - -fn tt(kind: TokenNode) -> TokenTree { - TokenTree { - span: Span::def_site(), - kind: kind, - } -} +use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree}; /// Types that can be interpolated inside a [`quote!`] invocation. /// /// [`quote!`]: macro.quote.html pub trait ToTokens { /// Write `self` to the given `Tokens`. /// /// Example implementation for a struct representing Rust paths like /// `std::cmp::PartialEq`: /// /// ``` /// extern crate quote; /// use quote::{Tokens, ToTokens}; /// /// extern crate proc_macro2; - /// use proc_macro2::{TokenTree, TokenNode, Spacing, Span}; + /// use proc_macro2::{TokenTree, Spacing, Span, Op}; /// /// pub struct Path { /// pub global: bool, /// pub segments: Vec<PathSegment>, /// } /// /// impl ToTokens for Path { /// fn to_tokens(&self, tokens: &mut Tokens) { /// for (i, segment) in self.segments.iter().enumerate() { /// if i > 0 || self.global { /// // Double colon `::` - /// tokens.append(TokenTree { - /// span: Span::def_site(), - /// kind: TokenNode::Op(':', Spacing::Joint), - /// }); - /// tokens.append(TokenTree { - /// span: Span::def_site(), - /// kind: TokenNode::Op(':', Spacing::Alone), - /// }); + /// tokens.append(Op::new(':', Spacing::Joint)); + /// tokens.append(Op::new(':', Spacing::Alone)); /// } /// segment.to_tokens(tokens); /// } /// } /// } /// # /// # pub struct PathSegment; /// # @@ -100,70 +87,75 @@ impl<T: ToTokens> ToTokens for Option<T> if let Some(ref t) = *self { t.to_tokens(tokens); } } } impl ToTokens for str { fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::string(self)))); + tokens.append(Literal::string(self)); } } impl ToTokens for String { fn to_tokens(&self, tokens: &mut Tokens) { self.as_str().to_tokens(tokens); 
} } macro_rules! primitive { - ($($t:ident)*) => ($( + ($($t:ident => $name:ident)*) => ($( impl ToTokens for $t { fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::$t(*self)))); + tokens.append(Literal::$name(*self)); } } )*) } primitive! { - i8 i16 i32 i64 isize - u8 u16 u32 u64 usize - f32 f64 + i8 => i8_suffixed + i16 => i16_suffixed + i32 => i32_suffixed + i64 => i64_suffixed + isize => isize_suffixed + + u8 => u8_suffixed + u16 => u16_suffixed + u32 => u32_suffixed + u64 => u64_suffixed + usize => usize_suffixed + + f32 => f32_suffixed + f64 => f64_suffixed } impl ToTokens for char { fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(Literal::character(*self)))); + tokens.append(Literal::character(*self)); } } impl ToTokens for bool { fn to_tokens(&self, tokens: &mut Tokens) { let word = if *self { "true" } else { "false" }; - tokens.append(tt(TokenNode::Term(Term::intern(word)))); + tokens.append(Term::new(word, Span::call_site())); } } impl ToTokens for Term { fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Term(*self))); + tokens.append(self.clone()); } } impl ToTokens for Literal { fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(TokenNode::Literal(self.clone()))); - } -} - -impl ToTokens for TokenNode { - fn to_tokens(&self, tokens: &mut Tokens) { - tokens.append(tt(self.clone())); + tokens.append(self.clone()); } } impl ToTokens for TokenTree { fn to_tokens(&self, dst: &mut Tokens) { dst.append(self.clone()); } }
--- a/third_party/rust/quote/src/tokens.rs +++ b/third_party/rust/quote/src/tokens.rs @@ -1,12 +1,13 @@ use super::ToTokens; use std::fmt::{self, Debug, Display}; use std::hash::{Hash, Hasher}; +#[cfg(feature = "proc-macro")] use proc_macro; use proc_macro2::{TokenStream, TokenTree}; /// Tokens produced by a [`quote!`] invocation. /// /// [`quote!`]: macro.quote.html #[derive(Clone, Default)] pub struct Tokens { @@ -103,16 +104,17 @@ impl ToTokens for Tokens { } impl From<Tokens> for TokenStream { fn from(tokens: Tokens) -> TokenStream { tokens.tts.into_iter().collect() } } +#[cfg(feature = "proc-macro")] impl From<Tokens> for proc_macro::TokenStream { fn from(tokens: Tokens) -> proc_macro::TokenStream { TokenStream::from(tokens).into() } } /// Allows a `Tokens` to be passed to `Tokens::append_all`. impl IntoIterator for Tokens { @@ -120,18 +122,18 @@ impl IntoIterator for Tokens { type IntoIter = private::IntoIter; fn into_iter(self) -> Self::IntoIter { private::into_iter(self.tts.into_iter()) } } mod private { + use proc_macro2::TokenTree; use std::vec; - use proc_macro2::TokenTree; pub struct IntoIter(vec::IntoIter<TokenTree>); pub fn into_iter(tts: vec::IntoIter<TokenTree>) -> IntoIter { IntoIter(tts) } impl Iterator for IntoIter { @@ -165,52 +167,52 @@ impl Debug for Tokens { formatter .debug_tuple("Tokens") .field(&DebugAsDisplay(self)) .finish() } } fn tt_eq(a: &TokenTree, b: &TokenTree) -> bool { - use proc_macro2::{TokenNode, Delimiter, Spacing}; + use proc_macro2::{Delimiter, Spacing}; - match (&a.kind, &b.kind) { - (&TokenNode::Group(d1, ref s1), &TokenNode::Group(d2, ref s2)) => { - match (d1, d2) { + match (a, b) { + (&TokenTree::Group(ref s1), &TokenTree::Group(ref s2)) => { + match (s1.delimiter(), s2.delimiter()) { (Delimiter::Parenthesis, Delimiter::Parenthesis) | (Delimiter::Brace, Delimiter::Brace) | (Delimiter::Bracket, Delimiter::Bracket) | (Delimiter::None, Delimiter::None) => {} _ => return false, } - let s1 = s1.clone().into_iter(); - let 
mut s2 = s2.clone().into_iter(); + let s1 = s1.stream().clone().into_iter(); + let mut s2 = s2.stream().clone().into_iter(); for item1 in s1 { let item2 = match s2.next() { Some(item) => item, None => return false, }; if !tt_eq(&item1, &item2) { return false; } } s2.next().is_none() } - (&TokenNode::Op(o1, k1), &TokenNode::Op(o2, k2)) => { - o1 == o2 && match (k1, k2) { + (&TokenTree::Op(ref o1), &TokenTree::Op(ref o2)) => { + o1.op() == o2.op() && match (o1.spacing(), o2.spacing()) { (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true, _ => false, } } - (&TokenNode::Literal(ref l1), &TokenNode::Literal(ref l2)) => { + (&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => { l1.to_string() == l2.to_string() } - (&TokenNode::Term(ref s1), &TokenNode::Term(ref s2)) => s1.as_str() == s2.as_str(), + (&TokenTree::Term(ref s1), &TokenTree::Term(ref s2)) => s1.as_str() == s2.as_str(), _ => false, } } impl PartialEq for Tokens { fn eq(&self, other: &Self) -> bool { if self.tts.len() != other.tts.len() { return false; @@ -219,43 +221,43 @@ impl PartialEq for Tokens { self.tts .iter() .zip(other.tts.iter()) .all(|(a, b)| tt_eq(a, b)) } } fn tt_hash<H: Hasher>(tt: &TokenTree, h: &mut H) { - use proc_macro2::{TokenNode, Delimiter, Spacing}; + use proc_macro2::{Delimiter, Spacing}; - match tt.kind { - TokenNode::Group(delim, ref stream) => { + match *tt { + TokenTree::Group(ref g) => { 0u8.hash(h); - match delim { + match g.delimiter() { Delimiter::Parenthesis => 0u8.hash(h), Delimiter::Brace => 1u8.hash(h), Delimiter::Bracket => 2u8.hash(h), Delimiter::None => 3u8.hash(h), } - for item in stream.clone() { + for item in g.stream().clone() { tt_hash(&item, h); } 0xffu8.hash(h); // terminator w/ a variant we don't normally hash } - TokenNode::Op(op, kind) => { + TokenTree::Op(ref t) => { 1u8.hash(h); - op.hash(h); - match kind { + t.op().hash(h); + match t.spacing() { Spacing::Alone => 0u8.hash(h), Spacing::Joint => 1u8.hash(h), } } - 
TokenNode::Literal(ref lit) => (2u8, lit.to_string()).hash(h), - TokenNode::Term(ref word) => (3u8, word.as_str()).hash(h), + TokenTree::Literal(ref lit) => (2u8, lit.to_string()).hash(h), + TokenTree::Term(ref word) => (3u8, word.as_str()).hash(h), } } impl<'a> Hash for Tokens { fn hash<H: Hasher>(&self, h: &mut H) { self.tts.len().hash(h); for tt in &self.tts { tt_hash(&tt, h);
--- a/third_party/rust/quote/tests/test.rs +++ b/third_party/rust/quote/tests/test.rs @@ -7,32 +7,29 @@ extern crate proc_macro2; extern crate quote; use proc_macro2::{Span, Term}; struct X; impl quote::ToTokens for X { fn to_tokens(&self, tokens: &mut quote::Tokens) { - tokens.append(proc_macro2::TokenTree { - kind: proc_macro2::TokenNode::Term(Term::intern("X")), - span: Span::def_site(), - }); + tokens.append(Term::new("X", Span::call_site())); } } #[test] fn test_quote_impl() { - let tokens = quote!( + let tokens = quote! { impl<'a, T: ToTokens> ToTokens for &'a T { fn to_tokens(&self, tokens: &mut Tokens) { (**self).to_tokens(tokens) } } - ); + }; let expected = concat!( "impl < 'a , T : ToTokens > ToTokens for & 'a T { ", "fn to_tokens ( & self , tokens : & mut Tokens ) { ", "( * * self ) . to_tokens ( tokens ) ", "} ", "}" ); @@ -180,18 +177,18 @@ fn test_string() { let s = "\0 a 'b \" c".to_string(); let tokens = quote!(#s); let expected = "\"\\u{0} a \\'b \\\" c\""; assert_eq!(expected, tokens.to_string()); } #[test] fn test_ident() { - let foo = Term::intern("Foo"); - let bar = Term::intern(&format!("Bar{}", 7)); + let foo = Term::new("Foo", Span::call_site()); + let bar = Term::new(&format!("Bar{}", 7), Span::call_site()); let tokens = quote!(struct #foo; enum #bar {}); let expected = "struct Foo ; enum Bar7 { }"; assert_eq!(expected, tokens.to_string()); } #[test] fn test_duplicate() { let ch = 'x'; @@ -255,29 +252,29 @@ fn test_empty_quote() { fn test_box_str() { let b = "str".to_owned().into_boxed_str(); let tokens = quote! { #b }; assert_eq!("\"str\"", tokens.to_string()); } #[test] fn test_cow() { - let owned: Cow<Term> = Cow::Owned(Term::intern("owned")); + let owned: Cow<Term> = Cow::Owned(Term::new("owned", Span::call_site())); - let ident = Term::intern("borrowed"); + let ident = Term::new("borrowed", Span::call_site()); let borrowed = Cow::Borrowed(&ident); let tokens = quote! 
{ #owned #borrowed }; assert_eq!("owned borrowed", tokens.to_string()); } #[test] fn test_closure() { fn field_i(i: usize) -> Term { - Term::intern(&format!("__field{}", i)) + Term::new(&format!("__field{}", i), Span::call_site()) } let fields = (0usize..3) .map(field_i as fn(_) -> _) .map(|var| quote! { #var }); let tokens = quote! { #(#fields)* }; assert_eq!("__field0 __field1 __field2", tokens.to_string());
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/.cargo-checksum.json +++ /dev/null @@ -1,1 +0,0 @@ -{"files":{"Cargo.toml":"e1c76f5a888ab4a9047a9079a2c69a666170ef5bbdbd540720cbfe4b6c2a5b78","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"aa140842ad00ec4f0601fefdeef5046bfeae3834d58c9ba6d9721d65885fc016","src/aster/generics.rs":"030a8e7f4de881ef60c171fe48bcb65aec8d58f3558f755a3b9b953b2c9f1819","src/aster/ident.rs":"e9d082664f008a56bd854011310b4258ab072740ba82e57495b6e8a868a5f36b","src/aster/invoke.rs":"2b1b993973ab4f5c8fa6d6a286576b2542edce21fe9904f5133c470c072e6d3f","src/aster/lifetime.rs":"304101622e102586946802ae17a0a76d53a7f3a3e72e520d0e2ac3c8664db3ef","src/aster/mod.rs":"12220f73b0021e72b4c50f6a513cff174b9c7267209aa23f183043d96ccc9ab7","src/aster/path.rs":"7298bcbd522e10a48ec9d54a1959eec4a0b6368fda9ef10d6e19fc488507d5bb","src/aster/qpath.rs":"5ba33af56ccf74f5c516ed542d117d1f6ca9f7dfd1a74d08b4ac50d95666c497","src/aster/ty.rs":"07d783269047f3be20e73ccc962bb4c4cd63c869d73de8bae7bef25b53986d09","src/aster/ty_param.rs":"4f17c12e0b7cb306cfdfaea648eaccee5116923b2abb4d35d085d88f70c40385","src/aster/where_predicate.rs":"5fb8ec3fcb67bcc1d9bb7b64cf2f5beb601aac6502d6db30c0cdf8641fa248d1","src/attr.rs":"2c0c14c45f39af22ea10e0d15c24ef349b23408b6c4e24b6e91c48d38a5e5ca2","src/constant.rs":"b68686cdf371d76d7ac548184d52e46fa1312e84b02a5b504fedbbc54a3b26ff","src/data.rs":"1d6c3c29b1d94a01fb6ec41b4144c22a8ebd7a7fe9074d87fbe2fd1776f2f38b","src/derive.rs":"5d474fa52c19c4d46ff79be39038254887ca01f1786c0032b54e0b5ad8697b03","src/escape.rs":"7263b3df626ad26e5b82b329557584f7cdd61589977ce82c9e794e1b61f042b2","src/expr.rs":"77e22fbf2d1003366296a05d42806a69fdaaa73b4a02e6a99438d8fc886d06b6","src/fold.rs":"879928ea8de2b228f9073658ffa100c689ec85edabfa4f876f9aee3b13057522","src/generics.rs":"02ddd46f39d771d7f229d69f763278e75ee50a5af2c7d2746080e959639726f7",
"src/ident.rs":"9eb6354d2b58e14191e44592c122501232539b53480389ab9e35d426c3962123","src/item.rs":"c91ec1b423877590acd3fa01b094f452ef6b177db6c177056f33caf61f3fe92d","src/krate.rs":"78f89e1f12f5b790d99d88a3a013178585f6715a27eb26f604e72e763a47dfdf","src/lib.rs":"2931fc34ec99b9ce1776debaca8114eb3531c0851ca584239c03637c90b1cf7d","src/lit.rs":"f8cdfd540f038f699cb546fc3cfc43ec6f72551aa12ca351ea0beb9c8100fa4c","src/mac.rs":"b3ba8e7531980abecec4a9f86f68ae136c5982617e0e37aaa823d288ba6f5e4e","src/op.rs":"232f84ba605ed50e70ee02169dd551548872135cf56f155637917ec3bf810ce1","src/ty.rs":"d71d75de0c0a6d27bc1d425a4ce282e42f7d6126e34ecaa7798353dffb231229","src/visit.rs":"a0c4c7d9768bd5b8fab5441932fc4075e7dc827b73144e5972a04fc7c2e676ff"},"package":"d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"} \ No newline at end of file
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "syn" -version = "0.11.11" # don't forget to update version in readme for breaking changes -authors = ["David Tolnay <dtolnay@gmail.com>"] -license = "MIT/Apache-2.0" -description = "Nom parser for Rust source code" -repository = "https://github.com/dtolnay/syn" -documentation = "https://dtolnay.github.io/syn/syn/" -categories = ["development-tools::procedural-macro-helpers"] -include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] - -[features] -default = ["parsing", "printing"] -aster = [] -full = [] -parsing = ["unicode-xid", "synom"] -printing = ["quote"] -visit = [] -fold = [] - -[dependencies] -quote = { version = "0.3.7", optional = true } -unicode-xid = { version = "0.0.4", optional = true } -synom = { version = "0.11", path = "synom", optional = true } - -[dev-dependencies] -syntex_pos = "0.58" -syntex_syntax = "0.58" -tempdir = "0.3.5" -walkdir = "1.0.1"
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/README.md +++ /dev/null @@ -1,205 +0,0 @@ -Nom parser for Rust source code -=============================== - -[](https://travis-ci.org/dtolnay/syn) -[](https://crates.io/crates/syn) -[](https://dtolnay.github.io/syn/syn/) - -Parse Rust source code without a Syntex dependency, intended for use with -[Macros 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md). - -Designed for fast compile time. - -- Compile time for `syn` (from scratch including all dependencies): **6 seconds** -- Compile time for the `syntex`/`quasi`/`aster` stack: **60+ seconds** - -If you get stuck with Macros 1.1 I am happy to provide help even if the issue is -not related to syn. Please file a ticket in this repo. - -## Usage with Macros 1.1 - -```toml -[dependencies] -syn = "0.11" -quote = "0.3" - -[lib] -proc-macro = true -``` - -```rust -extern crate proc_macro; -use proc_macro::TokenStream; - -extern crate syn; - -#[macro_use] -extern crate quote; - -#[proc_macro_derive(MyMacro)] -pub fn my_macro(input: TokenStream) -> TokenStream { - let source = input.to_string(); - - // Parse the string representation into a syntax tree - let ast = syn::parse_derive_input(&source).unwrap(); - - // Build the output, possibly using quasi-quotation - let expanded = quote! { - // ... 
- }; - - // Parse back to a token stream and return it - expanded.parse().unwrap() -} -``` - -## Complete example - -Suppose we have the following simple trait which returns the number of fields in -a struct: - -```rust -trait NumFields { - fn num_fields() -> usize; -} -``` - -A complete Macros 1.1 implementation of `#[derive(NumFields)]` based on `syn` -and [`quote`](https://github.com/dtolnay/quote) looks like this: - -```rust -extern crate proc_macro; -use proc_macro::TokenStream; - -extern crate syn; - -#[macro_use] -extern crate quote; - -#[proc_macro_derive(NumFields)] -pub fn num_fields(input: TokenStream) -> TokenStream { - let source = input.to_string(); - - // Parse the string representation into a syntax tree - let ast = syn::parse_derive_input(&source).unwrap(); - - // Build the output - let expanded = expand_num_fields(&ast); - - // Return the generated impl as a TokenStream - expanded.parse().unwrap() -} - -fn expand_num_fields(ast: &syn::DeriveInput) -> quote::Tokens { - let n = match ast.body { - syn::Body::Struct(ref data) => data.fields().len(), - syn::Body::Enum(_) => panic!("#[derive(NumFields)] can only be used with structs"), - }; - - // Used in the quasi-quotation below as `#name` - let name = &ast.ident; - - // Helper is provided for handling complex generic types correctly and effortlessly - let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); - - quote! { - // The generated impl - impl #impl_generics ::mycrate::NumFields for #name #ty_generics #where_clause { - fn num_fields() -> usize { - #n - } - } - } -} -``` - -For a more elaborate example that involves trait bounds, enums, and different -kinds of structs, check out [`DeepClone`] and [`deep-clone-derive`]. 
- -[`DeepClone`]: https://github.com/asajeffrey/deep-clone -[`deep-clone-derive`]: https://github.com/asajeffrey/deep-clone/blob/master/deep-clone-derive/lib.rs - -## Testing - -Macros 1.1 has a restriction that your proc-macro crate must export nothing but -`proc_macro_derive` functions, and also `proc_macro_derive` procedural macros -cannot be used from the same crate in which they are defined. These restrictions -may be lifted in the future but for now they make writing tests a bit trickier -than for other types of code. - -In particular, you will not be able to write test functions like `#[test] fn -it_works() { ... }` in line with your code. Instead, either put tests in a -[`tests` directory](https://doc.rust-lang.org/book/testing.html#the-tests-directory) -or in a separate crate entirely. - -Additionally, if your procedural macro implements a particular trait, that trait -must be defined in a separate crate from the procedural macro. - -As a concrete example, suppose your procedural macro crate is called `my_derive` -and it implements a trait called `my_crate::MyTrait`. Your unit tests for the -procedural macro can go in `my_derive/tests/test.rs` or into a separate crate -`my_tests/tests/test.rs`. Either way the test would look something like this: - -```rust -#[macro_use] -extern crate my_derive; - -extern crate my_crate; -use my_crate::MyTrait; - -#[test] -fn it_works() { - #[derive(MyTrait)] - struct S { /* ... */ } - - /* test the thing */ -} -``` - -## Debugging - -When developing a procedural macro it can be helpful to look at what the -generated code looks like. Use `cargo rustc -- -Zunstable-options ---pretty=expanded` or the -[`cargo expand`](https://github.com/dtolnay/cargo-expand) subcommand. - -To show the expanded code for some crate that uses your procedural macro, run -`cargo expand` from that crate. 
To show the expanded code for one of your own -test cases, run `cargo expand --test the_test_case` where the last argument is -the name of the test file without the `.rs` extension. - -This write-up by Brandon W Maister discusses debugging in more detail: -[Debugging Rust's new Custom Derive -system](https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/). - -## Optional features - -Syn puts a lot of functionality behind optional features in order to optimize -compile time for the most common use cases. These are the available features and -their effect on compile time. Dependencies are included in the compile times. - -Features | Compile time | Functionality ---- | --- | --- -*(none)* | 3 sec | The data structures representing the AST of Rust structs, enums, and types. -parsing | 6 sec | Parsing Rust source code containing structs and enums into an AST. -printing | 4 sec | Printing an AST of structs and enums as Rust source code. -**parsing, printing** | **6 sec** | **This is the default.** Parsing and printing of Rust structs and enums. This is typically what you want for implementing Macros 1.1 custom derives. -full | 4 sec | The data structures representing the full AST of all possible Rust code. -full, parsing | 9 sec | Parsing any valid Rust source code to an AST. -full, printing | 6 sec | Turning an AST into Rust source code. -full, parsing, printing | 11 sec | Parsing and printing any Rust syntax. - -## License - -Licensed under either of - - * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions.
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/generics.rs +++ /dev/null @@ -1,231 +0,0 @@ -use {Generics, Ident, LifetimeDef, TyParam, WhereClause, WherePredicate}; -use aster::invoke::{Identity, Invoke}; -use aster::lifetime::{IntoLifetime, LifetimeDefBuilder, IntoLifetimeDef}; -use aster::path::IntoPath; -use aster::ty_param::TyParamBuilder; -use aster::where_predicate::WherePredicateBuilder; - -pub struct GenericsBuilder<F = Identity> { - callback: F, - lifetimes: Vec<LifetimeDef>, - ty_params: Vec<TyParam>, - predicates: Vec<WherePredicate>, -} - -impl GenericsBuilder { - pub fn new() -> Self { - GenericsBuilder::with_callback(Identity) - } - - pub fn from_generics(generics: Generics) -> Self { - GenericsBuilder::from_generics_with_callback(generics, Identity) - } -} - -impl<F> GenericsBuilder<F> - where F: Invoke<Generics> -{ - pub fn with_callback(callback: F) -> Self { - GenericsBuilder { - callback: callback, - lifetimes: Vec::new(), - ty_params: Vec::new(), - predicates: Vec::new(), - } - } - - pub fn from_generics_with_callback(generics: Generics, callback: F) -> Self { - GenericsBuilder { - callback: callback, - lifetimes: generics.lifetimes, - ty_params: generics.ty_params, - predicates: generics.where_clause.predicates, - } - } - - pub fn with(self, generics: Generics) -> Self { - self.with_lifetimes(generics.lifetimes.into_iter()) - .with_ty_params(generics.ty_params.into_iter()) - .with_predicates(generics.where_clause.predicates.into_iter()) - } - - pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self - where I: IntoIterator<Item = L>, - L: IntoLifetimeDef - { - let iter = iter.into_iter().map(|lifetime_def| lifetime_def.into_lifetime_def()); - self.lifetimes.extend(iter); - self - } - - pub fn with_lifetime_names<I, N>(mut self, iter: I) -> Self - where I: IntoIterator<Item = N>, - N: Into<Ident> - { - for name in iter { - self = self.lifetime_name(name); - } - self - } - - pub fn with_lifetime(mut self, lifetime: 
LifetimeDef) -> Self { - self.lifetimes.push(lifetime); - self - } - - pub fn lifetime_name<N>(self, name: N) -> Self - where N: Into<Ident> - { - self.lifetime(name).build() - } - - pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self> - where N: Into<Ident> - { - LifetimeDefBuilder::with_callback(name, self) - } - - pub fn with_ty_params<I>(mut self, iter: I) -> Self - where I: IntoIterator<Item = TyParam> - { - self.ty_params.extend(iter); - self - } - - pub fn with_ty_param_ids<I, T>(mut self, iter: I) -> Self - where I: IntoIterator<Item = T>, - T: Into<Ident> - { - for id in iter { - self = self.ty_param_id(id); - } - self - } - - pub fn with_ty_param(mut self, ty_param: TyParam) -> Self { - self.ty_params.push(ty_param); - self - } - - pub fn ty_param_id<I>(self, id: I) -> Self - where I: Into<Ident> - { - self.ty_param(id).build() - } - - pub fn ty_param<I>(self, id: I) -> TyParamBuilder<Self> - where I: Into<Ident> - { - TyParamBuilder::with_callback(id, self) - } - - pub fn with_predicates<I>(mut self, iter: I) -> Self - where I: IntoIterator<Item = WherePredicate> - { - self.predicates.extend(iter); - self - } - - pub fn with_predicate(mut self, predicate: WherePredicate) -> Self { - self.predicates.push(predicate); - self - } - - pub fn predicate(self) -> WherePredicateBuilder<Self> { - WherePredicateBuilder::with_callback(self) - } - - pub fn add_lifetime_bound<L>(mut self, lifetime: L) -> Self - where L: IntoLifetime - { - let lifetime = lifetime.into_lifetime(); - - for lifetime_def in &mut self.lifetimes { - lifetime_def.bounds.push(lifetime.clone()); - } - - for ty_param in &mut self.ty_params { - *ty_param = TyParamBuilder::from_ty_param(ty_param.clone()) - .lifetime_bound(lifetime.clone()) - .build(); - } - - self - } - - pub fn add_ty_param_bound<P>(mut self, path: P) -> Self - where P: IntoPath - { - let path = path.into_path(); - - for ty_param in &mut self.ty_params { - *ty_param = TyParamBuilder::from_ty_param(ty_param.clone()) - 
.trait_bound(path.clone()) - .build() - .build(); - } - - self - } - - pub fn strip_bounds(self) -> Self { - self.strip_lifetimes().strip_ty_params().strip_predicates() - } - - pub fn strip_lifetimes(mut self) -> Self { - for lifetime in &mut self.lifetimes { - lifetime.bounds = vec![]; - } - self - } - - pub fn strip_ty_params(mut self) -> Self { - for ty_param in &mut self.ty_params { - ty_param.bounds = vec![]; - } - self - } - - pub fn strip_predicates(mut self) -> Self { - self.predicates = vec![]; - self - } - - pub fn build(self) -> F::Result { - self.callback.invoke(Generics { - lifetimes: self.lifetimes, - ty_params: self.ty_params, - where_clause: WhereClause { predicates: self.predicates }, - }) - } -} - -impl<F> Invoke<LifetimeDef> for GenericsBuilder<F> - where F: Invoke<Generics> -{ - type Result = Self; - - fn invoke(self, lifetime: LifetimeDef) -> Self { - self.with_lifetime(lifetime) - } -} - -impl<F> Invoke<TyParam> for GenericsBuilder<F> - where F: Invoke<Generics> -{ - type Result = Self; - - fn invoke(self, ty_param: TyParam) -> Self { - self.with_ty_param(ty_param) - } -} - -impl<F> Invoke<WherePredicate> for GenericsBuilder<F> - where F: Invoke<Generics> -{ - type Result = Self; - - fn invoke(self, predicate: WherePredicate) -> Self { - self.with_predicate(predicate) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/ident.rs +++ /dev/null @@ -1,39 +0,0 @@ -use Ident; - -pub trait ToIdent { - fn to_ident(&self) -> Ident; -} - -impl ToIdent for Ident { - fn to_ident(&self) -> Ident { - self.clone() - } -} - -impl<'a> ToIdent for &'a str { - fn to_ident(&self) -> Ident { - (**self).into() - } -} - -impl ToIdent for String { - fn to_ident(&self) -> Ident { - self.clone().into() - } -} - -impl<'a, T> ToIdent for &'a T - where T: ToIdent -{ - fn to_ident(&self) -> Ident { - (**self).to_ident() - } -} - -impl<'a, T> ToIdent for &'a mut T - where T: ToIdent -{ - fn to_ident(&self) -> Ident { - (**self).to_ident() - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/invoke.rs +++ /dev/null @@ -1,16 +0,0 @@ -pub trait Invoke<A> { - type Result; - - fn invoke(self, arg: A) -> Self::Result; -} - -#[derive(Copy, Clone)] -pub struct Identity; - -impl<A> Invoke<A> for Identity { - type Result = A; - - fn invoke(self, arg: A) -> A { - arg - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/lifetime.rs +++ /dev/null @@ -1,103 +0,0 @@ -use {Ident, Lifetime, LifetimeDef}; -use aster::invoke::{Invoke, Identity}; - -// //////////////////////////////////////////////////////////////////////////// - -pub trait IntoLifetime { - fn into_lifetime(self) -> Lifetime; -} - -impl IntoLifetime for Lifetime { - fn into_lifetime(self) -> Lifetime { - self - } -} - -impl<'a> IntoLifetime for &'a str { - fn into_lifetime(self) -> Lifetime { - Lifetime { ident: self.into() } - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub trait IntoLifetimeDef { - fn into_lifetime_def(self) -> LifetimeDef; -} - -impl IntoLifetimeDef for LifetimeDef { - fn into_lifetime_def(self) -> LifetimeDef { - self - } -} - -impl IntoLifetimeDef for Lifetime { - fn into_lifetime_def(self) -> LifetimeDef { - LifetimeDef { - attrs: vec![], - lifetime: self, - bounds: vec![], - } - } -} - -impl<'a> IntoLifetimeDef for &'a str { - fn into_lifetime_def(self) -> LifetimeDef { - self.into_lifetime().into_lifetime_def() - } -} - -impl IntoLifetimeDef for String { - fn into_lifetime_def(self) -> LifetimeDef { - (*self).into_lifetime().into_lifetime_def() - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct LifetimeDefBuilder<F = Identity> { - callback: F, - lifetime: Lifetime, - bounds: Vec<Lifetime>, -} - -impl LifetimeDefBuilder { - pub fn new<N>(name: N) -> Self - where N: Into<Ident> - { - LifetimeDefBuilder::with_callback(name, Identity) - } -} - -impl<F> LifetimeDefBuilder<F> - where F: Invoke<LifetimeDef> -{ - pub fn with_callback<N>(name: N, callback: F) -> Self - where N: Into<Ident> - { - let lifetime = Lifetime { ident: name.into() }; - - LifetimeDefBuilder { - callback: callback, - lifetime: lifetime, - bounds: Vec::new(), - } - } - - pub fn bound<N>(mut self, name: N) -> Self - where N: Into<Ident> - { - let lifetime = Lifetime { 
ident: name.into() }; - - self.bounds.push(lifetime); - self - } - - pub fn build(self) -> F::Result { - self.callback.invoke(LifetimeDef { - attrs: vec![], - lifetime: self.lifetime, - bounds: self.bounds, - }) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/mod.rs +++ /dev/null @@ -1,33 +0,0 @@ -use super::*; - -pub mod generics; -pub mod ident; -pub mod invoke; -pub mod lifetime; -pub mod path; -pub mod qpath; -pub mod ty; -pub mod ty_param; -pub mod where_predicate; - -pub fn id<I>(id: I) -> Ident - where I: Into<Ident> -{ - id.into() -} - -pub fn from_generics(generics: Generics) -> generics::GenericsBuilder { - generics::GenericsBuilder::from_generics(generics) -} - -pub fn where_predicate() -> where_predicate::WherePredicateBuilder { - where_predicate::WherePredicateBuilder::new() -} - -pub fn ty() -> ty::TyBuilder { - ty::TyBuilder::new() -} - -pub fn path() -> path::PathBuilder { - path::PathBuilder::new() -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/path.rs +++ /dev/null @@ -1,327 +0,0 @@ -use {AngleBracketedParameterData, Generics, Ident, Lifetime, ParenthesizedParameterData, Path, - PathParameters, PathSegment, Ty, TypeBinding}; -use aster::ident::ToIdent; -use aster::invoke::{Invoke, Identity}; -use aster::lifetime::IntoLifetime; -use aster::ty::TyBuilder; - -// //////////////////////////////////////////////////////////////////////////// - -pub trait IntoPath { - fn into_path(self) -> Path; -} - -impl IntoPath for Path { - fn into_path(self) -> Path { - self - } -} - -impl IntoPath for Ident { - fn into_path(self) -> Path { - PathBuilder::new().id(self).build() - } -} - -impl<'a> IntoPath for &'a str { - fn into_path(self) -> Path { - PathBuilder::new().id(self).build() - } -} - -impl IntoPath for String { - fn into_path(self) -> Path { - (&*self).into_path() - } -} - -impl<'a, T> IntoPath for &'a [T] - where T: ToIdent -{ - fn into_path(self) -> Path { - PathBuilder::new().ids(self).build() - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct PathBuilder<F = Identity> { - callback: F, - global: bool, -} - -impl PathBuilder { - pub fn new() -> Self { - PathBuilder::with_callback(Identity) - } -} - -impl<F> PathBuilder<F> - where F: Invoke<Path> -{ - pub fn with_callback(callback: F) -> Self { - PathBuilder { - callback: callback, - global: false, - } - } - - pub fn build(self, path: Path) -> F::Result { - self.callback.invoke(path) - } - - pub fn global(mut self) -> Self { - self.global = true; - self - } - - pub fn ids<I, T>(self, ids: I) -> PathSegmentsBuilder<F> - where I: IntoIterator<Item = T>, - T: ToIdent - { - let mut ids = ids.into_iter(); - let id = ids.next().expect("passed path with no id"); - - self.id(id).ids(ids) - } - - pub fn id<I>(self, id: I) -> PathSegmentsBuilder<F> - where I: ToIdent - { - self.segment(id).build() - } - - pub fn segment<I>(self, id: I) -> 
PathSegmentBuilder<PathSegmentsBuilder<F>> - where I: ToIdent - { - PathSegmentBuilder::with_callback(id, - PathSegmentsBuilder { - callback: self.callback, - global: self.global, - segments: Vec::new(), - }) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct PathSegmentsBuilder<F = Identity> { - callback: F, - global: bool, - segments: Vec<PathSegment>, -} - -impl<F> PathSegmentsBuilder<F> - where F: Invoke<Path> -{ - pub fn ids<I, T>(mut self, ids: I) -> PathSegmentsBuilder<F> - where I: IntoIterator<Item = T>, - T: ToIdent - { - for id in ids { - self = self.id(id); - } - - self - } - - pub fn id<T>(self, id: T) -> PathSegmentsBuilder<F> - where T: ToIdent - { - self.segment(id).build() - } - - pub fn segment<T>(self, id: T) -> PathSegmentBuilder<Self> - where T: ToIdent - { - PathSegmentBuilder::with_callback(id, self) - } - - pub fn build(self) -> F::Result { - self.callback.invoke(Path { - global: self.global, - segments: self.segments, - }) - } -} - -impl<F> Invoke<PathSegment> for PathSegmentsBuilder<F> { - type Result = Self; - - fn invoke(mut self, segment: PathSegment) -> Self { - self.segments.push(segment); - self - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct PathSegmentBuilder<F = Identity> { - callback: F, - id: Ident, - lifetimes: Vec<Lifetime>, - tys: Vec<Ty>, - bindings: Vec<TypeBinding>, -} - -impl<F> PathSegmentBuilder<F> - where F: Invoke<PathSegment> -{ - pub fn with_callback<I>(id: I, callback: F) -> Self - where I: ToIdent - { - PathSegmentBuilder { - callback: callback, - id: id.to_ident(), - lifetimes: Vec::new(), - tys: Vec::new(), - bindings: Vec::new(), - } - } - - pub fn with_generics(self, generics: Generics) -> Self { - // Strip off the bounds. 
- let lifetimes = generics.lifetimes.iter().map(|lifetime_def| lifetime_def.lifetime.clone()); - - let tys = - generics.ty_params.iter().map(|ty_param| TyBuilder::new().id(ty_param.ident.clone())); - - self.with_lifetimes(lifetimes).with_tys(tys) - } - - pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self - where I: IntoIterator<Item = L>, - L: IntoLifetime - { - let iter = iter.into_iter().map(|lifetime| lifetime.into_lifetime()); - self.lifetimes.extend(iter); - self - } - - pub fn with_lifetime<L>(mut self, lifetime: L) -> Self - where L: IntoLifetime - { - self.lifetimes.push(lifetime.into_lifetime()); - self - } - - pub fn lifetime<N>(self, name: N) -> Self - where N: ToIdent - { - let lifetime = Lifetime { ident: name.to_ident() }; - self.with_lifetime(lifetime) - } - - pub fn with_tys<I>(mut self, iter: I) -> Self - where I: IntoIterator<Item = Ty> - { - self.tys.extend(iter); - self - } - - pub fn with_ty(mut self, ty: Ty) -> Self { - self.tys.push(ty); - self - } - - pub fn ty(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } - - pub fn with_binding(mut self, binding: TypeBinding) -> Self { - self.bindings.push(binding); - self - } - - pub fn binding<T>(self, id: T) -> TyBuilder<TypeBindingBuilder<F>> - where T: ToIdent - { - TyBuilder::with_callback(TypeBindingBuilder { - id: id.to_ident(), - builder: self, - }) - } - - pub fn no_return(self) -> F::Result { - self.build_return(None) - } - - pub fn return_(self) -> TyBuilder<PathSegmentReturnBuilder<F>> { - TyBuilder::with_callback(PathSegmentReturnBuilder(self)) - } - - pub fn build_return(self, output: Option<Ty>) -> F::Result { - let data = ParenthesizedParameterData { - inputs: self.tys, - output: output, - }; - - let parameters = PathParameters::Parenthesized(data); - - self.callback.invoke(PathSegment { - ident: self.id, - parameters: parameters, - }) - } - - pub fn build(self) -> F::Result { - let data = AngleBracketedParameterData { - lifetimes: self.lifetimes, - types: self.tys, - 
bindings: self.bindings, - }; - - let parameters = PathParameters::AngleBracketed(data); - - self.callback.invoke(PathSegment { - ident: self.id, - parameters: parameters, - }) - } -} - -impl<F> Invoke<Ty> for PathSegmentBuilder<F> - where F: Invoke<PathSegment> -{ - type Result = Self; - - fn invoke(self, ty: Ty) -> Self { - self.with_ty(ty) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TypeBindingBuilder<F> { - id: Ident, - builder: PathSegmentBuilder<F>, -} - -impl<F> Invoke<Ty> for TypeBindingBuilder<F> - where F: Invoke<PathSegment> -{ - type Result = PathSegmentBuilder<F>; - - fn invoke(self, ty: Ty) -> Self::Result { - let id = self.id; - - self.builder.with_binding(TypeBinding { - ident: id, - ty: ty, - }) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct PathSegmentReturnBuilder<F>(PathSegmentBuilder<F>); - -impl<F> Invoke<Ty> for PathSegmentReturnBuilder<F> - where F: Invoke<PathSegment> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> Self::Result { - self.0.build_return(Some(ty)) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/qpath.rs +++ /dev/null @@ -1,143 +0,0 @@ -use {Path, PathSegment, QSelf, Ty}; -use aster::ident::ToIdent; -use aster::invoke::{Invoke, Identity}; -use aster::path::{PathBuilder, PathSegmentBuilder}; -use aster::ty::TyBuilder; - -// //////////////////////////////////////////////////////////////////////////// - -pub struct QPathBuilder<F = Identity> { - callback: F, -} - -impl QPathBuilder { - pub fn new() -> Self { - QPathBuilder::with_callback(Identity) - } -} - -impl<F> QPathBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - /// Construct a `QPathBuilder` that will call the `callback` with a constructed `QSelf` - /// and `Path`. - pub fn with_callback(callback: F) -> Self { - QPathBuilder { callback: callback } - } - - /// Build a qualified path first by starting with a type builder. - pub fn with_ty(self, ty: Ty) -> QPathTyBuilder<F> { - QPathTyBuilder { - builder: self, - ty: ty, - } - } - - /// Build a qualified path first by starting with a type builder. - pub fn ty(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } - - /// Build a qualified path with a concrete type and path. - pub fn build(self, qself: QSelf, path: Path) -> F::Result { - self.callback.invoke((qself, path)) - } -} - -impl<F> Invoke<Ty> for QPathBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - type Result = QPathTyBuilder<F>; - - fn invoke(self, ty: Ty) -> QPathTyBuilder<F> { - self.with_ty(ty) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct QPathTyBuilder<F> { - builder: QPathBuilder<F>, - ty: Ty, -} - -impl<F> QPathTyBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - /// Build a qualified path with a path builder. 
- pub fn as_(self) -> PathBuilder<Self> { - PathBuilder::with_callback(self) - } - - pub fn id<T>(self, id: T) -> F::Result - where T: ToIdent - { - let path = Path { - global: false, - segments: vec![], - }; - self.as_().build(path).id(id) - } - - pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>> - where T: ToIdent - { - let path = Path { - global: false, - segments: vec![], - }; - self.as_().build(path).segment(id) - } -} - -impl<F> Invoke<Path> for QPathTyBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - type Result = QPathQSelfBuilder<F>; - - fn invoke(self, path: Path) -> QPathQSelfBuilder<F> { - QPathQSelfBuilder { - builder: self.builder, - qself: QSelf { - ty: Box::new(self.ty), - position: path.segments.len(), - }, - path: path, - } - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct QPathQSelfBuilder<F> { - builder: QPathBuilder<F>, - qself: QSelf, - path: Path, -} - -impl<F> QPathQSelfBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - pub fn id<T>(self, id: T) -> F::Result - where T: ToIdent - { - self.segment(id).build() - } - - pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>> - where T: ToIdent - { - PathSegmentBuilder::with_callback(id, self) - } -} - -impl<F> Invoke<PathSegment> for QPathQSelfBuilder<F> - where F: Invoke<(QSelf, Path)> -{ - type Result = F::Result; - - fn invoke(mut self, segment: PathSegment) -> F::Result { - self.path.segments.push(segment); - self.builder.build(self.qself, self.path) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/ty.rs +++ /dev/null @@ -1,488 +0,0 @@ -use {Generics, Lifetime, MutTy, Mutability, Path, QSelf, Ty, TyParamBound}; -use aster::ident::ToIdent; -use aster::invoke::{Invoke, Identity}; -use aster::lifetime::IntoLifetime; -use aster::path::PathBuilder; -use aster::qpath::QPathBuilder; -use aster::ty_param::TyParamBoundBuilder; - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyBuilder<F = Identity> { - callback: F, -} - -impl TyBuilder { - pub fn new() -> Self { - TyBuilder::with_callback(Identity) - } -} - -impl<F> TyBuilder<F> - where F: Invoke<Ty> -{ - pub fn with_callback(callback: F) -> Self { - TyBuilder { callback: callback } - } - - pub fn build(self, ty: Ty) -> F::Result { - self.callback.invoke(ty) - } - - pub fn id<I>(self, id: I) -> F::Result - where I: ToIdent - { - self.path().id(id).build() - } - - pub fn build_path(self, path: Path) -> F::Result { - self.build(Ty::Path(None, path)) - } - - pub fn build_qpath(self, qself: QSelf, path: Path) -> F::Result { - self.build(Ty::Path(Some(qself), path)) - } - - pub fn path(self) -> PathBuilder<TyPathBuilder<F>> { - PathBuilder::with_callback(TyPathBuilder(self)) - } - - pub fn qpath(self) -> QPathBuilder<TyQPathBuilder<F>> { - QPathBuilder::with_callback(TyQPathBuilder(self)) - } - - pub fn isize(self) -> F::Result { - self.id("isize") - } - - pub fn i8(self) -> F::Result { - self.id("i8") - } - - pub fn i16(self) -> F::Result { - self.id("i16") - } - - pub fn i32(self) -> F::Result { - self.id("i32") - } - - pub fn i64(self) -> F::Result { - self.id("i64") - } - - pub fn usize(self) -> F::Result { - self.id("usize") - } - - pub fn u8(self) -> F::Result { - self.id("u8") - } - - pub fn u16(self) -> F::Result { - self.id("u16") - } - - pub fn u32(self) -> F::Result { - self.id("u32") - } - - pub fn u64(self) -> F::Result { - self.id("u64") - } - - pub fn f32(self) -> F::Result { - self.id("f32") 
- } - - pub fn f64(self) -> F::Result { - self.id("f64") - } - - pub fn bool(self) -> F::Result { - self.id("bool") - } - - pub fn unit(self) -> F::Result { - self.tuple().build() - } - - pub fn tuple(self) -> TyTupleBuilder<F> { - TyTupleBuilder { - builder: self, - tys: vec![], - } - } - - pub fn build_slice(self, ty: Ty) -> F::Result { - self.build(Ty::Slice(Box::new(ty))) - } - - pub fn slice(self) -> TyBuilder<TySliceBuilder<F>> { - TyBuilder::with_callback(TySliceBuilder(self)) - } - - pub fn ref_(self) -> TyRefBuilder<F> { - TyRefBuilder { - builder: self, - lifetime: None, - mutability: Mutability::Immutable, - } - } - - pub fn never(self) -> F::Result { - self.build(Ty::Never) - } - - pub fn infer(self) -> F::Result { - self.build(Ty::Infer) - } - - pub fn option(self) -> TyBuilder<TyOptionBuilder<F>> { - TyBuilder::with_callback(TyOptionBuilder(self)) - } - - pub fn result(self) -> TyBuilder<TyResultOkBuilder<F>> { - TyBuilder::with_callback(TyResultOkBuilder(self)) - } - - pub fn phantom_data(self) -> TyBuilder<TyPhantomDataBuilder<F>> { - TyBuilder::with_callback(TyPhantomDataBuilder(self)) - } - - pub fn box_(self) -> TyBuilder<TyBoxBuilder<F>> { - TyBuilder::with_callback(TyBoxBuilder(self)) - } - - pub fn iterator(self) -> TyBuilder<TyIteratorBuilder<F>> { - TyBuilder::with_callback(TyIteratorBuilder(self)) - } - - pub fn impl_trait(self) -> TyImplTraitTyBuilder<F> { - TyImplTraitTyBuilder { - builder: self, - bounds: Vec::new(), - } - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyPathBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Path> for TyPathBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, path: Path) -> F::Result { - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyQPathBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<(QSelf, Path)> for TyQPathBuilder<F> - where F: Invoke<Ty> -{ - 
type Result = F::Result; - - fn invoke(self, (qself, path): (QSelf, Path)) -> F::Result { - self.0.build_qpath(qself, path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TySliceBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TySliceBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - self.0.build_slice(ty) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyRefBuilder<F> { - builder: TyBuilder<F>, - lifetime: Option<Lifetime>, - mutability: Mutability, -} - -impl<F> TyRefBuilder<F> - where F: Invoke<Ty> -{ - pub fn mut_(mut self) -> Self { - self.mutability = Mutability::Mutable; - self - } - - pub fn lifetime<N>(mut self, name: N) -> Self - where N: ToIdent - { - self.lifetime = Some(Lifetime { ident: name.to_ident() }); - self - } - - pub fn build_ty(self, ty: Ty) -> F::Result { - let ty = MutTy { - ty: ty, - mutability: self.mutability, - }; - self.builder.build(Ty::Rptr(self.lifetime, Box::new(ty))) - } - - pub fn ty(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } -} - -impl<F> Invoke<Ty> for TyRefBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - self.build_ty(ty) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyOptionBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TyOptionBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - let path = PathBuilder::new() - .global() - .id("std") - .id("option") - .segment("Option") - .with_ty(ty) - .build() - .build(); - - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyResultOkBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TyResultOkBuilder<F> - where F: Invoke<Ty> -{ - type Result = 
TyBuilder<TyResultErrBuilder<F>>; - - fn invoke(self, ty: Ty) -> TyBuilder<TyResultErrBuilder<F>> { - TyBuilder::with_callback(TyResultErrBuilder(self.0, ty)) - } -} - -pub struct TyResultErrBuilder<F>(TyBuilder<F>, Ty); - -impl<F> Invoke<Ty> for TyResultErrBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - let path = PathBuilder::new() - .global() - .id("std") - .id("result") - .segment("Result") - .with_ty(self.1) - .with_ty(ty) - .build() - .build(); - - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyPhantomDataBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TyPhantomDataBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - let path = PathBuilder::new() - .global() - .id("std") - .id("marker") - .segment("PhantomData") - .with_ty(ty) - .build() - .build(); - - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyBoxBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TyBoxBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - let path = PathBuilder::new() - .global() - .id("std") - .id("boxed") - .segment("Box") - .with_ty(ty) - .build() - .build(); - - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyIteratorBuilder<F>(TyBuilder<F>); - -impl<F> Invoke<Ty> for TyIteratorBuilder<F> - where F: Invoke<Ty> -{ - type Result = F::Result; - - fn invoke(self, ty: Ty) -> F::Result { - let path = PathBuilder::new() - .global() - .id("std") - .id("iter") - .segment("Iterator") - .binding("Item") - .build(ty.clone()) - .build() - .build(); - - self.0.build_path(path) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct 
TyImplTraitTyBuilder<F> { - builder: TyBuilder<F>, - bounds: Vec<TyParamBound>, -} - -impl<F> TyImplTraitTyBuilder<F> - where F: Invoke<Ty> -{ - pub fn with_bounds<I>(mut self, iter: I) -> Self - where I: Iterator<Item = TyParamBound> - { - self.bounds.extend(iter); - self - } - - pub fn with_bound(mut self, bound: TyParamBound) -> Self { - self.bounds.push(bound); - self - } - - pub fn bound(self) -> TyParamBoundBuilder<Self> { - TyParamBoundBuilder::with_callback(self) - } - - pub fn with_generics(self, generics: Generics) -> Self { - self.with_lifetimes(generics.lifetimes.into_iter().map(|def| def.lifetime)) - } - - pub fn with_lifetimes<I, L>(mut self, lifetimes: I) -> Self - where I: Iterator<Item = L>, - L: IntoLifetime - { - for lifetime in lifetimes { - self = self.lifetime(lifetime); - } - - self - } - - pub fn lifetime<L>(self, lifetime: L) -> Self - where L: IntoLifetime - { - self.bound().lifetime(lifetime) - } - - pub fn build(self) -> F::Result { - let bounds = self.bounds; - self.builder.build(Ty::ImplTrait(bounds)) - } -} - -impl<F> Invoke<TyParamBound> for TyImplTraitTyBuilder<F> - where F: Invoke<Ty> -{ - type Result = Self; - - fn invoke(self, bound: TyParamBound) -> Self { - self.with_bound(bound) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyTupleBuilder<F> { - builder: TyBuilder<F>, - tys: Vec<Ty>, -} - -impl<F> TyTupleBuilder<F> - where F: Invoke<Ty> -{ - pub fn with_tys<I>(mut self, iter: I) -> Self - where I: IntoIterator<Item = Ty> - { - self.tys.extend(iter); - self - } - - pub fn with_ty(mut self, ty: Ty) -> Self { - self.tys.push(ty); - self - } - - pub fn ty(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } - - pub fn build(self) -> F::Result { - self.builder.build(Ty::Tup(self.tys)) - } -} - -impl<F> Invoke<Ty> for TyTupleBuilder<F> - where F: Invoke<Ty> -{ - type Result = Self; - - fn invoke(self, ty: Ty) -> Self { - self.with_ty(ty) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/ty_param.rs +++ /dev/null @@ -1,262 +0,0 @@ -use {Ident, LifetimeDef, Path, PolyTraitRef, TraitBoundModifier, Ty, TyParam, TyParamBound}; -use aster::invoke::{Invoke, Identity}; -use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder}; -use aster::path::{IntoPath, PathBuilder}; -use aster::ty::TyBuilder; - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyParamBuilder<F = Identity> { - callback: F, - id: Ident, - bounds: Vec<TyParamBound>, - default: Option<Ty>, -} - -impl TyParamBuilder { - pub fn new<I>(id: I) -> Self - where I: Into<Ident> - { - TyParamBuilder::with_callback(id, Identity) - } - - pub fn from_ty_param(ty_param: TyParam) -> Self { - TyParamBuilder::from_ty_param_with_callback(Identity, ty_param) - } -} - -impl<F> TyParamBuilder<F> - where F: Invoke<TyParam> -{ - pub fn with_callback<I>(id: I, callback: F) -> Self - where I: Into<Ident> - { - TyParamBuilder { - callback: callback, - id: id.into(), - bounds: Vec::new(), - default: None, - } - } - - pub fn from_ty_param_with_callback(callback: F, ty_param: TyParam) -> Self { - TyParamBuilder { - callback: callback, - id: ty_param.ident, - bounds: ty_param.bounds, - default: ty_param.default, - } - } - - pub fn with_default(mut self, ty: Ty) -> Self { - self.default = Some(ty); - self - } - - pub fn default(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } - - pub fn with_bound(mut self, bound: TyParamBound) -> Self { - self.bounds.push(bound); - self - } - - pub fn bound(self) -> TyParamBoundBuilder<Self> { - TyParamBoundBuilder::with_callback(self) - } - - pub fn with_trait_bound(self, trait_ref: PolyTraitRef) -> Self { - self.bound().build_trait(trait_ref, TraitBoundModifier::None) - } - - pub fn trait_bound<P>(self, path: P) -> PolyTraitRefBuilder<Self> - where P: IntoPath - { - PolyTraitRefBuilder::with_callback(path, self) - } - - pub fn 
lifetime_bound<L>(mut self, lifetime: L) -> Self - where L: IntoLifetime - { - let lifetime = lifetime.into_lifetime(); - - self.bounds.push(TyParamBound::Region(lifetime)); - self - } - - pub fn build(self) -> F::Result { - self.callback.invoke(TyParam { - attrs: vec![], - ident: self.id, - bounds: self.bounds, - default: self.default, - }) - } -} - -impl<F> Invoke<Ty> for TyParamBuilder<F> - where F: Invoke<TyParam> -{ - type Result = Self; - - fn invoke(self, ty: Ty) -> Self { - self.with_default(ty) - } -} - -impl<F> Invoke<TyParamBound> for TyParamBuilder<F> - where F: Invoke<TyParam> -{ - type Result = Self; - - fn invoke(self, bound: TyParamBound) -> Self { - self.with_bound(bound) - } -} - -impl<F> Invoke<PolyTraitRef> for TyParamBuilder<F> - where F: Invoke<TyParam> -{ - type Result = Self; - - fn invoke(self, trait_ref: PolyTraitRef) -> Self { - self.with_trait_bound(trait_ref) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TyParamBoundBuilder<F = Identity> { - callback: F, -} - -impl TyParamBoundBuilder { - pub fn new() -> Self { - TyParamBoundBuilder::with_callback(Identity) - } -} - -impl<F> TyParamBoundBuilder<F> - where F: Invoke<TyParamBound> -{ - pub fn with_callback(callback: F) -> Self { - TyParamBoundBuilder { callback: callback } - } - - pub fn build_trait(self, poly_trait: PolyTraitRef, modifier: TraitBoundModifier) -> F::Result { - let bound = TyParamBound::Trait(poly_trait, modifier); - self.callback.invoke(bound) - } - - pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>> - where P: IntoPath - { - let builder = TraitTyParamBoundBuilder { - builder: self, - modifier: TraitBoundModifier::None, - }; - - PolyTraitRefBuilder::with_callback(path, builder) - } - - pub fn maybe_trait<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>> - where P: IntoPath - { - let builder = TraitTyParamBoundBuilder { - builder: self, - modifier: 
TraitBoundModifier::Maybe, - }; - - PolyTraitRefBuilder::with_callback(path, builder) - } - - pub fn iterator(self, ty: Ty) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>> { - let path = PathBuilder::new() - .global() - .id("std") - .id("iter") - .segment("Iterator") - .binding("Item") - .build(ty) - .build() - .build(); - self.trait_(path) - } - - pub fn lifetime<L>(self, lifetime: L) -> F::Result - where L: IntoLifetime - { - let lifetime = lifetime.into_lifetime(); - self.callback.invoke(TyParamBound::Region(lifetime)) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct TraitTyParamBoundBuilder<F> { - builder: TyParamBoundBuilder<F>, - modifier: TraitBoundModifier, -} - -impl<F> Invoke<PolyTraitRef> for TraitTyParamBoundBuilder<F> - where F: Invoke<TyParamBound> -{ - type Result = F::Result; - - fn invoke(self, poly_trait: PolyTraitRef) -> Self::Result { - self.builder.build_trait(poly_trait, self.modifier) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct PolyTraitRefBuilder<F> { - callback: F, - trait_ref: Path, - lifetimes: Vec<LifetimeDef>, -} - -impl<F> PolyTraitRefBuilder<F> - where F: Invoke<PolyTraitRef> -{ - pub fn with_callback<P>(path: P, callback: F) -> Self - where P: IntoPath - { - PolyTraitRefBuilder { - callback: callback, - trait_ref: path.into_path(), - lifetimes: Vec::new(), - } - } - - pub fn with_lifetime<L>(mut self, lifetime: L) -> Self - where L: IntoLifetimeDef - { - self.lifetimes.push(lifetime.into_lifetime_def()); - self - } - - pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self> - where N: Into<Ident> - { - LifetimeDefBuilder::with_callback(name, self) - } - - pub fn build(self) -> F::Result { - self.callback.invoke(PolyTraitRef { - bound_lifetimes: self.lifetimes, - trait_ref: self.trait_ref, - }) - } -} - -impl<F> Invoke<LifetimeDef> for PolyTraitRefBuilder<F> - where F: Invoke<PolyTraitRef> -{ - type Result = 
Self; - - fn invoke(self, lifetime: LifetimeDef) -> Self { - self.with_lifetime(lifetime) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/aster/where_predicate.rs +++ /dev/null @@ -1,259 +0,0 @@ -use {Ident, Lifetime, LifetimeDef, Ty, TyParamBound, WhereBoundPredicate, WherePredicate, - WhereRegionPredicate}; -use aster::invoke::{Invoke, Identity}; -use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder}; -use aster::path::IntoPath; -use aster::ty::TyBuilder; -use aster::ty_param::{TyParamBoundBuilder, PolyTraitRefBuilder, TraitTyParamBoundBuilder}; - -// //////////////////////////////////////////////////////////////////////////// - -pub struct WherePredicateBuilder<F = Identity> { - callback: F, -} - -impl WherePredicateBuilder { - pub fn new() -> Self { - WherePredicateBuilder::with_callback(Identity) - } -} - -impl<F> WherePredicateBuilder<F> - where F: Invoke<WherePredicate> -{ - pub fn with_callback(callback: F) -> Self { - WherePredicateBuilder { callback: callback } - } - - pub fn bound(self) -> TyBuilder<Self> { - TyBuilder::with_callback(self) - } - - pub fn lifetime<L>(self, lifetime: L) -> WhereRegionPredicateBuilder<F> - where L: IntoLifetime - { - WhereRegionPredicateBuilder { - callback: self.callback, - lifetime: lifetime.into_lifetime(), - bounds: Vec::new(), - } - } -} - -impl<F> Invoke<Ty> for WherePredicateBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = WhereBoundPredicateTyBuilder<F>; - - fn invoke(self, ty: Ty) -> Self::Result { - WhereBoundPredicateTyBuilder { - callback: self.callback, - ty: ty, - bound_lifetimes: Vec::new(), - } - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct WhereBoundPredicateBuilder<F> { - callback: F, -} - -impl<F> Invoke<Ty> for WhereBoundPredicateBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = WhereBoundPredicateTyBuilder<F>; - - fn invoke(self, ty: Ty) -> Self::Result { - WhereBoundPredicateTyBuilder { - callback: self.callback, - ty: ty, - bound_lifetimes: Vec::new(), - } - } -} - -// 
//////////////////////////////////////////////////////////////////////////// - -pub struct WhereBoundPredicateTyBuilder<F> { - callback: F, - ty: Ty, - bound_lifetimes: Vec<LifetimeDef>, -} - -impl<F> WhereBoundPredicateTyBuilder<F> - where F: Invoke<WherePredicate> -{ - pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self - where L: IntoLifetimeDef - { - self.bound_lifetimes.push(lifetime.into_lifetime_def()); - self - } - - pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self> - where N: Into<Ident> - { - LifetimeDefBuilder::with_callback(name, self) - } - - pub fn with_bound(self, bound: TyParamBound) -> WhereBoundPredicateTyBoundsBuilder<F> { - WhereBoundPredicateTyBoundsBuilder { - callback: self.callback, - ty: self.ty, - bound_lifetimes: self.bound_lifetimes, - bounds: vec![bound], - } - } - - pub fn bound(self) -> TyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>> { - let builder = WhereBoundPredicateTyBoundsBuilder { - callback: self.callback, - ty: self.ty, - bound_lifetimes: self.bound_lifetimes, - bounds: vec![], - }; - TyParamBoundBuilder::with_callback(builder) - } - - pub fn trait_<P> - (self, - path: P) - -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>>> - where P: IntoPath - { - self.bound().trait_(path) - } - - pub fn lifetime<L>(self, lifetime: L) -> WhereBoundPredicateTyBoundsBuilder<F> - where L: IntoLifetime - { - self.bound().lifetime(lifetime) - } -} - -impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = Self; - - fn invoke(self, lifetime: LifetimeDef) -> Self { - self.with_for_lifetime(lifetime) - } -} - -impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = WhereBoundPredicateTyBoundsBuilder<F>; - - fn invoke(self, bound: TyParamBound) -> Self::Result { - self.with_bound(bound) - } -} - -// 
//////////////////////////////////////////////////////////////////////////// - -pub struct WhereBoundPredicateTyBoundsBuilder<F> { - callback: F, - ty: Ty, - bound_lifetimes: Vec<LifetimeDef>, - bounds: Vec<TyParamBound>, -} - -impl<F> WhereBoundPredicateTyBoundsBuilder<F> - where F: Invoke<WherePredicate> -{ - pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self - where L: IntoLifetimeDef - { - self.bound_lifetimes.push(lifetime.into_lifetime_def()); - self - } - - pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self> - where N: Into<Ident> - { - LifetimeDefBuilder::with_callback(name, self) - } - - pub fn with_bound(mut self, bound: TyParamBound) -> Self { - self.bounds.push(bound); - self - } - - pub fn bound(self) -> TyParamBoundBuilder<Self> { - TyParamBoundBuilder::with_callback(self) - } - - pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<Self>> - where P: IntoPath - { - self.bound().trait_(path) - } - - pub fn lifetime<L>(self, lifetime: L) -> Self - where L: IntoLifetime - { - self.bound().lifetime(lifetime) - } - - pub fn build(self) -> F::Result { - let predicate = WhereBoundPredicate { - bound_lifetimes: self.bound_lifetimes, - bounded_ty: self.ty, - bounds: self.bounds, - }; - - self.callback.invoke(WherePredicate::BoundPredicate(predicate)) - } -} - -impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBoundsBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = Self; - - fn invoke(self, lifetime: LifetimeDef) -> Self { - self.with_for_lifetime(lifetime) - } -} - -impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBoundsBuilder<F> - where F: Invoke<WherePredicate> -{ - type Result = Self; - - fn invoke(self, bound: TyParamBound) -> Self { - self.with_bound(bound) - } -} - -// //////////////////////////////////////////////////////////////////////////// - -pub struct WhereRegionPredicateBuilder<F> { - callback: F, - lifetime: Lifetime, - bounds: Vec<Lifetime>, -} - -impl<F> 
WhereRegionPredicateBuilder<F> - where F: Invoke<WherePredicate> -{ - pub fn bound<L>(mut self, lifetime: L) -> Self - where L: IntoLifetime - { - self.bounds.push(lifetime.into_lifetime()); - self - } - - pub fn build(self) -> F::Result { - let predicate = WhereRegionPredicate { - lifetime: self.lifetime, - bounds: self.bounds, - }; - - self.callback.invoke(WherePredicate::RegionPredicate(predicate)) - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/attr.rs +++ /dev/null @@ -1,305 +0,0 @@ -use super::*; - -use std::iter; - -/// Doc-comments are promoted to attributes that have `is_sugared_doc` = true -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct Attribute { - pub style: AttrStyle, - pub value: MetaItem, - pub is_sugared_doc: bool, -} - -impl Attribute { - pub fn name(&self) -> &str { - self.value.name() - } -} - -/// Distinguishes between Attributes that decorate items and Attributes that -/// are contained as statements within items. These two cases need to be -/// distinguished for pretty-printing. -#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] -pub enum AttrStyle { - /// Attribute of the form `#![...]`. - Outer, - - /// Attribute of the form `#[...]`. - Inner, -} - -/// A compile-time attribute item. -/// -/// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]` -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub enum MetaItem { - /// Word meta item. - /// - /// E.g. `test` as in `#[test]` - Word(Ident), - - /// List meta item. - /// - /// E.g. `derive(..)` as in `#[derive(..)]` - List(Ident, Vec<NestedMetaItem>), - - /// Name-value meta item. - /// - /// E.g. `feature = "foo"` as in `#[feature = "foo"]` - NameValue(Ident, Lit), -} - -impl MetaItem { - /// Name of the item. - /// - /// E.g. `test` as in `#[test]`, `derive` as in `#[derive(..)]`, and - /// `feature` as in `#[feature = "foo"]`. - pub fn name(&self) -> &str { - match *self { - MetaItem::Word(ref name) | - MetaItem::List(ref name, _) | - MetaItem::NameValue(ref name, _) => name.as_ref(), - } - } -} - -/// Possible values inside of compile-time attribute lists. -/// -/// E.g. the '..' in `#[name(..)]`. -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub enum NestedMetaItem { - /// A full `MetaItem`. - /// - /// E.g. `Copy` in `#[derive(Copy)]` would be a `MetaItem::Word(Ident::from("Copy"))`. - MetaItem(MetaItem), - - /// A Rust literal. - /// - /// E.g. 
`"name"` in `#[rename("name")]`. - Literal(Lit), -} - -pub trait FilterAttrs<'a> { - type Ret: Iterator<Item = &'a Attribute>; - - fn outer(self) -> Self::Ret; - fn inner(self) -> Self::Ret; -} - -impl<'a, T> FilterAttrs<'a> for T - where T: IntoIterator<Item = &'a Attribute> -{ - type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>; - - fn outer(self) -> Self::Ret { - fn is_outer(attr: &&Attribute) -> bool { - attr.style == AttrStyle::Outer - } - self.into_iter().filter(is_outer) - } - - fn inner(self) -> Self::Ret { - fn is_inner(attr: &&Attribute) -> bool { - attr.style == AttrStyle::Inner - } - self.into_iter().filter(is_inner) - } -} - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use ident::parsing::ident; - use lit::parsing::lit; - use synom::space::{block_comment, whitespace}; - - #[cfg(feature = "full")] - named!(pub inner_attr -> Attribute, alt!( - do_parse!( - punct!("#") >> - punct!("!") >> - punct!("[") >> - meta_item: meta_item >> - punct!("]") >> - (Attribute { - style: AttrStyle::Inner, - value: meta_item, - is_sugared_doc: false, - }) - ) - | - do_parse!( - punct!("//!") >> - content: take_until!("\n") >> - (Attribute { - style: AttrStyle::Inner, - value: MetaItem::NameValue( - "doc".into(), - format!("//!{}", content).into(), - ), - is_sugared_doc: true, - }) - ) - | - do_parse!( - option!(whitespace) >> - peek!(tag!("/*!")) >> - com: block_comment >> - (Attribute { - style: AttrStyle::Inner, - value: MetaItem::NameValue( - "doc".into(), - com.into(), - ), - is_sugared_doc: true, - }) - ) - )); - - named!(pub outer_attr -> Attribute, alt!( - do_parse!( - punct!("#") >> - punct!("[") >> - meta_item: meta_item >> - punct!("]") >> - (Attribute { - style: AttrStyle::Outer, - value: meta_item, - is_sugared_doc: false, - }) - ) - | - do_parse!( - punct!("///") >> - not!(tag!("/")) >> - content: take_until!("\n") >> - (Attribute { - style: AttrStyle::Outer, - value: MetaItem::NameValue( - "doc".into(), - format!("///{}", 
content).into(), - ), - is_sugared_doc: true, - }) - ) - | - do_parse!( - option!(whitespace) >> - peek!(tuple!(tag!("/**"), not!(tag!("*")))) >> - com: block_comment >> - (Attribute { - style: AttrStyle::Outer, - value: MetaItem::NameValue( - "doc".into(), - com.into(), - ), - is_sugared_doc: true, - }) - ) - )); - - named!(meta_item -> MetaItem, alt!( - do_parse!( - id: ident >> - punct!("(") >> - inner: terminated_list!(punct!(","), nested_meta_item) >> - punct!(")") >> - (MetaItem::List(id, inner)) - ) - | - do_parse!( - name: ident >> - punct!("=") >> - value: lit >> - (MetaItem::NameValue(name, value)) - ) - | - map!(ident, MetaItem::Word) - )); - - named!(nested_meta_item -> NestedMetaItem, alt!( - meta_item => { NestedMetaItem::MetaItem } - | - lit => { NestedMetaItem::Literal } - )); -} - -#[cfg(feature = "printing")] -mod printing { - use super::*; - use lit::{Lit, StrStyle}; - use quote::{Tokens, ToTokens}; - - impl ToTokens for Attribute { - fn to_tokens(&self, tokens: &mut Tokens) { - if let Attribute { style, - value: MetaItem::NameValue(ref name, - Lit::Str(ref value, StrStyle::Cooked)), - is_sugared_doc: true } = *self { - if name == "doc" { - match style { - AttrStyle::Inner if value.starts_with("//!") => { - tokens.append(&format!("{}\n", value)); - return; - } - AttrStyle::Inner if value.starts_with("/*!") => { - tokens.append(value); - return; - } - AttrStyle::Outer if value.starts_with("///") => { - tokens.append(&format!("{}\n", value)); - return; - } - AttrStyle::Outer if value.starts_with("/**") => { - tokens.append(value); - return; - } - _ => {} - } - } - } - - tokens.append("#"); - if let AttrStyle::Inner = self.style { - tokens.append("!"); - } - tokens.append("["); - self.value.to_tokens(tokens); - tokens.append("]"); - } - } - - impl ToTokens for MetaItem { - fn to_tokens(&self, tokens: &mut Tokens) { - match *self { - MetaItem::Word(ref ident) => { - ident.to_tokens(tokens); - } - MetaItem::List(ref ident, ref inner) => { - 
ident.to_tokens(tokens); - tokens.append("("); - tokens.append_separated(inner, ","); - tokens.append(")"); - } - MetaItem::NameValue(ref name, ref value) => { - name.to_tokens(tokens); - tokens.append("="); - value.to_tokens(tokens); - } - } - } - } - - impl ToTokens for NestedMetaItem { - fn to_tokens(&self, tokens: &mut Tokens) { - match *self { - NestedMetaItem::MetaItem(ref nested) => { - nested.to_tokens(tokens); - } - NestedMetaItem::Literal(ref lit) => { - lit.to_tokens(tokens); - } - } - } - } -}
deleted file mode 100644 --- a/third_party/rust/syn-0.11.11/src/constant.rs +++ /dev/null @@ -1,180 +0,0 @@ -use super::*; - -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub enum ConstExpr { - /// A function call - /// - /// The first field resolves to the function itself, - /// and the second field is the list of arguments - Call(Box<ConstExpr>, Vec<ConstExpr>), - - /// A binary operation (For example: `a + b`, `a * b`) - Binary(BinOp, Box<ConstExpr>, Box<ConstExpr>), - - /// A unary operation (For example: `!x`, `*x`) - Unary(UnOp, Box<ConstExpr>), - - /// A literal (For example: `1`, `"foo"`) - Lit(Lit), - - /// A cast (`foo as f64`) - Cast(Box<ConstExpr>, Box<Ty>), - - /// Variable reference, possibly containing `::` and/or type - /// parameters, e.g. foo::bar::<baz>. - Path(Path), - - /// An indexing operation (`foo[2]`) - Index(Box<ConstExpr>, Box<ConstExpr>), - - /// No-op: used solely so we can pretty-print faithfully - Paren(Box<ConstExpr>), - - /// If compiling with full support for expression syntax, any expression is - /// allowed - Other(Other), -} - -#[cfg(not(feature = "full"))] -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct Other { - _private: (), -} - -#[cfg(feature = "full")] -pub type Other = Expr; - -#[cfg(feature = "parsing")] -pub mod parsing { - use super::*; - use {BinOp, Ty}; - use lit::parsing::lit; - use op::parsing::{binop, unop}; - use ty::parsing::{path, ty}; - - named!(pub const_expr -> ConstExpr, do_parse!( - mut e: alt!( - expr_unary - | - expr_lit - | - expr_path - | - expr_paren - // Cannot handle ConstExpr::Other here because for example - // `[u32; n!()]` would end up successfully parsing `n` as - // ConstExpr::Path and then fail to parse `!()`. Instead, callers - // are required to handle Other. See ty::parsing::array_len and - // data::parsing::discriminant. 
- ) >> - many0!(alt!( - tap!(args: and_call => { - e = ConstExpr::Call(Box::new(e), args); - }) - | - tap!(more: and_binary => { - let (op, other) = more; - e = ConstExpr::Binary(op, Box::new(e), Box::new(other)); - }) - | - tap!(ty: and_cast => { - e = ConstExpr::Cast(Box::new(e), Box::new(ty)); - }) - | - tap!(i: and_index => { - e = ConstExpr::Index(Box::new(e), Box::new(i)); - }) - )) >> - (e) - )); - - named!(and_call -> Vec<ConstExpr>, do_parse!( - punct!("(") >> - args: terminated_list!(punct!(","), const_expr) >> - punct!(")") >>