author | Sebastian Hengst <archaeopteryx@coole-files.de> |
date | Mon, 22 Oct 2018 20:26:16 +0300 |
changeset 498794 | 2872e7a3606d6108874930a1eb4062c74bad0e9e |
parent 498767 | af3fd0a2c2e61c263a1d252824dfdeb18162cf14 (current diff) |
parent 498793 | d0bb57076f0a08fd2f73e9d0d6728ebc91c231d0 (diff) |
child 498806 | 8c1121739072bf560b00acd482745de2c952b33a |
child 498873 | ae1d789f73409413b98a6f7e7d21d8974e8abd98 |
push id | 10290 |
push user | ffxbld-merge |
push date | Mon, 03 Dec 2018 16:23:23 +0000 |
treeherder | mozilla-beta@700bed2445e6 |
perfherder | talos / build metrics / platform microbench (compared to previous push) |
reviewers | merge |
milestone | 65.0a1 |
first release with | nightly linux32 / linux64 / mac / win32 / win64: 2872e7a3606d / 65.0a1 / 20181022220734 |
last release without | nightly linux32 / linux64 / mac / win32 / win64 |
releases | nightly linux32 / linux64 / mac / win32 / win64: 65.0a1 / 20181022220734 |
third_party/rust/clang-sys/ci/before_install.sh
third_party/rust/clang-sys/ci/script.sh
third_party/rust/nom/src/stream.rs
third_party/rust/nom/tests/cross_function_backtracking.rs
third_party/rust/nom/tests/omnom.rs
--- a/Cargo.lock +++ b/Cargo.lock @@ -155,17 +155,17 @@ dependencies = [ "cc 1.0.23 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "baldrdash" version = "0.1.0" dependencies = [ - "bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bindgen 0.43.0 (registry+https://github.com/rust-lang/crates.io-index)", "cranelift-codegen 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", "cranelift-wasm 0.20.1 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "target-lexicon 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -209,23 +209,23 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "bindgen" -version = "0.39.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "cexpr 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "clang-sys 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)", + "clang-sys 0.26.1 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -331,20 +331,20 @@ dependencies = [ [[package]] name = "cc" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "cexpr" -version = "0.2.3" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "cfg-if" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -354,17 +354,17 @@ source = "registry+https://github.com/ru dependencies = [ "num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "clang-sys" -version = "0.23.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", "libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1203,17 +1203,17 @@ dependencies = [ name = "itoa" version = 
"0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "js" version = "0.1.4" dependencies = [ - "bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bindgen 0.43.0 (registry+https://github.com/rust-lang/crates.io-index)", "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "mozjs_sys 0.0.0", "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1227,17 +1227,17 @@ dependencies = [ "mozilla-central-workspace-hack 0.1.0", ] [[package]] name = "jsrust_shared" version = "0.1.0" dependencies = [ "baldrdash 0.1.0", - "bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bindgen 0.43.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "kernel32-sys" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1684,16 +1684,24 @@ source = "registry+https://github.com/ru name = "nom" version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] +name = "nom" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] name = "nserror" version = "0.1.0" dependencies = [ "nsstring 0.1.0", ] [[package]] name = "nsstring" @@ -2354,17 +2362,17 @@ source = "registry+https://github.com/ru [[package]] name = "style" version = "0.0.1" dependencies = [ "app_units 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bindgen 0.43.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "cssparser 0.24.1 (registry+https://github.com/rust-lang/crates.io-index)", "euclid 0.19.0 (registry+https://github.com/rust-lang/crates.io-index)", "fallible 0.0.1", "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "hashglobe 0.1.0", @@ -3135,36 +3143,36 @@ dependencies = [ "checksum ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b385d69402821a1c254533a011a312531cbcc0e3e24f19bbb4747a5a2daf37e2" "checksum atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2dcb6e6d35f20276943cc04bb98e538b348d525a04ac79c10021561d202f21" "checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652" "checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = 
"89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a" "checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0" "checksum base64 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "85415d2594767338a74a30c1d370b2f3262ec1b4ed2d7bba5b3faf4de40467d9" "checksum binary-space-partition 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "88ceb0d16c4fd0e42876e298d7d3ce3780dd9ebdcbe4199816a32c77e08597ff" "checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0" -"checksum bindgen 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eac4ed5f2de9efc3c87cb722468fa49d0763e98f999d539bfc5e452c13d85c91" +"checksum bindgen 0.43.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41df015ccbc22b038641bd84d0aeeff01e0a4c0714ed35ed0e9a3dd8ad8d732" "checksum binjs_meta 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "430239e4551e42b80fa5d92322ac80ea38c9dda56e5d5582e057e2288352b71a" "checksum bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6f1efcc46c18245a69c38fcc5cc650f16d3a59d034f3106e9ed63748f695730a" "checksum bit-vec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4440d5cb623bb7390ae27fec0bb6c61111969860f8e3ae198bfa0663645e67cf" "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum bitreader 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "80b13e2ab064ff3aa0bdbf1eff533f9822dc37899821f5f98c67f263eab51707" "checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400" "checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" "checksum boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8380105befe91099e6f69206164072c05bc92427ff6aa8a5171388317346dd75" "checksum build_const 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e90dc84f5e62d2ebe7676b83c22d33b6db8bd27340fb6ffbff0a364efa0cb9c9" "checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40" "checksum byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "652805b7e73fada9d85e9a6682a4abd490cb52d96aeecc12e33a0de34dfd0d23" "checksum bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e178b8e0e239e844b083d5a0d4a156b2654e67f9f80144d48398fcd736a24fb8" "checksum bzip2 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3eafc42c44e0d827de6b1c131175098fe7fb53b8ce8a47e65cb3ea94688be24" "checksum bzip2-sys 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2c5162604199bbb17690ede847eaa6120a3f33d5ab4dcc8e7c25b16d849ae79b" "checksum cc 1.0.23 (registry+https://github.com/rust-lang/crates.io-index)" = "c37f0efaa4b9b001fa6f02d4b644dee4af97d3414df07c51e3e4f015f3a3e131" -"checksum cexpr 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "42aac45e9567d97474a834efdee3081b3c942b2205be932092f53354ce503d6c" +"checksum cexpr 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = 
"8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b" "checksum cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878" -"checksum clang-sys 0.23.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d7f7c04e52c35222fffcc3a115b5daf5f7e2bfb71c13c4e2321afe1fc71859c2" +"checksum clang-sys 0.26.1 (registry+https://github.com/rust-lang/crates.io-index)" = "481e42017c1416b1c0856ece45658ecbb7c93d8a93455f7e5fa77f3b35455557" "checksum clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f0f16b89cbb9ee36d87483dc939fe9f1e13c05898d56d7b230a0d4dff033a536" "checksum cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)" = "56d741ea7a69e577f6d06b36b7dff4738f680593dc27a701ffa8506b73ce28bb" "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" "checksum cookie 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1465f8134efa296b4c19db34d909637cb2bf0f7aaf21299e23e18fa29ac557cf" "checksum core-foundation 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7caa6cb9e76ddddbea09a03266d6b3bc98cd41e9fb9b017c473e7cca593ec25" "checksum core-foundation-sys 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b2a53cce0ddcf7e7e1f998738d757d5a3bf08bf799a180e50ebe50d298f52f5a" "checksum core-graphics 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)" = "62ceafe1622ffc9a332199096841d0ff9912ec8cf8f9cde01e254a7d5217cd10" "checksum core-text 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f3f46450d6f2397261af420b4ccce23807add2e45fa206410a03d66fb7f050ae" @@ -3272,16 +3280,17 @@ dependencies = [ "checksum moz_cbor 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20c82a57087fd5990d7122dbff1607c3b20c3d2958e9d9ad9765aab415e2c91c" "checksum mp4parse_fallible 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6626c2aef76eb8f984eef02e475883d3fe9112e114720446c5810fc5f045cd30" "checksum msdos_time 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729" "checksum net2 0.2.32 (registry+https://github.com/rust-lang/crates.io-index)" = "9044faf1413a1057267be51b5afba8eb1090bd2231c693664aa1db716fe1eae0" "checksum new-ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8ccbebba6fb53a6d2bdcfaf79cb339bc136dee3bfff54dc337a334bafe36476a" "checksum new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0cdc457076c78ab54d5e0d6fa7c47981757f1e34dc39ff92787f217dede586c4" "checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" "checksum nom 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05aec50c70fd288702bcd93284a8444607f3292dbdf2a30de5ea5dcdbe72287b" +"checksum nom 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9c349f68f25f596b9f44cf0e7c69752a5c633b0550c3ff849518bfba0233774a" "checksum num-derive 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d2c31b75c36a993d30c7a13d70513cb93f02acafdd5b7ba250f9b0e18615de7" "checksum num-integer 0.1.39 
(registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" "checksum num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31" "checksum num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3a5d7cc97d6d30d8b9bc8fa19bf45349ffe46241e8816f50f62f6d6aaabee1" "checksum num_cpus 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "514f0d73e64be53ff320680ca671b64fe3fb91da01e1ae2ddc99eb51d453b20d" "checksum object 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6cca6ad89d0801138cb4ef606908ae12d83edc4c790ef5178fc7b4c72d959e90" "checksum ordered-float 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2f0015e9e8e28ee20c581cfbfe47c650cedeb9ed0721090e0b7ebb10b9cdbcc2" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
--- a/accessible/base/NotificationController.cpp
+++ b/accessible/base/NotificationController.cpp
@@ -434,21 +434,20 @@ NotificationController::ScheduleContentI
   }
 
   AutoTArray<nsCOMPtr<nsIContent>, 10> list;
   for (nsIContent* node = aStartChildNode; node != aEndChildNode;
        node = node->GetNextSibling()) {
     MOZ_ASSERT(parent == node->GetFlattenedTreeParentNode());
 
     // Notification triggers for content insertion even if no content was
-    // actually inserted, check if the given content has a frame to discard
+    // actually inserted (like if the content is display: none). Try to catch
     // this case early.
-    //
-    // TODO(emilio): Should this handle display: contents?
-    if (node->GetPrimaryFrame()) {
+    if (node->GetPrimaryFrame() ||
+        (node->IsElement() && node->AsElement()->IsDisplayContents())) {
       list.AppendElement(node);
     }
   }
 
   if (!list.IsEmpty()) {
     mContentInsertions.LookupOrAdd(container)->AppendElements(list);
     ScheduleProcessing();
   }
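For orientation, a minimal standalone sketch of the check this hunk introduces: an inserted node is kept for accessible-tree processing if it either has a primary frame or is a display: contents element, which has no frame of its own but whose descendants still render. The Node type and its flags below are hypothetical stand-ins, not the Gecko nsIContent API.

#include <cstdio>

// Hypothetical stand-in for an inserted content node; not the Gecko classes.
struct Node {
  bool hasPrimaryFrame;    // true when layout created a frame for the node
  bool isElement;
  bool isDisplayContents;  // display: contents boxes have no frame of their own
};

// Mirrors the hunk's condition: keep the node if it has a frame, or if it is a
// display: contents element whose children will still be rendered.
bool ShouldScheduleInsertion(const Node& node) {
  return node.hasPrimaryFrame || (node.isElement && node.isDisplayContents);
}

int main() {
  Node displayNone{false, true, false};     // still filtered out early
  Node displayContents{false, true, true};  // now kept (bug 1500416)
  std::printf("%d %d\n", ShouldScheduleInsertion(displayNone),
              ShouldScheduleInsertion(displayContents));
  return 0;
}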
--- a/accessible/tests/mochitest/treeupdate/test_general.html +++ b/accessible/tests/mochitest/treeupdate/test_general.html @@ -95,30 +95,65 @@ testAccessibleTree("c3", accTree); }; this.getID = function insertInaccessibleAccessibleSiblings_getID() { return "insert inaccessible and then accessible siblings"; }; } + // Test for bug 1500416. + function displayContentsInsertion() { + this.eventSeq = [ + new invokerChecker(EVENT_REORDER, "c4"), + ]; + + this.invoke = function displayContentsInsertion_invoke() { + document.body.offsetTop; // Flush layout. + + let list = document.createElement("ul"); + list.style.display = "contents"; + list.appendChild(document.createElement("li")); + list.firstChild.appendChild(document.createTextNode("Text")); + getNode("c4").appendChild(list); + }; + + this.finalCheck = function displayContentsInsertion_finalCheck() { + var accTree = + { SECTION: [ // container + { LIST: [ + { LISTITEM: [ + { STATICTEXT: [] }, + { TEXT_LEAF: [] }, + ] }, + ] }, + ] }; + testAccessibleTree("c4", accTree); + }; + + this.getID = function displayContentsInsertion_getID() { + return "insert accessible display: contents element."; + }; + } + // ////////////////////////////////////////////////////////////////////////// // Do tests // ////////////////////////////////////////////////////////////////////////// var gQueue = null; // gA11yEventDumpID = "eventdump"; // debug stuff // gA11yEventDumpToConsole = true; function doTests() { gQueue = new eventQueue(); gQueue.push(new prependAppend("c1")); gQueue.push(new removeRemove("c2")); gQueue.push(new insertInaccessibleAccessibleSiblings()); + gQueue.push(new displayContentsInsertion()); gQueue.invoke(); // Will call SimpleTest.finish(); } SimpleTest.waitForExplicitFinish(); addA11yLoadEvent(doTests); </script> </head> @@ -128,10 +163,11 @@ <div id="content" style="display: none"></div> <pre id="test"> </pre> <div id="c1"><input></div> <div id="c2"><span><input type="checkbox"><input></span><input type="button"></div> <div id="c3"><input type="button" value="button"></div> + <div id="c4"></div> </body> </html>
--- a/browser/components/customizableui/test/browser.ini
+++ b/browser/components/customizableui/test/browser.ini
@@ -166,17 +166,17 @@ tags = fullscreen
 [browser_panelUINotifications_multiWindow.js]
 [browser_remove_customized_specials.js]
 [browser_switch_to_customize_mode.js]
 [browser_synced_tabs_menu.js]
 [browser_backfwd_enabled_post_customize.js]
 [browser_check_tooltips_in_navbar.js]
 [browser_editcontrols_update.js]
 subsuite = clipboard
-skip-if = verify && !debug && os == 'mac'
+skip-if = (verify && !debug && os == 'mac') || (os == 'mac') # Bug 1458046
 [browser_customization_context_menus.js]
 [browser_newtab_button_customizemode.js]
 [browser_open_from_popup.js]
 [browser_sidebar_toggle.js]
 skip-if = verify
 [browser_remote_tabs_button.js]
 skip-if = (verify && debug && (os == 'linux' || os == 'mac'))
 [browser_widget_animation.js]
--- a/devtools/client/responsive.html/test/browser/browser.ini
+++ b/devtools/client/responsive.html/test/browser/browser.ini
@@ -55,15 +55,16 @@ skip-if = true # Bug 1413765
 [browser_state_restore.js]
 [browser_tab_close.js]
 [browser_tab_remoteness_change.js]
 [browser_target_blank.js]
 [browser_telemetry_activate_rdm.js]
 [browser_toolbox_computed_view.js]
 [browser_toolbox_rule_view.js]
 [browser_toolbox_rule_view_reload.js]
+skip-if = os == "linux" || os == "mac" # Bug 1498336
 [browser_toolbox_swap_browsers.js]
 [browser_toolbox_swap_inspector.js]
 [browser_touch_device.js]
 [browser_touch_simulation.js]
 [browser_user_agent_input.js]
 [browser_viewport_basics.js]
 [browser_window_close.js]
--- a/dom/quota/ActorsParent.cpp +++ b/dom/quota/ActorsParent.cpp @@ -78,17 +78,17 @@ #if DISABLE_ASSERTS_FOR_FUZZING #define ASSERT_UNLESS_FUZZING(...) do { } while (0) #else #define ASSERT_UNLESS_FUZZING(...) MOZ_ASSERT(false, __VA_ARGS__) #endif #define UNKNOWN_FILE_WARNING(_leafName) \ QM_WARNING("Something (%s) in the directory that doesn't belong!", \ - NS_ConvertUTF16toUTF8(leafName).get()) + NS_ConvertUTF16toUTF8(_leafName).get()) // The amount of time, in milliseconds, that our IO thread will stay alive // after the last event it processes. #define DEFAULT_THREAD_TIMEOUT_MS 30000 // The amount of time, in milliseconds, that we will wait for active storage // transactions on shutdown before aborting them. #define DEFAULT_SHUTDOWN_TIMER_MS 30000 @@ -1595,17 +1595,17 @@ mozilla::Atomic<bool> gShutdown(false); // Constants for temporary storage limit computing. static const int32_t kDefaultFixedLimitKB = -1; static const uint32_t kDefaultChunkSizeKB = 10 * 1024; int32_t gFixedLimitKB = kDefaultFixedLimitKB; uint32_t gChunkSizeKB = kDefaultChunkSizeKB; bool gTestingEnabled = false; -class StorageDirectoryHelper +class StorageOperationBase : public Runnable { mozilla::Mutex mMutex; mozilla::CondVar mCondVar; nsresult mMainThreadResultCode; bool mWaiting; protected: @@ -1613,30 +1613,30 @@ protected: nsTArray<OriginProps> mOriginProps; nsCOMPtr<nsIFile> mDirectory; const bool mPersistent; public: - StorageDirectoryHelper(nsIFile* aDirectory, bool aPersistent) - : Runnable("dom::quota::StorageDirectoryHelper") - , mMutex("StorageDirectoryHelper::mMutex") - , mCondVar(mMutex, "StorageDirectoryHelper::mCondVar") + StorageOperationBase(nsIFile* aDirectory, bool aPersistent) + : Runnable("dom::quota::StorageOperationBase") + , mMutex("StorageOperationBase::mMutex") + , mCondVar(mMutex, "StorageOperationBase::mCondVar") , mMainThreadResultCode(NS_OK) , mWaiting(true) , mDirectory(aDirectory) , mPersistent(aPersistent) { AssertIsOnIOThread(); } protected: - ~StorageDirectoryHelper() + virtual ~StorageOperationBase() { } nsresult GetDirectoryMetadata(nsIFile* aDirectory, int64_t& aTimestamp, nsACString& aGroup, nsACString& aOrigin, Nullable<bool>& aIsApp); @@ -1667,17 +1667,17 @@ protected: private: nsresult RunOnMainThread(); NS_IMETHOD Run() override; }; -struct StorageDirectoryHelper::OriginProps +struct StorageOperationBase::OriginProps { enum Type { eChrome, eContent, eObsolete }; @@ -1796,113 +1796,133 @@ private: void HandleToken(const nsDependentCSubstring& aToken); void HandleTrailingSeparator(); }; +class RepositoryOperationBase + : public StorageOperationBase +{ +public: + RepositoryOperationBase(nsIFile* aDirectory, + bool aPersistent) + : StorageOperationBase(aDirectory, aPersistent) + { } + + nsresult + ProcessRepository(); + +protected: + virtual ~RepositoryOperationBase() + { } + + template<typename UpgradeMethod> + nsresult + MaybeUpgradeClients(const OriginProps& aOriginsProps, + UpgradeMethod aMethod); + +private: + virtual nsresult + PrepareOriginDirectory(OriginProps& aOriginProps, bool* aRemoved) = 0; +}; + class CreateOrUpgradeDirectoryMetadataHelper final - : public StorageDirectoryHelper + : public RepositoryOperationBase { nsCOMPtr<nsIFile> mPermanentStorageDir; public: CreateOrUpgradeDirectoryMetadataHelper(nsIFile* aDirectory, bool aPersistent) - : StorageDirectoryHelper(aDirectory, aPersistent) + : RepositoryOperationBase(aDirectory, aPersistent) { } - nsresult - CreateOrUpgradeMetadataFiles(); - private: nsresult MaybeUpgradeOriginDirectory(nsIFile* aDirectory); 
nsresult + PrepareOriginDirectory(OriginProps& aOriginProps, bool* aRemoved) override; + + nsresult ProcessOriginDirectory(const OriginProps& aOriginProps) override; }; class UpgradeStorageFrom0_0To1_0Helper final - : public StorageDirectoryHelper + : public RepositoryOperationBase { public: UpgradeStorageFrom0_0To1_0Helper(nsIFile* aDirectory, bool aPersistent) - : StorageDirectoryHelper(aDirectory, aPersistent) + : RepositoryOperationBase(aDirectory, aPersistent) { } - nsresult - DoUpgrade(); - private: nsresult + PrepareOriginDirectory(OriginProps& aOriginProps, bool* aRemoved) override; + + nsresult ProcessOriginDirectory(const OriginProps& aOriginProps) override; }; class UpgradeStorageFrom1_0To2_0Helper final - : public StorageDirectoryHelper + : public RepositoryOperationBase { public: UpgradeStorageFrom1_0To2_0Helper(nsIFile* aDirectory, bool aPersistent) - : StorageDirectoryHelper(aDirectory, aPersistent) + : RepositoryOperationBase(aDirectory, aPersistent) { } - nsresult - DoUpgrade(); - private: nsresult - MaybeUpgradeClients(const OriginProps& aOriginProps); + MaybeRemoveMorgueDirectory(const OriginProps& aOriginProps); nsresult MaybeRemoveAppsData(const OriginProps& aOriginProps, bool* aRemoved); nsresult MaybeStripObsoleteOriginAttributes(const OriginProps& aOriginProps, bool* aStripped); nsresult + PrepareOriginDirectory(OriginProps& aOriginProps, bool* aRemoved) override; + + nsresult ProcessOriginDirectory(const OriginProps& aOriginProps) override; }; -// XXXtt: The following class is duplicated from -// UpgradeStorageFrom1_0To2_0Helper and it should be extracted out in -// bug 1395102. class UpgradeStorageFrom2_0To2_1Helper final - : public StorageDirectoryHelper + : public RepositoryOperationBase { public: UpgradeStorageFrom2_0To2_1Helper(nsIFile* aDirectory, bool aPersistent) - : StorageDirectoryHelper(aDirectory, aPersistent) + : RepositoryOperationBase(aDirectory, aPersistent) { } - nsresult - DoUpgrade(); - private: nsresult - MaybeUpgradeClients(const OriginProps& aOriginProps); + PrepareOriginDirectory(OriginProps& aOriginProps, bool* aRemoved) override; nsresult ProcessOriginDirectory(const OriginProps& aOriginProps) override; }; class RestoreDirectoryMetadata2Helper final - : public StorageDirectoryHelper + : public StorageOperationBase { public: RestoreDirectoryMetadata2Helper(nsIFile* aDirectory, bool aPersistent) - : StorageDirectoryHelper(aDirectory, aPersistent) + : StorageOperationBase(aDirectory, aPersistent) { } nsresult RestoreMetadata2File(); private: nsresult ProcessOriginDirectory(const OriginProps& aOriginProps) override; @@ -4461,17 +4481,17 @@ QuotaManager::MaybeUpgradePersistentStor return NS_OK; } // Create real metadata files for origin directories in persistent storage. RefPtr<CreateOrUpgradeDirectoryMetadataHelper> helper = new CreateOrUpgradeDirectoryMetadataHelper(persistentStorageDir, /* aPersistent */ true); - rv = helper->CreateOrUpgradeMetadataFiles(); + rv = helper->ProcessRepository(); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } // Upgrade metadata files for origin directories in temporary storage. 
nsCOMPtr<nsIFile> temporaryStorageDir; rv = NS_NewLocalFile(mTemporaryStoragePath, false, getter_AddRefs(temporaryStorageDir)); @@ -4494,17 +4514,17 @@ QuotaManager::MaybeUpgradePersistentStor NS_WARNING("temporary entry is not a directory!"); return NS_OK; } helper = new CreateOrUpgradeDirectoryMetadataHelper(temporaryStorageDir, /* aPersistent */ false); - rv = helper->CreateOrUpgradeMetadataFiles(); + rv = helper->ProcessRepository(); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } } // And finally rename persistent to default. rv = persistentStorageDir->RenameTo(nullptr, NS_LITERAL_STRING(DEFAULT_DIRECTORY_NAME)); if (NS_WARN_IF(NS_FAILED(rv))) { @@ -4565,16 +4585,75 @@ QuotaManager::MaybeRemoveOldDirectories( if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } } return NS_OK; } +template<typename Helper> +nsresult +QuotaManager::UpgradeStorage(const int32_t aOldVersion, + const int32_t aNewVersion, + mozIStorageConnection* aConnection) +{ + AssertIsOnIOThread(); + MOZ_ASSERT(aNewVersion > aOldVersion); + MOZ_ASSERT(aNewVersion <= kStorageVersion); + MOZ_ASSERT(aConnection); + + nsresult rv; + + for (const PersistenceType persistenceType : kAllPersistenceTypes) { + nsCOMPtr<nsIFile> directory; + rv = NS_NewLocalFile(GetStoragePath(persistenceType), false, + getter_AddRefs(directory)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + bool exists; + rv = directory->Exists(&exists); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + if (!exists) { + continue; + } + + bool persistent = persistenceType == PERSISTENCE_TYPE_PERSISTENT; + RefPtr<RepositoryOperationBase> helper = new Helper(directory, persistent); + rv = helper->ProcessRepository(); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + } + +#ifdef DEBUG + { + int32_t storageVersion; + rv = aConnection->GetSchemaVersion(&storageVersion); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + MOZ_ASSERT(storageVersion == aOldVersion); + } +#endif + + rv = aConnection->SetSchemaVersion(aNewVersion); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + return NS_OK; +} + nsresult QuotaManager::UpgradeStorageFrom0_0To1_0(mozIStorageConnection* aConnection) { AssertIsOnIOThread(); MOZ_ASSERT(aConnection); nsresult rv = MaybeUpgradeIndexedDBDirectory(); if (NS_WARN_IF(NS_FAILED(rv))) { @@ -4586,47 +4665,20 @@ QuotaManager::UpgradeStorageFrom0_0To1_0 return rv; } rv = MaybeRemoveOldDirectories(); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } - for (const PersistenceType persistenceType : kAllPersistenceTypes) { - nsCOMPtr<nsIFile> directory; - rv = NS_NewLocalFile(GetStoragePath(persistenceType), false, - getter_AddRefs(directory)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - bool persistent = persistenceType == PERSISTENCE_TYPE_PERSISTENT; - RefPtr<UpgradeStorageFrom0_0To1_0Helper> helper = - new UpgradeStorageFrom0_0To1_0Helper(directory, persistent); - - rv = helper->DoUpgrade(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - -#ifdef DEBUG - { - int32_t storageVersion; - rv = aConnection->GetSchemaVersion(&storageVersion); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - MOZ_ASSERT(storageVersion == 0); - } -#endif - - rv = aConnection->SetSchemaVersion(MakeStorageVersion(1, 0)); + rv = + UpgradeStorage<UpgradeStorageFrom0_0To1_0Helper>(0, + MakeStorageVersion(1, 0), + aConnection); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } return NS_OK; } nsresult @@ -4697,118 +4749,40 @@ QuotaManager::UpgradeStorageFrom1_0To2_0 // [Downgrade-incompatible changes]: // File manager directories with the 
".files" suffix prevent older versions of // Firefox from initializing and using the storage. // File manager directories without the ".files" suffix can appear if user // runs an already upgraded profile in an older version of Firefox. File // manager directories without the ".files" suffix then prevent current // Firefox from initializing and using the storage. - nsresult rv; - - for (const PersistenceType persistenceType : kAllPersistenceTypes) { - nsCOMPtr<nsIFile> directory; - rv = NS_NewLocalFile(GetStoragePath(persistenceType), false, - getter_AddRefs(directory)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - bool exists; - rv = directory->Exists(&exists); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!exists) { - continue; - } - - bool persistent = persistenceType == PERSISTENCE_TYPE_PERSISTENT; - RefPtr<UpgradeStorageFrom1_0To2_0Helper> helper = - new UpgradeStorageFrom1_0To2_0Helper(directory, persistent); - - rv = helper->DoUpgrade(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - -#ifdef DEBUG - { - int32_t storageVersion; - rv = aConnection->GetSchemaVersion(&storageVersion); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - MOZ_ASSERT(storageVersion == MakeStorageVersion(1, 0)); - } -#endif - - rv = aConnection->SetSchemaVersion(MakeStorageVersion(2, 0)); + nsresult rv = + UpgradeStorage<UpgradeStorageFrom1_0To2_0Helper>(MakeStorageVersion(1, 0), + MakeStorageVersion(2, 0), + aConnection); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } return NS_OK; } nsresult QuotaManager::UpgradeStorageFrom2_0To2_1(mozIStorageConnection* aConnection) { AssertIsOnIOThread(); MOZ_ASSERT(aConnection); // The upgrade is mainly to create a directory padding file in DOM Cache // directory to record the overall padding size of an origin. - nsresult rv; - - for (const PersistenceType persistenceType : kAllPersistenceTypes) { - nsCOMPtr<nsIFile> directory; - rv = NS_NewLocalFile(GetStoragePath(persistenceType), false, - getter_AddRefs(directory)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - bool exists; - rv = directory->Exists(&exists); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!exists) { - continue; - } - - bool persistent = persistenceType == PERSISTENCE_TYPE_PERSISTENT; - RefPtr<UpgradeStorageFrom2_0To2_1Helper> helper = - new UpgradeStorageFrom2_0To2_1Helper(directory, persistent); - - rv = helper->DoUpgrade(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - -#ifdef DEBUG - { - int32_t storageVersion; - rv = aConnection->GetSchemaVersion(&storageVersion); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - MOZ_ASSERT(storageVersion == MakeStorageVersion(2, 0)); - } -#endif - - rv = aConnection->SetSchemaVersion(MakeStorageVersion(2, 1)); + nsresult rv = + UpgradeStorage<UpgradeStorageFrom2_0To2_1Helper>(MakeStorageVersion(2, 0), + MakeStorageVersion(2, 1), + aConnection); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } return NS_OK; } nsresult @@ -5730,16 +5704,18 @@ QuotaManager::GetInfoForChrome(nsACStrin ChromeOrigin(*aOrigin); } } // static bool QuotaManager::IsOriginInternal(const nsACString& aOrigin) { + MOZ_ASSERT(!aOrigin.IsEmpty()); + // The first prompt is not required for these origins. 
if (aOrigin.EqualsLiteral(kChromeOrigin) || StringBeginsWith(aOrigin, nsDependentCString(kAboutHomeOriginPrefix)) || StringBeginsWith(aOrigin, nsDependentCString(kIndexedDBOriginPrefix)) || StringBeginsWith(aOrigin, nsDependentCString(kResourceOriginPrefix))) { return true; } @@ -8100,21 +8076,21 @@ void PersistOp::GetResponse(RequestResponse& aResponse) { AssertIsOnOwningThread(); aResponse = PersistResponse(); } nsresult -StorageDirectoryHelper::GetDirectoryMetadata(nsIFile* aDirectory, - int64_t& aTimestamp, - nsACString& aGroup, - nsACString& aOrigin, - Nullable<bool>& aIsApp) +StorageOperationBase::GetDirectoryMetadata(nsIFile* aDirectory, + int64_t& aTimestamp, + nsACString& aGroup, + nsACString& aOrigin, + Nullable<bool>& aIsApp) { AssertIsOnIOThread(); MOZ_ASSERT(aDirectory); nsCOMPtr<nsIBinaryInputStream> binaryStream; nsresult rv = GetBinaryInputStream(aDirectory, NS_LITERAL_STRING(METADATA_FILE_NAME), getter_AddRefs(binaryStream)); @@ -8149,22 +8125,22 @@ StorageDirectoryHelper::GetDirectoryMeta aTimestamp = timestamp; aGroup = group; aOrigin = origin; aIsApp = std::move(isApp); return NS_OK; } nsresult -StorageDirectoryHelper::GetDirectoryMetadata2(nsIFile* aDirectory, - int64_t& aTimestamp, - nsACString& aSuffix, - nsACString& aGroup, - nsACString& aOrigin, - bool& aIsApp) +StorageOperationBase::GetDirectoryMetadata2(nsIFile* aDirectory, + int64_t& aTimestamp, + nsACString& aSuffix, + nsACString& aGroup, + nsACString& aOrigin, + bool& aIsApp) { AssertIsOnIOThread(); MOZ_ASSERT(aDirectory); nsCOMPtr<nsIBinaryInputStream> binaryStream; nsresult rv = GetBinaryInputStream(aDirectory, NS_LITERAL_STRING(METADATA_V2_FILE_NAME), getter_AddRefs(binaryStream)); @@ -8224,34 +8200,34 @@ StorageDirectoryHelper::GetDirectoryMeta aSuffix = suffix; aGroup = group; aOrigin = origin; aIsApp = isApp; return NS_OK; } nsresult -StorageDirectoryHelper::RemoveObsoleteOrigin(const OriginProps& aOriginProps) +StorageOperationBase::RemoveObsoleteOrigin(const OriginProps& aOriginProps) { AssertIsOnIOThread(); MOZ_ASSERT(aOriginProps.mDirectory); QM_WARNING("Deleting obsolete %s directory that is no longer a legal " "origin!", NS_ConvertUTF16toUTF8(aOriginProps.mLeafName).get()); nsresult rv = aOriginProps.mDirectory->Remove(/* recursive */ true); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } return NS_OK; } nsresult -StorageDirectoryHelper::ProcessOriginDirectories() +StorageOperationBase::ProcessOriginDirectories() { AssertIsOnIOThread(); MOZ_ASSERT(!mOriginProps.IsEmpty()); MOZ_ALWAYS_SUCCEEDS(NS_DispatchToMainThread(this)); { mozilla::MutexAutoLock autolock(mMutex); @@ -8291,17 +8267,17 @@ StorageDirectoryHelper::ProcessOriginDir return rv; } } return NS_OK; } nsresult -StorageDirectoryHelper::RunOnMainThread() +StorageOperationBase::RunOnMainThread() { MOZ_ASSERT(NS_IsMainThread()); MOZ_ASSERT(!mOriginProps.IsEmpty()); nsresult rv; for (uint32_t count = mOriginProps.Length(), index = 0; index < count; @@ -8349,17 +8325,17 @@ StorageDirectoryHelper::RunOnMainThread( MOZ_CRASH("Bad type!"); } } return NS_OK; } NS_IMETHODIMP -StorageDirectoryHelper::Run() +StorageOperationBase::Run() { MOZ_ASSERT(NS_IsMainThread()); nsresult rv = RunOnMainThread(); if (NS_WARN_IF(NS_FAILED(rv))) { mMainThreadResultCode = rv; } @@ -8368,33 +8344,44 @@ StorageDirectoryHelper::Run() mWaiting = false; mCondVar.Notify(); return NS_OK; } nsresult -StorageDirectoryHelper:: -OriginProps::Init(nsIFile* aDirectory) +StorageOperationBase::OriginProps::Init(nsIFile* aDirectory) { AssertIsOnIOThread(); MOZ_ASSERT(aDirectory); 
nsString leafName; nsresult rv = aDirectory->GetLeafName(leafName); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } if (leafName.EqualsLiteral(kChromeOrigin)) { + // XXX We can remove this special handling once origin parser supports it + // directly. mDirectory = aDirectory; mLeafName = leafName; mSpec = kChromeOrigin; mType = eChrome; + } else if (leafName.EqualsLiteral("moz-safe-about+++home")) { + // XXX We can remove this special handling once origin parser supports it + // directly. + + // This directory was accidentally created by a buggy nightly and can be + // safely removed. + + mDirectory = aDirectory; + mLeafName = leafName; + mType = eObsolete; } else { nsCString spec; OriginAttributes attrs; OriginParser::ResultType result = OriginParser::ParseOrigin(NS_ConvertUTF16toUTF8(leafName), spec, &attrs); if (NS_WARN_IF(result == OriginParser::InvalidOrigin)) { return NS_ERROR_FAILURE; } @@ -8788,106 +8775,76 @@ OriginParser::HandleTrailingSeparator() MOZ_ASSERT(mSchemeType == eFile); mPathnameComponents.AppendElement(EmptyCString()); mState = eHandledTrailingSeparator; } nsresult -CreateOrUpgradeDirectoryMetadataHelper::CreateOrUpgradeMetadataFiles() +RepositoryOperationBase::ProcessRepository() { AssertIsOnIOThread(); - bool exists; - nsresult rv = mDirectory->Exists(&exists); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!exists) { - return NS_OK; - } + DebugOnly<bool> exists; + MOZ_ASSERT(NS_SUCCEEDED(mDirectory->Exists(&exists))); + MOZ_ASSERT(exists); nsCOMPtr<nsIDirectoryEnumerator> entries; - rv = mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> originDir; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(originDir)))) && - originDir) { - nsString leafName; - rv = originDir->GetLeafName(leafName); + nsresult rv = mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + while (true) { + nsCOMPtr<nsIFile> originDir; + rv = entries->GetNextFile(getter_AddRefs(originDir)); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } + if (!originDir) { + break; + } + bool isDirectory; rv = originDir->IsDirectory(&isDirectory); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } - if (isDirectory) { - if (leafName.EqualsLiteral("moz-safe-about+++home")) { - // This directory was accidentally created by a buggy nightly and can - // be safely removed. - - QM_WARNING("Deleting accidental moz-safe-about+++home directory!"); - - rv = originDir->Remove(/* aRecursive */ true); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - continue; + if (!isDirectory) { + nsString leafName; + rv = originDir->GetLeafName(leafName); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; } - } else { + // Unknown files during upgrade are allowed. Just warn if we find them. 
if (!IsOSMetadata(leafName)) { UNKNOWN_FILE_WARNING(leafName); } continue; } - if (mPersistent) { - rv = MaybeUpgradeOriginDirectory(originDir); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - OriginProps originProps; rv = originProps.Init(originDir); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } - if (!mPersistent) { - int64_t timestamp; - nsCString group; - nsCString origin; - Nullable<bool> isApp; - rv = GetDirectoryMetadata(originDir, - timestamp, - group, - origin, - isApp); - if (NS_FAILED(rv)) { - originProps.mTimestamp = GetLastModifiedTime(originDir, mPersistent); - originProps.mNeedsRestore = true; - } else if (!isApp.IsNull()) { - originProps.mIgnore = true; + if (originProps.mType != OriginProps::eObsolete) { + bool removed; + rv = PrepareOriginDirectory(originProps, &removed); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; } - } - else { - bool persistent = QuotaManager::IsOriginInternal(originProps.mSpec); - originProps.mTimestamp = GetLastModifiedTime(originDir, persistent); + if (removed) { + continue; + } } mOriginProps.AppendElement(std::move(originProps)); } if (mOriginProps.IsEmpty()) { return NS_OK; } @@ -8895,16 +8852,86 @@ CreateOrUpgradeDirectoryMetadataHelper:: rv = ProcessOriginDirectories(); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } return NS_OK; } +template<typename UpgradeMethod> +nsresult +RepositoryOperationBase::MaybeUpgradeClients(const OriginProps& aOriginProps, + UpgradeMethod aMethod) +{ + AssertIsOnIOThread(); + MOZ_ASSERT(aOriginProps.mDirectory); + MOZ_ASSERT(aMethod); + + QuotaManager* quotaManager = QuotaManager::Get(); + MOZ_ASSERT(quotaManager); + + nsCOMPtr<nsIDirectoryEnumerator> entries; + nsresult rv = + aOriginProps.mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + while (true) { + nsCOMPtr<nsIFile> file; + rv = entries->GetNextFile(getter_AddRefs(file)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + if (!file) { + break; + } + + bool isDirectory; + rv = file->IsDirectory(&isDirectory); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + nsString leafName; + rv = file->GetLeafName(leafName); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + if (!isDirectory) { + // Unknown files during upgrade are allowed. Just warn if we find them. 
+ if (!IsOriginMetadata(leafName) && + !IsTempMetadata(leafName)) { + UNKNOWN_FILE_WARNING(leafName); + } + continue; + } + + Client::Type clientType; + rv = Client::TypeFromText(leafName, clientType); + if (NS_FAILED(rv)) { + UNKNOWN_FILE_WARNING(leafName); + continue; + } + + Client* client = quotaManager->GetClient(clientType); + MOZ_ASSERT(client); + + rv = (client->*aMethod)(file); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + } + + return NS_OK; +} + nsresult CreateOrUpgradeDirectoryMetadataHelper::MaybeUpgradeOriginDirectory( nsIFile* aDirectory) { AssertIsOnIOThread(); MOZ_ASSERT(aDirectory); nsCOMPtr<nsIFile> metadataFile; @@ -8993,16 +9020,59 @@ CreateOrUpgradeDirectoryMetadataHelper:: return rv; } } return NS_OK; } nsresult +CreateOrUpgradeDirectoryMetadataHelper::PrepareOriginDirectory( + OriginProps& aOriginProps, + bool* aRemoved) +{ + AssertIsOnIOThread(); + MOZ_ASSERT(aOriginProps.mDirectory); + MOZ_ASSERT(aRemoved); + + nsresult rv; + + if (mPersistent) { + rv = MaybeUpgradeOriginDirectory(aOriginProps.mDirectory); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + bool persistent = QuotaManager::IsOriginInternal(aOriginProps.mSpec); + aOriginProps.mTimestamp = + GetLastModifiedTime(aOriginProps.mDirectory, persistent); + } else { + int64_t timestamp; + nsCString group; + nsCString origin; + Nullable<bool> isApp; + rv = GetDirectoryMetadata(aOriginProps.mDirectory, + timestamp, + group, + origin, + isApp); + if (NS_FAILED(rv)) { + aOriginProps.mTimestamp = + GetLastModifiedTime(aOriginProps.mDirectory, mPersistent); + aOriginProps.mNeedsRestore = true; + } else if (!isApp.IsNull()) { + aOriginProps.mIgnore = true; + } + } + + *aRemoved = false; + return NS_OK; +} + +nsresult CreateOrUpgradeDirectoryMetadataHelper::ProcessOriginDirectory( const OriginProps& aOriginProps) { AssertIsOnIOThread(); nsresult rv; if (mPersistent) { @@ -9102,93 +9172,42 @@ CreateOrUpgradeDirectoryMetadataHelper:: return rv; } } return NS_OK; } nsresult -UpgradeStorageFrom0_0To1_0Helper::DoUpgrade() +UpgradeStorageFrom0_0To1_0Helper::PrepareOriginDirectory( + OriginProps& aOriginProps, + bool* aRemoved) { AssertIsOnIOThread(); - - bool exists; - nsresult rv = mDirectory->Exists(&exists); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!exists) { - return NS_OK; - } - - nsCOMPtr<nsIDirectoryEnumerator> entries; - rv = mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> originDir; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(originDir)))) && - originDir) { - bool isDirectory; - rv = originDir->IsDirectory(&isDirectory); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!isDirectory) { - nsString leafName; - rv = originDir->GetLeafName(leafName); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - // Unknown files during upgrade are allowed. Just warn if we find them. 
- if (!IsOSMetadata(leafName)) { - UNKNOWN_FILE_WARNING(leafName); - } - continue; - } - - OriginProps originProps; - rv = originProps.Init(originDir); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - int64_t timestamp; - nsCString group; - nsCString origin; - Nullable<bool> isApp; - nsresult rv = GetDirectoryMetadata(originDir, - timestamp, - group, - origin, - isApp); - if (NS_FAILED(rv) || isApp.IsNull()) { - originProps.mTimestamp = GetLastModifiedTime(originDir, mPersistent); - originProps.mNeedsRestore = true; - } else { - originProps.mTimestamp = timestamp; - } - - mOriginProps.AppendElement(std::move(originProps)); - } - - if (mOriginProps.IsEmpty()) { - return NS_OK; - } - - rv = ProcessOriginDirectories(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - + MOZ_ASSERT(aOriginProps.mDirectory); + MOZ_ASSERT(aRemoved); + + int64_t timestamp; + nsCString group; + nsCString origin; + Nullable<bool> isApp; + nsresult rv = GetDirectoryMetadata(aOriginProps.mDirectory, + timestamp, + group, + origin, + isApp); + if (NS_FAILED(rv) || isApp.IsNull()) { + aOriginProps.mTimestamp = + GetLastModifiedTime(aOriginProps.mDirectory, mPersistent); + aOriginProps.mNeedsRestore = true; + } else { + aOriginProps.mTimestamp = timestamp; + } + + *aRemoved = false; return NS_OK; } nsresult UpgradeStorageFrom0_0To1_0Helper::ProcessOriginDirectory( const OriginProps& aOriginProps) { AssertIsOnIOThread(); @@ -9233,189 +9252,51 @@ UpgradeStorageFrom0_0To1_0Helper::Proces return rv; } } return NS_OK; } nsresult -UpgradeStorageFrom1_0To2_0Helper::DoUpgrade() -{ - AssertIsOnIOThread(); - - DebugOnly<bool> exists; - MOZ_ASSERT(NS_SUCCEEDED(mDirectory->Exists(&exists))); - MOZ_ASSERT(exists); - - nsCOMPtr<nsIDirectoryEnumerator> entries; - nsresult rv = mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> originDir; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(originDir)))) && originDir) { - bool isDirectory; - rv = originDir->IsDirectory(&isDirectory); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!isDirectory) { - nsString leafName; - rv = originDir->GetLeafName(leafName); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - // Unknown files during upgrade are allowed. Just warn if we find them. 
- if (!IsOSMetadata(leafName)) { - UNKNOWN_FILE_WARNING(leafName); - } - continue; - } - - OriginProps originProps; - rv = originProps.Init(originDir); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - rv = MaybeUpgradeClients(originProps); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - bool removed; - rv = MaybeRemoveAppsData(originProps, &removed); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - if (removed) { - continue; - } - - int64_t timestamp; - nsCString group; - nsCString origin; - Nullable<bool> isApp; - nsresult rv = GetDirectoryMetadata(originDir, - timestamp, - group, - origin, - isApp); - if (NS_FAILED(rv) || isApp.IsNull()) { - originProps.mNeedsRestore = true; - } - - nsCString suffix; - rv = GetDirectoryMetadata2(originDir, - timestamp, - suffix, - group, - origin, - isApp.SetValue()); - if (NS_FAILED(rv)) { - originProps.mTimestamp = GetLastModifiedTime(originDir, mPersistent); - originProps.mNeedsRestore2 = true; - } else { - originProps.mTimestamp = timestamp; - } - - mOriginProps.AppendElement(std::move(originProps)); - } - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (mOriginProps.IsEmpty()) { - return NS_OK; - } - - rv = ProcessOriginDirectories(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - return NS_OK; -} - -nsresult -UpgradeStorageFrom1_0To2_0Helper::MaybeUpgradeClients( +UpgradeStorageFrom1_0To2_0Helper::MaybeRemoveMorgueDirectory( const OriginProps& aOriginProps) { AssertIsOnIOThread(); MOZ_ASSERT(aOriginProps.mDirectory); - QuotaManager* quotaManager = QuotaManager::Get(); - MOZ_ASSERT(quotaManager); - - nsCOMPtr<nsIDirectoryEnumerator> entries; - nsresult rv = - aOriginProps.mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> file; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(file)))) && file) { - bool isDirectory; - rv = file->IsDirectory(&isDirectory); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsString leafName; - rv = file->GetLeafName(leafName); + // The Cache API was creating top level morgue directories by accident for + // a short time in nightly. This unfortunately prevents all storage from + // working. So recover these profiles permanently by removing these corrupt + // directories as part of this upgrade. + + nsCOMPtr<nsIFile> morgueDir; + nsresult rv = aOriginProps.mDirectory->Clone(getter_AddRefs(morgueDir)); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + rv = morgueDir->Append(NS_LITERAL_STRING("morgue")); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + bool exists; + rv = morgueDir->Exists(&exists); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + if (exists) { + QM_WARNING("Deleting accidental morgue directory!"); + + rv = morgueDir->Remove(/* recursive */ true); if (NS_WARN_IF(NS_FAILED(rv))) { return rv; } - - if (!isDirectory) { - // Unknown files during upgrade are allowed. Just warn if we find them. - if (!IsOriginMetadata(leafName) && - !IsTempMetadata(leafName)) { - UNKNOWN_FILE_WARNING(leafName); - } - continue; - } - - // The Cache API was creating top level morgue directories by accident for - // a short time in nightly. This unfortunately prevents all storage from - // working. So recover these profiles permanently by removing these corrupt - // directories as part of this upgrade. 
- if (leafName.EqualsLiteral("morgue")) { - QM_WARNING("Deleting accidental morgue directory!"); - - rv = file->Remove(/* recursive */ true); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - continue; - } - - Client::Type clientType; - rv = Client::TypeFromText(leafName, clientType); - if (NS_FAILED(rv)) { - UNKNOWN_FILE_WARNING(leafName); - continue; - } - - Client* client = quotaManager->GetClient(clientType); - MOZ_ASSERT(client); - - rv = client->UpgradeStorageFrom1_0To2_0(file); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; } return NS_OK; } nsresult UpgradeStorageFrom1_0To2_0Helper::MaybeRemoveAppsData( const OriginProps& aOriginProps, @@ -9510,16 +9391,78 @@ UpgradeStorageFrom1_0To2_0Helper::MaybeS return rv; } *aStripped = true; return NS_OK; } nsresult +UpgradeStorageFrom1_0To2_0Helper::PrepareOriginDirectory( + OriginProps& aOriginProps, + bool* aRemoved) +{ + AssertIsOnIOThread(); + MOZ_ASSERT(aOriginProps.mDirectory); + MOZ_ASSERT(aRemoved); + + nsresult rv = MaybeRemoveMorgueDirectory(aOriginProps); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + rv = MaybeUpgradeClients(aOriginProps, + &Client::UpgradeStorageFrom1_0To2_0); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + bool removed; + rv = MaybeRemoveAppsData(aOriginProps, &removed); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + if (removed) { + *aRemoved = true; + return NS_OK; + } + + int64_t timestamp; + nsCString group; + nsCString origin; + Nullable<bool> isApp; + rv = GetDirectoryMetadata(aOriginProps.mDirectory, + timestamp, + group, + origin, + isApp); + if (NS_FAILED(rv) || isApp.IsNull()) { + aOriginProps.mNeedsRestore = true; + } + + nsCString suffix; + rv = GetDirectoryMetadata2(aOriginProps.mDirectory, + timestamp, + suffix, + group, + origin, + isApp.SetValue()); + if (NS_FAILED(rv)) { + aOriginProps.mTimestamp = + GetLastModifiedTime(aOriginProps.mDirectory, mPersistent); + aOriginProps.mNeedsRestore2 = true; + } else { + aOriginProps.mTimestamp = timestamp; + } + + *aRemoved = false; + return NS_OK; +} + +nsresult UpgradeStorageFrom1_0To2_0Helper::ProcessOriginDirectory( const OriginProps& aOriginProps) { AssertIsOnIOThread(); bool stripped; nsresult rv = MaybeStripObsoleteOriginAttributes(aOriginProps, &stripped); if (NS_WARN_IF(NS_FAILED(rv))) { @@ -9551,168 +9494,59 @@ UpgradeStorageFrom1_0To2_0Helper::Proces return rv; } } return NS_OK; } nsresult -UpgradeStorageFrom2_0To2_1Helper::DoUpgrade() -{ - AssertIsOnIOThread(); - - DebugOnly<bool> exists; - MOZ_ASSERT(NS_SUCCEEDED(mDirectory->Exists(&exists))); - MOZ_ASSERT(exists); - - nsCOMPtr<nsIDirectoryEnumerator> entries; - nsresult rv = mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> originDir; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(originDir)))) && originDir) { - bool isDirectory; - rv = originDir->IsDirectory(&isDirectory); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!isDirectory) { - nsString leafName; - rv = originDir->GetLeafName(leafName); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - // Unknown files during upgrade are allowed. Just warn if we find them. - if (!IsOSMetadata(leafName)) { - UNKNOWN_FILE_WARNING(leafName); - } - continue; - } - - OriginProps originProps; - rv = originProps.Init(originDir); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - // Only update DOM Cache directory for adding padding file. 
- rv = MaybeUpgradeClients(originProps); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - int64_t timestamp; - nsCString group; - nsCString origin; - Nullable<bool> isApp; - nsresult rv = GetDirectoryMetadata(originDir, - timestamp, - group, - origin, - isApp); - if (NS_FAILED(rv) || isApp.IsNull()) { - originProps.mNeedsRestore = true; - } - - nsCString suffix; - rv = GetDirectoryMetadata2(originDir, - timestamp, - suffix, - group, - origin, - isApp.SetValue()); - if (NS_FAILED(rv)) { - originProps.mTimestamp = GetLastModifiedTime(originDir, mPersistent); - originProps.mNeedsRestore2 = true; - } else { - originProps.mTimestamp = timestamp; - } - - mOriginProps.AppendElement(std::move(originProps)); - } - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (mOriginProps.IsEmpty()) { - return NS_OK; - } - - rv = ProcessOriginDirectories(); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - return NS_OK; -} - -nsresult -UpgradeStorageFrom2_0To2_1Helper::MaybeUpgradeClients( - const OriginProps& aOriginProps) +UpgradeStorageFrom2_0To2_1Helper::PrepareOriginDirectory( + OriginProps& aOriginProps, + bool* aRemoved) { AssertIsOnIOThread(); MOZ_ASSERT(aOriginProps.mDirectory); - - QuotaManager* quotaManager = QuotaManager::Get(); - MOZ_ASSERT(quotaManager); - - nsCOMPtr<nsIDirectoryEnumerator> entries; - nsresult rv = - aOriginProps.mDirectory->GetDirectoryEntries(getter_AddRefs(entries)); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsCOMPtr<nsIFile> file; - while (NS_SUCCEEDED((rv = entries->GetNextFile(getter_AddRefs(file)))) && file) { - bool isDirectory; - rv = file->IsDirectory(&isDirectory); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - nsString leafName; - rv = file->GetLeafName(leafName); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - - if (!isDirectory) { - // Unknown files during upgrade are allowed. Just warn if we find them. - if (!IsOriginMetadata(leafName) && - !IsTempMetadata(leafName)) { - UNKNOWN_FILE_WARNING(leafName); - } - continue; - } - - Client::Type clientType; - rv = Client::TypeFromText(leafName, clientType); - if (NS_FAILED(rv)) { - UNKNOWN_FILE_WARNING(leafName); - continue; - } - - Client* client = quotaManager->GetClient(clientType); - MOZ_ASSERT(client); - - rv = client->UpgradeStorageFrom2_0To2_1(file); - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - } - if (NS_WARN_IF(NS_FAILED(rv))) { - return rv; - } - + MOZ_ASSERT(aRemoved); + + nsresult rv = MaybeUpgradeClients(aOriginProps, + &Client::UpgradeStorageFrom2_0To2_1); + if (NS_WARN_IF(NS_FAILED(rv))) { + return rv; + } + + int64_t timestamp; + nsCString group; + nsCString origin; + Nullable<bool> isApp; + rv = GetDirectoryMetadata(aOriginProps.mDirectory, + timestamp, + group, + origin, + isApp); + if (NS_FAILED(rv) || isApp.IsNull()) { + aOriginProps.mNeedsRestore = true; + } + + nsCString suffix; + rv = GetDirectoryMetadata2(aOriginProps.mDirectory, + timestamp, + suffix, + group, + origin, + isApp.SetValue()); + if (NS_FAILED(rv)) { + aOriginProps.mTimestamp = + GetLastModifiedTime(aOriginProps.mDirectory, mPersistent); + aOriginProps.mNeedsRestore2 = true; + } else { + aOriginProps.mTimestamp = timestamp; + } + + *aRemoved = false; return NS_OK; } nsresult UpgradeStorageFrom2_0To2_1Helper::ProcessOriginDirectory( const OriginProps& aOriginProps) { AssertIsOnIOThread();
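The refactoring above folds the near-identical per-version client loops into RepositoryOperationBase::MaybeUpgradeClients, which receives the version-specific step as a pointer to a Client member function (for example &Client::UpgradeStorageFrom1_0To2_0). Below is a minimal sketch of that dispatch pattern; the nsresult and Client types are simplified stand-ins, not the Quota Manager classes.

#include <cstdio>

// Simplified stand-ins; not the Gecko nsresult/Client types.
using nsresult = int;
constexpr nsresult NS_OK = 0;

struct Client {
  nsresult UpgradeStorageFrom1_0To2_0(const char* clientDir) {
    std::printf("1.0 -> 2.0 step for %s\n", clientDir);
    return NS_OK;
  }
  nsresult UpgradeStorageFrom2_0To2_1(const char* clientDir) {
    std::printf("2.0 -> 2.1 step for %s\n", clientDir);
    return NS_OK;
  }
};

// Shared walk over an origin's client directories; the version-specific work
// arrives as a member-function pointer, as in
// MaybeUpgradeClients(aOriginProps, &Client::UpgradeStorageFrom1_0To2_0).
template <typename UpgradeMethod>
nsresult MaybeUpgradeClients(Client& client, const char* clientDir,
                             UpgradeMethod aMethod) {
  return (client.*aMethod)(clientDir);
}

int main() {
  Client client;
  MaybeUpgradeClients(client, "idb", &Client::UpgradeStorageFrom1_0To2_0);
  MaybeUpgradeClients(client, "cache", &Client::UpgradeStorageFrom2_0To2_1);
  return 0;
}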
--- a/dom/quota/QuotaManager.h +++ b/dom/quota/QuotaManager.h @@ -472,16 +472,22 @@ private: MaybeUpgradeIndexedDBDirectory(); nsresult MaybeUpgradePersistentStorageDirectory(); nsresult MaybeRemoveOldDirectories(); + template<typename Helper> + nsresult + UpgradeStorage(const int32_t aOldVersion, + const int32_t aNewVersion, + mozIStorageConnection* aConnection); + nsresult UpgradeStorageFrom0_0To1_0(mozIStorageConnection* aConnection); nsresult UpgradeStorageFrom1_0To2_0(mozIStorageConnection* aConnection); nsresult UpgradeStorageFrom2_0To2_1(mozIStorageConnection* aConnection);
--- a/js/rust/Cargo.toml +++ b/js/rust/Cargo.toml @@ -2,17 +2,17 @@ name = "js" version = "0.1.4" authors = ["The Servo Project Developers"] build = "build.rs" license = "MPL-2.0" [build-dependencies] env_logger = {version = "0.5", default-features = false} # disable `regex` to reduce code size -bindgen = {version = "0.39", default-features = false} # disable `logging` to reduce code size +bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size cmake = "0.1" glob = "0.2.11" [[test]] name = "bigint" required-features = ["bigint"] [[test]] name = "callback"
--- a/js/src/builtin/Module.js
+++ b/js/src/builtin/Module.js
@@ -294,26 +294,27 @@ function ModuleInstantiate()
     let stack = [];
 
     // Steps 4-5
     try {
         InnerModuleInstantiation(module, stack, 0);
     } catch (error) {
         for (let i = 0; i < stack.length; i++) {
             let m = stack[i];
-            assert(m.status === MODULE_STATUS_INSTANTIATING,
-                   "Expected instantiating status during failed instantiation");
-            HandleModuleInstantiationFailure(m);
+            if (m.status === MODULE_STATUS_INSTANTIATING) {
+                HandleModuleInstantiationFailure(m);
+            }
         }
 
         // Handle OOM when appending to the stack or over-recursion errors.
-        if (stack.length === 0)
+        if (stack.length === 0 && module.status === MODULE_STATUS_INSTANTIATING) {
             HandleModuleInstantiationFailure(module);
+        }
 
-        assert(module.status === MODULE_STATUS_UNINSTANTIATED,
-               "Expected uninstantiated status after failed instantiation");
+        assert(module.status !== MODULE_STATUS_INSTANTIATING,
+               "Expected non-instantiating status after failed instantiation");
 
         throw error;
     }
 
     // Step 6
     assert(module.status === MODULE_STATUS_INSTANTIATED ||
            module.status === MODULE_STATUS_EVALUATED ||
--- a/js/src/builtin/ModuleObject.cpp +++ b/js/src/builtin/ModuleObject.cpp @@ -3,16 +3,17 @@ * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include "builtin/ModuleObject.h" #include "mozilla/EnumSet.h" +#include "builtin/Promise.h" #include "builtin/SelfHostingDefines.h" #include "frontend/ParseNode.h" #include "frontend/SharedContext.h" #include "gc/FreeOp.h" #include "gc/Policy.h" #include "gc/Tracer.h" #include "vm/AsyncFunction.h" #include "vm/AsyncIteration.h" @@ -1800,8 +1801,93 @@ js::GetOrCreateModuleMetaObject(JSContex if (!func(cx, modulePrivate, metaObject)) { return nullptr; } module->setMetaObject(metaObject); return metaObject; } + +JSObject* +js::CallModuleResolveHook(JSContext* cx, HandleValue referencingPrivate, HandleString specifier) +{ + JS::ModuleResolveHook moduleResolveHook = cx->runtime()->moduleResolveHook; + if (!moduleResolveHook) { + JS_ReportErrorASCII(cx, "Module resolve hook not set"); + return nullptr; + } + + RootedObject result(cx, moduleResolveHook(cx, referencingPrivate, specifier)); + if (!result) { + return nullptr; + } + + if (!result->is<ModuleObject>()) { + JS_ReportErrorASCII(cx, "Module resolve hook did not return Module object"); + return nullptr; + } + + return result; +} + +JSObject* +js::StartDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleValue specifierArg) +{ + RootedObject promiseConstructor(cx, JS::GetPromiseConstructor(cx)); + if (!promiseConstructor) { + return nullptr; + } + + RootedObject promiseObject(cx, JS::NewPromiseObject(cx, nullptr)); + if (!promiseObject) { + return nullptr; + } + + Handle<PromiseObject*> promise = promiseObject.as<PromiseObject>(); + + RootedString specifier(cx, ToString(cx, specifierArg)); + if (!specifier) { + if (!RejectPromiseWithPendingError(cx, promise)) + return nullptr; + return promise; + } + + JS::ModuleDynamicImportHook importHook = cx->runtime()->moduleDynamicImportHook; + MOZ_ASSERT(importHook); + if (!importHook(cx, referencingPrivate, specifier, promise)) { + if (!RejectPromiseWithPendingError(cx, promise)) + return nullptr; + return promise; + } + + return promise; +} + +bool +js::FinishDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleString specifier, + HandleObject promiseArg) +{ + Handle<PromiseObject*> promise = promiseArg.as<PromiseObject>(); + + if (cx->isExceptionPending()) { + return RejectPromiseWithPendingError(cx, promise); + } + + RootedObject result(cx, CallModuleResolveHook(cx, referencingPrivate, specifier)); + if (!result) { + return RejectPromiseWithPendingError(cx, promise); + } + + RootedModuleObject module(cx, &result->as<ModuleObject>()); + if (module->status() != MODULE_STATUS_EVALUATED) { + JS_ReportErrorASCII(cx, "Unevaluated or errored module returned by module resolve hook"); + return RejectPromiseWithPendingError(cx, promise); + } + + RootedObject ns(cx, ModuleObject::GetOrCreateModuleNamespace(cx, module)); + if (!ns) { + return RejectPromiseWithPendingError(cx, promise); + } + + RootedValue value(cx, ObjectValue(*ns)); + return PromiseObject::resolve(cx, promise, value); +}
--- a/js/src/builtin/ModuleObject.h +++ b/js/src/builtin/ModuleObject.h @@ -414,16 +414,26 @@ class MOZ_STACK_CLASS ModuleBuilder ArrayObject* createArray(const JS::Rooted<GCVector<T>>& vector); template <typename K, typename V> ArrayObject* createArray(const JS::Rooted<GCHashMap<K, V>>& map); }; JSObject* GetOrCreateModuleMetaObject(JSContext* cx, HandleObject module); +JSObject* +CallModuleResolveHook(JSContext* cx, HandleValue referencingPrivate, HandleString specifier); + +JSObject* +StartDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleValue specifier); + +bool +FinishDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleString specifier, + HandleObject promise); + } // namespace js template<> inline bool JSObject::is<js::ModuleNamespaceObject>() const { return js::IsDerivedProxyObject(this, &js::ModuleNamespaceObject::proxyHandler); }
--- a/js/src/builtin/Promise.cpp +++ b/js/src/builtin/Promise.cpp @@ -3399,16 +3399,26 @@ OriginalPromiseThenBuiltin(JSContext* cx if (rvalUsed) { rval.setObject(*resultCapability.promise()); } else { rval.setUndefined(); } return true; } +MOZ_MUST_USE bool +js::RejectPromiseWithPendingError(JSContext* cx, Handle<PromiseObject*> promise) +{ + // Not much we can do about uncatchable exceptions, just bail. + RootedValue exn(cx); + if (!GetAndClearException(cx, &exn)) + return false; + return PromiseObject::reject(cx, promise, exn); +} + static MOZ_MUST_USE bool PerformPromiseThenWithReaction(JSContext* cx, Handle<PromiseObject*> promise, Handle<PromiseReactionRecord*> reaction); // Some async/await functions are implemented here instead of // js/src/builtin/AsyncFunction.cpp, to call Promise internal functions. // ES 2018 draft 14.6.11 and 14.7.14 step 1.
--- a/js/src/builtin/Promise.h +++ b/js/src/builtin/Promise.h @@ -220,16 +220,18 @@ OriginalPromiseThen(JSContext* cx, Handl * PromiseResolve ( C, x ) * * The abstract operation PromiseResolve, given a constructor and a value, * returns a new promise resolved with that value. */ MOZ_MUST_USE JSObject* PromiseResolve(JSContext* cx, HandleObject constructor, HandleValue value); +MOZ_MUST_USE bool +RejectPromiseWithPendingError(JSContext* cx, Handle<PromiseObject*> promise); /** * Create the promise object which will be used as the return value of an async * function. */ MOZ_MUST_USE PromiseObject* CreatePromiseObjectForAsync(JSContext* cx, HandleValue generatorVal);
--- a/js/src/builtin/Stream.cpp +++ b/js/src/builtin/Stream.cpp @@ -519,26 +519,16 @@ ReportArgTypeError(JSContext* cx, const return; } JS_ReportErrorNumberUTF8(cx, GetErrorMessage, nullptr, JSMSG_NOT_EXPECTED_TYPE, funName, expectedType, bytes.get()); } static MOZ_MUST_USE bool -RejectWithPendingError(JSContext* cx, Handle<PromiseObject*> promise) { - // Not much we can do about uncatchable exceptions, just bail. - RootedValue exn(cx); - if (!GetAndClearException(cx, &exn)) { - return false; - } - return PromiseObject::reject(cx, promise, exn); -} - -static MOZ_MUST_USE bool ReturnPromiseRejectedWithPendingError(JSContext* cx, const CallArgs& args) { JSObject* promise = PromiseRejectedWithPendingError(cx); if (!promise) { return false; } args.rval().setObject(*promise); @@ -1456,17 +1446,17 @@ ReadableStreamTee_Cancel(JSContext* cx, Rooted<PromiseObject*> promise(cx, teeState->promise()); // Step b: Let cancelResult be ! ReadableStreamCancel(stream, compositeReason). RootedObject cancelResult(cx, ReadableStream::cancel(cx, stream, compositeReasonVal)); { AutoRealm ar(cx, promise); if (!cancelResult) { - if (!RejectWithPendingError(cx, promise)) { + if (!RejectPromiseWithPendingError(cx, promise)) { return nullptr; } } else { // Step c: Resolve teeState.[[promise]] with cancelResult. RootedValue resultVal(cx, ObjectValue(*cancelResult)); if (!cx->compartment()->wrap(cx, &resultVal)) { return nullptr; }
--- a/js/src/frontend/BytecodeEmitter.cpp +++ b/js/src/frontend/BytecodeEmitter.cpp @@ -8873,18 +8873,24 @@ BytecodeEmitter::emitTree(ParseNode* pn, case ParseNodeKind::ImportMeta: if (!emit1(JSOP_IMPORTMETA)) { return false; } break; case ParseNodeKind::CallImport: - reportError(nullptr, JSMSG_NO_DYNAMIC_IMPORT); - return false; + if (!cx->runtime()->moduleDynamicImportHook) { + reportError(nullptr, JSMSG_NO_DYNAMIC_IMPORT); + return false; + } + if (!emitTree(pn->as<BinaryNode>().right()) || !emit1(JSOP_DYNAMIC_IMPORT)) { + return false; + } + break; case ParseNodeKind::SetThis: if (!emitSetThis(&pn->as<BinaryNode>())) { return false; } break; case ParseNodeKind::PropertyName:
--- a/js/src/frontend/FullParseHandler.h +++ b/js/src/frontend/FullParseHandler.h @@ -568,17 +568,17 @@ FOR_EACH_PARSENODE_SUBCLASS(DECLARE_AS) return new_<NullaryNode>(ParseNodeKind::ExportBatchSpec, JSOP_NOP, pos); } BinaryNodeType newImportMeta(NullaryNodeType importHolder, NullaryNodeType metaHolder) { return new_<BinaryNode>(ParseNodeKind::ImportMeta, JSOP_NOP, importHolder, metaHolder); } BinaryNodeType newCallImport(NullaryNodeType importHolder, Node singleArg) { - return new_<BinaryNode>(ParseNodeKind::CallImport, JSOP_NOP, importHolder, singleArg); + return new_<BinaryNode>(ParseNodeKind::CallImport, JSOP_DYNAMIC_IMPORT, importHolder, singleArg); } UnaryNodeType newExprStatement(Node expr, uint32_t end) { MOZ_ASSERT(expr->pn_pos.end <= end); return new_<UnaryNode>(ParseNodeKind::ExpressionStatement, TokenPos(expr->pn_pos.begin, end), expr); }
--- a/js/src/frontend/Parser.cpp +++ b/js/src/frontend/Parser.cpp @@ -10908,17 +10908,17 @@ GeneralParser<ParseHandler, Unit>::impor } else if (next == TokenKind::LeftParen) { Node arg = assignExpr(InAllowed, yieldHandling, TripledotProhibited); if (!arg) { return null(); } MUST_MATCH_TOKEN_MOD(TokenKind::RightParen, TokenStream::Operand, JSMSG_PAREN_AFTER_ARGS); - if (!abortIfSyntaxParser()) { + if (!context->runtime()->moduleDynamicImportHook && !abortIfSyntaxParser()) { return null(); } return handler.newCallImport(importHolder, arg); } else { error(JSMSG_UNEXPECTED_TOKEN_NO_EXPECT, TokenKindToDesc(next)); return null(); }
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/dynamic-import-error.js @@ -0,0 +1,14 @@ +// |jit-test| module + +let result = null; +let error = null; +let promise = import("nonexistent.js"); +promise.then((ns) => { + result = ns; +}).catch((e) => { + error = e; +}); + +drainJobQueue(); +assertEq(result, null); +assertEq(error instanceof Error, true);
--- a/js/src/jit-test/tests/modules/dynamic-import-expression.js +++ b/js/src/jit-test/tests/modules/dynamic-import-expression.js @@ -72,14 +72,8 @@ assertParseThrowsSyntaxError("import("); assertParseThrowsSyntaxError("import(1,"); assertParseThrowsSyntaxError("import(1, 2"); assertParseThrowsSyntaxError("import(1, 2)"); assertParseThrowsSyntaxError("x = import"); assertParseThrowsSyntaxError("x = import("); assertParseThrowsSyntaxError("x = import(1,"); assertParseThrowsSyntaxError("x = import(1, 2"); assertParseThrowsSyntaxError("x = import(1, 2)"); - -// import() is not implemented. -assertThrowsInstanceOf(() => eval("import('foo')"), - SyntaxError); -assertThrowsInstanceOf(() => parseModule("import('foo')"), - SyntaxError);
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/dynamic-import-ion.js @@ -0,0 +1,16 @@ +// Even with --ion-eager, this needs to be run twice before it executes the +// ion-compiled version. +for (let i = 0; i < 2; i++) { + let result = null; + let error = null; + let promise = import("../../modules/module1.js"); + promise.then((ns) => { + result = ns; + }).catch((e) => { + error = e; + }); + + drainJobQueue(); + assertEq(error, null); + assertEq(result.a, 1); +}
--- a/js/src/jit-test/tests/modules/dynamic-import-lazy.js +++ b/js/src/jit-test/tests/modules/dynamic-import-lazy.js @@ -1,6 +1,4 @@ -// |jit-test| error: SyntaxError - function lazyilyParsedFunction() { return import("/module1.js"); }
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/dynamic-import-module.js @@ -0,0 +1,39 @@ +// |jit-test| module + +function testImport(path, name, value) { + let result = null; + let error = null; + let promise = import(path); + promise.then((ns) => { + result = ns; + }).catch((e) => { + error = e; + }); + + drainJobQueue(); + assertEq(error, null); + assertEq(result[name], value); +} + +// Resolved via module load path. +testImport("module1.js", "a", 1); + +// Relative path resolved relative to this script. +testImport("../../modules/module1a.js", "a", 2); + +// Import inside function. +function f() { + testImport("../../modules/module2.js", "b", 2); +} +f(); + +// Import inside direct eval. +eval(`testImport("../../modules/module3.js", "c", 3)`); + +// Import inside indirect eval. +const indirect = eval; +const defineTestFunc = testImport.toSource(); +indirect(defineTestFunc + `testImport("../../modules/module3.js");`); + +// Import inside dynamic function. +Function(defineTestFunc + `testImport("../../modules/module3.js");`)();
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/dynamic-import-oom.js @@ -0,0 +1,4 @@ +// |jit-test| skip-if: !('oomTest' in this) + +oomTest(() => import("module1.js")); +oomTest(() => import("cyclicImport1.js"));
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/dynamic-import-script.js @@ -0,0 +1,45 @@ +function testImport(path, name, value) { + let result = null; + let error = null; + let promise = import(path); + promise.then((ns) => { + result = ns; + }).catch((e) => { + error = e; + }); + + drainJobQueue(); + assertEq(error, null); + assertEq(result[name], value); +} + +// Resolved via module load path. +testImport("module1.js", "a", 1); + +// Relative path resolved relative to this script. +testImport("../../modules/module1a.js", "a", 2); + +// Import inside function. +function f() { + testImport("../../modules/module2.js", "b", 2); +} +f(); + +// Import inside eval. +eval(`testImport("../../modules/module3.js", "c", 3)`); + +// Import inside indirect eval. +const indirect = eval; +const defineTestFunc = testImport.toSource(); +indirect(defineTestFunc + `testImport("../../modules/module3.js");`); + +// Import inside dynamic function. +Function(defineTestFunc + `testImport("../../modules/module3.js");`)(); + +// Import in eval in promise handler. +let ran = false; +Promise + .resolve(`import("../../modules/module3.js").then(() => { ran = true; })`) + .then(eval) +drainJobQueue(); +assertEq(ran, true);
new file mode 100644 --- /dev/null +++ b/js/src/jit-test/tests/modules/inline-data.js @@ -0,0 +1,17 @@ +// |jit-test| module + +import { a } from "javascript: export let a = 42;"; +assertEq(a, 42); + +let result = null; +let error = null; +let promise = import("javascript: export let b = 100;"); +promise.then((ns) => { + result = ns; +}).catch((e) => { + error = e; +}); + +drainJobQueue(); +assertEq(error, null); +assertEq(result.b, 100);
--- a/js/src/jit/BaselineCompiler.cpp +++ b/js/src/jit/BaselineCompiler.cpp @@ -5439,8 +5439,33 @@ BaselineCompiler::emit_JSOP_IMPORTMETA() if (!callVM(GetOrCreateModuleMetaObjectInfo)) { return false; } masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); frame.push(R0); return true; } + +typedef JSObject* (*StartDynamicModuleImportFn)(JSContext*, HandleValue, HandleValue); +static const VMFunction StartDynamicModuleImportInfo = + FunctionInfo<StartDynamicModuleImportFn>(js::StartDynamicModuleImport, + "StartDynamicModuleImport"); + +bool +BaselineCompiler::emit_JSOP_DYNAMIC_IMPORT() +{ + RootedValue referencingPrivate(cx, FindScriptOrModulePrivateForScript(script)); + + // Put specifier value in R0. + frame.popRegsAndSync(1); + + prepareVMCall(); + pushArg(R0); + pushArg(referencingPrivate); + if (!callVM(StartDynamicModuleImportInfo)) { + return false; + } + + masm.tagValue(JSVAL_TYPE_OBJECT, ReturnReg, R0); + frame.push(R0); + return true; +}
--- a/js/src/jit/BaselineCompiler.h +++ b/js/src/jit/BaselineCompiler.h @@ -240,17 +240,18 @@ namespace jit { _(JSOP_TRY_DESTRUCTURING_ITERCLOSE) \ _(JSOP_CHECKCLASSHERITAGE) \ _(JSOP_INITHOMEOBJECT) \ _(JSOP_BUILTINPROTO) \ _(JSOP_OBJWITHPROTO) \ _(JSOP_FUNWITHPROTO) \ _(JSOP_CLASSCONSTRUCTOR) \ _(JSOP_DERIVEDCONSTRUCTOR) \ - _(JSOP_IMPORTMETA) + _(JSOP_IMPORTMETA) \ + _(JSOP_DYNAMIC_IMPORT) class BaselineCompiler final { JSContext* cx; JSScript* script; jsbytecode* pc; StackMacroAssembler masm; bool ionCompileable_;
--- a/js/src/jit/CodeGenerator.cpp +++ b/js/src/jit/CodeGenerator.cpp @@ -3020,16 +3020,29 @@ static const VMFunction GetOrCreateModul void CodeGenerator::visitModuleMetadata(LModuleMetadata* lir) { pushArg(ImmPtr(lir->mir()->module())); callVM(GetOrCreateModuleMetaObjectInfo, lir); } +typedef JSObject* (*StartDynamicModuleImportFn)(JSContext*, HandleValue, HandleValue); +static const VMFunction StartDynamicModuleImportInfo = + FunctionInfo<StartDynamicModuleImportFn>(js::StartDynamicModuleImport, + "StartDynamicModuleImport"); + +void +CodeGenerator::visitDynamicImport(LDynamicImport* lir) +{ + pushArg(ToValue(lir, LDynamicImport::SpecifierIndex)); + pushArg(ToValue(lir, LDynamicImport::ReferencingPrivateIndex)); + callVM(StartDynamicModuleImportInfo, lir); +} + typedef JSObject* (*LambdaFn)(JSContext*, HandleFunction, HandleObject); static const VMFunction LambdaInfo = FunctionInfo<LambdaFn>(js::Lambda, "Lambda"); void CodeGenerator::visitLambdaForSingleton(LLambdaForSingleton* lir) { pushArg(ToRegister(lir->environmentChain())); pushArg(ImmGCPtr(lir->mir()->info().funUnsafe()));
--- a/js/src/jit/IonBuilder.cpp +++ b/js/src/jit/IonBuilder.cpp @@ -16,16 +16,17 @@ #include "jit/BaselineInspector.h" #include "jit/Ion.h" #include "jit/IonControlFlow.h" #include "jit/IonOptimizationLevels.h" #include "jit/JitSpewer.h" #include "jit/Lowering.h" #include "jit/MIRGraph.h" #include "vm/ArgumentsObject.h" +#include "vm/EnvironmentObject.h" #include "vm/Opcodes.h" #include "vm/RegExpStatics.h" #include "vm/TraceLogging.h" #include "gc/Nursery-inl.h" #include "jit/CompileInfo-inl.h" #include "jit/shared/Lowering-shared-inl.h" #include "vm/BytecodeUtil-inl.h" @@ -2484,16 +2485,19 @@ IonBuilder::inspectOpcode(JSOp op) arr->setImplicitlyUsedUnchecked(); pushConstant(BooleanValue(false)); return Ok(); } case JSOP_IMPORTMETA: return jsop_importmeta(); + case JSOP_DYNAMIC_IMPORT: + return jsop_dynamic_import(); + case JSOP_LOOPENTRY: return jsop_loopentry(); // ===== NOT Yet Implemented ===== // Read below! // With case JSOP_ENTERWITH: @@ -13823,16 +13827,30 @@ IonBuilder::jsop_importmeta() MOZ_ASSERT(module); MModuleMetadata* meta = MModuleMetadata::New(alloc(), module); current->add(meta); current->push(meta); return resumeAfter(meta); } +AbortReasonOr<Ok> +IonBuilder::jsop_dynamic_import() +{ + Value referencingPrivate = FindScriptOrModulePrivateForScript(script()); + MConstant* ref = constant(referencingPrivate); + + MDefinition* specifier = current->pop(); + + MDynamicImport* ins = MDynamicImport::New(alloc(), ref, specifier); + current->add(ins); + current->push(ins); + return resumeAfter(ins); +} + MInstruction* IonBuilder::addConvertElementsToDoubles(MDefinition* elements) { MInstruction* convert = MConvertElementsToDoubles::New(alloc(), elements); current->add(convert); return convert; }
--- a/js/src/jit/IonBuilder.h +++ b/js/src/jit/IonBuilder.h @@ -588,16 +588,17 @@ class IonBuilder AbortReasonOr<Ok> jsop_debugger(); AbortReasonOr<Ok> jsop_newtarget(); AbortReasonOr<Ok> jsop_checkisobj(uint8_t kind); AbortReasonOr<Ok> jsop_checkiscallable(uint8_t kind); AbortReasonOr<Ok> jsop_checkobjcoercible(); AbortReasonOr<Ok> jsop_pushcallobj(); AbortReasonOr<Ok> jsop_implicitthis(PropertyName* name); AbortReasonOr<Ok> jsop_importmeta(); + AbortReasonOr<Ok> jsop_dynamic_import(); /* Inlining. */ enum InliningStatus { InliningStatus_NotInlined, InliningStatus_WarmUpCountTooLow, InliningStatus_Inlined
--- a/js/src/jit/Lowering.cpp +++ b/js/src/jit/Lowering.cpp @@ -2598,16 +2598,25 @@ void LIRGenerator::visitModuleMetadata(MModuleMetadata* ins) { LModuleMetadata* lir = new(alloc()) LModuleMetadata(); defineReturn(lir, ins); assignSafepoint(lir, ins); } void +LIRGenerator::visitDynamicImport(MDynamicImport* ins) +{ + LDynamicImport* lir = new(alloc()) LDynamicImport(useBoxAtStart(ins->referencingPrivate()), + useBoxAtStart(ins->specifier())); + defineReturn(lir, ins); + assignSafepoint(lir, ins); +} + +void LIRGenerator::visitLambda(MLambda* ins) { if (ins->info().singletonType || ins->info().useSingletonForClone) { // If the function has a singleton type, this instruction will only be // executed once so we don't bother inlining it. // // If UseSingletonForClone is true, we will assign a singleton type to // the clone and we have to clone the script, we can't do that inline.
--- a/js/src/jit/MIR.h +++ b/js/src/jit/MIR.h @@ -7814,16 +7814,32 @@ class MModuleMetadata : public MNullaryI return AliasSet::None(); } bool appendRoots(MRootList& roots) const override { return roots.append(module_); } }; +class MDynamicImport : public MBinaryInstruction, + public BoxInputsPolicy::Data +{ + explicit MDynamicImport(MDefinition* referencingPrivate, MDefinition* specifier) + : MBinaryInstruction(classOpcode, referencingPrivate, specifier) + { + setResultType(MIRType::Object); + } + + public: + INSTRUCTION_HEADER(DynamicImport) + TRIVIAL_NEW_WRAPPERS + NAMED_OPERANDS((0, referencingPrivate)) + NAMED_OPERANDS((1, specifier)) +}; + struct LambdaFunctionInfo { // The functions used in lambdas are the canonical original function in // the script, and are immutable except for delazification. Record this // information while still on the main thread to avoid races. private: CompilerFunction fun_;
--- a/js/src/jit/shared/LIR-shared.h +++ b/js/src/jit/shared/LIR-shared.h @@ -4898,16 +4898,37 @@ class LModuleMetadata : public LCallInst return mir_->toModuleMetadata(); } LModuleMetadata() : LCallInstructionHelper(classOpcode) {} }; +class LDynamicImport : public LCallInstructionHelper<1, 2 * BOX_PIECES, 0> +{ + public: + LIR_HEADER(DynamicImport) + + static const size_t ReferencingPrivateIndex = 0; + static const size_t SpecifierIndex = BOX_PIECES; + + explicit LDynamicImport(const LBoxAllocation& referencingPrivate, + const LBoxAllocation& specifier) + : LCallInstructionHelper(classOpcode) + { + setBoxOperand(ReferencingPrivateIndex, referencingPrivate); + setBoxOperand(SpecifierIndex, specifier); + } + + const MDynamicImport* mir() const { + return mir_->toDynamicImport(); + } +}; + class LLambdaForSingleton : public LCallInstructionHelper<1, 1, 0> { public: LIR_HEADER(LambdaForSingleton) explicit LLambdaForSingleton(const LAllocation& envChain) : LCallInstructionHelper(classOpcode) {
--- a/js/src/js.msg +++ b/js/src/js.msg @@ -610,16 +610,17 @@ MSG_DEF(JSMSG_BAD_DEFAULT_EXPORT, MSG_DEF(JSMSG_MISSING_INDIRECT_EXPORT, 0, JSEXN_SYNTAXERR, "indirect export not found") MSG_DEF(JSMSG_AMBIGUOUS_INDIRECT_EXPORT, 0, JSEXN_SYNTAXERR, "ambiguous indirect export") MSG_DEF(JSMSG_MISSING_IMPORT, 0, JSEXN_SYNTAXERR, "import not found") MSG_DEF(JSMSG_AMBIGUOUS_IMPORT, 0, JSEXN_SYNTAXERR, "ambiguous import") MSG_DEF(JSMSG_MISSING_NAMESPACE_EXPORT, 0, JSEXN_SYNTAXERR, "export not found for namespace") MSG_DEF(JSMSG_MISSING_EXPORT, 1, JSEXN_SYNTAXERR, "local binding for export '{0}' not found") MSG_DEF(JSMSG_BAD_MODULE_STATUS, 0, JSEXN_INTERNALERR, "module record has unexpected status") MSG_DEF(JSMSG_NO_DYNAMIC_IMPORT, 0, JSEXN_SYNTAXERR, "dynamic module import is not implemented") +MSG_DEF(JSMSG_IMPORT_SCRIPT_NOT_FOUND, 0, JSEXN_TYPEERR, "can't find referencing script for dynamic module import") // Promise MSG_DEF(JSMSG_CANNOT_RESOLVE_PROMISE_WITH_ITSELF, 0, JSEXN_TYPEERR, "A promise cannot be resolved with itself.") MSG_DEF(JSMSG_PROMISE_CAPABILITY_HAS_SOMETHING_ALREADY, 0, JSEXN_TYPEERR, "GetCapabilitiesExecutor function already invoked with non-undefined values.") MSG_DEF(JSMSG_PROMISE_RESOLVE_FUNCTION_NOT_CALLABLE, 0, JSEXN_TYPEERR, "A Promise subclass passed a non-callable value as the resolve function.") MSG_DEF(JSMSG_PROMISE_REJECT_FUNCTION_NOT_CALLABLE, 0, JSEXN_TYPEERR, "A Promise subclass passed a non-callable value as the reject function.") MSG_DEF(JSMSG_PROMISE_ERROR_IN_WRAPPED_REJECTION_REASON,0, JSEXN_INTERNALERR, "Promise rejection value is a non-unwrappable cross-compartment wrapper.")
--- a/js/src/jsapi.cpp +++ b/js/src/jsapi.cpp @@ -4136,16 +4136,41 @@ JS::GetModuleMetadataHook(JSRuntime* rt) JS_PUBLIC_API(void) JS::SetModuleMetadataHook(JSRuntime* rt, JS::ModuleMetadataHook func) { AssertHeapIsIdle(); rt->moduleMetadataHook = func; } +JS_PUBLIC_API(JS::ModuleDynamicImportHook) +JS::GetModuleDynamicImportHook(JSRuntime* rt) +{ + AssertHeapIsIdle(); + return rt->moduleDynamicImportHook; +} + +JS_PUBLIC_API(void) +JS::SetModuleDynamicImportHook(JSRuntime* rt, JS::ModuleDynamicImportHook func) +{ + AssertHeapIsIdle(); + rt->moduleDynamicImportHook = func; +} + +JS_PUBLIC_API(bool) +JS::FinishDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleString specifier, + HandleObject promise) +{ + AssertHeapIsIdle(); + CHECK_THREAD(cx); + cx->check(referencingPrivate, promise); + + return js::FinishDynamicModuleImport(cx, referencingPrivate, specifier, promise); +} + JS_PUBLIC_API(bool) JS::CompileModule(JSContext* cx, const ReadOnlyCompileOptions& options, SourceBufferHolder& srcBuf, JS::MutableHandleObject module) { MOZ_ASSERT(!cx->zone()->isAtomsZone()); AssertHeapIsIdle(); CHECK_THREAD(cx);
--- a/js/src/jsapi.h
+++ b/js/src/jsapi.h
@@ -3045,16 +3045,35 @@ GetModuleMetadataHook(JSRuntime* rt);
 
 /**
  * Set the hook for populating the import.meta metadata object to the given
  * function.
  */
 extern JS_PUBLIC_API(void)
 SetModuleMetadataHook(JSRuntime* rt, ModuleMetadataHook func);
 
+using ModuleDynamicImportHook = bool (*)(JSContext* cx, HandleValue referencingPrivate,
+                                         HandleString specifier, HandleObject promise);
+
+/**
+ * Get the HostImportModuleDynamically hook for the runtime.
+ */
+extern JS_PUBLIC_API(ModuleDynamicImportHook)
+GetModuleDynamicImportHook(JSRuntime* rt);
+
+/**
+ * Set the HostImportModuleDynamically hook for the runtime to the given function.
+ */
+extern JS_PUBLIC_API(void)
+SetModuleDynamicImportHook(JSRuntime* rt, ModuleDynamicImportHook func);
+
+extern JS_PUBLIC_API(bool)
+FinishDynamicModuleImport(JSContext* cx, HandleValue referencingPrivate, HandleString specifier,
+                          HandleObject promise);
+
 /**
  * Parse the given source buffer as a module in the scope of the current global
  * of cx and return a source text module record.
  */
 extern JS_PUBLIC_API(bool)
 CompileModule(JSContext* cx, const ReadOnlyCompileOptions& options,
               SourceBufferHolder& srcBuf, JS::MutableHandleObject moduleRecord);
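For orientation, here is a rough sketch (not part of this patch) of how an embedding might use the dynamic import API declared above. The names MyLoadAndEvaluateModule, MyDynamicImportHook and RegisterDynamicImportHook are hypothetical placeholders; the flow mirrors what the shell wires up via ShellModuleDynamicImportHook and ModuleLoader.js later in this changeset. Note that FinishDynamicModuleImport relies on the module resolve hook to map (referencingPrivate, specifier) back to the already-evaluated module, and that calling it with an exception pending rejects the promise instead of resolving it.

#include "jsapi.h"

// A minimal sketch of an embedder's HostImportModuleDynamically hook. All
// "My*" names are hypothetical placeholders, not part of SpiderMonkey.

static bool
MyLoadAndEvaluateModule(JSContext* cx, JS::HandleValue referencingPrivate,
                        JS::HandleString specifier)
{
    // A real embedding would resolve |specifier| against |referencingPrivate|,
    // compile the module with JS::CompileModule, then instantiate and evaluate
    // it. This stub just reports an error so the import promise gets rejected.
    JS_ReportErrorASCII(cx, "module loading not implemented in this sketch");
    return false;
}

static bool
MyDynamicImportHook(JSContext* cx, JS::HandleValue referencingPrivate,
                    JS::HandleString specifier, JS::HandleObject promise)
{
    if (!MyLoadAndEvaluateModule(cx, referencingPrivate, specifier)) {
        // An exception is now pending; FinishDynamicModuleImport rejects
        // |promise| with it instead of resolving it.
    }

    // On success this resolves |promise| with the module's namespace object,
    // which it finds by calling the module resolve hook with the same arguments.
    return JS::FinishDynamicModuleImport(cx, referencingPrivate, specifier, promise);
}

static void
RegisterDynamicImportHook(JSRuntime* rt)
{
    // A module resolve hook must also be installed for
    // JS::FinishDynamicModuleImport to find the evaluated module.
    JS::SetModuleDynamicImportHook(rt, MyDynamicImportHook);
}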
--- a/js/src/rust/shared/Cargo.toml +++ b/js/src/rust/shared/Cargo.toml @@ -7,13 +7,13 @@ authors = ["The Spidermonkey developers" crate-type = ["rlib"] name = "jsrust_shared" path = "lib.rs" [dependencies] baldrdash = { path = "../../wasm/cranelift" } [build-dependencies] -bindgen = {version = "0.39", default-features = false} # disable `logging` to reduce code size +bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size # Uncomment this to enable perf support in release mode. #[profile.release] #debug = true
--- a/js/src/shell/ModuleLoader.js +++ b/js/src/shell/ModuleLoader.js @@ -1,41 +1,51 @@ /* -*- Mode: javascript; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* global getModuleLoadPath setModuleLoadHook setModuleResolveHook setModuleMetadataHook */ /* global getModulePrivate setModulePrivate parseModule os */ +/* global setModuleDynamicImportHook finishDynamicModuleImport abortDynamicModuleImport */ // A basic synchronous module loader for testing the shell. +// +// Supports loading files and 'javascript:' URLs that embed JS source text. + { // Save standard built-ins before scripts can modify them. const ArrayPrototypeJoin = Array.prototype.join; const MapPrototypeGet = Map.prototype.get; const MapPrototypeHas = Map.prototype.has; const MapPrototypeSet = Map.prototype.set; const ObjectDefineProperty = Object.defineProperty; const ReflectApply = Reflect.apply; const StringPrototypeIndexOf = String.prototype.indexOf; const StringPrototypeLastIndexOf = String.prototype.lastIndexOf; const StringPrototypeStartsWith = String.prototype.startsWith; const StringPrototypeSubstring = String.prototype.substring; const ErrorClass = Error; +const JAVASCRIPT_SCHEME = "javascript:"; + const ReflectLoader = new class { constructor() { this.registry = new Map(); - this.modulePaths = new Map(); this.loadPath = getModuleLoadPath(); } + isJavascriptURL(name) { + return ReflectApply(StringPrototypeStartsWith, name, [JAVASCRIPT_SCHEME]); + } + resolve(name, referencingInfo) { - if (os.path.isAbsolute(name)) + if (this.isJavascriptURL(name) || os.path.isAbsolute(name)) { return name; + } let loadPath = this.loadPath; // Treat |name| as a relative path if it starts with either "./" // or "../". let isRelative = ReflectApply(StringPrototypeStartsWith, name, ["./"]) || ReflectApply(StringPrototypeStartsWith, name, ["../"]) #ifdef XP_WIN @@ -62,16 +72,20 @@ const ReflectLoader = new class { if (sepIndex >= 0) loadPath = ReflectApply(StringPrototypeSubstring, path, [0, sepIndex]); } return os.path.join(loadPath, name); } normalize(path) { + if (this.isJavascriptURL(path)) { + return path; + } + #ifdef XP_WIN // Replace all forward slashes with backward slashes. // NB: It may be tempting to replace this loop with a call to // String.prototype.replace, but user scripts may have modified // String.prototype or RegExp.prototype built-in functions, which makes // it unsafe to call String.prototype.replace. 
let newPath = ""; let lastSlash = 0; @@ -145,16 +159,20 @@ const ReflectLoader = new class { let normalized = ReflectApply(ArrayPrototypeJoin, components, [pathsep]); #ifdef XP_WIN normalized = drive + normalized; #endif return normalized; } fetch(path) { + if (this.isJavascriptURL(path)) { + return ReflectApply(StringPrototypeSubstring, path, [JAVASCRIPT_SCHEME.length]); + } + return os.file.readFile(path); } loadAndParse(path) { let normalized = this.normalize(path); if (ReflectApply(MapPrototypeHas, this.registry, [normalized])) return ReflectApply(MapPrototypeGet, this.registry, [normalized]); @@ -171,17 +189,17 @@ const ReflectLoader = new class { module.declarationInstantiation(); return module.evaluation(); } importRoot(path) { return this.loadAndExecute(path); } - ["import"](name, referrer) { + ["import"](name, referencingInfo) { let path = this.resolve(name, null); return this.loadAndExecute(path); } populateImportMeta(moduleInfo, metaObject) { // For the shell, use the module's normalized path as the base URL. let path; @@ -191,19 +209,28 @@ const ReflectLoader = new class { path = "(unknown)"; } metaObject.url = path; } }; setModuleLoadHook((path) => ReflectLoader.importRoot(path)); -setModuleResolveHook((module, requestName) => { - let path = ReflectLoader.resolve(requestName, module); +setModuleResolveHook((referencingInfo, requestName) => { + let path = ReflectLoader.resolve(requestName, referencingInfo); return ReflectLoader.loadAndParse(path); }); setModuleMetadataHook((module, metaObject) => { ReflectLoader.populateImportMeta(module, metaObject); }); +setModuleDynamicImportHook((referencingInfo, specifier, promise) => { + try { + let path = ReflectLoader.resolve(specifier, referencingInfo); + ReflectLoader.loadAndExecute(path); + finishDynamicModuleImport(referencingInfo, specifier, promise); + } catch (err) { + abortDynamicModuleImport(referencingInfo, specifier, promise, err); + } +}); + } -
--- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -188,16 +188,17 @@ enum JSShellExitCode { }; // Define use of application-specific slots on the shell's global object. enum GlobalAppSlot { GlobalAppSlotModuleLoadHook, // Shell-specific; load a module graph GlobalAppSlotModuleResolveHook, // HostResolveImportedModule GlobalAppSlotModuleMetadataHook, // HostPopulateImportMeta + GlobalAppSlotModuleDynamicImportHook, // HostImportModuleDynamically GlobalAppSlotCount }; static_assert(GlobalAppSlotCount <= JSCLASS_GLOBAL_APPLICATION_SLOTS, "Too many applications slots defined for shell global"); /* * Note: This limit should match the stack limit set by the browser in * js/xpconnect/src/XPCJSContext.cpp @@ -845,16 +846,42 @@ EnvironmentPreparer::invoke(HandleObject AutoRealm ar(cx, global); AutoReportException are(cx); if (!closure(cx)) { return; } } +static bool +RegisterScriptPathWithModuleLoader(JSContext* cx, HandleScript script, const char* filename) +{ + // Set the private value associated with a script to a object containing the + // script's filename so that the module loader can use it to resolve + // relative imports. + + RootedString path(cx, JS_NewStringCopyZ(cx, filename)); + if (!path) { + return false; + } + + RootedObject infoObject(cx, JS_NewPlainObject(cx)); + if (!infoObject) { + return false; + } + + RootedValue pathValue(cx, StringValue(path)); + if (!JS_DefineProperty(cx, infoObject, "path", pathValue, 0)) { + return false; + } + + JS::SetScriptPrivate(script, ObjectValue(*infoObject)); + return true; +} + static MOZ_MUST_USE bool RunFile(JSContext* cx, const char* filename, FILE* file, bool compileOnly) { SkipUTF8BOM(file); // To support the UNIX #! shell hack, gobble the first line if it starts // with '#'. int ch = fgetc(file); @@ -878,16 +905,20 @@ RunFile(JSContext* cx, const char* filen .setNoScriptRval(true); if (!JS::CompileUtf8File(cx, options, file, &script)) { return false; } MOZ_ASSERT(script); } + if (!RegisterScriptPathWithModuleLoader(cx, script, filename)) { + return false; + } + #ifdef DEBUG if (dumpEntrainedVariables) { AnalyzeEntrainedVariables(cx, script); } #endif if (!compileOnly) { if (!JS_ExecuteScript(cx, script)) { return false; @@ -914,25 +945,28 @@ RunBinAST(JSContext* cx, const char* fil .setNoScriptRval(true); script = JS::DecodeBinAST(cx, options, file); if (!script) { return false; } } + if (!RegisterScriptPathWithModuleLoader(cx, script, filename)) { + return false; + } + return JS_ExecuteScript(cx, script); } #endif // JS_BUILD_BINAST static bool InitModuleLoader(JSContext* cx) { - // Decompress and evaluate the embedded module loader source to initialize // the module loader for the current compartment. 
uint32_t srcLen = moduleloader::GetRawScriptsSize(); auto src = cx->make_pod_array<char>(srcLen); if (!src || !DecompressString(moduleloader::compressedSources, moduleloader::GetCompressedSize(), reinterpret_cast<unsigned char*>(src.get()), srcLen)) { @@ -4718,17 +4752,17 @@ SetModuleResolveHook(JSContext* cx, unsi Handle<GlobalObject*> global = cx->global(); global->setReservedSlot(GlobalAppSlotModuleResolveHook, args[0]); args.rval().setUndefined(); return true; } static JSObject* -CallModuleResolveHook(JSContext* cx, HandleValue referencingPrivate, HandleString specifier) +ShellModuleResolveHook(JSContext* cx, HandleValue referencingPrivate, HandleString specifier) { Handle<GlobalObject*> global = cx->global(); RootedValue hookValue(cx, global->getReservedSlot(GlobalAppSlotModuleResolveHook)); if (hookValue.isUndefined()) { JS_ReportErrorASCII(cx, "Module resolve hook not set"); return nullptr; } MOZ_ASSERT(hookValue.toObject().is<JSFunction>()); @@ -4835,16 +4869,119 @@ ShellGetModulePrivate(JSContext* cx, uns return ReportArgumentTypeError(cx, args[0], "module object"); } args.rval().set(JS::GetModulePrivate(&args[0].toObject())); return true; } static bool +SetModuleDynamicImportHook(JSContext* cx, unsigned argc, Value* vp) +{ + CallArgs args = CallArgsFromVp(argc, vp); + if (args.length() != 1) { + JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_MORE_ARGS_NEEDED, + "setModuleDynamicImportHook", "0", "s"); + return false; + } + + if (!args[0].isObject() || !args[0].toObject().is<JSFunction>()) { + const char* typeName = InformalValueTypeName(args[0]); + JS_ReportErrorASCII(cx, "expected hook function, got %s", typeName); + return false; + } + + Handle<GlobalObject*> global = cx->global(); + global->setReservedSlot(GlobalAppSlotModuleDynamicImportHook, args[0]); + + args.rval().setUndefined(); + return true; +} + +static bool +FinishDynamicModuleImport(JSContext* cx, unsigned argc, Value* vp) +{ + CallArgs args = CallArgsFromVp(argc, vp); + if (args.length() != 3) { + JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_MORE_ARGS_NEEDED, + "finishDynamicModuleImport", "0", "s"); + return false; + } + + if (!args[1].isString()) { + return ReportArgumentTypeError(cx, args[1], "String"); + } + + if (!args[2].isObject() || !args[2].toObject().is<PromiseObject>()) { + return ReportArgumentTypeError(cx, args[2], "PromiseObject"); + } + + RootedString specifier(cx, args[1].toString()); + Rooted<PromiseObject*> promise(cx, &args[2].toObject().as<PromiseObject>()); + + return js::FinishDynamicModuleImport(cx, args[0], specifier, promise); +} + +static bool +AbortDynamicModuleImport(JSContext* cx, unsigned argc, Value* vp) +{ + CallArgs args = CallArgsFromVp(argc, vp); + if (args.length() != 4) { + JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_MORE_ARGS_NEEDED, + "abortDynamicModuleImport", "0", "s"); + return false; + } + + if (!args[1].isString()) { + return ReportArgumentTypeError(cx, args[1], "String"); + } + + if (!args[2].isObject() || !args[2].toObject().is<PromiseObject>()) { + return ReportArgumentTypeError(cx, args[2], "PromiseObject"); + } + + if (!args[3].isObject() || !args[3].toObject().is<ErrorObject>()) { + return ReportArgumentTypeError(cx, args[3], "ErrorObject"); + } + + RootedString specifier(cx, args[1].toString()); + Rooted<PromiseObject*> promise(cx, &args[2].toObject().as<PromiseObject>()); + Rooted<ErrorObject*> error(cx, &args[3].toObject().as<ErrorObject>()); + + Rooted<Value> value(cx, ObjectValue(*error)); + 
cx->setPendingException(value); + return js::FinishDynamicModuleImport(cx, args[0], specifier, promise); +} + +static bool +ShellModuleDynamicImportHook(JSContext* cx, HandleValue referencingPrivate, HandleString specifier, + HandleObject promise) +{ + Handle<GlobalObject*> global = cx->global(); + RootedValue hookValue(cx, global->getReservedSlot(GlobalAppSlotModuleDynamicImportHook)); + if (hookValue.isUndefined()) { + JS_ReportErrorASCII(cx, "Module resolve hook not set"); + return false; + } + MOZ_ASSERT(hookValue.toObject().is<JSFunction>()); + + JS::AutoValueArray<3> args(cx); + args[0].set(referencingPrivate); + args[1].setString(specifier); + args[2].setObject(*promise); + + RootedValue result(cx); + if (!JS_CallFunctionValue(cx, nullptr, hookValue, args, &result)) { + return false; + } + + return true; +} + +static bool GetModuleLoadPath(JSContext* cx, unsigned argc, Value* vp) { CallArgs args = CallArgsFromVp(argc, vp); ShellContext* sc = GetShellContext(cx); if (sc->moduleLoadPath) { JSString* str = JS_NewStringCopyZ(cx, sc->moduleLoadPath.get()); if (!str) { @@ -8054,27 +8191,43 @@ static const JSFunctionSpecWithHelp shel JS_FN_HELP("setModuleLoadHook", SetModuleLoadHook, 1, 0, "setModuleLoadHook(function(path))", " Set the shell specific module load hook to |function|.\n" " This hook is used to load a module graph. It should be implemented by the\n" " module loader."), JS_FN_HELP("setModuleResolveHook", SetModuleResolveHook, 1, 0, -"setModuleResolveHook(function(module, specifier) {})", +"setModuleResolveHook(function(referrer, specifier))", " Set the HostResolveImportedModule hook to |function|.\n" " This hook is used to look up a previously loaded module object. It should\n" " be implemented by the module loader."), JS_FN_HELP("setModuleMetadataHook", SetModuleMetadataHook, 1, 0, "setModuleMetadataHook(function(module) {})", " Set the HostPopulateImportMeta hook to |function|.\n" " This hook is used to create the metadata object returned by import.meta for\n" " a module. It should be implemented by the module loader."), + JS_FN_HELP("setModuleDynamicImportHook", SetModuleDynamicImportHook, 1, 0, +"setModuleDynamicImportHook(function(referrer, specifier, promise))", +" Set the HostImportModuleDynamically hook to |function|.\n" +" This hook is used to dynamically import a module. 
It should\n" +" be implemented by the module loader."), + + JS_FN_HELP("finishDynamicModuleImport", FinishDynamicModuleImport, 3, 0, +"finishDynamicModuleImport(referrer, specifier, promise)", +" The module loader's dynamic import hook should call this when the module has" +" been loaded successfully."), + + JS_FN_HELP("abortDynamicModuleImport", AbortDynamicModuleImport, 4, 0, +"abortDynamicModuleImport(referrer, specifier, promise, error)", +" The module loader's dynamic import hook should call this when the module " +" import has failed."), + JS_FN_HELP("setModulePrivate", ShellSetModulePrivate, 2, 0, "setModulePrivate(scriptObject, privateValue)", " Associate a private value with a module object.\n"), JS_FN_HELP("getModulePrivate", ShellGetModulePrivate, 2, 0, "getModulePrivate(scriptObject)", " Get the private value associated with a module object.\n"), @@ -10783,17 +10936,18 @@ main(int argc, char** argv, char** envp) JS_SetGCParameter(cx, JSGC_DYNAMIC_HEAP_GROWTH, 1); JS_SetGCParameter(cx, JSGC_DYNAMIC_MARK_SLICE, 1); JS_SetGCParameter(cx, JSGC_SLICE_TIME_BUDGET, 10); } #endif js::SetPreserveWrapperCallback(cx, DummyPreserveWrapperCallback); - JS::SetModuleResolveHook(cx->runtime(), CallModuleResolveHook); + JS::SetModuleResolveHook(cx->runtime(), ShellModuleResolveHook); + JS::SetModuleDynamicImportHook(cx->runtime(), ShellModuleDynamicImportHook); JS::SetModuleMetadataHook(cx->runtime(), CallModuleMetadataHook); result = Shell(cx, &op, envp); #ifdef DEBUG if (OOM_printAllocationCount) { printf("OOM max count: %" PRIu64 "\n", js::oom::counter); }
--- a/js/src/tests/lib/jittests.py +++ b/js/src/tests/lib/jittests.py @@ -367,18 +367,18 @@ class JitTest: cmd = prefix + ['--js-cache', JitTest.CacheDir] cmd += list(set(self.jitflags)) for expr in exprs: cmd += ['-e', expr] for inc in self.other_includes: cmd += ['-f', libdir + inc] if self.skip_if_cond: cmd += ['-e', "if ({}) quit({})".format(self.skip_if_cond, self.SKIPPED_EXIT_STATUS)] + cmd += ['--module-load-path', moduledir] if self.is_module: - cmd += ['--module-load-path', moduledir] cmd += ['--module', path] elif self.is_binast: # In builds with BinAST, this will run the test file. In builds without, # It's a no-op and the tests will silently pass. cmd += ['-B', path] elif self.test_reflect_stringify is None: cmd += ['-f', path] else:
--- a/js/src/vm/EnvironmentObject.cpp +++ b/js/src/vm/EnvironmentObject.cpp @@ -3472,16 +3472,33 @@ js::GetModuleObjectForScript(JSScript* s for (ScopeIter si(script); si; si++) { if (si.kind() == ScopeKind::Module) { return si.scope()->as<ModuleScope>().module(); } } return nullptr; } +Value +js::FindScriptOrModulePrivateForScript(JSScript* script) +{ + while (script) { + ScriptSourceObject* sso = &script->scriptSourceUnwrap(); + Value value = sso->getPrivate(); + if (!value.isUndefined()) { + return value; + } + + MOZ_ASSERT(sso->introductionScript() != script); + script = sso->introductionScript(); + } + + return UndefinedValue(); +} + bool js::GetThisValueForDebuggerMaybeOptimizedOut(JSContext* cx, AbstractFramePtr frame, jsbytecode* pc, MutableHandleValue res) { RootedObject scopeChain(cx); RootedScope scope(cx); if (!GetFrameEnvironmentAndScope(cx, frame, pc, &scopeChain, &scope)) { return false;
--- a/js/src/vm/EnvironmentObject.h +++ b/js/src/vm/EnvironmentObject.h @@ -1177,17 +1177,21 @@ IsFrameInitialEnvironment(AbstractFrameP return false; } extern bool CreateObjectsForEnvironmentChain(JSContext* cx, AutoObjectVector& chain, HandleObject terminatingEnv, MutableHandleObject envObj); -ModuleObject* GetModuleObjectForScript(JSScript* script); +ModuleObject* +GetModuleObjectForScript(JSScript* script); + +Value +FindScriptOrModulePrivateForScript(JSScript* script); ModuleEnvironmentObject* GetModuleEnvironmentForScript(JSScript* script); MOZ_MUST_USE bool GetThisValueForDebuggerMaybeOptimizedOut(JSContext* cx, AbstractFramePtr frame, jsbytecode* pc, MutableHandleValue res); MOZ_MUST_USE bool
--- a/js/src/vm/Interpreter.cpp +++ b/js/src/vm/Interpreter.cpp @@ -4719,17 +4719,33 @@ CASE(JSOP_IMPORTMETA) JSObject* metaObject = GetOrCreateModuleMetaObject(cx, module); if (!metaObject) { goto error; } PUSH_OBJECT(*metaObject); } -END_CASE(JSOP_NEWTARGET) +END_CASE(JSOP_IMPORTMETA) + +CASE(JSOP_DYNAMIC_IMPORT) +{ + ReservedRooted<Value> referencingPrivate(&rootValue0); + referencingPrivate = FindScriptOrModulePrivateForScript(script); + + ReservedRooted<Value> specifier(&rootValue1); + POP_COPY_TO(specifier); + + JSObject* promise = StartDynamicModuleImport(cx, referencingPrivate, specifier); + if (!promise) + goto error; + + PUSH_OBJECT(*promise); +} +END_CASE(JSOP_DYNAMIC_IMPORT) CASE(JSOP_SUPERFUN) { ReservedRooted<JSObject*> superEnvFunc(&rootObject0, &GetSuperEnvFunction(cx, REGS)); ReservedRooted<JSObject*> superFun(&rootObject1); superFun = SuperFunOperation(cx, superEnvFunc); if (!superFun) { goto error;
--- a/js/src/vm/Opcodes.h +++ b/js/src/vm/Opcodes.h @@ -2359,24 +2359,33 @@ 1234567890123456789012345678901234567890 /* * Push "import.meta" * * Category: Variables and Scopes * Type: Modules * Operands: * Stack: => import.meta */ \ - macro(JSOP_IMPORTMETA, 232, "importmeta", NULL, 1, 0, 1, JOF_BYTE) + macro(JSOP_IMPORTMETA, 232, "importmeta", NULL, 1, 0, 1, JOF_BYTE) \ + /* + * Dynamic import of the module specified by the string value on the top of + * the stack. + * + * Category: Variables and Scopes + * Type: Modules + * Operands: + * Stack: arg => rval + */ \ + macro(JSOP_DYNAMIC_IMPORT, 233, "call-import", NULL, 1, 1, 1, JOF_BYTE) /* * In certain circumstances it may be useful to "pad out" the opcode space to * a power of two. Use this macro to do so. */ #define FOR_EACH_TRAILING_UNUSED_OPCODE(macro) \ - macro(233) \ macro(234) \ macro(235) \ macro(236) \ macro(237) \ macro(238) \ macro(239) \ macro(240) \ macro(241) \
--- a/js/src/vm/Runtime.cpp +++ b/js/src/vm/Runtime.cpp @@ -171,17 +171,18 @@ JSRuntime::JSRuntime(JSRuntime* parentRu autoWritableJitCodeActive_(false), oomCallback(nullptr), debuggerMallocSizeOf(ReturnZeroSize), performanceMonitoring_(), stackFormat_(parentRuntime ? js::StackFormat::Default : js::StackFormat::SpiderMonkey), wasmInstances(mutexid::WasmRuntimeInstances), moduleResolveHook(), - moduleMetadataHook() + moduleMetadataHook(), + moduleDynamicImportHook() { JS_COUNT_CTOR(JSRuntime); liveRuntimesCount++; lcovOutput().init(); } JSRuntime::~JSRuntime()
--- a/js/src/vm/Runtime.h +++ b/js/src/vm/Runtime.h @@ -976,16 +976,20 @@ struct JSRuntime : public js::MallocProv // The implementation-defined abstract operation HostResolveImportedModule. js::MainThreadData<JS::ModuleResolveHook> moduleResolveHook; // A hook that implements the abstract operations // HostGetImportMetaProperties and HostFinalizeImportMeta. js::MainThreadData<JS::ModuleMetadataHook> moduleMetadataHook; + // A hook that implements the abstract operation + // HostImportModuleDynamically. + js::MainThreadData<JS::ModuleDynamicImportHook> moduleDynamicImportHook; + public: #if defined(JS_BUILD_BINAST) js::BinaryASTSupport& binast() { return binast_; } private: js::BinaryASTSupport binast_; #endif // defined(JS_BUILD_BINAST)
--- a/js/src/vm/Scope.cpp +++ b/js/src/vm/Scope.cpp @@ -1218,22 +1218,16 @@ ModuleScope::createWithData(JSContext* c /* static */ Shape* ModuleScope::getEmptyEnvironmentShape(JSContext* cx) { const Class* cls = &ModuleEnvironmentObject::class_; return EmptyEnvironmentShape(cx, cls, JSSLOT_FREE(cls), ModuleScopeEnvShapeFlags); } -JSScript* -ModuleScope::script() const -{ - return module()->script(); -} - static const uint32_t WasmInstanceEnvShapeFlags = BaseShape::NOT_EXTENSIBLE | BaseShape::DELEGATE; template <size_t ArrayLength> static JSAtom* GenerateWasmName(JSContext* cx, const char (&prefix)[ArrayLength], uint32_t index) {
--- a/js/src/vm/Scope.h +++ b/js/src/vm/Scope.h @@ -1022,18 +1022,16 @@ class ModuleScope : public Scope uint32_t nextFrameSlot() const { return data().nextFrameSlot; } ModuleObject* module() const { return data().module; } - JSScript* script() const; - static Shape* getEmptyEnvironmentShape(JSContext* cx); }; class WasmInstanceScope : public Scope { friend class BindingIter; friend class Scope; friend class GCMarker;
--- a/js/src/vm/SelfHosting.cpp +++ b/js/src/vm/SelfHosting.cpp @@ -2178,25 +2178,18 @@ intrinsic_NameForTypedArray(JSContext* c static bool intrinsic_HostResolveImportedModule(JSContext* cx, unsigned argc, Value* vp) { CallArgs args = CallArgsFromVp(argc, vp); MOZ_ASSERT(args.length() == 2); RootedModuleObject module(cx, &args[0].toObject().as<ModuleObject>()); RootedString specifier(cx, args[1].toString()); - JS::ModuleResolveHook moduleResolveHook = cx->runtime()->moduleResolveHook; - if (!moduleResolveHook) { - JS_ReportErrorASCII(cx, "Module resolve hook not set"); - return false; - } - - RootedObject result(cx); RootedValue referencingPrivate(cx, JS::GetModulePrivate(module)); - result = moduleResolveHook(cx, referencingPrivate, specifier); + RootedObject result(cx, CallModuleResolveHook(cx, referencingPrivate, specifier)); if (!result) { return false; } if (!result->is<ModuleObject>()) { JS_ReportErrorASCII(cx, "Module resolve hook did not return Module object"); return false; }
--- a/js/src/vm/Stack.cpp +++ b/js/src/vm/Stack.cpp @@ -149,23 +149,21 @@ AssertScopeMatchesEnvironment(Scope* sco env = &env->as<LexicalEnvironmentObject>().enclosingEnvironment(); MOZ_ASSERT(env->is<GlobalObject>()); break; case ScopeKind::NonSyntactic: MOZ_CRASH("NonSyntactic should not have a syntactic environment"); break; - case ScopeKind::Module: { - ModuleObject* module = &env->as<ModuleEnvironmentObject>().module(); - MOZ_ASSERT_IF(module->maybeScript(), - module->script() == si.scope()->as<ModuleScope>().script()); + case ScopeKind::Module: + MOZ_ASSERT(&env->as<ModuleEnvironmentObject>().module() == + si.scope()->as<ModuleScope>().module()); env = &env->as<ModuleEnvironmentObject>().enclosingEnvironment(); break; - } case ScopeKind::WasmInstance: env = &env->as<WasmInstanceEnvironmentObject>().enclosingEnvironment(); break; case ScopeKind::WasmFunction: env = &env->as<WasmFunctionCallObject>().enclosingEnvironment(); break;
--- a/js/src/wasm/cranelift/Cargo.toml +++ b/js/src/wasm/cranelift/Cargo.toml @@ -10,13 +10,13 @@ name = "baldrdash" [dependencies] cranelift-codegen = "0.20.0" cranelift-wasm = "0.20.1" target-lexicon = "0.0.3" log = { version = "0.4.4", default-features = false, features = ["release_max_level_warn"] } env_logger = "0.5.6" [build-dependencies] -bindgen = {version = "0.39", default-features = false} # disable `logging` to reduce code size +bindgen = {version = "0.43", default-features = false} # disable `logging` to reduce code size # Uncomment this to enable perf support in release mode. #[profile.release] #debug = true
--- a/python/mozbuild/mozbuild/backend/cargo_build_defs.py +++ b/python/mozbuild/mozbuild/backend/cargo_build_defs.py @@ -107,16 +107,20 @@ cargo_extra_outputs = { 'bzip2-1.0.6/bzlib.o', 'bzip2-1.0.6/compress.o', 'bzip2-1.0.6/crctable.o', 'bzip2-1.0.6/decompress.o', 'bzip2-1.0.6/huffman.o', 'bzip2-1.0.6/randtable.o', 'libbz2.a', ], + 'clang-sys': [ + 'common.rs', + 'dynamic.rs', + ], 'cranelift-codegen': [ 'binemit-arm32.rs', 'binemit-arm64.rs', 'binemit-riscv.rs', 'binemit-x86.rs', 'encoding-arm32.rs', 'encoding-arm64.rs', 'encoding-riscv.rs',
--- a/servo/components/style/Cargo.toml +++ b/servo/components/style/Cargo.toml @@ -73,12 +73,12 @@ time = "0.1" uluru = "0.3" unicode-bidi = "0.3" unicode-segmentation = "1.0" void = "1.0.2" [build-dependencies] lazy_static = "1" log = "0.4" -bindgen = { version = "0.39", optional = true, default-features = false } +bindgen = { version = "0.43", optional = true, default-features = false } regex = {version = "1.0", optional = true} walkdir = "2.1.4" toml = {version = "0.4.5", optional = true, default-features = false}
new file mode 100644 --- /dev/null +++ b/taskcluster/ci/bouncer-locations-breakpoint/kind.yml @@ -0,0 +1,31 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +loader: taskgraph.loader.transform:loader + +transforms: + - taskgraph.transforms.bouncer_locations_breakpoint:transforms + - taskgraph.transforms.task:transforms + +job-defaults: + description: nightly bouncer locations breakpoint job + attributes: + build_platform: linux64-nightly + nightly: true + worker-type: + by-project: + mozilla-central: null-provisioner/human-breakpoint + default: invalid/invalid + worker: + implementation: bouncer-locations-breakpoint + run-on-projects: ['mozilla-central'] + treeherder: + symbol: BncLoc-Br + kind: other + tier: 2 + +jobs: + firefox: + treeherder: + platform: firefox-release/opt
--- a/taskcluster/ci/bouncer-locations/kind.yml +++ b/taskcluster/ci/bouncer-locations/kind.yml @@ -3,16 +3,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. loader: taskgraph.loader.transform:loader transforms: - taskgraph.transforms.bouncer_locations:transforms - taskgraph.transforms.task:transforms +kind-dependencies: + - bouncer-locations-breakpoint + job-defaults: description: nightly bouncer locations job attributes: build_platform: linux64-nightly nightly: true worker-type: by-project: mozilla-central: scriptworker-prov-v1/bouncer-v1
--- a/taskcluster/docs/kinds.rst
+++ b/taskcluster/docs/kinds.rst
@@ -306,16 +306,20 @@ Update Bouncer's (download.mozilla.org) 
 cron-bouncer-check
 ------------------
 Checks Bouncer (download.mozilla.org) uptake.
 
 bouncer-locations
 -----------------
 Updates nightly bouncer locations for version bump
 
+bouncer-locations-breakpoint
+----------------------------
+Human breakpoint that blocks the bouncer-locations job from running until shippable builds are implemented
+
 release-bouncer-check
 ---------------------
 Checks Bouncer (download.mozilla.org) uptake as part of the release tasks.
 
 release-generate-checksums
 --------------------------
 Generate the per-release checksums along with the summaries
--- a/taskcluster/taskgraph/transforms/bouncer_locations.py
+++ b/taskcluster/taskgraph/transforms/bouncer_locations.py
@@ -23,11 +23,17 @@ def make_task_worker(config, jobs):
         resolve_keyed_by(
             job, 'scopes', item_name=job['name'], project=config.params['project']
         )
         resolve_keyed_by(
             job, 'bouncer-products', item_name=job['name'], project=config.params['project']
         )
 
         job['worker']['bouncer-products'] = job['bouncer-products']
+        del job['bouncer-products']
 
-        del job['bouncer-products']
+        # chain the breakpoint as a dependency of this task
+        dependencies = {}
+        for dep_task in config.kind_dependencies_tasks:
+            dependencies[dep_task.kind] = dep_task.label
+
+        job.setdefault('dependencies', {}).update(dependencies)
 
         yield job
new file mode 100644 --- /dev/null +++ b/taskcluster/taskgraph/transforms/bouncer_locations_breakpoint.py @@ -0,0 +1,24 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +from __future__ import absolute_import, print_function, unicode_literals + +import logging + +from taskgraph.transforms.base import TransformSequence +from taskgraph.util.schema import resolve_keyed_by + +logger = logging.getLogger(__name__) + + +transforms = TransformSequence() + + +@transforms.add +def make_task_worker(config, jobs): + for job in jobs: + resolve_keyed_by( + job, 'worker-type', item_name=job['name'], project=config.params['project'] + ) + job['worker']['payload'] = {} + yield job
--- a/taskcluster/taskgraph/transforms/task.py +++ b/taskcluster/taskgraph/transforms/task.py @@ -567,16 +567,19 @@ task_description_schema = Schema({ }], }, { Required('implementation'): 'bouncer-aliases', Required('entries'): object, }, { Required('implementation'): 'bouncer-locations', Required('bouncer-products'): [basestring], }, { + Required('implementation'): 'bouncer-locations-breakpoint', + Required('payload'): object, + }, { Required('implementation'): 'bouncer-submission', Required('locales'): [basestring], Required('entries'): object, }, { Required('implementation'): 'invalid', # an invalid task is one which should never actually be created; this is used in # release automation on branches where the task just doesn't make sense Extra: object, @@ -1210,16 +1213,21 @@ def build_bouncer_locations_payload(conf release_config = get_release_config(config) task_def['payload'] = { 'bouncer_products': worker['bouncer-products'], 'version': release_config['version'], } +@payload_builder('bouncer-locations-breakpoint') +def build_bouncer_locations_breakpoint_payload(config, task, task_def): + task_def['payload'] = task['worker']['payload'] + + @payload_builder('bouncer-submission') def build_bouncer_submission_payload(config, task, task_def): worker = task['worker'] task_def['payload'] = { 'locales': worker['locales'], 'submission_entries': worker['entries'] }
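The new worker schema entry and payload builder are deliberately minimal: the breakpoint task carries an empty payload, and build_bouncer_locations_breakpoint_payload just forwards it. Assuming the Required marker in task.py comes from the voluptuous package (which the taskgraph schema helpers build on), the fragment can be exercised on its own roughly as follows; this is a sketch, not code from the changeset:

# Standalone sketch; assumes the voluptuous package provides the
# Required marker used by task_description_schema.
from voluptuous import Required, Schema

worker_schema = Schema({
    Required('implementation'): 'bouncer-locations-breakpoint',
    Required('payload'): object,
})

task = {'worker': worker_schema({
    'implementation': 'bouncer-locations-breakpoint',
    'payload': {},
})}

task_def = {}
# Mirrors build_bouncer_locations_breakpoint_payload().
task_def['payload'] = task['worker']['payload']
assert task_def['payload'] == {}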
--- a/testing/geckodriver/README.md +++ b/testing/geckodriver/README.md @@ -46,17 +46,28 @@ Documentation * [geckodriver usage](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/Usage.html) * [Supported platforms](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/Support.html) * [Firefox capabilities](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/Capabilities.html) * [Capabilities example](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/Capabilities.html#capabilities-example) * [Enabling trace logs](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/TraceLogs.html) * [Analyzing crash data from Firefox](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/CrashReports.html) -* [Contributing](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/index.html#for-developers) +* [Contributing](https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/#for-developers) + + +Source code +----------- + +geckodriver’s canonical source code can be found in [mozilla-central]. +We only use this GitHub repository for issue tracking and making releases. +See our [contribution documentation] for more information. + +[mozilla-central]: https://hg.mozilla.org/mozilla-central/file/tip/testing/geckodriver +[contribution documentation]: https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/#for-developers Contact ------- The mailing list for geckodriver discussion is tools-marionette@lists.mozilla.org ([subscribe], [archive]).
--- a/testing/web-platform/tests/wasm/jsapi/constructor/compile.any.js +++ b/testing/web-platform/tests/wasm/jsapi/constructor/compile.any.js @@ -68,14 +68,14 @@ promise_test(() => { return WebAssembly.compile(emptyModuleBinary).then(assert_Module); }, "Result type"); promise_test(() => { return WebAssembly.compile(emptyModuleBinary, {}).then(assert_Module); }, "Stray argument"); promise_test(() => { - const buffer = new WasmModuleBuilder().toBuffer(); + const buffer = new Uint8Array(new WasmModuleBuilder().toBuffer()); assert_equals(buffer[0], 0); const promise = WebAssembly.compile(buffer); buffer[0] = 1; return promise.then(assert_Module); }, "Changing the buffer");
--- a/testing/web-platform/tests/wasm/jsapi/constructor/instantiate.any.js +++ b/testing/web-platform/tests/wasm/jsapi/constructor/instantiate.any.js @@ -98,14 +98,14 @@ for (const [name, fn] of instanceTestFac } promise_test(t => { const buffer = new Uint8Array(); return promise_rejects(t, new WebAssembly.CompileError(), WebAssembly.instantiate(buffer)); }, "Invalid code"); promise_test(() => { - const buffer = new WasmModuleBuilder().toBuffer(); + const buffer = new Uint8Array(new WasmModuleBuilder().toBuffer()); assert_equals(buffer[0], 0); const promise = WebAssembly.instantiate(buffer); buffer[0] = 1; return promise.then(assert_WebAssemblyInstantiatedSource); }, "Changing the buffer");
--- a/testing/web-platform/tests/wasm/jsapi/instanceTestFactory.js +++ b/testing/web-platform/tests/wasm/jsapi/instanceTestFactory.js @@ -128,28 +128,24 @@ const instanceTestFactory = [ [ "No imports", function() { const builder = new WasmModuleBuilder(); builder .addFunction("fn", kSig_v_d) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); builder .addFunction("fn2", kSig_v_v) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); - builder.setFunctionTableLength(1); + builder.setTableLength(1); builder.addExportOfKind("table", kExternalTable, 0); builder.addGlobal(kWasmI32, true) .exportAs("global") .init = 7; builder.addGlobal(kWasmF64, true) .exportAs("global2") .init = 1.2; @@ -185,17 +181,16 @@ const instanceTestFactory = [ const index = builder.addImportedGlobal("module", "global", kWasmI32); builder .addFunction("fn", kSig_i_v) .addBody([ kExprGetGlobal, index, kExprReturn, - kExprEnd, ]) .exportFunc(); const buffer = builder.toBuffer(); const imports = { "module": { "global": value,
--- a/testing/web-platform/tests/wasm/jsapi/module/exports.any.js +++ b/testing/web-platform/tests/wasm/jsapi/module/exports.any.js @@ -87,28 +87,24 @@ test(() => { assert_not_equals(WebAssembly.Module.exports(module), WebAssembly.Module.exports(module)); }, "Empty module: array caching"); test(() => { const builder = new WasmModuleBuilder(); builder .addFunction("fn", kSig_v_v) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); builder .addFunction("fn2", kSig_v_v) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); - builder.setFunctionTableLength(1); + builder.setTableLength(1); builder.addExportOfKind("table", kExternalTable, 0); builder.addGlobal(kWasmI32, true) .exportAs("global") .init = 7; builder.addGlobal(kWasmF64, true) .exportAs("global2") .init = 1.2;
--- a/testing/web-platform/tests/wasm/jsapi/table/get-set.any.js +++ b/testing/web-platform/tests/wasm/jsapi/table/get-set.any.js @@ -4,25 +4,21 @@ // META: script=assertions.js let functions; setup(() => { const builder = new WasmModuleBuilder(); builder .addFunction("fn", kSig_v_d) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); builder .addFunction("fn2", kSig_v_v) - .addBody([ - kExprEnd - ]) + .addBody([]) .exportFunc(); const buffer = builder.toBuffer() const module = new WebAssembly.Module(buffer); const instance = new WebAssembly.Instance(module, {}); functions = instance.exports; });
--- a/testing/web-platform/tests/wasm/jsapi/wasm-constants.js +++ b/testing/web-platform/tests/wasm/jsapi/wasm-constants.js @@ -16,17 +16,17 @@ function bytes() { } // Header declaration constants var kWasmH0 = 0; var kWasmH1 = 0x61; var kWasmH2 = 0x73; var kWasmH3 = 0x6d; -var kWasmV0 = 1; +var kWasmV0 = 0x1; var kWasmV1 = 0; var kWasmV2 = 0; var kWasmV3 = 0; var kHeaderSize = 8; var kPageSize = 65536; function bytesWithHeader() { @@ -60,34 +60,38 @@ let kMemorySectionCode = 5; // Memory let kGlobalSectionCode = 6; // Global declarations let kExportSectionCode = 7; // Exports let kStartSectionCode = 8; // Start function declaration let kElementSectionCode = 9; // Elements section let kCodeSectionCode = 10; // Function code let kDataSectionCode = 11; // Data segments let kNameSectionCode = 12; // Name section (encoded as string) +// Name section types +let kModuleNameCode = 0; +let kFunctionNamesCode = 1; +let kLocalNamesCode = 2; + let kWasmFunctionTypeForm = 0x60; let kWasmAnyFunctionTypeForm = 0x70; -let kResizableMaximumFlag = 1; +let kHasMaximumFlag = 1; // Function declaration flags let kDeclFunctionName = 0x01; let kDeclFunctionImport = 0x02; let kDeclFunctionLocals = 0x04; let kDeclFunctionExport = 0x08; // Local types let kWasmStmt = 0x40; let kWasmI32 = 0x7f; let kWasmI64 = 0x7e; let kWasmF32 = 0x7d; let kWasmF64 = 0x7c; -let kWasmS128 = 0x7b; let kExternalFunction = 0; let kExternalTable = 1; let kExternalMemory = 2; let kExternalGlobal = 3; let kTableZero = 0; let kMemoryZero = 0; @@ -99,26 +103,25 @@ let kSig_i_l = makeSig([kWasmI64], [kWas let kSig_i_ii = makeSig([kWasmI32, kWasmI32], [kWasmI32]); let kSig_i_iii = makeSig([kWasmI32, kWasmI32, kWasmI32], [kWasmI32]); let kSig_d_dd = makeSig([kWasmF64, kWasmF64], [kWasmF64]); let kSig_l_ll = makeSig([kWasmI64, kWasmI64], [kWasmI64]); let kSig_i_dd = makeSig([kWasmF64, kWasmF64], [kWasmI32]); let kSig_v_v = makeSig([], []); let kSig_i_v = makeSig([], [kWasmI32]); let kSig_l_v = makeSig([], [kWasmI64]); -let kSig_f_v = makeSig([], [kWasmF64]); +let kSig_f_v = makeSig([], [kWasmF32]); let kSig_d_v = makeSig([], [kWasmF64]); let kSig_v_i = makeSig([kWasmI32], []); let kSig_v_ii = makeSig([kWasmI32, kWasmI32], []); let kSig_v_iii = makeSig([kWasmI32, kWasmI32, kWasmI32], []); let kSig_v_l = makeSig([kWasmI64], []); let kSig_v_d = makeSig([kWasmF64], []); let kSig_v_dd = makeSig([kWasmF64, kWasmF64], []); let kSig_v_ddi = makeSig([kWasmF64, kWasmF64, kWasmI32], []); -let kSig_s_v = makeSig([], [kWasmS128]); function makeSig(params, results) { return {params: params, results: results}; } function makeSig_v_x(x) { return makeSig([x], []); } @@ -186,17 +189,17 @@ let kExprI64StoreMem = 0x37; let kExprF32StoreMem = 0x38; let kExprF64StoreMem = 0x39; let kExprI32StoreMem8 = 0x3a; let kExprI32StoreMem16 = 0x3b; let kExprI64StoreMem8 = 0x3c; let kExprI64StoreMem16 = 0x3d; let kExprI64StoreMem32 = 0x3e; let kExprMemorySize = 0x3f; -let kExprGrowMemory = 0x40; +let kExprMemoryGrow = 0x40; let kExprI32Eqz = 0x45; let kExprI32Eq = 0x46; let kExprI32Ne = 0x47; let kExprI32LtS = 0x48; let kExprI32LtU = 0x49; let kExprI32GtS = 0x4a; let kExprI32GtU = 0x4b; let kExprI32LeS = 0x4c; @@ -334,41 +337,39 @@ let kTrapMsgs = [ "remainder by zero", "integer result unrepresentable", "invalid function", "function signature mismatch", "invalid index into function table" ]; function assertTraps(trap, code) { - var threwException = true; - try { - if (typeof code === 'function') { - code(); - } else { - eval(code); - } - threwException = false; - } catch (e) { 
- assertEquals("object", typeof e); - assertEquals(kTrapMsgs[trap], e.message); - // Success. - return; + try { + if (typeof code === 'function') { + code(); + } else { + eval(code); } - throw new MjsUnitAssertionError("Did not trap, expected: " + kTrapMsgs[trap]); + } catch (e) { + assertEquals('object', typeof e); + assertEquals(kTrapMsgs[trap], e.message); + // Success. + return; + } + throw new MjsUnitAssertionError('Did not trap, expected: ' + kTrapMsgs[trap]); } function assertWasmThrows(value, code) { - assertEquals("number", typeof(value)); - try { - if (typeof code === 'function') { - code(); - } else { - eval(code); - } - } catch (e) { - assertEquals("number", typeof e); - assertEquals(value, e); - // Success. - return; + assertEquals('number', typeof value); + try { + if (typeof code === 'function') { + code(); + } else { + eval(code); } - throw new MjsUnitAssertionError("Did not throw at all, expected: " + value); + } catch (e) { + assertEquals('number', typeof e); + assertEquals(value, e); + // Success. + return; + } + throw new MjsUnitAssertionError('Did not throw, expected: ' + value); }
--- a/testing/web-platform/tests/wasm/jsapi/wasm-module-builder.js +++ b/testing/web-platform/tests/wasm/jsapi/wasm-module-builder.js @@ -64,50 +64,89 @@ class Binary extends Array { this.push(kWasmH0, kWasmH1, kWasmH2, kWasmH3, kWasmV0, kWasmV1, kWasmV2, kWasmV3); } emit_section(section_code, content_generator) { // Emit section name. this.emit_u8(section_code); // Emit the section to a temporary buffer: its full length isn't know yet. - let section = new Binary; + const section = new Binary; content_generator(section); // Emit section length. this.emit_u32v(section.length); // Copy the temporary buffer. - this.push(...section); + for (const b of section) { + this.push(b); + } } } class WasmFunctionBuilder { constructor(module, name, type_index) { this.module = module; this.name = name; this.type_index = type_index; this.body = []; } + numLocalNames() { + if (this.local_names === undefined) return 0; + let num_local_names = 0; + for (let loc_name of this.local_names) { + if (loc_name !== undefined) ++num_local_names; + } + return num_local_names; + } + exportAs(name) { this.module.addExport(name, this.index); return this; } exportFunc() { this.exportAs(this.name); return this; } addBody(body) { + for (let b of body) { + if (typeof b !== 'number' || (b & (~0xFF)) !== 0 ) + throw new Error('invalid body (entries must be 8 bit numbers): ' + body); + } + this.body = body.slice(); + // Automatically add the end for the function block to the body. + this.body.push(kExprEnd); + return this; + } + + addBodyWithEnd(body) { this.body = body; return this; } - addLocals(locals) { - this.locals = locals; + getNumLocals() { + let total_locals = 0; + for (let l of this.locals || []) { + for (let type of ["i32", "i64", "f32", "f64"]) { + total_locals += l[type + "_count"] || 0; + } + } + return total_locals; + } + + addLocals(locals, names) { + const old_num_locals = this.getNumLocals(); + if (!this.locals) this.locals = [] + this.locals.push(locals); + if (names) { + if (!this.local_names) this.local_names = []; + const missing_names = old_num_locals - this.local_names.length; + this.local_names.push(...new Array(missing_names), ...names); + } return this; } end() { return this.module; } } @@ -128,20 +167,20 @@ class WasmGlobalBuilder { class WasmModuleBuilder { constructor() { this.types = []; this.imports = []; this.exports = []; this.globals = []; this.functions = []; - this.function_table = []; - this.function_table_length = 0; - this.function_table_inits = []; - this.segments = []; + this.table_length_min = 0; + this.table_length_max = undefined; + this.element_segments = []; + this.data_segments = []; this.explicit = []; this.num_imported_funcs = 0; this.num_imported_globals = 0; return this; } addStart(start_index) { this.start_index = start_index; @@ -153,16 +192,32 @@ class WasmModuleBuilder { return this; } addExplicitSection(bytes) { this.explicit.push(bytes); return this; } + stringToBytes(name) { + var result = new Binary(); + result.emit_u32v(name.length); + for (var i = 0; i < name.length; i++) { + result.emit_u8(name.charCodeAt(i)); + } + return result; + } + + addCustomSection(name, bytes) { + name = this.stringToBytes(name); + var length = new Binary(); + length.emit_u32v(name.length + bytes.length); + this.explicit.push([0, ...length, ...name, ...bytes]); + } + addType(type) { // TODO: canonicalize types? 
this.types.push(type); return this.types.length - 1; } addGlobal(local_type, mutable) { let glob = new WasmGlobalBuilder(this, local_type, mutable); @@ -175,25 +230,31 @@ class WasmModuleBuilder { let type_index = (typeof type) == "number" ? type : this.addType(type); let func = new WasmFunctionBuilder(this, name, type_index); func.index = this.functions.length + this.num_imported_funcs; this.functions.push(func); return func; } addImport(module = "", name, type) { + if (this.functions.length != 0) { + throw new Error('Imported functions must be declared before local ones'); + } let type_index = (typeof type) == "number" ? type : this.addType(type); this.imports.push({module: module, name: name, kind: kExternalFunction, type: type_index}); return this.num_imported_funcs++; } - addImportedGlobal(module = "", name, type) { + addImportedGlobal(module = "", name, type, mutable = false) { + if (this.globals.length != 0) { + throw new Error('Imported globals must be declared before local ones'); + } let o = {module: module, name: name, kind: kExternalGlobal, type: type, - mutable: false} + mutable: mutable}; this.imports.push(o); return this.num_imported_globals++; } addImportedMemory(module = "", name, initial = 0, maximum) { let o = {module: module, name: name, kind: kExternalMemory, initial: initial, maximum: maximum}; this.imports.push(o); @@ -212,42 +273,61 @@ class WasmModuleBuilder { } addExportOfKind(name, kind, index) { this.exports.push({name: name, kind: kind, index: index}); return this; } addDataSegment(addr, data, is_global = false) { - this.segments.push({addr: addr, data: data, is_global: is_global}); - return this.segments.length - 1; + this.data_segments.push({addr: addr, data: data, is_global: is_global}); + return this.data_segments.length - 1; } exportMemoryAs(name) { this.exports.push({name: name, kind: kExternalMemory, index: 0}); } - addFunctionTableInit(base, is_global, array) { - this.function_table_inits.push({base: base, is_global: is_global, + addElementSegment(base, is_global, array, is_import = false) { + this.element_segments.push({base: base, is_global: is_global, array: array}); if (!is_global) { var length = base + array.length; - if (length > this.function_table_length) { - this.function_table_length = length; + if (length > this.table_length_min && !is_import) { + this.table_length_min = length; + } + if (length > this.table_length_max && !is_import) { + this.table_length_max = length; } } return this; } appendToTable(array) { - return this.addFunctionTableInit(this.function_table.length, false, array); + for (let n of array) { + if (typeof n != 'number') + throw new Error('invalid table (entries have to be numbers): ' + array); + } + return this.addElementSegment(this.table_length_min, false, array); } - setFunctionTableLength(length) { - this.function_table_length = length; + setTableBounds(min, max) { + this.table_length_min = min; + this.table_length_max = max; + return this; + } + + setTableLength(length) { + this.table_length_min = length; + this.table_length_max = length; + return this; + } + + setName(name) { + this.name = name; return this; } toArray(debug = false) { let binary = new Binary; let wasm = this; // Add header @@ -300,50 +380,49 @@ class WasmModuleBuilder { } else { throw new Error("unknown/unsupported import kind " + imp.kind); } } }); } // Add functions declarations - let has_names = false; - let names = false; if (wasm.functions.length > 0) { if (debug) print("emitting function decls @ " + binary.length); 
binary.emit_section(kFunctionSectionCode, section => { section.emit_u32v(wasm.functions.length); for (let func of wasm.functions) { - has_names = has_names || (func.name != undefined && - func.name.length > 0); section.emit_u32v(func.type_index); } }); } - // Add function_table. - if (wasm.function_table_length > 0) { + // Add table section + if (wasm.table_length_min > 0) { if (debug) print("emitting table @ " + binary.length); binary.emit_section(kTableSectionCode, section => { section.emit_u8(1); // one table entry section.emit_u8(kWasmAnyFunctionTypeForm); - section.emit_u8(1); - section.emit_u32v(wasm.function_table_length); - section.emit_u32v(wasm.function_table_length); + const max = wasm.table_length_max; + const has_max = max !== undefined; + section.emit_u8(has_max ? kHasMaximumFlag : 0); + section.emit_u32v(wasm.table_length_min); + if (has_max) section.emit_u32v(max); }); } // Add memory section - if (wasm.memory != undefined) { + if (wasm.memory !== undefined) { if (debug) print("emitting memory @ " + binary.length); binary.emit_section(kMemorySectionCode, section => { section.emit_u8(1); // one memory entry - section.emit_u32v(kResizableMaximumFlag); + const has_max = wasm.memory.max !== undefined; + section.emit_u8(has_max ? kHasMaximumFlag : 0); section.emit_u32v(wasm.memory.min); - section.emit_u32v(wasm.memory.max); + if (has_max) section.emit_u32v(wasm.memory.max); }); } // Add global section. if (wasm.globals.length > 0) { if (debug) print ("emitting globals @ " + binary.length); binary.emit_section(kGlobalSectionCode, section => { section.emit_u32v(wasm.globals.length); @@ -354,17 +433,17 @@ class WasmModuleBuilder { // Emit a constant initializer. switch (global.type) { case kWasmI32: section.emit_u8(kExprI32Const); section.emit_u32v(global.init); break; case kWasmI64: section.emit_u8(kExprI64Const); - section.emit_u8(global.init); + section.emit_u32v(global.init); break; case kWasmF32: section.emit_u8(kExprF32Const); f32_view[0] = global.init; section.emit_u8(byte_view[0]); section.emit_u8(byte_view[1]); section.emit_u8(byte_view[2]); section.emit_u8(byte_view[3]); @@ -388,17 +467,17 @@ class WasmModuleBuilder { section.emit_u32v(global.init_index); } section.emit_u8(kExprEnd); // end of init expression } }); } // Add export table. - var mem_export = (wasm.memory != undefined && wasm.memory.exp); + var mem_export = (wasm.memory !== undefined && wasm.memory.exp); var exports_count = wasm.exports.length + (mem_export ? 1 : 0); if (exports_count > 0) { if (debug) print("emitting exports @ " + binary.length); binary.emit_section(kExportSectionCode, section => { section.emit_u32v(exports_count); for (let exp of wasm.exports) { section.emit_string(exp.name); section.emit_u8(exp.kind); @@ -408,32 +487,32 @@ class WasmModuleBuilder { section.emit_string("memory"); section.emit_u8(kExternalMemory); section.emit_u8(0); } }); } // Add start function section. - if (wasm.start_index != undefined) { + if (wasm.start_index !== undefined) { if (debug) print("emitting start function @ " + binary.length); binary.emit_section(kStartSectionCode, section => { section.emit_u32v(wasm.start_index); }); } - // Add table elements. 
- if (wasm.function_table_inits.length > 0) { - if (debug) print("emitting table @ " + binary.length); + // Add element segments + if (wasm.element_segments.length > 0) { + if (debug) print("emitting element segments @ " + binary.length); binary.emit_section(kElementSectionCode, section => { - var inits = wasm.function_table_inits; + var inits = wasm.element_segments; section.emit_u32v(inits.length); - section.emit_u8(0); // table index for (let init of inits) { + section.emit_u8(0); // table index if (init.is_global) { section.emit_u8(kExprGetGlobal); } else { section.emit_u8(kExprI32Const); } section.emit_u32v(init.base); section.emit_u8(kExprEnd); section.emit_u32v(init.array.length); @@ -448,19 +527,17 @@ class WasmModuleBuilder { if (wasm.functions.length > 0) { // emit function bodies if (debug) print("emitting code @ " + binary.length); binary.emit_section(kCodeSectionCode, section => { section.emit_u32v(wasm.functions.length); for (let func of wasm.functions) { // Function body length will be patched later. let local_decls = []; - let l = func.locals; - if (l != undefined) { - let local_decls_count = 0; + for (let l of func.locals || []) { if (l.i32_count > 0) { local_decls.push({count: l.i32_count, type: kWasmI32}); } if (l.i64_count > 0) { local_decls.push({count: l.i64_count, type: kWasmI64}); } if (l.f32_count > 0) { local_decls.push({count: l.f32_count, type: kWasmF32}); @@ -480,21 +557,21 @@ class WasmModuleBuilder { section.emit_u32v(header.length + func.body.length); section.emit_bytes(header); section.emit_bytes(func.body); } }); } // Add data segments. - if (wasm.segments.length > 0) { + if (wasm.data_segments.length > 0) { if (debug) print("emitting data segments @ " + binary.length); binary.emit_section(kDataSectionCode, section => { - section.emit_u32v(wasm.segments.length); - for (let seg of wasm.segments) { + section.emit_u32v(wasm.data_segments.length); + for (let seg of wasm.data_segments) { section.emit_u8(0); // linear memory index 0 if (seg.is_global) { // initializer is a global variable section.emit_u8(kExprGetGlobal); section.emit_u32v(seg.addr); } else { // initializer is a constant section.emit_u8(kExprI32Const); @@ -508,48 +585,86 @@ class WasmModuleBuilder { } // Add any explicitly added sections for (let exp of wasm.explicit) { if (debug) print("emitting explicit @ " + binary.length); binary.emit_bytes(exp); } - // Add function names. - if (has_names) { - if (debug) print("emitting names @ " + binary.length); + // Add names. + let num_function_names = 0; + let num_functions_with_local_names = 0; + for (let func of wasm.functions) { + if (func.name !== undefined) ++num_function_names; + if (func.numLocalNames() > 0) ++num_functions_with_local_names; + } + if (num_function_names > 0 || num_functions_with_local_names > 0 || + wasm.name !== undefined) { + if (debug) print('emitting names @ ' + binary.length); binary.emit_section(kUnknownSectionCode, section => { - section.emit_string("name"); - var count = wasm.functions.length + wasm.num_imported_funcs; - section.emit_u32v(count); - for (var i = 0; i < wasm.num_imported_funcs; i++) { - section.emit_u8(0); // empty string - section.emit_u8(0); // local names count == 0 + section.emit_string('name'); + // Emit module name. + if (wasm.name !== undefined) { + section.emit_section(kModuleNameCode, name_section => { + name_section.emit_string(wasm.name); + }); } - for (let func of wasm.functions) { - var name = func.name == undefined ? 
"" : func.name; - section.emit_string(name); - section.emit_u8(0); // local names count == 0 + // Emit function names. + if (num_function_names > 0) { + section.emit_section(kFunctionNamesCode, name_section => { + name_section.emit_u32v(num_function_names); + for (let func of wasm.functions) { + if (func.name === undefined) continue; + name_section.emit_u32v(func.index); + name_section.emit_string(func.name); + } + }); + } + // Emit local names. + if (num_functions_with_local_names > 0) { + section.emit_section(kLocalNamesCode, name_section => { + name_section.emit_u32v(num_functions_with_local_names); + for (let func of wasm.functions) { + if (func.numLocalNames() == 0) continue; + name_section.emit_u32v(func.index); + name_section.emit_u32v(func.numLocalNames()); + for (let i = 0; i < func.local_names.length; ++i) { + if (func.local_names[i] === undefined) continue; + name_section.emit_u32v(i); + name_section.emit_string(func.local_names[i]); + } + } + }); } }); } return binary; } toBuffer(debug = false) { let bytes = this.toArray(debug); let buffer = new ArrayBuffer(bytes.length); let view = new Uint8Array(buffer); for (let i = 0; i < bytes.length; i++) { let val = bytes[i]; if ((typeof val) == "string") val = val.charCodeAt(0); view[i] = val | 0; } - return new Uint8Array(buffer); + return buffer; + } + + instantiate(ffi) { + let module = new WebAssembly.Module(this.toBuffer()); + let instance = new WebAssembly.Instance(module, ffi); + return instance; } - instantiate(...args) { - let module = new WebAssembly.Module(this.toBuffer()); - let instance = new WebAssembly.Instance(module, ...args); - return instance; + asyncInstantiate(ffi) { + return WebAssembly.instantiate(this.toBuffer(), ffi) + .then(({module, instance}) => instance); + } + + toModule(debug = false) { + return new WebAssembly.Module(this.toBuffer(debug)); } }
--- a/third_party/rust/bindgen/.cargo-checksum.json +++ b/third_party/rust/bindgen/.cargo-checksum.json @@ -1,1 +1,1 @@ -{"files":{"Cargo.toml":"b4b52379175634c9d1f97f7fffefb3092e7bb6bd70e6f98000e55f949e4af424","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"630d1a1d123c131bad0fec23173e263ba8ecc064b5cd8446d4cab7ffd197db45","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"9d41b7848cea37e8741fa7bc947ba58a83647824b1a0bbe7ff75012c412eab13","src/clang.rs":"ca3bc42a3d6a8057d105348d604e01d37fc88a8088701c062ee138a9a6b49a85","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"f37a75b1fb40633c35e2b9231d8f4e244cd3deef0dfd5e48f074fdcc760f7c94","src/codegen/impl_debug.rs":"71b3b9c1bcb71b5705945e2d4b7457ec57127153031c533ae3ef76f8e0fc460e","src/codegen/impl_partialeq.rs":"cd972c60e7dcb700c6f01185c055933d800b3b5d4959d00b2232e17dfa2d097c","src/codegen/mod.rs":"b3ea487f50e920563ef20f2ab3829bb6eae0cf0f0f08d98d8c52ec6fb4db8bad","src/codegen/struct_layout.rs":"9bd0e3455e55e2a1faa4f332a327c2529a21bdfdd0fcb3a45bc5cdd7801d288f","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"57a44c77fa87d0a5113f600c6bc70e2b98cf029bf41f1205cd45f453666afb73","src/ir/analysis/derive_copy.rs":"b4ff30eea031b9d41715e63114f76dbcce580d9be1b7146ff47d51ab2c108423","src/ir/analysis/derive_debug.rs":"9266e72a255f9ee8536785f33f80c1486b5dd4bc6827fa3c88201e01a22adc96","src/ir/analysis/derive_default.rs":"de59bd34a0cff1f9cd9e182c6bfd0bfbaa76551fe55929ab940fc83af9383ef7","src/ir/analysis/derive_hash.rs":"849b23c9c72e69f9e00bf7b4d99373e01603f45f2fddce21a53bd7f176daa687","src/ir/analysis/derive_partialeq_or_partialord.rs":"512d4503242171280ee8c94b094d090c3456b84d202f46fbee61e2c52c789082","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"b2c0fa08d9f20c513266cc200b75af3ff785c6d05cf8d7f3a9710975a41c13f9","src/ir/analysis/has_type_param_in_array.rs":"8ac853669c8957b125376def466eed9a5d733f6bfd3e83b2c16e0f2ed67541f9","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"c9e53740e6c8850cabc719d09700ee490fda0f866d6a6574761c95a0fcf7ebd3","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"000481754e5433d7c0886b9ce8b93b64c7ab1ae52867d211c73c7c4b336649a2","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"b255306e5ffd85a223395f96ebdfe82c4630d072cabe44f106610c45ae6f7823","src/ir/derive.rs":"9550d01731ca66be28124c91fd0211a618743a065bec7b00e27c934afff82a84","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"3611100df8ddf01b010d2eae1d26a67df022e47b6236b0ed9d1b9b42340ebafd","src/ir/function.rs":"b86e665c6659c32bce39194240e7da6221c5a2ec51b362ad9f6e34f1bc396a6f","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4
801","src/ir/item.rs":"f1bd45f3a0cc32a132fee775e8269d5f2faac8d8e0ed1beeb553d2c9795708e9","src/ir/item_kind.rs":"dbeae8c4fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"91ecbf3a250eae63f29b508b0fb0418623523c9d3e0df96dd1f98d04d81b95ad","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"828a890acdc8b10c44e69e2ed4a4f5d8c0e734606d3a8cc71658dcf43a49acf4","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"10d767c64dff6040e0a1e71d2b69e8341d4cfa18385e5f6321220ea7114f519a","src/ir/var.rs":"57c8aa9f834c6f06418f7d471b1771bbb915821ef0d194b383be60092edca5f7","src/lib.rs":"f54a4f4622633c28ae3b130a15d7da1f07e1220012935bf6844cce1c658377ff","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"86bac5dd54eb5c2df71fdece02c72b6c95b9b7a818710ac20d457fdddca30dd9","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"c417889726b5e3325f9375551bf23fd54c9b40020151c364741ea6126ede386b","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"eac4ed5f2de9efc3c87cb722468fa49d0763e98f999d539bfc5e452c13d85c91"} \ No newline at end of file +{"files":{".cargo_vcs_info.json":"1eb83f46382fa835038c7ee9f7bf386b67ec313d8a9bda6b86b8ee534270ea7d","Cargo.toml":"d6ee9689e1d4ece0423949f53cfd91c6c1f53e9246e559e3c6e9093d691a4229","LICENSE":"c23953d9deb0a3312dbeaf6c128a657f3591acee45067612fa68405eaa4525db","README.md":"630d1a1d123c131bad0fec23173e263ba8ecc064b5cd8446d4cab7ffd197db45","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"9d41b7848cea37e8741fa7bc947ba58a83647824b1a0bbe7ff75012c412eab13","src/clang.rs":"b25f8d455e3cd89d416a4c5e55d828db9691f4def82109c1dd12457e5ca2c13c","src/codegen/bitfield_unit.rs":"88b0604322dc449fc9284850eadc1f5d14b42fa747d4258bae0b6b9535f52dfd","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"2c890c96a1a6b72ada63593cb544f005476fb176d7181553713e83710dc8eefd","src/codegen/impl_debug.rs":"43b977b8d16073d021977ce57f3c22eb5b1083493905ae19a171e2271939f574","src/codegen/impl_partialeq.rs":"671dd0eac712bf8281e11a7b3e545a443c6e9e2c8ee7fbebeb03c76667ca206b","src/codegen/mod.rs":"a1bfb972bed101b735d3ac19fa1080bd3a8d4e38e11b4adf34ca413982e2fe5e","src/codegen/struct_layout.rs":"b77f03dfbbed408a5fa6e693560aea8dc902fe7d10d847ce39122e6961078515","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"be74e03d4f00582fa8970439da52057b04204b450193833953ed84772933bd46","src/ir/analysis/derive_copy.rs":"b7e12cdc74937909529e4cefe9f43b3ee0a5590f07392b73481811ac9fddedd2","src/ir/analysis/derive_debug.rs":"cf9346ecb3afd4e94094a2723e4d76c76c55f42a13dc1d5ec6564d25d3a46cf4","src/ir/analysis/derive_default.rs":"87332eccd5accbfbf7fad2e1511be4f8945b0538ae3e0628c8af17d16068691f","src/ir/analysis/derive_hash.rs":"521ea1dbe221755042a95e8e8dcb594e427e54be2eb869c61ebbdb27fec5aa77","src/ir/analysis/derive_partialeq_or_partialord.rs":"3c5d051f69401fe50b56143143e
ca3e71674d6a87d0013c31745b75d0f3d584f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"5f7ee1b834978817041d884fee4648b31ecb66c62aafb8e7a9a17e5ac434bfe5","src/ir/analysis/has_type_param_in_array.rs":"abf74468b923c015aaf67599e50857267516010472819a79ca494fe02dd6ac93","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"4f788bff0ceb0e008d70145510340ab636e5203787316f0be41f789ce9b2f73d","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"1b068d5834da7360aec4cb80d9c55219cedbb2ae8b9727a39ec7d156c88fe0b5","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"7fde495a0b97f332a2018a6ee7b1cbac46e8dee68f2a3956a2a95e26e970c266","src/ir/derive.rs":"19601e76528d6cce8e04a66572e75da4e9efdecc4d60a983fc68c11958e9f3ec","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"9cc242d6b3c1866665594e8b306860ee39c0ea42d22198d46b7fded473fe3e84","src/ir/function.rs":"44603a952b5f5f3074f4f9b4995e88b28906e22830865533469f42071af61aa9","src/ir/int.rs":"07e0c7dbd2dd977177fae3acd2a14adf271c6cf9ff4b57cddc11d50734fd4801","src/ir/item.rs":"0ec5cec91551e01069451265fcf755cb54608bc2dfa770e770769eef0f2ba080","src/ir/item_kind.rs":"dbeae8c4fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e722edffcd34914b534813da5af6fe8ba69927a54e0ec88ae1733f5ddf0e50b1","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"828a890acdc8b10c44e69e2ed4a4f5d8c0e734606d3a8cc71658dcf43a49acf4","src/ir/template.rs":"c0f8570b927dfd6a421fc4ce3094ec837a3ed936445225dbfac961e8e0842ae5","src/ir/traversal.rs":"ea751379a5aec02f93f8d2c61e18232776b1f000dbeae64b9a7195ba21a19dd6","src/ir/ty.rs":"a7e72e4fb85a1328f17a5428c7bc7f1c51326299833f82252770bebf99d1a9a1","src/ir/var.rs":"5c0caaa505faef18e334c6198b3634b6f390d14cf9da629226cd78617fd3594b","src/lib.rs":"b94a374ef78f6b6cd2b0676efcd67a16800730b28374155b54387a55774aed6e","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"a4b4028542d6292363fc97621c704bf1b4e7eb149e9cb86b52e30aad0be13b99","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"c417889726b5e3325f9375551bf23fd54c9b40020151c364741ea6126ede386b","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"b41df015ccbc22b038641bd84d0aeeff01e0a4c0714ed35ed0e9a3dd8ad8d732"} \ No newline at end of file
new file mode 100644 --- /dev/null +++ b/third_party/rust/bindgen/.cargo_vcs_info.json @@ -0,0 +1,5 @@ +{ + "git": { + "sha1": "a242c51e6ff0d4cc27940a3927e713e8dff82c5f" + } +}
--- a/third_party/rust/bindgen/Cargo.toml +++ b/third_party/rust/bindgen/Cargo.toml @@ -7,17 +7,17 @@ # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] name = "bindgen" -version = "0.39.0" +version = "0.43.0" authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"] build = "build.rs" include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"] description = "Automatically generates Rust FFI bindings to C and C++ libraries." homepage = "https://rust-lang-nursery.github.io/rust-bindgen/" documentation = "https://docs.rs/bindgen" readme = "README.md" keywords = ["bindings", "ffi", "code-generation"] @@ -31,23 +31,23 @@ path = "src/lib.rs" [[bin]] name = "bindgen" path = "src/main.rs" doc = false [dependencies.bitflags] version = "1.0.3" [dependencies.cexpr] -version = "0.2" +version = "0.3.3" [dependencies.cfg-if] version = "0.1.0" [dependencies.clang-sys] -version = "0.23" +version = "0.26" features = ["runtime", "clang_6_0"] [dependencies.clap] version = "2" [dependencies.env_logger] version = "0.5" optional = true
--- a/third_party/rust/bindgen/src/clang.rs +++ b/third_party/rust/bindgen/src/clang.rs @@ -7,17 +7,17 @@ use cexpr; use clang_sys::*; use regex; use std::{mem, ptr, slice}; use std::ffi::{CStr, CString}; use std::fmt; use std::hash::Hash; use std::hash::Hasher; -use std::os::raw::{c_char, c_int, c_uint, c_ulong}; +use std::os::raw::{c_char, c_int, c_uint, c_ulong, c_longlong, c_ulonglong}; /// A cursor into the Clang AST, pointing to an AST node. /// /// We call the AST node pointed to by the cursor the cursor's "referent". #[derive(Copy, Clone)] pub struct Cursor { x: CXCursor, } @@ -935,16 +935,17 @@ impl Type { /// Given that this type is a pointer type, return the type that it points /// to. pub fn pointee_type(&self) -> Option<Type> { match self.kind() { CXType_Pointer | CXType_RValueReference | CXType_LValueReference | CXType_MemberPointer | + CXType_BlockPointer | CXType_ObjCObjectPointer => { let ret = Type { x: unsafe { clang_getPointeeType(self.x) }, }; debug_assert!(ret.is_valid()); Some(ret) } _ => None, @@ -1781,23 +1782,44 @@ impl EvalResult { CXEval_Float => { Some(unsafe { clang_EvalResult_getAsDouble(self.x) } as f64) } _ => None, } } /// Try to get back the result as an integer. - pub fn as_int(&self) -> Option<i32> { - match self.kind() { - CXEval_Int => { - Some(unsafe { clang_EvalResult_getAsInt(self.x) } as i32) + pub fn as_int(&self) -> Option<i64> { + if self.kind() != CXEval_Int { + return None; + } + + if !clang_EvalResult_isUnsignedInt::is_loaded() { + // FIXME(emilio): There's no way to detect underflow here, and clang + // will just happily give us a value. + return Some(unsafe { clang_EvalResult_getAsInt(self.x) } as i64) + } + + if unsafe { clang_EvalResult_isUnsignedInt(self.x) } != 0 { + let value = unsafe { clang_EvalResult_getAsUnsigned(self.x) }; + if value > i64::max_value() as c_ulonglong { + return None; } - _ => None, + + return Some(value as i64) } + + let value = unsafe { clang_EvalResult_getAsLongLong(self.x) }; + if value > i64::max_value() as c_longlong { + return None; + } + if value < i64::min_value() as c_longlong { + return None; + } + Some(value as i64) } /// Evaluates the expression as a literal string, that may or may not be /// valid utf-8. pub fn as_literal_string(&self) -> Option<Vec<u8>> { match self.kind() { CXEval_StrLiteral => { let ret = unsafe {
--- a/third_party/rust/bindgen/src/codegen/helpers.rs +++ b/third_party/rust/bindgen/src/codegen/helpers.rs @@ -54,49 +54,49 @@ pub mod attributes { quote! { #[link_name = #name] } } } /// Generates a proper type for a field or type with a given `Layout`, that is, /// a type with the correct size and alignment restrictions. -pub fn blob(layout: Layout) -> quote::Tokens { +pub fn blob(ctx: &BindgenContext, layout: Layout) -> quote::Tokens { let opaque = layout.opaque(); // FIXME(emilio, #412): We fall back to byte alignment, but there are // some things that legitimately are more than 8-byte aligned. // // Eventually we should be able to `unwrap` here, but... - let ty_name = match opaque.known_rust_type_for_array() { + let ty_name = match opaque.known_rust_type_for_array(ctx) { Some(ty) => ty, None => { warn!("Found unknown alignment on code generation!"); "u8" } }; let ty_name = Term::new(ty_name, Span::call_site()); - let data_len = opaque.array_size().unwrap_or(layout.size); + let data_len = opaque.array_size(ctx).unwrap_or(layout.size); if data_len == 1 { quote! { #ty_name } } else { quote! { [ #ty_name ; #data_len ] } } } /// Integer type of the same size as the given `Layout`. -pub fn integer_type(layout: Layout) -> Option<quote::Tokens> { - let name = Layout::known_type_for_size(layout.size)?; +pub fn integer_type(ctx: &BindgenContext, layout: Layout) -> Option<quote::Tokens> { + let name = Layout::known_type_for_size(ctx, layout.size)?; let name = Term::new(name, Span::call_site()); Some(quote! { #name }) } /// Generates a bitfield allocation unit type for a type with the given `Layout`. pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> quote::Tokens { let mut tokens = quote! {}; @@ -117,16 +117,17 @@ pub fn bitfield_unit(ctx: &BindgenContex }); tokens } pub mod ast_ty { use ir::context::BindgenContext; use ir::function::FunctionSig; + use ir::layout::Layout; use ir::ty::FloatKind; use quote; use proc_macro2; pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens { let ident = ctx.rust_ident_raw(name); match ctx.options().ctypes_prefix { Some(ref prefix) => { @@ -139,32 +140,54 @@ pub mod ast_ty { ::std::os::raw::#ident }, } } pub fn float_kind_rust_type( ctx: &BindgenContext, fk: FloatKind, + layout: Option<Layout>, ) -> quote::Tokens { - // TODO: we probably should just take the type layout into - // account? + // TODO: we probably should take the type layout into account more + // often? // // Also, maybe this one shouldn't be the default? - // - // FIXME: `c_longdouble` doesn't seem to be defined in some - // systems, so we use `c_double` directly. match (fk, ctx.options().convert_floats) { (FloatKind::Float, true) => quote! { f32 }, - (FloatKind::Double, true) | - (FloatKind::LongDouble, true) => quote! { f64 }, + (FloatKind::Double, true) => quote! { f64 }, (FloatKind::Float, false) => raw_type(ctx, "c_float"), - (FloatKind::Double, false) | - (FloatKind::LongDouble, false) => raw_type(ctx, "c_double"), - (FloatKind::Float128, _) => quote! { [u8; 16] }, + (FloatKind::Double, false) => raw_type(ctx, "c_double"), + (FloatKind::LongDouble, _) => { + match layout { + Some(layout) => { + match layout.size { + 4 => quote! { f32 }, + 8 => quote! { f64 }, + // TODO(emilio): If rust ever gains f128 we should + // use it here and below. + _ => super::integer_type(ctx, layout).unwrap_or(quote! { f64 }), + } + } + None => { + debug_assert!( + false, + "How didn't we know the layout for a primitive type?" + ); + quote! 
{ f64 } + } + } + } + (FloatKind::Float128, _) => { + if ctx.options().rust_features.i128_and_u128 { + quote! { u128 } + } else { + quote! { [u64; 2] } + } + } } } pub fn int_expr(val: i64) -> quote::Tokens { // Don't use quote! { #val } because that adds the type suffix. let val = proc_macro2::Literal::i64_unsuffixed(val); quote!(#val) }
--- a/third_party/rust/bindgen/src/codegen/impl_debug.rs +++ b/third_party/rust/bindgen/src/codegen/impl_debug.rs @@ -37,18 +37,20 @@ pub fn gen_debug_impl( } } } } format_string.push_str(" }}"); tokens.insert(0, quote! { #format_string }); + let prefix = ctx.trait_prefix(); + quote! { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + fn fmt(&self, f: &mut ::#prefix::fmt::Formatter<'_>) -> ::#prefix ::fmt::Result { write!(f, #( #tokens ),*) } } } /// A trait for the things which we can codegen tokens that contribute towards a /// generated `impl Debug`. pub trait ImplDebug<'a> { @@ -149,17 +151,16 @@ impl<'a> ImplDebug<'a> for Item { TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | - TypeKind::BlockPointer | TypeKind::UnresolvedTypeRef(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::Comp(..) | TypeKind::ObjCSel => debug_print(name, quote! { #name_ident }), TypeKind::TemplateInstantiation(ref inst) => { if inst.is_opaque(ctx, self) { @@ -179,51 +180,68 @@ impl<'a> ImplDebug<'a> for Item { if self.has_type_param_in_array(ctx) { Some( (format!("{}: Array with length {}", name, len), vec![]), ) } else if len < RUST_DERIVE_IN_ARRAY_LIMIT { // The simple case debug_print(name, quote! { #name_ident }) } else { - // Let's implement our own print function - Some(( - format!("{}: [{{}}]", name), - vec![quote! { - self.#name_ident - .iter() - .enumerate() - .map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v)) - .collect::<String>() - }], - )) + if ctx.options().use_core { + // There is no String in core; reducing field visibility to avoid breaking + // no_std setups. + Some(( + format!("{}: [...]", name), vec![] + )) + } else { + // Let's implement our own print function + Some(( + format!("{}: [{{}}]", name), + vec![quote! { + self.#name_ident + .iter() + .enumerate() + .map(|(i, v)| format!("{}{:?}", if i > 0 { ", " } else { "" }, v)) + .collect::<String>() + }], + )) + } } } TypeKind::Vector(_, len) => { - let self_ids = 0..len; - Some(( - format!("{}({{}})", name), - vec![quote! { - #(format!("{:?}", self.#self_ids)),* - }] - )) + if ctx.options().use_core { + // There is no format! in core; reducing field visibility to avoid breaking + // no_std setups. + Some(( + format!("{}(...)", name), vec![] + )) + } else { + let self_ids = 0..len; + Some(( + format!("{}({{}})", name), + vec![quote! { + #(format!("{:?}", self.#self_ids)),* + }] + )) + } } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { // We follow the aliases ctx.resolve_item(t).impl_debug(ctx, name) } TypeKind::Pointer(inner) => { let inner_type = ctx.resolve_type(inner).canonical_type(ctx); match *inner_type.kind() { TypeKind::Function(ref sig) - if !sig.can_trivially_derive_debug() => { + if !sig.can_trivially_derive_debug(ctx) => { Some((format!("{}: FunctionPointer", name), vec![])) } _ => debug_print(name, quote! { #name_ident }), } } TypeKind::Opaque => None, }
--- a/third_party/rust/bindgen/src/codegen/impl_partialeq.rs +++ b/third_party/rust/bindgen/src/codegen/impl_partialeq.rs @@ -83,17 +83,16 @@ fn gen_field(ctx: &BindgenContext, ty_it TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Complex(..) | TypeKind::Float(..) | TypeKind::Enum(..) | TypeKind::TypeParam | TypeKind::UnresolvedTypeRef(..) | - TypeKind::BlockPointer | TypeKind::Reference(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::Comp(..) | TypeKind::Pointer(_) | TypeKind::Function(..) | TypeKind::Opaque => quote_equals(name_ident), @@ -120,14 +119,15 @@ fn gen_field(ctx: &BindgenContext, ty_it let other_ids = 0..len; quote! { #(self.#self_ids == other.#other_ids &&)* true } }, TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { let inner_item = ctx.resolve_item(t); gen_field(ctx, inner_item, name) } } }
--- a/third_party/rust/bindgen/src/codegen/mod.rs +++ b/third_party/rust/bindgen/src/codegen/mod.rs @@ -2,17 +2,17 @@ mod impl_debug; mod impl_partialeq; mod error; mod helpers; pub mod struct_layout; #[cfg(test)] #[allow(warnings)] pub(crate) mod bitfield_unit; -#[cfg(test)] +#[cfg(all(test, target_endian = "little"))] mod bitfield_unit_tests; use self::helpers::attributes; use self::struct_layout::StructLayoutTracker; use super::BindgenOptions; use ir::analysis::{HasVtable, Sizedness}; @@ -98,16 +98,19 @@ struct CodegenResult<'a> { saw_union: bool, /// Whether an incomplete array has been generated at least once. saw_incomplete_array: bool, /// Whether Objective C types have been seen at least once. saw_objc: bool, + /// Whether Apple block types have been seen at least once. + saw_block: bool, + /// Whether a bitfield allocation unit has been seen at least once. saw_bitfield_unit: bool, items_seen: HashSet<ItemId>, /// The set of generated function/var names, needed because in C/C++ is /// legal to do something like: /// /// ```c++ @@ -135,16 +138,17 @@ struct CodegenResult<'a> { impl<'a> CodegenResult<'a> { fn new(codegen_id: &'a Cell<usize>) -> Self { CodegenResult { items: vec![], saw_union: false, saw_bindgen_union: false, saw_incomplete_array: false, saw_objc: false, + saw_block: false, saw_bitfield_unit: false, codegen_id: codegen_id, items_seen: Default::default(), functions_seen: Default::default(), vars_seen: Default::default(), overload_counters: Default::default(), } } @@ -161,16 +165,20 @@ impl<'a> CodegenResult<'a> { fn saw_incomplete_array(&mut self) { self.saw_incomplete_array = true; } fn saw_objc(&mut self) { self.saw_objc = true; } + fn saw_block(&mut self) { + self.saw_block = true; + } + fn saw_bitfield_unit(&mut self) { self.saw_bitfield_unit = true; } fn seen<Id: Into<ItemId>>(&self, item: Id) -> bool { self.items_seen.contains(&item.into()) } @@ -210,16 +218,17 @@ impl<'a> CodegenResult<'a> { { let mut new = Self::new(self.codegen_id); cb(&mut new); self.saw_union |= new.saw_union; self.saw_incomplete_array |= new.saw_incomplete_array; self.saw_objc |= new.saw_objc; + self.saw_block |= new.saw_block; self.saw_bitfield_unit |= new.saw_bitfield_unit; new.items } } impl<'a> ops::Deref for CodegenResult<'a> { type Target = Vec<quote::Tokens>; @@ -288,17 +297,16 @@ impl AppendImplicitTemplateParams for qu TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Array(..) | TypeKind::TypeParam | TypeKind::Opaque | TypeKind::Function(..) | TypeKind::Enum(..) | - TypeKind::BlockPointer | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::TemplateInstantiation(..) 
=> return, _ => {}, } let params: Vec<_> = item.used_template_params(ctx).iter().map(|p| { p.try_to_rust_ty(ctx, &()) @@ -389,16 +397,19 @@ impl CodeGenerator for Module { for child in self.children() { if ctx.codegen_items().contains(child) { *found_any = true; ctx.resolve_item(*child).codegen(ctx, result, &()); } } if item.id() == ctx.root_module() { + if result.saw_block { + utils::prepend_block_header(ctx, &mut *result); + } if result.saw_bindgen_union { utils::prepend_union_types(ctx, &mut *result); } if result.saw_incomplete_array { utils::prepend_incomplete_array_types(ctx, &mut *result); } if ctx.need_bindgen_complex_type() { utils::prepend_complex_type(&mut *result); @@ -559,20 +570,26 @@ impl CodeGenerator for Var { } else { let mut attrs = vec![]; if let Some(mangled) = self.mangled_name() { attrs.push(attributes::link_name(mangled)); } else if canonical_name != self.name() { attrs.push(attributes::link_name(self.name())); } + let maybe_mut = if self.is_const() { + quote! { } + } else { + quote! { mut } + }; + let mut tokens = quote!( extern "C" { #(#attrs)* - pub static mut #canonical_ident: #ty; + pub static #maybe_mut #canonical_ident: #ty; } ); result.push(tokens); } } } @@ -592,29 +609,61 @@ impl CodeGenerator for Type { TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Array(..) | TypeKind::Vector(..) | TypeKind::Pointer(..) | - TypeKind::BlockPointer | TypeKind::Reference(..) | TypeKind::Function(..) | TypeKind::ResolvedTypeRef(..) | TypeKind::Opaque | TypeKind::TypeParam => { // These items don't need code generation, they only need to be // converted to rust types in fields, arguments, and such. return; } TypeKind::TemplateInstantiation(ref inst) => { inst.codegen(ctx, result, item) } + TypeKind::BlockPointer(inner) => { + if !ctx.options().generate_block { + return; + } + + let inner_item = inner.into_resolver() + .through_type_refs() + .resolve(ctx); + let name = item.canonical_name(ctx); + + let inner_rust_type = { + if let TypeKind::Function(fnsig) = inner_item.kind().expect_type().kind() { + utils::fnsig_block(ctx, fnsig) + } else { + panic!("invalid block typedef: {:?}", inner_item) + } + }; + + let rust_name = ctx.rust_ident(&name); + + let mut tokens = if let Some(comment) = item.comment(ctx) { + attributes::doc(comment) + } else { + quote! {} + }; + + tokens.append_all(quote! { + pub type #rust_name = #inner_rust_type ; + }); + + result.push(tokens); + result.saw_block(); + } TypeKind::Comp(ref ci) => ci.codegen(ctx, result, item), TypeKind::TemplateAlias(inner, _) | TypeKind::Alias(inner) => { let inner_item = inner.into_resolver() .through_type_refs() .resolve(ctx); let name = item.canonical_name(ctx); @@ -1141,17 +1190,17 @@ impl Bitfield { ctx: &BindgenContext, param_name: quote::Tokens, mut ctor_impl: quote::Tokens, ) -> quote::Tokens { let bitfield_ty = ctx.resolve_type(self.ty()); let bitfield_ty_layout = bitfield_ty.layout(ctx).expect( "Bitfield without layout? Gah!", ); - let bitfield_int_ty = helpers::blob(bitfield_ty_layout); + let bitfield_int_ty = helpers::blob(ctx, bitfield_ty_layout); let offset = self.offset_into_unit(); let width = self.width() as u8; let prefix = ctx.trait_prefix(); ctor_impl.append_all(quote! 
{ __bindgen_bitfield_unit.set( #offset, @@ -1323,17 +1372,17 @@ impl<'a> FieldCodegen<'a> for Bitfield { let unit_field_ident = Term::new(unit_field_name, Span::call_site()); let bitfield_ty_item = ctx.resolve_item(self.ty()); let bitfield_ty = bitfield_ty_item.expect_type(); let bitfield_ty_layout = bitfield_ty.layout(ctx).expect( "Bitfield without layout? Gah!", ); - let bitfield_int_ty = match helpers::integer_type(bitfield_ty_layout) { + let bitfield_int_ty = match helpers::integer_type(ctx, bitfield_ty_layout) { Some(int_ty) => { *bitfield_representable_as_int = true; int_ty } None => { *bitfield_representable_as_int = false; return; } @@ -1496,49 +1545,28 @@ impl CodeGenerator for CompInfo { &mut methods, (), ); } } let is_union = self.kind() == CompKind::Union; let layout = item.kind().expect_type().layout(ctx); - if is_union && !is_opaque && !self.is_forward_declaration() { - result.saw_union(); - if !self.can_be_rust_union(ctx) { - result.saw_bindgen_union(); - } - - let layout = layout.expect("Unable to get layout information?"); - let ty = helpers::blob(layout); - - fields.push(if self.can_be_rust_union(ctx) { - quote! { - _bindgen_union_align: #ty , - } - } else { - struct_layout.saw_union(layout); - - quote! { - pub bindgen_union_field: #ty , - } - }); - } let mut explicit_align = None; if is_opaque { // Opaque item should not have generated methods, fields. debug_assert!(fields.is_empty()); debug_assert!(methods.is_empty()); match layout { Some(l) => { explicit_align = Some(l.align); - let ty = helpers::blob(l); + let ty = helpers::blob(ctx, l); fields.push(quote! { pub _bindgen_opaque_blob: #ty , }); } None => { warn!("Opaque type without layout! Expect dragons!"); } } @@ -1551,24 +1579,50 @@ impl CodeGenerator for CompInfo { if let Some(layout) = layout { if struct_layout.requires_explicit_align(layout) { if layout.align == 1 { packed = true; } else { explicit_align = Some(layout.align); if !ctx.options().rust_features.repr_align { - let ty = helpers::blob(Layout::new(0, layout.align)); + let ty = helpers::blob(ctx, Layout::new(0, layout.align)); fields.push(quote! { pub __bindgen_align: #ty , }); } } } } + } else if is_union && !self.is_forward_declaration() { + result.saw_union(); + if !self.can_be_rust_union(ctx) { + result.saw_bindgen_union(); + } + + // TODO(emilio): It'd be nice to unify this with the struct path + // above somehow. + let layout = layout.expect("Unable to get layout information?"); + + if struct_layout.requires_explicit_align(layout) { + explicit_align = Some(layout.align); + } + + let ty = helpers::blob(ctx, layout); + fields.push(if self.can_be_rust_union(ctx) { + quote! { + _bindgen_union_align: #ty , + } + } else { + struct_layout.saw_union(layout); + + quote! { + pub bindgen_union_field: #ty , + } + }); } // C++ requires every struct to be addressable, so what C++ compilers do // is making the struct 1-byte sized. // // This is apparently not the case for C, see: // https://github.com/rust-lang-nursery/rust-bindgen/issues/551 // @@ -1585,17 +1639,17 @@ impl CodeGenerator for CompInfo { // Generate the address field if it's an opaque type and // couldn't determine the layout of the blob. layout.is_none() } else { layout.map_or(true, |l| l.size != 0) }; if has_address { - let ty = helpers::blob(Layout::new(1, 1)); + let ty = helpers::blob(ctx, Layout::new(1, 1)); fields.push(quote! 
{ pub _address: #ty, }); } } let mut generic_param_names = vec![]; @@ -1742,17 +1796,17 @@ impl CodeGenerator for CompInfo { child_item.codegen(ctx, result, &()); } // NOTE: Some unexposed attributes (like alignment attributes) may // affect layout, so we're bad and pray to the gods for avoid sending // all the tests to shit when parsing things like max_align_t. if self.found_unknown_attr() { warn!( - "Type {} has an unkown attribute that may affect layout", + "Type {} has an unknown attribute that may affect layout", canonical_ident.as_str() ); } if all_template_params.is_empty() { if !is_opaque { for var in self.inner_vars() { ctx.resolve_item(*var).codegen(ctx, result, &()); @@ -1915,18 +1969,20 @@ impl CodeGenerator for CompInfo { if needs_debug_impl { let impl_ = impl_debug::gen_debug_impl( ctx, self.fields(), item, self.kind(), ); + let prefix = ctx.trait_prefix(); + result.push(quote! { - impl #generics ::std::fmt::Debug for #ty_for_impl { + impl #generics ::#prefix::fmt::Debug for #ty_for_impl { #impl_ } }); } if needs_partialeq_impl { if let Some(impl_) = impl_partialeq::gen_partialeq_impl(ctx, self, item, &ty_for_impl) { @@ -2511,47 +2567,43 @@ impl CodeGenerator for Enum { (true, 8) => "i64", (false, 8) => "u64", _ => { warn!("invalid enum decl: signed: {}, size: {}", signed, size); "i32" } }; - // ModuleConsts has higher precedence before Rust in order to avoid problems with - // overlapping match patterns - let variation = if self.is_constified_enum_module(ctx, item) { - EnumVariation::ModuleConsts - } else if self.is_bitfield(ctx, item) { - EnumVariation::Bitfield - } else if self.is_rustified_enum(ctx, item) { - EnumVariation::Rust - } else if self.is_constified_enum(ctx, item) { - EnumVariation::Consts - } else { - ctx.options().default_enum_style - }; - let mut attrs = vec![]; + let variation = self.computed_enum_variation(ctx, item); + // TODO(emilio): Delegate this to the builders? if variation.is_rust() { attrs.push(attributes::repr(repr_name)); } else if variation.is_bitfield() { attrs.push(attributes::repr("C")); } if let Some(comment) = item.comment(ctx) { attrs.push(attributes::doc(comment)); } if !variation.is_const() { - attrs.push(attributes::derives( - &["Debug", "Copy", "Clone", "PartialEq", "Eq", "Hash"], - )); + let mut derives = vec!["Debug", "Copy", "Clone", "PartialEq", "Eq", "Hash"]; + + if item.can_derive_partialord(ctx) { + derives.push("PartialOrd"); + } + + if item.can_derive_ord(ctx) { + derives.push("Ord"); + } + + attrs.push(attributes::derives(&derives)); } fn add_constant<'a>( ctx: &BindgenContext, enum_: &Type, // Only to avoid recomputing every time. enum_canonical_name: &Term, // May be the same as "variant" if it's because the @@ -2754,17 +2806,17 @@ trait TryToOpaque { /// Do not override this provided trait method. fn try_to_opaque( &self, ctx: &BindgenContext, extra: &Self::Extra, ) -> error::Result<quote::Tokens> { self.try_get_layout(ctx, extra).map(|layout| { - helpers::blob(layout) + helpers::blob(ctx, layout) }) } } /// Infallible conversion of an IR thing to an opaque blob. /// /// The resulting layout is best effort, and is unfortunately not guaranteed to /// be correct. 
When all else fails, we fall back to a single byte layout as a @@ -2781,17 +2833,17 @@ trait ToOpaque: TryToOpaque { } fn to_opaque( &self, ctx: &BindgenContext, extra: &Self::Extra, ) -> quote::Tokens { let layout = self.get_layout(ctx, extra); - helpers::blob(layout) + helpers::blob(ctx, layout) } } impl<T> ToOpaque for T where T: TryToOpaque, { } @@ -2839,17 +2891,17 @@ where &self, ctx: &BindgenContext, extra: &E, ) -> error::Result<quote::Tokens> { self.try_to_rust_ty(ctx, extra).or_else( |_| if let Ok(layout) = self.try_get_layout(ctx, extra) { - Ok(helpers::blob(layout)) + Ok(helpers::blob(ctx, layout)) } else { Err(error::Error::NoLayoutForOpaqueBlob) }, ) } } /// Infallible conversion to a Rust type, or an opaque blob with a best effort @@ -2991,17 +3043,17 @@ impl TryToRustTy for Type { IntKind::UShort => Ok(raw_type(ctx, "c_ushort")), IntKind::Int => Ok(raw_type(ctx, "c_int")), IntKind::UInt => Ok(raw_type(ctx, "c_uint")), IntKind::Long => Ok(raw_type(ctx, "c_long")), IntKind::ULong => Ok(raw_type(ctx, "c_ulong")), IntKind::LongLong => Ok(raw_type(ctx, "c_longlong")), IntKind::ULongLong => Ok(raw_type(ctx, "c_ulonglong")), IntKind::WChar { size } => { - let ty = Layout::known_type_for_size(size) + let ty = Layout::known_type_for_size(ctx, size) .expect("Non-representable wchar_t?"); let ident = ctx.rust_ident_raw(ty); Ok(quote! { #ident }) }, IntKind::I8 => Ok(quote! { i8 }), IntKind::U8 => Ok(quote! { u8 }), IntKind::I16 => Ok(quote! { i16 }), @@ -3013,27 +3065,37 @@ impl TryToRustTy for Type { IntKind::Custom { name, .. } => { let ident = ctx.rust_ident_raw(name); Ok(quote! { #ident }) } - // FIXME: This doesn't generate the proper alignment, but we - // can't do better right now. We should be able to use - // i128/u128 when they're available. - IntKind::U128 | IntKind::I128 => { - Ok(quote! { [u64; 2] }) + IntKind::U128 => { + Ok(if ctx.options().rust_features.i128_and_u128 { + quote! { u128 } + } else { + // Best effort thing, but wrong alignment + // unfortunately. + quote! { [u64; 2] } + }) + } + IntKind::I128 => { + Ok(if ctx.options().rust_features.i128_and_u128 { + quote! { i128 } + } else { + quote! { [u64; 2] } + }) } } } - TypeKind::Float(fk) => Ok(float_kind_rust_type(ctx, fk)), + TypeKind::Float(fk) => Ok(float_kind_rust_type(ctx, fk, self.layout(ctx))), TypeKind::Complex(fk) => { - let float_path = float_kind_rust_type(ctx, fk); + let float_path = float_kind_rust_type(ctx, fk, self.layout(ctx)); ctx.generated_bindgen_complex(); Ok(if ctx.options().enable_cxx_namespaces { quote! { root::__BindgenComplex<#float_path> } } else { quote! { @@ -3064,53 +3126,46 @@ impl TryToRustTy for Type { let path = Term::new(&path.join("::"), Span::call_site()); Ok(quote!(#path)) } TypeKind::TemplateInstantiation(ref inst) => { inst.try_to_rust_ty(ctx, item) } TypeKind::ResolvedTypeRef(inner) => inner.try_to_rust_ty(ctx, &()), TypeKind::TemplateAlias(..) | - TypeKind::Alias(..) => { + TypeKind::Alias(..) | + TypeKind::BlockPointer(..) 
=> { + if self.is_block_pointer() && !ctx.options().generate_block { + let void = raw_type(ctx, "c_void"); + return Ok(void.to_ptr(/* is_const = */ false)); + } let template_params = item.used_template_params(ctx) .into_iter() .filter(|param| param.is_template_param(ctx, &())) .collect::<Vec<_>>(); - let spelling = self.name().expect("Unnamed alias?"); if item.is_opaque(ctx, &()) && !template_params.is_empty() { self.try_to_opaque(ctx, item) - } else if let Some(ty) = utils::type_from_named( - ctx, - spelling, - ) - { + } else if let Some(ty) = self.name().and_then(|name| utils::type_from_named(ctx, name)) { Ok(ty) } else { utils::build_path(item, ctx) } } TypeKind::Comp(ref info) => { let template_params = item.all_template_params(ctx); if info.has_non_type_template_params() || (item.is_opaque(ctx, &()) && !template_params.is_empty()) { return self.try_to_opaque(ctx, item); } utils::build_path(item, ctx) } TypeKind::Opaque => self.try_to_opaque(ctx, item), - TypeKind::BlockPointer => { - let void = raw_type(ctx, "c_void"); - Ok(void.to_ptr( - /* is_const = */ - false - )) - } TypeKind::Pointer(inner) | TypeKind::Reference(inner) => { let is_const = ctx.resolve_type(inner).is_const(); let inner = inner.into_resolver().through_type_refs().resolve(ctx); let inner_ty = inner.expect_type(); // Regardless if we can properly represent the inner type, we @@ -3553,16 +3608,35 @@ mod utils { pub type id = *mut objc::runtime::Object; }; let items = vec![use_objc, id_type]; let old_items = mem::replace(result, items); result.extend(old_items.into_iter()); } + pub fn prepend_block_header( + ctx: &BindgenContext, + result: &mut Vec<quote::Tokens>, + ) { + let use_block = if ctx.options().block_extern_crate { + quote! { + extern crate block; + } + } else { + quote! { + use block; + } + }; + + let items = vec![use_block]; + let old_items = mem::replace(result, items); + result.extend(old_items.into_iter()); + } + pub fn prepend_union_types( ctx: &BindgenContext, result: &mut Vec<quote::Tokens>, ) { let prefix = ctx.trait_prefix(); // TODO(emilio): The fmt::Debug impl could be way nicer with // std::intrinsics::type_name, but... @@ -3609,17 +3683,17 @@ mod utils { }; let union_field_copy_impl = quote! { impl<T> ::#prefix::marker::Copy for __BindgenUnionField<T> {} }; let union_field_debug_impl = quote! { impl<T> ::#prefix::fmt::Debug for __BindgenUnionField<T> { - fn fmt(&self, fmt: &mut ::#prefix::fmt::Formatter) + fn fmt(&self, fmt: &mut ::#prefix::fmt::Formatter<'_>) -> ::#prefix::fmt::Result { fmt.write_str("__BindgenUnionField") } } }; // The actual memory of the filed will be hashed, so that's why these // field doesn't do anything with the hash. @@ -3696,17 +3770,17 @@ mod utils { pub unsafe fn as_mut_slice(&mut self, len: usize) -> &mut [T] { ::#prefix::slice::from_raw_parts_mut(self.as_mut_ptr(), len) } } }; let incomplete_array_debug_impl = quote! { impl<T> ::#prefix::fmt::Debug for __IncompleteArrayField<T> { - fn fmt(&self, fmt: &mut ::#prefix::fmt::Formatter) + fn fmt(&self, fmt: &mut ::#prefix::fmt::Formatter<'_>) -> ::#prefix::fmt::Result { fmt.write_str("__IncompleteArrayField") } } }; let incomplete_array_clone_impl = quote! { impl<T> ::#prefix::clone::Clone for __IncompleteArrayField<T> { @@ -3864,9 +3938,31 @@ mod utils { }).collect::<Vec<_>>(); if sig.is_variadic() { args.push(quote! { ... 
}) } args } + + pub fn fnsig_block( + ctx: &BindgenContext, + sig: &FunctionSig, + ) -> quote::Tokens { + let args = sig.argument_types().iter().map(|&(_, ty)| { + let arg_item = ctx.resolve_item(ty); + + arg_item.to_rust_ty_or_opaque(ctx, &()) + }); + + let return_item = ctx.resolve_item(sig.return_type()); + let ret_ty = if let TypeKind::Void = *return_item.kind().expect_type().kind() { + quote! { () } + } else { + return_item.to_rust_ty_or_opaque(ctx, &()) + }; + + quote! { + *const ::block::Block<(#(#args),*), #ret_ty> + } + } }
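Side note (not part of the changeset): a rough sketch of what the codegen changes above mean for generated bindings, assuming a hypothetical header containing `extern const int kVersion;` and an Apple block typedef `typedef void (^Callback)(int);`. The names are illustrative, and the block line assumes the new `--generate-block` option plus the external `block` crate referenced by `fnsig_block`.

```rust
// Hedged sketch of output shape, not captured from a real bindgen run.
extern "C" {
    // The new `maybe_mut` logic drops `mut` for const globals, so callers can
    // no longer write through them by accident.
    pub static kVersion: ::std::os::raw::c_int;
}

// With --generate-block, fnsig_block emits a typed block pointer instead of
// the old `*mut c_void` fallback (this line requires the `block` crate).
pub type Callback = *const ::block::Block<(::std::os::raw::c_int,), ()>;
```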
--- a/third_party/rust/bindgen/src/codegen/struct_layout.rs +++ b/third_party/rust/bindgen/src/codegen/struct_layout.rs @@ -282,36 +282,41 @@ impl<'a> StructLayoutTracker<'a> { Some(self.padding_field(layout)) } else { None } } pub fn requires_explicit_align(&self, layout: Layout) -> bool { + let repr_align = self.ctx.options().rust_features().repr_align; + + // Always force explicit repr(align) for stuff more than 16-byte aligned + // to work-around https://github.com/rust-lang/rust/issues/54341. + // + // Worst-case this just generates redundant alignment attributes. + if repr_align && self.max_field_align >= 16 { + return true; + } + if self.max_field_align >= layout.align { return false; } - // At this point we require explicit alignment, but we may not be able - // to generate the right bits, let's double check. - if self.ctx.options().rust_features().repr_align { - return true; - } // We can only generate up-to a word of alignment unless we support // repr(align). - layout.align <= self.ctx.target_pointer_size() + repr_align || layout.align <= self.ctx.target_pointer_size() } fn padding_bytes(&self, layout: Layout) -> usize { align_to(self.latest_offset, layout.align) - self.latest_offset } fn padding_field(&mut self, layout: Layout) -> quote::Tokens { - let ty = helpers::blob(layout); + let ty = helpers::blob(self.ctx, layout); let padding_count = self.padding_count; self.padding_count += 1; let padding_field_name = Term::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site()); self.max_field_align = cmp::max(self.max_field_align, layout.align);
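As a quick illustration of the `requires_explicit_align` change above: anything 16-byte aligned or more now always carries an explicit `repr(align)` when the target Rust supports it, to work around rust-lang/rust#54341, even if a field would already force the alignment. A hand-written sketch with a hypothetical 32-byte-aligned type (not real bindgen output):

```rust
#[repr(C, align(32))]
#[derive(Copy, Clone)]
pub struct OverAligned {
    // Blob chosen by helpers::blob(ctx, layout); possibly redundant with the
    // attribute above, which the comment in the diff accepts as the worst case.
    pub _bindgen_opaque_blob: [u64; 4],
}

fn main() {
    assert_eq!(std::mem::align_of::<OverAligned>(), 32);
    assert_eq!(std::mem::size_of::<OverAligned>(), 32);
}
```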
--- a/third_party/rust/bindgen/src/features.rs +++ b/third_party/rust/bindgen/src/features.rs @@ -91,16 +91,18 @@ macro_rules! rust_target_base { /// Rust stable 1.19 => Stable_1_19 => 1.19; /// Rust stable 1.20 => Stable_1_20 => 1.20; /// Rust stable 1.21 => Stable_1_21 => 1.21; /// Rust stable 1.25 => Stable_1_25 => 1.25; + /// Rust stable 1.26 + => Stable_1_26 => 1.26; /// Nightly rust => Nightly => nightly; ); } } rust_target_base!(rust_target_def); rust_target_base!(rust_target_values_def); @@ -167,16 +169,20 @@ rust_feature_def!( Stable_1_21 { /// builtin impls for `Clone` ([PR](https://github.com/rust-lang/rust/pull/43690)) => builtin_clone_impls; } Stable_1_25 { /// repr(align) ([PR](https://github.com/rust-lang/rust/pull/47006)) => repr_align; } + Stable_1_26 { + /// [i128 / u128 support](https://doc.rust-lang.org/std/primitive.i128.html) + => i128_and_u128; + } Nightly { /// `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202)) => thiscall_abi; } ); impl Default for RustFeatures { fn default() -> Self {
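The new `Stable_1_26` target and its `i128_and_u128` feature are easiest to see on a hypothetical C field of type `unsigned __int128`; a sketch of the two shapes bindgen can now choose between:

```rust
// With `--rust-target 1.26` (or newer) the field maps to a native u128.
#[repr(C)]
pub struct HasU128 {
    pub value: u128,
}

// Older targets keep the previous best-effort form, which has the wrong
// alignment (what the removed FIXME in codegen/mod.rs referred to).
#[repr(C)]
pub struct HasU128Legacy {
    pub value: [u64; 2],
}

fn main() {
    println!(
        "u128 align = {}, [u64; 2] align = {}",
        std::mem::align_of::<HasU128>(),
        std::mem::align_of::<HasU128Legacy>()
    );
}
```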
--- a/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs +++ b/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs @@ -144,17 +144,17 @@ impl<'ctx> MonotoneFramework for CannotD }; if self.ctx.no_copy_by_name(&item) { return self.insert(id); } if item.is_opaque(self.ctx, &()) { let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_copy() + l.opaque().can_trivially_derive_copy(self.ctx) }); return if layout_can_derive { trace!(" we can trivially derive Copy for the layout"); ConstrainResult::Same } else { trace!(" we cannot derive Copy for the layout"); self.insert(id) }; @@ -168,17 +168,16 @@ impl<'ctx> MonotoneFramework for CannotD TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Vector(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | TypeKind::TypeParam | - TypeKind::BlockPointer | TypeKind::Pointer(..) | TypeKind::UnresolvedTypeRef(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel => { trace!(" simple type that can always derive Copy"); ConstrainResult::Same } @@ -199,17 +198,18 @@ impl<'ctx> MonotoneFramework for CannotD } else { trace!(" array cannot derive Copy with 0 length"); self.insert(id) } } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { let cant_derive_copy = self.is_not_copy(t); if cant_derive_copy { trace!( " arrays of T for which we cannot derive Copy \ also cannot derive Copy" ); return self.insert(id); }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs +++ b/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs @@ -141,17 +141,17 @@ impl<'ctx> MonotoneFramework for CannotD None => { trace!(" not a type; ignoring"); return ConstrainResult::Same; } }; if item.is_opaque(self.ctx, &()) { let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_debug() + l.opaque().can_trivially_derive_debug(self.ctx) }); return if layout_can_derive && !(ty.is_union() && self.ctx.options().rust_features().untagged_union) { trace!(" we can trivially derive Debug for the layout"); ConstrainResult::Same } else { trace!(" we cannot derive Debug for the layout"); @@ -178,17 +178,16 @@ impl<'ctx> MonotoneFramework for CannotD TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | TypeKind::Vector(..) | - TypeKind::BlockPointer | TypeKind::TypeParam | TypeKind::UnresolvedTypeRef(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel => { trace!(" simple type that can always derive Debug"); ConstrainResult::Same } @@ -208,17 +207,18 @@ impl<'ctx> MonotoneFramework for CannotD } else { trace!(" array is too large to derive Debug"); self.insert(id) } } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { if self.is_not_debug(t) { trace!( " aliases and type refs to T which cannot derive \ Debug also cannot derive Debug" ); self.insert(id) } else { trace!( @@ -237,17 +237,17 @@ impl<'ctx> MonotoneFramework for CannotD if info.kind() == CompKind::Union { if self.ctx.options().rust_features().untagged_union { trace!(" cannot derive Debug for Rust unions"); return self.insert(id); } if ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_debug() + l.opaque().can_trivially_derive_debug(self.ctx) }) { trace!(" union layout can trivially derive Debug"); return ConstrainResult::Same; } else { trace!(" union layout cannot derive Debug"); return self.insert(id); } @@ -294,17 +294,17 @@ impl<'ctx> MonotoneFramework for CannotD trace!(" comp can derive Debug"); ConstrainResult::Same } TypeKind::Pointer(inner) => { let inner_type = self.ctx.resolve_type(inner).canonical_type(self.ctx); if let TypeKind::Function(ref sig) = *inner_type.kind() { - if !sig.can_trivially_derive_debug() { + if !sig.can_trivially_derive_debug(self.ctx) { trace!( " function pointer that can't trivially derive Debug" ); return self.insert(id); } } trace!(" pointers can derive Debug"); ConstrainResult::Same
--- a/third_party/rust/bindgen/src/ir/analysis/derive_default.rs +++ b/third_party/rust/bindgen/src/ir/analysis/derive_default.rs @@ -168,17 +168,17 @@ impl<'ctx> MonotoneFramework for CannotD None => { trace!(" not a type; ignoring"); return ConstrainResult::Same; } }; if item.is_opaque(self.ctx, &()) { let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_default() + l.opaque().can_trivially_derive_default(self.ctx) }); return if layout_can_derive && !(ty.is_union() && self.ctx.options().rust_features().untagged_union) { trace!(" we can trivially derive Default for the layout"); ConstrainResult::Same } else { trace!(" we cannot derive Default for the layout"); @@ -210,17 +210,16 @@ impl<'ctx> MonotoneFramework for CannotD ConstrainResult::Same } TypeKind::Void | TypeKind::TypeParam | TypeKind::Reference(..) | TypeKind::NullPtr | TypeKind::Pointer(..) | - TypeKind::BlockPointer | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::ObjCInterface(..) | TypeKind::Enum(..) => { trace!(" types that always cannot derive Default"); self.insert(id) } @@ -239,17 +238,18 @@ impl<'ctx> MonotoneFramework for CannotD } else { trace!(" array is too large to derive Default"); self.insert(id) } } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { if self.is_not_default(t) { trace!( " aliases and type refs to T which cannot derive \ Default also cannot derive Default" ); self.insert(id) } else { trace!( @@ -273,17 +273,17 @@ impl<'ctx> MonotoneFramework for CannotD if info.kind() == CompKind::Union { if self.ctx.options().rust_features().untagged_union { trace!(" cannot derive Default for Rust unions"); return self.insert(id); } if ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_default() + l.opaque().can_trivially_derive_default(self.ctx) }) { trace!(" union layout can trivially derive Default"); return ConstrainResult::Same; } else { trace!(" union layout cannot derive Default"); return self.insert(id); }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs +++ b/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs @@ -128,17 +128,17 @@ impl<'ctx> MonotoneFramework for CannotD }; if self.ctx.no_hash_by_name(&item) { return self.insert(id) } if item.is_opaque(self.ctx, &()) { let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_hash() + l.opaque().can_trivially_derive_hash(self.ctx) }); return if layout_can_derive && !(ty.is_union() && self.ctx.options().rust_features().untagged_union) { trace!(" we can trivially derive Hash for the layout"); ConstrainResult::Same } else { trace!(" we cannot derive Hash for the layout"); @@ -162,17 +162,16 @@ impl<'ctx> MonotoneFramework for CannotD // Handle the simple cases. These can derive hash without further // information. TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Enum(..) | TypeKind::TypeParam | TypeKind::UnresolvedTypeRef(..) | - TypeKind::BlockPointer | TypeKind::Reference(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel => { trace!(" simple type that can always derive Hash"); ConstrainResult::Same } @@ -214,39 +213,40 @@ impl<'ctx> MonotoneFramework for CannotD trace!(" vector can derive Hash"); ConstrainResult::Same } TypeKind::Pointer(inner) => { let inner_type = self.ctx.resolve_type(inner).canonical_type(self.ctx); if let TypeKind::Function(ref sig) = *inner_type.kind() { - if !sig.can_trivially_derive_hash() { + if !sig.can_trivially_derive_hash(self.ctx) { trace!( " function pointer that can't trivially derive Hash" ); return self.insert(id); } } trace!(" pointers can derive Hash"); ConstrainResult::Same } TypeKind::Function(ref sig) => { - if !sig.can_trivially_derive_hash() { + if !sig.can_trivially_derive_hash(self.ctx) { trace!(" function that can't trivially derive Hash"); return self.insert(id); } trace!(" function can derive Hash"); ConstrainResult::Same } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { if self.cannot_derive_hash.contains(&t.into()) { trace!( " aliases and type refs to T which cannot derive \ Hash also cannot derive Hash" ); self.insert(id) } else { trace!( @@ -270,17 +270,17 @@ impl<'ctx> MonotoneFramework for CannotD if info.kind() == CompKind::Union { if self.ctx.options().rust_features().untagged_union { trace!(" cannot derive Hash for Rust unions"); return self.insert(id); } if ty.layout(self.ctx).map_or(true, |l| { - l.opaque().can_trivially_derive_hash() + l.opaque().can_trivially_derive_hash(self.ctx) }) { trace!(" union layout can trivially derive Hash"); return ConstrainResult::Same; } else { trace!(" union layout cannot derive Hash"); return self.insert(id); }
--- a/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs +++ b/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs @@ -38,17 +38,17 @@ use std::collections::hash_map::Entry; /// /// * If T is an instantiation of an abstract template definition, T cannot be /// derived `PartialEq`/`PartialOrd` if any of the template arguments or /// template definition cannot derive `PartialEq`/`PartialOrd`. #[derive(Debug, Clone)] pub struct CannotDerivePartialEqOrPartialOrd<'ctx> { ctx: &'ctx BindgenContext, - // The incremental result of this analysis's computation. + // The incremental result of this analysis's computation. // Contains information whether particular item can derive `PartialEq`/`PartialOrd`. can_derive_partialeq_or_partialord: HashMap<ItemId, CanDerive>, // Dependencies saying that if a key ItemId has been inserted into the // `cannot_derive_partialeq_or_partialord` set, then each of the ids // in Vec<ItemId> need to be considered again. // // This is a subset of the natural IR graph with reversed edges, where we @@ -124,17 +124,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia trace!( " cannot derive `PartialEq`/`PartialOrd` for Rust unions" ); return CanDerive::No; } let layout_can_derive = ty.layout(self.ctx) .map_or(CanDerive::Yes, |l| { - l.opaque().can_trivially_derive_partialeq_or_partialord() + l.opaque().can_trivially_derive_partialeq_or_partialord(self.ctx) }); match layout_can_derive { CanDerive::Yes => { trace!( " we can trivially derive `PartialEq`/`PartialOrd` for the layout" ); } @@ -153,17 +153,16 @@ impl<'ctx> CannotDerivePartialEqOrPartia TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Complex(..) | TypeKind::Float(..) | TypeKind::Enum(..) | TypeKind::TypeParam | TypeKind::UnresolvedTypeRef(..) | - TypeKind::BlockPointer | TypeKind::Reference(..) | TypeKind::ObjCInterface(..) 
| TypeKind::ObjCId | TypeKind::ObjCSel => { trace!( " simple type that can always derive `PartialEq`/`PartialOrd`" ); return CanDerive::Yes; @@ -206,31 +205,31 @@ impl<'ctx> CannotDerivePartialEqOrPartia trace!(" vectors cannot derive `PartialEq`/`PartialOrd`"); return CanDerive::No; } TypeKind::Pointer(inner) => { let inner_type = self.ctx.resolve_type(inner).canonical_type(self.ctx); if let TypeKind::Function(ref sig) = *inner_type.kind() { - if sig.can_trivially_derive_partialeq_or_partialord() + if sig.can_trivially_derive_partialeq_or_partialord(self.ctx) != CanDerive::Yes { trace!( " function pointer that can't trivially derive `PartialEq`/`PartialOrd`" ); return CanDerive::No; } } trace!(" pointers can derive `PartialEq`/`PartialOrd`"); return CanDerive::Yes; } TypeKind::Function(ref sig) => { - if sig.can_trivially_derive_partialeq_or_partialord() + if sig.can_trivially_derive_partialeq_or_partialord(self.ctx) != CanDerive::Yes { trace!( " function that can't trivially derive `PartialEq`/`PartialOrd`" ); return CanDerive::No; } trace!(" function can derive `PartialEq`/`PartialOrd`"); @@ -254,17 +253,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia " cannot derive `PartialEq`/`PartialOrd` for Rust unions" ); return CanDerive::No; } let layout_can_derive = ty.layout(self.ctx).map_or(CanDerive::Yes, |l| { l.opaque() - .can_trivially_derive_partialeq_or_partialord() + .can_trivially_derive_partialeq_or_partialord(self.ctx) }); match layout_can_derive { CanDerive::Yes => { trace!( " union layout can trivially derive `PartialEq`/`PartialOrd`" ); } _ => { @@ -276,16 +275,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia return layout_can_derive; } return self.constrain_join(item); } TypeKind::ResolvedTypeRef(..) | TypeKind::TemplateAlias(..) | TypeKind::Alias(..) | + TypeKind::BlockPointer(..) | TypeKind::TemplateInstantiation(..) => { return self.constrain_join(item); } TypeKind::Opaque => unreachable!( "The early ty.is_opaque check should have handled this case" ), }
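The edits to the derive_* analyses above are mechanical consequences of one IR change; a toy sketch (simplified types, not bindgen's real definitions) of how `BlockPointer` now carries its pointee and is forwarded like an alias in the fix-point passes:

```rust
#[derive(Clone, Copy, PartialEq)]
struct TypeId(usize);

// `BlockPointer` used to be a unit variant grouped with the "trivially
// derivable" simple types; it now wraps the pointee like `Alias` does.
#[allow(dead_code)]
enum TypeKind {
    Alias(TypeId),
    ResolvedTypeRef(TypeId),
    BlockPointer(TypeId),
    Int,
}

// Each analysis now answers the same question for a block pointer as for its
// inner type, instead of unconditionally treating it as a plain pointer.
fn forwards_to(kind: &TypeKind) -> Option<TypeId> {
    match *kind {
        TypeKind::Alias(t)
        | TypeKind::ResolvedTypeRef(t)
        | TypeKind::BlockPointer(t) => Some(t),
        TypeKind::Int => None,
    }
}

fn main() {
    let pointee = TypeId(7);
    assert!(forwards_to(&TypeKind::BlockPointer(pointee)) == Some(pointee));
}
```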
--- a/third_party/rust/bindgen/src/ir/analysis/has_float.rs +++ b/third_party/rust/bindgen/src/ir/analysis/has_float.rs @@ -117,17 +117,16 @@ impl<'ctx> MonotoneFramework for HasFloa match *ty.kind() { TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | - TypeKind::BlockPointer | TypeKind::TypeParam | TypeKind::Opaque | TypeKind::Pointer(..) | TypeKind::UnresolvedTypeRef(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel => { trace!(" simple type that do not have float"); @@ -154,17 +153,18 @@ impl<'ctx> MonotoneFramework for HasFloa return self.insert(id) } trace!(" Vector with type T that do not have float also do not have float"); ConstrainResult::Same } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { if self.has_float.contains(&t.into()) { trace!(" aliases and type refs to T which have float \ also have float"); self.insert(id) } else { trace!(" aliases and type refs to T which do not have float \ also do not have floaarrayt"); ConstrainResult::Same
--- a/third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs +++ b/third_party/rust/bindgen/src/ir/analysis/has_type_param_in_array.rs @@ -130,17 +130,16 @@ impl<'ctx> MonotoneFramework for HasType TypeKind::NullPtr | TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Vector(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | - TypeKind::BlockPointer | TypeKind::TypeParam | TypeKind::Opaque | TypeKind::Pointer(..) | TypeKind::UnresolvedTypeRef(..) | TypeKind::ObjCInterface(..) | TypeKind::ObjCId | TypeKind::ObjCSel => { trace!(" simple type that do not have array"); @@ -161,17 +160,18 @@ impl<'ctx> MonotoneFramework for HasType ); ConstrainResult::Same } } } TypeKind::ResolvedTypeRef(t) | TypeKind::TemplateAlias(t, _) | - TypeKind::Alias(t) => { + TypeKind::Alias(t) | + TypeKind::BlockPointer(t) => { if self.has_type_parameter_in_array.contains(&t.into()) { trace!( " aliases and type refs to T which have array \ also have array" ); self.insert(id) } else { trace!(
--- a/third_party/rust/bindgen/src/ir/analysis/sizedness.rs +++ b/third_party/rust/bindgen/src/ir/analysis/sizedness.rs @@ -256,31 +256,31 @@ impl<'ctx> MonotoneFramework for Sizedne TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | TypeKind::NullPtr | - TypeKind::BlockPointer | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::Pointer(..) => { trace!(" {:?} is known not to be zero-sized", ty.kind()); self.insert(id, SizednessResult::NonZeroSized) } TypeKind::ObjCInterface(..) => { trace!(" obj-c interfaces always have at least the `isa` pointer"); self.insert(id, SizednessResult::NonZeroSized) } TypeKind::TemplateAlias(t, _) | TypeKind::Alias(t) | + TypeKind::BlockPointer(t) | TypeKind::ResolvedTypeRef(t) => { trace!(" aliases and type refs forward to their inner type"); self.forward(t, id) } TypeKind::TemplateInstantiation(ref inst) => { trace!(" template instantiations are zero-sized if their \ definition is zero-sized");
--- a/third_party/rust/bindgen/src/ir/comment.rs +++ b/third_party/rust/bindgen/src/ir/comment.rs @@ -30,68 +30,60 @@ fn kind(comment: &str) -> Option<Kind> { Some(Kind::SingleLines) } else { None } } fn make_indent(indent: usize) -> String { const RUST_INDENTATION: usize = 4; - iter::repeat(' ').take(indent * RUST_INDENTATION).collect() } /// Preprocesses multiple single line comments. /// /// Handles lines starting with both `//` and `///`. fn preprocess_single_lines(comment: &str, indent: usize) -> String { debug_assert!(comment.starts_with("//"), "comment is not single line"); let indent = make_indent(indent); let mut is_first = true; let lines: Vec<_> = comment .lines() - .map(|l| l.trim_left_matches('/').trim()) + .map(|l| l.trim().trim_left_matches('/')) .map(|l| { let indent = if is_first { "" } else { &*indent }; is_first = false; - let maybe_space = if l.is_empty() { "" } else { " " }; - format!("{}///{}{}", indent, maybe_space, l) + format!("{}///{}", indent, l) }) .collect(); lines.join("\n") } fn preprocess_multi_line(comment: &str, indent: usize) -> String { let comment = comment .trim_left_matches('/') - .trim_left_matches('*') - .trim_left_matches('!') .trim_right_matches('/') - .trim_right_matches('*') - .trim(); + .trim_right_matches('*'); let indent = make_indent(indent); // Strip any potential `*` characters preceding each line. let mut is_first = true; let mut lines: Vec<_> = comment.lines() - .map(|line| line.trim().trim_left_matches('*').trim()) - .skip_while(|line| line.is_empty()) // Skip the first empty lines. + .map(|line| line.trim().trim_left_matches('*').trim_left_matches('!')) + .skip_while(|line| line.trim().is_empty()) // Skip the first empty lines. .map(|line| { let indent = if is_first { "" } else { &*indent }; is_first = false; - let maybe_space = if line.is_empty() { "" } else { " " }; - format!("{}///{}{}", indent, maybe_space, line) + format!("{}///{}", indent, line) }) .collect(); // Remove the trailing line corresponding to the `*/`. - let last_line_is_empty = lines.last().map_or(false, |l| l.is_empty()); - - if last_line_is_empty { + if lines.last().map_or(false, |l| l.trim().is_empty() || l.trim() == "///") { lines.pop(); } lines.join("\n") } #[cfg(test)] mod test { @@ -102,23 +94,24 @@ mod test { assert_eq!(kind("/// hello"), Some(Kind::SingleLines)); assert_eq!(kind("/** world */"), Some(Kind::MultiLine)); } #[test] fn processes_single_lines_correctly() { assert_eq!(preprocess("/// hello", 0), "/// hello"); assert_eq!(preprocess("// hello", 0), "/// hello"); + assert_eq!(preprocess("// hello", 0), "/// hello"); } #[test] fn processes_multi_lines_correctly() { assert_eq!( preprocess("/** hello \n * world \n * foo \n */", 0), "/// hello\n/// world\n/// foo" ); assert_eq!( preprocess("/**\nhello\n*world\n*foo\n*/", 0), - "/// hello\n/// world\n/// foo" + "///hello\n///world\n///foo" ); } }
--- a/third_party/rust/bindgen/src/ir/context.rs +++ b/third_party/rust/bindgen/src/ir/context.rs @@ -221,17 +221,17 @@ impl<T> CanDeriveDefault for T where T: Copy + Into<ItemId> { fn can_derive_default(&self, ctx: &BindgenContext) -> bool { ctx.options().derive_default && ctx.lookup_can_derive_default(*self) } } -impl<'a, T> CanDeriveCopy<'a> for T +impl<T> CanDeriveCopy for T where T: Copy + Into<ItemId> { fn can_derive_copy(&self, ctx: &BindgenContext) -> bool { ctx.options().derive_copy && ctx.lookup_can_derive_copy(*self) } } @@ -509,22 +509,29 @@ impl<'ctx> WhitelistedItemsTraversal<'ct const HOST_TARGET: &'static str = include_str!(concat!(env!("OUT_DIR"), "/host-target.txt")); /// Returns the effective target, and whether it was explicitly specified on the /// clang flags. fn find_effective_target(clang_args: &[String]) -> (String, bool) { use std::env; - for opt in clang_args { + let mut args = clang_args.iter(); + while let Some(opt) = args.next() { if opt.starts_with("--target=") { let mut split = opt.split('='); split.next(); return (split.next().unwrap().to_owned(), true); } + + if opt == "-target" { + if let Some(target) = args.next() { + return (target.clone(), true); + } + } } // If we're running from a build script, try to find the cargo target. if let Ok(t) = env::var("TARGET") { return (t, false) } (HOST_TARGET.to_owned(), false) @@ -571,17 +578,20 @@ If you encounter an error missing from t }; let target_info = clang::TargetInfo::new(&translation_unit); #[cfg(debug_assertions)] { if let Some(ref ti) = target_info { if effective_target == HOST_TARGET { - assert_eq!(ti.pointer_width / 8, mem::size_of::<*mut ()>()); + assert_eq!( + ti.pointer_width / 8, mem::size_of::<*mut ()>(), + "{:?} {:?}", effective_target, HOST_TARGET + ); } } } let root_module = Self::build_root_module(ItemId(0)); let root_module_id = root_module.id().as_module_id_unchecked(); let mut me = BindgenContext { @@ -839,67 +849,68 @@ If you encounter an error missing from t /// Mangles a name so it doesn't conflict with any keyword. pub fn rust_mangle<'a>(&self, name: &'a str) -> Cow<'a, str> { if name.contains("@") || name.contains("?") || name.contains("$") || match name { "abstract" | - "alignof" | - "as" | - "become" | - "box" | + "alignof" | + "as" | + "async" | + "become" | + "box" | "break" | - "const" | - "continue" | - "crate" | - "do" | + "const" | + "continue" | + "crate" | + "do" | "else" | - "enum" | - "extern" | - "false" | - "final" | + "enum" | + "extern" | + "false" | + "final" | "fn" | - "for" | - "if" | - "impl" | - "in" | + "for" | + "if" | + "impl" | + "in" | "let" | - "loop" | - "macro" | - "match" | - "mod" | + "loop" | + "macro" | + "match" | + "mod" | "move" | - "mut" | - "offsetof" | - "override" | - "priv" | + "mut" | + "offsetof" | + "override" | + "priv" | "proc" | - "pub" | - "pure" | - "ref" | - "return" | + "pub" | + "pure" | + "ref" | + "return" | "Self" | - "self" | - "sizeof" | - "static" | - "struct" | + "self" | + "sizeof" | + "static" | + "struct" | "super" | - "trait" | - "true" | - "type" | - "typeof" | + "trait" | + "true" | + "type" | + "typeof" | "unsafe" | - "unsized" | - "use" | - "virtual" | - "where" | + "unsized" | + "use" | + "virtual" | + "where" | "while" | - "yield" | + "yield" | "bool" | "_" => true, _ => false, } { let mut s = name.to_owned(); s = s.replace("@", "_"); s = s.replace("?", "_");
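A self-contained sketch of the new target-flag handling in `find_effective_target` (the cargo `TARGET` and host fallbacks are omitted here): both the joined `--target=<triple>` spelling and the split `-target <triple>` spelling are now recognized.

```rust
fn effective_target(clang_args: &[String]) -> Option<String> {
    let mut args = clang_args.iter();
    while let Some(opt) = args.next() {
        if opt.starts_with("--target=") {
            let mut split = opt.split('=');
            split.next();
            return split.next().map(|t| t.to_owned());
        }
        // New in this change: the two-argument form used by many build setups.
        if opt == "-target" {
            return args.next().cloned();
        }
    }
    None
}

fn main() {
    let joined = vec!["--target=armv7-linux-androideabi".to_string()];
    let split = vec!["-target".to_string(), "armv7-linux-androideabi".to_string()];
    assert_eq!(effective_target(&joined), effective_target(&split));
}
```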
--- a/third_party/rust/bindgen/src/ir/derive.rs +++ b/third_party/rust/bindgen/src/ir/derive.rs @@ -25,51 +25,51 @@ pub trait CanDeriveDebug { } /// A trait that encapsulates the logic for whether or not we can trivially /// derive `Debug` without looking at any other types or the results of a fix /// point analysis. This is a helper trait for the fix point analysis. pub trait CanTriviallyDeriveDebug { /// Return `true` if `Debug` can trivially be derived for this thing, /// `false` otherwise. - fn can_trivially_derive_debug(&self) -> bool; + fn can_trivially_derive_debug(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can derive `Copy` /// for a given thing. -pub trait CanDeriveCopy<'a> { +pub trait CanDeriveCopy { /// Return `true` if `Copy` can be derived for this thing, `false` /// otherwise. - fn can_derive_copy(&'a self, ctx: &'a BindgenContext) -> bool; + fn can_derive_copy(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can trivially /// derive `Copy` without looking at any other types or results of fix point /// analyses. This is a helper trait for fix point analysis. pub trait CanTriviallyDeriveCopy { /// Return `true` if `Copy` can be trivially derived for this thing, `false` /// otherwise. - fn can_trivially_derive_copy(&self) -> bool; + fn can_trivially_derive_copy(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can derive /// `Default` for a given thing. pub trait CanDeriveDefault { /// Return `true` if `Default` can be derived for this thing, `false` /// otherwise. fn can_derive_default(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can trivially /// derive `Default` without looking at any other types or results of fix point /// analyses. This is a helper trait for the fix point analysis. pub trait CanTriviallyDeriveDefault { /// Return `true` if `Default` can trivially derived for this thing, `false` /// otherwise. - fn can_trivially_derive_default(&self) -> bool; + fn can_trivially_derive_default(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can derive `Hash` /// for a given thing. pub trait CanDeriveHash { /// Return `true` if `Hash` can be derived for this thing, `false` /// otherwise. fn can_derive_hash(&self, ctx: &BindgenContext) -> bool; @@ -106,50 +106,50 @@ pub trait CanDeriveOrd { } /// A trait that encapsulates the logic for whether or not we can derive `Hash` /// without looking at any other types or the results of any fix point /// analyses. This is a helper trait for the fix point analysis. pub trait CanTriviallyDeriveHash { /// Return `true` if `Hash` can trivially be derived for this thing, `false` /// otherwise. - fn can_trivially_derive_hash(&self) -> bool; + fn can_trivially_derive_hash(&self, ctx: &BindgenContext) -> bool; } /// A trait that encapsulates the logic for whether or not we can trivially /// derive `PartialEq` or `PartialOrd` without looking at any other types or /// results of fix point analyses. This is a helper for the fix point analysis. pub trait CanTriviallyDerivePartialEqOrPartialOrd { /// Return `Yes` if `PartialEq` or `PartialOrd` can trivially be derived /// for this thing. 
- fn can_trivially_derive_partialeq_or_partialord(&self) -> CanDerive; + fn can_trivially_derive_partialeq_or_partialord(&self, ctx: &BindgenContext) -> CanDerive; } /// Whether it is possible or not to automatically derive trait for an item. -/// +/// /// ```ignore /// No /// ^ /// | /// ArrayTooLarge /// ^ /// | /// Yes /// ``` -/// +/// /// Initially we assume that we can derive trait for all types and then /// update our understanding as we learn more about each type. #[derive(Debug, Copy, Clone, PartialEq, Eq, Ord)] pub enum CanDerive { /// No, we cannot. No, /// The only thing that stops us from automatically deriving is that /// array with more than maximum number of elements is used. - /// + /// /// This means we probably can "manually" implement such trait. ArrayTooLarge, /// Yes, we can derive automatically. Yes, } impl Default for CanDerive {
--- a/third_party/rust/bindgen/src/ir/enum_ty.rs +++ b/third_party/rust/bindgen/src/ir/enum_ty.rs @@ -1,12 +1,13 @@ //! Intermediate representation for C/C++ enumerations. use super::context::{BindgenContext, TypeId}; use super::item::Item; +use super::super::codegen::EnumVariation; use super::ty::TypeKind; use clang; use ir::annotations::Annotations; use ir::item::ItemCanonicalPath; use parse::{ClangItemParser, ParseError}; use regex_set::RegexSet; /// An enum representing custom handling that can be given to a variant. @@ -137,42 +138,43 @@ impl Enum { }); Ok(Enum::new(repr, variants)) } fn is_matching_enum(&self, ctx: &BindgenContext, enums: &RegexSet, item: &Item) -> bool { let path = item.canonical_path(ctx); let enum_ty = item.expect_type(); - let path_matches = enums.matches(&path[1..].join("::")); - let enum_is_anon = enum_ty.name().is_none(); - let a_variant_matches = self.variants().iter().any(|v| { - enums.matches(&v.name()) - }); - path_matches || (enum_is_anon && a_variant_matches) - } + if enums.matches(&path[1..].join("::")) { + return true; + } - /// Whether the enum should be a bitfield - pub fn is_bitfield(&self, ctx: &BindgenContext, item: &Item) -> bool { - self.is_matching_enum(ctx, &ctx.options().bitfield_enums, item) + // Test the variants if the enum is anonymous. + if enum_ty.name().is_some() { + return false; + } + + self.variants().iter().any(|v| enums.matches(&v.name())) } - /// Whether the enum should be an constified enum module - pub fn is_constified_enum_module(&self, ctx: &BindgenContext, item: &Item) -> bool { - self.is_matching_enum(ctx, &ctx.options().constified_enum_modules, item) - } - - /// Whether the enum should be an set of constants - pub fn is_constified_enum(&self, ctx: &BindgenContext, item: &Item) -> bool { - self.is_matching_enum(ctx, &ctx.options().constified_enums, item) - } - - /// Whether the enum should be a Rust enum - pub fn is_rustified_enum(&self, ctx: &BindgenContext, item: &Item) -> bool { - self.is_matching_enum(ctx, &ctx.options().rustified_enums, item) + /// Returns the final representation of the enum. + pub fn computed_enum_variation(&self, ctx: &BindgenContext, item: &Item) -> EnumVariation { + // ModuleConsts has higher precedence before Rust in order to avoid + // problems with overlapping match patterns. + if self.is_matching_enum(ctx, &ctx.options().constified_enum_modules, item) { + EnumVariation::ModuleConsts + } else if self.is_matching_enum(ctx, &ctx.options().bitfield_enums, item) { + EnumVariation::Bitfield + } else if self.is_matching_enum(ctx, &ctx.options().rustified_enums, item) { + EnumVariation::Rust + } else if self.is_matching_enum(ctx, &ctx.options().constified_enums, item) { + EnumVariation::Consts + } else { + ctx.options().default_enum_style + } } } /// A single enum variant, to be contained only in an enum. #[derive(Debug)] pub struct EnumVariant { /// The name of the variant. name: String,
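The refactored `computed_enum_variation` makes the precedence explicit: ModuleConsts, then Bitfield, then Rust, then Consts, then the default style. For a hypothetical `enum Flags { A = 1, B = 2 };` matched by `--constified-enum-module`, the winning ModuleConsts variation emits roughly the following shape (sketch only; the integer type depends on the underlying repr):

```rust
#[allow(non_snake_case)]
pub mod Flags {
    pub type Type = ::std::os::raw::c_uint;
    pub const A: Type = 1;
    pub const B: Type = 2;
}

fn main() {
    // Constified module enums compose like plain integers.
    let both: Flags::Type = Flags::A | Flags::B;
    assert_eq!(both, 3);
}
```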
--- a/third_party/rust/bindgen/src/ir/function.rs +++ b/third_party/rust/bindgen/src/ir/function.rs @@ -437,17 +437,26 @@ impl FunctionSig { { ty.ret_type().or_else(|| cursor.ret_type()).ok_or( ParseError::Continue, )? } else { ty.ret_type().ok_or(ParseError::Continue)? }; let ret = Item::from_ty_or_ref(ty_ret_type, cursor, None, ctx); - let call_conv = ty.call_conv(); + + // Clang plays with us at "find the calling convention", see #549 and + // co. This seems to be a better fix than that commit. + let mut call_conv = ty.call_conv(); + if let Some(ty) = cursor.cur_type().canonical_type().pointee_type() { + let cursor_call_conv = ty.call_conv(); + if cursor_call_conv != CXCallingConv_Invalid { + call_conv = cursor_call_conv; + } + } let abi = get_abi(call_conv); if abi.is_unknown() { warn!("Unknown calling convention: {:?}", call_conv); } Ok(Self::new(ret.into(), args, ty.is_variadic(), abi)) } @@ -576,31 +585,28 @@ impl Trace for FunctionSig { for &(_, ty) in self.argument_types() { tracer.visit_kind(ty.into(), EdgeKind::FunctionParameter); } } } impl CanTriviallyDeriveDebug for FunctionSig { - fn can_trivially_derive_debug(&self) -> bool { + fn can_trivially_derive_debug(&self, _: &BindgenContext) -> bool { self.function_pointers_can_derive() } } impl CanTriviallyDeriveHash for FunctionSig { - fn can_trivially_derive_hash(&self) -> bool { + fn can_trivially_derive_hash(&self, _: &BindgenContext) -> bool { self.function_pointers_can_derive() } } impl CanTriviallyDerivePartialEqOrPartialOrd for FunctionSig { - fn can_trivially_derive_partialeq_or_partialord(&self) -> CanDerive { - if self.argument_types.len() > RUST_DERIVE_FUNPTR_LIMIT { - return CanDerive::No; - } - - match self.abi { - Abi::C | Abi::Unknown(..) => CanDerive::Yes, - _ => CanDerive::No, + fn can_trivially_derive_partialeq_or_partialord(&self, _: &BindgenContext) -> CanDerive { + if self.function_pointers_can_derive() { + CanDerive::Yes + } else { + CanDerive::No } } }
--- a/third_party/rust/bindgen/src/ir/item.rs +++ b/third_party/rust/bindgen/src/ir/item.rs @@ -8,17 +8,17 @@ use super::context::{BindgenContext, Ite use super::derive::{CanDeriveCopy, CanDeriveDebug, CanDeriveDefault, CanDeriveHash, CanDerivePartialOrd, CanDeriveOrd, CanDerivePartialEq, CanDeriveEq}; use super::dot::DotAttributes; use super::function::{Function, FunctionKind}; use super::item_kind::ItemKind; use super::layout::Opaque; use super::module::Module; -use super::super::codegen::CONSTIFIED_ENUM_MODULE_REPR_NAME; +use super::super::codegen::{CONSTIFIED_ENUM_MODULE_REPR_NAME, EnumVariation}; use super::template::{AsTemplateParam, TemplateParameters}; use super::traversal::{EdgeKind, Trace, Tracer}; use super::ty::{Type, TypeKind}; use clang; use clang_sys; use parse::{ClangItemParser, ClangSubItemParser, ParseError, ParseResult}; use regex; use std::cell::{Cell, RefCell}; @@ -324,17 +324,17 @@ impl CanDeriveDebug for Item { } impl CanDeriveDefault for Item { fn can_derive_default(&self, ctx: &BindgenContext) -> bool { self.id().can_derive_default(ctx) } } -impl<'a> CanDeriveCopy<'a> for Item { +impl CanDeriveCopy for Item { fn can_derive_copy(&self, ctx: &BindgenContext) -> bool { self.id().can_derive_copy(ctx) } } impl CanDeriveHash for Item { fn can_derive_hash(&self, ctx: &BindgenContext) -> bool { self.id().can_derive_hash(ctx) @@ -632,16 +632,17 @@ impl Item { "You're not supposed to call this yet" ); if self.annotations.hide() { return true; } let path = self.canonical_path(ctx); let name = path[1..].join("::"); + ctx.options().blacklisted_items.matches(&name) || match self.kind { ItemKind::Type(..) => { ctx.options().blacklisted_types.matches(&name) || ctx.is_replaced_type(&path, self.id) } ItemKind::Function(..) => { ctx.options().blacklisted_functions.matches(&name) } @@ -923,17 +924,17 @@ impl Item { let item = self.id.into_resolver().through_type_refs().resolve(ctx); let type_ = match *item.kind() { ItemKind::Type(ref type_) => type_, _ => return false, }; match *type_.kind() { TypeKind::Enum(ref enum_) => { - enum_.is_constified_enum_module(ctx, self) + enum_.computed_enum_variation(ctx, self) == EnumVariation::ModuleConsts } TypeKind::Alias(inner_id) => { // TODO(emilio): Make this "hop through type aliases that aren't // really generated" an option in `ItemResolver`? let inner_item = ctx.resolve_item(inner_id); let name = item.canonical_name(ctx); if inner_item.canonical_name(ctx) == name {
--- a/third_party/rust/bindgen/src/ir/layout.rs +++ b/third_party/rust/bindgen/src/ir/layout.rs @@ -31,18 +31,22 @@ fn test_layout_for_size() { assert_eq!( Layout::for_size_internal(ptr_size, 3 * ptr_size), Layout::new(3 * ptr_size, ptr_size) ); } impl Layout { /// Gets the integer type name for a given known size. - pub fn known_type_for_size(size: usize) -> Option<&'static str> { + pub fn known_type_for_size( + ctx: &BindgenContext, + size: usize, + ) -> Option<&'static str> { Some(match size { + 16 if ctx.options().rust_features.i128_and_u128 => "u128", 8 => "u64", 4 => "u32", 2 => "u16", 1 => "u8", _ => return None, }) } @@ -100,67 +104,67 @@ impl Opaque { let layout = Layout::new(ty.size(), ty.align()); let ty_kind = TypeKind::Opaque; let is_const = ty.is_const(); Type::new(None, Some(layout), ty_kind, is_const) } /// Return the known rust type we should use to create a correctly-aligned /// field with this layout. - pub fn known_rust_type_for_array(&self) -> Option<&'static str> { - Layout::known_type_for_size(self.0.align) + pub fn known_rust_type_for_array(&self,ctx: &BindgenContext) -> Option<&'static str> { + Layout::known_type_for_size(ctx, self.0.align) } /// Return the array size that an opaque type for this layout should have if /// we know the correct type for it, or `None` otherwise. - pub fn array_size(&self) -> Option<usize> { - if self.known_rust_type_for_array().is_some() { + pub fn array_size(&self, ctx: &BindgenContext) -> Option<usize> { + if self.known_rust_type_for_array(ctx).is_some() { Some(self.0.size / cmp::max(self.0.align, 1)) } else { None } } /// Return `true` if this opaque layout's array size will fit within the /// maximum number of array elements that Rust allows deriving traits /// with. Return `false` otherwise. - pub fn array_size_within_derive_limit(&self) -> bool { - self.array_size().map_or(false, |size| { + pub fn array_size_within_derive_limit(&self, ctx: &BindgenContext) -> bool { + self.array_size(ctx).map_or(false, |size| { size <= RUST_DERIVE_IN_ARRAY_LIMIT }) } } impl CanTriviallyDeriveDebug for Opaque { - fn can_trivially_derive_debug(&self) -> bool { - self.array_size_within_derive_limit() + fn can_trivially_derive_debug(&self, ctx: &BindgenContext) -> bool { + self.array_size_within_derive_limit(ctx) } } impl CanTriviallyDeriveDefault for Opaque { - fn can_trivially_derive_default(&self) -> bool { - self.array_size_within_derive_limit() + fn can_trivially_derive_default(&self, ctx: &BindgenContext) -> bool { + self.array_size_within_derive_limit(ctx) } } impl CanTriviallyDeriveCopy for Opaque { - fn can_trivially_derive_copy(&self) -> bool { - self.array_size_within_derive_limit() + fn can_trivially_derive_copy(&self, ctx: &BindgenContext) -> bool { + self.array_size_within_derive_limit(ctx) } } impl CanTriviallyDeriveHash for Opaque { - fn can_trivially_derive_hash(&self) -> bool { - self.array_size_within_derive_limit() + fn can_trivially_derive_hash(&self, ctx: &BindgenContext) -> bool { + self.array_size_within_derive_limit(ctx) } } impl CanTriviallyDerivePartialEqOrPartialOrd for Opaque { - fn can_trivially_derive_partialeq_or_partialord(&self) -> CanDerive { - self.array_size().map_or(CanDerive::No, |size| { - if size <= RUST_DERIVE_IN_ARRAY_LIMIT { - CanDerive::Yes - } else { - CanDerive::ArrayTooLarge - } - }) + fn can_trivially_derive_partialeq_or_partialord(&self, ctx: &BindgenContext) -> CanDerive { + // TODO(emilio): This is inconsistent with the rest of the + // CanTriviallyDerive* traits. 
+ if self.array_size_within_derive_limit(ctx) { + CanDerive::Yes + } else { + CanDerive::ArrayTooLarge + } } }
--- a/third_party/rust/bindgen/src/ir/ty.rs +++ b/third_party/rust/bindgen/src/ir/ty.rs @@ -85,16 +85,24 @@ impl Type { &mut self.kind } /// Get this type's name. pub fn name(&self) -> Option<&str> { self.name.as_ref().map(|name| &**name) } + /// Whether this is a block pointer type. + pub fn is_block_pointer(&self) -> bool { + match self.kind { + TypeKind::BlockPointer(..) => true, + _ => false, + } + } + /// Is this a compound type? pub fn is_comp(&self) -> bool { match self.kind { TypeKind::Comp(..) => true, _ => false, } } @@ -150,17 +158,16 @@ impl Type { pub fn is_builtin_or_type_param(&self) -> bool { match self.kind { TypeKind::Void | TypeKind::NullPtr | TypeKind::Function(..) | TypeKind::Array(..) | TypeKind::Reference(..) | TypeKind::Pointer(..) | - TypeKind::BlockPointer | TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::TypeParam => true, _ => false, } } /// Creates a new named type, with name `name`. @@ -239,18 +246,17 @@ impl Type { /// What is the layout of this type? pub fn layout(&self, ctx: &BindgenContext) -> Option<Layout> { self.layout.or_else(|| { match self.kind { TypeKind::Comp(ref ci) => ci.layout(ctx), // FIXME(emilio): This is a hack for anonymous union templates. // Use the actual pointer size! - TypeKind::Pointer(..) | - TypeKind::BlockPointer => { + TypeKind::Pointer(..) => { Some(Layout::new( ctx.target_pointer_size(), ctx.target_pointer_size(), )) } TypeKind::ResolvedTypeRef(inner) => { ctx.resolve_type(inner).layout(ctx) } @@ -334,24 +340,24 @@ impl Type { TypeKind::Int(..) | TypeKind::Float(..) | TypeKind::Complex(..) | TypeKind::Function(..) | TypeKind::Enum(..) | TypeKind::Reference(..) | TypeKind::Void | TypeKind::NullPtr | - TypeKind::BlockPointer | TypeKind::Pointer(..) | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::ObjCInterface(..) => Some(self), TypeKind::ResolvedTypeRef(inner) | TypeKind::Alias(inner) | + TypeKind::BlockPointer(inner) | TypeKind::TemplateAlias(inner, _) => { ctx.resolve_type(inner).safe_canonical_type(ctx) } TypeKind::TemplateInstantiation(ref inst) => { ctx.resolve_type(inst.template_definition()) .safe_canonical_type(ctx) } @@ -480,17 +486,17 @@ impl TypeKind { TypeKind::Complex(..) => "Complex", TypeKind::Alias(..) => "Alias", TypeKind::TemplateAlias(..) => "TemplateAlias", TypeKind::Array(..) => "Array", TypeKind::Vector(..) => "Vector", TypeKind::Function(..) => "Function", TypeKind::Enum(..) => "Enum", TypeKind::Pointer(..) => "Pointer", - TypeKind::BlockPointer => "BlockPointer", + TypeKind::BlockPointer(..) => "BlockPointer", TypeKind::Reference(..) => "Reference", TypeKind::TemplateInstantiation(..) => "TemplateInstantiation", TypeKind::UnresolvedTypeRef(..) => "UnresolvedTypeRef", TypeKind::ResolvedTypeRef(..) => "ResolvedTypeRef", TypeKind::TypeParam => "TypeParam", TypeKind::ObjCInterface(..) => "ObjCInterface", TypeKind::ObjCId => "ObjCId", TypeKind::ObjCSel => "ObjCSel", @@ -574,17 +580,17 @@ impl TemplateParameters for TypeKind { TypeKind::Int(_) | TypeKind::Float(_) | TypeKind::Complex(_) | TypeKind::Array(..) | TypeKind::Vector(..) | TypeKind::Function(_) | TypeKind::Enum(_) | TypeKind::Pointer(_) | - TypeKind::BlockPointer | + TypeKind::BlockPointer(_) | TypeKind::Reference(_) | TypeKind::UnresolvedTypeRef(..) | TypeKind::TypeParam | TypeKind::Alias(_) | TypeKind::ObjCId | TypeKind::ObjCSel | TypeKind::ObjCInterface(_) => vec![], } @@ -650,17 +656,17 @@ pub enum TypeKind { /// An `enum` type. Enum(Enum), /// A pointer to a type. The bool field represents whether it's const or /// not. 
Pointer(TypeId), /// A pointer to an Apple block. - BlockPointer, + BlockPointer(TypeId), /// A reference to a type, as in: int& foo(). Reference(TypeId), /// An instantiation of an abstract template definition with a set of /// concrete template arguments. TemplateInstantiation(TemplateInstantiation), @@ -1051,47 +1057,27 @@ impl Type { // something else we might get confused, see the comment inside // TypeRef. // // We might need to, though, if the context is already in the // process of resolving them. CXType_ObjCObjectPointer | CXType_MemberPointer | CXType_Pointer => { - // Fun fact: the canonical type of a pointer type may sometimes - // contain information we need but isn't present in the concrete - // type (yeah, I'm equally wat'd). - // - // Yet we still have trouble if we unconditionally trust the - // canonical type, like too-much desugaring (sigh). - // - // See tests/headers/call-conv-field.h for an example. - // - // Since for now the only identifier cause of breakage is the - // ABI for function pointers, and different ABI mixed with - // problematic stuff like that one is _extremely_ unlikely and - // can be bypassed via blacklisting, we do the check explicitly - // (as hacky as it is). - // - // Yet we should probably (somehow) get the best of both worlds, - // presumably special-casing function pointers as a whole, yet - // someone is going to need to care about typedef'd function - // pointers, etc, which isn't trivial given function pointers - // are mostly unexposed. I don't have the time for it right now. - let mut pointee = ty.pointee_type().unwrap(); - let canonical_pointee = - canonical_ty.pointee_type().unwrap(); - if pointee.call_conv() != canonical_pointee.call_conv() { - pointee = canonical_pointee; - } + let pointee = ty.pointee_type().unwrap(); let inner = Item::from_ty_or_ref(pointee, location, None, ctx); TypeKind::Pointer(inner) } - CXType_BlockPointer => TypeKind::BlockPointer, + CXType_BlockPointer => { + let pointee = ty.pointee_type().expect("Not valid Type?"); + let inner = + Item::from_ty_or_ref(pointee, location, None, ctx); + TypeKind::BlockPointer(inner) + }, // XXX: RValueReference is most likely wrong, but I don't think we // can even add bindings for that, so huh. CXType_RValueReference | CXType_LValueReference => { let inner = Item::from_ty_or_ref( ty.pointee_type().unwrap(), location, None, @@ -1227,16 +1213,17 @@ impl Trace for Type { where T: Tracer, { match *self.kind() { TypeKind::Pointer(inner) | TypeKind::Reference(inner) | TypeKind::Array(inner, _) | TypeKind::Vector(inner, _) | + TypeKind::BlockPointer(inner) | TypeKind::Alias(inner) | TypeKind::ResolvedTypeRef(inner) => { tracer.visit_kind(inner.into(), EdgeKind::TypeReference); } TypeKind::TemplateAlias(inner, ref template_params) => { tracer.visit_kind(inner.into(), EdgeKind::TypeReference); for param in template_params { tracer.visit_kind( @@ -1268,13 +1255,12 @@ impl Trace for Type { TypeKind::UnresolvedTypeRef(_, _, None) | TypeKind::TypeParam | TypeKind::Void | TypeKind::NullPtr | TypeKind::Int(_) | TypeKind::Float(_) | TypeKind::Complex(_) | TypeKind::ObjCId | - TypeKind::ObjCSel | - TypeKind::BlockPointer => {} + TypeKind::ObjCSel => {} } } }
--- a/third_party/rust/bindgen/src/ir/var.rs +++ b/third_party/rust/bindgen/src/ir/var.rs @@ -260,18 +260,17 @@ impl ClangSubItemParser for Var { let value = if is_integer { let kind = match *canonical_ty.unwrap().kind() { TypeKind::Int(kind) => kind, _ => unreachable!(), }; let mut val = cursor .evaluate() - .and_then(|v| v.as_int()) - .map(|val| val as i64); + .and_then(|v| v.as_int()); if val.is_none() || !kind.signedness_matches(val.unwrap()) { let tu = ctx.translation_unit(); val = get_integer_literal_from_cursor(&cursor, tu); } val.map(|val| if kind == IntKind::Bool { VarType::Bool(val != 0) } else { @@ -300,52 +299,52 @@ impl ClangSubItemParser for Var { } } /// Try and parse a macro using all the macros parsed until now. fn parse_macro( ctx: &BindgenContext, cursor: &clang::Cursor, ) -> Option<(Vec<u8>, cexpr::expr::EvalResult)> { - use cexpr::{expr, nom}; + use cexpr::expr; let mut cexpr_tokens = cursor.cexpr_tokens()?; let parser = expr::IdentifierParser::new(ctx.parsed_macros()); match parser.macro_definition(&cexpr_tokens) { - nom::IResult::Done(_, (id, val)) => { + Ok((_, (id, val))) => { return Some((id.into(), val)); } _ => {} } // Try without the last token, to workaround a libclang bug in versions // previous to 4.0. // // See: // https://bugs.llvm.org//show_bug.cgi?id=9069 // https://reviews.llvm.org/D26446 cexpr_tokens.pop()?; match parser.macro_definition(&cexpr_tokens) { - nom::IResult::Done(_, (id, val)) => Some((id.into(), val)), + Ok((_, (id, val))) => Some((id.into(), val)), _ => None, } } fn parse_int_literal_tokens(cursor: &clang::Cursor) -> Option<i64> { - use cexpr::{expr, nom}; + use cexpr::expr; use cexpr::expr::EvalResult; let cexpr_tokens = cursor.cexpr_tokens()?; // TODO(emilio): We can try to parse other kinds of literals. match expr::expr(&cexpr_tokens) { - nom::IResult::Done(_, EvalResult::Int(Wrapping(val))) => Some(val), + Ok((_, EvalResult::Int(Wrapping(val)))) => Some(val), _ => None, } } fn get_integer_literal_from_cursor( cursor: &clang::Cursor, unit: &clang::TranslationUnit, ) -> Option<i64> {
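The var.rs hunks above track cexpr's move from nom 3 to nom 4, where `IResult` becomes a plain `std::result::Result`, so the old `IResult::Done(rest, value)` arms become `Ok((rest, value))`. A minimal sketch of the new calling convention against cexpr 0.3's public API follows; it is not part of this changeset, and the token slice is assumed to come from the caller (bindgen obtains it via `cursor.cexpr_tokens()`).

    // Minimal sketch (not part of this changeset) of the nom 4 calling
    // convention used above: cexpr 0.3's parsers return a std Result, so the
    // old `IResult::Done(rest, value)` arm becomes `Ok((rest, value))`.
    use cexpr::expr::{self, EvalResult};
    use cexpr::token::Token;
    use std::num::Wrapping;

    fn int_from_tokens(tokens: &[Token]) -> Option<i64> {
        match expr::expr(tokens) {
            // Successful parse that evaluated to an integer.
            Ok((_rest, EvalResult::Int(Wrapping(val)))) => Some(val),
            // Err(Err::Error(..)), Err(Err::Incomplete(..)) or a non-int result.
            _ => None,
        }
    }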
--- a/third_party/rust/bindgen/src/lib.rs +++ b/third_party/rust/bindgen/src/lib.rs @@ -94,17 +94,17 @@ use std::io::{self, Write}; use std::iter; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::sync::Arc; fn args_are_cpp(clang_args: &[String]) -> bool { return clang_args .windows(2) - .any(|w| w[0] == "-x=c++" || w[1] == "-x=c++" || w == &["-x", "c++"]); + .any(|w| w[0] == "-xc++" || w[1] == "-xc++" || w == &["-x", "c++"]); } bitflags! { /// A type used to indicate which kind of items we have to generate. pub struct CodegenConfig: u32 { /// Whether to generate functions. const FUNCTIONS = 1 << 0; /// Whether to generate types. @@ -306,16 +306,30 @@ impl Builder { output_vector.push( item.trim_left_matches("^") .trim_right_matches("$") .into(), ); }) .count(); + self.options + .blacklisted_items + .get_items() + .iter() + .map(|item| { + output_vector.push("--blacklist-item".into()); + output_vector.push( + item.trim_left_matches("^") + .trim_right_matches("$") + .into(), + ); + }) + .count(); + if !self.options.layout_tests { output_vector.push("--no-layout-tests".into()); } if self.options.impl_debug { output_vector.push("--impl-debug".into()); } @@ -368,16 +382,24 @@ impl Builder { if !self.options.whitelist_recursively { output_vector.push("--no-recursive-whitelist".into()); } if self.options.objc_extern_crate { output_vector.push("--objc-extern-crate".into()); } + if self.options.generate_block { + output_vector.push("--generate-block".into()); + } + + if self.options.block_extern_crate { + output_vector.push("--block-extern-crate".into()); + } + if self.options.builtins { output_vector.push("--builtins".into()); } if let Some(ref prefix) = self.options.ctypes_prefix { output_vector.push("--ctypes-prefix".into()); output_vector.push(prefix.clone()); } @@ -695,16 +717,29 @@ impl Builder { /// Generate `#[macro_use] extern crate objc;` instead of `use objc;` /// in the prologue of the files generated from objective-c files pub fn objc_extern_crate(mut self, doit: bool) -> Self { self.options.objc_extern_crate = doit; self } + /// Generate proper block signatures instead of void pointers. + pub fn generate_block(mut self, doit: bool) -> Self { + self.options.generate_block = doit; + self + } + + /// Generate `#[macro_use] extern crate block;` instead of `use block;` + /// in the prologue of the files generated from apple block files + pub fn block_extern_crate(mut self, doit: bool) -> Self { + self.options.block_extern_crate = doit; + self + } + /// Whether to use the clang-provided name mangling. This is true by default /// and probably needed for C++ features. /// /// However, some old libclang versions seem to return incorrect results in /// some cases for non-mangled functions, see [1], so we allow disabling it. /// /// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528 pub fn trust_clang_mangling(mut self, doit: bool) -> Self { @@ -728,16 +763,24 @@ impl Builder { /// Hide the given function from the generated bindings. Regular expressions /// are supported. pub fn blacklist_function<T: AsRef<str>>(mut self, arg: T) -> Builder { self.options.blacklisted_functions.insert(arg); self } + /// Hide the given item from the generated bindings, regardless of + /// whether it's a type, function, module, etc. Regular + /// expressions are supported. + pub fn blacklist_item<T: AsRef<str>>(mut self, arg: T) -> Builder { + self.options.blacklisted_items.insert(arg); + self + } + /// Treat the given type as opaque in the generated bindings. 
Regular /// expressions are supported. pub fn opaque_type<T: AsRef<str>>(mut self, arg: T) -> Builder { self.options.opaque_types.insert(arg); self } /// Whitelist the given type so that it (and all types that it transitively @@ -1287,16 +1330,20 @@ struct BindgenOptions { /// The set of types that have been blacklisted and should not appear /// anywhere in the generated code. blacklisted_types: RegexSet, /// The set of functions that have been blacklisted and should not appear /// in the generated code. blacklisted_functions: RegexSet, + /// The set of items, regardless of item-type, that have been + /// blacklisted and should not appear in the generated code. + blacklisted_items: RegexSet, + /// The set of types that should be treated as opaque structures in the /// generated code. opaque_types: RegexSet, /// The explicit rustfmt path. rustfmt_path: Option<PathBuf>, /// The set of types that we should have bindings for in the generated @@ -1449,16 +1496,24 @@ struct BindgenOptions { /// Whether to whitelist types recursively. Defaults to true. whitelist_recursively: bool, /// Instead of emitting 'use objc;' to files generated from objective c files, /// generate '#[macro_use] extern crate objc;' objc_extern_crate: bool, + /// Instead of emitting 'use block;' to files generated from objective c files, + /// generate '#[macro_use] extern crate block;' + generate_block: bool, + + /// Instead of emitting 'use block;' to files generated from objective c files, + /// generate '#[macro_use] extern crate block;' + block_extern_crate: bool, + /// Whether to use the clang-provided name mangling. This is true and /// probably needed for C++ features. /// /// However, some old libclang versions seem to return incorrect results in /// some cases for non-mangled functions, see [1], so we allow disabling it. 
/// /// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/528 enable_mangling: bool, @@ -1497,16 +1552,17 @@ impl ::std::panic::UnwindSafe for Bindge impl BindgenOptions { fn build(&mut self) { self.whitelisted_vars.build(); self.whitelisted_types.build(); self.whitelisted_functions.build(); self.blacklisted_types.build(); self.blacklisted_functions.build(); + self.blacklisted_items.build(); self.opaque_types.build(); self.bitfield_enums.build(); self.constified_enums.build(); self.constified_enum_modules.build(); self.rustified_enums.build(); self.no_partialeq_types.build(); self.no_copy_types.build(); self.no_hash_types.build(); @@ -1530,16 +1586,17 @@ impl Default for BindgenOptions { fn default() -> BindgenOptions { let rust_target = RustTarget::default(); BindgenOptions { rust_target, rust_features: rust_target.into(), blacklisted_types: Default::default(), blacklisted_functions: Default::default(), + blacklisted_items: Default::default(), opaque_types: Default::default(), rustfmt_path: Default::default(), whitelisted_types: Default::default(), whitelisted_functions: Default::default(), whitelisted_vars: Default::default(), default_enum_style: Default::default(), bitfield_enums: Default::default(), rustified_enums: Default::default(), @@ -1573,17 +1630,19 @@ impl Default for BindgenOptions { input_header: None, input_unsaved_files: vec![], parse_callbacks: None, codegen_config: CodegenConfig::all(), conservative_inline_namespaces: false, generate_comments: true, generate_inline_functions: false, whitelist_recursively: true, + generate_block: false, objc_extern_crate: false, + block_extern_crate: false, enable_mangling: true, prepend_enum_name: true, time_phases: false, rustfmt_bindings: true, rustfmt_configuration_file: None, no_partialeq_types: Default::default(), no_copy_types: Default::default(), no_hash_types: Default::default(),
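The lib.rs changes above add three new `Builder` options: `blacklist_item` (hide any item by name or regex, whatever its kind), `generate_block` (emit typed Apple-block signatures instead of void pointers) and `block_extern_crate`. A minimal build-script sketch follows; it is not part of this changeset, and the header name and regex are placeholders.

    // Minimal build-script sketch (not part of this changeset) exercising the
    // newly added Builder options; "wrapper.h" and the regex are placeholders.
    fn main() {
        let bindings = bindgen::Builder::default()
            .header("wrapper.h")
            // Hide an item by name/regex, regardless of whether it is a type,
            // function, module, etc.
            .blacklist_item("HIDDEN_.*")
            // Emit proper Apple-block signatures instead of void pointers.
            .generate_block(true)
            // Emit `#[macro_use] extern crate block;` instead of `use block;`.
            .block_extern_crate(true)
            .generate()
            .expect("bindgen failed");
        bindings
            .write_to_file("bindings.rs")
            .expect("couldn't write bindings");
    }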
--- a/third_party/rust/bindgen/src/options.rs +++ b/third_party/rust/bindgen/src/options.rs @@ -73,16 +73,23 @@ where .number_of_values(1), Arg::with_name("blacklist-function") .long("blacklist-function") .help("Mark <function> as hidden.") .value_name("function") .takes_value(true) .multiple(true) .number_of_values(1), + Arg::with_name("blacklist-item") + .long("blacklist-item") + .help("Mark <item> as hidden.") + .value_name("item") + .takes_value(true) + .multiple(true) + .number_of_values(1), Arg::with_name("no-layout-tests") .long("no-layout-tests") .help("Avoid generating layout tests for any type."), Arg::with_name("no-derive-copy") .long("no-derive-copy") .help("Avoid deriving Copy on any type."), Arg::with_name("no-derive-debug") .long("no-derive-debug") @@ -127,16 +134,22 @@ where .long("no-recursive-whitelist") .help("Disable whitelisting types recursively. This will cause \ bindgen to emit Rust code that won't compile! See the \ `bindgen::Builder::whitelist_recursively` method's \ documentation for details."), Arg::with_name("objc-extern-crate") .long("objc-extern-crate") .help("Use extern crate instead of use for objc."), + Arg::with_name("generate-block") + .long("generate-block") + .help("Generate block signatures instead of void pointers."), + Arg::with_name("block-extern-crate") + .long("block-extern-crate") + .help("Use extern crate instead of use for block."), Arg::with_name("distrust-clang-mangling") .long("distrust-clang-mangling") .help("Do not trust the libclang-provided mangling"), Arg::with_name("builtins") .long("builtins") .help("Output bindings for builtin definitions, e.g. \ __builtin_va_list."), Arg::with_name("ctypes-prefix") @@ -145,16 +158,17 @@ where ::std::os::raw.") .value_name("prefix") .takes_value(true), Arg::with_name("time-phases") .long("time-phases") .help("Time the different bindgen phases and print to stderr"), // All positional arguments after the end of options marker, `--` Arg::with_name("clang-args") + .last(true) .multiple(true), Arg::with_name("emit-clang-ast") .long("emit-clang-ast") .help("Output the Clang AST for debugging purposes."), Arg::with_name("emit-ir") .long("emit-ir") .help("Output our internal IR for debugging purposes."), Arg::with_name("emit-ir-graphviz") @@ -358,16 +372,22 @@ where } if let Some(hidden_functions) = matches.values_of("blacklist-function") { for fun in hidden_functions { builder = builder.blacklist_function(fun); } } + if let Some(hidden_identifiers) = matches.values_of("blacklist-item") { + for id in hidden_identifiers { + builder = builder.blacklist_item(id); + } + } + if matches.is_present("builtins") { builder = builder.emit_builtins(); } if matches.is_present("no-layout-tests") { builder = builder.layout_tests(false); } @@ -487,16 +507,24 @@ where if matches.is_present("no-recursive-whitelist") { builder = builder.whitelist_recursively(false); } if matches.is_present("objc-extern-crate") { builder = builder.objc_extern_crate(true); } + if matches.is_present("generate-block") { + builder = builder.generate_block(true); + } + + if matches.is_present("block-extern-crate") { + builder = builder.block_extern_crate(true); + } + if let Some(opaque_types) = matches.values_of("opaque-type") { for ty in opaque_types { builder = builder.opaque_type(ty); } } if let Some(lines) = matches.values_of("raw-line") { for line in lines {
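The options.rs hunk above exposes the same switches on the command line (`--blacklist-item`, `--generate-block`, `--block-extern-crate`) and marks the positional clang-args with `.last(true)`, so they are only accepted after `--`. A hypothetical invocation, with a placeholder header and pattern, would be `bindgen wrapper.h --blacklist-item 'HIDDEN_.*' --generate-block -- -xc++`; note that `-xc++` (without `=`) is the spelling the updated `args_are_cpp` check in lib.rs recognizes.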
--- a/third_party/rust/cexpr/.cargo-checksum.json +++ b/third_party/rust/cexpr/.cargo-checksum.json @@ -1,1 +1,1 @@ -{"files":{"Cargo.toml":"22914a43154e0b38bbe265a67024c1f98af9087ca561448ac0f13ed57c9311ae","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"d9771b8c6cf4426d3846de54c1febe20907f1eeadf7adfb5ade89a83bd9ea77f","src/expr.rs":"b888963ab9eb344c93c0654286231b4204809a971682155fade5c69a4493636f","src/lib.rs":"78bbde89e803cf931216e38bdd992e13894cd898868478a258eac5155bdf4de9","src/literal.rs":"fb80a7b43d96bedfd47bc2d39eaf588c5cef6a2374132fbdfe5146dd56a1894c","src/token.rs":"52d42deb2a2575bb8631e2e821593d8288fed16e21bab3ceeacb6a7b06c40087","tests/clang.rs":"a650cde69ab2e801f994f15e4cb32c538e53abefcdd862865ce277ce9a055242","tests/input/chars.h":"69c8141870872b795b5174bad125b748732c2b01d0e98ffcfc37b19f3f791f69","tests/input/fail.h":"b0b6cffd2dd17410b5eb02ee79ab75754820480b960db8a9866cc9983bd36b65","tests/input/floats.h":"28ec664e793c494e1a31f3bc5b790014e9921fc741bf475a86319b9a9eee5915","tests/input/int_signed.h":"934199eded85dd7820ca08c0beb1381ee6d9339970d2720a69c23025571707ce","tests/input/int_unsigned.h":"f47c1ccb6c69856162639277d7552090055420155df55f65581e57217cccce76","tests/input/strings.h":"75c60527068172b97983d2b8361938e856ea394002d5bef05de1adc6a0f5fc01","tests/input/test_llvm_bug_9069.h":"8d9ae1d1eadc8f6d5c14296f984547fe894d0f2ce5cd6d7aa8caad40a56bc5e1"},"package":"42aac45e9567d97474a834efdee3081b3c942b2205be932092f53354ce503d6c"} \ No newline at end of file +{"files":{"Cargo.toml":"32b00f47d6888b44ac5fb30e9693437dd95c98f000b5abb9a85880edc746dcb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"d9771b8c6cf4426d3846de54c1febe20907f1eeadf7adfb5ade89a83bd9ea77f","src/expr.rs":"b2c80d3125ff2fd66d0f889dfa2d6476b8e68cc9513e508ba862470182a3f7cc","src/lib.rs":"689f19cb8a8e88c2489e0d8f59ed75b03cb19f0e789e5a0d3447f695b2ef2259","src/literal.rs":"22aec22c7bdd374afd06c16c19f09b2763c5ffc37ecb0bbd60a5d9102f57ebc3","src/token.rs":"52d42deb2a2575bb8631e2e821593d8288fed16e21bab3ceeacb6a7b06c40087","tests/clang.rs":"0f820e2003e34c2ab69cd759314cebf755fd1b1929974976d3776968f687be7e","tests/input/chars.h":"69c8141870872b795b5174bad125b748732c2b01d0e98ffcfc37b19f3f791f69","tests/input/fail.h":"b0b6cffd2dd17410b5eb02ee79ab75754820480b960db8a9866cc9983bd36b65","tests/input/floats.h":"28ec664e793c494e1a31f3bc5b790014e9921fc741bf475a86319b9a9eee5915","tests/input/int_signed.h":"934199eded85dd7820ca08c0beb1381ee6d9339970d2720a69c23025571707ce","tests/input/int_unsigned.h":"d6b82716669aecbec4cfff2d1bf8c5af926f80ba01fe89de3b439264f3080ccb","tests/input/strings.h":"75c60527068172b97983d2b8361938e856ea394002d5bef05de1adc6a0f5fc01","tests/input/test_llvm_bug_9069.h":"8d9ae1d1eadc8f6d5c14296f984547fe894d0f2ce5cd6d7aa8caad40a56bc5e1"},"package":"8fc0086be9ca82f7fc89fc873435531cb898b86e850005850de1f820e2db6e9b"} \ No newline at end of file
--- a/third_party/rust/cexpr/Cargo.toml +++ b/third_party/rust/cexpr/Cargo.toml @@ -7,20 +7,20 @@ # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] name = "cexpr" -version = "0.2.3" +version = "0.3.3" authors = ["Jethro Beekman <jethro@jbeekman.nl>"] description = "A C expression parser and evaluator" documentation = "https://docs.rs/cexpr/" keywords = ["C", "expression", "parser"] license = "Apache-2.0/MIT" repository = "https://github.com/jethrogb/rust-cexpr" [dependencies.nom] -version = "^3" +version = "^4" features = ["verbose-errors"] [dev-dependencies.clang-sys] -version = "0.11.0" +version = ">= 0.13.0, < 0.27.0"
--- a/third_party/rust/cexpr/src/expr.rs +++ b/third_party/rust/cexpr/src/expr.rs @@ -30,17 +30,17 @@ use nom_crate::*; /// Expression parser/evaluator that supports identifiers. #[derive(Debug)] pub struct IdentifierParser<'ident> { identifiers: &'ident HashMap<Vec<u8>,EvalResult>, } #[derive(Copy,Clone)] struct PRef<'a>(&'a IdentifierParser<'a>); -pub type CResult<'a,R:'a> = IResult<&'a [Token],R,::Error>; +pub type CResult<'a,R> = IResult<&'a [Token],R,::Error>; /// The result of parsing a literal or evaluating an expression. #[derive(Debug,Clone,PartialEq)] pub enum EvalResult { Int(Wrapping<i64>), Float(f64), Char(CChar), Str(Vec<u8>), @@ -82,76 +82,98 @@ impl From<Vec<u8>> for EvalResult { // =========================================== // ============= Clang tokens ================ // =========================================== macro_rules! exact_token ( ($i:expr, $k:ident, $c:expr) => ({ if $i.is_empty() { - let res: CResult<&[u8]> = IResult::Incomplete(Needed::Size($c.len())); + let res: CResult<&[u8]> = Err(::nom_crate::Err::Incomplete(Needed::Size($c.len()))); res } else { if $i[0].kind==TokenKind::$k && &$i[0].raw[..]==$c { - IResult::Done(&$i[1..], &$i[0].raw[..]) + Ok((&$i[1..], &$i[0].raw[..])) } else { - IResult::Error(Err::Position(ErrorKind::Custom(::Error::ExactToken(TokenKind::$k,$c)), $i)) + Err(::nom_crate::Err::Error(error_position!($i, ErrorKind::Custom(::Error::ExactToken(TokenKind::$k,$c))))) } } }); ); macro_rules! typed_token ( ($i:expr, $k:ident) => ({ if $i.is_empty() { - let res: CResult<&[u8]> = IResult::Incomplete(Needed::Size(1)); + let res: CResult<&[u8]> = Err(::nom_crate::Err::Incomplete(Needed::Size(1))); res } else { if $i[0].kind==TokenKind::$k { - IResult::Done(&$i[1..], &$i[0].raw[..]) + Ok((&$i[1..], &$i[0].raw[..])) } else { - IResult::Error(Err::Position(ErrorKind::Custom(::Error::TypedToken(TokenKind::$k)), $i)) + Err(Err::Error(error_position!($i, ErrorKind::Custom(::Error::TypedToken(TokenKind::$k))))) } } }); ); #[allow(unused_macros)] macro_rules! any_token ( ($i:expr,) => ({ if $i.is_empty() { - let res: CResult<&Token> = IResult::Incomplete(Needed::Size(1)); + let res: CResult<&Token> = Err(::nom_crate::Err::Incomplete(Needed::Size(1))); res } else { - IResult::Done(&$i[1..], &$i[0]) + Ok((&$i[1..], &$i[0])) } }); ); macro_rules! p ( ($i:expr, $c:expr) => (exact_token!($i,Punctuation,$c.as_bytes())) ); macro_rules! one_of_punctuation ( ($i:expr, $c:expr) => ({ if $i.is_empty() { let min = $c.iter().map(|opt|opt.len()).min().expect("at least one option"); - let res: CResult<&[u8]> = IResult::Incomplete(Needed::Size(min)); + let res: CResult<&[u8]> = Err(::nom_crate::Err::Incomplete(Needed::Size(min))); res } else { if $i[0].kind==TokenKind::Punctuation && $c.iter().any(|opt|opt.as_bytes()==&$i[0].raw[..]) { - IResult::Done(&$i[1..], &$i[0].raw[..]) + Ok((&$i[1..], &$i[0].raw[..])) } else { - const VAILD_VALUES: &'static [&'static str] = &$c; - IResult::Error(Err::Position(ErrorKind::Custom(::Error::ExactTokens(TokenKind::Punctuation,VAILD_VALUES)), $i)) + const VALID_VALUES: &'static [&'static str] = &$c; + Err(Err::Error(error_position!($i, ErrorKind::Custom(::Error::ExactTokens(TokenKind::Punctuation,VALID_VALUES))))) } } }); ); +/// equivalent to nom's complete! macro, but adds the custom error type +#[macro_export] +macro_rules! 
comp ( + ($i:expr, $submac:ident!( $($args:tt)* )) => ( + { + use ::nom_crate::lib::std::result::Result::*; + use ::nom_crate::{Err,ErrorKind}; + + let i_ = $i.clone(); + match $submac!(i_, $($args)*) { + Err(Err::Incomplete(_)) => { + Err(Err::Error(error_position!($i, ErrorKind::Complete::<::Error>))) + }, + rest => rest + } + } + ); + ($i:expr, $f:expr) => ( + comp!($i, call!($f)); + ); +); + // ================================================== // ============= Numeric expressions ================ // ================================================== impl<'a> AddAssign<&'a EvalResult> for EvalResult { fn add_assign(&mut self, rhs: &'a EvalResult) { use self::EvalResult::*; *self=match (&*self,rhs) { @@ -285,17 +307,17 @@ impl<'a> PRef<'a> { map_opt!(pair!(one_of_punctuation!(["+", "-", "~"]),call_m!(self.unary)),unary_op) ) ); method!(mul_div_rem<PRef<'a>,&[Token],EvalResult,::Error>, mut self, do_parse!( acc: call_m!(self.unary) >> res: fold_many0!( - pair!(one_of_punctuation!(["*", "/", "%"]), call_m!(self.unary)), + pair!(comp!(one_of_punctuation!(["*", "/", "%"])), call_m!(self.unary)), acc, |mut acc, (op, val): (&[u8], EvalResult)| { match op[0] as char { '*' => acc *= &val, '/' => acc /= &val, '%' => acc %= &val, _ => unreachable!() }; @@ -304,17 +326,17 @@ impl<'a> PRef<'a> { ) >> (res) ) ); method!(add_sub<PRef<'a>,&[Token],EvalResult,::Error>, mut self, do_parse!( acc: call_m!(self.mul_div_rem) >> res: fold_many0!( - pair!(one_of_punctuation!(["+", "-"]), call_m!(self.mul_div_rem)), + pair!(comp!(one_of_punctuation!(["+", "-"])), call_m!(self.mul_div_rem)), acc, |mut acc, (op, val): (&[u8], EvalResult)| { match op[0] as char { '+' => acc += &val, '-' => acc -= &val, _ => unreachable!() }; acc @@ -322,17 +344,17 @@ impl<'a> PRef<'a> { ) >> (res) ) ); method!(shl_shr<PRef<'a>,&[Token],EvalResult,::Error>, mut self, numeric!(do_parse!( acc: call_m!(self.add_sub) >> res: fold_many0!( - pair!(one_of_punctuation!(["<<", ">>"]), call_m!(self.add_sub)), + pair!(comp!(one_of_punctuation!(["<<", ">>"])), call_m!(self.add_sub)), acc, |mut acc, (op, val): (&[u8], EvalResult)| { match op { b"<<" => acc <<= &val, b">>" => acc >>= &val, _ => unreachable!() }; acc @@ -340,45 +362,45 @@ impl<'a> PRef<'a> { ) >> (res) )) ); method!(and<PRef<'a>,&[Token],EvalResult,::Error>, mut self, numeric!(do_parse!( acc: call_m!(self.shl_shr) >> res: fold_many0!( - preceded!(p!("&"), call_m!(self.shl_shr)), + preceded!(comp!(p!("&")), call_m!(self.shl_shr)), acc, |mut acc, val: EvalResult| { acc &= &val; acc } ) >> (res) )) ); method!(xor<PRef<'a>,&[Token],EvalResult,::Error>, mut self, numeric!(do_parse!( acc: call_m!(self.and) >> res: fold_many0!( - preceded!(p!("^"), call_m!(self.and)), + preceded!(comp!(p!("^")), call_m!(self.and)), acc, |mut acc, val: EvalResult| { acc ^= &val; acc } ) >> (res) )) ); method!(or<PRef<'a>,&[Token],EvalResult,::Error>, mut self, numeric!(do_parse!( acc: call_m!(self.xor) >> res: fold_many0!( - preceded!(p!("|"), call_m!(self.xor)), + preceded!(comp!(p!("|")), call_m!(self.xor)), acc, |mut acc, val: EvalResult| { acc |= &val; acc } ) >> (res) )) ); @@ -392,54 +414,56 @@ impl<'a> PRef<'a> { // ======================================================= // ============= Literals and identifiers ================ // ======================================================= impl<'a> PRef<'a> { fn identifier(self, input: &[Token]) -> (Self,CResult<EvalResult>) { (self,match input.split_first() { None => - IResult::Incomplete(Needed::Size(1)), + Err(Err::Incomplete(Needed::Size(1))), 
Some((&Token{kind:TokenKind::Identifier,ref raw},rest)) => { if let Some(r) = self.identifiers.get(&raw[..]) { - IResult::Done(rest, r.clone()) + Ok((rest, r.clone())) } else { - IResult::Error(Err::Position(ErrorKind::Custom(::Error::UnknownIdentifier), input)) + Err(Err::Error(error_position!(input, ErrorKind::Custom(::Error::UnknownIdentifier)))) } }, Some(_) => - IResult::Error(Err::Position(ErrorKind::Custom(::Error::TypedToken(TokenKind::Identifier)), input)), + Err(Err::Error(error_position!(input, ErrorKind::Custom(::Error::TypedToken(TokenKind::Identifier))))), }) } fn literal(self, input: &[Token]) -> (Self,CResult<EvalResult>) { (self,match input.split_first() { None => - IResult::Incomplete(Needed::Size(1)), + Err(Err::Incomplete(Needed::Size(1))), Some((&Token{kind:TokenKind::Literal,ref raw},rest)) => match literal::parse(raw) { - IResult::Done(_,result) => IResult::Done(rest, result), - _ => IResult::Error(Err::Position(ErrorKind::Custom(::Error::InvalidLiteral), input)) + Ok((_,result)) => Ok((rest, result)), + _ => { + Err(Err::Error(error_position!(input, ErrorKind::Custom(::Error::InvalidLiteral)))) + }, }, Some(_) => - IResult::Error(Err::Position(ErrorKind::Custom(::Error::TypedToken(TokenKind::Literal)), input)), + Err(Err::Error(error_position!(input, ErrorKind::Custom(::Error::TypedToken(TokenKind::Literal))))), }) } method!(string<PRef<'a>,&[Token],Vec<u8>,::Error>, mut self, alt!( map_opt!(call_m!(self.literal),EvalResult::as_str) | map_opt!(call_m!(self.identifier),EvalResult::as_str) ) ); // "string1" "string2" etc... method!(concat_str<PRef<'a>,&[Token],EvalResult,::Error>, mut self, map!( - pair!(call_m!(self.string),many0!(call_m!(self.string))), + pair!(call_m!(self.string),many0!(comp!(call_m!(self.string)))), |(first,v)| Vec::into_iter(v).fold(first,|mut s,elem|{Vec::extend_from_slice(&mut s,Vec::<u8>::as_slice(&elem));s}).into() ) ); method!(expr<PRef<'a>,&[Token],EvalResult,::Error>, mut self, alt!( call_m!(self.numeric_expr) | delimited!(p!("("),call_m!(self.expr),p!(")")) | @@ -490,17 +514,17 @@ impl<'ident> IdentifierParser<'ident> { /// on most function-like macros, or if the token stream contains comments, /// keywords or unknown identifiers. /// /// N.B. This is intended to fail on function-like macros, but if it the /// macro takes a single argument, the argument name is defined as an /// identifier, and the macro otherwise parses as an expression, it will /// return a result even on function-like macros. /// - /// ```ignore + /// ```c /// // will evaluate into IDENTIFIER /// #define DELETE(IDENTIFIER) /// // will evaluate into IDENTIFIER-3 /// #define NEGATIVE_THREE(IDENTIFIER) -3 /// ``` pub fn macro_definition<'a>(&self,input: &'a [Token]) -> CResult<'a,(&'a [u8],EvalResult)> { ::assert_full_parse(self.as_ref().macro_definition(input).1) }
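A hedged sketch of driving the updated `IdentifierParser` API follows; it is not part of this changeset. The identifier map is left empty here, and the token slice is assumed to come from the caller (bindgen's parse_macro threads in previously parsed macros instead).

    // Hedged sketch (not part of this changeset) of calling the updated
    // `IdentifierParser` API: with nom 4 the success case is a plain
    // `Ok((remaining_tokens, (macro_name, value)))`.
    use std::collections::HashMap;
    use cexpr::expr::{EvalResult, IdentifierParser};
    use cexpr::token::Token;

    fn eval_object_macro(tokens: &[Token]) -> Option<(Vec<u8>, EvalResult)> {
        // Previously evaluated macros would normally be threaded in here.
        let known: HashMap<Vec<u8>, EvalResult> = HashMap::new();
        match IdentifierParser::new(&known).macro_definition(tokens) {
            Ok((_rest, (name, value))) => Some((name.to_vec(), value)),
            _ => None,
        }
    }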
--- a/third_party/rust/cexpr/src/lib.rs +++ b/third_party/rust/cexpr/src/lib.rs @@ -34,24 +34,31 @@ pub enum Error { /// /// When encountered, this generally means a bug exists in the data that /// was passed in or the parsing logic. InvalidLiteral, /// A full parse was requested, but data was left over after parsing finished. Partial, } +impl From<u32> for Error { + fn from(_: u32) -> Self { + Error::InvalidLiteral + } +} + macro_rules! identity ( ($i:expr,$e:expr) => ($e); ); /// If the input result indicates a successful parse, but there is data left, /// return an `Error::Partial` instead. -pub fn assert_full_parse<I,O,E>(result: IResult<&[I],O,E>) -> IResult<&[I],O,::Error> { +pub fn assert_full_parse<I,O,E>(result: IResult<&[I],O,E>) -> IResult<&[I],O,::Error> + where Error: From<E> { match fix_error!((),::Error,identity!(result)) { - IResult::Done(rem,output) => if rem.len()==0 { - IResult::Done(rem, output) + Ok((rem,output)) => if rem.len()==0 { + Ok((rem, output)) } else { - IResult::Error(Err::Position(ErrorKind::Custom(::Error::Partial), rem)) + Err(Err::Error(error_position!(rem, ErrorKind::Custom(::Error::Partial)))) }, r => r, } }
--- a/third_party/rust/cexpr/src/literal.rs +++ b/third_party/rust/cexpr/src/literal.rs @@ -74,35 +74,57 @@ impl Into<Vec<u8>> for CChar { let mut v=Vec::with_capacity(1); v.push(i as u8); v } } } } +/// ensures the child parser consumes the whole input +#[macro_export] +macro_rules! full ( + ($i: expr, $submac:ident!( $($args:tt)* )) => ( + { + use ::nom_crate::lib::std::result::Result::*; + let res = $submac!($i, $($args)*); + match res { + Ok((i, o)) => if i.len() == 0 { + Ok((i, o)) + } else { + Err(::nom_crate::Err::Error(error_position!(i, ::nom_crate::ErrorKind::Custom(42)))) + }, + r => r, + } + } + ); + ($i:expr, $f:ident) => ( + full!($i, call!($f)); + ); +); + // ==================================================== // ======== macros that shouldn't be necessary ======== // ==================================================== macro_rules! force_type ( - ($input:expr,IResult<$i:ty,$o:ty,$e:ty>) => (IResult::Error::<$i,$o,$e>(Err::Position(ErrorKind::Fix,$input))) + ($input:expr,IResult<$i:ty,$o:ty,$e:ty>) => (Err::<($i,$o),Err<$i,$e>>(::nom_crate::Err::Error(error_position!($input, ErrorKind::Fix)))) ); // ================================= // ======== matching digits ======== // ================================= macro_rules! byte ( ($i:expr, $($p: pat)|* ) => ({ match $i.split_first() { - $(Some((&c @ $p,rest)))|* => IResult::Done::<&[_],u8,u32>(rest,c), - Some(_) => IResult::Error(Err::Position(ErrorKind::OneOf,$i)), - None => IResult::Incomplete(Needed::Size(1)), + $(Some((&c @ $p,rest)))|* => Ok::<(&[_],u8),::nom_crate::Err<&[_],u32>>((rest,c)), + Some(_) => Err(::nom_crate::Err::Error(error_position!($i, ErrorKind::OneOf))), + None => Err(::nom_crate::Err::Incomplete(Needed::Size(1))), } }) ); named!(binary<u8>,byte!(b'0' ... b'1')); named!(octal<u8>,byte!(b'0' ... b'7')); named!(decimal<u8>,byte!(b'0' ... b'9')); named!(hexadecimal<u8>,byte!(b'0' ... b'9' | b'a' ... b'f' | b'A' ... 
b'F')); @@ -137,17 +159,17 @@ fn c_raw_escape(n: Vec<u8>, radix: u32) fn c_unicode_escape(n: Vec<u8>) -> Option<CChar> { str::from_utf8(&n).ok() .and_then(|i|u32::from_str_radix(i,16).ok()) .and_then(char::from_u32) .map(CChar::Char) } named!(escaped_char<CChar>, - preceded!(char!('\\'),alt!( + preceded!(complete!(char!('\\')),alt_complete!( map!(one_of!(r#"'"?\"#),CChar::Char) | map!(one_of!("abfnrtv"),escape2char) | map_opt!(many_m_n!(1,3,octal),|v|c_raw_escape(v,8)) | map_opt!(preceded!(char!('x'),many1!(hexadecimal)),|v|c_raw_escape(v,16)) | map_opt!(preceded!(char!('u'),many_m_n!(4,4,hexadecimal)),c_unicode_escape) | map_opt!(preceded!(char!('U'),many_m_n!(8,8,hexadecimal)),c_unicode_escape) )) ); @@ -168,70 +190,80 @@ named!(c_char<CChar>, char!('\'') ) ); named!(c_string<Vec<u8> >, delimited!( alt!( preceded!(c_width_prefix,char!('"')) | char!('"') ), fold_many0!( - alt!(map!(escaped_char, |c:CChar| c.into()) | map!(is_not!("\""), |c: &[u8]| c.into())), + alt!(map!(escaped_char, |c:CChar| c.into()) | map!(complete!(is_not!("\"")), |c: &[u8]| c.into())), Vec::new(), |mut v: Vec<u8>, res:Vec<u8>| { v.extend_from_slice(&res); v } ), char!('"') ) ); // ================================ // ======== parse integers ======== // ================================ fn c_int_radix(n: Vec<u8>, radix: u32) -> Option<u64> { str::from_utf8(&n).ok() .and_then(|i|u64::from_str_radix(i,radix).ok()) } +fn take_ul(input: &[u8]) -> IResult<&[u8], &[u8]> { + use ::nom_crate::InputTakeAtPosition; + + let r = input.split_at_position(|c| c != b'u' && c != b'U' && c != b'l' && c != b'L'); + match r { + Err(Err::Incomplete(_)) => Ok((&input[input.len()..], input)), + res => res, + } +} + named!(c_int<i64>, map!(terminated!(alt_complete!( - map_opt!(preceded!(tag!("0x"),many1!(hexadecimal)),|v|c_int_radix(v,16)) | - map_opt!(preceded!(tag!("0b"),many1!(binary)),|v|c_int_radix(v,2)) | - map_opt!(preceded!(char!('0'),many1!(octal)),|v|c_int_radix(v,8)) | - map_opt!(many1!(decimal),|v|c_int_radix(v,10)) | + map_opt!(preceded!(tag!("0x"),many1!(complete!(hexadecimal))),|v|c_int_radix(v,16)) | + map_opt!(preceded!(tag!("0b"),many1!(complete!(binary))),|v|c_int_radix(v,2)) | + map_opt!(preceded!(char!('0'),many1!(complete!(octal))),|v|c_int_radix(v,8)) | + map_opt!(many1!(complete!(decimal)),|v|c_int_radix(v,10)) | force_type!(IResult<_,_,u32>) - ),is_a!("ulUL")),|i|i as i64) + ),opt!(take_ul)),|i|i as i64) ); // ============================== // ======== parse floats ======== // ============================== named!(float_width<u8>,complete!(byte!(b'f' | b'l' | b'F' | b'L'))); -named!(float_exp<(Option<u8>,Vec<u8>)>,preceded!(byte!(b'e'|b'E'),pair!(opt!(byte!(b'-'|b'+')),many1!(decimal)))); +named!(float_exp<(Option<u8>,Vec<u8>)>,preceded!(byte!(b'e'|b'E'),pair!(opt!(byte!(b'-'|b'+')),many1!(complete!(decimal))))); named!(c_float<f64>, map_opt!(alt!( - terminated!(recognize!(tuple!(many1!(decimal),byte!(b'.'),many0!(decimal))),opt!(float_width)) | - terminated!(recognize!(tuple!(many0!(decimal),byte!(b'.'),many1!(decimal))),opt!(float_width)) | - terminated!(recognize!(tuple!(many0!(decimal),opt!(byte!(b'.')),many1!(decimal),float_exp)),opt!(float_width)) | - terminated!(recognize!(tuple!(many1!(decimal),opt!(byte!(b'.')),many0!(decimal),float_exp)),opt!(float_width)) | - terminated!(recognize!(many1!(decimal)),float_width) + terminated!(recognize!(tuple!(many1!(complete!(decimal)),byte!(b'.'),many0!(complete!(decimal)))),opt!(float_width)) | + 
terminated!(recognize!(tuple!(many0!(complete!(decimal)),byte!(b'.'),many1!(complete!(decimal)))),opt!(float_width)) | + terminated!(recognize!(tuple!(many0!(complete!(decimal)),opt!(byte!(b'.')),many1!(complete!(decimal)),float_exp)),opt!(float_width)) | + terminated!(recognize!(tuple!(many1!(complete!(decimal)),opt!(byte!(b'.')),many0!(complete!(decimal)),float_exp)),opt!(float_width)) | + terminated!(recognize!(many1!(complete!(decimal))),float_width) ),|v|str::from_utf8(v).ok().and_then(|i|f64::from_str(i).ok())) ); // ================================ // ======== main interface ======== // ================================ named!(one_literal<&[u8],EvalResult,::Error>, fix_error!(::Error,alt_complete!( - map!(c_char,EvalResult::Char) | - map!(c_int,|i|EvalResult::Int(::std::num::Wrapping(i))) | - map!(c_float,EvalResult::Float) | - map!(c_string,EvalResult::Str) + map!(full!(c_char),EvalResult::Char) | + map!(full!(c_int),|i|EvalResult::Int(::std::num::Wrapping(i))) | + map!(full!(c_float),EvalResult::Float) | + map!(full!(c_string),EvalResult::Str) )) ); /// Parse a C literal. /// /// The input must contain exactly the representation of a single literal /// token, and in particular no whitespace or sign prefixes. pub fn parse(input: &[u8]) -> IResult<&[u8],EvalResult,::Error> {
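A small sketch of the public literal entry point after the migration follows; it is not part of this changeset, and the byte string is just an example hexadecimal literal.

    // Small sketch (not part of this changeset): the public literal parser now
    // also returns a std Result, shown here on a hexadecimal integer literal.
    use cexpr::expr::EvalResult;
    use std::num::Wrapping;

    fn main() {
        match cexpr::literal::parse(b"0x2A") {
            Ok((_rest, EvalResult::Int(Wrapping(v)))) => println!("int literal: {}", v),
            _ => println!("not an integer literal"),
        }
    }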
--- a/third_party/rust/cexpr/tests/clang.rs +++ b/third_party/rust/cexpr/tests/clang.rs @@ -63,17 +63,17 @@ fn test_definition(ident: Vec<u8>, token Some(Invalid) }.expect(&format!("Invalid definition in testcase: {}",display_name)) }; let result = if functional { let mut fnidents; let expr_tokens; match fn_macro_declaration(&tokens) { - cexpr::nom::IResult::Done(rest,(_,args)) => { + Ok((rest,(_,args))) => { fnidents=idents.clone(); expr_tokens=rest; for arg in args { let val = match test { Int(_) => bytes_to_int(&arg), Str(_) => Some(Str(arg.to_owned())), _ => unimplemented!() }.expect(&format!("Invalid argument in functional macro testcase: {}",display_name)); @@ -82,21 +82,21 @@ fn test_definition(ident: Vec<u8>, token }, e => { println!("Failed test for {}, unable to parse functional macro declaration: {:?}",display_name,e); return false; } } assert_full_parse(IdentifierParser::new(&fnidents).expr(&expr_tokens)) } else { - IdentifierParser::new(idents).macro_definition(&tokens).map(|(_,val)|val) + IdentifierParser::new(idents).macro_definition(&tokens).map(|(i, (_,val))|(i, val)) }; match result { - cexpr::nom::IResult::Done(_,val) => { + Ok((_,val)) => { if val==test { if let Some(_)=idents.insert(ident,val) { panic!("Duplicate definition for testcase: {}",display_name); } true } else { println!("Failed test for {}, expected {:?}, got {:?}",display_name,test,val); false @@ -144,21 +144,21 @@ extern "C" fn visit_children_thunk<F>(cu unsafe fn visit_children<F>(cursor: CXCursor, mut f: F) where F: FnMut(CXCursor,CXCursor) -> CXChildVisitResult { clang_visitChildren(cursor, visit_children_thunk::<F> as _, &mut f as *mut F as CXClientData); } unsafe fn location_in_scope(r: CXSourceRange) -> bool { let start=clang_getRangeStart(r); - let mut file=CXFile(ptr::null_mut()); + let mut file=ptr::null_mut(); clang_getSpellingLocation(start,&mut file,ptr::null_mut(),ptr::null_mut(),ptr::null_mut()); clang_Location_isFromMainFile(start)!=0 && clang_Location_isInSystemHeader(start)==0 - && file.0!=ptr::null_mut() + && file!=ptr::null_mut() } /// tokenize_range_adjust can be used to work around LLVM bug 9069 /// https://bugs.llvm.org//show_bug.cgi?id=9069 fn file_visit_macros<F: FnMut(Vec<u8>, Vec<Token>)>(file: &str, tokenize_range_adjust: bool, mut visitor: F) { unsafe { let tu={ let index=clang_createIndex(true as _, false as _);
--- a/third_party/rust/cexpr/tests/input/int_unsigned.h +++ b/third_party/rust/cexpr/tests/input/int_unsigned.h @@ -1,12 +1,13 @@ #define Int_456 456 #define Int_0 0 #define Int_1 0b1 #define Int_2 0x2 +#define Int_3 3L #define Int_63 077 #define Int_123 123 #define Int_124 124u #define Int_125 125uL #define Int_126 126LuL #define Int_16 (((1)<<4ULL))/*comment*/ #define Int_13 1|8^6&2<<1
--- a/third_party/rust/clang-sys/.cargo-checksum.json +++ b/third_party/rust/clang-sys/.cargo-checksum.json @@ -1,1 +1,1 @@ -{"files":{".travis.yml":"d5c89494c836e00ec8c3c02c9e228bf5dc34aabff203c37662a248e2da4bda05","CHANGELOG.md":"62fd8ba43afbc4da3dba40d448a5af482794aaaa99071d40dc7abf8fc1a2195b","Cargo.toml":"1ada60cd29713d4386050d2b61a9eed430827885520816b0412ed0380fa3fa8f","LICENSE.txt":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","README.md":"dff1b472fe1edbc6059ff5a96e595fa8dab9e9e133d10fd761cf5dfdcc80f4c6","appveyor.yml":"c9ab8ab1ab028b27d2be176e994a0d6a255cf8bcc36e15868472b6b8abf33fac","build.rs":"50be9c247e528ab0a354a7652fa9516906f79bbb4d128d54db7f5a9ee1ed2a86","ci/before_install.sh":"711c9d0539fa0372980c3a288d9482a0e46d3ba0fb8f7c7c110d6488a8ec4de5","ci/install.bat":"fb636c3511ba038ccf805755ef6542237cc595e905edcd61d56abd7163321f76","ci/script.sh":"1bb1cd29bd9635cc126cdcbd6c02f3500620a231a86726bf2165a4b74baaf433","ci/test_script.bat":"73462f51aaa9a1c14ce9f55c41dc3672df64faa9789725384ae4f28d8ba3c90b","clippy.toml":"acef14b9acffa18d1069ae08a4e8fe824a614f91b0bc71a6b1c68e4d885397e6","src/lib.rs":"d0a11284694f4f77448e72480addca613572d19c07fa92157e0fa717ed504abd","src/link.rs":"a0208e6b8e4840f1162b3b799b5e12dd559cc6f31a330b0eb1ba4ebe2385296d","src/support.rs":"70e77ea4337f740b13c394034c5705e962af6ee7ac4843fc7c9c7fe22ec2d074","tests/header.h":"b1cf564b21d76db78529d1934e1481a5f0452fdedc6e32954608293c310498b6","tests/lib.rs":"e5e8a60bcaec3b5d043fde4a993d397adb56454d0b2a6adaa15df0535246f909"},"package":"d7f7c04e52c35222fffcc3a115b5daf5f7e2bfb71c13c4e2321afe1fc71859c2"} \ No newline at end of file +{"files":{".travis.yml":"01d03daeb78fddd8fcd3dcf1ae9c30ffda023db214a1a89a1d5be768ea03277c","CHANGELOG.md":"ed5eb852120b184fda36d06a920080e5bd377b018548f2d37eb575dee32a20b1","Cargo.toml":"ae115d5ba8ac3e9074f6f8e64a89ec88579b388db75bbbd655d010610661b19a","LICENSE.txt":"3ddf9be5c28fe27dad143a5dc76eea25222ad1dd68934a047064e56ed2fa40c5","README.md":"21ff1488c29d612cee0d10fc48dab7efbd0a8a24158ee709b88e312e939008a0","appveyor.yml":"1a5d6953fb6e373dc760d50659628f04d48f68bd3f3f8e434800b31e74ef1497","build.rs":"06ef3732108d09118f50294fbca2b90857085fc3364777fca77eb022866399ac","build/common.rs":"4532706a124c0ff40332d5a72cd1e465bee72cd118f8071fbb6e70dde00c68f8","build/dynamic.rs":"544e5cb9fe364165a641192fc3f1ab916cb1d632a92536b8ce77a941fbb3082b","build/static.rs":"ff8de756b33efff75770a5552ff4573fe1bbb68ec8bd40d57854a05adb7e9d5c","ci/before_install.sh":"efb85403bedbfc6db19a8c41c61be98eac9f6e09ac6a33c0bdaf2828b5ea73ba","ci/install.bat":"bb02414d81dd23c8597f82f390769b084c2d32129ed197475a769a25ee97249a","ci/script.sh":"52db533df970f1b44c0b2663f3bfac4476f2150e94fc392b2bab4145325f418b","ci/test_script.bat":"901609adc59dab2730e16dd374d0351d6406e7559fe4d86ddd9a857ad9c84d2a","clippy.toml":"fcf54943ba571514b244cc098ce08671b4117167733e8107e799d533a12a2195","src/lib.rs":"d9952a832909e5490fca27b08d349a0d36c4004cd04fdb024ddf246cb81503d1","src/link.rs":"625ac2a7c5d0b85122e4a68542f235f09a069e7743f67f94c433f0f4313c09db","src/support.rs":"3eae21722287a462921825929c48802642d28ca194d1bc43aee47739350ecd17","tests/header.h":"1b15a686d1c06561960045a26c25a34d840f26c8246f2f5e630f993b69c7492c","tests/lib.rs":"d5d39e3ffbdc7303c2f1b9ae09f60ebf546b7c2c3599ec5d0c99d23332456908"},"package":"481e42017c1416b1c0856ece45658ecbb7c93d8a93455f7e5fa77f3b35455557"} \ No newline at end of file
--- a/third_party/rust/clang-sys/.travis.yml +++ b/third_party/rust/clang-sys/.travis.yml @@ -1,26 +1,26 @@ -language: rust - -addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-5 - -os: - - linux - -rust: stable - -env: - - LLVM_VERSION=3.5 CLANG_VERSION=clang_3_5 - - LLVM_VERSION=5.0 CLANG_VERSION=clang_5_0 - -cache: - directories: - - $HOME/.cargo - - $HOME/.llvm - -before_install: . ./ci/before_install.sh - -script: . ./ci/script.sh +language: rust + +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - gcc-5 + +os: + - linux + +rust: stable + +env: + - LLVM_VERSION=3.5 CLANG_VERSION=clang_3_5 + - LLVM_VERSION=5.0 CLANG_VERSION=clang_5_0 + +cache: + directories: + - $HOME/.cargo + - $HOME/.llvm + +before_install: . ./ci/before_install.sh + +script: . ./ci/script.sh
--- a/third_party/rust/clang-sys/CHANGELOG.md +++ b/third_party/rust/clang-sys/CHANGELOG.md @@ -1,272 +1,295 @@ -## [0.23.0] - 2018-06-16 - -### Changed -- Changed `Clang::find` to skip dynamic libraries for an incorrect architecture on Windows - -## [0.22.0] - 2018-03-11 - -### Added -- Added support for `clang` 6.0.x -- Bumped `libc` version to `0.2.39` -- Bumped `libloading` version to `0.5.0` - -## [0.21.2] - 2018-02-17 - -### Changed -- Added original errors to error messages -- Added support for searching for libraries in `LD_LIBRARY_PATH` directories - -## [0.21.1] - 2017-11-24 - -### Changed -- Improved finding of versioned libraries (e.g., `libclang-3.9.so`) - -### Fixed -* Fixed compilation failures on the beta and nightly channels caused by a [compiler bug](https://github.com/KyleMayes/clang-sys/pull/69) - -## [0.21.0] - 2017-10-11 - -### Changed -* Replaced `bitflags` usage with constants which avoids crashes on 32-bit Linux platforms - -## [0.20.1] - 2017-09-16 - -### Fixed -- Fixed static linking - -## [0.20.0] - 2017-09-14 - -### Added -- Added support for `clang` 5.0.x -- Added `clang` as a link target of this package -- Added dummy implementations of `is_loaded` for builds with the `static` Cargo feature enabled - -## [0.19.0] - 2017-07-02 - -### Changed -- Bumped `bitflags` version to `0.9.1` -- Added `args` parameter to `Clang::new` function which passes arguments to the Clang executable - -## [0.18.0] - 2017-05-16 - -### Changed -- Improved finding of versioned libraries (e.g., `libclang.so.3.9`) - -## [0.17.0] - 2017-05-08 - -### Changed -- Changed storage type of include search paths from `Vec<PathBuf>` to `Option<Vec<PathBuf>>` - -## [0.16.0] - 2017-05-02 - -### Changed -- Bumped `libloading` version to `0.4.0` - -## [0.15.2] - 2017-04-28 - -### Fixed -- Fixed finding of `libclang.so.1` on Linux - -## [0.15.1] - 2017-03-29 - -### Fixed -- Fixed static linking when libraries are in [different directories](https://github.com/KyleMayes/clang-sys/issues/50) - -## [0.15.0] - 2017-03-13 - -### Added -- Added support for `clang` 4.0.x - -### Changed -- Changed functions in the `Functions` struct to be `unsafe` (`runtime` feature only) -- Changed `Clang::find` method to ignore directories and non-executable files -- Changed `Clang::find` to skip dynamic libraries for an incorrect architecture on FreeBSD and Linux -- Bumped `bitflags` version to `0.7.0` - -## [0.14.0] - 2017-01-30 - -### Changed -- Changed all enum types from tuple structs to raw integers to avoid - [segmentation faults](https://github.com/rust-lang/rust/issues/39394) on some platforms - -## [0.13.0] - 2017-01-29 - -### Changed -- Changed all opaque pointers types from tuple structs to raw pointers to avoid - [segmentation faults](https://github.com/rust-lang/rust/issues/39394) on some platforms - -## [0.12.0] - 2016-12-13 - -### Changed -- Altered the runtime linking API to allow for testing the presence of functions - -## [0.11.1] - 2016-12-07 - -### Added -- Added support for linking to Clang on Windows from unofficial LLVM sources such as MSYS and MinGW - -## [0.11.0] - 2016-10-07 - -### Changed -- Changed all enums from Rust enums to typed constants to avoid - [undefined behavior](https://github.com/KyleMayes/clang-sys/issues/42) - -## [0.10.1] - 2016-08-21 - -### Changed -- Changed static linking on FreeBSD and OS X to link against `libc++` instead of `libstd++` - -## [0.10.0] - 2016-08-01 - -### Changed -- Added `runtime` Cargo feature that links to `libclang` shared library at runtime -- Added 
`from_raw` method to `CXTypeLayoutError` enum -- Added implementations of `Deref` for opaque FFI structs -- Changed `Default` implementations for structs to zero out the struct - -## [0.9.0] - 2016-07-21 - -### Added -- Added documentation bindings - -## [0.8.1] - 2016-07-20 - -### Changed -- Added `CLANG_PATH` environment variable for providing a path to `clang` executable -- Added usage of `llvm-config` to search for `clang` -- Added usage of `xcodebuild` to search for `clang` on OS X - -## [0.8.0] - 2016-07-18 - -### Added -- Added support for `clang` 3.9.x - -### Changed -- Bumped `libc` version to `0.2.14` - -### Fixed -- Fixed `LIBCLANG_PATH` usage on Windows to search both the `bin` and `lib` directories -- Fixed search path parsing on OS X -- Fixed search path parsing on Windows -- Fixed default search path ordering on OS X - -## [0.7.2] - 2016-06-17 - -### Fixed -- Fixed finding of `clang` executables when system has executables matching `clang-*` - (e.g., `clang-format`) - -## [0.7.1] - 2016-06-10 - -### Changed -- Bumped `libc` version to `0.2.12` - -### Fixed -- Fixed finding of `clang` executables suffixed by their version (e.g., `clang-3.5`) - -## [0.7.0] - 2016-05-31 - -### Changed -- Changed `Clang` struct `version` field type to `Option<CXVersion>` - -## [0.6.0] - 2016-05-26 - -### Added -- Added `support` module - -### Fixed -- Fixed `libclang` linking on FreeBSD -- Fixed `libclang` linking on Windows with the MSVC toolchain -- Improved `libclang` static linking - -## [0.5.4] - 20160-5-19 - -### Changed -- Added implementations of `Default` for FFI structs - -## [0.5.3] - 2016-05-17 - -### Changed -- Bumped `bitflags` version to `0.7.0` - -## [0.5.2] - 2016-05-12 - -### Fixed -- Fixed `libclang` static linking - -## [0.5.1] - 2016-05-10 - -### Fixed -- Fixed `libclang` linking on OS X -- Fixed `libclang` linking on Windows - -## [0.5.0] - 2016-05-10 - -### Removed -- Removed `rustc_version` dependency -- Removed support for `LIBCLANG_STATIC` environment variable - -### Changed -- Bumped `bitflags` version to `0.6.0` -- Bumped `libc` version to `0.2.11` -- Improved `libclang` search path -- Improved `libclang` static linking - -## [0.4.2] - 2016-04-20 - -### Changed -- Bumped `libc` version to `0.2.10` - -## [0.4.1] - 2016-04-02 - -### Changed -- Bumped `libc` version to `0.2.9` -- Bumped `rustc_version` version to `0.1.7` - -## [0.4.0] - 2016-03-28 - -### Removed -- Removed support for `clang` 3.4.x - -## [0.3.1] - 2016-03-21 - -### Added -- Added support for finding `libclang` - -## [0.3.0] - 2016-03-16 - -### Removed -- Removed build system types and functions - -### Added -- Added support for `clang` 3.4.x - -### Changed -- Bumped `bitflags` version to `0.5.0` -- Bumped `libc` version to `0.2.8` - -## [0.2.1] - 2016-02-13 - -### Changed -- Simplified internal usage of conditional compilation -- Bumped `bitflags` version to `0.4.0` -- Bumped `libc` version to `0.2.7` -- Bumped `rustc_version` version to `0.1.6` - -## [0.2.0] - 2016-02-12 - -### Added -- Added support for `clang` 3.8.x - -## [0.1.2] - 2015-12-29 - -### Added -- Added derivations of `Debug` for FFI structs - -## [0.1.1] - 2015-12-26 - -### Added -- Added derivations of `PartialOrd` and `Ord` for FFI enums - -## [0.1.0] - 2015-12-22 -- Initial release +## [0.26.1] - 2018-10-10 + +### Fixed +- Fixed support for finding libraries in `bin` directories on Windows + +## [0.26.0] - 2018-10-07 + +### Changed +- Added support for finding libraries with version suffixes on Linux when using runtime linking (e.g., 
`libclang.so.1`) + +## [0.25.0] - 2018-10-06 + +### Changed +- Added support for versioned libraries on BSDs + +## [0.24.0] - 2018-09-15 + +### Changed +- Reworked finding of libraries (see `README.md` for details) + +### Added +- Added support for `clang` 7.0.x + +## [0.23.0] - 2018-06-16 + +### Changed +- Changed `Clang::find` to skip dynamic libraries for an incorrect architecture on Windows + +## [0.22.0] - 2018-03-11 + +### Added +- Added support for `clang` 6.0.x +- Bumped `libc` version to `0.2.39` +- Bumped `libloading` version to `0.5.0` + +## [0.21.2] - 2018-02-17 + +### Changed +- Added original errors to error messages +- Added support for searching for libraries in `LD_LIBRARY_PATH` directories + +## [0.21.1] - 2017-11-24 + +### Changed +- Improved finding of versioned libraries (e.g., `libclang-3.9.so`) + +### Fixed +* Fixed compilation failures on the beta and nightly channels caused by a [compiler bug](https://github.com/KyleMayes/clang-sys/pull/69) + +## [0.21.0] - 2017-10-11 + +### Changed +* Replaced `bitflags` usage with constants which avoids crashes on 32-bit Linux platforms + +## [0.20.1] - 2017-09-16 + +### Fixed +- Fixed static linking + +## [0.20.0] - 2017-09-14 + +### Added +- Added support for `clang` 5.0.x +- Added `clang` as a link target of this package +- Added dummy implementations of `is_loaded` for builds with the `static` Cargo feature enabled + +## [0.19.0] - 2017-07-02 + +### Changed +- Bumped `bitflags` version to `0.9.1` +- Added `args` parameter to `Clang::new` function which passes arguments to the Clang executable + +## [0.18.0] - 2017-05-16 + +### Changed +- Improved finding of versioned libraries (e.g., `libclang.so.3.9`) + +## [0.17.0] - 2017-05-08 + +### Changed +- Changed storage type of include search paths from `Vec<PathBuf>` to `Option<Vec<PathBuf>>` + +## [0.16.0] - 2017-05-02 + +### Changed +- Bumped `libloading` version to `0.4.0` + +## [0.15.2] - 2017-04-28 + +### Fixed +- Fixed finding of `libclang.so.1` on Linux + +## [0.15.1] - 2017-03-29 + +### Fixed +- Fixed static linking when libraries are in [different directories](https://github.com/KyleMayes/clang-sys/issues/50) + +## [0.15.0] - 2017-03-13 + +### Added +- Added support for `clang` 4.0.x + +### Changed +- Changed functions in the `Functions` struct to be `unsafe` (`runtime` feature only) +- Changed `Clang::find` method to ignore directories and non-executable files +- Changed `Clang::find` to skip dynamic libraries for an incorrect architecture on FreeBSD and Linux +- Bumped `bitflags` version to `0.7.0` + +## [0.14.0] - 2017-01-30 + +### Changed +- Changed all enum types from tuple structs to raw integers to avoid + [segmentation faults](https://github.com/rust-lang/rust/issues/39394) on some platforms + +## [0.13.0] - 2017-01-29 + +### Changed +- Changed all opaque pointers types from tuple structs to raw pointers to avoid + [segmentation faults](https://github.com/rust-lang/rust/issues/39394) on some platforms + +## [0.12.0] - 2016-12-13 + +### Changed +- Altered the runtime linking API to allow for testing the presence of functions + +## [0.11.1] - 2016-12-07 + +### Added +- Added support for linking to Clang on Windows from unofficial LLVM sources such as MSYS and MinGW + +## [0.11.0] - 2016-10-07 + +### Changed +- Changed all enums from Rust enums to typed constants to avoid + [undefined behavior](https://github.com/KyleMayes/clang-sys/issues/42) + +## [0.10.1] - 2016-08-21 + +### Changed +- Changed static linking on FreeBSD and OS X to link against `libc++` instead of 
`libstd++` + +## [0.10.0] - 2016-08-01 + +### Changed +- Added `runtime` Cargo feature that links to `libclang` shared library at runtime +- Added `from_raw` method to `CXTypeLayoutError` enum +- Added implementations of `Deref` for opaque FFI structs +- Changed `Default` implementations for structs to zero out the struct + +## [0.9.0] - 2016-07-21 + +### Added +- Added documentation bindings + +## [0.8.1] - 2016-07-20 + +### Changed +- Added `CLANG_PATH` environment variable for providing a path to `clang` executable +- Added usage of `llvm-config` to search for `clang` +- Added usage of `xcodebuild` to search for `clang` on OS X + +## [0.8.0] - 2016-07-18 + +### Added +- Added support for `clang` 3.9.x + +### Changed +- Bumped `libc` version to `0.2.14` + +### Fixed +- Fixed `LIBCLANG_PATH` usage on Windows to search both the `bin` and `lib` directories +- Fixed search path parsing on OS X +- Fixed search path parsing on Windows +- Fixed default search path ordering on OS X + +## [0.7.2] - 2016-06-17 + +### Fixed +- Fixed finding of `clang` executables when system has executables matching `clang-*` + (e.g., `clang-format`) + +## [0.7.1] - 2016-06-10 + +### Changed +- Bumped `libc` version to `0.2.12` + +### Fixed +- Fixed finding of `clang` executables suffixed by their version (e.g., `clang-3.5`) + +## [0.7.0] - 2016-05-31 + +### Changed +- Changed `Clang` struct `version` field type to `Option<CXVersion>` + +## [0.6.0] - 2016-05-26 + +### Added +- Added `support` module + +### Fixed +- Fixed `libclang` linking on FreeBSD +- Fixed `libclang` linking on Windows with the MSVC toolchain +- Improved `libclang` static linking + +## [0.5.4] - 20160-5-19 + +### Changed +- Added implementations of `Default` for FFI structs + +## [0.5.3] - 2016-05-17 + +### Changed +- Bumped `bitflags` version to `0.7.0` + +## [0.5.2] - 2016-05-12 + +### Fixed +- Fixed `libclang` static linking + +## [0.5.1] - 2016-05-10 + +### Fixed +- Fixed `libclang` linking on OS X +- Fixed `libclang` linking on Windows + +## [0.5.0] - 2016-05-10 + +### Removed +- Removed `rustc_version` dependency +- Removed support for `LIBCLANG_STATIC` environment variable + +### Changed +- Bumped `bitflags` version to `0.6.0` +- Bumped `libc` version to `0.2.11` +- Improved `libclang` search path +- Improved `libclang` static linking + +## [0.4.2] - 2016-04-20 + +### Changed +- Bumped `libc` version to `0.2.10` + +## [0.4.1] - 2016-04-02 + +### Changed +- Bumped `libc` version to `0.2.9` +- Bumped `rustc_version` version to `0.1.7` + +## [0.4.0] - 2016-03-28 + +### Removed +- Removed support for `clang` 3.4.x + +## [0.3.1] - 2016-03-21 + +### Added +- Added support for finding `libclang` + +## [0.3.0] - 2016-03-16 + +### Removed +- Removed build system types and functions + +### Added +- Added support for `clang` 3.4.x + +### Changed +- Bumped `bitflags` version to `0.5.0` +- Bumped `libc` version to `0.2.8` + +## [0.2.1] - 2016-02-13 + +### Changed +- Simplified internal usage of conditional compilation +- Bumped `bitflags` version to `0.4.0` +- Bumped `libc` version to `0.2.7` +- Bumped `rustc_version` version to `0.1.6` + +## [0.2.0] - 2016-02-12 + +### Added +- Added support for `clang` 3.8.x + +## [0.1.2] - 2015-12-29 + +### Added +- Added derivations of `Debug` for FFI structs + +## [0.1.1] - 2015-12-26 + +### Added +- Added derivations of `PartialOrd` and `Ord` for FFI enums + +## [0.1.0] - 2015-12-22 +- Initial release
--- a/third_party/rust/clang-sys/Cargo.toml +++ b/third_party/rust/clang-sys/Cargo.toml @@ -7,17 +7,17 @@ # # If you believe there's an error in this file please file an # issue against the rust-lang/cargo repository. If you're # editing this file be aware that the upstream Cargo.toml # will likely look very different (and much more reasonable) [package] name = "clang-sys" -version = "0.23.0" +version = "0.26.1" authors = ["Kyle Mayes <kyle@mayeses.com>"] build = "build.rs" links = "clang" description = "Rust bindings for libclang." documentation = "https://kylemayes.github.io/clang-sys/3_5/clang_sys" readme = "README.md" license = "Apache-2.0" repository = "https://github.com/KyleMayes/clang-sys" @@ -37,17 +37,19 @@ version = "0.2.11" clang_3_5 = [] clang_3_6 = ["gte_clang_3_6"] clang_3_7 = ["gte_clang_3_6", "gte_clang_3_7"] clang_3_8 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8"] clang_3_9 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8", "gte_clang_3_9"] clang_4_0 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8", "gte_clang_3_9", "gte_clang_4_0"] clang_5_0 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8", "gte_clang_3_9", "gte_clang_4_0", "gte_clang_5_0"] clang_6_0 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8", "gte_clang_3_9", "gte_clang_4_0", "gte_clang_5_0", "gte_clang_6_0"] +clang_7_0 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8", "gte_clang_3_9", "gte_clang_4_0", "gte_clang_5_0", "gte_clang_6_0", "gte_clang_7_0"] gte_clang_3_6 = [] gte_clang_3_7 = [] gte_clang_3_8 = [] gte_clang_3_9 = [] gte_clang_4_0 = [] gte_clang_5_0 = [] gte_clang_6_0 = [] +gte_clang_7_0 = [] runtime = ["libloading"] static = []
--- a/third_party/rust/clang-sys/LICENSE.txt +++ b/third_party/rust/clang-sys/LICENSE.txt @@ -1,202 +1,202 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. 
Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License.
--- a/third_party/rust/clang-sys/README.md +++ b/third_party/rust/clang-sys/README.md @@ -1,113 +1,122 @@ -# clang-sys - -[](https://crates.io/crates/clang-sys) -[](https://travis-ci.org/KyleMayes/clang-sys) -[](https://ci.appveyor.com/project/KyleMayes/clang-sys-vtvy5/branch/master) - -Rust bindings for `libclang`. - -If you are interested in a Rust wrapper for these bindings, see -[clang-rs](https://github.com/KyleMayes/clang-rs). - -Supported on the stable, beta, and nightly Rust channels. - -Released under the Apache License 2.0. - -See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines on contributing to this repository. - -## Supported Versions - -To target a version of `libclang`, enable one of the following Cargo features: - -* `clang_3_5` - requires `libclang` 3.5 or later - ([Documentation](https://kylemayes.github.io/clang-sys/3_5/clang_sys)) -* `clang_3_6` - requires `libclang` 3.6 or later - ([Documentation](https://kylemayes.github.io/clang-sys/3_6/clang_sys)) -* `clang_3_7` - requires `libclang` 3.7 or later - ([Documentation](https://kylemayes.github.io/clang-sys/3_7/clang_sys)) -* `clang_3_8` - requires `libclang` 3.8 or later - ([Documentation](https://kylemayes.github.io/clang-sys/3_8/clang_sys)) -* `clang_3_9` - requires `libclang` 3.9 or later - ([Documentation](https://kylemayes.github.io/clang-sys/3_9/clang_sys)) -* `clang_4_0` - requires `libclang` 4.0 or later - ([Documentation](https://kylemayes.github.io/clang-sys/4_0/clang_sys)) -* `clang_5_0` - requires `libclang` 5.0 or later - ([Documentation](https://kylemayes.github.io/clang-sys/5_0/clang_sys)) -* `clang_6_0` - requires `libclang` 6.0 or later - ([Documentation](https://kylemayes.github.io/clang-sys/6_0/clang_sys)) - -If you do not enable one of these features, the API provided by `libclang` 3.5 will be available by -default. - -## Dependencies - -By default, this crate will attempt to link to `libclang` dynamically. In this case, this crate -depends on the `libclang` shared library (`libclang.so` on Linux, `libclang.dylib` on OS X, -`libclang.dll` on Windows). If you want to link to `libclang` statically instead, enable the -`static` Cargo feature. In this case, this crate depends on the LLVM and Clang static libraries. If -you don't want to link to `libclang` at compiletime but instead want to load it at runtime, enable -the `runtime` Cargo feature. - -These libraries can be either be installed as a part of Clang or downloaded -[here](http://llvm.org/releases/download.html). - -**Note:** This crate supports finding versioned instances of `libclang.so` (e.g., -`libclang.so.3.9` or `libclang-3.9.so`). In the case where there are multiple instances to choose -from, this crate will prefer an unversioned instance first, then the version with the shortest and -highest version. For example, the following instances of `libclang.so` are listed in descending -order of preference: - -1. `libclang.so` -2. `libclang.so.4` -3. `libclang.so.4.0` -4. `libclang.so.3` -5. `libclang.so.3.9` - -**Note:** The downloads for LLVM and Clang 3.8 and later do not include the `libclang.a` static -library. This means you cannot link to any of these versions of `libclang` statically unless you -build it from source. 
- -## Environment Variables - -The following environment variables, if set, are used by this crate to find the required libraries -and executables: - -* `LLVM_CONFIG_PATH` **(compiletime)** - provides a path to an `llvm-config` executable -* `LIBCLANG_PATH` **(compiletime)** - provides a path to a directory containing a `libclang` shared - library -* `LIBCLANG_STATIC_PATH` **(compiletime)** - provides a path to a directory containing LLVM and - Clang static libraries -* `CLANG_PATH` **(runtime)** - provides a path to a `clang` executable - -## Linking - -### Dynamic - -First, the `libclang` shared library will be searched for in the directory provided by the -`LIBCLANG_PATH` environment variable if it was set. If this fails, the directory returned by -`llvm-config --libdir` will be searched. Failing that, the directories in the `LD_LIBRARY_PATH` -environment variable will be searched. If none of these approaches is successful, a list of likely -directories will be searched (e.g., `/usr/local/lib` on Linux). - -On Linux, running an executable that has been dynamically linked to `libclang` may require you to -add a path to `libclang.so` to the `LD_LIBRARY_PATH` environment variable. The same is true on OS -X, except the `DYLD_LIBRARY_PATH` environment variable is used instead. - -On Windows, running an executable that has been dynamically linked to `libclang` requires that -`libclang.dll` can be found by the executable at runtime. See -[here](https://msdn.microsoft.com/en-us/library/7d83bc18.aspx) for more information. - -### Static - -The availability of `llvm-config` is not optional for static linking. Ensure that an instance of -this executable can be found on your system's path or set the `LLVM_CONFIG_PATH` environment -variable. The required LLVM and Clang static libraries will be searched for in the same way as the -shared library is searched for, except the `LIBCLANG_STATIC_PATH` environment variable is used in -place of the `LIBCLANG_PATH` environment variable. - -### Runtime - -The `clang_sys::load` function is used to load a `libclang` shared library for use in the thread in -which it is called. The `clang_sys::unload` function will unload the `libclang` shared library. -`clang_sys::load` searches for a `libclang` shared library in the same way one is searched for when -linking to `libclang` dynamically at compiletime. +# clang-sys + +[](https://crates.io/crates/clang-sys) +[](https://travis-ci.org/KyleMayes/clang-sys) +[](https://ci.appveyor.com/project/KyleMayes/clang-sys-vtvy5/branch/master) + +Rust bindings for `libclang`. + +If you are interested in a Rust wrapper for these bindings, see +[clang-rs](https://github.com/KyleMayes/clang-rs). + +Supported on the stable, beta, and nightly Rust channels. + +Released under the Apache License 2.0. 
+ +## Supported Versions + +To target a version of `libclang`, enable one of the following Cargo features: + +* `clang_3_5` - requires `libclang` 3.5 or later + ([Documentation](https://kylemayes.github.io/clang-sys/3_5/clang_sys)) +* `clang_3_6` - requires `libclang` 3.6 or later + ([Documentation](https://kylemayes.github.io/clang-sys/3_6/clang_sys)) +* `clang_3_7` - requires `libclang` 3.7 or later + ([Documentation](https://kylemayes.github.io/clang-sys/3_7/clang_sys)) +* `clang_3_8` - requires `libclang` 3.8 or later + ([Documentation](https://kylemayes.github.io/clang-sys/3_8/clang_sys)) +* `clang_3_9` - requires `libclang` 3.9 or later + ([Documentation](https://kylemayes.github.io/clang-sys/3_9/clang_sys)) +* `clang_4_0` - requires `libclang` 4.0 or later + ([Documentation](https://kylemayes.github.io/clang-sys/4_0/clang_sys)) +* `clang_5_0` - requires `libclang` 5.0 or later + ([Documentation](https://kylemayes.github.io/clang-sys/5_0/clang_sys)) +* `clang_6_0` - requires `libclang` 6.0 or later + ([Documentation](https://kylemayes.github.io/clang-sys/6_0/clang_sys)) +* `clang_7_0` - requires `libclang` 7.0 or later + ([Documentation](https://kylemayes.github.io/clang-sys/7_0/clang_sys)) + +If you do not enable one of these features, the API provided by `libclang` 3.5 will be available by +default. + +## Dependencies + +By default, this crate will attempt to link to `libclang` dynamically. In this case, this crate +depends on the `libclang` shared library (`libclang.so` on Linux, `libclang.dylib` on OS X, +`libclang.dll` on Windows). If you want to link to `libclang` statically instead, enable the +`static` Cargo feature. In this case, this crate depends on the LLVM and Clang static libraries. If +you don't want to link to `libclang` at compiletime but instead want to load it at runtime, enable +the `runtime` Cargo feature. + +These libraries can either be installed as a part of Clang or downloaded +[here](http://llvm.org/releases/download.html). + +**Note:** The downloads for LLVM and Clang 3.8 and later do not include the `libclang.a` static +library. This means you cannot link to any of these versions of `libclang` statically unless you +build it from source. + +### Versioned Dependencies + +This crate supports finding versioned instances of `libclang.so` (e.g., `libclang-3.9.so`). +In the case where there are multiple instances to choose from, this crate will prefer instances with +higher versions. For example, the following instances of `libclang.so` are listed in descending +order of preference: + +1. `libclang-4.0.so` +2. `libclang-4.so` +3. `libclang-3.9.so` +4. `libclang-3.so` +5. `libclang.so` + +**Note:** On BSD distributions, versioned instances of `libclang.so` matching the pattern +`libclang.so.*` (e.g., `libclang.so.7.0`) are also included. + +**Note:** On Linux distributions when the `runtime` feature is enabled, versioned instances of +`libclang.so` matching the pattern `libclang.so.*` (e.g., `libclang.so.1`) are also included. 
+ +## Environment Variables + +The following environment variables, if set, are used by this crate to find the required libraries +and executables: + +* `LLVM_CONFIG_PATH` **(compiletime)** - provides a path to an `llvm-config` executable +* `LIBCLANG_PATH` **(compiletime)** - provides a path to a directory containing a `libclang` shared + library +* `LIBCLANG_STATIC_PATH` **(compiletime)** - provides a path to a directory containing LLVM and + Clang static libraries +* `CLANG_PATH` **(runtime)** - provides a path to a `clang` executable + +## Linking + +### Dynamic + +`libclang` shared libraries will be searched for in the following directories: + +* the directory provided by the `LIBCLANG_PATH` environment variable +* the `bin` and `lib` directories in the directory provided by `llvm-config --libdir` +* the directories provided by `LD_LIBRARY_PATH` environment variable +* a list of likely directories for the target platform (e.g., `/usr/local/lib` on Linux) +* **macOS only:** the toolchain directory in the directory provided by `xcode-select --print-path` + +On Linux, running an executable that has been dynamically linked to `libclang` may require you to +add a path to `libclang.so` to the `LD_LIBRARY_PATH` environment variable. The same is true on OS +X, except the `DYLD_LIBRARY_PATH` environment variable is used instead. + +On Windows, running an executable that has been dynamically linked to `libclang` requires that +`libclang.dll` can be found by the executable at runtime. See +[here](https://msdn.microsoft.com/en-us/library/7d83bc18.aspx) for more information. + +### Static + +The availability of `llvm-config` is not optional for static linking. Ensure that an instance of +this executable can be found on your system's path or set the `LLVM_CONFIG_PATH` environment +variable. The required LLVM and Clang static libraries will be searched for in the same way as +shared libraries are searched for, except the `LIBCLANG_STATIC_PATH` environment variable is used in +place of the `LIBCLANG_PATH` environment variable. + +### Runtime + +The `clang_sys::load` function is used to load a `libclang` shared library for use in the thread in +which it is called. The `clang_sys::unload` function will unload the `libclang` shared library. +`clang_sys::load` searches for a `libclang` shared library in the same way one is searched for when +linking to `libclang` dynamically at compiletime.
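The updated README's "Versioned Dependencies" section changes the selection policy: explicitly versioned instances with higher versions are now preferred, and the bare `libclang.so` is ranked last. The sketch below is a minimal, self-contained illustration of that ranking, assuming filenames like those in the README's list; it is not the crate's actual `build.rs` implementation (the old version of which is removed further down in this changeset).

```rust
// Illustrative sketch of the ranking described in the README's "Versioned
// Dependencies" section: prefer explicitly versioned libclang instances with
// higher versions, leaving the unversioned `libclang.so` as the last resort.
// This is not the crate's real build.rs logic; the filenames are examples.

/// Extracts the version components from names like `libclang-4.0.so` or
/// `libclang.so.7.0`; an unversioned `libclang.so` yields an empty vector.
fn parse_version(file: &str) -> Vec<u32> {
    let version = file
        .strip_prefix("libclang.so.")
        .or_else(|| file.strip_prefix("libclang-").map(|s| s.trim_end_matches(".so")))
        .unwrap_or("");
    version.split('.').filter_map(|s| s.parse().ok()).collect()
}

fn main() {
    let mut candidates = vec![
        "libclang.so",
        "libclang-3.so",
        "libclang-3.9.so",
        "libclang-4.so",
        "libclang-4.0.so",
    ];
    // Sort so that the highest parsed version comes first; `libclang.so`
    // (empty version) sorts last, matching the README's preference order.
    candidates.sort_by_key(|f| std::cmp::Reverse(parse_version(*f)));
    assert_eq!(candidates[0], "libclang-4.0.so");
    assert_eq!(*candidates.last().unwrap(), "libclang.so");
    println!("{:?}", candidates);
}
```

Running the sketch prints the candidates in the same descending order of preference that the README lists.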
--- a/third_party/rust/clang-sys/appveyor.yml +++ b/third_party/rust/clang-sys/appveyor.yml @@ -1,12 +1,12 @@ -environment: - matrix: - - LLVM_VERSION: 5.0.0 - CLANG_VERSION: clang_5_0 - -install: - - .\ci\install.bat - -build: false - -test_script: - - .\ci\test_script.bat +environment: + matrix: + - LLVM_VERSION: 7.0.0 + CLANG_VERSION: clang_7_0 + +install: + - .\ci\install.bat + +build: false + +test_script: + - .\ci\test_script.bat
--- a/third_party/rust/clang-sys/build.rs +++ b/third_party/rust/clang-sys/build.rs @@ -1,451 +1,77 @@ -// Copyright 2016 Kyle Mayes -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Finds the required `libclang` libraries and links to them. -//! -//! # Environment Variables -//! -//! This build script can make use of several environment variables to help it find the required -//! static or dynamic libraries. -//! -//! * `LLVM_CONFIG_PATH` - provides a path to an `llvm-config` executable -//! * `LIBCLANG_PATH` - provides a path to a directory containing a `libclang` shared library -//! * `LIBCLANG_STATIC_PATH` - provides a path to a directory containing LLVM and Clang static libraries - -#![allow(unused_attributes)] - -extern crate glob; - -use std::env; -use std::fs::{self, File}; -use std::io::{Read, Seek, SeekFrom}; -use std::path::{Path, PathBuf}; -use std::process::{Command}; - -use glob::{MatchOptions}; - -/// Returns the version in the supplied file if one can be found. -fn find_version(file: &str) -> Option<&str> { - if file.starts_with("libclang.so.") { - Some(&file[12..]) - } else if file.starts_with("libclang-") { - Some(&file[9..]) - } else { - None - } -} - -/// Returns the components of the version appended to the supplied file. -fn parse_version(file: &Path) -> Vec<u32> { - let file = file.file_name().and_then(|f| f.to_str()).unwrap_or(""); - let version = find_version(file).unwrap_or(""); - version.split('.').map(|s| s.parse::<u32>().unwrap_or(0)).collect() -} - -/// Returns a path to one of the supplied files if such a file can be found in the supplied directory. -fn contains(directory: &Path, files: &[String]) -> Option<PathBuf> { - // Join the directory to the files to obtain our glob patterns. - let patterns = files.iter().filter_map(|f| directory.join(f).to_str().map(ToOwned::to_owned)); - - // Prevent wildcards from matching path separators. - let mut options = MatchOptions::new(); - options.require_literal_separator = true; - - // Collect any files that match the glob patterns. - let mut matches = patterns.flat_map(|p| { - if let Ok(paths) = glob::glob_with(&p, &options) { - paths.filter_map(Result::ok).collect() - } else { - vec![] - } - }).collect::<Vec<_>>(); - - // Sort the matches by their version, preferring shorter and higher versions. - matches.sort_by_key(|m| parse_version(m)); - matches.pop() -} - -/// Runs a console command, returning the output if the command was successfully executed. -fn run(command: &str, arguments: &[&str]) -> Option<String> { - Command::new(command).args(arguments).output().map(|o| { - String::from_utf8_lossy(&o.stdout).into_owned() - }).ok() -} - -/// Runs `llvm-config`, returning the output if the command was successfully executed. 
-fn run_llvm_config(arguments: &[&str]) -> Result<String, String> { - match run(&env::var("LLVM_CONFIG_PATH").unwrap_or_else(|_| "llvm-config".into()), arguments) { - Some(output) => Ok(output), - None => { - let message = format!( - "couldn't execute `llvm-config {}`, set the LLVM_CONFIG_PATH environment variable \ - to a path to a valid `llvm-config` executable", - arguments.join(" "), - ); - Err(message) - }, - } -} - -/// Backup search directory globs for FreeBSD and Linux. -const SEARCH_LINUX: &[&str] = &[ - "/usr/lib*", - "/usr/lib*/*", - "/usr/lib*/*/*", - "/usr/local/lib*", - "/usr/local/lib*/*", - "/usr/local/lib*/*/*", - "/usr/local/llvm*/lib", -]; - -/// Backup search directory globs for OS X. -const SEARCH_OSX: &[&str] = &[ - "/usr/local/opt/llvm*/lib", - "/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/lib", - "/Library/Developer/CommandLineTools/usr/lib", - "/usr/local/opt/llvm*/lib/llvm*/lib", -]; - -/// Backup search directory globs for Windows. -const SEARCH_WINDOWS: &[&str] = &[ - "C:\\LLVM\\lib", - "C:\\Program Files*\\LLVM\\lib", - "C:\\MSYS*\\MinGW*\\lib", -]; - -/// Returns the ELF class from the ELF header in the supplied file. -fn parse_elf_header(file: &PathBuf) -> Result<u8, String> { - let mut file = try!(File::open(file).map_err(|e| e.to_string())); - let mut elf = [0; 5]; - try!(file.read_exact(&mut elf).map_err(|e| e.to_string())); - if elf[..4] == [127, 69, 76, 70] { - Ok(elf[4]) - } else { - Err("invalid ELF header".into()) - } -} - -/// Returns the magic number from the PE header in the supplied file. -fn parse_pe_header(file: &PathBuf) -> Result<u16, String> { - let mut file = try!(File::open(file).map_err(|e| e.to_string())); - let mut pe = [0; 4]; - - // Determine the header offset. - try!(file.seek(SeekFrom::Start(0x3C)).map_err(|e| e.to_string())); - try!(file.read_exact(&mut pe).map_err(|e| e.to_string())); - let offset = i32::from(pe[0]) + (i32::from(pe[1]) << 8) + (i32::from(pe[2]) << 16) + (i32::from(pe[3]) << 24); - - // Determine the validity of the header. - try!(file.seek(SeekFrom::Start(offset as u64)).map_err(|e| e.to_string()));