From 8861e3209829eaab6a017c64ac0440cad39f74fa Mon Sep 17 00:00:00 2001
From: Stu Hood
Date: Sun, 22 Mar 2020 19:25:11 -0700
Subject: [PATCH 01/15] Port to tokio 0.2, and to stdlib futures for fs and task_executor (#9071)

We're on an older version of tokio, which doesn't smoothly support async/await. Switch to
tokio 0.2, which supports directly spawning and awaiting stdlib futures (via its macros): an
important step toward using async/await more broadly.

Additionally, port the `fs` and `task_executor` crates to stdlib futures.

Finally, fix up transitive consumers of the new APIs: in particular, since both `task_executor`
and `tokio` now consume stdlib futures to spawn tasks, switch all relevant tests and main methods
to the `tokio::main` and `tokio::test` macros, which annotate async methods and spawn a runtime so
that futures can be `await`ed inline.

Progress toward more usage of async/await!
---
 src/rust/engine/ASYNC.md | 9 +
 src/rust/engine/Cargo.lock | 747 +++++---------
 src/rust/engine/Cargo.toml | 12 +-
 src/rust/engine/engine_cffi/Cargo.toml | 1 +
 src/rust/engine/engine_cffi/src/lib.rs | 34 +-
 src/rust/engine/fs/Cargo.toml | 6 +-
 src/rust/engine/fs/brfs/Cargo.toml | 3 +-
 src/rust/engine/fs/brfs/src/main.rs | 328 +++---
 src/rust/engine/fs/brfs/src/syscall_tests.rs | 14 +-
 src/rust/engine/fs/brfs/src/tests.rs | 134 ++-
 src/rust/engine/fs/fs_util/Cargo.toml | 3 +-
 src/rust/engine/fs/fs_util/src/main.rs | 170 ++--
 src/rust/engine/fs/src/glob_matching.rs | 407 ++++----
 src/rust/engine/fs/src/lib.rs | 150 +--
 src/rust/engine/fs/src/posixfs_tests.rs | 163 +--
 src/rust/engine/fs/store/Cargo.toml | 4 +-
 src/rust/engine/fs/store/src/local.rs | 15 +-
 src/rust/engine/fs/store/src/local_tests.rs | 350 ++++---
 src/rust/engine/fs/store/src/remote_tests.rs | 183 ++--
 src/rust/engine/fs/store/src/snapshot.rs | 6 +-
 .../engine/fs/store/src/snapshot_tests.rs | 481 +++++----
 src/rust/engine/fs/store/src/tests.rs | 957 +++++++++++-------
 src/rust/engine/logging/Cargo.toml | 2 +-
 src/rust/engine/logging/src/lib.rs | 2 +-
 src/rust/engine/logging/src/logger.rs | 69 +-
 src/rust/engine/process_execution/Cargo.toml | 9 +-
 .../engine/process_execution/src/cache.rs | 4 +-
 .../process_execution/src/cache_tests.rs | 30 +-
 .../engine/process_execution/src/local.rs | 124 +--
 .../process_execution/src/local_tests.rs | 194 ++--
 .../process_execution/src/nailgun/mod.rs | 23 +-
 .../process_execution/src/nailgun/tests.rs | 41 +-
 .../engine/process_execution/src/remote.rs | 22 +-
 .../process_execution/src/remote_tests.rs | 429 ++++----
 .../engine/process_execution/src/speculate.rs | 10 +-
 .../process_execution/src/speculate_tests.rs | 67 +-
 src/rust/engine/process_executor/Cargo.toml | 3 +-
 src/rust/engine/process_executor/src/main.rs | 22 +-
 src/rust/engine/serverset/Cargo.toml | 5 +-
 src/rust/engine/serverset/src/lib.rs | 10 +-
 src/rust/engine/serverset/src/retry_tests.rs | 23 +-
 src/rust/engine/serverset/src/tests.rs | 116 +--
 src/rust/engine/sharded_lmdb/Cargo.toml | 2 +-
 src/rust/engine/sharded_lmdb/src/lib.rs | 30 +-
 src/rust/engine/src/context.rs | 27 +-
 src/rust/engine/src/nodes.rs | 90 +-
 src/rust/engine/src/scheduler.rs | 9 +-
 src/rust/engine/task_executor/Cargo.toml | 5 +-
 src/rust/engine/task_executor/src/lib.rs | 116 +--
 src/rust/engine/workunit_store/Cargo.toml | 2 +-
 src/rust/engine/workunit_store/src/lib.rs | 45 +-
 51 files changed, 3064 insertions(+), 2644 deletions(-)
 create mode 100644 src/rust/engine/ASYNC.md

diff --git a/src/rust/engine/ASYNC.md
b/src/rust/engine/ASYNC.md new file mode 100644 index 00000000000..f54518a2de9 --- /dev/null +++ b/src/rust/engine/ASYNC.md @@ -0,0 +1,9 @@ + +# async-await port notes + +Many functions at the boundary between ported async-await, stdlib futures code and legacy +future 0.1 code temporarily return futures 0.3 BoxFuture and use explicit lifetimes, because that +is easier for a futures 0.1 consumer. stdlib futures consumers can easily call async functions with +references (because they can remain "on the stack"), but an 0.1 future cannot. These methods can be +swapped back to async once all callers are using async-await. + diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index c3c57f8d128..39abbaeb17d 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -41,6 +41,16 @@ name = "arrayvec" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "async-trait" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "async_semaphore" version = "0.0.1" @@ -48,7 +58,7 @@ dependencies = [ "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -91,14 +101,6 @@ dependencies = [ "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "base64" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "base64" version = "0.11.0" @@ -180,6 +182,7 @@ dependencies = [ "errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", "fuse 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -191,7 +194,7 @@ dependencies = [ "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -228,7 +231,6 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -410,32 +412,6 @@ name = "constant_time_eq" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "cookie" -version = "0.12.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "cookie_store" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cookie 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "publicsuffix 1.5.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "try_from 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "copy_dir" version = "0.1.2" @@ -453,11 +429,25 @@ dependencies = [ "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "core-foundation" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "core-foundation-sys" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "core-foundation-sys" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "crates-io" version = "0.22.0" @@ -487,45 +477,6 @@ dependencies = [ "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "crossbeam-deque" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam-queue" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "crossbeam-queue" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "crossbeam-utils" version = "0.6.6" @@ -671,12 +622,14 @@ 
dependencies = [ name = "engine" version = "0.0.1" dependencies = [ + "async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", "boxfuture 0.0.1", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "concrete_time 0.0.1", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "graph 0.0.1", "hashing 0.0.1", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -684,11 +637,12 @@ dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "num_enum 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num_enum 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "process_execution 0.0.1", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "reqwest 0.9.24 (registry+https://github.com/rust-lang/crates.io-index)", + "reqwest 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)", "rule_graph 0.0.1", "sharded_lmdb 0.0.1", "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", @@ -696,8 +650,9 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -711,6 +666,7 @@ dependencies = [ "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", "engine 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", @@ -765,14 +721,6 @@ dependencies = [ "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "error-chain" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "failure" version = "0.1.6" @@ -848,9 +796,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "fs" version = "0.0.1" dependencies = [ - "boxfuture 0.0.1", + "async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "ignore 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -859,7 +807,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -881,6 +829,7 @@ dependencies = [ "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", @@ -890,7 +839,7 @@ dependencies = [ "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -958,15 +907,6 @@ name = "futures-core" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "futures-cpupool" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "futures-executor" version = "0.3.4" @@ -1160,6 +1100,24 @@ dependencies = [ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "h2" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "hashing" version = "0.0.1" @@ -1212,15 +1170,23 @@ dependencies = [ "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "http" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "http-body" -version = "0.1.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-buf 0.1.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1238,47 +1204,42 @@ dependencies = [ [[package]] name = "hyper" -version = "0.12.35" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "h2 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "http-body 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-channel 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "h2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-buf 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-tcp 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "want 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "hyper-rustls" -version = "0.17.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "ct-logs 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.12.35 (registry+https://github.com/rust-lang/crates.io-index)", - "rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 
(registry+https://github.com/rust-lang/crates.io-index)", - "tokio-rustls 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls-native-certs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", - "webpki-roots 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1500,14 +1461,6 @@ dependencies = [ "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "lock_api" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "log" version = "0.3.9" @@ -1530,12 +1483,12 @@ version = "0.0.1" dependencies = [ "cargo 0.34.0 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_enum 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "simplelog 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1560,14 +1513,6 @@ name = "memchr" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "memoffset" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "mime" version = "0.3.16" @@ -1678,15 +1623,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "nails" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1870,16 +1815,6 @@ dependencies = [ 
"parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "parking_lot" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "parking_lot_core" version = "0.3.1" @@ -1892,20 +1827,6 @@ dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "parking_lot_core" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "percent-encoding" version = "1.0.1" @@ -1925,6 +1846,24 @@ dependencies = [ "ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "pin-project" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "pin-project-internal 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "pin-project-internal" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "pin-project-lite" version = "0.1.4" @@ -1999,7 +1938,7 @@ dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "mock 0.0.1", - "nails 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nails 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2010,10 +1949,8 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-process 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "uname 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", @@ 
-2025,11 +1962,12 @@ version = "0.0.1" dependencies = [ "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "process_execution 0.0.1", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -2103,7 +2041,7 @@ name = "protoc" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2119,18 +2057,6 @@ dependencies = [ "protoc 2.10.1 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "publicsuffix" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "error-chain 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", - "idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -2375,36 +2301,36 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.9.24" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "cookie 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cookie_store 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "encoding_rs 0.8.22 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.12.35 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper-rustls 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", + "percent-encoding 2.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_urlencoded 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-rustls 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", - "webpki-roots 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)", "winreg 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2473,16 +2399,27 @@ dependencies = [ [[package]] name = "rustls" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)", "sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rustls-native-certs" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "ryu" version = "1.0.2" @@ -2510,11 +2447,6 @@ name = "scopeguard" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "scopeguard" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "sct" version = "0.6.0" @@ -2524,6 +2456,26 @@ dependencies = [ "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "security-framework" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "core-foundation 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "security-framework-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "semver" version = "0.9.0" @@ -2584,13 +2536,13 @@ dependencies = [ [[package]] name = "serde_urlencoded" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2599,11 +2551,11 @@ version = "0.0.1" dependencies = [ "boxfuture 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2623,7 +2575,7 @@ version = "0.0.1" dependencies = [ "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "lmdb 0.8.0 (git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2727,6 +2679,7 @@ dependencies = [ "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "grpcio 0.3.0 (git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)", "hashing 0.0.1", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2745,8 +2698,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", @@ -2851,10 +2803,9 @@ dependencies = [ name = "task_executor" 
version = "0.0.1" dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -2953,56 +2904,24 @@ dependencies = [ [[package]] name = "tokio" -version = "0.1.22" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", + "mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-fs 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-sync 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-tcp 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-udp 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-uds 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-macros 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-buf" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "either 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-codec" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bytes 
0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3014,34 +2933,6 @@ dependencies = [ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "tokio-current-thread" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-executor" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-fs" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "tokio-io" version = "0.1.13" @@ -3062,149 +2953,28 @@ dependencies = [ "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "tokio-process" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-signal 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-reactor" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-sync 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "tokio-rustls" -version = "0.10.3" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "tokio-signal" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", - "signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-sync" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-tcp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-threadpool" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-timer" -version = "0.2.13" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-udp" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-uds" -version = "0.2.6" +name = "tokio-util" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", - "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3296,6 +3066,11 @@ dependencies = [ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tower-service" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "tower-util" version = "0.1.0" @@ -3311,14 +3086,6 @@ name = "try-lock" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "try_from" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "typenum" version = "1.11.2" @@ -3477,10 +3244,9 @@ dependencies = [ [[package]] name = "want" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3496,6 +3262,8 @@ version = "0.2.58" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen-macro 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3513,6 +3281,17 @@ dependencies = [ "wasm-bindgen-shared 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.58" @@ -3577,7 +3356,7 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3650,9 +3429,9 @@ name = "workunit_store" version = "0.0.1" dependencies = [ "concrete_time 0.0.1", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3680,12 +3459,12 @@ dependencies = [ "checksum arc-swap 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d7b8a9123b8027467bce0099fe556c628a53c8d83df0507084c31e9ba2e39aff" "checksum arrayref 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" "checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" +"checksum async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "750b1c38a1dfadd108da0f01c08f4cdc7ff1bb39b325f9c82cc972361780a6e1" "checksum atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" "checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" "checksum backtrace 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "e4036b9bf40f3cf16aba72a3d65e8a520fc4bafcdc7079aea8f848c58c5b5536" "checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" -"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" "checksum blake2b_simd 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d8fb2d74254a3a0b5cac33ac9f8ed0e44aa50378d9dbb2e5d83bd21ed1dc2c8a" @@ -3711,18 +3490,14 @@ dependencies = [ "checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007" "checksum commoncrypto-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2" "checksum constant_time_eq 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" -"checksum cookie 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "888604f00b3db336d2af898ec3c1d5d0ddf5e6d462220f2ededc33a87ac4bbd5" -"checksum cookie_store 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46750b3f362965f197996c4448e4a0935e791bf7d6631bfce9ee0af3d24c919c" "checksum copy_dir 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e4281031634644843bd2f5aa9c48cf98fc48d6b083bd90bb11becf10deaf8b0" "checksum core-foundation 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "25b9e03f145fd4f2bf705e07b900cd41fc636598fe5dc452fd0db1441c3f496d" +"checksum core-foundation 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "57d24c7a13c43e870e37c1556b74555437870a04514f7685f5b354e090567171" "checksum core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" +"checksum core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac" "checksum crates-io 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)" = "091018c3f5e8109d82d94b648555f0d4a308d15626da2fb22c76f32117e24569" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" "checksum crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c" -"checksum crossbeam-deque 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3aa945d63861bfe624b55d153a39684da1e8c0bc8fba932f7ee3a3c16cea3ca" -"checksum crossbeam-epoch 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5064ebdbf05ce3cb95e45c8b086f72263f4166b29b97f6baff7ef7fe047b55ac" -"checksum crossbeam-queue 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7c979cd6cfe72335896575c6b5688da489e420d36a27a0b9eb0c73db574b4a4b" -"checksum crossbeam-queue 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c695eeca1e7173472a32221542ae469b3e9aac3a4fc81f7696bcad82029493db" "checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" "checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" "checksum crypto-hash 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8a77162240fd97248d19a564a565eb563a3f592b386e4136fb300909e67dddca" @@ -3742,7 +3517,6 @@ dependencies = [ "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" "checksum errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c2a071601ed01b988f896ab14b95e67335d1eeb50190932a1320f7fe3cadc84e" "checksum errno-dragonfly 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "14ca354e36190500e1e1fb267c647932382b54053c50b14970856c0b00a35067" -"checksum error-chain 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3ab49e9dcb602294bc42f9a7dfc9bc6e936fca4418ea300dbfb84fe16de0b7d9" "checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" "checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" "checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" @@ -3761,7 +3535,6 @@ dependencies = [ "checksum futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5c329ae8753502fb44ae4fc2b622fa2a94652c41e795143765ba0927f92ab780" "checksum futures-channel 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c77d04ce8edd9cb903932b608268b3fffec4163dc053b3b402bf47eac1f1a8" "checksum futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f25592f769825e89b92358db00d26f965761e094951ac44d3663ef25b7ac464a" -"checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" "checksum futures-executor 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f674f3e1bcb15b37284a90cedf55afdba482ab061c407a9c0ebbd0f3109741ba" "checksum futures-io 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a638959aa96152c7a4cddf50fcb1e3fede0583b27157c26e67d6f99904090dc6" "checksum futures-macro 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5081aa3de1f7542a794a397cde100ed903b0630152d0973479018fd85423a7" @@ -3780,16 +3553,18 @@ dependencies = [ "checksum grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a63ccc27b0099347d2bea2c3d0f1c79c018a13cfd08b814a1992e341b645d5e1" "checksum grpcio-sys 0.2.3 (git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)" = "" "checksum h2 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" +"checksum h2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9d5c295d1c0c68e4e42003d75f908f5e16a1edd1cbe0b0d02e4dc2006a384f47" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum home 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "29302b90cfa76231a757a887d1e3153331a63c7f80b6c75f86366334cbe70708" "checksum http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" -"checksum http-body 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" +"checksum http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b708cc7f06493459026f53b9a61a7a121a5d1ec6238dee58ea4941132b30156b" +"checksum http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" "checksum httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" -"checksum hyper 0.12.35 (registry+https://github.com/rust-lang/crates.io-index)" = "9dbe6ed1438e1f8ad955a4701e9a944938e9519f6888d12d8558b645e247d5f6" -"checksum hyper-rustls 0.17.1 (registry+https://github.com/rust-lang/crates.io-index)" = "719d85c7df4a7f309a77d145340a063ea929dcb2e025bae46a80345cffec2952" +"checksum hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ed6081100e960d9d74734659ffc9cc91daf1c0fc7aceb8eaa94ee1a3f5046f2e" +"checksum hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08" "checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" "checksum ignore 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "522daefc3b69036f80c7d2990b28ff9e0471c683bad05ca258e0a01dd22c5a1e" @@ -3812,14 +3587,12 @@ dependencies = [ "checksum lmdb 0.8.0 (git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)" = "" "checksum lmdb-sys 0.8.0 (git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)" = "" "checksum lock_api 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "62ebf1391f6acad60e5c8b43706dde4582df75c06698ab44511d15016bc2442c" -"checksum lock_api 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "79b2de95ecb4691949fea4716ca53cdbcfccb2c612e19644a8bad05edcf9f47b" "checksum log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" "checksum log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "14b6052be84e6b71ab17edffc2eeabf5c2c3ae1fdb464aae35ac50c67a44e1f7" "checksum maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" "checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" "checksum memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53445de381a1f436797497c61d851644d0e8e88e6140f22872ad33a704933978" -"checksum memoffset 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "75189eb85871ea5c2e2c15abbdd541185f63b408415e5051f5cac122d8c774b9" "checksum mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = 
"2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" "checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" "checksum miniz_oxide 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aa679ff6578b1cddee93d7e82e263b94a575e0bfced07284eb0c037c1d2416a5" @@ -3830,7 +3603,7 @@ dependencies = [ "checksum miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "396aa0f2003d7df8395cb93e09871561ccc3e785f0acb369170e8cc74ddf9226" "checksum mktemp 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "77001ceb9eed65439f3dc2a2543f9ba1417d912686bf224a7738d0966e6dcd69" "checksum multimap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb04b9f127583ed176e163fb9ec6f3e793b87e21deedd5734a69386a18a0151" -"checksum nails 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2c608b13791e902e685701016a522b958ac1e1cb9197c6c002c44914947d52" +"checksum nails 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bbf022f659381fd767684f3f1d46b55a20b3d0902d4c722f9f78589d8afa4156" "checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" "checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" "checksum num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e" @@ -3851,12 +3624,12 @@ dependencies = [ "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" "checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13" "checksum parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0802bff09003b291ba756dc7e79313e51cc31667e94afbe847def490424cde5" -"checksum parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" "checksum parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad7f7e6ebdc79edff6fdcb87a55b620174f7a989e3eb31b65231f4af57f00b8c" -"checksum parking_lot_core 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b876b1b9e7ac6e1a74a6da34d25c42e17e8862aa409cbbbdcfc8d86c6f3bc62b" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" +"checksum pin-project 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7804a463a8d9572f13453c516a5faea534a2403d7ced2f0c7e100eeff072772c" +"checksum pin-project-internal 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "385322a45f2ecf3410c68d2a549a4a2685e8051d0f278e39743ff4e451cb9b3f" "checksum pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "237844750cfbb86f67afe27eee600dfbbcb6188d734139b534cbfbf4f96792ae" "checksum pin-utils 0.1.0-alpha.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" "checksum pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677" @@ -3873,7 +3646,6 @@ dependencies = [ "checksum protobuf-codegen 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c12a571137dc99703cb46fa21f185834fc5578a65836573fcff127f7b53f41e1" "checksum protoc 2.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fd83d2547a9e2c8bc6016607281b3ec7ef4871c55be6930915481d80350ab88" "checksum protoc-grpcio 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b0292d93a536174ff6bafe8b5e8534aeeb2b039146bae59770c07f4d2c2458c9" -"checksum publicsuffix 1.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3bbaa49075179162b49acac1c6aa45fb4dafb5f13cf6794276d77bc7fd95757b" "checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" @@ -3901,7 +3673,7 @@ dependencies = [ "checksum regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "322cf97724bea3ee221b78fe25ac9c46114ebb51747ad5babd51a2fc6a8235a8" "checksum regex-syntax 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b28dfe3fe9badec5dbf0a79a9cccad2cfc2ab5484bdb3e44cbd1ae8b3ba2be06" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" -"checksum reqwest 0.9.24 (registry+https://github.com/rust-lang/crates.io-index)" = "f88643aea3c1343c804950d7bf983bd2067f5ab59db6d613a08e05572f2714ab" +"checksum reqwest 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b81e49ddec5109a9dcfc5f2a317ff53377c915e9ae9d4f2fb50914b85614e2" "checksum ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)" = "741ba1704ae21999c00942f9f5944f801e977f54302af346b596287599ad1862" "checksum rust-argon2 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2bc8af4bda8e1ff4932523b94d3dd20ee30a87232323eda55903ffd71d2fb017" "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" @@ -3909,13 +3681,15 @@ dependencies = [ "checksum rustc-workspace-hack 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc71d2faa173b74b232dedc235e3ee1696581bb132fc116fa3626d6151a1a8fb" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" "checksum rustfix 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7150ac777a2931a53489f5a41eb0937b84e3092a20cd0e73ad436b65b507f607" -"checksum rustls 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b25a18b1bf7387f0145e7f8324e700805aade3842dd3db2e74e4cdeb4677c09e" +"checksum rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1" +"checksum rustls-native-certs 0.3.0 
(registry+https://github.com/rust-lang/crates.io-index)" = "a75ffeb84a6bd9d014713119542ce415db3a3e4748f0bfce1e1416cd224a23a5" "checksum ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" "checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" "checksum schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "507a9e6e8ffe0a4e0ebb9a10293e62fdf7657c06f1b8bb07a8fcf697d2abf295" "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" -"checksum scopeguard 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b42e15e59b18a828bbf5c58ea01debb36b9b096346de35d941dcb89009f24a0d" "checksum sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" +"checksum security-framework 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "97bbedbe81904398b6ebb054b3e912f99d55807125790f3198ac990d98def5b0" +"checksum security-framework-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "06fd2f23e31ef68dd2328cc383bd493142e46107a3a0e24f7d734e3f3b80fe4c" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" "checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449" @@ -3923,7 +3697,7 @@ dependencies = [ "checksum serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "190e9765dcedb56be63b6e0993a006c7e3b071a016a304736e4a315dc01fb142" "checksum serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "9371ade75d4c2d6cb154141b9752cf3781ec9c05e0e5cf35060e1e70ee7b9c25" "checksum serde_test 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "33f96dff8c3744387b53404ea33e834073b0791dcc1ea9c85b805745f9324704" -"checksum serde_urlencoded 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "642dd69105886af2efd227f75a520ec9b44a820d65bc133a9131f7d229fd165a" +"checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" "checksum sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "27044adfd2e1f077f649f59deb9490d3941d674002f7d062870a60ebe9bd47a0" "checksum shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "170a13e64f2a51b77a45702ba77287f5c6829375b04a69cf2222acd17d0cfab9" "checksum signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94f478ede9f64724c5d173d7bb56099ec3e2d9fc2774aac65d34b8b890405f41" @@ -3955,26 +3729,12 @@ dependencies = [ "checksum thread-scoped 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bcbb6aa301e5d3b0b5ef639c9a9c7e2f1c944f177b460c04dc24c69b1fa2bd99" "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" "checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = 
"db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" -"checksum tokio 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" -"checksum tokio 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8fdd17989496f49cdc57978c96f0c9fe5e4a58a8bddc6813c449a4624f6a030b" -"checksum tokio-buf 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" -"checksum tokio-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "25b2998660ba0e70d18684de5d06b70b70a3a747469af9dea7618cc59e75976b" +"checksum tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "0fa5e81d6bc4e67fe889d5783bd2a128ab2e0cfa487e0be16b6a8d177b101616" "checksum tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)" = "" -"checksum tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e" -"checksum tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" -"checksum tokio-fs 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "297a1206e0ca6302a0eed35b700d292b275256f596e2f3fea7729d5e629b6ff4" "checksum tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" "checksum tokio-macros 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f4b1e7ed7d5d4c2af3d999904b0eebe76544897cdbfb2b9684bed2174ab20f7c" -"checksum tokio-process 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "382d90f43fa31caebe5d3bc6cfd854963394fff3b8cb59d5146607aaae7e7e43" -"checksum tokio-reactor 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351" -"checksum tokio-rustls 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2d7cf08f990090abd6c6a73cab46fed62f85e8aef8b99e4b918a9f4a637f0676" -"checksum tokio-signal 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c34c6e548f101053321cba3da7cbb87a610b85555884c41b07da2eb91aff12" -"checksum tokio-sync 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee" -"checksum tokio-tcp 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72" -"checksum tokio-threadpool 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "df720b6581784c118f0eb4310796b12b1d242a7eb95f716a8367855325c25f89" -"checksum tokio-timer 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296" -"checksum tokio-udp 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e2a0b10e610b39c38b031a2fcab08e4b82f16ece36504988dcbd81dbba650d82" -"checksum tokio-uds 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "5076db410d6fdc6523df7595447629099a1fdc47b3d9f896220780fa48faf798" +"checksum tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4adb8b3e5f86b707f1b54e7c15b6de52617a823608ccda98a15d3a24222f265a" +"checksum tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"571da51182ec208780505a32528fc5512a8fe1443ab960b3f2f3ef093cd16930" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" "checksum tower-add-origin 0.1.0 (git+https://github.com/pantsbuild/tower-http?rev=56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00)" = "" "checksum tower-direct-service 0.1.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)" = "" @@ -3983,9 +3743,9 @@ dependencies = [ "checksum tower-h2 0.1.0 (git+https://github.com/pantsbuild/tower-h2?rev=44b0efb4983b769283efd5b2a3bc3decbf7c33de)" = "" "checksum tower-http 0.1.0 (git+https://github.com/pantsbuild/tower-http?rev=56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00)" = "" "checksum tower-service 0.2.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)" = "" +"checksum tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" "checksum tower-util 0.1.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)" = "" "checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" -"checksum try_from 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "283d3b89e1368717881a9d51dad843cc435380d8109c9e47d38780a324698d8b" "checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" "checksum uname 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" "checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" @@ -4007,17 +3767,18 @@ dependencies = [ "checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" "checksum walkdir 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c66c0b9792f0a765345452775f3adbd28dde9d33f30d13e5dcc5ae17cf6f3780" "checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" -"checksum want 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" +"checksum want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" "checksum wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" "checksum wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "5205e9afdf42282b192e2310a5b463a6d1c1d774e30dc3c791ac37ab42d2616c" "checksum wasm-bindgen-backend 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "11cdb95816290b525b32587d76419facd99662a07e59d3cdb560488a819d9a45" +"checksum wasm-bindgen-futures 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8bbdd49e3e28b40dec6a9ba8d17798245ce32b019513a845369c641b275135d9" "checksum wasm-bindgen-macro 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "574094772ce6921576fb6f2e3f7497b8a76273b6db092be18fc48a082de09dc3" "checksum 
wasm-bindgen-macro-support 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "e85031354f25eaebe78bb7db1c3d86140312a911a106b2e29f9cc440ce3e7668" "checksum wasm-bindgen-shared 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "f5e7e61fc929f4c0dddb748b102ebf9f632e2b8d739f2016542b4de2965a9601" "checksum wasm-bindgen-webidl 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "ef012a0d93fc0432df126a8eaf547b2dce25a8ce9212e1d3cbeef5c11157975d" "checksum web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)" = "aaf97caf6aa8c2b1dac90faf0db529d9d63c93846cca4911856f78a83cebf53b" "checksum webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1f50e1972865d6b1adb54167d1c8ed48606004c2c9d0ea5f1eeb34d95e863ef" -"checksum webpki-roots 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a262ae37dd9d60f60dd473d1158f9fbebf110ba7b6a5051c8160460f6043718b" +"checksum webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)" = "91cd5736df7f12a964a5067a12c62fa38e1bd8080aff1f80bc29be7c80d19ab4" "checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" "checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" diff --git a/src/rust/engine/Cargo.toml b/src/rust/engine/Cargo.toml index e106e5288be..232d4856993 100644 --- a/src/rust/engine/Cargo.toml +++ b/src/rust/engine/Cargo.toml @@ -81,12 +81,14 @@ default-members = [ ] [dependencies] +async-trait = "0.1" boxfuture = { path = "boxfuture" } bytes = "0.4.5" concrete_time = { path = "concrete_time" } fnv = "1.0.5" fs = { path = "fs" } futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } graph = { path = "graph" } hashing = { path = "hashing" } indexmap = "1.0.2" @@ -94,21 +96,23 @@ itertools = "0.8.2" lazy_static = "1" log = "0.4" logging = { path = "logging" } -num_enum = "0.1.1" +num_cpus = "1" +num_enum = "0.4" parking_lot = "0.6" process_execution = { path = "process_execution" } rand = "0.6" -reqwest = { version = "0.9.22", default_features = false, features = ["rustls-tls"] } +reqwest = { version = "0.10", default_features = false, features = ["stream", "rustls-tls"] } rule_graph = { path = "rule_graph" } sharded_lmdb = { path = "sharded_lmdb" } smallvec = "0.6" store = { path = "fs/store" } +task_executor = { path = "task_executor" } tempfile = "3" time = "0.1.40" +tokio = { version = "0.2", features = ["rt-threaded"] } ui = { path = "ui" } -url = "1.7.1" +url = "2.1" uuid = { version = "0.7", features = ["v4"] } -task_executor = { path = "task_executor" } workunit_store = { path = "workunit_store" } [patch.crates-io] diff --git a/src/rust/engine/engine_cffi/Cargo.toml b/src/rust/engine/engine_cffi/Cargo.toml index 612dabe0bd3..d8978c47ff5 100644 --- a/src/rust/engine/engine_cffi/Cargo.toml +++ b/src/rust/engine/engine_cffi/Cargo.toml @@ -11,6 +11,7 @@ crate-type = ["cdylib"] [dependencies] engine = { path = ".." 
} futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } hashing = { path = "../hashing" } log = "0.4" logging = { path = "../logging" } diff --git a/src/rust/engine/engine_cffi/src/lib.rs b/src/rust/engine/engine_cffi/src/lib.rs index 02854f676b4..56425fc0472 100644 --- a/src/rust/engine/engine_cffi/src/lib.rs +++ b/src/rust/engine/engine_cffi/src/lib.rs @@ -43,6 +43,7 @@ use engine::{ externs, nodes, Core, ExecutionRequest, ExecutionTermination, Function, Handle, Key, Params, RootResult, Rule, Scheduler, Session, Tasks, TypeId, Types, Value, }; +use futures::compat::Future01CompatExt; use futures01::{future, Future}; use hashing::{Digest, EMPTY_DIGEST}; use log::{error, warn, Log}; @@ -896,7 +897,8 @@ pub extern "C" fn capture_snapshots( }) .collect::>(), ) - .map(|values| externs::store_tuple(&values)), + .map(|values| externs::store_tuple(&values)) + .compat(), ) }) .into() @@ -926,11 +928,10 @@ pub extern "C" fn merge_directories( scheduler .core .executor - .block_on(store::Snapshot::merge_directories( - scheduler.core.store(), - digests, - workunit_store, - )) + .block_on( + store::Snapshot::merge_directories(scheduler.core.store(), digests, workunit_store) + .compat(), + ) .map(|dir| nodes::Snapshot::store_directory(&scheduler.core, &dir)) .into() }) @@ -975,13 +976,11 @@ pub extern "C" fn run_local_interactive_process( None => unreachable!() }; - let write_operation = scheduler.core.store().materialize_directory( + scheduler.core.store().materialize_directory( destination, digest, session.workunit_store(), - ); - - scheduler.core.executor.spawn_on_io_pool(write_operation).wait()?; + ).wait()?; } } @@ -1058,7 +1057,7 @@ pub extern "C" fn materialize_directories( let types = &scheduler.core.types; let construct_materialize_directories_results = types.construct_materialize_directories_results; let construct_materialize_directory_result = types.construct_materialize_directory_result; - let work_future = future::join_all( + future::join_all( digests_and_path_prefixes .into_iter() .map(|(digest, path_prefix)| { @@ -1105,9 +1104,8 @@ pub extern "C" fn materialize_directories( &[externs::store_tuple(&entries)], ); output - }); - - scheduler.core.executor.spawn_on_io_pool(work_future).wait() + }) + .wait() }) .into() } @@ -1120,7 +1118,7 @@ pub extern "C" fn init_logging(level: u64, show_rust_3rdparty_logs: bool) { #[no_mangle] pub extern "C" fn setup_pantsd_logger(log_file_ptr: *const raw::c_char, level: u64) -> PyResult { - logging::set_destination(Destination::Pantsd); + logging::set_thread_destination(Destination::Pantsd); let path_str = unsafe { CStr::from_ptr(log_file_ptr).to_string_lossy().into_owned() }; let path = PathBuf::from(path_str); @@ -1134,7 +1132,7 @@ pub extern "C" fn setup_pantsd_logger(log_file_ptr: *const raw::c_char, level: u // Might be called before externs are set, therefore can't return a PyResult #[no_mangle] pub extern "C" fn setup_stderr_logger(level: u64) { - logging::set_destination(Destination::Stderr); + logging::set_thread_destination(Destination::Stderr); LOGGER .set_stderr_logger(level) .expect("Error setting up STDERR logger"); @@ -1173,7 +1171,7 @@ pub extern "C" fn flush_log() { #[no_mangle] pub extern "C" fn override_thread_logging_destination(destination: Destination) { - logging::set_destination(destination); + logging::set_thread_destination(destination); } fn graph_full(scheduler: &Scheduler, subject_types: Vec) -> RuleGraph { @@ -1202,7 +1200,7 @@ where F: FnOnce(&Scheduler) -> T, { let 
scheduler = unsafe { Box::from_raw(scheduler_ptr) }; - let t = f(&scheduler); + let t = scheduler.core.runtime.enter(|| f(&scheduler)); mem::forget(scheduler); t } diff --git a/src/rust/engine/fs/Cargo.toml b/src/rust/engine/fs/Cargo.toml index 3ad78b77ed3..ab49bf23d39 100644 --- a/src/rust/engine/fs/Cargo.toml +++ b/src/rust/engine/fs/Cargo.toml @@ -6,9 +6,9 @@ authors = [ "Pants Build " ] publish = false [dependencies] -boxfuture = { path = "../boxfuture" } +async-trait = "0.1" bytes = "0.4.5" -futures01 = { package = "futures", version = "0.1" } +futures = "0.3" glob = "0.2.11" ignore = "0.4.4" lazy_static = "1" @@ -19,4 +19,4 @@ tempfile = "3" [dev-dependencies] testutil = { path = "../testutil" } -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-core", "macros"] } diff --git a/src/rust/engine/fs/brfs/Cargo.toml b/src/rust/engine/fs/brfs/Cargo.toml index 2d64d480144..d6bb8a73f77 100644 --- a/src/rust/engine/fs/brfs/Cargo.toml +++ b/src/rust/engine/fs/brfs/Cargo.toml @@ -13,6 +13,7 @@ env_logger = "0.5.4" errno = "0.2.3" fuse = "0.3.1" futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } hashing = { path = "../../hashing" } libc = "0.2.39" log = "0.4.1" @@ -22,7 +23,7 @@ serverset = { path = "../../serverset" } store = { path = "../store" } task_executor = { path = "../../task_executor" } time = "0.1.39" -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-threaded", "macros"] } workunit_store = { path = "../../workunit_store" } [dev-dependencies] diff --git a/src/rust/engine/fs/brfs/src/main.rs b/src/rust/engine/fs/brfs/src/main.rs index 8b38bcbd524..77b46137645 100644 --- a/src/rust/engine/fs/brfs/src/main.rs +++ b/src/rust/engine/fs/brfs/src/main.rs @@ -38,6 +38,7 @@ use serverset; use time; +use futures::compat::Future01CompatExt; use hashing::{Digest, Fingerprint}; use log::{debug, error, warn}; use parking_lot::Mutex; @@ -47,6 +48,7 @@ use std::ffi::{CString, OsStr, OsString}; use std::path::Path; use std::sync::Arc; use store::Store; +use tokio::runtime::Handle; use workunit_store::WorkUnitStore; const TTL: time::Timespec = time::Timespec { sec: 0, nsec: 0 }; @@ -177,11 +179,12 @@ impl BuildResultFS { non_executable_inode })) } - Vacant(entry) => match self.runtime.block_on(self.store.load_file_bytes_with( - digest, - |_| (), - WorkUnitStore::new(), - )) { + Vacant(entry) => match self.runtime.block_on( + self + .store + .load_file_bytes_with(digest, |_| (), WorkUnitStore::new()) + .compat(), + ) { Ok(Some(((), _metadata))) => { let executable_inode = self.next_inode; self.next_inode += 1; @@ -219,10 +222,12 @@ impl BuildResultFS { pub fn inode_for_directory(&mut self, digest: Digest) -> Result, String> { match self.directory_inode_cache.entry(digest) { Occupied(entry) => Ok(Some(*entry.get())), - Vacant(entry) => match self - .runtime - .block_on(self.store.load_directory(digest, WorkUnitStore::new())) - { + Vacant(entry) => match self.runtime.block_on( + self + .store + .load_directory(digest, WorkUnitStore::new()) + .compat(), + ) { Ok(Some(_)) => { // TODO: Kick off some background futures to pre-load the contents of this Directory into // an in-memory cache. Keep a background CPU pool driving those Futures. @@ -311,9 +316,12 @@ impl BuildResultFS { entry_type: EntryType::Directory, .. 
}) => { - let maybe_directory = self - .runtime - .block_on(self.store.load_directory(digest, WorkUnitStore::new())); + let maybe_directory = self.runtime.block_on( + self + .store + .load_directory(digest, WorkUnitStore::new()) + .compat(), + ); match maybe_directory { Ok(Some((directory, _metadata))) => { @@ -405,95 +413,100 @@ impl fuse::Filesystem for BuildResultFS { name: &OsStr, reply: fuse::ReplyEntry, ) { - let r = match (parent, name.to_str()) { - (ROOT, Some("digest")) => Ok(dir_attr_for(DIGEST_ROOT)), - (ROOT, Some("directory")) => Ok(dir_attr_for(DIRECTORY_ROOT)), - (DIGEST_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) { - Ok(digest) => self - .inode_for_file(digest, true) - .map_err(|err| { - error!("Error loading file by digest {}: {}", digest_str, err); - libc::EINVAL - }) - .and_then(|maybe_inode| { - maybe_inode - .and_then(|inode| self.file_attr_for(inode)) - .ok_or(libc::ENOENT) - }), - Err(err) => { - warn!("Invalid digest for file in digest root: {}", err); - Err(libc::ENOENT) - } - }, - (DIRECTORY_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) { - Ok(digest) => self.dir_attr_for(digest), - Err(err) => { - warn!("Invalid digest for directory in directory root: {}", err); - Err(libc::ENOENT) - } - }, - (parent, Some(filename)) => { - let maybe_cache_entry = self - .inode_digest_cache - .get(&parent) - .cloned() - .ok_or(libc::ENOENT); - maybe_cache_entry - .and_then(|cache_entry| { - let parent_digest = cache_entry.digest; - self - .runtime - .block_on( - self - .store - .load_directory(parent_digest, WorkUnitStore::new()), - ) - .map_err(|err| { - error!("Error reading directory {:?}: {}", parent_digest, err); - libc::EINVAL - })? - .and_then(|(directory, _metadata)| self.node_for_digest(&directory, filename)) - .ok_or(libc::ENOENT) - }) - .and_then(|node| match node { - Node::Directory(directory_node) => { - let digest_result: Result = directory_node.get_digest().into(); - let digest = digest_result.map_err(|err| { - error!("Error parsing digest: {:?}", err); - libc::ENOENT - })?; - self.dir_attr_for(digest) - } - Node::File(file_node) => { - let digest_result: Result = file_node.get_digest().into(); - let digest = digest_result.map_err(|err| { - error!("Error parsing digest: {:?}", err); - libc::ENOENT - })?; + let runtime = self.runtime.clone(); + runtime.enter(|| { + let r = match (parent, name.to_str()) { + (ROOT, Some("digest")) => Ok(dir_attr_for(DIGEST_ROOT)), + (ROOT, Some("directory")) => Ok(dir_attr_for(DIRECTORY_ROOT)), + (DIGEST_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) { + Ok(digest) => self + .inode_for_file(digest, true) + .map_err(|err| { + error!("Error loading file by digest {}: {}", digest_str, err); + libc::EINVAL + }) + .and_then(|maybe_inode| { + maybe_inode + .and_then(|inode| self.file_attr_for(inode)) + .ok_or(libc::ENOENT) + }), + Err(err) => { + warn!("Invalid digest for file in digest root: {}", err); + Err(libc::ENOENT) + } + }, + (DIRECTORY_ROOT, Some(digest_str)) => match digest_from_filepath(digest_str) { + Ok(digest) => self.dir_attr_for(digest), + Err(err) => { + warn!("Invalid digest for directory in directory root: {}", err); + Err(libc::ENOENT) + } + }, + (parent, Some(filename)) => { + let maybe_cache_entry = self + .inode_digest_cache + .get(&parent) + .cloned() + .ok_or(libc::ENOENT); + maybe_cache_entry + .and_then(|cache_entry| { + let parent_digest = cache_entry.digest; self - .inode_for_file(digest, file_node.get_is_executable()) + .runtime + .block_on( + self + 
.store + .load_directory(parent_digest, WorkUnitStore::new()) + .compat(), + ) .map_err(|err| { - error!("Error loading file by digest {}: {}", filename, err); + error!("Error reading directory {:?}: {}", parent_digest, err); libc::EINVAL - }) - .and_then(|maybe_inode| { - maybe_inode - .and_then(|inode| self.file_attr_for(inode)) - .ok_or(libc::ENOENT) - }) - } - }) + })? + .and_then(|(directory, _metadata)| self.node_for_digest(&directory, filename)) + .ok_or(libc::ENOENT) + }) + .and_then(|node| match node { + Node::Directory(directory_node) => { + let digest_result: Result = directory_node.get_digest().into(); + let digest = digest_result.map_err(|err| { + error!("Error parsing digest: {:?}", err); + libc::ENOENT + })?; + self.dir_attr_for(digest) + } + Node::File(file_node) => { + let digest_result: Result = file_node.get_digest().into(); + let digest = digest_result.map_err(|err| { + error!("Error parsing digest: {:?}", err); + libc::ENOENT + })?; + self + .inode_for_file(digest, file_node.get_is_executable()) + .map_err(|err| { + error!("Error loading file by digest {}: {}", filename, err); + libc::EINVAL + }) + .and_then(|maybe_inode| { + maybe_inode + .and_then(|inode| self.file_attr_for(inode)) + .ok_or(libc::ENOENT) + }) + } + }) + } + _ => Err(libc::ENOENT), + }; + match r { + Ok(r) => reply.entry(&TTL, &r, 1), + Err(err) => reply.error(err), } - _ => Err(libc::ENOENT), - }; - match r { - Ok(r) => reply.entry(&TTL, &r, 1), - Err(err) => reply.error(err), - } + }) } fn getattr(&mut self, _req: &fuse::Request<'_>, inode: Inode, reply: fuse::ReplyAttr) { - match inode { + let runtime = self.runtime.clone(); + runtime.enter(|| match inode { ROOT => reply.attr(&TTL, &dir_attr_for(ROOT)), DIGEST_ROOT => reply.attr(&TTL, &dir_attr_for(DIGEST_ROOT)), DIRECTORY_ROOT => reply.attr(&TTL, &dir_attr_for(DIRECTORY_ROOT)), @@ -511,7 +524,7 @@ impl fuse::Filesystem for BuildResultFS { }) => reply.attr(&TTL, &dir_attr_for(inode)), _ => reply.error(libc::ENOENT), }, - } + }) } // TODO: Find out whether fh is ever passed if open isn't explicitly implemented (and whether offset is ever negative) @@ -524,48 +537,56 @@ impl fuse::Filesystem for BuildResultFS { size: u32, reply: fuse::ReplyData, ) { - match self.inode_digest_cache.get(&inode) { - Some(&InodeDetails { - digest, - entry_type: EntryType::File, - .. - }) => { - let reply = Arc::new(Mutex::new(Some(reply))); - let reply2 = reply.clone(); - // TODO: Read from a cache of Futures driven from a CPU pool, so we can merge in-flight - // requests, rather than reading from the store directly here. - let result: Result<(), ()> = self - .runtime - .block_on(self.store.load_file_bytes_with( - digest, - move |bytes| { - let begin = std::cmp::min(offset as usize, bytes.len()); - let end = std::cmp::min(offset as usize + size as usize, bytes.len()); - let mut reply = reply.lock(); - reply.take().unwrap().data(&bytes.slice(begin, end)); - }, - WorkUnitStore::new(), - )) - .map(|v| { - if v.is_none() { + let runtime = self.runtime.clone(); + runtime.enter(|| { + match self.inode_digest_cache.get(&inode) { + Some(&InodeDetails { + digest, + entry_type: EntryType::File, + .. + }) => { + let reply = Arc::new(Mutex::new(Some(reply))); + let reply2 = reply.clone(); + // TODO: Read from a cache of Futures driven from a CPU pool, so we can merge in-flight + // requests, rather than reading from the store directly here. 
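The brfs callbacks in these hunks wrap each futures 0.1 store call in `.compat()` before handing it to `block_on`, and wrap the callback bodies in `runtime.enter(|| ...)` so that the tokio 0.2 runtime context is current on the FUSE callback threads. Below is a minimal sketch of that 0.1-to-std bridge, assuming the `futures01 = { package = "futures", version = "0.1" }` alias plus the futures 0.3 `compat` and tokio `rt-threaded`/`macros` features enabled elsewhere in this patch; `legacy_load` is a hypothetical stand-in, not an API from this codebase.

```rust
use futures::compat::Future01CompatExt;
use futures01::Future as Future01;

// Hypothetical stand-in for a store call that still returns a futures 0.1
// future (like load_file_bytes_with in the hunk above).
fn legacy_load() -> impl Future01<Item = Vec<u8>, Error = String> {
  futures01::future::ok::<_, String>(vec![0u8; 4])
}

#[tokio::main]
async fn main() {
  // `.compat()` turns a futures 0.1 `Future<Item, Error>` into a
  // `std::future::Future<Output = Result<Item, Error>>`, which can be awaited
  // directly or handed to a tokio 0.2 `block_on`.
  let bytes = legacy_load().compat().await.expect("load failed");
  assert_eq!(4, bytes.len());
}
```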
+ let result: Result<(), ()> = self + .runtime + .block_on( + self + .store + .load_file_bytes_with( + digest, + move |bytes| { + let begin = std::cmp::min(offset as usize, bytes.len()); + let end = std::cmp::min(offset as usize + size as usize, bytes.len()); + let mut reply = reply.lock(); + reply.take().unwrap().data(&bytes.slice(begin, end)); + }, + WorkUnitStore::new(), + ) + .compat(), + ) + .map(|v| { + if v.is_none() { + let maybe_reply = reply2.lock().take(); + if let Some(reply) = maybe_reply { + reply.error(libc::ENOENT); + } + } + }) + .or_else(|err| { + error!("Error loading bytes for {:?}: {}", digest, err); let maybe_reply = reply2.lock().take(); if let Some(reply) = maybe_reply { - reply.error(libc::ENOENT); + reply.error(libc::EINVAL); } - } - }) - .or_else(|err| { - error!("Error loading bytes for {:?}: {}", digest, err); - let maybe_reply = reply2.lock().take(); - if let Some(reply) = maybe_reply { - reply.error(libc::EINVAL); - } - Ok(()) - }); - result.expect("Error from read future which should have been handled in the future "); + Ok(()) + }); + result.expect("Error from read future which should have been handled in the future "); + } + _ => reply.error(libc::ENOENT), } - _ => reply.error(libc::ENOENT), - } + }) } fn readdir( @@ -577,23 +598,26 @@ impl fuse::Filesystem for BuildResultFS { offset: i64, mut reply: fuse::ReplyDirectory, ) { - match self.readdir_entries(inode) { - Ok(entries) => { - // 0 is a magic offset which means no offset, whereas a non-zero offset means start - // _after_ that entry. Inconsistency is fun. - let to_skip = if offset == 0 { 0 } else { offset + 1 } as usize; - let mut i = offset; - for entry in entries.into_iter().skip(to_skip) { - if reply.add(entry.inode, i, entry.kind, entry.name) { - // Buffer is full, don't add more entries. - break; + let runtime = self.runtime.clone(); + runtime.enter(|| { + match self.readdir_entries(inode) { + Ok(entries) => { + // 0 is a magic offset which means no offset, whereas a non-zero offset means start + // _after_ that entry. Inconsistency is fun. + let to_skip = if offset == 0 { 0 } else { offset + 1 } as usize; + let mut i = offset; + for entry in entries.into_iter().skip(to_skip) { + if reply.add(entry.inode, i, entry.kind, entry.name) { + // Buffer is full, don't add more entries. + break; + } + i += 1; } - i += 1; + reply.ok(); } - reply.ok(); + Err(err) => reply.error(err), } - Err(err) => reply.error(err), - } + }) } // If this isn't implemented, OSX will try to manipulate ._ files to manage xattrs out of band, which adds both overhead and logspam. 
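The hunks that follow switch brfs's `main` and its test modules to the `#[tokio::main]` and `#[tokio::test]` macros, and construct `task_executor::Executor::new(Handle::current())` from the runtime those macros create. A minimal sketch of that runtime-sharing pattern, assuming tokio 0.2 with the "rt-threaded" and "macros" features from the Cargo.toml changes; the spawned task is only a placeholder.

```rust
use tokio::runtime::Handle;

// `#[tokio::main]` builds a tokio 0.2 runtime and drives this async fn on it,
// so there is no hand-rolled Runtime to thread around.
#[tokio::main]
async fn main() {
  // The ported Executor is built from the ambient runtime's handle rather than
  // owning a runtime of its own; the same handle can spawn work onto that
  // shared runtime.
  let handle = Handle::current();
  let answer = handle.spawn(async { 40 + 2 }).await.expect("task panicked");
  assert_eq!(42, answer);
}
```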
@@ -604,7 +628,10 @@ impl fuse::Filesystem for BuildResultFS { _size: u32, reply: fuse::ReplyXattr, ) { - reply.size(0); + let runtime = self.runtime.clone(); + runtime.enter(|| { + reply.size(0); + }) } } @@ -630,7 +657,8 @@ pub fn mount<'a, P: AsRef>( fs } -fn main() { +#[tokio::main] +async fn main() { let default_store_path = dirs::home_dir() .expect("Couldn't find homedir") .join(".cache") @@ -697,7 +725,7 @@ fn main() { } else { None }; - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = match args.value_of("server-address") { Some(address) => Store::with_remote( diff --git a/src/rust/engine/fs/brfs/src/syscall_tests.rs b/src/rust/engine/fs/brfs/src/syscall_tests.rs index 93d0145b330..b4dd7a38d5d 100644 --- a/src/rust/engine/fs/brfs/src/syscall_tests.rs +++ b/src/rust/engine/fs/brfs/src/syscall_tests.rs @@ -4,24 +4,28 @@ use super::mount; use super::tests::digest_to_filepath; use crate::tests::make_dirs; +use futures::compat::Future01CompatExt; use libc; use std::ffi::CString; use std::path::Path; use store::Store; use testutil::data::TestData; +use tokio::runtime::Handle; -#[test] -fn read_file_by_digest_exact_bytes() { +#[tokio::test] +async fn read_file_by_digest_exact_bytes() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); diff --git a/src/rust/engine/fs/brfs/src/tests.rs b/src/rust/engine/fs/brfs/src/tests.rs index 3c569723785..a34ddfd35c8 100644 --- a/src/rust/engine/fs/brfs/src/tests.rs +++ b/src/rust/engine/fs/brfs/src/tests.rs @@ -2,18 +2,20 @@ use tempfile; use testutil; use crate::mount; +use futures::compat::Future01CompatExt; use hashing; use store::Store; use testutil::{ data::{TestData, TestDirectory}, file, }; +use tokio::runtime::Handle; -#[test] -fn missing_digest() { +#[tokio::test] +async fn missing_digest() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -26,18 +28,20 @@ fn missing_digest() { .exists()); } -#[test] -fn read_file_by_digest() { +#[tokio::test] +async fn read_file_by_digest() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); let test_bytes = TestData::roland(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); @@ -49,10 +53,10 @@ fn read_file_by_digest() { assert!(file::is_executable(&file_path)); } -#[test] -fn list_directory() { +#[tokio::test] +async fn list_directory() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = 
task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -60,11 +64,15 @@ fn list_directory() { let test_bytes = TestData::roland(); let test_directory = TestDirectory::containing_roland(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.record_directory(&test_directory.directory(), false)) + store + .record_directory(&test_directory.directory(), false) + .compat() + .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); @@ -75,10 +83,10 @@ fn list_directory() { assert_eq!(vec!["roland"], file::list_dir(&virtual_dir)); } -#[test] -fn read_file_from_directory() { +#[tokio::test] +async fn read_file_from_directory() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -86,11 +94,15 @@ fn read_file_from_directory() { let test_bytes = TestData::roland(); let test_directory = TestDirectory::containing_roland(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.record_directory(&test_directory.directory(), false)) + store + .record_directory(&test_directory.directory(), false) + .compat() + .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); @@ -103,10 +115,10 @@ fn read_file_from_directory() { assert!(!file::is_executable(&roland)); } -#[test] -fn list_recursive_directory() { +#[tokio::test] +async fn list_recursive_directory() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -116,17 +128,25 @@ fn list_recursive_directory() { let test_directory = TestDirectory::containing_roland(); let recursive_directory = TestDirectory::recursive(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.store_file_bytes(treat_bytes.bytes(), false)) + store + .store_file_bytes(treat_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.record_directory(&test_directory.directory(), false)) + store + .record_directory(&test_directory.directory(), false) + .compat() + .await .expect("Storing directory"); - runtime - .block_on(store.record_directory(&recursive_directory.directory(), false)) + store + .record_directory(&recursive_directory.directory(), false) + .compat() + .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); @@ -138,10 +158,10 @@ fn list_recursive_directory() { assert_eq!(vec!["roland"], file::list_dir(&virtual_dir.join("cats"))); } -#[test] -fn read_file_from_recursive_directory() { +#[tokio::test] +async fn read_file_from_recursive_directory() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + 
let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -151,17 +171,25 @@ fn read_file_from_recursive_directory() { let test_directory = TestDirectory::containing_roland(); let recursive_directory = TestDirectory::recursive(); - runtime - .block_on(store.store_file_bytes(test_bytes.bytes(), false)) + store + .store_file_bytes(test_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.store_file_bytes(treat_bytes.bytes(), false)) + store + .store_file_bytes(treat_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.record_directory(&test_directory.directory(), false)) + store + .record_directory(&test_directory.directory(), false) + .compat() + .await .expect("Storing directory"); - runtime - .block_on(store.record_directory(&recursive_directory.directory(), false)) + store + .record_directory(&recursive_directory.directory(), false) + .compat() + .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); @@ -178,10 +206,10 @@ fn read_file_from_recursive_directory() { assert!(!file::is_executable(&roland)); } -#[test] -fn files_are_correctly_executable() { +#[tokio::test] +async fn files_are_correctly_executable() { let (store_dir, mount_dir) = make_dirs(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::local_only(runtime.clone(), store_dir.path()).expect("Error creating local store"); @@ -189,11 +217,15 @@ fn files_are_correctly_executable() { let treat_bytes = TestData::catnip(); let directory = TestDirectory::with_mixed_executable_files(); - runtime - .block_on(store.store_file_bytes(treat_bytes.bytes(), false)) + store + .store_file_bytes(treat_bytes.bytes(), false) + .compat() + .await .expect("Storing bytes"); - runtime - .block_on(store.record_directory(&directory.directory(), false)) + store + .record_directory(&directory.directory(), false) + .compat() + .await .expect("Storing directory"); let _fs = mount(mount_dir.path(), store, runtime).expect("Mounting"); diff --git a/src/rust/engine/fs/fs_util/Cargo.toml b/src/rust/engine/fs/fs_util/Cargo.toml index c3af587dd7f..a0da6f094da 100644 --- a/src/rust/engine/fs/fs_util/Cargo.toml +++ b/src/rust/engine/fs/fs_util/Cargo.toml @@ -12,6 +12,7 @@ clap = "2" env_logger = "0.5.4" fs = { path = ".." 
} futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } hashing = { path = "../../hashing" } parking_lot = "0.6" protobuf = { version = "2.0.6", features = ["with-bytes"] } @@ -21,5 +22,5 @@ serde_json = "1.0" serde_derive = "1.0" store = { path = "../store" } task_executor = { path = "../../task_executor" } -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-threaded", "macros"] } workunit_store = { path = "../../workunit_store" } diff --git a/src/rust/engine/fs/fs_util/src/main.rs b/src/rust/engine/fs/fs_util/src/main.rs index 31b886068db..786304898be 100644 --- a/src/rust/engine/fs/fs_util/src/main.rs +++ b/src/rust/engine/fs/fs_util/src/main.rs @@ -37,6 +37,8 @@ use boxfuture::{try_future, BoxFuture, Boxable}; use bytes::Bytes; use clap::{value_t, App, Arg, SubCommand}; use fs::GlobMatching; +use futures::compat::Future01CompatExt; +use futures::future::TryFutureExt; use futures01::{future, Future}; use hashing::{Digest, Fingerprint}; use parking_lot::Mutex; @@ -49,6 +51,7 @@ use std::process::exit; use std::sync::Arc; use std::time::Duration; use store::{Snapshot, Store, StoreFileByDigest, UploadSummary}; +use tokio::runtime::Handle; #[derive(Debug)] enum ExitCode { @@ -71,7 +74,8 @@ struct SummaryWithDigest { summary: Option, } -fn main() { +#[tokio::main] +async fn main() { env_logger::init(); match execute( @@ -254,7 +258,7 @@ to this directory.", .default_value("3") ) .get_matches(), - ) { + ).await { Ok(_) => {} Err(err) => { eprintln!("{}", err.0); @@ -263,12 +267,14 @@ to this directory.", }; } -fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { +// TODO: Sure, it's a bit long... +#[allow(clippy::cognitive_complexity)] +async fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { let store_dir = top_match .value_of("local-store-path") .map(PathBuf::from) .unwrap_or_else(Store::default_path); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let (store, store_has_remote) = { let (store_result, store_has_remote) = match top_match.values_of("server-address") { Some(cas_address) => { @@ -353,11 +359,14 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { .parse::() .expect("size_bytes must be a non-negative number"); let digest = Digest(fingerprint, size_bytes); - let write_result = runtime.block_on(store.load_file_bytes_with( - digest, - |bytes| io::stdout().write_all(&bytes).unwrap(), - workunit_store::WorkUnitStore::new(), - ))?; + let write_result = store + .load_file_bytes_with( + digest, + |bytes| io::stdout().write_all(&bytes).unwrap(), + workunit_store::WorkUnitStore::new(), + ) + .compat() + .await?; write_result .ok_or_else(|| { ExitError( @@ -380,18 +389,19 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { ); let file = posix_fs .stat_sync(PathBuf::from(path.file_name().unwrap())) - .unwrap(); + .unwrap() + .ok_or_else(|| format!("Tried to save file {:?} but it did not exist", path))?; match file { fs::Stat::File(f) => { - let digest = runtime - .block_on( - store::OneOffStoreFileByDigest::new(store.clone(), Arc::new(posix_fs)) - .store_by_digest(f, workunit_store::WorkUnitStore::new()), - ) + let digest = store::OneOffStoreFileByDigest::new(store.clone(), Arc::new(posix_fs)) + .store_by_digest(f, workunit_store::WorkUnitStore::new()) + .compat() + .await .unwrap(); - let report = runtime - .block_on(ensure_uploaded_to_remote(&store, store_has_remote, digest)) + let 
report = ensure_uploaded_to_remote(&store, store_has_remote, digest) + .compat() + .await .unwrap(); print_upload_summary(args.value_of("output-mode"), &report); @@ -419,12 +429,10 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { .parse::() .expect("size_bytes must be a non-negative number"); let digest = Digest(fingerprint, size_bytes); - runtime - .block_on(store.materialize_directory( - destination, - digest, - workunit_store::WorkUnitStore::new(), - )) + store + .materialize_directory(destination, digest, workunit_store::WorkUnitStore::new()) + .compat() + .await .map(|metadata| { eprintln!("{}", serde_json::to_string_pretty(&metadata).unwrap()); }) @@ -442,33 +450,35 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { args.value_of("root").unwrap(), )); let store_copy = store.clone(); - let digest = runtime.block_on( - posix_fs - .expand(fs::PathGlobs::create( - &args - .values_of("globs") - .unwrap() - .map(str::to_string) - .collect::>(), - // By using `Ignore`, we say that we don't care if some globs fail to expand. Is - // that a valid assumption? - fs::StrictGlobMatching::Ignore, - fs::GlobExpansionConjunction::AllMatch, - )?) - .map_err(|e| format!("Error expanding globs: {:?}", e)) - .and_then(move |paths| { - Snapshot::from_path_stats( - store_copy.clone(), - &store::OneOffStoreFileByDigest::new(store_copy, posix_fs), - paths, - workunit_store::WorkUnitStore::new(), - ) - }) - .map(|snapshot| snapshot.digest), - )?; + let digest = posix_fs + .expand(fs::PathGlobs::create( + &args + .values_of("globs") + .unwrap() + .map(str::to_string) + .collect::>(), + // By using `Ignore`, we say that we don't care if some globs fail to expand. Is + // that a valid assumption? + fs::StrictGlobMatching::Ignore, + fs::GlobExpansionConjunction::AllMatch, + )?) 
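
`task_executor::Executor::new(Handle::current())` reflects that the executor no longer owns its own runtime: it borrows the one the `#[tokio::main]` macro already started. A hypothetical, cut-down wrapper (not the real `task_executor` API) showing why a `Handle` is taken at construction time:

```rust
use tokio::runtime::Handle;

// Hypothetical executor wrapper, analogous in spirit to `task_executor::Executor`:
// spawning no longer creates a runtime, it reuses the handle of the one already running.
#[derive(Clone)]
struct Executor {
    handle: Handle,
}

impl Executor {
    fn new(handle: Handle) -> Executor {
        Executor { handle }
    }

    fn spawn<F>(&self, future: F) -> tokio::task::JoinHandle<F::Output>
    where
        F: std::future::Future + Send + 'static,
        F::Output: Send + 'static,
    {
        self.handle.spawn(future)
    }
}

#[tokio::main]
async fn main() {
    // Inside a #[tokio::main] (or #[tokio::test]) body, Handle::current() is available.
    let executor = Executor::new(Handle::current());
    let joined = executor.spawn(async { 2 + 2 }).await.unwrap();
    assert_eq!(joined, 4);
}
```
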
+ .compat() + .map_err(|e| format!("Error expanding globs: {:?}", e)) + .and_then(move |paths| { + Snapshot::from_path_stats( + store_copy.clone(), + &store::OneOffStoreFileByDigest::new(store_copy, posix_fs), + paths, + workunit_store::WorkUnitStore::new(), + ) + }) + .map(|snapshot| snapshot.digest) + .compat() + .await?; - let report = runtime - .block_on(ensure_uploaded_to_remote(&store, store_has_remote, digest)) + let report = ensure_uploaded_to_remote(&store, store_has_remote, digest) + .compat() + .await .unwrap(); print_upload_summary(args.value_of("output-mode"), &report); @@ -483,37 +493,37 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { .expect("size_bytes must be a non-negative number"); let digest = Digest(fingerprint, size_bytes); let proto_bytes = match args.value_of("output-format").unwrap() { - "binary" => runtime - .block_on(store.load_directory(digest, workunit_store::WorkUnitStore::new())) + "binary" => store + .load_directory(digest, workunit_store::WorkUnitStore::new()) + .compat() + .await .map(|maybe_d| maybe_d.map(|(d, _metadata)| d.write_to_bytes().unwrap())), - "text" => runtime - .block_on(store.load_directory(digest, workunit_store::WorkUnitStore::new())) + "text" => store + .load_directory(digest, workunit_store::WorkUnitStore::new()) + .compat() + .await .map(|maybe_p| maybe_p.map(|(p, _metadata)| format!("{:?}\n", p).as_bytes().to_vec())), - "recursive-file-list" => runtime - .block_on(expand_files(store, digest)) - .map(|maybe_v| { + "recursive-file-list" => expand_files(store, digest).compat().await.map(|maybe_v| { + maybe_v + .map(|v| { + v.into_iter() + .map(|(name, _digest)| format!("{}\n", name)) + .collect::>() + .join("") + }) + .map(String::into_bytes) + }), + "recursive-file-list-with-digests" => { + expand_files(store, digest).compat().await.map(|maybe_v| { maybe_v .map(|v| { v.into_iter() - .map(|(name, _digest)| format!("{}\n", name)) + .map(|(name, digest)| format!("{} {} {}\n", name, digest.0, digest.1)) .collect::>() .join("") }) .map(String::into_bytes) - }), - "recursive-file-list-with-digests" => { - runtime - .block_on(expand_files(store, digest)) - .map(|maybe_v| { - maybe_v - .map(|v| { - v.into_iter() - .map(|(name, digest)| format!("{} {} {}\n", name, digest.0, digest.1)) - .collect::>() - .join("") - }) - .map(String::into_bytes) - }) + }) } format => Err(format!( "Unexpected value of --output-format arg: {}", @@ -541,12 +551,12 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { .parse::() .expect("size_bytes must be a non-negative number"); let digest = Digest(fingerprint, size_bytes); - let v = match runtime.block_on(store.load_file_bytes_with( - digest, - |bytes| bytes, - workunit_store::WorkUnitStore::new(), - ))? { - None => runtime.block_on( + let v = match store + .load_file_bytes_with(digest, |bytes| bytes, workunit_store::WorkUnitStore::new()) + .compat() + .await? + { + None => { store .load_directory(digest, workunit_store::WorkUnitStore::new()) .map(|maybe_dir| { @@ -557,8 +567,10 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> { .expect("Error serializing Directory proto"), ) }) - }), - )?, + }) + .compat() + .await? 
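
The snapshot branch above bridges in both directions: the now-std `expand` future is adapted down to futures 0.1 with `.compat()` so it can still be chained with 0.1 combinators (`Snapshot::from_path_stats` has not been ported yet), and the finished chain is adapted back up and awaited. A sketch of that round trip, with `legacy_double` as a hypothetical stand-in for the unported combinator target:

```rust
use futures::compat::Future01CompatExt; // futures 0.1 -> std
use futures::future::{FutureExt, TryFutureExt}; // `.boxed()` and std -> futures 0.1 `.compat()`
use futures01::Future as Future01;

// Hypothetical legacy combinator target (standing in for Snapshot::from_path_stats).
fn legacy_double(x: u64) -> impl Future01<Item = u64, Error = String> {
    futures01::future::ok(x * 2)
}

#[tokio::main]
async fn main() -> Result<(), String> {
    // A std future (here just an async block standing in for `PosixFS::expand`)...
    let std_future = async { Ok::<u64, String>(21) };

    // ...boxed (to make it Unpin) and adapted down to futures 0.1 so a legacy
    // combinator chain can consume it...
    let chained01 = std_future.boxed().compat().and_then(legacy_double);

    // ...then adapted back up so the whole chain can simply be awaited.
    let value = chained01.compat().await?;
    assert_eq!(value, 42);
    Ok(())
}
```
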
+ } Some((bytes, _metadata)) => Some(bytes), }; match v { diff --git a/src/rust/engine/fs/src/glob_matching.rs b/src/rust/engine/fs/src/glob_matching.rs index 9f397ac6ffa..00e2cb934f5 100644 --- a/src/rust/engine/fs/src/glob_matching.rs +++ b/src/rust/engine/fs/src/glob_matching.rs @@ -6,8 +6,8 @@ use std::fmt::Display; use std::path::{Path, PathBuf}; use std::sync::Arc; -use boxfuture::{BoxFuture, Boxable}; -use futures01::{future, Future}; +use async_trait::async_trait; +use futures::future::{self, BoxFuture, TryFutureExt}; use glob::Pattern; use log::warn; use parking_lot::Mutex; @@ -17,6 +17,7 @@ use crate::{ StrictGlobMatching, VFS, }; +#[async_trait] pub trait GlobMatching: VFS { /// /// Canonicalize the Link for the given Path to an underlying File or Dir. May result @@ -26,15 +27,18 @@ pub trait GlobMatching: VFS { /// /// TODO: Should handle symlink loops (which would exhibit as an infinite loop in expand). /// - fn canonicalize(&self, symbolic_path: PathBuf, link: Link) -> BoxFuture, E> { - GlobMatchingImplementation::canonicalize(self, symbolic_path, link) + async fn canonicalize(&self, symbolic_path: PathBuf, link: Link) -> Result, E> { + GlobMatchingImplementation::canonicalize(self, symbolic_path, link).await } /// /// Recursively expands PathGlobs into PathStats while applying excludes. /// - fn expand(&self, path_globs: PathGlobs) -> BoxFuture, E> { - GlobMatchingImplementation::expand(self, path_globs) + /// TODO: See the note on references in ASYNC.md. + /// + fn expand<'a, 'b>(&'a self, path_globs: PathGlobs) -> BoxFuture<'b, Result, E>> { + let fs = self.clone(); + Box::pin(async move { GlobMatchingImplementation::expand(&fs, path_globs).await }) } } @@ -44,70 +48,64 @@ impl> GlobMatching for T {} // traits don't allow specifying private methods (and we don't want to use a top-level `fn` because // it's much more awkward than just specifying `&self`). // The methods of `GlobMatching` are forwarded to methods here. +#[async_trait] trait GlobMatchingImplementation: VFS { - fn directory_listing( + async fn directory_listing( &self, canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, exclude: &Arc, - ) -> BoxFuture, E> { + ) -> Result, E> { // List the directory. - let context = self.clone(); - let exclude = exclude.clone(); + let dir_listing = self.scandir(canonical_dir).await?; - self - .scandir(canonical_dir) - .and_then(move |dir_listing| { - // Match any relevant Stats, and join them into PathStats. - future::join_all( - dir_listing - .0 - .iter() - .filter(|stat| { - // Match relevant filenames. - stat - .path() - .file_name() - .map(|file_name| wildcard.matches_path(Path::new(file_name))) - .unwrap_or(false) - }) - .filter_map(|stat| { - // Append matched filenames. - stat - .path() - .file_name() - .map(|file_name| symbolic_path.join(file_name)) - .map(|symbolic_stat_path| (symbolic_stat_path, stat)) - }) - .map(|(stat_symbolic_path, stat)| { - // Canonicalize matched PathStats, and filter paths that are ignored by local excludes. - // Context ("global") ignore patterns are applied during `scandir`. - if exclude.is_ignored(&stat) { - future::ok(None).to_boxed() - } else { - match stat { - Stat::Link(l) => context.canonicalize(stat_symbolic_path, l.clone()), - Stat::Dir(d) => { - future::ok(Some(PathStat::dir(stat_symbolic_path, d.clone()))).to_boxed() - } - Stat::File(f) => { - future::ok(Some(PathStat::file(stat_symbolic_path, f.clone()))).to_boxed() - } - } + // Match any relevant Stats, and join them into PathStats. 
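
The new `GlobMatching` trait shape relies on two mechanisms: `#[async_trait]` for the async methods, and an explicit `futures::future::BoxFuture` plus `Box::pin(async move { ... })` for `expand`, which still has futures 0.1 callers (see ASYNC.md). A cut-down sketch of that shape with hypothetical names:

```rust
use async_trait::async_trait;
use futures::future::BoxFuture;

// Async methods go through #[async_trait]; methods that must be consumed by legacy code
// keep an explicit BoxFuture return type and wrap an async block with Box::pin.
#[async_trait]
trait Lookup: Clone + Send + Sync + 'static {
    async fn read(&self, key: String) -> Result<String, String>;

    // Explicit BoxFuture so futures 0.1 callers can hold the future by value.
    fn read_boxed<'a, 'b>(&'a self, key: String) -> BoxFuture<'b, Result<String, String>> {
        let this = self.clone();
        Box::pin(async move { this.read(key).await })
    }
}

#[derive(Clone)]
struct Constant(String);

#[async_trait]
impl Lookup for Constant {
    async fn read(&self, key: String) -> Result<String, String> {
        Ok(format!("{} => {}", key, self.0))
    }
}

#[tokio::main]
async fn main() {
    let l = Constant("value".to_string());
    assert_eq!(l.read_boxed("key".to_string()).await.unwrap(), "key => value");
}
```
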
+ let path_stats = future::try_join_all( + dir_listing + .0 + .iter() + .filter(|stat| { + // Match relevant filenames. + stat + .path() + .file_name() + .map(|file_name| wildcard.matches_path(Path::new(file_name))) + .unwrap_or(false) + }) + .filter_map(|stat| { + // Append matched filenames. + stat + .path() + .file_name() + .map(|file_name| symbolic_path.join(file_name)) + .map(|symbolic_stat_path| (symbolic_stat_path, stat)) + }) + .map(|(stat_symbolic_path, stat)| { + let context = self.clone(); + let exclude = exclude.clone(); + async move { + // Canonicalize matched PathStats, and filter paths that are ignored by local excludes. + // Context ("global") ignore patterns are applied during `scandir`. + if exclude.is_ignored(&stat) { + Ok(None) + } else { + match stat { + Stat::Link(l) => context.canonicalize(stat_symbolic_path, l.clone()).await, + Stat::Dir(d) => Ok(Some(PathStat::dir(stat_symbolic_path, d.clone()))), + Stat::File(f) => Ok(Some(PathStat::file(stat_symbolic_path, f.clone()))), } - }) - .collect::>(), - ) - }) - .map(|path_stats| { - // See the note above. - path_stats.into_iter().filter_map(|pso| pso).collect() - }) - .to_boxed() + } + } + }) + .collect::>(), + ) + .await?; + // See the note above. + Ok(path_stats.into_iter().filter_map(|pso| pso).collect()) } - fn expand(&self, path_globs: PathGlobs) -> BoxFuture, E> { + async fn expand(&self, path_globs: PathGlobs) -> Result, E> { let PathGlobs { include, exclude, @@ -117,7 +115,7 @@ trait GlobMatchingImplementation: VFS { } = path_globs; if include.is_empty() { - return future::ok(vec![]).to_boxed(); + return Ok(vec![]); } let result = Arc::new(Mutex::new(Vec::new())); @@ -132,139 +130,144 @@ trait GlobMatchingImplementation: VFS { } } - future::join_all(roots) - .and_then(move |matched| { - if strict_match_behavior.should_check_glob_matches() { - // Get all the inputs which didn't transitively expand to any files. - let matching_inputs = sources - .iter() - .zip(matched.into_iter()) - .filter_map( - |(source, matched)| { - if matched { - Some(source.clone()) - } else { - None - } - }, - ) - .collect::>(); - - let non_matching_inputs = sources - .into_iter() - .filter(|s| !matching_inputs.contains(s)) - .collect::>(); + let matched = future::try_join_all(roots).await?; - let match_failed = match conjunction { - // All must match. - GlobExpansionConjunction::AllMatch => !non_matching_inputs.is_empty(), - // Only one needs to match. - GlobExpansionConjunction::AnyMatch => matching_inputs.is_empty(), - }; - - if match_failed { - let mut non_matching_inputs = non_matching_inputs - .iter() - .map(|parsed_source| parsed_source.0.clone()) - .collect::>(); - non_matching_inputs.sort(); - let single_glob = non_matching_inputs.len() == 1; - let prefix = format!("Unmatched glob{}", if single_glob { "" } else { "s" }); - let origin = match &strict_match_behavior { - StrictGlobMatching::Warn(description) | StrictGlobMatching::Error(description) => { - format!(" from {}: ", description) - } - _ => ": ".to_string(), - }; - let unmatched_globs = if single_glob { - format!("{:?}", non_matching_inputs[0]) - } else { - format!("{:?}", non_matching_inputs) - }; - let exclude_patterns = exclude.exclude_patterns(); - let excludes_portion = if exclude_patterns.is_empty() { - "".to_string() + if strict_match_behavior.should_check_glob_matches() { + // Get all the inputs which didn't transitively expand to any files. 
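
`directory_listing` now builds one `async move` block per candidate `Stat` and awaits the whole batch with `future::try_join_all`, filtering out the `None`s afterwards. A small stand-alone sketch of that shape (names are illustrative only):

```rust
use futures::future;

// Each element either errors, produces a value, or is filtered out (like ignored Stats).
async fn classify(n: u32) -> Result<Option<u32>, String> {
    if n == 13 {
        Err("unlucky".to_string())
    } else if n % 2 == 0 {
        Ok(Some(n * n))
    } else {
        Ok(None)
    }
}

async fn classify_all(ns: Vec<u32>) -> Result<Vec<u32>, String> {
    // One async block per element, awaited as a batch; `?` propagates the first error.
    let results =
        future::try_join_all(ns.into_iter().map(|n| async move { classify(n).await })).await?;
    Ok(results.into_iter().filter_map(|x| x).collect())
}

#[tokio::main]
async fn main() {
    assert_eq!(classify_all(vec![1, 2, 3, 4]).await, Ok(vec![4, 16]));
    assert!(classify_all(vec![12, 13]).await.is_err());
}
```
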
+ let matching_inputs = sources + .iter() + .zip(matched.into_iter()) + .filter_map( + |(source, matched)| { + if matched { + Some(source.clone()) } else { - let single_exclude = exclude_patterns.len() == 1; - if single_exclude { - format!(", exclude: {:?}", exclude_patterns[0]) - } else { - format!(", excludes: {:?}", exclude_patterns) - } - }; - let msg = format!( - "{}{}{}{}", - prefix, origin, unmatched_globs, excludes_portion - ); - if strict_match_behavior.should_throw_on_error() { - return future::err(Self::mk_error(&msg)); - } else { - warn!("{}", msg); + None } + }, + ) + .collect::>(); + + let non_matching_inputs = sources + .into_iter() + .filter(|s| !matching_inputs.contains(s)) + .collect::>(); + + let match_failed = match conjunction { + // All must match. + GlobExpansionConjunction::AllMatch => !non_matching_inputs.is_empty(), + // Only one needs to match. + GlobExpansionConjunction::AnyMatch => matching_inputs.is_empty(), + }; + + if match_failed { + let mut non_matching_inputs = non_matching_inputs + .iter() + .map(|parsed_source| parsed_source.0.clone()) + .collect::>(); + non_matching_inputs.sort(); + let single_glob = non_matching_inputs.len() == 1; + let prefix = format!("Unmatched glob{}", if single_glob { "" } else { "s" }); + let origin = match &strict_match_behavior { + StrictGlobMatching::Warn(description) | StrictGlobMatching::Error(description) => { + format!(" from {}: ", description) + } + _ => ": ".to_string(), + }; + let unmatched_globs = if single_glob { + format!("{:?}", non_matching_inputs[0]) + } else { + format!("{:?}", non_matching_inputs) + }; + let exclude_patterns = exclude.exclude_patterns(); + let excludes_portion = if exclude_patterns.is_empty() { + "".to_string() + } else { + let single_exclude = exclude_patterns.len() == 1; + if single_exclude { + format!(", exclude: {:?}", exclude_patterns[0]) + } else { + format!(", excludes: {:?}", exclude_patterns) } + }; + let msg = format!( + "{}{}{}{}", + prefix, origin, unmatched_globs, excludes_portion + ); + if strict_match_behavior.should_throw_on_error() { + return Err(Self::mk_error(&msg)); + } else { + warn!("{}", msg); } + } + } - let mut path_stats = Arc::try_unwrap(result) - .unwrap_or_else(|_| panic!("expand violated its contract.")) - .into_inner() - .into_iter() - .collect::>(); - path_stats.sort_by(|a, b| a.path().cmp(b.path())); - path_stats.dedup_by(|a, b| a.path() == b.path()); - future::ok(path_stats) - }) - .to_boxed() + let mut path_stats = Arc::try_unwrap(result) + .unwrap_or_else(|_| panic!("expand violated its contract.")) + .into_inner() + .into_iter() + .collect::>(); + path_stats.sort_by(|a, b| a.path().cmp(b.path())); + path_stats.dedup_by(|a, b| a.path() == b.path()); + Ok(path_stats) } - fn expand_single( + async fn expand_single( &self, result: Arc>>, exclude: Arc, path_glob: PathGlob, - ) -> BoxFuture { + ) -> Result { match path_glob { PathGlob::Wildcard { canonical_dir, symbolic_path, wildcard, - } => self.expand_wildcard(result, exclude, canonical_dir, symbolic_path, wildcard), + } => { + self + .expand_wildcard(result, exclude, canonical_dir, symbolic_path, wildcard) + .await + } PathGlob::DirWildcard { canonical_dir, symbolic_path, wildcard, remainder, - } => self.expand_dir_wildcard( - result, - exclude, - canonical_dir, - symbolic_path, - wildcard, - remainder, - ), + } => { + self + .expand_dir_wildcard( + result, + exclude, + canonical_dir, + symbolic_path, + wildcard, + remainder, + ) + .await + } } } - fn expand_wildcard( + async fn expand_wildcard( &self, 
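
The conjunction check itself is unchanged by the port; restated on its own for clarity (hypothetical, simplified types): under `AllMatch` any unmatched glob is a failure, under `AnyMatch` only a complete miss is.

```rust
enum Conjunction {
    AllMatch,
    AnyMatch,
}

// Simplified restatement of the `match_failed` decision in `expand`.
fn match_failed(conjunction: &Conjunction, matched: usize, total: usize) -> bool {
    match conjunction {
        Conjunction::AllMatch => matched < total,
        Conjunction::AnyMatch => matched == 0,
    }
}

fn main() {
    assert!(match_failed(&Conjunction::AllMatch, 1, 2));
    assert!(!match_failed(&Conjunction::AnyMatch, 1, 2));
    assert!(match_failed(&Conjunction::AnyMatch, 0, 2));
}
```
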
result: Arc>>, exclude: Arc, canonical_dir: Dir, symbolic_path: PathBuf, wildcard: Pattern, - ) -> BoxFuture { + ) -> Result { // Filter directory listing to append PathStats, with no continuation. - self + let path_stats = self .directory_listing(canonical_dir, symbolic_path, wildcard, &exclude) - .map(move |path_stats| { - let mut result = result.lock(); - let matched = !path_stats.is_empty(); - result.extend(path_stats); - matched - }) - .to_boxed() + .await?; + + let mut result = result.lock(); + let matched = !path_stats.is_empty(); + result.extend(path_stats); + Ok(matched) } - fn expand_dir_wildcard( + async fn expand_dir_wildcard( &self, result: Arc>>, exclude: Arc, @@ -272,64 +275,58 @@ trait GlobMatchingImplementation: VFS { symbolic_path: PathBuf, wildcard: Pattern, remainder: Vec, - ) -> BoxFuture { + ) -> Result { // Filter directory listing and recurse for matched Dirs. let context = self.clone(); - self + let path_stats = self .directory_listing(canonical_dir, symbolic_path, wildcard, &exclude) - .and_then(move |path_stats| { - path_stats - .into_iter() - .filter_map(|ps| match ps { - PathStat::Dir { path, stat } => Some( - PathGlob::parse_globs(stat, path, &remainder).map_err(|e| Self::mk_error(e.as_str())), - ), - PathStat::File { .. } => None, - }) - .collect::, E>>() - }) - .and_then(move |path_globs| { - let child_globs = path_globs - .into_iter() - .flat_map(Vec::into_iter) - .map(|pg| context.expand_single(result.clone(), exclude.clone(), pg)) - .collect::>(); - future::join_all(child_globs) - .map(|child_matches| child_matches.into_iter().any(|m| m)) - .to_boxed() + .await?; + + let path_globs = path_stats + .into_iter() + .filter_map(|ps| match ps { + PathStat::Dir { path, stat } => Some( + PathGlob::parse_globs(stat, path, &remainder).map_err(|e| Self::mk_error(e.as_str())), + ), + PathStat::File { .. } => None, }) - .to_boxed() + .collect::, E>>()?; + + let child_globs = path_globs + .into_iter() + .flat_map(Vec::into_iter) + .map(|pg| context.expand_single(result.clone(), exclude.clone(), pg)) + .collect::>(); + + let child_matches = future::try_join_all(child_globs).await?; + Ok(child_matches.into_iter().any(|m| m)) } - fn canonicalize(&self, symbolic_path: PathBuf, link: Link) -> BoxFuture, E> { + async fn canonicalize(&self, symbolic_path: PathBuf, link: Link) -> Result, E> { // Read the link, which may result in PathGlob(s) that match 0 or 1 Path. let context = self.clone(); - self + // If the link destination can't be parsed as PathGlob(s), it is broken. + let link_globs = self .read_link(&link) - .map(|dest_path| { - // If the link destination can't be parsed as PathGlob(s), it is broken. - dest_path - .to_str() - .and_then(|dest_str| { - // Escape any globs in the parsed dest, which should guarantee one output PathGlob. - PathGlob::create(&[Pattern::escape(dest_str)]).ok() - }) - .unwrap_or_else(|| vec![]) - }) - .and_then(move |link_globs| { - future::result(PathGlobs::from_globs(link_globs)) - .map_err(|e| Self::mk_error(e.as_str())) - .and_then(move |path_globs| context.expand(path_globs)) + .await? + .to_str() + .and_then(|dest_str| { + // Escape any globs in the parsed dest, which should guarantee one output PathGlob. 
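
`expand_wildcard` keeps the pre-existing accumulator design: each expansion appends into a shared `Arc<Mutex<Vec<_>>>` and reports whether it matched anything, and the (synchronous) parking_lot lock is only taken after the `.await`, so it is never held across a suspension point. A sketch of that pattern with illustrative names:

```rust
use futures::future;
use parking_lot::Mutex;
use std::sync::Arc;

async fn produce(n: u32) -> Vec<u32> {
    (0..n).collect()
}

// Append into the shared accumulator and report whether anything matched.
async fn expand_into(result: Arc<Mutex<Vec<u32>>>, n: u32) -> bool {
    let items = produce(n).await;
    // The lock is acquired only after the await completes.
    let mut result = result.lock();
    let matched = !items.is_empty();
    result.extend(items);
    matched
}

#[tokio::main]
async fn main() {
    let result = Arc::new(Mutex::new(Vec::new()));
    let matches = future::join_all((0..3).map(|n| expand_into(result.clone(), n))).await;
    assert_eq!(matches, vec![false, true, true]);
    assert_eq!(result.lock().len(), 3); // 0 + 1 + 2 items in total
}
```
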
+ PathGlob::create(&[Pattern::escape(dest_str)]).ok() }) + .unwrap_or_else(|| vec![]); + + let path_globs = PathGlobs::from_globs(link_globs).map_err(|e| Self::mk_error(e.as_str()))?; + let mut path_stats = context + .expand(path_globs) .map_err(move |e| Self::mk_error(&format!("While expanding link {:?}: {}", link.0, e))) - .map(|mut path_stats| { - // Since we've escaped any globs in the parsed path, expect either 0 or 1 destination. - path_stats.pop().map(|ps| match ps { - PathStat::Dir { stat, .. } => PathStat::dir(symbolic_path, stat), - PathStat::File { stat, .. } => PathStat::file(symbolic_path, stat), - }) - }) - .to_boxed() + .await?; + + // Since we've escaped any globs in the parsed path, expect either 0 or 1 destination. + Ok(path_stats.pop().map(|ps| match ps { + PathStat::Dir { stat, .. } => PathStat::dir(symbolic_path, stat), + PathStat::File { stat, .. } => PathStat::file(symbolic_path, stat), + })) } } diff --git a/src/rust/engine/fs/src/lib.rs b/src/rust/engine/fs/src/lib.rs index 4f41c42dfec..6e9e3ec16c5 100644 --- a/src/rust/engine/fs/src/lib.rs +++ b/src/rust/engine/fs/src/lib.rs @@ -29,9 +29,9 @@ mod glob_matching; pub use crate::glob_matching::GlobMatching; use ::ignore::gitignore::{Gitignore, GitignoreBuilder}; -use boxfuture::{BoxFuture, Boxable}; +use async_trait::async_trait; use bytes::Bytes; -use futures01::{future, Future}; +use futures::future::{self, BoxFuture, TryFutureExt}; use glob::{MatchOptions, Pattern}; use lazy_static::lazy_static; use std::cmp::min; @@ -630,14 +630,17 @@ impl PosixFS { }) } - pub fn scandir( - &self, + /// + /// TODO: See the note on references in ASYNC.md. + /// + pub fn scandir<'a, 'b>( + &'a self, dir_relative_to_root: Dir, - ) -> impl Future { + ) -> BoxFuture<'b, Result> { let vfs = self.clone(); - self.executor.spawn_on_io_pool(future::lazy(move || { - vfs.scandir_sync(&dir_relative_to_root) - })) + self + .executor + .spawn_blocking(move || vfs.scandir_sync(&dir_relative_to_root)) } fn scandir_sync(&self, dir_relative_to_root: &Dir) -> Result { @@ -690,34 +693,51 @@ impl PosixFS { self.ignore.is_ignored(stat) } - pub fn read_file(&self, file: &File) -> impl Future { + /// + /// TODO: See the note on references in ASYNC.md. + /// + pub fn read_file<'a, 'b, 'c>( + &'a self, + file: &'b File, + ) -> BoxFuture<'c, Result> { let path = file.path.clone(); let path_abs = self.root.0.join(&file.path); - self.executor.spawn_on_io_pool(future::lazy(move || { - let is_executable = path_abs.metadata()?.permissions().mode() & 0o100 == 0o100; - std::fs::File::open(&path_abs) - .and_then(|mut f| { - let mut content = Vec::new(); - f.read_to_end(&mut content)?; - Ok(FileContent { - path: path, - content: Bytes::from(content), - is_executable, - }) - }) - .map_err(|e| { - io::Error::new( - e.kind(), - format!("Failed to read file {:?}: {}", path_abs, e), - ) + let executor = self.executor.clone(); + Box::pin(async move { + executor + .spawn_blocking(move || { + let is_executable = path_abs.metadata()?.permissions().mode() & 0o100 == 0o100; + std::fs::File::open(&path_abs) + .and_then(|mut f| { + let mut content = Vec::new(); + f.read_to_end(&mut content)?; + Ok(FileContent { + path: path, + content: Bytes::from(content), + is_executable, + }) + }) + .map_err(|e| { + io::Error::new( + e.kind(), + format!("Failed to read file {:?}: {}", path_abs, e), + ) + }) }) - })) + .await + }) } - pub fn read_link(&self, link: &Link) -> impl Future { + /// + /// TODO: See the note on references in ASYNC.md. 
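
`canonicalize` escapes the link destination with `Pattern::escape` before turning it into a glob, so a literal path containing glob metacharacters matches only itself. A small illustration of that guarantee (the paths are made up):

```rust
use glob::Pattern;

fn main() {
    // A literal destination that happens to contain glob metacharacters.
    let dest = "builds/[2020]/out*";

    // Escaping guarantees the compiled pattern matches the literal string and nothing else.
    let escaped = Pattern::escape(dest);
    let pattern = Pattern::new(&escaped).unwrap();

    assert!(pattern.matches(dest));
    assert!(!pattern.matches("builds/2/outfile"));
}
```
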
+ /// + pub fn read_link<'a, 'b, 'c>( + &'a self, + link: &'b Link, + ) -> BoxFuture<'c, Result> { let link_parent = link.0.parent().map(Path::to_owned); let link_abs = self.root.0.join(link.0.as_path()); - self.executor.spawn_on_io_pool(future::lazy(move || { + self.executor.spawn_blocking(move || { link_abs .read_link() .and_then(|path_buf| { @@ -743,7 +763,7 @@ impl PosixFS { format!("Failed to read link {:?}: {}", link_abs, e), ) }) - })) + }) } /// @@ -803,25 +823,35 @@ impl PosixFS { } } - pub fn stat_sync(&self, relative_path: PathBuf) -> Result { + pub fn stat_sync(&self, relative_path: PathBuf) -> Result, io::Error> { let abs_path = self.root.0.join(&relative_path); let metadata = match self.symlink_behavior { - SymlinkBehavior::Aware => fs::symlink_metadata(abs_path)?, - SymlinkBehavior::Oblivious => fs::metadata(abs_path)?, + SymlinkBehavior::Aware => fs::symlink_metadata(abs_path), + SymlinkBehavior::Oblivious => fs::metadata(abs_path), }; - PosixFS::stat_internal(&self.root.0, relative_path, metadata.file_type(), || { - Ok(metadata) - }) + let stat_result = metadata.and_then(|metadata| { + PosixFS::stat_internal(&self.root.0, relative_path, metadata.file_type(), || { + Ok(metadata) + }) + }); + match stat_result { + Ok(v) => Ok(Some(v)), + Err(err) => match err.kind() { + io::ErrorKind::NotFound => Ok(None), + _ => Err(err), + }, + } } } +#[async_trait] impl VFS for Arc { - fn read_link(&self, link: &Link) -> BoxFuture { - PosixFS::read_link(self, link).to_boxed() + async fn read_link(&self, link: &Link) -> Result { + PosixFS::read_link(self, link).await } - fn scandir(&self, dir: Dir) -> BoxFuture, io::Error> { - PosixFS::scandir(self, dir).map(Arc::new).to_boxed() + async fn scandir(&self, dir: Dir) -> Result, io::Error> { + Ok(Arc::new(PosixFS::scandir(self, dir).await?)) } fn is_ignored(&self, stat: &Stat) -> bool { @@ -833,13 +863,15 @@ impl VFS for Arc { } } +#[async_trait] pub trait PathStatGetter { - fn path_stats(&self, paths: Vec) -> BoxFuture>, E>; + async fn path_stats(&self, paths: Vec) -> Result>, E>; } +#[async_trait] impl PathStatGetter for Arc { - fn path_stats(&self, paths: Vec) -> BoxFuture>, io::Error> { - future::join_all( + async fn path_stats(&self, paths: Vec) -> Result>, io::Error> { + future::try_join_all( paths .into_iter() .map(|path| { @@ -847,40 +879,32 @@ impl PathStatGetter for Arc { let fs2 = self.clone(); self .executor - .spawn_on_io_pool(future::lazy(move || fs2.stat_sync(path))) - .then(|stat_result| match stat_result { - Ok(v) => Ok(Some(v)), - Err(err) => match err.kind() { - io::ErrorKind::NotFound => Ok(None), - _ => Err(err), - }, - }) + .spawn_blocking(move || fs2.stat_sync(path)) .and_then(move |maybe_stat| { - match maybe_stat { - // Note: This will drop PathStats for symlinks which don't point anywhere. - Some(Stat::Link(link)) => fs.canonicalize(link.0.clone(), link), - Some(Stat::Dir(dir)) => { - future::ok(Some(PathStat::dir(dir.0.clone(), dir))).to_boxed() - } - Some(Stat::File(file)) => { - future::ok(Some(PathStat::file(file.path.clone(), file))).to_boxed() + async move { + match maybe_stat { + // Note: This will drop PathStats for symlinks which don't point anywhere. 
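
`read_file`, `read_link` and `scandir` now push their synchronous `std::fs` work onto the blocking pool via `spawn_blocking` and simply await the result. A minimal sketch using tokio's own `tokio::task::spawn_blocking`; the crate's `task_executor::Executor::spawn_blocking` is assumed to wrap something similar:

```rust
use std::io;
use std::path::PathBuf;

// Run a synchronous read on the blocking pool and await it from async code.
async fn read_to_bytes(path: PathBuf) -> Result<Vec<u8>, io::Error> {
    tokio::task::spawn_blocking(move || std::fs::read(&path))
        .await
        // A panicked/cancelled blocking task surfaces as a JoinError; fold it into io::Error.
        .map_err(|join_err| io::Error::new(io::ErrorKind::Other, join_err))?
}

#[tokio::main]
async fn main() -> Result<(), io::Error> {
    let path = std::env::temp_dir().join("spawn_blocking_demo.txt");
    std::fs::write(&path, b"cute")?;
    let bytes = read_to_bytes(path).await?;
    assert_eq!(bytes, b"cute".to_vec());
    Ok(())
}
```
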
+ Some(Stat::Link(link)) => fs.canonicalize(link.0.clone(), link).await, + Some(Stat::Dir(dir)) => Ok(Some(PathStat::dir(dir.0.clone(), dir))), + Some(Stat::File(file)) => Ok(Some(PathStat::file(file.path.clone(), file))), + None => Ok(None), } - None => future::ok(None).to_boxed(), } }) }) .collect::>(), ) - .to_boxed() + .await } } /// /// A context for filesystem operations parameterized on an error type 'E'. /// +#[async_trait] pub trait VFS: Clone + Send + Sync + 'static { - fn read_link(&self, link: &Link) -> BoxFuture; - fn scandir(&self, dir: Dir) -> BoxFuture, E>; + async fn read_link(&self, link: &Link) -> Result; + async fn scandir(&self, dir: Dir) -> Result, E>; fn is_ignored(&self, stat: &Stat) -> bool; fn mk_error(msg: &str) -> E; } diff --git a/src/rust/engine/fs/src/posixfs_tests.rs b/src/rust/engine/fs/src/posixfs_tests.rs index 7cfd0042cb2..984b6be4ab2 100644 --- a/src/rust/engine/fs/src/posixfs_tests.rs +++ b/src/rust/engine/fs/src/posixfs_tests.rs @@ -5,30 +5,31 @@ use crate::{ Dir, DirectoryListing, File, GlobExpansionConjunction, GlobMatching, Link, PathGlobs, PathStat, PathStatGetter, PosixFS, Stat, StrictGlobMatching, SymlinkBehavior, VFS, }; -use boxfuture::{BoxFuture, Boxable}; -use futures01::future::{self, Future}; + +use async_trait::async_trait; use std; use std::collections::HashMap; use std::path::{Components, Path, PathBuf}; use std::sync::Arc; use testutil::make_file; +use tokio::runtime::Handle; -#[test] -fn is_executable_false() { +#[tokio::test] +async fn is_executable_false() { let dir = tempfile::TempDir::new().unwrap(); make_file(&dir.path().join("marmosets"), &[], 0o611); - assert_only_file_is_executable(dir.path(), false); + assert_only_file_is_executable(dir.path(), false).await; } -#[test] -fn is_executable_true() { +#[tokio::test] +async fn is_executable_true() { let dir = tempfile::TempDir::new().unwrap(); make_file(&dir.path().join("photograph_marmosets"), &[], 0o700); - assert_only_file_is_executable(dir.path(), true); + assert_only_file_is_executable(dir.path(), true).await; } -#[test] -fn read_file() { +#[tokio::test] +async fn read_file() { let dir = tempfile::TempDir::new().unwrap(); let path = PathBuf::from("marmosets"); let content = "cute".as_bytes().to_vec(); @@ -38,37 +39,37 @@ fn read_file() { 0o600, ); let fs = new_posixfs(&dir.path()); - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let file_content = rt - .block_on(fs.read_file(&File { + let file_content = fs + .read_file(&File { path: path.clone(), is_executable: false, - })) + }) + .await .unwrap(); assert_eq!(file_content.path, path); assert_eq!(file_content.content, content); } -#[test] -fn read_file_missing() { +#[tokio::test] +async fn read_file_missing() { let dir = tempfile::TempDir::new().unwrap(); new_posixfs(&dir.path()) .read_file(&File { path: PathBuf::from("marmosets"), is_executable: false, }) - .wait() + .await .expect_err("Expected error"); } -#[test] -fn stat_executable_file() { +#[tokio::test] +async fn stat_executable_file() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("photograph_marmosets"); make_file(&dir.path().join(&path), &[], 0o700); assert_eq!( - posix_fs.stat_sync(path.clone()).unwrap(), + posix_fs.stat_sync(path.clone()).unwrap().unwrap(), super::Stat::File(File { path: path, is_executable: true, @@ -76,14 +77,14 @@ fn stat_executable_file() { ) } -#[test] -fn stat_nonexecutable_file() { +#[tokio::test] +async fn stat_nonexecutable_file() { let dir = 
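
`path_stats` can now treat a missing path as data rather than as an error because `stat_sync` returns `Ok(None)` for `NotFound` instead of propagating it. The error-kind mapping in isolation (simplified to raw `fs::Metadata` rather than the crate's `Stat`):

```rust
use std::fs;
use std::io;
use std::path::Path;

// Missing paths become Ok(None); every other I/O failure still surfaces as an error.
fn stat_optional(path: &Path) -> Result<Option<fs::Metadata>, io::Error> {
    match fs::symlink_metadata(path) {
        Ok(metadata) => Ok(Some(metadata)),
        Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
        Err(err) => Err(err),
    }
}

fn main() -> Result<(), io::Error> {
    assert!(stat_optional(Path::new("definitely_not_here_12345"))?.is_none());
    Ok(())
}
```
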
tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("marmosets"); make_file(&dir.path().join(&path), &[], 0o600); assert_eq!( - posix_fs.stat_sync(path.clone()).unwrap(), + posix_fs.stat_sync(path.clone()).unwrap().unwrap(), super::Stat::File(File { path: path, is_executable: false, @@ -91,20 +92,20 @@ fn stat_nonexecutable_file() { ) } -#[test] -fn stat_dir() { +#[tokio::test] +async fn stat_dir() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); assert_eq!( - posix_fs.stat_sync(path.clone()).unwrap(), + posix_fs.stat_sync(path.clone()).unwrap().unwrap(), super::Stat::Dir(Dir(path)) ) } -#[test] -fn stat_symlink() { +#[tokio::test] +async fn stat_symlink() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("marmosets"); @@ -113,13 +114,13 @@ fn stat_symlink() { let link_path = PathBuf::from("remarkably_similar_marmoset"); std::os::unix::fs::symlink(&dir.path().join(path), dir.path().join(&link_path)).unwrap(); assert_eq!( - posix_fs.stat_sync(link_path.clone()).unwrap(), + posix_fs.stat_sync(link_path.clone()).unwrap().unwrap(), super::Stat::Link(Link(link_path)) ) } -#[test] -fn stat_symlink_oblivious() { +#[tokio::test] +async fn stat_symlink_oblivious() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs_symlink_oblivious(&dir.path()); let path = PathBuf::from("marmosets"); @@ -129,7 +130,7 @@ fn stat_symlink_oblivious() { std::os::unix::fs::symlink(&dir.path().join(path), dir.path().join(&link_path)).unwrap(); // Symlink oblivious stat will give us the destination type. assert_eq!( - posix_fs.stat_sync(link_path.clone()).unwrap(), + posix_fs.stat_sync(link_path.clone()).unwrap().unwrap(), super::Stat::File(File { path: link_path, is_executable: false, @@ -137,37 +138,37 @@ fn stat_symlink_oblivious() { ) } -#[test] -fn stat_other() { +#[tokio::test] +async fn stat_other() { new_posixfs("/dev") .stat_sync(PathBuf::from("null")) .expect_err("Want error"); } -#[test] -fn stat_missing() { +#[tokio::test] +async fn stat_missing() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); - posix_fs - .stat_sync(PathBuf::from("no_marmosets")) - .expect_err("Want error"); + assert_eq!( + posix_fs.stat_sync(PathBuf::from("no_marmosets")).unwrap(), + None, + ); } -#[test] -fn scandir_empty() { +#[tokio::test] +async fn scandir_empty() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); let path = PathBuf::from("empty_enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); assert_eq!( - runtime.block_on(posix_fs.scandir(Dir(path))).unwrap(), + posix_fs.scandir(Dir(path)).await.unwrap(), DirectoryListing(vec![]) ); } -#[test] -fn scandir() { +#[tokio::test] +async fn scandir() { let dir = tempfile::TempDir::new().unwrap(); let path = PathBuf::from("enclosure"); std::fs::create_dir(dir.path().join(&path)).unwrap(); @@ -195,12 +196,11 @@ fn scandir() { 0o600, ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - // Symlink aware. assert_eq!( - runtime - .block_on(new_posixfs(&dir.path()).scandir(Dir(path.clone()))) + new_posixfs(&dir.path()) + .scandir(Dir(path.clone())) + .await .unwrap(), DirectoryListing(vec![ Stat::File(File { @@ -222,8 +222,9 @@ fn scandir() { // Symlink oblivious. 
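
The test migration is mechanical: instead of constructing a `Runtime` and calling `block_on`, each test becomes `async` and `#[tokio::test]` supplies the runtime (assuming tokio's `macros` feature, as enabled in the dev-dependency hunks). The shape, reduced to a toy example:

```rust
async fn double(x: u64) -> u64 {
    x * 2
}

// #[tokio::test] spins up a runtime per test, so the body can await directly.
#[tokio::test]
async fn doubles() {
    assert_eq!(double(21).await, 42);
}
```
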
assert_eq!( - runtime - .block_on(new_posixfs_symlink_oblivious(&dir.path()).scandir(Dir(path))) + new_posixfs_symlink_oblivious(&dir.path()) + .scandir(Dir(path)) + .await .unwrap(), DirectoryListing(vec![ Stat::File(File { @@ -247,18 +248,18 @@ fn scandir() { ); } -#[test] -fn scandir_missing() { +#[tokio::test] +async fn scandir_missing() { let dir = tempfile::TempDir::new().unwrap(); let posix_fs = new_posixfs(&dir.path()); posix_fs .scandir(Dir(PathBuf::from("no_marmosets_here"))) - .wait() + .await .expect_err("Want error"); } -#[test] -fn path_stats_for_paths() { +#[tokio::test] +async fn path_stats_for_paths() { let dir = tempfile::TempDir::new().unwrap(); let root_path = dir.path(); @@ -284,9 +285,8 @@ fn path_stats_for_paths() { std::os::unix::fs::symlink("doesnotexist", &root_path.join("symlink_to_nothing")).unwrap(); let posix_fs = Arc::new(new_posixfs(&root_path)); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - let path_stats = runtime - .block_on(posix_fs.path_stats(vec![ + let path_stats = posix_fs + .path_stats(vec![ PathBuf::from("executable_file"), PathBuf::from("regular_file"), PathBuf::from("dir"), @@ -295,7 +295,8 @@ fn path_stats_for_paths() { PathBuf::from("dir_symlink"), PathBuf::from("symlink_to_nothing"), PathBuf::from("doesnotexist"), - ])) + ]) + .await .unwrap(); let v: Vec> = vec![ Some(PathStat::file( @@ -340,8 +341,8 @@ fn path_stats_for_paths() { assert_eq!(v, path_stats); } -#[test] -fn memfs_expand_basic() { +#[tokio::test] +async fn memfs_expand_basic() { // Create two files, with the effect that there is a nested directory for the longer path. let p1 = PathBuf::from("some/file"); let p2 = PathBuf::from("some/other"); @@ -354,7 +355,7 @@ fn memfs_expand_basic() { .unwrap(); assert_eq!( - fs.expand(globs).wait().unwrap(), + fs.expand(globs).await.unwrap(), vec![ PathStat::file( p1.clone(), @@ -368,12 +369,9 @@ fn memfs_expand_basic() { ); } -fn assert_only_file_is_executable(path: &Path, want_is_executable: bool) { +async fn assert_only_file_is_executable(path: &Path, want_is_executable: bool) { let fs = new_posixfs(path); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - let stats = runtime - .block_on(fs.scandir(Dir(PathBuf::from(".")))) - .unwrap(); + let stats = fs.scandir(Dir(PathBuf::from("."))).await.unwrap(); assert_eq!(stats.0.len(), 1); match stats.0.get(0).unwrap() { &super::Stat::File(File { @@ -384,14 +382,19 @@ fn assert_only_file_is_executable(path: &Path, want_is_executable: bool) { } fn new_posixfs>(dir: P) -> PosixFS { - PosixFS::new(dir.as_ref(), &[], task_executor::Executor::new()).unwrap() + PosixFS::new( + dir.as_ref(), + &[], + task_executor::Executor::new(Handle::current()), + ) + .unwrap() } fn new_posixfs_symlink_oblivious>(dir: P) -> PosixFS { PosixFS::new_with_symlink_behavior( dir.as_ref(), &[], - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), SymlinkBehavior::Oblivious, ) .unwrap() @@ -446,21 +449,19 @@ impl MemFS { } } +#[async_trait] impl VFS for Arc { - fn read_link(&self, link: &Link) -> BoxFuture { + async fn read_link(&self, link: &Link) -> Result { // The creation of a static filesystem does not allow for Links. 
- future::err(format!("{:?} does not exist within this filesystem.", link)).to_boxed() + Err(format!("{:?} does not exist within this filesystem.", link)) } - fn scandir(&self, dir: Dir) -> BoxFuture, String> { - future::result( - self - .contents - .get(&dir) - .cloned() - .ok_or_else(|| format!("{:?} does not exist within this filesystem.", dir)), - ) - .to_boxed() + async fn scandir(&self, dir: Dir) -> Result, String> { + self + .contents + .get(&dir) + .cloned() + .ok_or_else(|| format!("{:?} does not exist within this filesystem.", dir)) } fn is_ignored(&self, _stat: &Stat) -> bool { diff --git a/src/rust/engine/fs/store/Cargo.toml b/src/rust/engine/fs/store/Cargo.toml index 4d67d06096b..40df0fc8e05 100644 --- a/src/rust/engine/fs/store/Cargo.toml +++ b/src/rust/engine/fs/store/Cargo.toml @@ -13,6 +13,7 @@ digest = "0.8" dirs = "1" fs = { path = ".." } futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "b582ef3dc4e8c7289093c8febff8dadf0997b532", default_features = false, features = ["protobuf-codec", "secure"] } hashing = { path = "../../hashing" } indexmap = "1.0.2" @@ -28,7 +29,6 @@ serde_derive = "1.0" sharded_lmdb = { path = "../../sharded_lmdb" } task_executor = { path = "../../task_executor" } tempfile = "3" -tokio-threadpool = "0.1.12" uuid = { version = "0.7.1", features = ["v4"] } workunit_store = {path = "../../workunit_store" } @@ -36,5 +36,5 @@ workunit_store = {path = "../../workunit_store" } maplit = "*" mock = { path = "../../testutil/mock" } testutil = { path = "../../testutil" } -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-core", "macros"] } walkdir = "2" diff --git a/src/rust/engine/fs/store/src/local.rs b/src/rust/engine/fs/store/src/local.rs index 6480bf8af6a..f56d6d78eb5 100644 --- a/src/rust/engine/fs/store/src/local.rs +++ b/src/rust/engine/fs/store/src/local.rs @@ -3,6 +3,7 @@ use super::{EntryType, ShrinkBehavior, GIGABYTES}; use boxfuture::{try_future, BoxFuture, Boxable}; use bytes::Bytes; use digest::{Digest as DigestTrait, FixedOutput}; +use futures::future::{FutureExt, TryFutureExt}; use futures01::{future, Future}; use hashing::{Digest, Fingerprint, EMPTY_DIGEST}; use lmdb::Error::NotFound; @@ -267,17 +268,23 @@ impl ByteStore { self .inner .executor - .spawn_on_io_pool(future::lazy(move || { + .spawn_blocking(move || { let fingerprint = { let mut hasher = Sha256::default(); hasher.input(&bytes); Fingerprint::from_bytes_unsafe(hasher.fixed_result().as_slice()) }; Ok(Digest(fingerprint, bytes.len())) - })) + }) + .boxed() + .compat() .and_then(move |digest| { future::done(dbs) - .and_then(move |db| db.store_bytes(digest.0, bytes2, initial_lease)) + .and_then(move |db| { + db.store_bytes(digest.0, bytes2, initial_lease) + .boxed() + .compat() + }) .map(move |()| digest) }) } @@ -306,7 +313,7 @@ impl ByteStore { } else { Err(format!("Got hash collision reading from store - digest {:?} was requested, but retrieved bytes with that fingerprint had length {}. Congratulations, you may have broken sha256! 
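
The `MemFS` change above illustrates a nice side effect of `#[async_trait]`: a purely in-memory test double can return plain `Ok`/`Err` values directly, with no `future::ok(..).to_boxed()` wrapping. A hypothetical double in the same spirit:

```rust
use async_trait::async_trait;
use std::collections::HashMap;

#[async_trait]
trait KeyValueSource {
    async fn get(&self, key: &str) -> Result<String, String>;
}

struct InMemory(HashMap<String, String>);

#[async_trait]
impl KeyValueSource for InMemory {
    async fn get(&self, key: &str) -> Result<String, String> {
        // Synchronous lookup returned directly; no future wrapping required.
        self
            .0
            .get(key)
            .cloned()
            .ok_or_else(|| format!("{:?} does not exist in this source.", key))
    }
}

#[tokio::test]
async fn missing_key_is_an_error() {
    let source = InMemory(HashMap::new());
    assert!(source.get("marmosets").await.is_err());
}
```
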
Underlying bytes: {:?}", digest, bytes.len(), bytes)) } - }).to_boxed() + }).boxed().compat().to_boxed() } pub fn all_digests(&self, entry_type: EntryType) -> Result, String> { diff --git a/src/rust/engine/fs/store/src/local_tests.rs b/src/rust/engine/fs/store/src/local_tests.rs index 0883fde8146..bdb7f1aa1ef 100644 --- a/src/rust/engine/fs/store/src/local_tests.rs +++ b/src/rust/engine/fs/store/src/local_tests.rs @@ -1,101 +1,126 @@ use crate::local::ByteStore; -use crate::tests::block_on; use crate::{EntryType, ShrinkBehavior}; use bytes::{BufMut, Bytes, BytesMut}; +use futures::compat::Future01CompatExt; use hashing::{Digest, Fingerprint}; use std::path::Path; use tempfile::TempDir; use testutil::data::{TestData, TestDirectory}; +use tokio::runtime::Handle; use walkdir::WalkDir; -#[test] -fn save_file() { +#[tokio::test] +async fn save_file() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); assert_eq!( - block_on(new_store(dir.path()).store_bytes(EntryType::File, testdata.bytes(), false,)), + new_store(dir.path()) + .store_bytes(EntryType::File, testdata.bytes(), false,) + .compat() + .await, Ok(testdata.digest()) ); } -#[test] -fn save_file_is_idempotent() { +#[tokio::test] +async fn save_file_is_idempotent() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); - block_on(new_store(dir.path()).store_bytes(EntryType::File, testdata.bytes(), false)).unwrap(); + new_store(dir.path()) + .store_bytes(EntryType::File, testdata.bytes(), false) + .compat() + .await + .unwrap(); assert_eq!( - block_on(new_store(dir.path()).store_bytes(EntryType::File, testdata.bytes(), false,)), + new_store(dir.path()) + .store_bytes(EntryType::File, testdata.bytes(), false,) + .compat() + .await, Ok(testdata.digest()) ); } -#[test] -fn roundtrip_file() { +#[tokio::test] +async fn roundtrip_file() { let testdata = TestData::roland(); let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - let hash = prime_store_with_file_bytes(&store, testdata.bytes()); - assert_eq!(load_file_bytes(&store, hash), Ok(Some(testdata.bytes()))); + let hash = prime_store_with_file_bytes(&store, testdata.bytes()).await; + assert_eq!( + load_file_bytes(&store, hash).await, + Ok(Some(testdata.bytes())) + ); } -#[test] -fn missing_file() { +#[tokio::test] +async fn missing_file() { let dir = TempDir::new().unwrap(); assert_eq!( - load_file_bytes(&new_store(dir.path()), TestData::roland().digest()), + load_file_bytes(&new_store(dir.path()), TestData::roland().digest()).await, Ok(None) ); } -#[test] -fn record_and_load_directory_proto() { +#[tokio::test] +async fn record_and_load_directory_proto() { let dir = TempDir::new().unwrap(); let testdir = TestDirectory::containing_roland(); assert_eq!( - block_on(new_store(dir.path()).store_bytes(EntryType::Directory, testdir.bytes(), false,)), + new_store(dir.path()) + .store_bytes(EntryType::Directory, testdir.bytes(), false,) + .compat() + .await, Ok(testdir.digest()) ); assert_eq!( - load_directory_proto_bytes(&new_store(dir.path()), testdir.digest()), + load_directory_proto_bytes(&new_store(dir.path()), testdir.digest()).await, Ok(Some(testdir.bytes())) ); } -#[test] -fn missing_directory() { +#[tokio::test] +async fn missing_directory() { let dir = TempDir::new().unwrap(); let testdir = TestDirectory::containing_roland(); assert_eq!( - load_directory_proto_bytes(&new_store(dir.path()), testdir.digest()), + load_directory_proto_bytes(&new_store(dir.path()), testdir.digest()).await, Ok(None) ); } -#[test] -fn 
file_is_not_directory_proto() { +#[tokio::test] +async fn file_is_not_directory_proto() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); - block_on(new_store(dir.path()).store_bytes(EntryType::File, testdata.bytes(), false)).unwrap(); + new_store(dir.path()) + .store_bytes(EntryType::File, testdata.bytes(), false) + .compat() + .await + .unwrap(); assert_eq!( - load_directory_proto_bytes(&new_store(dir.path()), testdata.digest()), + load_directory_proto_bytes(&new_store(dir.path()), testdata.digest()).await, Ok(None) ); } -#[test] -fn garbage_collect_nothing_to_do() { +#[tokio::test] +async fn garbage_collect_nothing_to_do() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let bytes = Bytes::from("0123456789"); - block_on(store.store_bytes(EntryType::File, bytes.clone(), false)).expect("Error storing"); + store + .store_bytes(EntryType::File, bytes.clone(), false) + .compat() + .await + .expect("Error storing"); store .shrink(10, ShrinkBehavior::Fast) .expect("Error shrinking"); @@ -110,17 +135,22 @@ fn garbage_collect_nothing_to_do() { .unwrap(), 10 ) - ), + ) + .await, Ok(Some(bytes)) ); } -#[test] -fn garbage_collect_nothing_to_do_with_lease() { +#[tokio::test] +async fn garbage_collect_nothing_to_do_with_lease() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let bytes = Bytes::from("0123456789"); - block_on(store.store_bytes(EntryType::File, bytes.clone(), false)).expect("Error storing"); + store + .store_bytes(EntryType::File, bytes.clone(), false) + .compat() + .await + .expect("Error storing"); let file_fingerprint = Fingerprint::from_hex_string( "84d89877f0d4041efb6bf91a16f0248f2fd573e6af05c19f96bedb9f882f7882", ) @@ -133,13 +163,13 @@ fn garbage_collect_nothing_to_do_with_lease() { .shrink(10, ShrinkBehavior::Fast) .expect("Error shrinking"); assert_eq!( - load_bytes(&store, EntryType::File, file_digest), + load_bytes(&store, EntryType::File, file_digest).await, Ok(Some(bytes)) ); } -#[test] -fn garbage_collect_remove_one_of_two_files_no_leases() { +#[tokio::test] +async fn garbage_collect_remove_one_of_two_files_no_leases() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let bytes_1 = Bytes::from("0123456789"); @@ -154,14 +184,30 @@ fn garbage_collect_remove_one_of_two_files_no_leases() { ) .unwrap(); let digest_2 = Digest(fingerprint_2, 10); - block_on(store.store_bytes(EntryType::File, bytes_1.clone(), false)).expect("Error storing"); - block_on(store.store_bytes(EntryType::File, bytes_2.clone(), false)).expect("Error storing"); + store + .store_bytes(EntryType::File, bytes_1.clone(), false) + .compat() + .await + .expect("Error storing"); + store + .store_bytes(EntryType::File, bytes_2.clone(), false) + .compat() + .await + .expect("Error storing"); store .shrink(10, ShrinkBehavior::Fast) .expect("Error shrinking"); let mut entries = Vec::new(); - entries.push(load_bytes(&store, EntryType::File, digest_1).expect("Error loading bytes")); - entries.push(load_bytes(&store, EntryType::File, digest_2).expect("Error loading bytes")); + entries.push( + load_bytes(&store, EntryType::File, digest_1) + .await + .expect("Error loading bytes"), + ); + entries.push( + load_bytes(&store, EntryType::File, digest_2) + .await + .expect("Error loading bytes"), + ); assert_eq!( 1, entries.iter().filter(|maybe| maybe.is_some()).count(), @@ -170,8 +216,8 @@ fn garbage_collect_remove_one_of_two_files_no_leases() { ); } -#[test] -fn garbage_collect_remove_both_files_no_leases() { +#[tokio::test] +async fn 
garbage_collect_remove_both_files_no_leases() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let bytes_1 = Bytes::from("0123456789"); @@ -186,44 +232,64 @@ fn garbage_collect_remove_both_files_no_leases() { ) .unwrap(); let digest_2 = Digest(fingerprint_2, 10); - block_on(store.store_bytes(EntryType::File, bytes_1.clone(), false)).expect("Error storing"); - block_on(store.store_bytes(EntryType::File, bytes_2.clone(), false)).expect("Error storing"); + store + .store_bytes(EntryType::File, bytes_1.clone(), false) + .compat() + .await + .expect("Error storing"); + store + .store_bytes(EntryType::File, bytes_2.clone(), false) + .compat() + .await + .expect("Error storing"); store .shrink(1, ShrinkBehavior::Fast) .expect("Error shrinking"); assert_eq!( - load_bytes(&store, EntryType::File, digest_1), + load_bytes(&store, EntryType::File, digest_1).await, Ok(None), "Should have garbage collected {:?}", fingerprint_1 ); assert_eq!( - load_bytes(&store, EntryType::File, digest_2), + load_bytes(&store, EntryType::File, digest_2).await, Ok(None), "Should have garbage collected {:?}", fingerprint_2 ); } -#[test] -fn garbage_collect_remove_one_of_two_directories_no_leases() { +#[tokio::test] +async fn garbage_collect_remove_one_of_two_directories_no_leases() { let dir = TempDir::new().unwrap(); let testdir = TestDirectory::containing_roland(); let other_testdir = TestDirectory::containing_dnalor(); let store = new_store(dir.path()); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), false)).expect("Error storing"); - block_on(store.store_bytes(EntryType::Directory, other_testdir.bytes(), false)) + store + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Error storing"); + store + .store_bytes(EntryType::Directory, other_testdir.bytes(), false) + .compat() + .await .expect("Error storing"); store .shrink(80, ShrinkBehavior::Fast) .expect("Error shrinking"); let mut entries = Vec::new(); - entries - .push(load_bytes(&store, EntryType::Directory, testdir.digest()).expect("Error loading bytes")); entries.push( - load_bytes(&store, EntryType::Directory, other_testdir.digest()).expect("Error loading bytes"), + load_bytes(&store, EntryType::Directory, testdir.digest()) + .await + .expect("Error loading bytes"), + ); + entries.push( + load_bytes(&store, EntryType::Directory, other_testdir.digest()) + .await + .expect("Error loading bytes"), ); assert_eq!( 1, @@ -233,106 +299,137 @@ fn garbage_collect_remove_one_of_two_directories_no_leases() { ); } -#[test] -fn garbage_collect_remove_file_with_leased_directory() { +#[tokio::test] +async fn garbage_collect_remove_file_with_leased_directory() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let testdir = TestDirectory::containing_roland(); let testdata = TestData::fourty_chars(); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), true)).expect("Error storing"); + store + .store_bytes(EntryType::Directory, testdir.bytes(), true) + .compat() + .await + .expect("Error storing"); - block_on(store.store_bytes(EntryType::File, testdata.bytes(), false)).expect("Error storing"); + store + .store_bytes(EntryType::File, testdata.bytes(), false) + .compat() + .await + .expect("Error storing"); store .shrink(80, ShrinkBehavior::Fast) .expect("Error shrinking"); assert_eq!( - load_bytes(&store, EntryType::File, testdata.digest()), + load_bytes(&store, EntryType::File, testdata.digest()).await, Ok(None), "File was present when it should've been garbage 
collected" ); assert_eq!( - load_bytes(&store, EntryType::Directory, testdir.digest()), + load_bytes(&store, EntryType::Directory, testdir.digest()).await, Ok(Some(testdir.bytes())), "Directory was missing despite lease" ); } -#[test] -fn garbage_collect_remove_file_while_leased_file() { +#[tokio::test] +async fn garbage_collect_remove_file_while_leased_file() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let testdir = TestDirectory::containing_roland(); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), false)).expect("Error storing"); + store + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Error storing"); let fourty_chars = TestData::fourty_chars(); - block_on(store.store_bytes(EntryType::File, fourty_chars.bytes(), true)).expect("Error storing"); + store + .store_bytes(EntryType::File, fourty_chars.bytes(), true) + .compat() + .await + .expect("Error storing"); store .shrink(80, ShrinkBehavior::Fast) .expect("Error shrinking"); assert_eq!( - load_bytes(&store, EntryType::File, fourty_chars.digest()), + load_bytes(&store, EntryType::File, fourty_chars.digest()).await, Ok(Some(fourty_chars.bytes())), "File was missing despite lease" ); assert_eq!( - load_bytes(&store, EntryType::Directory, testdir.digest()), + load_bytes(&store, EntryType::Directory, testdir.digest()).await, Ok(None), "Directory was present when it should've been garbage collected" ); } -#[test] -fn garbage_collect_fail_because_too_many_leases() { +#[tokio::test] +async fn garbage_collect_fail_because_too_many_leases() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let testdir = TestDirectory::containing_roland(); let fourty_chars = TestData::fourty_chars(); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), true)).expect("Error storing"); - block_on(store.store_bytes(EntryType::File, fourty_chars.bytes(), true)).expect("Error storing"); + store + .store_bytes(EntryType::Directory, testdir.bytes(), true) + .compat() + .await + .expect("Error storing"); + store + .store_bytes(EntryType::File, fourty_chars.bytes(), true) + .compat() + .await + .expect("Error storing"); - block_on(store.store_bytes(EntryType::File, TestData::roland().bytes(), false)) + store + .store_bytes(EntryType::File, TestData::roland().bytes(), false) + .compat() + .await .expect("Error storing"); assert_eq!(store.shrink(80, ShrinkBehavior::Fast), Ok(160)); assert_eq!( - load_bytes(&store, EntryType::File, fourty_chars.digest()), + load_bytes(&store, EntryType::File, fourty_chars.digest()).await, Ok(Some(fourty_chars.bytes())), "Leased file should still be present" ); assert_eq!( - load_bytes(&store, EntryType::Directory, testdir.digest()), + load_bytes(&store, EntryType::Directory, testdir.digest()).await, Ok(Some(testdir.bytes())), "Leased directory should still be present" ); // Whether the unleased file is present is undefined. 
} -#[test] -fn garbage_collect_and_compact() { +async fn write_one_meg(store: &ByteStore, byte: u8) { + let mut bytes = BytesMut::with_capacity(1024 * 1024); + for _ in 0..1024 * 1024 { + bytes.put(byte); + } + store + .store_bytes(EntryType::File, bytes.freeze(), false) + .compat() + .await + .expect("Error storing"); +} + +#[tokio::test] +async fn garbage_collect_and_compact() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - let write_one_meg = |byte: u8| { - let mut bytes = BytesMut::with_capacity(1024 * 1024); - for _ in 0..1024 * 1024 { - bytes.put(byte); - } - block_on(store.store_bytes(EntryType::File, bytes.freeze(), false)).expect("Error storing"); - }; + write_one_meg(&store, b'0').await; - write_one_meg(b'0'); - - write_one_meg(b'1'); + write_one_meg(&store, b'1').await; let size = get_directory_size(dir.path()); assert!( @@ -353,103 +450,128 @@ fn garbage_collect_and_compact() { ); } -#[test] -fn entry_type_for_file() { +#[tokio::test] +async fn entry_type_for_file() { let testdata = TestData::roland(); let testdir = TestDirectory::containing_roland(); let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), false)).expect("Error storing"); - prime_store_with_file_bytes(&store, testdata.bytes()); + store + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Error storing"); + prime_store_with_file_bytes(&store, testdata.bytes()).await; assert_eq!( store.entry_type(&testdata.fingerprint()), Ok(Some(EntryType::File)) ) } -#[test] -fn entry_type_for_directory() { +#[tokio::test] +async fn entry_type_for_directory() { let testdata = TestData::roland(); let testdir = TestDirectory::containing_roland(); let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), false)).expect("Error storing"); - prime_store_with_file_bytes(&store, testdata.bytes()); + store + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Error storing"); + prime_store_with_file_bytes(&store, testdata.bytes()).await; assert_eq!( store.entry_type(&testdir.fingerprint()), Ok(Some(EntryType::Directory)) ) } -#[test] -fn entry_type_for_missing() { +#[tokio::test] +async fn entry_type_for_missing() { let testdata = TestData::roland(); let testdir = TestDirectory::containing_roland(); let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - block_on(store.store_bytes(EntryType::Directory, testdir.bytes(), false)).expect("Error storing"); - prime_store_with_file_bytes(&store, testdata.bytes()); + store + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Error storing"); + prime_store_with_file_bytes(&store, testdata.bytes()).await; assert_eq!( store.entry_type(&TestDirectory::recursive().fingerprint()), Ok(None) ) } -#[test] -pub fn empty_file_is_known() { +#[tokio::test] +async fn empty_file_is_known() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let empty_file = TestData::empty(); assert_eq!( - block_on(store.load_bytes_with(EntryType::File, empty_file.digest(), |b| b)), + store + .load_bytes_with(EntryType::File, empty_file.digest(), |b| b) + .compat() + .await, Ok(Some(empty_file.bytes())), ) } -#[test] -pub fn empty_directory_is_known() { +#[tokio::test] +async fn empty_directory_is_known() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); let empty_dir 
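
`write_one_meg` moves from a local closure that called `block_on` to a free async helper borrowing the store, so each test just awaits it. A sketch of that refactor with a hypothetical in-memory stand-in for `ByteStore`:

```rust
use std::sync::Mutex;

// Hypothetical stand-in for ByteStore, just enough to show the helper's shape.
struct InMemoryStore {
    blobs: Mutex<Vec<Vec<u8>>>,
}

impl InMemoryStore {
    fn new() -> InMemoryStore {
        InMemoryStore { blobs: Mutex::new(Vec::new()) }
    }

    async fn store_bytes(&self, bytes: Vec<u8>) -> Result<(), String> {
        self.blobs.lock().unwrap().push(bytes);
        Ok(())
    }
}

// Formerly a closure wrapping block_on; now a plain async helper that is awaited.
async fn write_one_meg(store: &InMemoryStore, byte: u8) {
    let bytes = vec![byte; 1024 * 1024];
    store.store_bytes(bytes).await.expect("Error storing");
}

#[tokio::test]
async fn writes_two_megs() {
    let store = InMemoryStore::new();
    write_one_meg(&store, b'0').await;
    write_one_meg(&store, b'1').await;
    assert_eq!(store.blobs.lock().unwrap().len(), 2);
}
```
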
= TestDirectory::empty(); assert_eq!( - block_on(store.load_bytes_with(EntryType::Directory, empty_dir.digest(), |b| b)), + store + .load_bytes_with(EntryType::Directory, empty_dir.digest(), |b| b) + .compat() + .await, Ok(Some(empty_dir.bytes())), ) } -#[test] -pub fn all_digests() { +#[tokio::test] +async fn all_digests() { let dir = TempDir::new().unwrap(); let store = new_store(dir.path()); - let digest = prime_store_with_file_bytes(&store, TestData::roland().bytes()); + let digest = prime_store_with_file_bytes(&store, TestData::roland().bytes()).await; assert_eq!(Ok(vec![digest]), store.all_digests(EntryType::File)); } pub fn new_store>(dir: P) -> ByteStore { - ByteStore::new(task_executor::Executor::new(), dir).unwrap() + ByteStore::new(task_executor::Executor::new(Handle::current()), dir).unwrap() } -pub fn load_file_bytes(store: &ByteStore, digest: Digest) -> Result, String> { - load_bytes(&store, EntryType::File, digest) +pub async fn load_file_bytes(store: &ByteStore, digest: Digest) -> Result, String> { + load_bytes(&store, EntryType::File, digest).await } -pub fn load_directory_proto_bytes( +pub async fn load_directory_proto_bytes( store: &ByteStore, digest: Digest, ) -> Result, String> { - load_bytes(&store, EntryType::Directory, digest) + load_bytes(&store, EntryType::Directory, digest).await } -fn load_bytes( +pub async fn load_bytes( store: &ByteStore, entry_type: EntryType, digest: Digest, ) -> Result, String> { - block_on(store.load_bytes_with(entry_type, digest, |b| b)) + store + .load_bytes_with(entry_type, digest, |b| b) + .compat() + .await } -fn prime_store_with_file_bytes(store: &ByteStore, bytes: Bytes) -> Digest { - block_on(store.store_bytes(EntryType::File, bytes, false)).expect("Error storing file bytes") +async fn prime_store_with_file_bytes(store: &ByteStore, bytes: Bytes) -> Digest { + store + .store_bytes(EntryType::File, bytes, false) + .compat() + .await + .expect("Error storing file bytes") } fn get_directory_size(path: &Path) -> usize { diff --git a/src/rust/engine/fs/store/src/remote_tests.rs b/src/rust/engine/fs/store/src/remote_tests.rs index 5aecdf89bb3..d0b182b84bf 100644 --- a/src/rust/engine/fs/store/src/remote_tests.rs +++ b/src/rust/engine/fs/store/src/remote_tests.rs @@ -1,6 +1,7 @@ use crate::remote::ByteStore; use crate::{EntryType, MEGABYTES}; use bytes::Bytes; +use futures::compat::Future01CompatExt; use hashing::Digest; use mock::StubCAS; use serverset::BackoffConfig; @@ -9,73 +10,78 @@ use std::time::Duration; use testutil::data::{TestData, TestDirectory}; use workunit_store::WorkUnitStore; -use crate::tests::{big_file_bytes, big_file_digest, big_file_fingerprint, block_on, new_cas}; +use crate::tests::{big_file_bytes, big_file_digest, big_file_fingerprint, new_cas}; -#[test] -fn loads_file() { +#[tokio::test] +async fn loads_file() { let testdata = TestData::roland(); let cas = new_cas(10); assert_eq!( - load_file_bytes(&new_byte_store(&cas), testdata.digest()).unwrap(), + load_file_bytes(&new_byte_store(&cas), testdata.digest()) + .await + .unwrap(), Some(testdata.bytes()) ); } -#[test] -fn missing_file() { +#[tokio::test] +async fn missing_file() { let cas = StubCAS::empty(); assert_eq!( - load_file_bytes(&new_byte_store(&cas), TestData::roland().digest()), + load_file_bytes(&new_byte_store(&cas), TestData::roland().digest()).await, Ok(None) ); } -#[test] -fn load_directory() { +#[tokio::test] +async fn load_directory() { let cas = new_cas(10); let testdir = TestDirectory::containing_roland(); assert_eq!( - 
load_directory_proto_bytes(&new_byte_store(&cas), testdir.digest()), + load_directory_proto_bytes(&new_byte_store(&cas), testdir.digest()).await, Ok(Some(testdir.bytes())) ); } -#[test] -fn missing_directory() { +#[tokio::test] +async fn missing_directory() { let cas = StubCAS::empty(); assert_eq!( load_directory_proto_bytes( &new_byte_store(&cas), TestDirectory::containing_roland().digest() - ), + ) + .await, Ok(None) ); } -#[test] -fn load_file_grpc_error() { +#[tokio::test] +async fn load_file_grpc_error() { let cas = StubCAS::always_errors(); - let error = - load_file_bytes(&new_byte_store(&cas), TestData::roland().digest()).expect_err("Want error"); + let error = load_file_bytes(&new_byte_store(&cas), TestData::roland().digest()) + .await + .expect_err("Want error"); assert!( error.contains("StubCAS is configured to always fail"), format!("Bad error message, got: {}", error) ) } -#[test] -fn load_directory_grpc_error() { +#[tokio::test] +async fn load_directory_grpc_error() { let cas = StubCAS::always_errors(); let error = load_directory_proto_bytes( &new_byte_store(&cas), TestDirectory::containing_roland().digest(), ) + .await .expect_err("Want error"); assert!( error.contains("StubCAS is configured to always fail"), @@ -83,58 +89,61 @@ fn load_directory_grpc_error() { ) } -#[test] -fn fetch_less_than_one_chunk() { +#[tokio::test] +async fn fetch_less_than_one_chunk() { let testdata = TestData::roland(); let cas = new_cas(testdata.bytes().len() + 1); assert_eq!( - load_file_bytes(&new_byte_store(&cas), testdata.digest()), + load_file_bytes(&new_byte_store(&cas), testdata.digest()).await, Ok(Some(testdata.bytes())) ) } -#[test] -fn fetch_exactly_one_chunk() { +#[tokio::test] +async fn fetch_exactly_one_chunk() { let testdata = TestData::roland(); let cas = new_cas(testdata.bytes().len()); assert_eq!( - load_file_bytes(&new_byte_store(&cas), testdata.digest()), + load_file_bytes(&new_byte_store(&cas), testdata.digest()).await, Ok(Some(testdata.bytes())) ) } -#[test] -fn fetch_multiple_chunks_exact() { +#[tokio::test] +async fn fetch_multiple_chunks_exact() { let testdata = TestData::roland(); let cas = new_cas(1); assert_eq!( - load_file_bytes(&new_byte_store(&cas), testdata.digest()), + load_file_bytes(&new_byte_store(&cas), testdata.digest()).await, Ok(Some(testdata.bytes())) ) } -#[test] -fn fetch_multiple_chunks_nonfactor() { +#[tokio::test] +async fn fetch_multiple_chunks_nonfactor() { let testdata = TestData::roland(); let cas = new_cas(9); assert_eq!( - load_file_bytes(&new_byte_store(&cas), testdata.digest()), + load_file_bytes(&new_byte_store(&cas), testdata.digest()).await, Ok(Some(testdata.bytes())) ) } -#[test] -fn write_file_one_chunk() { +#[tokio::test] +async fn write_file_one_chunk() { let testdata = TestData::roland(); let cas = StubCAS::empty(); let store = new_byte_store(&cas); assert_eq!( - block_on(store.store_bytes(testdata.bytes(), WorkUnitStore::new())), + store + .store_bytes(testdata.bytes(), WorkUnitStore::new()) + .compat() + .await, Ok(testdata.digest()) ); @@ -142,8 +151,8 @@ fn write_file_one_chunk() { assert_eq!(blobs.get(&testdata.fingerprint()), Some(&testdata.bytes())); } -#[test] -fn write_file_multiple_chunks() { +#[tokio::test] +async fn write_file_multiple_chunks() { let cas = StubCAS::empty(); let store = ByteStore::new( @@ -165,7 +174,10 @@ fn write_file_multiple_chunks() { let fingerprint = big_file_fingerprint(); assert_eq!( - block_on(store.store_bytes(all_the_henries.clone(), WorkUnitStore::new())), + store + 
.store_bytes(all_the_henries.clone(), WorkUnitStore::new()) + .compat() + .await, Ok(big_file_digest()) ); @@ -186,14 +198,17 @@ fn write_file_multiple_chunks() { } } -#[test] -fn write_empty_file() { +#[tokio::test] +async fn write_empty_file() { let empty_file = TestData::empty(); let cas = StubCAS::empty(); let store = new_byte_store(&cas); assert_eq!( - block_on(store.store_bytes(empty_file.bytes(), WorkUnitStore::new())), + store + .store_bytes(empty_file.bytes(), WorkUnitStore::new()) + .compat() + .await, Ok(empty_file.digest()) ); @@ -204,12 +219,15 @@ fn write_empty_file() { ); } -#[test] -fn write_file_errors() { +#[tokio::test] +async fn write_file_errors() { let cas = StubCAS::always_errors(); let store = new_byte_store(&cas); - let error = block_on(store.store_bytes(TestData::roland().bytes(), WorkUnitStore::new())) + let error = store + .store_bytes(TestData::roland().bytes(), WorkUnitStore::new()) + .compat() + .await .expect_err("Want error"); assert!( error.contains("Error from server"), @@ -221,8 +239,8 @@ fn write_file_errors() { ); } -#[test] -fn write_connection_error() { +#[tokio::test] +async fn write_connection_error() { let store = ByteStore::new( vec![String::from("doesnotexist.example")], None, @@ -236,7 +254,10 @@ fn write_connection_error() { 1, ) .unwrap(); - let error = block_on(store.store_bytes(TestData::roland().bytes(), WorkUnitStore::new())) + let error = store + .store_bytes(TestData::roland().bytes(), WorkUnitStore::new()) + .compat() + .await .expect_err("Want error"); assert!( error.contains("Error attempting to upload digest"), @@ -244,22 +265,25 @@ fn write_connection_error() { ); } -#[test] -fn list_missing_digests_none_missing() { +#[tokio::test] +async fn list_missing_digests_none_missing() { let cas = new_cas(1024); let store = new_byte_store(&cas); assert_eq!( - block_on(store.list_missing_digests( - store.find_missing_blobs_request(vec![TestData::roland().digest()].iter()), - WorkUnitStore::new(), - )), + store + .list_missing_digests( + store.find_missing_blobs_request(vec![TestData::roland().digest()].iter()), + WorkUnitStore::new(), + ) + .compat() + .await, Ok(HashSet::new()) ); } -#[test] -fn list_missing_digests_some_missing() { +#[tokio::test] +async fn list_missing_digests_some_missing() { let cas = StubCAS::empty(); let store = new_byte_store(&cas); @@ -270,33 +294,39 @@ fn list_missing_digests_some_missing() { digest_set.insert(digest); assert_eq!( - block_on(store.list_missing_digests( - store.find_missing_blobs_request(vec![digest].iter()), - WorkUnitStore::new(), - )), + store + .list_missing_digests( + store.find_missing_blobs_request(vec![digest].iter()), + WorkUnitStore::new(), + ) + .compat() + .await, Ok(digest_set) ); } -#[test] -fn list_missing_digests_error() { +#[tokio::test] +async fn list_missing_digests_error() { let cas = StubCAS::always_errors(); let store = new_byte_store(&cas); - let error = block_on(store.list_missing_digests( - store.find_missing_blobs_request(vec![TestData::roland().digest()].iter()), - WorkUnitStore::new(), - )) - .expect_err("Want error"); + let error = store + .list_missing_digests( + store.find_missing_blobs_request(vec![TestData::roland().digest()].iter()), + WorkUnitStore::new(), + ) + .compat() + .await + .expect_err("Want error"); assert!( error.contains("StubCAS is configured to always fail"), format!("Bad error message, got: {}", error) ); } -#[test] -fn reads_from_multiple_cas_servers() { +#[tokio::test] +async fn reads_from_multiple_cas_servers() { let roland = TestData::roland(); 
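Editor's sketch of the bridge pattern these remote_tests hunks repeat: the test body becomes an async fn under #[tokio::test], and a store method that still returns a futures 0.1 future is awaited via futures::compat::Future01CompatExt. This is an illustration only, assuming tokio 0.2 with the macros feature and futures 0.3 with the compat feature (as this change's Cargo setup uses); legacy_store_len is a hypothetical stand-in for any not-yet-ported 0.1 API.

use futures::compat::Future01CompatExt;
use futures01::future::{self, Future};

// Hypothetical stand-in for a method that still returns a futures 0.1 future.
fn legacy_store_len(bytes: Vec<u8>) -> Box<dyn Future<Item = usize, Error = String> + Send> {
  Box::new(future::ok(bytes.len()))
}

#[tokio::test]
async fn awaits_a_futures01_api() {
  let len = legacy_store_len(vec![0u8; 4])
    .compat() // futures 0.1 -> std future
    .await
    .expect("Error storing");
  assert_eq!(len, 4);
}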
let catnip = TestData::catnip(); @@ -318,12 +348,12 @@ fn reads_from_multiple_cas_servers() { .unwrap(); assert_eq!( - load_file_bytes(&store, roland.digest()), + load_file_bytes(&store, roland.digest()).await, Ok(Some(roland.bytes())) ); assert_eq!( - load_file_bytes(&store, catnip.digest()), + load_file_bytes(&store, catnip.digest()).await, Ok(Some(catnip.bytes())) ); @@ -347,21 +377,24 @@ fn new_byte_store(cas: &StubCAS) -> ByteStore { .unwrap() } -pub fn load_file_bytes(store: &ByteStore, digest: Digest) -> Result, String> { - load_bytes(&store, EntryType::File, digest) +pub async fn load_file_bytes(store: &ByteStore, digest: Digest) -> Result, String> { + load_bytes(&store, EntryType::File, digest).await } -pub fn load_directory_proto_bytes( +pub async fn load_directory_proto_bytes( store: &ByteStore, digest: Digest, ) -> Result, String> { - load_bytes(&store, EntryType::Directory, digest) + load_bytes(&store, EntryType::Directory, digest).await } -fn load_bytes( +async fn load_bytes( store: &ByteStore, entry_type: EntryType, digest: Digest, ) -> Result, String> { - block_on(store.load_bytes_with(entry_type, digest, |b| b, WorkUnitStore::new())) + store + .load_bytes_with(entry_type, digest, |b| b, WorkUnitStore::new()) + .compat() + .await } diff --git a/src/rust/engine/fs/store/src/snapshot.rs b/src/rust/engine/fs/store/src/snapshot.rs index e5aaab846ca..9ddf8f27805 100644 --- a/src/rust/engine/fs/store/src/snapshot.rs +++ b/src/rust/engine/fs/store/src/snapshot.rs @@ -5,8 +5,8 @@ use crate::Store; use bazel_protos; use boxfuture::{try_future, BoxFuture, Boxable}; use fs::{Dir, File, GlobMatching, PathGlobs, PathStat, PosixFS, SymlinkBehavior}; -use futures01::future::{self, join_all}; -use futures01::Future; +use futures::future::TryFutureExt; +use futures01::future::{self, join_all, Future}; use hashing::{Digest, EMPTY_DIGEST}; use indexmap::{self, IndexMap}; use itertools::Itertools; @@ -547,6 +547,7 @@ impl Snapshot { posix_fs .expand(path_globs) + .compat() .map_err(|err| format!("Error expanding globs: {}", err)) .and_then(|path_stats| { Snapshot::from_path_stats( @@ -710,6 +711,7 @@ impl StoreFileByDigest for OneOffStoreFileByDigest { self .posix_fs .read_file(&file) + .compat() .map_err(move |err| format!("Error reading file {:?}: {:?}", file, err)) .and_then(move |content| store.store_file_bytes(content.content, true)) .to_boxed() diff --git a/src/rust/engine/fs/store/src/snapshot_tests.rs b/src/rust/engine/fs/store/src/snapshot_tests.rs index 3853035194a..4a698646b5c 100644 --- a/src/rust/engine/fs/store/src/snapshot_tests.rs +++ b/src/rust/engine/fs/store/src/snapshot_tests.rs @@ -1,8 +1,10 @@ +use futures::compat::Future01CompatExt; use futures01::future::Future; use hashing::{Digest, Fingerprint}; use tempfile; use testutil::data::TestDirectory; use testutil::make_file; +use tokio::runtime::Handle; use crate::{OneOffStoreFileByDigest, Snapshot, Store}; use fs::{ @@ -22,9 +24,8 @@ fn setup() -> ( tempfile::TempDir, Arc, OneOffStoreFileByDigest, - task_executor::Executor, ) { - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); // TODO: Pass a remote CAS address through. 
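Editor's sketch of the reverse bridge used in the snapshot.rs hunk above, where a now-std future (e.g. posix_fs.expand) is fed back into a futures 0.1 combinator chain via futures::future::TryFutureExt::compat. Same crate assumptions as the previous sketch; read_len and legacy_pipeline are illustrative names, and the async future is boxed first because the 0.1-facing Compat wrapper requires an Unpin future (this is why boundary APIs in this port return BoxFuture).

use futures::compat::Future01CompatExt;
use futures::future::{FutureExt, TryFutureExt};
use futures01::future::Future;

// Illustrative async fn standing in for a ported fs API.
async fn read_len(name: String) -> Result<usize, String> {
  Ok(name.len())
}

// A legacy consumer that still wants a futures 0.1 future.
fn legacy_pipeline(name: String) -> impl Future<Item = usize, Error = String> {
  read_len(name)
    .boxed() // BoxFuture is Unpin, which Compat requires
    .compat() // std future -> futures 0.1 future
    .map(|len| len * 2)
}

#[tokio::test]
async fn bridges_back_to_futures01() {
  assert_eq!(legacy_pipeline("roland".to_owned()).compat().await, Ok(12));
}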
let store = Store::local_only( executor.clone(), @@ -35,27 +36,24 @@ fn setup() -> ( ) .unwrap(); let dir = tempfile::Builder::new().prefix("root").tempdir().unwrap(); - let posix_fs = Arc::new(PosixFS::new(dir.path(), &[], task_executor::Executor::new()).unwrap()); + let posix_fs = Arc::new(PosixFS::new(dir.path(), &[], executor).unwrap()); let file_saver = OneOffStoreFileByDigest::new(store.clone(), posix_fs.clone()); - (store, dir, posix_fs, file_saver, executor) + (store, dir, posix_fs, file_saver) } -#[test] -fn snapshot_one_file() { - let (store, dir, posix_fs, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_one_file() { + let (store, dir, posix_fs, digester) = setup(); let file_name = PathBuf::from("roland"); make_file(&dir.path().join(&file_name), STR.as_bytes(), 0o600); - let path_stats = expand_all_sorted(posix_fs, &runtime); - let snapshot = runtime - .block_on(Snapshot::from_path_stats( - store, - &digester, - path_stats.clone(), - WorkUnitStore::new(), - )) - .unwrap(); + let path_stats = expand_all_sorted(posix_fs).await; + let snapshot = + Snapshot::from_path_stats(store, &digester, path_stats.clone(), WorkUnitStore::new()) + .compat() + .await + .unwrap(); assert_eq!( snapshot, Snapshot { @@ -71,24 +69,21 @@ fn snapshot_one_file() { ); } -#[test] -fn snapshot_recursive_directories() { - let (store, dir, posix_fs, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_recursive_directories() { + let (store, dir, posix_fs, digester) = setup(); let cats = PathBuf::from("cats"); let roland = cats.join("roland"); std::fs::create_dir_all(&dir.path().join(cats)).unwrap(); make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600); - let path_stats = expand_all_sorted(posix_fs, &runtime); - let snapshot = runtime - .block_on(Snapshot::from_path_stats( - store, - &digester, - path_stats.clone(), - WorkUnitStore::new(), - )) - .unwrap(); + let path_stats = expand_all_sorted(posix_fs).await; + let snapshot = + Snapshot::from_path_stats(store, &digester, path_stats.clone(), WorkUnitStore::new()) + .compat() + .await + .unwrap(); assert_eq!( snapshot, Snapshot { @@ -104,39 +99,37 @@ fn snapshot_recursive_directories() { ); } -#[test] -fn snapshot_from_digest() { - let (store, dir, posix_fs, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_from_digest() { + let (store, dir, posix_fs, digester) = setup(); let cats = PathBuf::from("cats"); let roland = cats.join("roland"); std::fs::create_dir_all(&dir.path().join(cats)).unwrap(); make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600); - let path_stats = expand_all_sorted(posix_fs, &runtime); - let expected_snapshot = runtime - .block_on(Snapshot::from_path_stats( - store.clone(), - &digester, - path_stats.clone(), - WorkUnitStore::new(), - )) - .unwrap(); + let path_stats = expand_all_sorted(posix_fs).await; + let expected_snapshot = Snapshot::from_path_stats( + store.clone(), + &digester, + path_stats.clone(), + WorkUnitStore::new(), + ) + .compat() + .await + .unwrap(); assert_eq!( expected_snapshot, - runtime - .block_on(Snapshot::from_digest( - store, - expected_snapshot.digest, - WorkUnitStore::new() - )) + Snapshot::from_digest(store, expected_snapshot.digest, WorkUnitStore::new()) + .compat() + .await .unwrap() ); } -#[test] -fn snapshot_recursive_directories_including_empty() { - let (store, dir, posix_fs, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_recursive_directories_including_empty() { + let (store, dir, posix_fs, digester) = setup(); let cats = 
PathBuf::from("cats"); let roland = cats.join("roland"); @@ -147,18 +140,19 @@ fn snapshot_recursive_directories_including_empty() { std::fs::create_dir_all(&dir.path().join(&llamas)).unwrap(); make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600); - let sorted_path_stats = expand_all_sorted(posix_fs, &runtime); + let sorted_path_stats = expand_all_sorted(posix_fs).await; let mut unsorted_path_stats = sorted_path_stats.clone(); unsorted_path_stats.reverse(); assert_eq!( - runtime - .block_on(Snapshot::from_path_stats( - store, - &digester, - unsorted_path_stats.clone(), - WorkUnitStore::new(), - )) - .unwrap(), + Snapshot::from_path_stats( + store, + &digester, + unsorted_path_stats.clone(), + WorkUnitStore::new(), + ) + .compat() + .await + .unwrap(), Snapshot { digest: Digest( Fingerprint::from_hex_string( @@ -172,25 +166,31 @@ fn snapshot_recursive_directories_including_empty() { ); } -#[test] -fn merge_directories_two_files() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn merge_directories_two_files() { + let (store, _, _, _) = setup(); let containing_roland = TestDirectory::containing_roland(); let containing_treats = TestDirectory::containing_treats(); - runtime - .block_on(store.record_directory(&containing_roland.directory(), false)) + store + .record_directory(&containing_roland.directory(), false) + .compat() + .await .expect("Storing roland directory"); - runtime - .block_on(store.record_directory(&containing_treats.directory(), false)) + store + .record_directory(&containing_treats.directory(), false) + .compat() + .await .expect("Storing treats directory"); - let result = runtime.block_on(Snapshot::merge_directories( + let result = Snapshot::merge_directories( store, vec![containing_treats.digest(), containing_roland.digest()], WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!( result, @@ -198,27 +198,32 @@ fn merge_directories_two_files() { ); } -#[test] -fn merge_directories_clashing_files() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn merge_directories_clashing_files() { + let (store, _, _, _) = setup(); let containing_roland = TestDirectory::containing_roland(); let containing_wrong_roland = TestDirectory::containing_wrong_roland(); - runtime - .block_on(store.record_directory(&containing_roland.directory(), false)) + store + .record_directory(&containing_roland.directory(), false) + .compat() + .await .expect("Storing roland directory"); - runtime - .block_on(store.record_directory(&containing_wrong_roland.directory(), false)) + store + .record_directory(&containing_wrong_roland.directory(), false) + .compat() + .await .expect("Storing wrong roland directory"); - let err = runtime - .block_on(Snapshot::merge_directories( - store, - vec![containing_roland.digest(), containing_wrong_roland.digest()], - WorkUnitStore::new(), - )) - .expect_err("Want error merging"); + let err = Snapshot::merge_directories( + store, + vec![containing_roland.digest(), containing_wrong_roland.digest()], + WorkUnitStore::new(), + ) + .compat() + .await + .expect_err("Want error merging"); assert!( err.contains("roland"), @@ -227,28 +232,34 @@ fn merge_directories_clashing_files() { ); } -#[test] -fn merge_directories_same_files() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn merge_directories_same_files() { + let (store, _, _, _) = setup(); let containing_roland = TestDirectory::containing_roland(); let containing_roland_and_treats = TestDirectory::containing_roland_and_treats(); - runtime - 
.block_on(store.record_directory(&containing_roland.directory(), false)) + store + .record_directory(&containing_roland.directory(), false) + .compat() + .await .expect("Storing roland directory"); - runtime - .block_on(store.record_directory(&containing_roland_and_treats.directory(), false)) + store + .record_directory(&containing_roland_and_treats.directory(), false) + .compat() + .await .expect("Storing treats directory"); - let result = runtime.block_on(Snapshot::merge_directories( + let result = Snapshot::merge_directories( store, vec![ containing_roland.digest(), containing_roland_and_treats.digest(), ], WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!( result, @@ -256,9 +267,9 @@ fn merge_directories_same_files() { ); } -#[test] -fn snapshot_merge_two_files() { - let (store, tempdir, _, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_merge_two_files() { + let (store, tempdir, _, digester) = setup(); let common_dir_name = "tower"; let common_dir = PathBuf::from(common_dir_name); @@ -277,33 +288,34 @@ fn snapshot_merge_two_files() { true, ); - let snapshot1 = runtime - .block_on(Snapshot::from_path_stats( - store.clone(), - &digester, - vec![dir.clone(), file1.clone()], - WorkUnitStore::new(), - )) - .unwrap(); + let snapshot1 = Snapshot::from_path_stats( + store.clone(), + &digester, + vec![dir.clone(), file1.clone()], + WorkUnitStore::new(), + ) + .compat() + .await + .unwrap(); - let snapshot2 = runtime - .block_on(Snapshot::from_path_stats( - store.clone(), - &digester, - vec![dir.clone(), file2.clone()], - WorkUnitStore::new(), - )) - .unwrap(); + let snapshot2 = Snapshot::from_path_stats( + store.clone(), + &digester, + vec![dir.clone(), file2.clone()], + WorkUnitStore::new(), + ) + .compat() + .await + .unwrap(); - let merged = runtime - .block_on(Snapshot::merge( - store.clone(), - &[snapshot1, snapshot2], - WorkUnitStore::new(), - )) + let merged = Snapshot::merge(store.clone(), &[snapshot1, snapshot2], WorkUnitStore::new()) + .compat() + .await .unwrap(); - let merged_root_directory = runtime - .block_on(store.load_directory(merged.digest, WorkUnitStore::new())) + let merged_root_directory = store + .load_directory(merged.digest, WorkUnitStore::new()) + .compat() + .await .unwrap() .unwrap() .0; @@ -315,8 +327,10 @@ fn snapshot_merge_two_files() { let merged_child_dirnode = merged_root_directory.directories[0].clone(); let merged_child_dirnode_digest: Result = merged_child_dirnode.get_digest().into(); - let merged_child_directory = runtime - .block_on(store.load_directory(merged_child_dirnode_digest.unwrap(), WorkUnitStore::new())) + let merged_child_directory = store + .load_directory(merged_child_dirnode_digest.unwrap(), WorkUnitStore::new()) + .compat() + .await .unwrap() .unwrap() .0; @@ -332,9 +346,9 @@ fn snapshot_merge_two_files() { ); } -#[test] -fn snapshot_merge_colliding() { - let (store, tempdir, _, digester, runtime) = setup(); +#[tokio::test] +async fn snapshot_merge_colliding() { + let (store, tempdir, _, digester) = setup(); let file = make_file_stat( tempdir.path(), @@ -343,23 +357,21 @@ fn snapshot_merge_colliding() { false, ); - let snapshot1 = runtime - .block_on(Snapshot::from_path_stats( - store.clone(), - &digester, - vec![file.clone()], - WorkUnitStore::new(), - )) - .unwrap(); + let snapshot1 = Snapshot::from_path_stats( + store.clone(), + &digester, + vec![file.clone()], + WorkUnitStore::new(), + ) + .compat() + .await + .unwrap(); - let snapshot2 = runtime - .block_on(Snapshot::from_path_stats( - store.clone(), - 
&digester, - vec![file], - WorkUnitStore::new(), - )) - .unwrap(); + let snapshot2 = + Snapshot::from_path_stats(store.clone(), &digester, vec![file], WorkUnitStore::new()) + .compat() + .await + .unwrap(); let merged_res = Snapshot::merge(store.clone(), &[snapshot1, snapshot2], WorkUnitStore::new()).wait(); @@ -373,89 +385,101 @@ fn snapshot_merge_colliding() { } } -#[test] -fn strip_empty_prefix() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_empty_prefix() { + let (store, _, _, _) = setup(); let dir = TestDirectory::nested(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( - store, - dir.digest(), - PathBuf::from(""), - WorkUnitStore::new(), - )); + let result = + super::Snapshot::strip_prefix(store, dir.digest(), PathBuf::from(""), WorkUnitStore::new()) + .compat() + .await; assert_eq!(result, Ok(dir.digest())); } -#[test] -fn strip_non_empty_prefix() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_non_empty_prefix() { + let (store, _, _, _) = setup(); let dir = TestDirectory::nested(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - runtime - .block_on(store.record_directory(&TestDirectory::containing_roland().directory(), false)) + store + .record_directory(&TestDirectory::containing_roland().directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("cats"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!(result, Ok(TestDirectory::containing_roland().digest())); } -#[test] -fn strip_prefix_empty_subdir() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_prefix_empty_subdir() { + let (store, _, _, _) = setup(); let dir = TestDirectory::containing_falcons_dir(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("falcons/peregrine"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!(result, Ok(TestDirectory::empty().digest())); } -#[test] -fn strip_dir_not_in_store() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_dir_not_in_store() { + let (store, _, _, _) = setup(); let digest = TestDirectory::nested().digest(); - let result = runtime.block_on(super::Snapshot::strip_prefix( - store, - digest, - PathBuf::from("cats"), - WorkUnitStore::new(), - )); + let result = + super::Snapshot::strip_prefix(store, digest, PathBuf::from("cats"), WorkUnitStore::new()) + .compat() + .await; assert_eq!(result, Err(format!("{:?} was not known", digest))); } -#[test] -fn strip_subdir_not_in_store() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_subdir_not_in_store() { + let (store, _, _, _) = setup(); let dir = TestDirectory::nested(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + 
.await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("cats"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!( result, Err(format!( @@ -465,64 +489,82 @@ fn strip_subdir_not_in_store() { ); } -#[test] -fn strip_prefix_non_matching_file() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_prefix_non_matching_file() { + let (store, _, _, _) = setup(); let dir = TestDirectory::recursive(); let child_dir = TestDirectory::containing_roland(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - runtime - .block_on(store.record_directory(&child_dir.directory(), false)) + store + .record_directory(&child_dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("cats"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!(result, Err(format!("Cannot strip prefix cats from root directory {:?} - root directory contained non-matching file named: treats", dir.digest()))); } -#[test] -fn strip_prefix_non_matching_dir() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_prefix_non_matching_dir() { + let (store, _, _, _) = setup(); let dir = TestDirectory::double_nested_dir_and_file(); let child_dir = TestDirectory::nested_dir_and_file(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - runtime - .block_on(store.record_directory(&child_dir.directory(), false)) + store + .record_directory(&child_dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("animals/cats"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!(result, Err(format!("Cannot strip prefix animals/cats from root directory {:?} - subdirectory animals contained non-matching directory named: birds", dir.digest()))); } -#[test] -fn strip_subdir_not_in_dir() { - let (store, _, _, _, runtime) = setup(); +#[tokio::test] +async fn strip_subdir_not_in_dir() { + let (store, _, _, _) = setup(); let dir = TestDirectory::nested(); - runtime - .block_on(store.record_directory(&dir.directory(), false)) + store + .record_directory(&dir.directory(), false) + .compat() + .await .expect("Error storing directory"); - runtime - .block_on(store.record_directory(&TestDirectory::containing_roland().directory(), false)) + store + .record_directory(&TestDirectory::containing_roland().directory(), false) + .compat() + .await .expect("Error storing directory"); - let result = runtime.block_on(super::Snapshot::strip_prefix( + let result = super::Snapshot::strip_prefix( store, dir.digest(), PathBuf::from("cats/ugly"), WorkUnitStore::new(), - )); + ) + .compat() + .await; assert_eq!(result, Err(format!("Cannot strip prefix cats/ugly from root directory {:?} - subdirectory cats didn't contain a directory named ugly but did contain file named: roland", dir.digest()))); } @@ -546,19 +588,18 @@ fn make_file_stat(root: &Path, relpath: &Path, contents: &[u8], 
is_executable: b ) } -fn expand_all_sorted(posix_fs: Arc, executor: &task_executor::Executor) -> Vec { - let mut v = executor - .block_on( - posix_fs.expand( - // Don't error or warn if there are no paths matched -- that is a valid state. - PathGlobs::create( - &["**".to_owned()], - StrictGlobMatching::Ignore, - GlobExpansionConjunction::AllMatch, - ) - .unwrap(), - ), +async fn expand_all_sorted(posix_fs: Arc) -> Vec { + let mut v = posix_fs + .expand( + // Don't error or warn if there are no paths matched -- that is a valid state. + PathGlobs::create( + &["**".to_owned()], + StrictGlobMatching::Ignore, + GlobExpansionConjunction::AllMatch, + ) + .unwrap(), ) + .await .unwrap(); v.sort_by(|a, b| a.path().cmp(b.path())); v diff --git a/src/rust/engine/fs/store/src/tests.rs b/src/rust/engine/fs/store/src/tests.rs index a835d109d9d..6a696b7b9b4 100644 --- a/src/rust/engine/fs/store/src/tests.rs +++ b/src/rust/engine/fs/store/src/tests.rs @@ -6,7 +6,7 @@ use crate::{ use bazel_protos; use bytes::Bytes; use digest::{Digest as DigestTrait, FixedOutput}; -use futures01::Future; +use futures::compat::Future01CompatExt; use hashing::{Digest, Fingerprint}; use maplit::btreemap; use mock::StubCAS; @@ -22,6 +22,7 @@ use std::path::{Path, PathBuf}; use std::time::Duration; use tempfile::TempDir; use testutil::data::{TestData, TestDirectory}; +use tokio::runtime::Handle; use workunit_store::WorkUnitStore; impl LoadMetadata { @@ -70,9 +71,12 @@ pub fn extra_big_file_bytes() -> Bytes { bytes } -pub fn load_file_bytes(store: &Store, digest: Digest) -> Result, String> { - block_on(store.load_file_bytes_with(digest, |bytes| bytes, WorkUnitStore::new())) - .map(|option| option.map(|(bytes, _metadata)| bytes)) +pub async fn load_file_bytes(store: &Store, digest: Digest) -> Result, String> { + let option = store + .load_file_bytes_with(digest, |bytes| bytes, WorkUnitStore::new()) + .compat() + .await?; + Ok(option.map(|(bytes, _metadata)| bytes)) } /// @@ -90,7 +94,8 @@ pub fn new_cas(chunk_size_bytes: usize) -> StubCAS { /// Create a new local store with whatever was already serialized in dir. 
/// fn new_local_store>(dir: P) -> Store { - Store::local_only(task_executor::Executor::new(), dir).expect("Error creating local store") + Store::local_only(task_executor::Executor::new(Handle::current()), dir) + .expect("Error creating local store") } /// @@ -98,7 +103,7 @@ fn new_local_store>(dir: P) -> Store { /// fn new_store>(dir: P, cas_address: String) -> Store { Store::with_remote( - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), dir, vec![cas_address], None, @@ -114,62 +119,61 @@ fn new_store>(dir: P, cas_address: String) -> Store { .unwrap() } -#[test] -fn load_file_prefers_local() { +#[tokio::test] +async fn load_file_prefers_local() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); - block_on(crate::local_tests::new_store(dir.path()).store_bytes( - EntryType::File, - testdata.bytes(), - false, - )) - .expect("Store failed"); + crate::local_tests::new_store(dir.path()) + .store_bytes(EntryType::File, testdata.bytes(), false) + .compat() + .await + .expect("Store failed"); let cas = new_cas(1024); assert_eq!( - load_file_bytes(&new_store(dir.path(), cas.address()), testdata.digest(),), + load_file_bytes(&new_store(dir.path(), cas.address()), testdata.digest()).await, Ok(Some(testdata.bytes())) ); assert_eq!(0, cas.read_request_count()); } -#[test] -fn load_directory_prefers_local() { +#[tokio::test] +async fn load_directory_prefers_local() { let dir = TempDir::new().unwrap(); let testdir = TestDirectory::containing_roland(); - block_on(crate::local_tests::new_store(dir.path()).store_bytes( - EntryType::Directory, - testdir.bytes(), - false, - )) - .expect("Store failed"); + crate::local_tests::new_store(dir.path()) + .store_bytes(EntryType::Directory, testdir.bytes(), false) + .compat() + .await + .expect("Store failed"); let cas = new_cas(1024); assert_eq!( - block_on( - new_store(dir.path(), cas.address()).load_directory(testdir.digest(), WorkUnitStore::new()) - ) - .unwrap() - .unwrap() - .0, + new_store(dir.path(), cas.address()) + .load_directory(testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .unwrap() + .unwrap() + .0, testdir.directory() ); assert_eq!(0, cas.read_request_count()); } -#[test] -fn load_file_falls_back_and_backfills() { +#[tokio::test] +async fn load_file_falls_back_and_backfills() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); let cas = new_cas(1024); assert_eq!( - load_file_bytes(&new_store(dir.path(), cas.address()), testdata.digest()), + load_file_bytes(&new_store(dir.path(), cas.address()), testdata.digest()).await, Ok(Some(testdata.bytes())), "Read from CAS" ); @@ -178,14 +182,15 @@ fn load_file_falls_back_and_backfills() { crate::local_tests::load_file_bytes( &crate::local_tests::new_store(dir.path()), testdata.digest(), - ), + ) + .await, Ok(Some(testdata.bytes())), "Read from local cache" ); } -#[test] -fn load_directory_falls_back_and_backfills() { +#[tokio::test] +async fn load_directory_falls_back_and_backfills() { let dir = TempDir::new().unwrap(); let cas = new_cas(1024); @@ -193,12 +198,13 @@ fn load_directory_falls_back_and_backfills() { let testdir = TestDirectory::containing_roland(); assert_eq!( - block_on( - new_store(dir.path(), cas.address()).load_directory(testdir.digest(), WorkUnitStore::new()) - ) - .unwrap() - .unwrap() - .0, + new_store(dir.path(), cas.address()) + .load_directory(testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .unwrap() + .unwrap() + .0, testdir.directory() ); assert_eq!(1, cas.read_request_count()); @@ 
-206,13 +212,14 @@ fn load_directory_falls_back_and_backfills() { crate::local_tests::load_directory_proto_bytes( &crate::local_tests::new_store(dir.path()), testdir.digest(), - ), + ) + .await, Ok(Some(testdir.bytes())) ); } -#[test] -fn load_recursive_directory() { +#[tokio::test] +async fn load_recursive_directory() { let dir = TempDir::new().unwrap(); let roland = TestData::roland(); @@ -231,40 +238,44 @@ fn load_recursive_directory() { .directory(&recursive_testdir) .build(); - block_on( - new_store(dir.path(), cas.address()) - .ensure_local_has_recursive_directory(recursive_testdir_digest, WorkUnitStore::new()), - ) - .expect("Downloading recursive directory should have succeeded."); + new_store(dir.path(), cas.address()) + .ensure_local_has_recursive_directory(recursive_testdir_digest, WorkUnitStore::new()) + .compat() + .await + .expect("Downloading recursive directory should have succeeded."); assert_eq!( - load_file_bytes(&new_local_store(dir.path()), roland.digest(),), + load_file_bytes(&new_local_store(dir.path()), roland.digest()).await, Ok(Some(roland.bytes())) ); assert_eq!( - load_file_bytes(&new_local_store(dir.path()), catnip.digest(),), + load_file_bytes(&new_local_store(dir.path()), catnip.digest()).await, Ok(Some(catnip.bytes())) ); assert_eq!( - block_on(new_local_store(dir.path()).load_directory(testdir_digest, WorkUnitStore::new())) + new_local_store(dir.path()) + .load_directory(testdir_digest, WorkUnitStore::new()) + .compat() + .await .unwrap() .unwrap() .0, testdir_directory ); assert_eq!( - block_on( - new_local_store(dir.path()).load_directory(recursive_testdir_digest, WorkUnitStore::new()) - ) - .unwrap() - .unwrap() - .0, + new_local_store(dir.path()) + .load_directory(recursive_testdir_digest, WorkUnitStore::new()) + .compat() + .await + .unwrap() + .unwrap() + .0, recursive_testdir_directory ); } -#[test] -fn load_file_missing_is_none() { +#[tokio::test] +async fn load_file_missing_is_none() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); @@ -272,29 +283,33 @@ fn load_file_missing_is_none() { load_file_bytes( &new_store(dir.path(), cas.address()), TestData::roland().digest() - ), + ) + .await, Ok(None) ); assert_eq!(1, cas.read_request_count()); } -#[test] -fn load_directory_missing_is_none() { +#[tokio::test] +async fn load_directory_missing_is_none() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); assert_eq!( - block_on(new_store(dir.path(), cas.address()).load_directory( - TestDirectory::containing_roland().digest(), - WorkUnitStore::new() - )), + new_store(dir.path(), cas.address()) + .load_directory( + TestDirectory::containing_roland().digest(), + WorkUnitStore::new() + ) + .compat() + .await, Ok(None) ); assert_eq!(1, cas.read_request_count()); } -#[test] -fn load_file_remote_error_is_error() { +#[tokio::test] +async fn load_file_remote_error_is_error() { let dir = TempDir::new().unwrap(); let cas = StubCAS::always_errors(); @@ -302,6 +317,7 @@ fn load_file_remote_error_is_error() { &new_store(dir.path(), cas.address()), TestData::roland().digest(), ) + .await .expect_err("Want error"); assert!( cas.read_request_count() > 0, @@ -314,16 +330,16 @@ fn load_file_remote_error_is_error() { ); } -#[test] -fn load_directory_remote_error_is_error() { +#[tokio::test] +async fn load_directory_remote_error_is_error() { let dir = TempDir::new().unwrap(); let cas = StubCAS::always_errors(); - let error = block_on( - new_store(dir.path(), cas.address()) - .load_directory(TestData::roland().digest(), WorkUnitStore::new()), - ) - 
.expect_err("Want error"); + let error = new_store(dir.path(), cas.address()) + .load_directory(TestData::roland().digest(), WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert!( cas.read_request_count() > 0, "Want read_request_count > 0 but got {}", @@ -335,29 +351,31 @@ fn load_directory_remote_error_is_error() { ); } -#[test] -fn malformed_remote_directory_is_error() { +#[tokio::test] +async fn malformed_remote_directory_is_error() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); let cas = new_cas(1024); - block_on( - new_store(dir.path(), cas.address()).load_directory(testdata.digest(), WorkUnitStore::new()), - ) - .expect_err("Want error"); + new_store(dir.path(), cas.address()) + .load_directory(testdata.digest(), WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert_eq!( crate::local_tests::load_directory_proto_bytes( &crate::local_tests::new_store(dir.path()), testdata.digest() - ), + ) + .await, Ok(None) ); } -#[test] -fn non_canonical_remote_directory_is_error() { +#[tokio::test] +async fn non_canonical_remote_directory_is_error() { let mut non_canonical_directory = TestDirectory::containing_roland().directory(); non_canonical_directory.mut_files().push({ let mut file = bazel_protos::remote_execution::FileNode::new(); @@ -388,23 +406,24 @@ fn non_canonical_remote_directory_is_error() { non_canonical_directory_bytes, ) .build(); - block_on( - new_store(dir.path(), cas.address()) - .load_directory(directory_digest.clone(), WorkUnitStore::new()), - ) - .expect_err("Want error"); + new_store(dir.path(), cas.address()) + .load_directory(directory_digest.clone(), WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert_eq!( crate::local_tests::load_directory_proto_bytes( &crate::local_tests::new_store(dir.path()), directory_digest, - ), + ) + .await, Ok(None) ); } -#[test] -fn wrong_remote_file_bytes_is_error() { +#[tokio::test] +async fn wrong_remote_file_bytes_is_error() { let dir = TempDir::new().unwrap(); let testdata = TestData::roland(); @@ -416,19 +435,21 @@ fn wrong_remote_file_bytes_is_error() { ) .build(); load_file_bytes(&new_store(dir.path(), cas.address()), testdata.digest()) + .await .expect_err("Want error"); assert_eq!( crate::local_tests::load_file_bytes( &crate::local_tests::new_store(dir.path()), testdata.digest() - ), + ) + .await, Ok(None) ); } -#[test] -fn wrong_remote_directory_bytes_is_error() { +#[tokio::test] +async fn wrong_remote_directory_bytes_is_error() { let dir = TempDir::new().unwrap(); let testdir = TestDirectory::containing_dnalor(); @@ -439,46 +460,55 @@ fn wrong_remote_directory_bytes_is_error() { TestDirectory::containing_roland().bytes(), ) .build(); - load_file_bytes(&new_store(dir.path(), cas.address()), testdir.digest()).expect_err("Want error"); + load_file_bytes(&new_store(dir.path(), cas.address()), testdir.digest()) + .await + .expect_err("Want error"); assert_eq!( crate::local_tests::load_file_bytes( &crate::local_tests::new_store(dir.path()), testdir.digest() - ), + ) + .await, Ok(None) ); } -#[test] -fn expand_empty_directory() { +#[tokio::test] +async fn expand_empty_directory() { let dir = TempDir::new().unwrap(); let empty_dir = TestDirectory::empty(); - let expanded = block_on( - new_local_store(dir.path()).expand_directory(empty_dir.digest(), WorkUnitStore::new()), - ) - .expect("Error expanding directory"); + let expanded = new_local_store(dir.path()) + .expand_directory(empty_dir.digest(), WorkUnitStore::new()) + .compat() + .await + 
.expect("Error expanding directory"); let want: HashMap = vec![(empty_dir.digest(), EntryType::Directory)] .into_iter() .collect(); assert_eq!(expanded, want); } -#[test] -fn expand_flat_directory() { +#[tokio::test] +async fn expand_flat_directory() { let dir = TempDir::new().unwrap(); let roland = TestData::roland(); let testdir = TestDirectory::containing_roland(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - let expanded = - block_on(new_local_store(dir.path()).expand_directory(testdir.digest(), WorkUnitStore::new())) - .expect("Error expanding directory"); + let expanded = new_local_store(dir.path()) + .expand_directory(testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .expect("Error expanding directory"); let want: HashMap = vec![ (testdir.digest(), EntryType::Directory), (roland.digest(), EntryType::File), @@ -488,8 +518,8 @@ fn expand_flat_directory() { assert_eq!(expanded, want); } -#[test] -fn expand_recursive_directory() { +#[tokio::test] +async fn expand_recursive_directory() { let dir = TempDir::new().unwrap(); let roland = TestData::roland(); @@ -497,15 +527,22 @@ fn expand_recursive_directory() { let testdir = TestDirectory::containing_roland(); let recursive_testdir = TestDirectory::recursive(); - block_on(new_local_store(dir.path()).record_directory(&recursive_testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&recursive_testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - let expanded = block_on( - new_local_store(dir.path()).expand_directory(recursive_testdir.digest(), WorkUnitStore::new()), - ) - .expect("Error expanding directory"); + let expanded = new_local_store(dir.path()) + .expand_directory(recursive_testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .expect("Error expanding directory"); let want: HashMap = vec![ (recursive_testdir.digest(), EntryType::Directory), (testdir.digest(), EntryType::Directory), @@ -517,11 +554,14 @@ fn expand_recursive_directory() { assert_eq!(expanded, want); } -#[test] -fn expand_missing_directory() { +#[tokio::test] +async fn expand_missing_directory() { let dir = TempDir::new().unwrap(); let digest = TestDirectory::containing_roland().digest(); - let error = block_on(new_local_store(dir.path()).expand_directory(digest, WorkUnitStore::new())) + let error = new_local_store(dir.path()) + .expand_directory(digest, WorkUnitStore::new()) + .compat() + .await .expect_err("Want error"); assert!( error.contains(&format!("{:?}", digest)), @@ -530,19 +570,23 @@ fn expand_missing_directory() { ); } -#[test] -fn expand_directory_missing_subdir() { +#[tokio::test] +async fn expand_directory_missing_subdir() { let dir = TempDir::new().unwrap(); let recursive_testdir = TestDirectory::recursive(); - block_on(new_local_store(dir.path()).record_directory(&recursive_testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&recursive_testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - let error = block_on( - new_local_store(dir.path()).expand_directory(recursive_testdir.digest(), 
WorkUnitStore::new()), - ) - .expect_err("Want error"); + let error = new_local_store(dir.path()) + .expand_directory(recursive_testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert!( error.contains(&format!( "{}", @@ -553,23 +597,26 @@ fn expand_directory_missing_subdir() { ); } -#[test] -fn uploads_files() { +#[tokio::test] +async fn uploads_files() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); let testdata = TestData::roland(); - block_on(new_local_store(dir.path()).store_file_bytes(testdata.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(testdata.bytes(), false) + .compat() + .await .expect("Error storing file locally"); assert_eq!(cas.blobs.lock().get(&testdata.fingerprint()), None); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading file"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading file"); assert_eq!( cas.blobs.lock().get(&testdata.fingerprint()), @@ -577,27 +624,33 @@ fn uploads_files() { ); } -#[test] -fn uploads_directories_recursively() { +#[tokio::test] +async fn uploads_directories_recursively() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); let testdata = TestData::roland(); let testdir = TestDirectory::containing_roland(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).store_file_bytes(testdata.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(testdata.bytes(), false) + .compat() + .await .expect("Error storing file locally"); assert_eq!(cas.blobs.lock().get(&testdata.fingerprint()), None); assert_eq!(cas.blobs.lock().get(&testdir.fingerprint()), None); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading directory"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading directory"); assert_eq!( cas.blobs.lock().get(&testdir.fingerprint()), @@ -609,24 +662,30 @@ fn uploads_directories_recursively() { ); } -#[test] -fn uploads_files_recursively_when_under_three_digests_ignoring_items_already_in_cas() { +#[tokio::test] +async fn uploads_files_recursively_when_under_three_digests_ignoring_items_already_in_cas() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); let testdata = TestData::roland(); let testdir = TestDirectory::containing_roland(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).store_file_bytes(testdata.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(testdata.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading file"); + new_store(dir.path(), cas.address()) + 
.ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading file"); assert_eq!(cas.write_message_sizes.lock().len(), 1); assert_eq!( @@ -635,11 +694,11 @@ fn uploads_files_recursively_when_under_three_digests_ignoring_items_already_in_ ); assert_eq!(cas.blobs.lock().get(&testdir.fingerprint()), None); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading directory"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading directory"); assert_eq!(cas.write_message_sizes.lock().len(), 3); assert_eq!( @@ -648,8 +707,8 @@ fn uploads_files_recursively_when_under_three_digests_ignoring_items_already_in_ ); } -#[test] -fn does_not_reupload_file_already_in_cas_when_requested_with_three_other_digests() { +#[tokio::test] +async fn does_not_reupload_file_already_in_cas_when_requested_with_three_other_digests() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); @@ -657,18 +716,27 @@ fn does_not_reupload_file_already_in_cas_when_requested_with_three_other_digests let roland = TestData::roland(); let testdir = TestDirectory::containing_roland(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).store_file_bytes(roland.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(roland.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - block_on(new_local_store(dir.path()).store_file_bytes(catnip.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(catnip.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![roland.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading big file"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![roland.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading big file"); assert_eq!(cas.write_message_sizes.lock().len(), 1); assert_eq!( @@ -678,13 +746,14 @@ fn does_not_reupload_file_already_in_cas_when_requested_with_three_other_digests assert_eq!(cas.blobs.lock().get(&catnip.fingerprint()), None); assert_eq!(cas.blobs.lock().get(&testdir.fingerprint()), None); - block_on( - new_store(dir.path(), cas.address()).ensure_remote_has_recursive( + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive( vec![testdir.digest(), catnip.digest()], WorkUnitStore::new(), - ), - ) - .expect("Error uploading directory"); + ) + .compat() + .await + .expect("Error uploading directory"); assert_eq!(cas.write_message_sizes.lock().len(), 3); assert_eq!( @@ -697,19 +766,22 @@ fn does_not_reupload_file_already_in_cas_when_requested_with_three_other_digests ); } -#[test] -fn does_not_reupload_big_file_already_in_cas() { +#[tokio::test] +async fn does_not_reupload_big_file_already_in_cas() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); - block_on(new_local_store(dir.path()).store_file_bytes(extra_big_file_bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(extra_big_file_bytes(), false) + .compat() + .await .expect("Error storing file 
locally"); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![extra_big_file_digest()], WorkUnitStore::new()), - ) - .expect("Error uploading directory"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![extra_big_file_digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading directory"); assert_eq!(cas.write_message_sizes.lock().len(), 1); assert_eq!( @@ -717,11 +789,11 @@ fn does_not_reupload_big_file_already_in_cas() { Some(&extra_big_file_bytes()) ); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![extra_big_file_digest()], WorkUnitStore::new()), - ) - .expect("Error uploading directory"); + new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![extra_big_file_digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading directory"); assert_eq!(cas.write_message_sizes.lock().len(), 1); assert_eq!( @@ -730,8 +802,8 @@ fn does_not_reupload_big_file_already_in_cas() { ); } -#[test] -fn upload_missing_files() { +#[tokio::test] +async fn upload_missing_files() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); @@ -739,35 +811,38 @@ fn upload_missing_files() { assert_eq!(cas.blobs.lock().get(&testdata.fingerprint()), None); - let error = block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()), - ) - .expect_err("Want error"); + let error = new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdata.digest()], WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert_eq!( error, format!("Failed to upload digest {:?}: Not found", testdata.digest()) ); } -#[test] -fn upload_missing_file_in_directory() { +#[tokio::test] +async fn upload_missing_file_in_directory() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); let testdir = TestDirectory::containing_roland(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); assert_eq!(cas.blobs.lock().get(&testdir.fingerprint()), None); assert_eq!(cas.blobs.lock().get(&testdir.fingerprint()), None); - let error = block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect_err("Want error"); + let error = new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect_err("Want error"); assert_eq!( error, format!( @@ -778,31 +853,34 @@ fn upload_missing_file_in_directory() { ); } -#[test] -fn uploading_digest_with_wrong_size_is_error() { +#[tokio::test] +async fn uploading_digest_with_wrong_size_is_error() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); let testdata = TestData::roland(); - block_on(new_local_store(dir.path()).store_file_bytes(testdata.bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(testdata.bytes(), false) + .compat() + .await .expect("Error storing file locally"); assert_eq!(cas.blobs.lock().get(&testdata.fingerprint()), None); let wrong_digest = Digest(testdata.fingerprint(), testdata.len() + 1); - block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![wrong_digest], WorkUnitStore::new()), - ) - .expect_err("Expect error uploading file"); + 
new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![wrong_digest], WorkUnitStore::new()) + .compat() + .await + .expect_err("Expect error uploading file"); assert_eq!(cas.blobs.lock().get(&testdata.fingerprint()), None); } -#[test] -fn instance_name_upload() { +#[tokio::test] +async fn instance_name_upload() { let dir = TempDir::new().unwrap(); let cas = StubCAS::builder() .instance_name("dark-tower".to_owned()) @@ -811,15 +889,24 @@ fn instance_name_upload() { // 3 is enough digests to trigger a FindMissingBlobs request let testdir = TestDirectory::containing_roland_and_treats(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).store_file_bytes(TestData::roland().bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(TestData::roland().bytes(), false) + .compat() + .await .expect("Error storing roland locally"); - block_on(new_local_store(dir.path()).store_file_bytes(TestData::catnip().bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(TestData::catnip().bytes(), false) + .compat() + .await .expect("Error storing catnip locally"); let store_with_remote = Store::with_remote( - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), dir.path(), vec![cas.address()], Some("dark-tower".to_owned()), @@ -834,14 +921,15 @@ fn instance_name_upload() { ) .unwrap(); - block_on( - store_with_remote.ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading"); + store_with_remote + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading"); } -#[test] -fn instance_name_download() { +#[tokio::test] +async fn instance_name_download() { let dir = TempDir::new().unwrap(); let cas = StubCAS::builder() .instance_name("dark-tower".to_owned()) @@ -849,7 +937,7 @@ fn instance_name_download() { .build(); let store_with_remote = Store::with_remote( - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), dir.path(), vec![cas.address()], Some("dark-tower".to_owned()), @@ -865,20 +953,19 @@ fn instance_name_download() { .unwrap(); assert_eq!( - block_on(store_with_remote.load_file_bytes_with( - TestData::roland().digest(), - |b| b, - WorkUnitStore::new() - )) - .unwrap() - .unwrap() - .0, + store_with_remote + .load_file_bytes_with(TestData::roland().digest(), |b| b, WorkUnitStore::new()) + .compat() + .await + .unwrap() + .unwrap() + .0, TestData::roland().bytes() ) } -#[test] -fn auth_upload() { +#[tokio::test] +async fn auth_upload() { let dir = TempDir::new().unwrap(); let cas = StubCAS::builder() .required_auth_token("Armory.Key".to_owned()) @@ -887,15 +974,24 @@ fn auth_upload() { // 3 is enough digests to trigger a FindMissingBlobs request let testdir = TestDirectory::containing_roland_and_treats(); - block_on(new_local_store(dir.path()).record_directory(&testdir.directory(), false)) + new_local_store(dir.path()) + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(new_local_store(dir.path()).store_file_bytes(TestData::roland().bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(TestData::roland().bytes(), false) + .compat() + .await .expect("Error storing roland locally"); - 
block_on(new_local_store(dir.path()).store_file_bytes(TestData::catnip().bytes(), false)) + new_local_store(dir.path()) + .store_file_bytes(TestData::catnip().bytes(), false) + .compat() + .await .expect("Error storing catnip locally"); let store_with_remote = Store::with_remote( - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), dir.path(), vec![cas.address()], None, @@ -910,14 +1006,15 @@ fn auth_upload() { ) .unwrap(); - block_on( - store_with_remote.ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading"); + store_with_remote + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading"); } -#[test] -fn auth_download() { +#[tokio::test] +async fn auth_download() { let dir = TempDir::new().unwrap(); let cas = StubCAS::builder() .required_auth_token("Armory.Key".to_owned()) @@ -925,7 +1022,7 @@ fn auth_download() { .build(); let store_with_remote = Store::with_remote( - task_executor::Executor::new(), + task_executor::Executor::new(Handle::current()), dir.path(), vec![cas.address()], None, @@ -941,36 +1038,38 @@ fn auth_download() { .unwrap(); assert_eq!( - block_on(store_with_remote.load_file_bytes_with( - TestData::roland().digest(), - |b| b, - WorkUnitStore::new() - )) - .unwrap() - .unwrap() - .0, + store_with_remote + .load_file_bytes_with(TestData::roland().digest(), |b| b, WorkUnitStore::new()) + .compat() + .await + .unwrap() + .unwrap() + .0, TestData::roland().bytes() ) } -#[test] -fn materialize_missing_file() { +#[tokio::test] +async fn materialize_missing_file() { let materialize_dir = TempDir::new().unwrap(); let file = materialize_dir.path().join("file"); let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.materialize_file( - file.clone(), - TestData::roland().digest(), - false, - WorkUnitStore::new(), - )) - .expect_err("Want unknown digest error"); + store + .materialize_file( + file.clone(), + TestData::roland().digest(), + false, + WorkUnitStore::new(), + ) + .compat() + .await + .expect_err("Want unknown digest error"); } -#[test] -fn materialize_file() { +#[tokio::test] +async fn materialize_file() { let materialize_dir = TempDir::new().unwrap(); let file = materialize_dir.path().join("file"); @@ -978,15 +1077,22 @@ fn materialize_file() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.store_file_bytes(testdata.bytes(), false)).expect("Error saving bytes"); - block_on(store.materialize_file(file.clone(), testdata.digest(), false, WorkUnitStore::new())) + store + .store_file_bytes(testdata.bytes(), false) + .compat() + .await + .expect("Error saving bytes"); + store + .materialize_file(file.clone(), testdata.digest(), false, WorkUnitStore::new()) + .compat() + .await .expect("Error materializing file"); assert_eq!(file_contents(&file), testdata.bytes()); assert!(!is_executable(&file)); } -#[test] -fn materialize_file_executable() { +#[tokio::test] +async fn materialize_file_executable() { let materialize_dir = TempDir::new().unwrap(); let file = materialize_dir.path().join("file"); @@ -994,29 +1100,39 @@ fn materialize_file_executable() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.store_file_bytes(testdata.bytes(), false)).expect("Error saving bytes"); - block_on(store.materialize_file(file.clone(), testdata.digest(), true, WorkUnitStore::new())) + 
store + .store_file_bytes(testdata.bytes(), false) + .compat() + .await + .expect("Error saving bytes"); + store + .materialize_file(file.clone(), testdata.digest(), true, WorkUnitStore::new()) + .compat() + .await .expect("Error materializing file"); assert_eq!(file_contents(&file), testdata.bytes()); assert!(is_executable(&file)); } -#[test] -fn materialize_missing_directory() { +#[tokio::test] +async fn materialize_missing_directory() { let materialize_dir = TempDir::new().unwrap(); let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.materialize_directory( - materialize_dir.path().to_owned(), - TestDirectory::recursive().digest(), - WorkUnitStore::new(), - )) - .expect_err("Want unknown digest error"); + store + .materialize_directory( + materialize_dir.path().to_owned(), + TestDirectory::recursive().digest(), + WorkUnitStore::new(), + ) + .compat() + .await + .expect_err("Want unknown digest error"); } -#[test] -fn materialize_directory() { +#[tokio::test] +async fn materialize_directory() { let materialize_dir = TempDir::new().unwrap(); let roland = TestData::roland(); @@ -1026,18 +1142,36 @@ fn materialize_directory() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.record_directory(&recursive_testdir.directory(), false)) + store + .record_directory(&recursive_testdir.directory(), false) + .compat() + .await .expect("Error saving recursive Directory"); - block_on(store.record_directory(&testdir.directory(), false)).expect("Error saving Directory"); - block_on(store.store_file_bytes(roland.bytes(), false)).expect("Error saving file bytes"); - block_on(store.store_file_bytes(catnip.bytes(), false)).expect("Error saving catnip file bytes"); - - block_on(store.materialize_directory( - materialize_dir.path().to_owned(), - recursive_testdir.digest(), - WorkUnitStore::new(), - )) - .expect("Error materializing"); + store + .record_directory(&testdir.directory(), false) + .compat() + .await + .expect("Error saving Directory"); + store + .store_file_bytes(roland.bytes(), false) + .compat() + .await + .expect("Error saving file bytes"); + store + .store_file_bytes(catnip.bytes(), false) + .compat() + .await + .expect("Error saving catnip file bytes"); + + store + .materialize_directory( + materialize_dir.path().to_owned(), + recursive_testdir.digest(), + WorkUnitStore::new(), + ) + .compat() + .await + .expect("Error materializing"); assert_eq!(list_dir(materialize_dir.path()), vec!["cats", "treats"]); assert_eq!( @@ -1054,8 +1188,8 @@ fn materialize_directory() { ); } -#[test] -fn materialize_directory_executable() { +#[tokio::test] +async fn materialize_directory_executable() { let materialize_dir = TempDir::new().unwrap(); let catnip = TestData::catnip(); @@ -1063,15 +1197,26 @@ fn materialize_directory_executable() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.record_directory(&testdir.directory(), false)).expect("Error saving Directory"); - block_on(store.store_file_bytes(catnip.bytes(), false)).expect("Error saving catnip file bytes"); - - block_on(store.materialize_directory( - materialize_dir.path().to_owned(), - testdir.digest(), - WorkUnitStore::new(), - )) - .expect("Error materializing"); + store + .record_directory(&testdir.directory(), false) + .compat() + .await + .expect("Error saving Directory"); + store + .store_file_bytes(catnip.bytes(), false) + .compat() + .await + .expect("Error saving catnip file 
bytes"); + + store + .materialize_directory( + materialize_dir.path().to_owned(), + testdir.digest(), + WorkUnitStore::new(), + ) + .compat() + .await + .expect("Error materializing"); assert_eq!(list_dir(materialize_dir.path()), vec!["feed", "food"]); assert_eq!( @@ -1086,20 +1231,22 @@ fn materialize_directory_executable() { assert!(!is_executable(&materialize_dir.path().join("food"))); } -#[test] -fn contents_for_directory_empty() { +#[tokio::test] +async fn contents_for_directory_empty() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - let file_contents = - block_on(store.contents_for_directory(TestDirectory::empty().digest(), WorkUnitStore::new())) - .expect("Getting FileContents"); + let file_contents = store + .contents_for_directory(TestDirectory::empty().digest(), WorkUnitStore::new()) + .compat() + .await + .expect("Getting FileContents"); assert_same_filecontents(file_contents, vec![]); } -#[test] -fn contents_for_directory() { +#[tokio::test] +async fn contents_for_directory() { let roland = TestData::roland(); let catnip = TestData::catnip(); let testdir = TestDirectory::containing_roland(); @@ -1107,15 +1254,32 @@ fn contents_for_directory() { let store_dir = TempDir::new().unwrap(); let store = new_local_store(store_dir.path()); - block_on(store.record_directory(&recursive_testdir.directory(), false)) + store + .record_directory(&recursive_testdir.directory(), false) + .compat() + .await .expect("Error saving recursive Directory"); - block_on(store.record_directory(&testdir.directory(), false)).expect("Error saving Directory"); - block_on(store.store_file_bytes(roland.bytes(), false)).expect("Error saving file bytes"); - block_on(store.store_file_bytes(catnip.bytes(), false)).expect("Error saving catnip file bytes"); - - let file_contents = - block_on(store.contents_for_directory(recursive_testdir.digest(), WorkUnitStore::new())) - .expect("Getting FileContents"); + store + .record_directory(&testdir.directory(), false) + .compat() + .await + .expect("Error saving Directory"); + store + .store_file_bytes(roland.bytes(), false) + .compat() + .await + .expect("Error saving file bytes"); + store + .store_file_bytes(catnip.bytes(), false) + .compat() + .await + .expect("Error saving catnip file bytes"); + + let file_contents = store + .contents_for_directory(recursive_testdir.digest(), WorkUnitStore::new()) + .compat() + .await + .expect("Getting FileContents"); assert_same_filecontents( file_contents, @@ -1206,19 +1370,8 @@ fn is_executable(path: &Path) -> bool { == 0o100 } -pub fn block_on< - Item: Send + 'static, - Error: Send + 'static, - Fut: Future + Send + 'static, ->( - f: Fut, -) -> Result { - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - runtime.block_on(f) -} - -#[test] -fn returns_upload_summary_on_empty_cas() { +#[tokio::test] +async fn returns_upload_summary_on_empty_cas() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); @@ -1227,17 +1380,26 @@ fn returns_upload_summary_on_empty_cas() { let testdir = TestDirectory::containing_roland_and_treats(); let local_store = new_local_store(dir.path()); - block_on(local_store.record_directory(&testdir.directory(), false)) + local_store + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(local_store.store_file_bytes(testroland.bytes(), false)) + local_store + .store_file_bytes(testroland.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - 
block_on(local_store.store_file_bytes(testcatnip.bytes(), false)) + local_store + .store_file_bytes(testcatnip.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - let mut summary = block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading file"); + let mut summary = new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading file"); // We store all 3 files, and so we must sum their digests let test_data = vec![ @@ -1259,8 +1421,8 @@ fn returns_upload_summary_on_empty_cas() { ); } -#[test] -fn summary_does_not_count_things_in_cas() { +#[tokio::test] +async fn summary_does_not_count_things_in_cas() { let dir = TempDir::new().unwrap(); let cas = StubCAS::empty(); @@ -1270,19 +1432,28 @@ fn summary_does_not_count_things_in_cas() { // Store everything locally let local_store = new_local_store(dir.path()); - block_on(local_store.record_directory(&testdir.directory(), false)) + local_store + .record_directory(&testdir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - block_on(local_store.store_file_bytes(testroland.bytes(), false)) + local_store + .store_file_bytes(testroland.bytes(), false) + .compat() + .await .expect("Error storing file locally"); - block_on(local_store.store_file_bytes(testcatnip.bytes(), false)) + local_store + .store_file_bytes(testcatnip.bytes(), false) + .compat() + .await .expect("Error storing file locally"); // Store testroland first, which should return a summary of one file - let mut data_summary = block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testroland.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading file"); + let mut data_summary = new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testroland.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading file"); data_summary.upload_wall_time = Duration::default(); assert_eq!( @@ -1299,11 +1470,11 @@ fn summary_does_not_count_things_in_cas() { // Store the directory and catnip. // It should see the digest of testroland already in cas, // and not report it in uploads. 
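(Editor's aside, not part of the patch.) The hand-rolled `block_on` helper deleted a bit further up built a fresh `tokio::runtime::Runtime` for every call; after the port each test carries its own runtime via the `#[tokio::test]` attribute instead. A rough sketch of what that attribute does (not the exact macro expansion):

```rust
// Rough sketch only: #[tokio::test] wraps the async test body in a synchronous #[test]
// that builds a runtime and blocks on the body, much like the deleted helper did.
#[test]
fn example_expansion() {
    let mut runtime = tokio::runtime::Runtime::new().unwrap();
    runtime.block_on(async {
        // the async test body runs here
        assert_eq!(1 + 1, 2);
    });
}
```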
- let mut dir_summary = block_on( - new_store(dir.path(), cas.address()) - .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()), - ) - .expect("Error uploading directory"); + let mut dir_summary = new_store(dir.path(), cas.address()) + .ensure_remote_has_recursive(vec![testdir.digest()], WorkUnitStore::new()) + .compat() + .await + .expect("Error uploading directory"); dir_summary.upload_wall_time = Duration::default(); @@ -1319,37 +1490,45 @@ fn summary_does_not_count_things_in_cas() { ); } -#[test] -fn materialize_directory_metadata_all_local() { +#[tokio::test] +async fn materialize_directory_metadata_all_local() { let outer_dir = TestDirectory::double_nested(); let nested_dir = TestDirectory::nested(); let inner_dir = TestDirectory::containing_roland(); let file = TestData::roland(); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - let dir = tempfile::tempdir().unwrap(); let store = new_local_store(dir.path()); - runtime - .block_on(store.record_directory(&outer_dir.directory(), false)) + store + .record_directory(&outer_dir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - runtime - .block_on(store.record_directory(&nested_dir.directory(), false)) + store + .record_directory(&nested_dir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - runtime - .block_on(store.record_directory(&inner_dir.directory(), false)) + store + .record_directory(&inner_dir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - runtime - .block_on(store.store_file_bytes(file.bytes(), false)) + store + .store_file_bytes(file.bytes(), false) + .compat() + .await .expect("Error storing file locally"); let mat_dir = tempfile::tempdir().unwrap(); - let metadata = runtime - .block_on(store.materialize_directory( + let metadata = store + .materialize_directory( mat_dir.path().to_owned(), outer_dir.digest(), WorkUnitStore::new(), - )) + ) + .compat() + .await .unwrap(); let local = LoadMetadata::Local; @@ -1377,35 +1556,42 @@ fn materialize_directory_metadata_all_local() { assert_eq!(want, metadata); } -#[test] -fn materialize_directory_metadata_mixed() { +#[tokio::test] +async fn materialize_directory_metadata_mixed() { let outer_dir = TestDirectory::double_nested(); // /pets/cats/roland let nested_dir = TestDirectory::nested(); // /cats/roland let inner_dir = TestDirectory::containing_roland(); let file = TestData::roland(); let cas = StubCAS::builder().directory(&nested_dir).build(); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); let dir = tempfile::tempdir().unwrap(); let store = new_store(dir.path(), cas.address()); - runtime - .block_on(store.record_directory(&outer_dir.directory(), false)) + store + .record_directory(&outer_dir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - runtime - .block_on(store.record_directory(&inner_dir.directory(), false)) + store + .record_directory(&inner_dir.directory(), false) + .compat() + .await .expect("Error storing directory locally"); - runtime - .block_on(store.store_file_bytes(file.bytes(), false)) + store + .store_file_bytes(file.bytes(), false) + .compat() + .await .expect("Error storing file locally"); let mat_dir = tempfile::tempdir().unwrap(); - let metadata = runtime - .block_on(store.materialize_directory( + let metadata = store + .materialize_directory( mat_dir.path().to_owned(), outer_dir.digest(), WorkUnitStore::new(), - )) + ) + .compat() + .await .unwrap(); 
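(Editor's aside, not part of the patch.) The conversion repeated throughout these test hunks is the same: the `Store` APIs still return futures 0.1 values, so each call gains a `.compat()` from `futures::compat::Future01CompatExt` to bridge it into a std future that can be `.await`ed inside the `#[tokio::test]` body. A self-contained sketch with illustrative names (`legacy_future_01` is hypothetical, standing in for any 0.1-returning API):

```rust
use futures::compat::Future01CompatExt;

// Hypothetical stand-in for an API that still returns a futures 0.1 future.
fn legacy_future_01() -> impl futures01::Future<Item = u64, Error = String> {
    futures01::future::ok::<u64, String>(42)
}

#[tokio::test]
async fn await_a_futures01_value() {
    let value = legacy_future_01()
        .compat() // futures 0.1 -> std::future adapter
        .await // yields Result<Item, Error>
        .expect("legacy future failed");
    assert_eq!(value, 42);
}
```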
assert!(metadata @@ -1429,8 +1615,8 @@ fn materialize_directory_metadata_mixed() { ); } -#[test] -fn explicitly_overwrites_already_existing_file() { +#[tokio::test] +async fn explicitly_overwrites_already_existing_file() { fn test_file_with_arbitrary_content(filename: &str, content: &TestData) -> TestDirectory { use bazel_protos; let digest = content.digest(); @@ -1445,7 +1631,6 @@ fn explicitly_overwrites_already_existing_file() { TestDirectory { directory } } - let mut runtime = tokio::runtime::Runtime::new().unwrap(); let dir_to_write_to = tempfile::tempdir().unwrap(); let file_path: PathBuf = [dir_to_write_to.path(), Path::new("some_filename")] .iter() @@ -1464,12 +1649,14 @@ fn explicitly_overwrites_already_existing_file() { .build(); let store = new_store(tempfile::tempdir().unwrap(), cas.address()); - let _ = runtime - .block_on(store.materialize_directory( + let _ = store + .materialize_directory( dir_to_write_to.path().to_owned(), contents_dir.digest(), WorkUnitStore::new(), - )) + ) + .compat() + .await .unwrap(); let file_contents = std::fs::read(&file_path).unwrap(); diff --git a/src/rust/engine/logging/Cargo.toml b/src/rust/engine/logging/Cargo.toml index ff5e2ffad37..b0ac0f6c96d 100644 --- a/src/rust/engine/logging/Cargo.toml +++ b/src/rust/engine/logging/Cargo.toml @@ -7,12 +7,12 @@ publish = false [dependencies] chrono = "0.4.10" -futures01 = { package = "futures", version = "0.1" } lazy_static = "1" log = "0.4" num_enum = "0.1.1" parking_lot = "0.6" simplelog = "0.7.4" +tokio = { version = "0.2", features = ["rt-util"] } ui = { path = "../ui" } uuid = { version = "0.7", features = ["v4"] } diff --git a/src/rust/engine/logging/src/lib.rs b/src/rust/engine/logging/src/lib.rs index 567164e0ed4..4db2c7649a4 100644 --- a/src/rust/engine/logging/src/lib.rs +++ b/src/rust/engine/logging/src/lib.rs @@ -47,7 +47,7 @@ macro_rules! debug_log { pub mod logger; -pub use logger::{get_destination, set_destination, Destination}; +pub use logger::{get_destination, scope_task_destination, set_thread_destination, Destination}; pub type Logger = logger::Logger; diff --git a/src/rust/engine/logging/src/logger.rs b/src/rust/engine/logging/src/logger.rs index bf932fbb897..1b2f86cb611 100644 --- a/src/rust/engine/logging/src/logger.rs +++ b/src/rust/engine/logging/src/logger.rs @@ -2,20 +2,24 @@ // Licensed under the Apache License, Version 2.0 (see LICENSE). use crate::PythonLogLevel; -use chrono; -use futures01::task_local; -use lazy_static::lazy_static; -use log::{log, set_logger, set_max_level, LevelFilter, Log, Metadata, Record}; -use parking_lot::Mutex; -use simplelog::{ConfigBuilder, LevelPadding, WriteLogger}; + +use std::cell::RefCell; use std::collections::HashMap; use std::convert::TryInto; use std::fs::File; use std::fs::OpenOptions; +use std::future::Future; use std::io::{stderr, Stderr, Write}; use std::path::PathBuf; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; + +use chrono; +use lazy_static::lazy_static; +use log::{log, set_logger, set_max_level, LevelFilter, Log, Metadata, Record}; +use parking_lot::Mutex; +use simplelog::{ConfigBuilder, LevelPadding, WriteLogger}; +use tokio::task_local; use ui::EngineDisplay; use uuid::Uuid; @@ -259,37 +263,44 @@ pub enum Destination { } thread_local! { - pub static THREAD_DESTINATION: Mutex = Mutex::new(Destination::Stderr) + static THREAD_DESTINATION: RefCell = RefCell::new(Destination::Stderr) } task_local! 
{ - static TASK_DESTINATION: Mutex> = Mutex::new(None) + static TASK_DESTINATION: Destination; } -pub fn set_destination(destination: Destination) { - if futures01::task::is_in_task() { - TASK_DESTINATION.with(|task_destination| { - *task_destination.lock() = Some(destination); - }) - } else { - THREAD_DESTINATION.with(|thread_destination| { - *thread_destination.lock() = destination; - }) - } +/// +/// Set the current log destination for a Thread, but _not_ for a Task. Tasks must always be spawned +/// by callers using the `scope_task_destination` helper (generally via task_executor::Executor.) +/// +pub fn set_thread_destination(destination: Destination) { + THREAD_DESTINATION.with(|thread_destination| { + *thread_destination.borrow_mut() = destination; + }) } -pub fn get_destination() -> Destination { - fn get_task_destination() -> Option { - TASK_DESTINATION.with(|destination| *destination.lock()) - } - - fn get_thread_destination() -> Destination { - THREAD_DESTINATION.with(|destination| *destination.lock()) - } +/// +/// Propagate the current log destination to a Future representing a newly spawned Task. Usage of +/// this method should mostly be contained to task_executor::Executor. +/// +pub async fn scope_task_destination(destination: Destination, f: F) -> F::Output +where + F: Future, +{ + TASK_DESTINATION.scope(destination, f).await +} - if futures01::task::is_in_task() { - get_task_destination().unwrap_or_else(get_thread_destination) +/// +/// Get the current log destination, from either a Task or a Thread. +/// +/// TODO: Having this return an Option and tracking down all cases where it has defaulted would be +/// good. +/// +pub fn get_destination() -> Destination { + if let Ok(destination) = TASK_DESTINATION.try_with(|destination| *destination) { + destination } else { - get_thread_destination() + THREAD_DESTINATION.with(|destination| *destination.borrow()) } } diff --git a/src/rust/engine/process_execution/Cargo.toml b/src/rust/engine/process_execution/Cargo.toml index 152c1b80a97..01087422b72 100644 --- a/src/rust/engine/process_execution/Cargo.toml +++ b/src/rust/engine/process_execution/Cargo.toml @@ -21,7 +21,7 @@ grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "b582ef3dc4e hashing = { path = "../hashing" } libc = "0.2.39" log = "0.4" -nails = "0.3" +nails = "0.4" protobuf = { version = "2.0.6", features = ["with-bytes"] } sha2 = "0.8" sharded_lmdb = { path = "../sharded_lmdb" } @@ -29,10 +29,8 @@ store = { path = "../fs/store" } task_executor = { path = "../task_executor" } tempfile = "3" concrete_time = { path = "../concrete_time" } -tokio = "0.1" -tokio-codec = "0.1" -tokio-process = "0.2.1" -tokio-timer = "0.2" +tokio = { version = "0.2", features = ["process", "rt-threaded", "tcp", "time"] } +tokio-util = { version = "0.2", features = ["codec"] } uname = "0.1.1" workunit_store = { path = "../workunit_store" } regex = "1.3.1" @@ -47,3 +45,4 @@ parking_lot = "0.6" spectral = "0.6.0" tempfile = "3" testutil = { path = "../testutil" } +tokio = { version = "0.2", features = ["macros"] } diff --git a/src/rust/engine/process_execution/src/cache.rs b/src/rust/engine/process_execution/src/cache.rs index 82e6d868ba0..dfbd48289ad 100644 --- a/src/rust/engine/process_execution/src/cache.rs +++ b/src/rust/engine/process_execution/src/cache.rs @@ -5,6 +5,7 @@ use crate::{ use std::sync::Arc; use bytes::Bytes; +use futures::future::TryFutureExt; use futures01::{future, Future}; use log::{debug, warn}; use protobuf::Message; @@ -93,6 +94,7 @@ impl CommandRunner 
{ .map_err(|e| format!("Invalid ExecuteResponse: {:?}", e))?; Ok(execute_response) }) + .compat() .and_then(move |maybe_execute_response| { if let Some(execute_response) = maybe_execute_response { crate::remote::populate_fallible_execution_result( @@ -145,6 +147,6 @@ impl CommandRunner { .map(Bytes::from) .map_err(|err| format!("Error serializing execute process result to cache: {}", err)) }) - .and_then(move |bytes| process_execution_store.store_bytes(fingerprint, bytes, false)) + .and_then(move |bytes| process_execution_store.store_bytes(fingerprint, bytes, false).compat()) } } diff --git a/src/rust/engine/process_execution/src/cache_tests.rs b/src/rust/engine/process_execution/src/cache_tests.rs index 08266b41449..cb76b0719a6 100644 --- a/src/rust/engine/process_execution/src/cache_tests.rs +++ b/src/rust/engine/process_execution/src/cache_tests.rs @@ -2,6 +2,7 @@ use crate::{ CommandRunner as CommandRunnerTrait, Context, ExecuteProcessRequest, ExecuteProcessRequestMetadata, FallibleExecuteProcessResult, PlatformConstraint, }; +use futures::compat::Future01CompatExt; use hashing::EMPTY_DIGEST; use sharded_lmdb::ShardedLmdb; use std::collections::{BTreeMap, BTreeSet}; @@ -12,14 +13,15 @@ use std::time::Duration; use store::Store; use tempfile::TempDir; use testutil::data::TestData; +use tokio::runtime::Handle; struct RoundtripResults { uncached: Result, maybe_cached: Result, } -fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { - let runtime = task_executor::Executor::new(); +async fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { + let runtime = task_executor::Executor::new(Handle::current()); let work_dir = TempDir::new().unwrap(); let store_dir = TempDir::new().unwrap(); let store = Store::local_only(runtime.clone(), store_dir.path()).unwrap(); @@ -62,7 +64,10 @@ fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { is_nailgunnable: false, }; - let local_result = runtime.block_on(local.run(request.clone().into(), Context::default())); + let local_result = local + .run(request.clone().into(), Context::default()) + .compat() + .await; let cache_dir = TempDir::new().unwrap(); let caching = crate::cache::CommandRunner { @@ -81,7 +86,7 @@ fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { }, }; - let uncached_result = runtime.block_on(caching.run(request.clone().into(), Context::default())); + let uncached_result = caching.run(request.clone().into(), Context::default()).compat().await; assert_eq!(local_result, uncached_result); @@ -89,7 +94,10 @@ fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { // fail due to a FileNotFound error. So, If the second run succeeds, that implies that the // cache was successfully used. 
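(Editor's aside on the logger.rs hunk a little further up, not part of the patch.) `scope_task_destination`/`get_destination` follow tokio 0.2's task-local model: a `task_local!` value is only visible inside a future run under `.scope(..)`, and `try_with` lets a reader fall back (here to the thread-local) when no scope is active. A minimal sketch assuming tokio 0.2 with the `rt-util` feature; `REQUEST_ID` and `current_request_id` are illustrative names:

```rust
use tokio::task_local;

task_local! {
    static REQUEST_ID: u64;
}

fn current_request_id() -> Option<u64> {
    // Mirrors get_destination: read the task-local if set, otherwise report "no scope".
    REQUEST_ID.try_with(|id| *id).ok()
}

#[tokio::test]
async fn task_local_scoping() {
    assert_eq!(current_request_id(), None); // no scope active
    REQUEST_ID
        .scope(42, async {
            // Everything awaited inside this future observes the scoped value,
            // which is what scope_task_destination provides for spawned tasks.
            assert_eq!(current_request_id(), Some(42));
        })
        .await;
}
```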
std::fs::remove_file(&script_path).unwrap(); - let maybe_cached_result = runtime.block_on(caching.run(request.into(), Context::default())); + let maybe_cached_result = caching + .run(request.into(), Context::default()) + .compat() + .await; RoundtripResults { uncached: uncached_result, @@ -97,15 +105,15 @@ fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { } } -#[test] -fn cache_success() { - let results = run_roundtrip(0); +#[tokio::test] +async fn cache_success() { + let results = run_roundtrip(0).await; assert_eq!(results.uncached, results.maybe_cached); } -#[test] -fn failures_not_cached() { - let results = run_roundtrip(1); +#[tokio::test] +async fn failures_not_cached() { + let results = run_roundtrip(1).await; assert_ne!(results.uncached, results.maybe_cached); assert_eq!(results.uncached.unwrap().exit_code, 1); assert_eq!(results.maybe_cached.unwrap().exit_code, 127); // aka the return code for file not found diff --git a/src/rust/engine/process_execution/src/local.rs b/src/rust/engine/process_execution/src/local.rs index 1f093070bf4..b6647a248cf 100644 --- a/src/rust/engine/process_execution/src/local.rs +++ b/src/rust/engine/process_execution/src/local.rs @@ -3,7 +3,9 @@ use tempfile; use boxfuture::{try_future, BoxFuture, Boxable}; use fs::{self, GlobExpansionConjunction, GlobMatching, PathGlobs, StrictGlobMatching}; -use futures01::{future, Future, Stream}; +use futures::future::{FutureExt, TryFutureExt}; +use futures::stream::{BoxStream, StreamExt, TryStreamExt}; +use futures01::{future, Future}; use log::info; use nails::execution::{ChildOutput, ExitCode}; @@ -13,13 +15,13 @@ use std::fs::create_dir_all; use std::ops::Neg; use std::os::unix::{fs::symlink, process::ExitStatusExt}; use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; +use std::process::Stdio; use std::sync::Arc; use store::{OneOffStoreFileByDigest, Snapshot, Store}; -use tokio::timer::Timeout; -use tokio_codec::{BytesCodec, FramedRead}; -use tokio_process::CommandExt; +use tokio::process::Command; +use tokio::time::timeout; +use tokio_util::codec::{BytesCodec, FramedRead}; use crate::{ Context, ExecuteProcessRequest, FallibleExecuteProcessResult, MultiPlatformExecuteProcessRequest, @@ -84,6 +86,7 @@ impl CommandRunner { posix_fs .expand(output_globs) + .compat() .map_err(|err| format!("Error expanding output globs: {}", err)) .and_then(|path_stats| { Snapshot::from_path_stats( @@ -139,33 +142,40 @@ impl StreamedHermeticCommand { self } - fn stream(&mut self) -> Result + Send, String> { + /// + /// TODO: See the note on references in ASYNC.md. 
+ /// + fn stream<'a, 'b>(&'a mut self) -> Result>, String> { self .inner .stdin(Stdio::null()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) - .spawn_async() + .spawn() .map_err(|e| format!("Error launching process: {:?}", e)) .and_then(|mut child| { - let stdout_stream = FramedRead::new(child.stdout().take().unwrap(), BytesCodec::new()) - .map(|bytes| ChildOutput::Stdout(bytes.into())); - let stderr_stream = FramedRead::new(child.stderr().take().unwrap(), BytesCodec::new()) - .map(|bytes| ChildOutput::Stderr(bytes.into())); - let exit_stream = child.into_stream().map(|exit_status| { - ChildOutput::Exit(ExitCode( - exit_status - .code() - .or_else(|| exit_status.signal().map(Neg::neg)) - .expect("Child process should exit via returned code or signal."), - )) - }); + let stdout_stream = FramedRead::new(child.stdout.take().unwrap(), BytesCodec::new()) + .map_ok(|bytes| ChildOutput::Stdout(bytes.into())) + .boxed(); + let stderr_stream = FramedRead::new(child.stderr.take().unwrap(), BytesCodec::new()) + .map_ok(|bytes| ChildOutput::Stderr(bytes.into())) + .boxed(); + let exit_stream = child + .into_stream() + .map_ok(|exit_status| { + ChildOutput::Exit(ExitCode( + exit_status + .code() + .or_else(|| exit_status.signal().map(Neg::neg)) + .expect("Child process should exit via returned code or signal."), + )) + }) + .boxed(); Ok( - stdout_stream - .select(stderr_stream) - .select(exit_stream) - .map_err(|e| format!("Failed to consume process outputs: {:?}", e)), + futures::stream::select_all(vec![stdout_stream, stderr_stream, exit_stream]) + .map_err(|e| format!("Failed to consume process outputs: {:?}", e)) + .boxed(), ) }) } @@ -181,31 +191,27 @@ pub struct ChildResults { } impl ChildResults { - pub fn collect_from( - stream: impl Stream + Send, - ) -> impl Future { - let init = ( - BytesMut::with_capacity(8192), - BytesMut::with_capacity(8192), - 0, - ); - stream - .fold( - init, - |(mut stdout, mut stderr, mut exit_code), child_output| { - match child_output { - ChildOutput::Stdout(bytes) => stdout.extend_from_slice(&bytes), - ChildOutput::Stderr(bytes) => stderr.extend_from_slice(&bytes), - ChildOutput::Exit(code) => exit_code = code.0, - }; - Ok((stdout, stderr, exit_code)) as Result<_, E> - }, - ) - .map(|(stdout, stderr, exit_code)| ChildResults { + pub fn collect_from( + mut stream: BoxStream>, + ) -> futures::future::BoxFuture> { + let mut stdout = BytesMut::with_capacity(8192); + let mut stderr = BytesMut::with_capacity(8192); + let mut exit_code = 1; + + Box::pin(async move { + while let Some(child_output_res) = stream.next().await { + match child_output_res? { + ChildOutput::Stdout(bytes) => stdout.extend_from_slice(&bytes), + ChildOutput::Stderr(bytes) => stderr.extend_from_slice(&bytes), + ChildOutput::Exit(code) => exit_code = code.0, + }; + } + Ok(ChildResults { stdout: stdout.into(), stderr: stderr.into(), exit_code, }) + }) } } @@ -253,12 +259,12 @@ impl super::CommandRunner for CommandRunner { } } impl CapturedWorkdir for CommandRunner { - fn run_in_workdir( - &self, - workdir_path: &Path, + fn run_in_workdir<'a, 'b, 'c>( + &'a self, + workdir_path: &'b Path, req: ExecuteProcessRequest, _context: Context, - ) -> Result + Send>, String> { + ) -> Result>, String> { StreamedHermeticCommand::new(&req.argv[0]) .args(&req.argv[1..]) .current_dir(if let Some(working_directory) = req.working_directory { @@ -268,11 +274,6 @@ impl CapturedWorkdir for CommandRunner { }) .envs(&req.env) .stream() - .map(|s| { - // NB: Converting from `impl Stream` to `Box` requires this odd dance. 
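(Editor's aside, not part of the patch.) The new `stream` implementation above merges the child's stdout, stderr, and exit status by boxing each as a `BoxStream` and feeding them to `futures::stream::select_all`. A stripped-down sketch of that shape, with an illustrative `Output` enum in place of `ChildOutput` and only the two byte pipes:

```rust
use bytes::Bytes;
use futures::stream::{BoxStream, StreamExt, TryStreamExt};
use tokio_util::codec::{BytesCodec, FramedRead};

enum Output {
    Stdout(Bytes),
    Stderr(Bytes),
}

fn merged_output(
    stdout: tokio::process::ChildStdout,
    stderr: tokio::process::ChildStderr,
) -> BoxStream<'static, Result<Output, std::io::Error>> {
    // Each pipe becomes a stream of byte chunks...
    let stdout = FramedRead::new(stdout, BytesCodec::new())
        .map_ok(|bytes| Output::Stdout(bytes.freeze()))
        .boxed();
    let stderr = FramedRead::new(stderr, BytesCodec::new())
        .map_ok(|bytes| Output::Stderr(bytes.freeze()))
        .boxed();
    // ...and select_all interleaves them as chunks become ready.
    futures::stream::select_all(vec![stdout, stderr]).boxed()
}
```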
- let stream: Box + Send> = Box::new(s); - stream - }) } } @@ -370,8 +371,12 @@ pub trait CapturedWorkdir { // https://github.com/pantsbuild/pants/issues/6089 .map(ChildResults::collect_from) .and_then(move |child_results_future| { - Timeout::new(child_results_future, req_timeout).map_err(|e| e.to_string()) + timeout(req_timeout, child_results_future) + .boxed() + .compat() + .map_err(|e| e.to_string()) }) + .and_then(|res| res) .and_then(move |child_results| { let output_snapshot = if output_file_paths.is_empty() && output_dir_paths.is_empty() { future::ok(store::Snapshot::empty()).to_boxed() @@ -440,10 +445,13 @@ pub trait CapturedWorkdir { .to_boxed() } - fn run_in_workdir( - &self, - workdir_path: &Path, + /// + /// TODO: See the note on references in ASYNC.md. + /// + fn run_in_workdir<'a, 'b, 'c>( + &'a self, + workdir_path: &'b Path, req: ExecuteProcessRequest, context: Context, - ) -> Result + Send>, String>; + ) -> Result>, String>; } diff --git a/src/rust/engine/process_execution/src/local_tests.rs b/src/rust/engine/process_execution/src/local_tests.rs index 0beaef690de..33f4bcedc7e 100644 --- a/src/rust/engine/process_execution/src/local_tests.rs +++ b/src/rust/engine/process_execution/src/local_tests.rs @@ -5,6 +5,7 @@ use crate::{ CommandRunner as CommandRunnerTrait, Context, ExecuteProcessRequest, FallibleExecuteProcessResult, PlatformConstraint, RelativePath, }; +use futures::compat::Future01CompatExt; use hashing::EMPTY_DIGEST; use spectral::{assert_that, string::StrAssertions}; use std; @@ -16,10 +17,11 @@ use tempfile::TempDir; use testutil::data::{TestData, TestDirectory}; use testutil::path::find_bash; use testutil::{as_bytes, owned_string_vec}; +use tokio::runtime::Handle; -#[test] +#[tokio::test] #[cfg(unix)] -fn stdout() { +async fn stdout() { let result = run_command_locally(ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "-n", "foo"]), env: BTreeMap::new(), @@ -33,7 +35,8 @@ fn stdout() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -47,9 +50,9 @@ fn stdout() { ) } -#[test] +#[tokio::test] #[cfg(unix)] -fn stdout_and_stderr_and_exit_code() { +async fn stdout_and_stderr_and_exit_code() { let result = run_command_locally(ExecuteProcessRequest { argv: owned_string_vec(&["/bin/bash", "-c", "echo -n foo ; echo >&2 -n bar ; exit 1"]), env: BTreeMap::new(), @@ -63,7 +66,8 @@ fn stdout_and_stderr_and_exit_code() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -77,9 +81,9 @@ fn stdout_and_stderr_and_exit_code() { ) } -#[test] +#[tokio::test] #[cfg(unix)] -fn capture_exit_code_signal() { +async fn capture_exit_code_signal() { // Launch a process that kills itself with a signal. 
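(Editor's aside on the timeout hunk above, not part of the patch.) `tokio::time::timeout` wraps the inner future's output in a `Result<_, Elapsed>`, so when the wrapped future itself returns a `Result` the value comes back doubly nested; the `.and_then(|res| res)` added above flattens it after the `.compat()` bridge. The same flattening in plain async form, as a sketch with illustrative types:

```rust
use std::time::Duration;
use tokio::time::timeout;

async fn run_with_timeout() -> Result<u32, String> {
    let inner = async { Ok::<u32, String>(7) };
    match timeout(Duration::from_secs(5), inner).await {
        Err(_elapsed) => Err("timed out".to_string()),
        Ok(inner_result) => inner_result, // flatten Result<Result<u32, String>, Elapsed>
    }
}
```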
let result = run_command_locally(ExecuteProcessRequest { argv: owned_string_vec(&["/bin/bash", "-c", "kill $$"]), @@ -94,7 +98,8 @@ fn capture_exit_code_signal() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -108,9 +113,9 @@ fn capture_exit_code_signal() { ) } -#[test] +#[tokio::test] #[cfg(unix)] -fn env() { +async fn env() { let mut env: BTreeMap = BTreeMap::new(); env.insert("FOO".to_string(), "foo".to_string()); env.insert("BAR".to_string(), "not foo".to_string()); @@ -128,7 +133,8 @@ fn env() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; let stdout = String::from_utf8(result.unwrap().stdout.to_vec()).unwrap(); let got_env: BTreeMap = stdout @@ -147,9 +153,9 @@ fn env() { assert_eq!(env, got_env); } -#[test] +#[tokio::test] #[cfg(unix)] -fn env_is_deterministic() { +async fn env_is_deterministic() { fn make_request() -> ExecuteProcessRequest { let mut env = BTreeMap::new(); env.insert("FOO".to_string(), "foo".to_string()); @@ -171,14 +177,14 @@ fn env_is_deterministic() { } } - let result1 = run_command_locally(make_request()); - let result2 = run_command_locally(make_request()); + let result1 = run_command_locally(make_request()).await; + let result2 = run_command_locally(make_request()).await; assert_eq!(result1.unwrap(), result2.unwrap()); } -#[test] -fn binary_not_found() { +#[tokio::test] +async fn binary_not_found() { run_command_locally(ExecuteProcessRequest { argv: owned_string_vec(&["echo", "-n", "foo"]), env: BTreeMap::new(), @@ -193,11 +199,12 @@ fn binary_not_found() { target_platform: PlatformConstraint::None, is_nailgunnable: false, }) + .await .expect_err("Want Err"); } -#[test] -fn output_files_none() { +#[tokio::test] +async fn output_files_none() { let result = run_command_locally(ExecuteProcessRequest { argv: owned_string_vec(&[&find_bash(), "-c", "exit 0"]), env: BTreeMap::new(), @@ -211,7 +218,8 @@ fn output_files_none() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), FallibleExecuteProcessResult { @@ -224,8 +232,8 @@ fn output_files_none() { ) } -#[test] -fn output_files_one() { +#[tokio::test] +async fn output_files_one() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -243,7 +251,8 @@ fn output_files_one() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -257,8 +266,8 @@ fn output_files_one() { ) } -#[test] -fn output_dirs() { +#[tokio::test] +async fn output_dirs() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -281,7 +290,8 @@ fn output_dirs() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -295,8 +305,8 @@ fn output_dirs() { ) } -#[test] -fn output_files_many() { +#[tokio::test] +async fn output_files_many() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -320,7 +330,8 @@ fn output_files_many() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -334,8 +345,8 @@ fn output_files_many() { ) } -#[test] -fn output_files_execution_failure() { +#[tokio::test] +async fn output_files_execution_failure() { let result = 
run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -357,7 +368,8 @@ fn output_files_execution_failure() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -371,8 +383,8 @@ fn output_files_execution_failure() { ) } -#[test] -fn output_files_partial_output() { +#[tokio::test] +async fn output_files_partial_output() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -392,7 +404,8 @@ fn output_files_partial_output() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -406,8 +419,8 @@ fn output_files_partial_output() { ) } -#[test] -fn output_overlapping_file_and_dir() { +#[tokio::test] +async fn output_overlapping_file_and_dir() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -425,7 +438,8 @@ fn output_overlapping_file_and_dir() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -439,8 +453,8 @@ fn output_overlapping_file_and_dir() { ) } -#[test] -fn jdk_symlink() { +#[tokio::test] +async fn jdk_symlink() { let preserved_work_tmpdir = TempDir::new().unwrap(); let roland = TestData::roland().bytes(); std::fs::write(preserved_work_tmpdir.path().join("roland"), roland.clone()) @@ -458,7 +472,8 @@ fn jdk_symlink() { jdk_home: Some(preserved_work_tmpdir.path().to_path_buf()), target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result, Ok(FallibleExecuteProcessResult { @@ -471,8 +486,8 @@ fn jdk_symlink() { ) } -#[test] -fn test_directory_preservation() { +#[tokio::test] +async fn test_directory_preservation() { let preserved_work_tmpdir = TempDir::new().unwrap(); let preserved_work_root = preserved_work_tmpdir.path().to_owned(); @@ -499,7 +514,8 @@ fn test_directory_preservation() { false, None, None, - ); + ) + .await; result.unwrap(); assert!(preserved_work_root.exists()); @@ -513,8 +529,8 @@ fn test_directory_preservation() { assert!(rolands_path.exists()); } -#[test] -fn test_directory_preservation_error() { +#[tokio::test] +async fn test_directory_preservation_error() { let preserved_work_tmpdir = TempDir::new().unwrap(); let preserved_work_root = preserved_work_tmpdir.path().to_owned(); @@ -541,6 +557,7 @@ fn test_directory_preservation_error() { None, None, ) + .await .expect_err("Want process to fail"); assert!(preserved_work_root.exists()); @@ -548,8 +565,8 @@ fn test_directory_preservation_error() { assert_eq!(testutil::file::list_dir(&preserved_work_root).len(), 1); } -#[test] -fn all_containing_directories_for_outputs_are_created() { +#[tokio::test] +async fn all_containing_directories_for_outputs_are_created() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -573,7 +590,8 @@ fn all_containing_directories_for_outputs_are_created() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; assert_eq!( result.unwrap(), @@ -587,8 +605,8 @@ fn all_containing_directories_for_outputs_are_created() { ) } -#[test] -fn output_empty_dir() { +#[tokio::test] +async fn output_empty_dir() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -606,7 +624,8 @@ fn output_empty_dir() { jdk_home: None, target_platform: PlatformConstraint::None, is_nailgunnable: false, - }); + }) + .await; 
assert_eq!( result.unwrap(), @@ -623,19 +642,23 @@ fn output_empty_dir() { /// This test attempts to make sure local only scratch files are materialized correctly by /// making sure that with input_files being empty, we would be able to capture the content of /// the local only scratch inputs as outputs. -#[test] -fn local_only_scratch_files_materialized() { +#[tokio::test] +async fn local_only_scratch_files_materialized() { let store_dir = TempDir::new().unwrap(); - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); let store = Store::local_only(executor.clone(), store_dir.path()).unwrap(); // Prepare the store to contain roland, because the EPR needs to materialize it let roland_directory_digest = TestDirectory::containing_roland().digest(); - executor - .block_on(store.record_directory(&TestDirectory::containing_roland().directory(), true)) + store + .record_directory(&TestDirectory::containing_roland().directory(), true) + .compat() + .await .expect("Error saving directory"); - executor - .block_on(store.store_file_bytes(TestData::roland().bytes(), false)) + store + .store_file_bytes(TestData::roland().bytes(), false) + .compat() + .await .expect("Error saving file bytes"); let work_dir = TempDir::new().unwrap(); @@ -659,7 +682,8 @@ fn local_only_scratch_files_materialized() { true, Some(store), Some(executor), - ); + ) + .await; assert_eq!( result.unwrap(), @@ -673,8 +697,8 @@ fn local_only_scratch_files_materialized() { ); } -#[test] -fn timeout() { +#[tokio::test] +async fn timeout() { let result = run_command_locally(ExecuteProcessRequest { argv: vec![ find_bash(), @@ -693,6 +717,7 @@ fn timeout() { target_platform: PlatformConstraint::None, is_nailgunnable: false, }) + .await .unwrap(); assert_eq!(result.exit_code, -15); @@ -701,22 +726,28 @@ fn timeout() { assert_that(&error_msg).contains("sleepy-cat"); } -#[test] -fn working_directory() { +#[tokio::test] +async fn working_directory() { let store_dir = TempDir::new().unwrap(); - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); let store = Store::local_only(executor.clone(), store_dir.path()).unwrap(); // Prepare the store to contain /cats/roland, because the EPR needs to materialize it and then run // from the ./cats directory. 
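(Editor's aside, not part of the patch.) These tests now build their `task_executor::Executor` from `Handle::current()`: under `#[tokio::test]` a runtime is already running, so the helper captures a handle to it rather than constructing a second `tokio::runtime::Runtime`. A tokio-only sketch of that idea (the `Executor` wrapper itself is project-internal and not shown):

```rust
use tokio::runtime::Handle;

#[tokio::test]
async fn use_the_ambient_runtime() {
    // The handle refers to the runtime started by #[tokio::test]; it is the value the
    // hunks above pass into task_executor::Executor::new(..).
    let _handle = Handle::current();
    // From inside that runtime, spawning is available directly.
    let sum = tokio::spawn(async { 2 + 2 }).await.unwrap();
    assert_eq!(sum, 4);
}
```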
- executor - .block_on(store.store_file_bytes(TestData::roland().bytes(), false)) + store + .store_file_bytes(TestData::roland().bytes(), false) + .compat() + .await .expect("Error saving file bytes"); - executor - .block_on(store.record_directory(&TestDirectory::containing_roland().directory(), true)) + store + .record_directory(&TestDirectory::containing_roland().directory(), true) + .compat() + .await .expect("Error saving directory"); - executor - .block_on(store.record_directory(&TestDirectory::nested().directory(), true)) + store + .record_directory(&TestDirectory::nested().directory(), true) + .compat() + .await .expect("Error saving directory"); let work_dir = TempDir::new().unwrap(); @@ -739,7 +770,8 @@ fn working_directory() { true, Some(store), Some(executor), - ); + ) + .await; assert_eq!( result.unwrap(), @@ -753,19 +785,19 @@ fn working_directory() { ); } -fn run_command_locally(req: ExecuteProcessRequest) -> Result { +async fn run_command_locally(req: ExecuteProcessRequest) -> Result { let work_dir = TempDir::new().unwrap(); - run_command_locally_in_dir_with_cleanup(req, work_dir.path().to_owned()) + run_command_locally_in_dir_with_cleanup(req, work_dir.path().to_owned()).await } -fn run_command_locally_in_dir_with_cleanup( +async fn run_command_locally_in_dir_with_cleanup( req: ExecuteProcessRequest, dir: PathBuf, ) -> Result { - run_command_locally_in_dir(req, dir, true, None, None) -} + run_command_locally_in_dir(req, dir, true, None, None).await + -fn run_command_locally_in_dir( +async fn run_command_locally_in_dir( req: ExecuteProcessRequest, dir: PathBuf, cleanup: bool, @@ -773,9 +805,9 @@ fn run_command_locally_in_dir( executor: Option, ) -> Result { let store_dir = TempDir::new().unwrap(); - let executor = executor.unwrap_or_else(task_executor::Executor::new); + let executor = executor.unwrap_or_else(|| task_executor::Executor::new(Handle::current())); let store = store.unwrap_or_else(|| Store::local_only(executor.clone(), store_dir.path()).unwrap()); let runner = crate::local::CommandRunner::new(store, executor.clone(), dir, cleanup); - executor.block_on(runner.run(req.into(), Context::default())) + runner.run(req.into(), Context::default()).compat().await } diff --git a/src/rust/engine/process_execution/src/nailgun/mod.rs b/src/rust/engine/process_execution/src/nailgun/mod.rs index 59e51132db8..e8865cac1db 100644 --- a/src/rust/engine/process_execution/src/nailgun/mod.rs +++ b/src/rust/engine/process_execution/src/nailgun/mod.rs @@ -7,8 +7,7 @@ use std::time::Duration; use boxfuture::{try_future, BoxFuture, Boxable}; use futures::compat::Future01CompatExt; use futures::future::{FutureExt, TryFutureExt}; -use futures01::future::Future; -use futures01::stream::Stream; +use futures::stream::{BoxStream, StreamExt}; use log::{debug, trace}; use nails::execution::{child_channel, ChildInput, ChildOutput, Command}; use tokio::net::TcpStream; @@ -207,12 +206,12 @@ impl super::CommandRunner for CommandRunner { } impl CapturedWorkdir for CommandRunner { - fn run_in_workdir( - &self, - workdir_path: &Path, + fn run_in_workdir<'a, 'b, 'c>( + &'a self, + workdir_path: &'b Path, req: ExecuteProcessRequest, context: Context, - ) -> Result + Send>, String> { + ) -> Result>, String> { // Separate argument lists, to form distinct EPRs for (1) starting the nailgun server and (2) running the client in it. 
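(Editor's aside, not part of the patch.) The `run_in_workdir<'a, 'b, 'c>` signatures in these hunks return a boxed stream with explicit lifetimes instead of becoming `async fn`s, per the ASYNC.md note referenced in the local.rs hunks: a boxed value with named lifetimes is something a futures 0.1 caller can hold, whereas borrows inside an `async fn` must stay on the caller's stack. A minimal sketch of the same boundary pattern using a boxed future (names are illustrative):

```rust
use futures::future::{BoxFuture, FutureExt};

struct Runner {
    prefix: String,
}

impl Runner {
    // The returned future borrows self for 'a; callers that cannot await in place
    // (e.g. futures 0.1 adapters) can still box and store it.
    fn run<'a>(&'a self, arg: String) -> BoxFuture<'a, String> {
        async move { format!("{}: {}", self.prefix, arg) }.boxed()
    }
}
```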
let ParsedJVMCommandLines { nailgun_args, @@ -271,8 +270,6 @@ impl CapturedWorkdir for CommandRunner { ) .compat() }) - .boxed() - .compat() .map_err(|e| format!("Failed to connect to nailgun! {}", e)) .inspect(move |_| debug!("Connected to nailgun instance {}", &nailgun_name3)) .and_then(move |nailgun_port| { @@ -292,18 +289,14 @@ impl CapturedWorkdir for CommandRunner { trace!("Client request: {:#?}", client_req); let addr: SocketAddr = format!("127.0.0.1:{:?}", nailgun_port).parse().unwrap(); debug!("Connecting to server at {}...", addr); - TcpStream::connect(&addr) + TcpStream::connect(addr) .and_then(move |stream| { nails::client_handle_connection(stream, cmd, stdio_write, stdin_read) }) .map_err(|e| format!("Error communicating with server: {}", e)) - .map(ChildOutput::Exit) + .map_ok(ChildOutput::Exit) }); - Ok(Box::new( - stdio_read - .map_err(|()| unreachable!()) - .select(nails_command.into_stream()), - )) + Ok(futures::stream::select(stdio_read.map(Ok), nails_command.into_stream()).boxed()) } } diff --git a/src/rust/engine/process_execution/src/nailgun/tests.rs b/src/rust/engine/process_execution/src/nailgun/tests.rs index 1b6252ff499..2115c33655e 100644 --- a/src/rust/engine/process_execution/src/nailgun/tests.rs +++ b/src/rust/engine/process_execution/src/nailgun/tests.rs @@ -1,16 +1,18 @@ use crate::nailgun::{CommandRunner, ARGS_TO_START_NAILGUN, NAILGUN_MAIN_CLASS}; use crate::{ExecuteProcessRequest, ExecuteProcessRequestMetadata, PlatformConstraint}; +use futures::compat::Future01CompatExt; use hashing::EMPTY_DIGEST; use std::fs::read_link; use std::os::unix::fs::symlink; use std::path::PathBuf; use store::Store; use tempfile::TempDir; +use tokio::runtime::Handle; use workunit_store::WorkUnitStore; fn mock_nailgun_runner(workdir_base: Option) -> CommandRunner { let store_dir = TempDir::new().unwrap(); - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); let store = Store::local_only(executor.clone(), store_dir.path()).unwrap(); let local_runner = crate::local::CommandRunner::new(store, executor.clone(), std::env::temp_dir(), true); @@ -48,8 +50,8 @@ fn mock_nailgunnable_request(jdk_home: Option) -> ExecuteProcessRequest } } -#[test] -fn get_workdir_creates_directory_if_it_doesnt_exist() { +#[tokio::test] +async fn get_workdir_creates_directory_if_it_doesnt_exist() { let mock_workdir_base = unique_temp_dir(std::env::temp_dir(), None) .path() .to_owned(); @@ -63,8 +65,8 @@ fn get_workdir_creates_directory_if_it_doesnt_exist() { assert!(target_workdir.exists()); } -#[test] -fn get_workdir_returns_the_workdir_when_it_exists() { +#[tokio::test] +async fn get_workdir_returns_the_workdir_when_it_exists() { let mock_workdir_base = unique_temp_dir(std::env::temp_dir(), None) .path() .to_owned(); @@ -82,8 +84,8 @@ fn get_workdir_returns_the_workdir_when_it_exists() { assert!(target_workdir.exists()); } -#[test] -fn creating_nailgun_server_request_updates_the_cli() { +#[tokio::test] +async fn creating_nailgun_server_request_updates_the_cli() { let req = super::construct_nailgun_server_request( &NAILGUN_MAIN_CLASS.to_string(), Vec::new(), @@ -94,26 +96,25 @@ fn creating_nailgun_server_request_updates_the_cli() { assert_eq!(req.argv[1..], ARGS_TO_START_NAILGUN); } -#[test] -fn creating_nailgun_client_request_removes_jdk_home() { +#[tokio::test] +async fn creating_nailgun_client_request_removes_jdk_home() { let original_req = mock_nailgunnable_request(Some(PathBuf::from("some/path"))); let req = 
super::construct_nailgun_client_request(original_req, "".to_string(), vec![]); assert_eq!(req.jdk_home, None); } -#[test] -fn nailgun_name_is_the_main_class() { +#[tokio::test] +async fn nailgun_name_is_the_main_class() { let main_class = "my.main.class".to_string(); let name = super::CommandRunner::calculate_nailgun_name(&main_class); assert_eq!(name, format!("nailgun_server_{}", main_class)); } -fn materialize_with_jdk( +async fn materialize_with_jdk( runner: &CommandRunner, dir: PathBuf, jdk_path: PathBuf, ) -> Result<(), String> { - let executor = task_executor::Executor::new(); let materializer = super::NailgunPool::materialize_workdir_for_server( runner.inner.store.clone(), dir, @@ -121,11 +122,11 @@ fn materialize_with_jdk( EMPTY_DIGEST, WorkUnitStore::new(), ); - executor.block_on(materializer) + materializer.compat().await } -#[test] -fn materializing_workdir_for_server_creates_a_link_for_the_jdk() { +#[tokio::test] +async fn materializing_workdir_for_server_creates_a_link_for_the_jdk() { let workdir_base_tempdir = unique_temp_dir(std::env::temp_dir(), None); let workdir_base = workdir_base_tempdir.path().to_owned(); let mock_jdk_dir = unique_temp_dir(std::env::temp_dir(), None); @@ -140,7 +141,7 @@ fn materializing_workdir_for_server_creates_a_link_for_the_jdk() { // Assert that the materialization was successful let materialization_result = - materialize_with_jdk(&runner, workdir_for_server.clone(), mock_jdk_path.clone()); + materialize_with_jdk(&runner, workdir_for_server.clone(), mock_jdk_path.clone()).await; assert_eq!(materialization_result, Ok(())); // Assert that the symlink points to the requested jdk @@ -150,8 +151,8 @@ fn materializing_workdir_for_server_creates_a_link_for_the_jdk() { assert_eq!(materialized_jdk.unwrap(), mock_jdk_path); } -#[test] -fn materializing_workdir_for_server_replaces_jdk_link_if_a_different_one_is_requested() { +#[tokio::test] +async fn materializing_workdir_for_server_replaces_jdk_link_if_a_different_one_is_requested() { let workdir_base_tempdir = unique_temp_dir(std::env::temp_dir(), None); let workdir_base = workdir_base_tempdir.path().to_owned(); @@ -174,7 +175,7 @@ fn materializing_workdir_for_server_replaces_jdk_link_if_a_different_one_is_requ // Trigger materialization of the nailgun server workdir let materialization_result = - materialize_with_jdk(&runner, workdir_for_server, requested_mock_jdk_path.clone()); + materialize_with_jdk(&runner, workdir_for_server, requested_mock_jdk_path.clone()).await; assert!(materialization_result.is_ok()); // Assert that the symlink points to the requested jdk, and not the original one diff --git a/src/rust/engine/process_execution/src/remote.rs b/src/rust/engine/process_execution/src/remote.rs index add74be30a2..c0541273b0c 100644 --- a/src/rust/engine/process_execution/src/remote.rs +++ b/src/rust/engine/process_execution/src/remote.rs @@ -10,6 +10,8 @@ use bytes::Bytes; use concrete_time::TimeSpan; use digest::{Digest as DigestTrait, FixedOutput}; use fs::{self, File, PathStat}; +use futures::compat::Future01CompatExt; +use futures::future::{FutureExt, TryFutureExt}; use futures01::{future, Future, Stream}; use grpcio; use hashing::{Digest, Fingerprint}; @@ -18,7 +20,7 @@ use log::{debug, trace, warn}; use protobuf::{self, Message, ProtobufEnum}; use sha2::Sha256; use store::{Snapshot, Store, StoreFileByDigest}; -use tokio_timer::Delay; +use tokio::time::delay_for; use crate::{ Context, ExecuteProcessRequest, ExecuteProcessRequestMetadata, ExecutionStats, @@ -77,13 +79,12 @@ impl Drop for 
CancelRemoteExecutionToken { .cancel_operation_async(&cancel_op_req) { Ok(receiver) => { - self.executor.spawn_and_ignore(receiver.then(move |res| { - match res { + self.executor.spawn_and_ignore(async move { + match receiver.compat().await { Ok(_) => debug!("Canceled operation {} successfully", operation_name), Err(err) => debug!("Failed to cancel operation {}, err {}", operation_name, err), } - Ok(()) - })); + }); } Err(err) => debug!( "Failed to schedule cancel operation: {}, err {}", @@ -405,11 +406,14 @@ impl super::CommandRunner for CommandRunner { .to_boxed() } else { // maybe the delay here should be the min of remaining time and the backoff period - Delay::new(Instant::now() + backoff_period) - .map_err(move |e| { + delay_for(backoff_period) + .unit_error() + .boxed() + .compat() + .map_err(move |()| { format!( - "Future-Delay errored at operation result polling for {}, {}: {}", - operation_name, description, e + "Future-Delay errored at operation result polling for {}, {}", + operation_name, description ) }) .and_then(move |_| { diff --git a/src/rust/engine/process_execution/src/remote_tests.rs b/src/rust/engine/process_execution/src/remote_tests.rs index 8590967c685..1fcd007a09e 100644 --- a/src/rust/engine/process_execution/src/remote_tests.rs +++ b/src/rust/engine/process_execution/src/remote_tests.rs @@ -2,6 +2,8 @@ use bazel_protos; use bazel_protos::operations::Operation; use bazel_protos::remote_execution::ExecutedActionMetadata; use bytes::Bytes; +use futures::compat::Future01CompatExt; +use futures::future::{FutureExt, TryFutureExt}; use futures01::{future, Future}; use grpcio; use hashing::{Digest, Fingerprint, EMPTY_DIGEST}; @@ -28,8 +30,9 @@ use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::iter::{self, FromIterator}; use std::ops::Sub; use std::path::{Path, PathBuf}; -use std::time::{Duration, Instant}; -use tokio::timer::Delay; +use std::time::Duration; +use tokio::runtime::Handle; +use tokio::time::delay_for; use workunit_store::{WorkUnit, WorkUnitStore}; #[derive(Debug, PartialEq)] @@ -48,8 +51,8 @@ enum StderrType { /// is ignored for remoting by showing EPR with different digests of /// `unsafe_local_only_files_because_we_favor_speed_over_correctness_for_this_rule` /// end up having the same bazel_protos::remote_execution::ExecuteRequest. 
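(Editor's aside on the retry-backoff hunk above, not part of the patch.) `tokio::time::delay_for` takes a `Duration` directly and resolves to `()`, so handing it to the surrounding futures 0.1 combinator chain needs the `unit_error().boxed().compat()` bridge seen in the diff. An isolated sketch of that bridge; `backoff_then` and its message are illustrative, and the `compat` feature of futures 0.3 is assumed:

```rust
use std::time::Duration;

use futures::future::{FutureExt, TryFutureExt};
use futures01::Future as Future01;
use tokio::time::delay_for;

fn backoff_then(msg: &'static str) -> impl Future01<Item = &'static str, Error = String> {
    delay_for(Duration::from_millis(100))
        .unit_error() // () -> Result<(), ()> so the compat layer has an error type
        .boxed() // pin + box, making the future Unpin for .compat()
        .compat() // std::future -> futures 0.1
        .map_err(|()| "delay failed".to_string())
        .map(move |()| msg)
}
```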
-#[test] -fn local_only_scratch_files_ignored() { +#[tokio::test] +async fn local_only_scratch_files_ignored() { let input_directory = TestDirectory::containing_roland(); let req1 = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -107,8 +110,8 @@ fn local_only_scratch_files_ignored() { ); } -#[test] -fn make_execute_request() { +#[tokio::test] +async fn make_execute_request() { let input_directory = TestDirectory::containing_roland(); let req = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -191,8 +194,8 @@ fn make_execute_request() { ); } -#[test] -fn make_execute_request_with_instance_name() { +#[tokio::test] +async fn make_execute_request_with_instance_name() { let input_directory = TestDirectory::containing_roland(); let req = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -283,8 +286,8 @@ fn make_execute_request_with_instance_name() { ); } -#[test] -fn make_execute_request_with_cache_key_gen_version() { +#[tokio::test] +async fn make_execute_request_with_cache_key_gen_version() { let input_directory = TestDirectory::containing_roland(); let req = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -380,8 +383,8 @@ fn make_execute_request_with_cache_key_gen_version() { ); } -#[test] -fn make_execute_request_with_jdk() { +#[tokio::test] +async fn make_execute_request_with_jdk() { let input_directory = TestDirectory::containing_roland(); let req = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -446,8 +449,8 @@ fn make_execute_request_with_jdk() { ); } -#[test] -fn make_execute_request_with_jdk_and_extra_platform_properties() { +#[tokio::test] +async fn make_execute_request_with_jdk_and_extra_platform_properties() { let input_directory = TestDirectory::containing_roland(); let req = ExecuteProcessRequest { argv: owned_string_vec(&["/bin/echo", "yo"]), @@ -548,8 +551,8 @@ fn make_execute_request_with_jdk_and_extra_platform_properties() { ); } -#[test] -fn server_rejecting_execute_request_gives_error() { +#[tokio::test] +async fn server_rejecting_execute_request_gives_error() { let execute_request = echo_foo_request(); let mock_server = { @@ -582,13 +585,15 @@ fn server_rejecting_execute_request_gives_error() { ) }; - let error = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + let error = run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); assert_that(&error).contains("InvalidArgument"); assert_that(&error).contains("Did not expect this request"); } -#[test] -fn successful_execution_after_one_getoperation() { +#[tokio::test] +async fn successful_execution_after_one_getoperation() { let execute_request = echo_foo_request(); let op_name = "gimme-foo".to_string(); @@ -616,7 +621,9 @@ fn successful_execution_after_one_getoperation() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).unwrap(); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -632,8 +639,8 @@ fn successful_execution_after_one_getoperation() { assert_cancellation_requests(&mock_server, vec![]); } -#[test] -fn retries_retriable_errors() { +#[tokio::test] +async fn retries_retriable_errors() { let execute_request = echo_foo_request(); let op_name = "gimme-foo".to_string(); @@ -663,7 +670,9 @@ fn retries_retriable_errors() { ) }; - let result = run_command_remote(mock_server.address(), 
execute_request).unwrap(); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -679,8 +688,8 @@ fn retries_retriable_errors() { assert_cancellation_requests(&mock_server, vec![]); } -#[test] -fn gives_up_after_many_retriable_errors() { +#[tokio::test] +async fn gives_up_after_many_retriable_errors() { let execute_request = echo_foo_request(); let op_name = "gimme-foo".to_string(); @@ -711,7 +720,9 @@ fn gives_up_after_many_retriable_errors() { ) }; - let err = run_command_remote(mock_server.address(), execute_request).unwrap_err(); + let err = run_command_remote(mock_server.address(), execute_request) + .await + .unwrap_err(); assert_that!(err).contains("Gave up"); assert_that!(err).contains("appears to be lost"); @@ -719,8 +730,8 @@ fn gives_up_after_many_retriable_errors() { assert_cancellation_requests(&mock_server, vec![]); } -#[test] -pub fn sends_headers() { +#[tokio::test] +async fn sends_headers() { let execute_request = echo_foo_request(); let op_name = "gimme-foo".to_string(); @@ -748,7 +759,7 @@ pub fn sends_headers() { ) }; let cas = mock::StubCAS::empty(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store_dir = TempDir::new().unwrap(); let store = Store::with_remote( runtime.clone(), @@ -786,9 +797,10 @@ pub fn sends_headers() { workunit_store: WorkUnitStore::default(), build_id: String::from("marmosets"), }; - tokio::runtime::Runtime::new() - .unwrap() - .block_on(command_runner.run(execute_request, context)) + command_runner + .run(execute_request, context) + .compat() + .await .expect("Execution failed"); let received_messages = mock_server.mock_responder.received_messages.lock(); @@ -824,8 +836,8 @@ pub fn sends_headers() { } } -#[test] -fn extract_response_with_digest_stdout() { +#[tokio::test] +async fn extract_response_with_digest_stdout() { let op_name = "gimme-foo".to_string(); let testdata = TestData::roland(); let testdata_empty = TestData::empty(); @@ -841,6 +853,7 @@ fn extract_response_with_digest_stdout() { .unwrap() .unwrap() ) + .await .unwrap() .without_execution_attempts(), FallibleExecuteProcessResult { @@ -853,8 +866,8 @@ fn extract_response_with_digest_stdout() { ); } -#[test] -fn extract_response_with_digest_stderr() { +#[tokio::test] +async fn extract_response_with_digest_stderr() { let op_name = "gimme-foo".to_string(); let testdata = TestData::roland(); let testdata_empty = TestData::empty(); @@ -870,6 +883,7 @@ fn extract_response_with_digest_stderr() { .unwrap() .unwrap() ) + .await .unwrap() .without_execution_attempts(), FallibleExecuteProcessResult { @@ -882,9 +896,9 @@ fn extract_response_with_digest_stderr() { ); } -#[test] -fn ensure_inline_stdio_is_stored() { - let runtime = task_executor::Executor::new(); +#[tokio::test] +async fn ensure_inline_stdio_is_stored() { + let runtime = task_executor::Executor::new(Handle::current()); let test_stdout = TestData::roland(); let test_stderr = TestData::catnip(); @@ -946,8 +960,10 @@ fn ensure_inline_stdio_is_stored() { Duration::from_secs(0), ) .unwrap(); - let result = runtime - .block_on(cmd_runner.run(echo_roland_request(), Context::default())) + let result = cmd_runner + .run(echo_roland_request(), Context::default()) + .compat() + .await .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -964,24 +980,20 @@ fn ensure_inline_stdio_is_stored() { Store::local_only(runtime.clone(), &store_dir_path).expect("Error creating 
local store"); { assert_eq!( - runtime - .block_on(local_store.load_file_bytes_with( - test_stdout.digest(), - |v| v, - WorkUnitStore::new() - )) + local_store + .load_file_bytes_with(test_stdout.digest(), |v| v, WorkUnitStore::new()) + .compat() + .await .unwrap() .unwrap() .0, test_stdout.bytes() ); assert_eq!( - runtime - .block_on(local_store.load_file_bytes_with( - test_stderr.digest(), - |v| v, - WorkUnitStore::new() - )) + local_store + .load_file_bytes_with(test_stderr.digest(), |v| v, WorkUnitStore::new()) + .compat() + .await .unwrap() .unwrap() .0, @@ -990,8 +1002,8 @@ fn ensure_inline_stdio_is_stored() { } } -#[test] -fn successful_execution_after_four_getoperations() { +#[tokio::test] +async fn successful_execution_after_four_getoperations() { let execute_request = echo_foo_request(); let mock_server = { @@ -1021,7 +1033,9 @@ fn successful_execution_after_four_getoperations() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).unwrap(); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -1035,8 +1049,8 @@ fn successful_execution_after_four_getoperations() { ); } -#[test] -fn timeout_after_sufficiently_delayed_getoperations() { +#[tokio::test] +async fn timeout_after_sufficiently_delayed_getoperations() { let request_timeout = Duration::new(1, 0); // The request should timeout after 2 seconds, with 1 second due to the queue_buffer_time and // 1 due to the request_timeout. @@ -1076,7 +1090,9 @@ fn timeout_after_sufficiently_delayed_getoperations() { ) }; - let result = run_command_remote(mock_server.address(), execute_request.into()).unwrap(); + let result = run_command_remote(mock_server.address(), execute_request.into()) + .await + .unwrap(); assert_eq!(result.exit_code, -15); let error_msg = String::from_utf8(result.stdout.to_vec()).unwrap(); assert_that(&error_msg).contains("Exceeded timeout"); @@ -1089,9 +1105,9 @@ fn timeout_after_sufficiently_delayed_getoperations() { assert_cancellation_requests(&mock_server, vec![op_name.to_owned()]); } -#[test] #[ignore] // https://github.com/pantsbuild/pants/issues/8405 -fn dropped_request_cancels() { +#[tokio::test] +async fn dropped_request_cancels() { let request_timeout = Duration::new(10, 0); let delayed_operation_time = Duration::new(5, 0); @@ -1139,7 +1155,6 @@ fn dropped_request_cancels() { Duration::from_millis(0), Duration::from_secs(0), ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); let successful_mock_result = FallibleExecuteProcessResult { stdout: as_bytes("foo-fast"), @@ -1150,31 +1165,30 @@ fn dropped_request_cancels() { }; let run_future = command_runner.run(execute_request.into(), Context::default()); - let faster_future = Delay::new(Instant::now() + Duration::from_secs(1)) - .map_err(|err| format!("Error from timer: {}", err)) + let faster_future = delay_for(Duration::from_secs(1)) + .unit_error() + .compat() + .map_err(|()| "Error from timer.".to_string()) .map({ let successful_mock_result = successful_mock_result.clone(); |_| successful_mock_result }); - let result = runtime - .block_on( - run_future - .select(faster_future) - .map(|(result, _future)| result) - .map_err(|(err, _future)| err), - ) + let result = run_future + .select(faster_future) + .map(|(result, _future)| result) + .map_err(|(err, _future)| err) + .compat() + .await .unwrap(); assert_eq!(result.without_execution_attempts(), successful_mock_result); - runtime.shutdown_on_idle().wait().unwrap(); - 
assert_cancellation_requests(&mock_server, vec![op_name.to_owned()]); } -#[test] -fn retry_for_cancelled_channel() { +#[tokio::test] +async fn retry_for_cancelled_channel() { let execute_request = echo_foo_request(); let mock_server = { @@ -1204,7 +1218,9 @@ fn retry_for_cancelled_channel() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).unwrap(); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -1218,8 +1234,8 @@ fn retry_for_cancelled_channel() { ); } -#[test] -fn bad_result_bytes() { +#[tokio::test] +async fn bad_result_bytes() { let execute_request = echo_foo_request(); let mock_server = { @@ -1259,11 +1275,13 @@ fn bad_result_bytes() { ) }; - run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); } -#[test] -fn initial_response_error() { +#[tokio::test] +async fn initial_response_error() { let execute_request = echo_foo_request(); let mock_server = { @@ -1295,13 +1313,15 @@ fn initial_response_error() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); assert_eq!(result, "INTERNAL: Something went wrong"); } -#[test] -fn getoperation_response_error() { +#[tokio::test] +async fn getoperation_response_error() { let execute_request = echo_foo_request(); let mock_server = { @@ -1336,15 +1356,17 @@ fn getoperation_response_error() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); assert_eq!(result, "INTERNAL: Something went wrong"); assert_cancellation_requests(&mock_server, vec![]); } -#[test] -fn initial_response_missing_response_and_error() { +#[tokio::test] +async fn initial_response_missing_response_and_error() { let execute_request = echo_foo_request(); let mock_server = { @@ -1370,13 +1392,15 @@ fn initial_response_missing_response_and_error() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); assert_eq!(result, "Operation finished but no response supplied"); } -#[test] -fn getoperation_missing_response_and_error() { +#[tokio::test] +async fn getoperation_missing_response_and_error() { let execute_request = echo_foo_request(); let mock_server = { @@ -1405,14 +1429,16 @@ fn getoperation_missing_response_and_error() { ) }; - let result = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); + let result = run_command_remote(mock_server.address(), execute_request) + .await + .expect_err("Want Err"); assert_eq!(result, "Operation finished but no response supplied"); } -#[test] -fn execute_missing_file_uploads_if_known() { - let runtime = task_executor::Executor::new(); +#[tokio::test] +async fn execute_missing_file_uploads_if_known() { + let runtime = task_executor::Executor::new(Handle::current()); let roland = TestData::roland(); @@ -1464,11 +1490,15 @@ fn execute_missing_file_uploads_if_known() { 1, ) .expect("Failed to make store"); - runtime - .block_on(store.store_file_bytes(roland.bytes(), false)) + store + 
.store_file_bytes(roland.bytes(), false) + .compat() + .await .expect("Saving file bytes to store"); - runtime - .block_on(store.record_directory(&TestDirectory::containing_roland().directory(), false)) + store + .record_directory(&TestDirectory::containing_roland().directory(), false) + .compat() + .await .expect("Saving directory bytes to store"); let command_runner = CommandRunner::new( &mock_server.address(), @@ -1485,8 +1515,10 @@ fn execute_missing_file_uploads_if_known() { ) .unwrap(); - let result = runtime - .block_on(command_runner.run(cat_roland_request(), Context::default())) + let result = command_runner + .run(cat_roland_request(), Context::default()) + .compat() + .await .unwrap(); assert_eq!( result.without_execution_attempts(), @@ -1504,10 +1536,10 @@ fn execute_missing_file_uploads_if_known() { } } -//#[test] // TODO: Unignore this test when the server can actually fail with status protos. -// See https://github.com/pantsbuild/pants/issues/6597 -#[allow(dead_code)] -fn execute_missing_file_uploads_if_known_status() { +#[tokio::test] +//// TODO: Unignore this test when the server can actually fail with status protos. +#[ignore] // https://github.com/pantsbuild/pants/issues/6597 +async fn execute_missing_file_uploads_if_known_status() { let roland = TestData::roland(); let mock_server = { @@ -1556,7 +1588,7 @@ fn execute_missing_file_uploads_if_known_status() { let cas = mock::StubCAS::builder() .directory(&TestDirectory::containing_roland()) .build(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::with_remote( runtime.clone(), store_dir, @@ -1611,8 +1643,8 @@ fn execute_missing_file_uploads_if_known_status() { assert_cancellation_requests(&mock_server, vec![]); } -#[test] -fn execute_missing_file_errors_if_unknown() { +#[tokio::test] +async fn execute_missing_file_errors_if_unknown() { let missing_digest = TestDirectory::containing_roland().digest(); let mock_server = { @@ -1640,7 +1672,7 @@ fn execute_missing_file_errors_if_unknown() { .file(&TestData::roland()) .directory(&TestDirectory::containing_roland()) .build(); - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store = Store::with_remote( runtime.clone(), store_dir, @@ -1672,14 +1704,16 @@ fn execute_missing_file_errors_if_unknown() { ) .unwrap(); - let error = runtime - .block_on(runner.run(cat_roland_request(), Context::default())) + let error = runner + .run(cat_roland_request(), Context::default()) + .compat() + .await .expect_err("Want error"); assert_contains(&error, &format!("{}", missing_digest.0)); } -#[test] -fn format_error_complete() { +#[tokio::test] +async fn format_error_complete() { let mut error = bazel_protos::status::Status::new(); error.set_code(bazel_protos::code::Code::CANCELLED.value()); error.set_message("Oops, oh well!".to_string()); @@ -1689,8 +1723,8 @@ fn format_error_complete() { ); } -#[test] -fn extract_execute_response_unknown_code() { +#[tokio::test] +async fn extract_execute_response_unknown_code() { let mut error = bazel_protos::status::Status::new(); error.set_code(555); error.set_message("Oops, oh well!".to_string()); @@ -1700,8 +1734,8 @@ fn extract_execute_response_unknown_code() { ); } -#[test] -fn extract_execute_response_success() { +#[tokio::test] +async fn extract_execute_response_success() { let want_result = FallibleExecuteProcessResult { stdout: as_bytes("roland"), stderr: Bytes::from("simba"), @@ -1735,27 +1769,28 @@ 
fn extract_execute_response_success() { assert_eq!( extract_execute_response(operation) + .await .unwrap() .without_execution_attempts(), want_result ); } -#[test] -fn extract_execute_response_pending() { +#[tokio::test] +async fn extract_execute_response_pending() { let operation_name = "cat".to_owned(); let mut operation = bazel_protos::operations::Operation::new(); operation.set_name(operation_name.clone()); operation.set_done(false); assert_eq!( - extract_execute_response(operation), + extract_execute_response(operation).await, Err(ExecutionError::NotFinished(operation_name)) ); } -#[test] -fn extract_execute_response_missing_digests() { +#[tokio::test] +async fn extract_execute_response_missing_digests() { let missing_files = vec![ TestData::roland().digest(), TestDirectory::containing_roland().digest(), @@ -1772,13 +1807,13 @@ fn extract_execute_response_missing_digests() { .unwrap(); assert_eq!( - extract_execute_response(operation), + extract_execute_response(operation).await, Err(ExecutionError::MissingDigests(missing_files)) ); } -#[test] -fn extract_execute_response_missing_other_things() { +#[tokio::test] +async fn extract_execute_response_missing_other_things() { let missing = vec![ missing_preconditionfailure_violation(&TestData::roland().digest()), { @@ -1794,14 +1829,14 @@ fn extract_execute_response_missing_other_things() { .unwrap() .unwrap(); - match extract_execute_response(operation) { + match extract_execute_response(operation).await { Err(ExecutionError::Fatal(err)) => assert_contains(&err, "monkeys"), other => assert!(false, "Want fatal error, got {:?}", other), }; } -#[test] -fn extract_execute_response_other_failed_precondition() { +#[tokio::test] +async fn extract_execute_response_other_failed_precondition() { let missing = vec![{ let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new(); violation.set_field_type("OUT_OF_CAPACITY".to_owned()); @@ -1813,14 +1848,14 @@ fn extract_execute_response_other_failed_precondition() { .unwrap() .unwrap(); - match extract_execute_response(operation) { + match extract_execute_response(operation).await { Err(ExecutionError::Fatal(err)) => assert_contains(&err, "OUT_OF_CAPACITY"), other => assert!(false, "Want fatal error, got {:?}", other), }; } -#[test] -fn extract_execute_response_missing_without_list() { +#[tokio::test] +async fn extract_execute_response_missing_without_list() { let missing = vec![]; let operation = make_precondition_failure_operation(missing) @@ -1828,14 +1863,14 @@ fn extract_execute_response_missing_without_list() { .unwrap() .unwrap(); - match extract_execute_response(operation) { + match extract_execute_response(operation).await { Err(ExecutionError::Fatal(err)) => assert_contains(&err.to_lowercase(), "precondition"), other => assert!(false, "Want fatal error, got {:?}", other), }; } -#[test] -fn extract_execute_response_other_status() { +#[tokio::test] +async fn extract_execute_response_other_status() { let mut operation = bazel_protos::operations::Operation::new(); operation.set_name("cat".to_owned()); operation.set_done(true); @@ -1849,14 +1884,14 @@ fn extract_execute_response_other_status() { response })); - match extract_execute_response(operation) { + match extract_execute_response(operation).await { Err(ExecutionError::Fatal(err)) => assert_contains(&err, "PermissionDenied"), other => assert!(false, "Want fatal error, got {:?}", other), }; } -#[test] -fn digest_command() { +#[tokio::test] +async fn digest_command() { let mut command = 
bazel_protos::remote_execution::Command::new(); command.mut_arguments().push("/bin/echo".to_string()); command.mut_arguments().push("foo".to_string()); @@ -1880,8 +1915,8 @@ fn digest_command() { assert_eq!(digest.1, 32) } -#[test] -fn wait_between_request_1_retry() { +#[tokio::test] +async fn wait_between_request_1_retry() { // wait at least 100 milli for one retry { let execute_request = echo_foo_request(); @@ -1916,9 +1951,10 @@ fn wait_between_request_1_retry() { Duration::from_millis(100), Duration::from_secs(1), ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - runtime - .block_on(command_runner.run(execute_request, Context::default())) + command_runner + .run(execute_request, Context::default()) + .compat() + .await .unwrap(); let messages = mock_server.mock_responder.received_messages.lock(); @@ -1934,8 +1970,8 @@ fn wait_between_request_1_retry() { } } -#[test] -fn wait_between_request_3_retry() { +#[tokio::test] +async fn wait_between_request_3_retry() { // wait at least 50 + 100 + 150 = 300 milli for 3 retries. { let execute_request = echo_foo_request(); @@ -1972,9 +2008,10 @@ fn wait_between_request_3_retry() { Duration::from_millis(50), Duration::from_secs(5), ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - runtime - .block_on(command_runner.run(execute_request, Context::default())) + command_runner + .run(execute_request, Context::default()) + .compat() + .await .unwrap(); let messages = mock_server.mock_responder.received_messages.lock(); @@ -2006,8 +2043,8 @@ fn wait_between_request_3_retry() { } } -#[test] -fn extract_output_files_from_response_one_file() { +#[tokio::test] +async fn extract_output_files_from_response_one_file() { let mut output_file = bazel_protos::remote_execution::OutputFile::new(); output_file.set_path("roland".into()); output_file.set_digest((&TestData::roland().digest()).into()); @@ -2024,13 +2061,13 @@ fn extract_output_files_from_response_one_file() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(TestDirectory::containing_roland().digest()) ) } -#[test] -fn extract_output_files_from_response_two_files_not_nested() { +#[tokio::test] +async fn extract_output_files_from_response_two_files_not_nested() { let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new(); output_file_1.set_path("roland".into()); output_file_1.set_digest((&TestData::roland().digest()).into()); @@ -2053,13 +2090,13 @@ fn extract_output_files_from_response_two_files_not_nested() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(TestDirectory::containing_roland_and_treats().digest()) ) } -#[test] -fn extract_output_files_from_response_two_files_nested() { +#[tokio::test] +async fn extract_output_files_from_response_two_files_nested() { let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new(); output_file_1.set_path("cats/roland".into()); output_file_1.set_digest((&TestData::roland().digest()).into()); @@ -2082,13 +2119,13 @@ fn extract_output_files_from_response_two_files_nested() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(TestDirectory::recursive().digest()) ) } -#[test] -fn extract_output_files_from_response_just_directory() { +#[tokio::test] +async fn extract_output_files_from_response_just_directory() { let mut output_directory = 
bazel_protos::remote_execution::OutputDirectory::new(); output_directory.set_path("cats".into()); output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into()); @@ -2104,13 +2141,13 @@ fn extract_output_files_from_response_just_directory() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(TestDirectory::nested().digest()) ) } -#[test] -fn extract_output_files_from_response_directories_and_files() { +#[tokio::test] +async fn extract_output_files_from_response_directories_and_files() { // /catnip // /pets/cats/roland // /pets/dogs/robin @@ -2148,7 +2185,7 @@ fn extract_output_files_from_response_directories_and_files() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(Digest( Fingerprint::from_hex_string( "639b4b84bb58a9353d49df8122e7987baf038efe54ed035e67910846c865b1e2" @@ -2159,8 +2196,8 @@ fn extract_output_files_from_response_directories_and_files() { ) } -#[test] -fn extract_output_files_from_response_no_prefix() { +#[tokio::test] +async fn extract_output_files_from_response_no_prefix() { let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new(); output_directory.set_path(String::new()); output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into()); @@ -2174,7 +2211,7 @@ fn extract_output_files_from_response_no_prefix() { }); assert_eq!( - extract_output_files_from_response(&execute_response), + extract_output_files_from_response(&execute_response).await, Ok(TestDirectory::containing_roland().digest()) ) } @@ -2192,8 +2229,8 @@ fn workunits_with_constant_span_id(workunit_store: &WorkUnitStore) -> HashSet Result { @@ -2472,8 +2508,10 @@ fn run_command_remote( Duration::from_millis(0), Duration::from_secs(0), ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - runtime.block_on(command_runner.run(request, Context::default())) + command_runner + .run(request, Context::default()) + .compat() + .await } fn create_command_runner( @@ -2482,7 +2520,7 @@ fn create_command_runner( backoff_incremental_wait: Duration, backoff_max_wait: Duration, ) -> CommandRunner { - let runtime = task_executor::Executor::new(); + let runtime = task_executor::Executor::new(Handle::current()); let store_dir = TempDir::new().unwrap(); let store = make_store(store_dir.path(), cas, runtime.clone()); CommandRunner::new( @@ -2519,7 +2557,7 @@ fn make_store(store_dir: &Path, cas: &mock::StubCAS, executor: task_executor::Ex .expect("Failed to make store") } -fn extract_execute_response( +async fn extract_execute_response( operation: bazel_protos::operations::Operation, ) -> Result { let cas = mock::StubCAS::builder() @@ -2533,30 +2571,29 @@ fn extract_execute_response( Duration::from_secs(0), ); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - - runtime.block_on(command_runner.extract_execute_response( - OperationOrStatus::Operation(operation), - &mut ExecutionHistory::default(), - WorkUnitStore::new(), - )) + command_runner + .extract_execute_response( + OperationOrStatus::Operation(operation), + &mut ExecutionHistory::default(), + WorkUnitStore::new(), + ) + .compat() + .await } -fn extract_output_files_from_response( +async fn extract_output_files_from_response( execute_response: &bazel_protos::remote_execution::ExecuteResponse, ) -> Result { let cas = mock::StubCAS::builder() .file(&TestData::roland()) 
.directory(&TestDirectory::containing_roland()) .build(); - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); let store_dir = TempDir::new().unwrap(); let store = make_store(store_dir.path(), &cas, executor.clone()); - executor.block_on(crate::remote::extract_output_files( - store, - &execute_response, - WorkUnitStore::new(), - )) + crate::remote::extract_output_files(store, &execute_response, WorkUnitStore::new()) + .compat() + .await } fn make_any_proto(message: &dyn Message) -> protobuf::well_known_types::Any { diff --git a/src/rust/engine/process_execution/src/speculate.rs b/src/rust/engine/process_execution/src/speculate.rs index 70245013ca3..4cef5920e79 100644 --- a/src/rust/engine/process_execution/src/speculate.rs +++ b/src/rust/engine/process_execution/src/speculate.rs @@ -3,11 +3,12 @@ use crate::{ MultiPlatformExecuteProcessRequest, }; use boxfuture::{BoxFuture, Boxable}; +use futures::future::{FutureExt, TryFutureExt}; use futures01::future::{err, ok, Either, Future}; use log::{debug, trace}; use std::sync::Arc; -use std::time::{Duration, Instant}; -use tokio_timer::Delay; +use std::time::Duration; +use tokio::time::delay_for; #[derive(Clone)] pub struct SpeculatingCommandRunner { @@ -34,7 +35,10 @@ impl SpeculatingCommandRunner { req: MultiPlatformExecuteProcessRequest, context: Context, ) -> BoxFuture { - let delay = Delay::new(Instant::now() + self.speculation_timeout); + let delay = delay_for(self.speculation_timeout) + .unit_error() + .boxed() + .compat(); let req2 = req.clone(); trace!( "Primary command runner queue length: {:?}", diff --git a/src/rust/engine/process_execution/src/speculate_tests.rs b/src/rust/engine/process_execution/src/speculate_tests.rs index 4f9152a8720..e1aaa08d031 100644 --- a/src/rust/engine/process_execution/src/speculate_tests.rs +++ b/src/rust/engine/process_execution/src/speculate_tests.rs @@ -6,96 +6,98 @@ use crate::{ }; use boxfuture::{BoxFuture, Boxable}; use bytes::Bytes; +use futures::compat::Future01CompatExt; +use futures::future::{FutureExt, TryFutureExt}; use futures01::future::Future; use hashing::EMPTY_DIGEST; use parking_lot::Mutex; use std::sync::Arc; -use std::time::{Duration, Instant}; +use std::time::Duration; use tokio; -use tokio_timer::Delay; +use tokio::time::delay_for; -#[test] -fn test_no_speculation() { +#[tokio::test] +async fn test_no_speculation() { let (result, call_counter, finished_counter) = - run_speculation_test(0, 0, 100, false, false, true, true); + run_speculation_test(0, 0, 100, false, false, true, true).await; assert_eq![1, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m1")]; } -#[test] -fn test_speculate() { +#[tokio::test] +async fn test_speculate() { let (result, call_counter, finished_counter) = - run_speculation_test(100, 0, 10, false, false, true, true); + run_speculation_test(100, 0, 10, false, false, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m2")] } -#[test] -fn first_req_slow_success() { +#[tokio::test] +async fn first_req_slow_success() { let (result, call_counter, finished_counter) = - run_speculation_test(500, 1000, 250, false, false, true, true); + run_speculation_test(500, 1000, 250, false, false, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m1")] } -#[test] -fn 
first_req_slow_fail() { +#[tokio::test] +async fn first_req_slow_fail() { let (result, call_counter, finished_counter) = - run_speculation_test(1000, 0, 100, true, false, true, true); + run_speculation_test(1000, 0, 100, true, false, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m2")] } -#[test] -fn first_req_fast_fail() { +#[tokio::test] +async fn first_req_fast_fail() { let (result, call_counter, finished_counter) = - run_speculation_test(500, 1000, 250, true, false, true, true); + run_speculation_test(500, 1000, 250, true, false, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap_err(), Bytes::from("m1")] } -#[test] -fn only_fail_on_primary_result() { +#[tokio::test] +async fn only_fail_on_primary_result() { let (result, call_counter, finished_counter) = - run_speculation_test(1000, 0, 100, true, true, true, true); + run_speculation_test(1000, 0, 100, true, true, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![2, *finished_counter.lock()]; assert_eq![result.unwrap_err(), Bytes::from("m1")] } -#[test] -fn platform_compatible_with_1st_runs_once() { +#[tokio::test] +async fn platform_compatible_with_1st_runs_once() { let (result, call_counter, finished_counter) = - run_speculation_test(0, 0, 100, false, false, true, false); + run_speculation_test(0, 0, 100, false, false, true, false).await; assert_eq![1, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m1")] } -#[test] -fn platform_compatible_with_2nd_runs_once() { +#[tokio::test] +async fn platform_compatible_with_2nd_runs_once() { let (result, call_counter, finished_counter) = - run_speculation_test(0, 0, 100, false, false, false, true); + run_speculation_test(0, 0, 100, false, false, false, true).await; assert_eq![1, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m2")] } -#[test] -fn platform_compatible_with_both_speculates() { +#[tokio::test] +async fn platform_compatible_with_both_speculates() { let (result, call_counter, finished_counter) = - run_speculation_test(1000, 1000, 500, false, false, true, true); + run_speculation_test(1000, 1000, 500, false, false, true, true).await; assert_eq![2, *call_counter.lock()]; assert_eq![1, *finished_counter.lock()]; assert_eq![result.unwrap().stdout, Bytes::from("m1")] } -fn run_speculation_test( +async fn run_speculation_test( r1_latency_ms: u64, r2_latency_ms: u64, speculation_delay_ms: u64, @@ -108,7 +110,6 @@ fn run_speculation_test( Arc>, Arc>, ) { - let runtime = tokio::runtime::Runtime::new().unwrap(); let execute_request = echo_foo_request(); let msg1: String = "m1".into(); let msg2: String = "m2".into(); @@ -135,7 +136,7 @@ fn run_speculation_test( Duration::from_millis(speculation_delay_ms), ); ( - runtime.block_on_all(runner.run(execute_request, context)), + runner.run(execute_request, context).compat().await, call_counter, finished_counter, ) @@ -210,7 +211,7 @@ impl CommandRunner for DelayedCommandRunner { _req: MultiPlatformExecuteProcessRequest, _context: Context, ) -> BoxFuture { - let delay = Delay::new(Instant::now() + self.delay); + let delay = delay_for(self.delay).unit_error().compat(); let exec_result = self.result.clone(); let command_runner = self.clone(); command_runner.incr_call_counter(); diff --git a/src/rust/engine/process_executor/Cargo.toml 
b/src/rust/engine/process_executor/Cargo.toml index 662ab6e6fac..a4e063b419c 100644 --- a/src/rust/engine/process_executor/Cargo.toml +++ b/src/rust/engine/process_executor/Cargo.toml @@ -8,9 +8,10 @@ publish = false [dependencies] clap = "2" env_logger = "0.5.4" +futures = { version = "0.3", features = ["compat"] } hashing = { path = "../hashing" } process_execution = { path = "../process_execution" } store = { path = "../fs/store" } task_executor = { path = "../task_executor" } -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-threaded", "macros"] } workunit_store = { path = "../workunit_store"} diff --git a/src/rust/engine/process_executor/src/main.rs b/src/rust/engine/process_executor/src/main.rs index 27925fcdb19..9f23eaa00b9 100644 --- a/src/rust/engine/process_executor/src/main.rs +++ b/src/rust/engine/process_executor/src/main.rs @@ -31,6 +31,7 @@ use env_logger; use process_execution; use clap::{value_t, App, AppSettings, Arg}; +use futures::compat::Future01CompatExt; use hashing::{Digest, Fingerprint}; use process_execution::{Context, ExecuteProcessRequestMetadata, PlatformConstraint, RelativePath}; use std::collections::{BTreeMap, BTreeSet}; @@ -40,7 +41,7 @@ use std::path::PathBuf; use std::process::exit; use std::time::Duration; use store::{BackoffConfig, Store}; -use tokio::runtime::Runtime; +use tokio::runtime::Handle; use workunit_store::WorkUnitStore; /// A binary which takes args of format: @@ -50,7 +51,8 @@ use workunit_store::WorkUnitStore; /// It outputs its output/err to stdout/err, and exits with its exit code. /// /// It does not perform $PATH lookup or shell expansion. -fn main() { +#[tokio::main] +async fn main() { env_logger::init(); let args = App::new("process_executor") @@ -268,7 +270,7 @@ fn main() { .map(collection_from_keyvalues::<_, BTreeMap<_, _>>) .unwrap_or_default(); - let executor = task_executor::Executor::new(); + let executor = task_executor::Executor::new(Handle::current()); let store = match (server_arg, args.value_of("cas-server")) { (Some(_server), Some(cas_server)) => { @@ -388,15 +390,17 @@ fn main() { )) as Box, }; - let mut runtime = Runtime::new().unwrap(); - - let result = runtime - .block_on(runner.run(request.into(), Context::default())) + let result = runner + .run(request.into(), Context::default()) + .compat() + .await .expect("Error executing"); if let Some(output) = args.value_of("materialize-output-to").map(PathBuf::from) { - runtime - .block_on(store.materialize_directory(output, result.output_directory, WorkUnitStore::new())) + store + .materialize_directory(output, result.output_directory, WorkUnitStore::new()) + .compat() + .await .unwrap(); } diff --git a/src/rust/engine/serverset/Cargo.toml b/src/rust/engine/serverset/Cargo.toml index a68b1d1c190..45f87a31b97 100644 --- a/src/rust/engine/serverset/Cargo.toml +++ b/src/rust/engine/serverset/Cargo.toml @@ -8,10 +8,11 @@ publish = false [dependencies] boxfuture = { path = "../boxfuture" } futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } parking_lot = "0.6" -tokio-timer = "0.2" +tokio = { version = "0.2", features = ["time"] } [dev-dependencies] maplit = "1" testutil = { path = "../testutil" } -tokio = "0.1" +tokio = { version = "0.2", features = ["rt-core", "macros"] } diff --git a/src/rust/engine/serverset/src/lib.rs b/src/rust/engine/serverset/src/lib.rs index d3730da3007..e8572998b2d 100644 --- a/src/rust/engine/serverset/src/lib.rs +++ b/src/rust/engine/serverset/src/lib.rs @@ -26,11 +26,12 @@ 
#![allow(clippy::mutex_atomic)] use boxfuture::{BoxFuture, Boxable}; +use futures::future::{FutureExt, TryFutureExt}; use futures01::{future, Future}; use parking_lot::Mutex; use std::sync::Arc; use std::time::{Duration, Instant}; -use tokio_timer::Delay; +use tokio::time::delay_until; mod retry; pub use crate::retry::Retry; @@ -319,8 +320,11 @@ impl Serverset { .unwrap(); let serverset = self.clone(); - Delay::new(instant) - .map_err(|err| format!("Error delaying for serverset: {}", err)) + delay_until(instant.into()) + .unit_error() + .boxed() + .compat() + .map_err(|()| "Error delaying for serverset.".to_string()) .and_then(move |()| serverset.next()) .to_boxed() } diff --git a/src/rust/engine/serverset/src/retry_tests.rs b/src/rust/engine/serverset/src/retry_tests.rs index d35a8d3dc90..8da0206ab1e 100644 --- a/src/rust/engine/serverset/src/retry_tests.rs +++ b/src/rust/engine/serverset/src/retry_tests.rs @@ -3,9 +3,10 @@ use maplit::hashset; use std::time::Duration; use testutil::owned_string_vec; -#[test] -fn retries() { - let mut runtime = tokio::runtime::Runtime::new().unwrap(); +use futures::compat::Future01CompatExt; + +#[tokio::test] +async fn retries() { let s = Serverset::new( owned_string_vec(&["good", "bad", "enough"]), |s| { @@ -22,17 +23,18 @@ fn retries() { let mut saw = hashset![]; for _ in 0..3 { saw.insert( - runtime - .block_on(Retry(s.clone()).all_errors_immediately(|v| v, 1)) + Retry(s.clone()) + .all_errors_immediately(|v| v, 1) + .compat() + .await .unwrap(), ); } assert_eq!(saw, hashset!["good".to_owned(), "enough".to_owned()]); } -#[test] -fn gives_up_on_enough_bad() { - let mut runtime = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn gives_up_on_enough_bad() { let s = Serverset::new( vec!["bad".to_owned()], |s| Err(s.to_owned()), @@ -42,6 +44,9 @@ fn gives_up_on_enough_bad() { .unwrap(); assert_eq!( Err(format!("Failed after 5 retries; last failure: bad")), - runtime.block_on(Retry(s).all_errors_immediately(|v: Result| v, 5)) + Retry(s) + .all_errors_immediately(|v: Result| v, 5) + .compat() + .await ); } diff --git a/src/rust/engine/serverset/src/tests.rs b/src/rust/engine/serverset/src/tests.rs index 0b5185a2f8d..7d10f028c57 100644 --- a/src/rust/engine/serverset/src/tests.rs +++ b/src/rust/engine/serverset/src/tests.rs @@ -1,4 +1,5 @@ use crate::{BackoffConfig, Health, Serverset}; +use futures::compat::Future01CompatExt; use futures01::{future, Future}; use parking_lot::Mutex; use std; @@ -6,6 +7,7 @@ use std::collections::HashSet; use std::sync::Arc; use std::time::Duration; use testutil::owned_string_vec; +use tokio::time::delay_for; fn backoff_config() -> BackoffConfig { BackoffConfig::new(Duration::from_millis(10), 2.0, Duration::from_millis(100)).unwrap() @@ -18,9 +20,8 @@ fn no_servers_is_error() { .expect_err("Want error constructing with no servers"); } -#[test] -fn one_request_works() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn one_request_works() { let s = Serverset::new( owned_string_vec(&["good"]), fake_connect, @@ -29,12 +30,11 @@ fn one_request_works() { ) .unwrap(); - assert_eq!(rt.block_on(s.next()).unwrap().0, "good".to_owned()); + assert_eq!(s.next().compat().await.unwrap().0, "good".to_owned()); } -#[test] -fn round_robins() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn round_robins() { let s = Serverset::new( owned_string_vec(&["good", "bad"]), fake_connect, @@ -43,12 +43,11 @@ fn round_robins() { ) .unwrap(); - expect_both(&mut rt, &s, 2); + 
expect_both(&s, 2).await } -#[test] -fn handles_overflow_internally() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn handles_overflow_internally() { let s = Serverset::new( owned_string_vec(&["good", "bad"]), fake_connect, @@ -60,7 +59,7 @@ fn handles_overflow_internally() { // 3 because we may skip some values if the number of servers isn't a factor of // std::usize::MAX, so we make sure to go around them all again after overflowing. - expect_both(&mut rt, &s, 3) + expect_both(&s, 3).await } fn unwrap(wrapped: Arc>) -> T { @@ -69,9 +68,8 @@ fn unwrap(wrapped: Arc>) -> T { .into_inner() } -#[test] -fn skips_unhealthy() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn skips_unhealthy() { let s = Serverset::new( owned_string_vec(&["good", "bad"]), fake_connect, @@ -80,14 +78,13 @@ fn skips_unhealthy() { ) .unwrap(); - mark_bad_as_bad(&mut rt, &s, Health::Unhealthy); + mark_bad_as_bad(&s, Health::Unhealthy).await; - expect_only_good(&mut rt, &s, Duration::from_millis(10)); + expect_only_good(&s, Duration::from_millis(10)).await; } -#[test] -fn reattempts_unhealthy() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn reattempts_unhealthy() { let s = Serverset::new( owned_string_vec(&["good", "bad"]), fake_connect, @@ -96,16 +93,15 @@ fn reattempts_unhealthy() { ) .unwrap(); - mark_bad_as_bad(&mut rt, &s, Health::Unhealthy); + mark_bad_as_bad(&s, Health::Unhealthy).await; - expect_only_good(&mut rt, &s, Duration::from_millis(10)); + expect_only_good(&s, Duration::from_millis(10)).await; - expect_both(&mut rt, &s, 3); + expect_both(&s, 3).await } -#[test] -fn backoff_when_unhealthy() { - let mut rt = tokio::runtime::Runtime::new().unwrap(); +#[tokio::test] +async fn backoff_when_unhealthy() { let s = Serverset::new( owned_string_vec(&["good", "bad"]), fake_connect, @@ -114,25 +110,25 @@ fn backoff_when_unhealthy() { ) .unwrap(); - mark_bad_as_bad(&mut rt, &s, Health::Unhealthy); + mark_bad_as_bad(&s, Health::Unhealthy).await; - expect_only_good(&mut rt, &s, Duration::from_millis(10)); + expect_only_good(&s, Duration::from_millis(10)).await; // mark_bad_as_bad asserts that we attempted to use the bad server as a side effect, so this // checks that we did re-use the server after the lame period. - mark_bad_as_bad(&mut rt, &s, Health::Unhealthy); + mark_bad_as_bad(&s, Health::Unhealthy).await; - expect_only_good(&mut rt, &s, Duration::from_millis(20)); + expect_only_good(&s, Duration::from_millis(20)).await; - mark_bad_as_bad(&mut rt, &s, Health::Unhealthy); + mark_bad_as_bad(&s, Health::Unhealthy).await; - expect_only_good(&mut rt, &s, Duration::from_millis(40)); + expect_only_good(&s, Duration::from_millis(40)).await; - expect_both(&mut rt, &s, 3); + expect_both(&s, 3).await } -#[test] -fn waits_if_all_unhealthy() { +#[tokio::test] +async fn waits_if_all_unhealthy() { let backoff_config = backoff_config(); let s = Serverset::new( owned_string_vec(&["good", "bad"]), @@ -141,21 +137,20 @@ fn waits_if_all_unhealthy() { backoff_config, ) .unwrap(); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); // We will get an address 4 times, and mark it as unhealthy each of those times. // That means that each server will be marked bad twice, which according to our backoff config // means they should be marked as unavailable for 20ms each. 
for _ in 0..4 { let s = s.clone(); - let (_server, token) = runtime.block_on(s.next()).unwrap(); + let (_server, token) = s.next().compat().await.unwrap(); s.report_health(token, Health::Unhealthy); } let start = std::time::Instant::now(); // This should take at least 20ms because both servers are marked as unhealthy. - let _ = runtime.block_on(s.next()).unwrap(); + let _ = s.next().compat().await.unwrap(); // Make sure we waited for at least 10ms; we should have waited 20ms, but it may have taken a // little time to mark a server as unhealthy, so we have some padding between what we expect @@ -168,35 +163,36 @@ fn waits_if_all_unhealthy() { ); } -fn expect_both(runtime: &mut tokio::runtime::Runtime, s: &Serverset, repetitions: usize) { +async fn expect_both(s: &Serverset, repetitions: usize) { let visited = Arc::new(Mutex::new(HashSet::new())); - runtime - .block_on(future::join_all( - (0..repetitions) - .into_iter() - .map(|_| { - let saw = visited.clone(); - let s = s.clone(); - s.next().map(move |(server, token)| { - saw.lock().insert(server); - s.report_health(token, Health::Healthy) - }) + future::join_all( + (0..repetitions) + .into_iter() + .map(|_| { + let saw = visited.clone(); + let s = s.clone(); + s.next().map(move |(server, token)| { + saw.lock().insert(server); + s.report_health(token, Health::Healthy) }) - .collect::>(), - )) - .unwrap(); + }) + .collect::>(), + ) + .compat() + .await + .unwrap(); let expect: HashSet<_> = owned_string_vec(&["good", "bad"]).into_iter().collect(); assert_eq!(unwrap(visited), expect); } -fn mark_bad_as_bad(runtime: &mut tokio::runtime::Runtime, s: &Serverset, health: Health) { +async fn mark_bad_as_bad(s: &Serverset, health: Health) { let mark_bad_as_baded_bad = Arc::new(Mutex::new(false)); for _ in 0..2 { let s = s.clone(); let mark_bad_as_baded_bad = mark_bad_as_baded_bad.clone(); - let (server, token) = runtime.block_on(s.next()).unwrap(); + let (server, token) = s.next().compat().await.unwrap(); if &server == "bad" { *mark_bad_as_baded_bad.lock() = true; s.report_health(token, health); @@ -210,11 +206,7 @@ fn mark_bad_as_bad(runtime: &mut tokio::runtime::Runtime, s: &Serverset, ); } -fn expect_only_good( - runtime: &mut tokio::runtime::Runtime, - s: &Serverset, - duration: Duration, -) { +async fn expect_only_good(s: &Serverset, duration: Duration) { let buffer = Duration::from_millis(1); let start = std::time::Instant::now(); @@ -224,7 +216,7 @@ fn expect_only_good( let s = s.clone(); let should_break = should_break.clone(); let did_get_at_least_one_good = did_get_at_least_one_good.clone(); - let (server, token) = runtime.block_on(s.next()).unwrap(); + let (server, token) = s.next().compat().await.unwrap(); if start.elapsed() < duration - buffer { assert_eq!("good", &server); *did_get_at_least_one_good.lock() = true; @@ -236,7 +228,7 @@ fn expect_only_good( assert!(*did_get_at_least_one_good.lock()); - std::thread::sleep(buffer * 2); + delay_for(buffer * 2).await; } /// For tests, we just use Strings as servers, as it's an easy type we can make from addresses. 
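// Editorial sketch (not part of the patch): the general test conversion applied throughout the
// serverset (and other) test modules above. #[tokio::test] supplies the runtime, futures 0.1
// values are awaited with .compat() instead of runtime.block_on(..), and sleeps use
// tokio::time::delay_for so they do not block a runtime thread. `some_01_future` is a
// hypothetical stand-in for a helper such as Serverset::next().
use std::time::Duration;

use futures::compat::Future01CompatExt;
use futures01::future as future01;

fn some_01_future() -> impl futures01::Future<Item = u32, Error = String> {
  future01::ok::<u32, String>(42)
}

#[tokio::test]
async fn converted_test() {
  let start = std::time::Instant::now();
  tokio::time::delay_for(Duration::from_millis(20)).await;
  assert!(start.elapsed() >= Duration::from_millis(20));

  let value = some_01_future().compat().await.unwrap();
  assert_eq!(value, 42);
}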
diff --git a/src/rust/engine/sharded_lmdb/Cargo.toml b/src/rust/engine/sharded_lmdb/Cargo.toml index d4f639cf51c..0fd900044fd 100644 --- a/src/rust/engine/sharded_lmdb/Cargo.toml +++ b/src/rust/engine/sharded_lmdb/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] bytes = "0.4.5" fs = { path = "../fs" } -futures01 = { package = "futures", version = "0.1" } +futures = "0.3" hashing = { path = "../hashing" } lmdb = { git = "https://github.com/pantsbuild/lmdb-rs.git", rev = "06bdfbfc6348f6804127176e561843f214fc17f8" } log = "0.4" diff --git a/src/rust/engine/sharded_lmdb/src/lib.rs b/src/rust/engine/sharded_lmdb/src/lib.rs index d022608205b..e73f20e1e54 100644 --- a/src/rust/engine/sharded_lmdb/src/lib.rs +++ b/src/rust/engine/sharded_lmdb/src/lib.rs @@ -26,7 +26,7 @@ #![allow(clippy::mutex_atomic)] use bytes::Bytes; -use futures01::{future, Future}; +use futures::future::BoxFuture; use hashing::Fingerprint; use lmdb::{ self, Database, DatabaseFlags, Environment, EnvironmentCopyFlags, EnvironmentFlags, @@ -169,14 +169,17 @@ impl ShardedLmdb { self.lmdbs.values().cloned().collect() } - pub fn store_bytes( - &self, - key: Fingerprint, + /// + /// TODO: See the note on references in ASYNC.md. + /// + pub fn store_bytes<'a, 'b>( + &'a self, + fingerprint: Fingerprint, bytes: Bytes, initial_lease: bool, - ) -> impl Future { + ) -> BoxFuture<'b, Result<(), String>> { let store = self.clone(); - self.executor.spawn_on_io_pool(future::lazy(move || { + self.executor.spawn_blocking(move || { let (env, db, lease_database) = store.get(&key); let put_res = env.begin_rw_txn().and_then(|mut txn| { txn.put(db, &key, &bytes, WriteFlags::NO_OVERWRITE)?; @@ -196,7 +199,7 @@ impl ShardedLmdb { Err(lmdb::Error::KeyExist) => Ok(()), Err(err) => Err(format!("Error storing key {:?}: {}", key.to_hex(), err)), } - })) + }) } fn lease( @@ -221,16 +224,21 @@ impl ShardedLmdb { (now_since_epoch + time::Duration::from_secs(2 * 60 * 60)).as_secs() } + /// + /// TODO: See the note on references in ASYNC.md. + /// pub fn load_bytes_with< + 'a, + 'b, T: Send + 'static, F: Fn(Bytes) -> Result + Send + Sync + 'static, >( - &self, + &'a self, fingerprint: Fingerprint, f: F, - ) -> impl Future, Error = String> { + ) -> BoxFuture<'b, Result, String>> { let store = self.clone(); - self.executor.spawn_on_io_pool(future::lazy(move || { + self.executor.spawn_blocking(move || { let (env, db, _) = store.get(&fingerprint); let ro_txn = env .begin_ro_txn() @@ -244,7 +252,7 @@ impl ShardedLmdb { err, )), }) - })) + }) } #[allow(clippy::identity_conversion)] // False positive: https://github.com/rust-lang/rust-clippy/issues/3913 diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index 9da7c84c9d1..a07c1a499e7 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -2,11 +2,13 @@ // Licensed under the Apache License, Version 2.0 (see LICENSE). 
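// Editorial sketch (not part of the patch): the shape of the sharded_lmdb change above,
// assuming Executor::spawn_blocking wraps tokio::task::spawn_blocking. Synchronous LMDB work
// runs on tokio 0.2's blocking pool and is exposed as a BoxFuture; the real code threads
// explicit lifetimes per the ASYNC.md note. The key/value logic here is a simplified stand-in.
use futures::future::{BoxFuture, FutureExt};

fn load_value(key: String) -> BoxFuture<'static, Result<Option<String>, String>> {
  async move {
    tokio::task::spawn_blocking(move || {
      // A synchronous lookup stands in here for the real LMDB read transaction.
      if key == "present" {
        Some("value".to_string())
      } else {
        None
      }
    })
    .await
    .map_err(|join_err| format!("blocking task failed: {}", join_err))
  }
  .boxed()
}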
use std; +use std::collections::BTreeMap; use std::convert::{Into, TryInto}; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; +use futures::compat::Future01CompatExt; use futures01::Future; use crate::core::{Failure, TypeId}; @@ -27,8 +29,8 @@ use rand::seq::SliceRandom; use reqwest; use rule_graph::RuleGraph; use sharded_lmdb::ShardedLmdb; -use std::collections::BTreeMap; use store::Store; +use tokio::runtime::{Builder, Runtime}; const GIGABYTES: usize = 1024 * 1024 * 1024; @@ -45,10 +47,11 @@ pub struct Core { pub tasks: Tasks, pub rule_graph: RuleGraph, pub types: Types, + pub runtime: Runtime, pub executor: task_executor::Executor, store: Store, pub command_runner: Box, - pub http_client: reqwest::r#async::Client, + pub http_client: reqwest::Client, pub vfs: PosixFS, pub build_root: PathBuf, } @@ -87,7 +90,18 @@ impl Core { let mut remote_store_servers = remote_store_servers; remote_store_servers.shuffle(&mut rand::thread_rng()); - let executor = task_executor::Executor::new(); + let runtime = Builder::new() + // This use of Builder (rather than just Runtime::new()) is to allow us to lower the + // max_threads setting. As of tokio `0.2.13`, the core threads default to num_cpus, and + // the max threads default to a fixed value of 512. In practice, it appears to be slower + // to allow 512 threads, and with the default stack size, 512 threads would use 1GB of RAM. + .core_threads(num_cpus::get()) + .max_threads(num_cpus::get() * 4) + .threaded_scheduler() + .enable_all() + .build() + .map_err(|e| format!("Failed to start the runtime: {}", e))?; + let executor = task_executor::Executor::new(runtime.handle().clone()); // We re-use these certs for both the execution and store service; they're generally tied together. let root_ca_certs = if let Some(path) = remote_root_ca_certs_path { Some( @@ -219,7 +233,7 @@ impl Core { }) } - let http_client = reqwest::r#async::Client::new(); + let http_client = reqwest::Client::new(); let rule_graph = RuleGraph::new(tasks.as_map(), root_subject_types); Ok(Core { @@ -227,6 +241,7 @@ impl Core { tasks: tasks, rule_graph: rule_graph, types: types, + runtime, executor: executor.clone(), store, command_runner, @@ -309,6 +324,8 @@ impl NodeContext for Context { where F: Future + Send + 'static, { - self.core.executor.spawn_and_ignore(future); + self.core.executor.spawn_and_ignore(async move { + let _ = future.compat().await; + }); } } diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index a79d0fcf35d..258fd0f9e51 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -10,8 +10,11 @@ use std::sync::Arc; use std::time::Duration; use std::{self, fmt}; +use async_trait::async_trait; +use futures::compat::Future01CompatExt; +use futures::future::{FutureExt, TryFutureExt}; +use futures::stream::StreamExt; use futures01::future::{self, Future}; -use futures01::Stream; use url::Url; use crate::context::{Context, Core}; @@ -35,7 +38,7 @@ use rule_graph; use graph::{Entry, Node, NodeError, NodeTracer, NodeVisualizer}; use store::{self, StoreFileByDigest}; use workunit_store::{ - generate_random_64bit_string, set_parent_id, StartedWorkUnit, WorkUnit, WorkUnitStore, + generate_random_64bit_string, scope_task_parent_id, StartedWorkUnit, WorkUnit, WorkUnitStore, }; pub type NodeFuture = BoxFuture; @@ -48,13 +51,14 @@ fn err(failure: Failure) -> NodeFuture { future::err(failure).to_boxed() } +#[async_trait] impl VFS for Context { - fn read_link(&self, link: &Link) -> NodeFuture { - 
self.get(ReadLink(link.clone())).map(|res| res.0).to_boxed() + async fn read_link(&self, link: &Link) -> Result { + Ok(self.get(ReadLink(link.clone())).compat().await?.0) } - fn scandir(&self, dir: Dir) -> NodeFuture> { - self.get(Scandir(dir)) + async fn scandir(&self, dir: Dir) -> Result, Failure> { + self.get(Scandir(dir)).compat().await } fn is_ignored(&self, stat: &fs::Stat) -> bool { @@ -383,6 +387,8 @@ impl WrappedNode for ReadLink { .core .vfs .read_link(&self.0) + .boxed() + .compat() .map(LinkDest) .map_err(|e| throw(&format!("{}", e))) .to_boxed() @@ -409,6 +415,8 @@ impl WrappedNode for DigestFile { .core .vfs .read_file(&self.0) + .boxed() + .compat() .map_err(|e| throw(&format!("{}", e))) .and_then(move |c| { context @@ -442,6 +450,8 @@ impl WrappedNode for Scandir { .core .vfs .scandir(self.0) + .boxed() + .compat() .map(Arc::new) .map_err(|e| throw(&format!("{}", e))) .to_boxed() @@ -467,6 +477,7 @@ impl Snapshot { // and store::Snapshot::from_path_stats tracking dependencies for file digests. context .expand(path_globs) + .compat() .map_err(|e| format!("{}", e)) .and_then(move |path_stats| { store::Snapshot::from_path_stats( @@ -628,6 +639,7 @@ impl DownloadedFile { .http_client .get(url.clone()) .send() + .compat() .map_err(|err| format!("Error downloading file: {}", err)) .and_then(move |response| { // Handle common HTTP errors. @@ -676,25 +688,25 @@ impl DownloadedFile { } } - let hasher = hashing::WriterHasher::new(SizeLimiter { - writer: bytes::BytesMut::with_capacity(expected_digest.1).writer(), - written: 0, - size_limit: expected_digest.1, - }); + let digest_and_bytes = async move { + let mut hasher = hashing::WriterHasher::new(SizeLimiter { + writer: bytes::BytesMut::with_capacity(expected_digest.1).writer(), + written: 0, + size_limit: expected_digest.1, + }); - response - .into_body() - .map_err(|err| format!("Error reading URL fetch response: {}", err)) - .fold(hasher, |mut hasher, chunk| { + let mut response_stream = response.bytes_stream(); + while let Some(next_chunk) = response_stream.next().await { + let chunk = + next_chunk.map_err(|err| format!("Error reading URL fetch response: {}", err))?; hasher .write_all(&chunk) - .map(|_| hasher) - .map_err(|err| format!("Error hashing/writing URL fetch response: {}", err)) - }) - .map(|hasher| { - let (digest, bytewriter) = hasher.finish(); - (digest, bytewriter.writer.into_inner().freeze()) - }) + .map_err(|err| format!("Error hashing/capturing URL fetch response: {}", err))?; + } + let (digest, bytewriter) = hasher.finish(); + Ok((digest, bytewriter.writer.into_inner().freeze())) + }; + digest_and_bytes.boxed().compat().to_boxed() }) .and_then(move |(actual_digest, buf)| { if expected_digest != actual_digest { @@ -1053,28 +1065,28 @@ impl Node for NodeKey { (None, None) }; - let context2 = context.clone(); - future::lazy(|| { - if let Some(span_id) = maybe_span_id { - set_parent_id(span_id); - } - match self { - NodeKey::DigestFile(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::DownloadedFile(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::MultiPlatformExecuteProcess(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::ReadLink(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::Scandir(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::Select(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::Snapshot(n) => n.run(context).map(NodeResult::from).to_boxed(), - NodeKey::Task(n) => 
n.run(context).map(NodeResult::from).to_boxed(), - } - }) - .inspect(move |_: &NodeResult| { + scope_task_parent_id(maybe_span_id, async move { + let context2 = context.clone(); + let result = match self { + NodeKey::DigestFile(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::DownloadedFile(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::MultiPlatformExecuteProcess(n) => { + n.run(context).map(NodeResult::from).compat().await + } + NodeKey::ReadLink(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Scandir(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Select(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Snapshot(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Task(n) => n.run(context).map(NodeResult::from).compat().await, + }; if let Some(started_workunit) = maybe_started_workunit { let workunit: WorkUnit = started_workunit.finish(); context2.session.workunit_store().add_workunit(workunit) } + result }) + .boxed() + .compat() .to_boxed() } diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index 248ac1a3662..a6374c547dd 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -8,6 +8,7 @@ use std::path::{Path, PathBuf}; use std::sync::{mpsc, Arc}; use std::time::Duration; +use futures::compat::Future01CompatExt; use futures01::future::{self, Future}; use crate::context::{Context, Core}; @@ -341,14 +342,14 @@ impl Scheduler { // If the join failed (due to `Invalidated`, since that is the only error we propagate), retry // the entire set of roots. - core.executor.spawn_and_ignore(roots_res.then(move |res| { + core.executor.spawn_and_ignore(async move { + let res = roots_res.compat().await; if let Ok(res) = res { - sender.send(res).map_err(|_| ()) + let _ = sender.send(res); } else { Scheduler::execute_helper(context, sender, roots, count - 1); - Ok(()) } - })); + }); } /// diff --git a/src/rust/engine/task_executor/Cargo.toml b/src/rust/engine/task_executor/Cargo.toml index 37737f08ba6..5e48d78c953 100644 --- a/src/rust/engine/task_executor/Cargo.toml +++ b/src/rust/engine/task_executor/Cargo.toml @@ -6,8 +6,7 @@ authors = [ "Pants Build " ] publish = false [dependencies] -futures01 = { package = "futures", version = "0.1" } -futures-cpupool = "0.1" +futures = "0.3" logging = { path = "../logging" } -tokio = "0.1" +tokio = { version = "0.2", features = ["blocking", "rt-threaded"] } workunit_store = { path = "../workunit_store" } diff --git a/src/rust/engine/task_executor/src/lib.rs b/src/rust/engine/task_executor/src/lib.rs index 5e3da4d7518..e4d53318522 100644 --- a/src/rust/engine/task_executor/src/lib.rs +++ b/src/rust/engine/task_executor/src/lib.rs @@ -25,29 +25,31 @@ // Arc can be more clear than needing to grok Orderings: #![allow(clippy::mutex_atomic)] -use futures01::{future, Future}; -use std::sync::Arc; -use tokio::runtime::Runtime; +use std::future::Future; -// TODO: It's strange that this is an exposed interface from the logging crate, rather than an -// implementation of a trait that lives elsewhere. This can't currently be a trait because its -// methods have generic types, which isn't allowed on traits. If we can move the API somewhere else -// in the future, that could be nice. 
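The nodes.rs and scheduler.rs hunks above lean on two bridges between futures 0.1 and stdlib futures: `.compat()` to await a 0.1 future from async code, and `.boxed().compat()` to hand an async fn back to a 0.1 consumer. The following minimal sketch shows both directions; it assumes futures = { version = "0.3", features = ["compat"] } with the 0.1 crate renamed to futures01 (as in the engine's Cargo.tomls), and the legacy_len / new_len / as_01 names are invented for illustration.

use futures::compat::Future01CompatExt;
use futures::executor::block_on;
use futures::future::{FutureExt, TryFutureExt};

// Legacy code still speaks futures 0.1.
fn legacy_len(s: String) -> impl futures01::Future<Item = usize, Error = String> {
  futures01::future::ok::<usize, String>(s.len())
}

// Ported code awaits it via `.compat()`, which surfaces the Item/Error pair as a Result.
async fn new_len(s: String) -> Result<usize, String> {
  legacy_len(s).compat().await
}

// An async fn is handed back to a 0.1 caller via `.boxed().compat()`.
fn as_01(s: String) -> impl futures01::Future<Item = usize, Error = String> {
  new_len(s).boxed().compat()
}

fn main() {
  assert_eq!(block_on(new_len("four".to_owned())), Ok(4));
  assert_eq!(block_on(as_01("four".to_owned()).compat()), Ok(4));
}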
+use futures::future::{BoxFuture, FutureExt}; + +use tokio::runtime::{Handle, Runtime}; #[derive(Clone)] pub struct Executor { - runtime: Arc, - io_pool: futures_cpupool::CpuPool, + handle: Handle, } impl Executor { - pub fn new() -> Executor { - Executor { - runtime: Arc::new( - Runtime::new().unwrap_or_else(|e| panic!("Could not initialize Runtime: {:?}", e)), - ), - io_pool: futures_cpupool::CpuPool::new_num_cpus(), - } + pub fn new(handle: Handle) -> Executor { + Executor { handle } + } + + /// + /// Enter the runtime context associated with this Executor. This should be used in situations + /// where threads not started by the runtime need access to it via task-local variables. + /// + pub fn enter(&self, f: F) -> R + where + F: FnOnce() -> R, + { + self.handle.enter(f) } /// @@ -58,18 +60,15 @@ impl Executor { /// This may be useful e.g. if you want to kick off a potentially long-running task, which will /// notify dependees of its completion over an mpsc channel. /// - pub fn spawn_and_ignore + Send + 'static>(&self, future: F) { - self - .runtime - .executor() - .spawn(Self::future_with_correct_context(future)) + pub fn spawn_and_ignore + Send + 'static>(&self, future: F) { + tokio::spawn(Self::future_with_correct_context(future)); } /// /// Run a Future on a tokio Runtime as a new Task, and return a Future handle to it. /// - /// The future will only be driven to completion if something drives the returned Future. If the - /// returned Future is dropped, the computation may be cancelled. + /// If the returned Future is dropped, the computation will still continue to completion: see the + /// tokio::task::JoinHandle docs. /// /// This may be useful for tokio tasks which use the tokio blocking feature (unrelated to the /// Executor::block_on method). When tokio blocking tasks run, they prevent progress on any @@ -85,18 +84,15 @@ impl Executor { /// See https://docs.rs/tokio-threadpool/0.1.15/tokio_threadpool/fn.blocking.html for details of /// tokio blocking. /// - pub fn spawn_oneshot< - Item: Send + 'static, - Error: Send + 'static, - F: Future + Send + 'static, - >( + pub async fn spawn_oneshot + Send + 'static>( &self, future: F, - ) -> impl Future { - futures01::sync::oneshot::spawn( - Self::future_with_correct_context(future), - &self.runtime.executor(), - ) + ) -> O { + // NB: We unwrap here because the only thing that should cause an error in a spawned task is a + // panic, in which case we want to propagate that. + tokio::spawn(Self::future_with_correct_context(future)) + .await + .unwrap() } /// @@ -110,14 +106,7 @@ impl Executor { /// Executor::spawn_and_ignore or Executor::spawn_oneshot. Because it should be used only in very /// limited situations, this overhead is viewed to be acceptable. /// - pub fn block_on< - Item: Send + 'static, - Error: Send + 'static, - F: Future + Send + 'static, - >( - &self, - future: F, - ) -> Result { + pub fn block_on(&self, future: F) -> F::Output { // Make sure to copy our (thread-local) logging destination into the task. // When a daemon thread kicks off a future, it should log like a daemon thread (and similarly // for a user-facing thread). @@ -135,17 +124,23 @@ impl Executor { /// it has caused significant performance regressions, so for how we continue to use our legacy /// I/O CpuPool. Hopefully we can delete this method at some point. 
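The doc-comment changes above hinge on tokio 0.2's JoinHandle semantics: awaiting the handle yields a Result whose Err case is a panicked task, and dropping the handle merely detaches the task rather than cancelling it. A small sketch of both behaviours, assuming tokio = { version = "0.2", features = ["full"] }; the oneshot channel exists only so the detached task's completion can be observed.

#[tokio::main]
async fn main() {
  // spawn_oneshot-style: spawn, then await the JoinHandle; the Err case
  // corresponds to a panic in the spawned task.
  let handle = tokio::spawn(async { 2 + 2 });
  assert_eq!(handle.await.expect("task panicked"), 4);

  // spawn_and_ignore-style: dropping the JoinHandle detaches the task, and the
  // runtime still drives it to completion.
  let (tx, rx) = tokio::sync::oneshot::channel();
  drop(tokio::spawn(async move {
    let _ = tx.send("done");
  }));
  assert_eq!(rx.await.unwrap(), "done");
}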
/// - pub fn spawn_on_io_pool< - Item: Send + 'static, - Error: Send + 'static, - F: Future + Send + 'static, - >( - &self, - future: F, - ) -> impl Future { - self - .io_pool - .spawn(Self::future_with_correct_context(future)) + /// TODO: See the note on references in ASYNC.md. + /// + pub fn spawn_blocking<'a, 'b, F: FnOnce() -> R + Send + 'static, R: Send + 'static>( + &'a self, + f: F, + ) -> BoxFuture<'b, R> { + let logging_destination = logging::get_destination(); + let workunit_parent_id = workunit_store::get_parent_id(); + // NB: We unwrap here because the only thing that should cause an error in a spawned task is a + // panic, in which case we want to propagate that. + tokio::task::spawn_blocking(move || { + logging::set_thread_destination(logging_destination); + workunit_store::set_thread_parent_id(workunit_parent_id); + f() + }) + .map(|res| res.unwrap()) + .boxed() } /// @@ -154,17 +149,16 @@ impl Executor { /// by it ends up in the pantsd log as we expect. The latter ensures that when a new workunit /// is created it has an accurate handle to its parent. /// - fn future_with_correct_context>( - future: F, - ) -> impl Future { + fn future_with_correct_context(future: F) -> impl Future { let logging_destination = logging::get_destination(); let workunit_parent_id = workunit_store::get_parent_id(); - future::lazy(move || { - logging::set_destination(logging_destination); - if let Some(parent_id) = workunit_parent_id { - workunit_store::set_parent_id(parent_id); - } - future + + // NB: It is important that the first portion of this method is synchronous (meaning that this + // method cannot be `async`), because that means that it will run on the thread that calls it. + // The second, async portion of the method will run in the spawned Task. + + logging::scope_task_destination(logging_destination, async move { + workunit_store::scope_task_parent_id(workunit_parent_id, future).await }) } } diff --git a/src/rust/engine/workunit_store/Cargo.toml b/src/rust/engine/workunit_store/Cargo.toml index 0dd10b198b1..8da56df5c99 100644 --- a/src/rust/engine/workunit_store/Cargo.toml +++ b/src/rust/engine/workunit_store/Cargo.toml @@ -7,6 +7,6 @@ publish = false [dependencies] concrete_time = { path = "../concrete_time" } -futures01 = { package = "futures", version = "0.1" } parking_lot = "0.6" rand = "0.6" +tokio = { version = "0.2", features = ["rt-util"] } diff --git a/src/rust/engine/workunit_store/src/lib.rs b/src/rust/engine/workunit_store/src/lib.rs index 6142547ad1c..95c39194ff1 100644 --- a/src/rust/engine/workunit_store/src/lib.rs +++ b/src/rust/engine/workunit_store/src/lib.rs @@ -26,10 +26,13 @@ #![allow(clippy::mutex_atomic)] use concrete_time::TimeSpan; -use futures01::task_local; use parking_lot::Mutex; use rand::thread_rng; use rand::Rng; +use tokio::task_local; + +use std::cell::RefCell; +use std::future::Future; use std::sync::Arc; #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -125,26 +128,40 @@ fn hex_16_digit_string(number: u64) -> String { format!("{:016.x}", number) } +thread_local! { + static THREAD_PARENT_ID: RefCell> = RefCell::new(None) +} + task_local! { - static TASK_PARENT_ID: Mutex> = Mutex::new(None) + static TASK_PARENT_ID: Option; } -pub fn set_parent_id(parent_id: String) { - if futures01::task::is_in_task() { - TASK_PARENT_ID.with(|task_parent_id| { - *task_parent_id.lock() = Some(parent_id); - }) - } +/// +/// Set the current parent_id for a Thread, but _not_ for a Task. 
Tasks must always be spawned +/// by callers using the `scope_task_parent_id` helper (generally via task_executor::Executor.) +/// +pub fn set_thread_parent_id(parent_id: Option) { + THREAD_PARENT_ID.with(|thread_parent_id| { + *thread_parent_id.borrow_mut() = parent_id; + }) +} + +/// +/// Propagate the current parent_id to a Future representing a newly spawned Task. Usage of +/// this method should mostly be contained to task_executor::Executor. +/// +pub async fn scope_task_parent_id(parent_id: Option, f: F) -> F::Output +where + F: Future, +{ + TASK_PARENT_ID.scope(parent_id, f).await } pub fn get_parent_id() -> Option { - if futures01::task::is_in_task() { - TASK_PARENT_ID.with(|task_parent_id| { - let task_parent_id = task_parent_id.lock(); - (*task_parent_id).clone() - }) + if let Ok(Some(parent_id)) = TASK_PARENT_ID.try_with(|parent_id| parent_id.clone()) { + Some(parent_id) } else { - None + THREAD_PARENT_ID.with(|parent_id| parent_id.borrow().clone()) } } From 87d1098ba8c0af28097877c1b17bb82f4478cfb8 Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Thu, 26 Mar 2020 12:01:39 -0700 Subject: [PATCH 02/15] Add notify fs watcher to engine. (#9318) * Use the notify crate to implement an `InvalidationWatcher` for Graph operations. * Make watch async, and watcher log to pantsd.log. Relativize paths returned from notify to the build_root. Refactor invalidate method to be an associated method on the InvalidationWatcher. * Respond to feedback. * Use spawn on io pool instead of custom future impl * Write python fs tests * Relativize paths to invalidate to build root * invalidate nodes with parent paths. * Comments * Add rust tests. Make some things public so we can use them in tests. Use canonical path to build root for relativizing changed paths. * Refactor Python tests. Return watch errors as core::Failure all the way to user. Move task executor onto invalidation watcher. Move test_support trait impl into test_support mod. * use futures lock on watcher * Platform specific watching behavior. On Darwin recursively watch the build root at startup. On Linux watch individual directory roots. Co-authored-by: Stu Hood --- pants.toml | 2 + src/rust/engine/Cargo.lock | 1182 ++++++++++++--------- src/rust/engine/Cargo.toml | 13 + src/rust/engine/graph/src/entry.rs | 14 +- src/rust/engine/graph/src/lib.rs | 43 +- src/rust/engine/sharded_lmdb/src/lib.rs | 2 +- src/rust/engine/src/context.rs | 12 +- src/rust/engine/src/core.rs | 2 + src/rust/engine/src/externs.rs | 1 + src/rust/engine/src/lib.rs | 4 + src/rust/engine/src/nodes.rs | 41 +- src/rust/engine/src/scheduler.rs | 18 +- src/rust/engine/src/watch.rs | 198 ++++ src/rust/engine/src/watch_tests.rs | 66 ++ src/rust/engine/testutil/src/lib.rs | 5 + tests/python/pants_test/engine/test_fs.py | 130 ++- 16 files changed, 1178 insertions(+), 555 deletions(-) create mode 100644 src/rust/engine/src/watch.rs create mode 100644 src/rust/engine/src/watch_tests.rs diff --git a/pants.toml b/pants.toml index 08a7e753fde..78ece7a7bb8 100644 --- a/pants.toml +++ b/pants.toml @@ -87,6 +87,8 @@ pants_ignore.add = [ "/build-support/*.venv/", # An absolute symlink to the Pants Rust toolchain sources. "/build-support/bin/native/src", + # We shouldn't walk or watch the rust compiler artifacts because it is slow. 
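The two halves of the scheme above fit together as follows: spawned futures get the parent id via a task-local scope, blocking closures get it restored onto the blocking-pool thread, and the lookup prefers the task-local before falling back to the thread-local. The self-contained sketch below mirrors that arrangement under assumed names; it uses tokio = { version = "0.2", features = ["full"] }, and its TASK_PARENT_ID / THREAD_PARENT_ID / get_parent_id are illustrative stand-ins rather than the engine's statics.

use std::cell::RefCell;

tokio::task_local! {
  // Stand-in for the engine's task-local parent id.
  static TASK_PARENT_ID: Option<String>;
}

thread_local! {
  // Stand-in for the engine's thread-local parent id.
  static THREAD_PARENT_ID: RefCell<Option<String>> = RefCell::new(None);
}

fn get_parent_id() -> Option<String> {
  // Prefer the task-local when running inside a scoped task; otherwise fall
  // back to whatever was set for the current thread.
  if let Ok(Some(parent_id)) = TASK_PARENT_ID.try_with(|parent_id| parent_id.clone()) {
    Some(parent_id)
  } else {
    THREAD_PARENT_ID.with(|parent_id| parent_id.borrow().clone())
  }
}

#[tokio::main]
async fn main() {
  // Spawned futures are wrapped in a scope, so the task-local is visible to
  // everything the future does while it is polled.
  let seen = tokio::spawn(TASK_PARENT_ID.scope(Some("task".to_owned()), async {
    get_parent_id()
  }))
  .await
  .unwrap();
  assert_eq!(seen, Some("task".to_owned()));

  // Blocking closures are not tasks: no task-local scope is active on the
  // blocking-pool thread, so the value is set on that thread instead and the
  // lookup falls back to it.
  let parent_id = Some("blocking".to_owned());
  let seen = tokio::task::spawn_blocking(move || {
    THREAD_PARENT_ID.with(|p| *p.borrow_mut() = parent_id);
    get_parent_id()
  })
  .await
  .unwrap();
  assert_eq!(seen, Some("blocking".to_owned()));
}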
+ "/src/rust/engine/target", ] build_file_imports = "error" diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 39abbaeb17d..fd0b560d907 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -7,10 +7,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "aho-corasick" -version = "0.7.8" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -22,13 +22,13 @@ dependencies = [ ] [[package]] -name = "anyhow" -version = "1.0.26" +name = "anymap" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "arc-swap" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -43,12 +43,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "async-trait" -version = "0.1.24" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -58,7 +58,7 @@ dependencies = [ "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -66,8 +66,8 @@ name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -83,22 +83,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "backtrace" -version = "0.3.44" +version = "0.3.46" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace-sys 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "backtrace-sys" -version = "0.1.32" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 
(registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -179,12 +179,12 @@ dependencies = [ "clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)", "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", - "errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "errno 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "fuse 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", @@ -193,17 +193,17 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] [[package]] name = "bstr" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -212,7 +212,7 @@ version = "0.0.1" [[package]] name = "bumpalo" -version = "3.2.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -244,14 +244,6 @@ name = "bytesize" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "c2-chacha" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "cargo" version = "0.34.0" @@ -265,12 +257,12 @@ dependencies = [ "crates-io 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", "crypto-hash 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.31+curl-7.70.0 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", "fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "fwdansi 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -278,24 +270,24 @@ dependencies = [ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "home 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "ignore 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "ignore 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)", "im-rc 12.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "opener 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-workspace-hack 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustfix 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.26 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -316,9 +308,9 @@ dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.10 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -326,7 +318,7 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.50" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -334,14 +326,23 @@ name = "cfg-if" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "chashmap" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "chrono" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "num-integer 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -371,7 +372,7 @@ name = "cmake" version = "0.1.42" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -395,7 +396,7 @@ name = "commoncrypto-sys" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -403,8 +404,8 @@ name = "concrete_time" version = "0.0.1" dependencies = [ "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -426,7 +427,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -435,7 +436,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -453,11 +454,11 @@ name = "crates-io" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.29 
(registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -471,10 +472,19 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.4.0" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "crossbeam-channel" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -488,10 +498,10 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -503,7 +513,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl 0.10.28 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl 0.10.29 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -517,28 +527,28 @@ dependencies = [ [[package]] name = "curl" -version = "0.4.25" +version = "0.4.29" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl-sys 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.31+curl-7.70.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "curl-sys" -version = "0.4.25" +version = "0.4.31+curl-7.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "libnghttp2-sys 0.1.2 
(registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "libnghttp2-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -546,7 +556,7 @@ dependencies = [ [[package]] name = "derivative" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", @@ -554,6 +564,16 @@ dependencies = [ "syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "derivative" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "digest" version = "0.8.1" @@ -575,7 +595,7 @@ name = "dirs" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -595,7 +615,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "redox_users 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -622,14 +642,18 @@ dependencies = [ name = "engine" version = "0.0.1" dependencies = [ - "async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", + "async-trait 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", + "async_semaphore 0.0.1", "boxfuture 0.0.1", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "concrete_time 0.0.1", + "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "graph 0.0.1", "hashing 0.0.1", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -637,7 +661,8 @@ dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 
(registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)", + "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_enum 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "process_execution 0.0.1", @@ -649,8 +674,9 @@ dependencies = [ "store 0.1.0", "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "testutil 0.0.1", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -663,7 +689,7 @@ version = "0.0.1" dependencies = [ "build_utils 0.0.1", "cbindgen 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "engine 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -686,7 +712,7 @@ dependencies = [ "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -698,17 +724,17 @@ dependencies = [ "atty 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "errno" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "errno-dragonfly 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -718,26 +744,26 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.55 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "backtrace 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)", 
- "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "backtrace 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "failure_derive" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", "synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -748,11 +774,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "filetime" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -764,12 +790,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "flate2" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", "miniz_oxide 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -796,18 +822,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "fs" version = "0.0.1" dependencies = [ - "async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", + "async-trait 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "ignore 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", + "ignore 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -815,7 +841,7 @@ name = "fs2" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 
(registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -834,15 +860,32 @@ dependencies = [ "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] +[[package]] +name = "fsevent" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fsevent-sys" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "fuchsia-cprng" version = "0.1.1" @@ -867,11 +910,11 @@ name = "fuse" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "thread-scoped 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -922,15 +965,25 @@ name = "futures-io" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "futures-locks" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "futures-macro" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 
1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -955,10 +1008,10 @@ dependencies = [ "futures-macro 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-task 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro-nested 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro-nested 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -967,7 +1020,7 @@ name = "fwdansi" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "termcolor 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -981,7 +1034,7 @@ name = "generic-array" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -990,7 +1043,7 @@ version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "wasi 0.9.0+wasi-snapshot-preview1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1000,11 +1053,11 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1013,7 +1066,7 @@ name = "git2-curl" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", + "curl 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)", "git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ 
-1026,14 +1079,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "globset" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "aho-corasick 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", - "bstr 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)", + "bstr 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1058,7 +1111,7 @@ source = "git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093 dependencies = [ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "grpcio-sys 0.2.3 (git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", ] @@ -1077,9 +1130,9 @@ name = "grpcio-sys" version = "0.2.3" source = "git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532#b582ef3dc4e8c7289093c8febff8dadf0997b532" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "cmake 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1102,7 +1155,7 @@ dependencies = [ [[package]] name = "h2" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1110,12 +1163,12 @@ dependencies = [ "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1124,9 +1177,9 @@ version = "0.0.1" dependencies = [ "digest 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 
(registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_test 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_test 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1140,10 +1193,10 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.1.6" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1172,7 +1225,7 @@ dependencies = [ [[package]] name = "http" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1186,7 +1239,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1204,23 +1257,23 @@ dependencies = [ [[package]] name = "hyper" -version = "0.13.4" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-channel 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "h2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "h2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", - "pin-project 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1233,11 +1286,11 @@ dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "ct-logs 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util 0.3.4 
(registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustls-native-certs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1264,19 +1317,20 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.11" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "globset 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1286,7 +1340,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "sized-chunks 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1297,12 +1351,30 @@ dependencies = [ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "inotify" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "inotify-sys" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "iovec" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1331,15 +1403,15 @@ name = "jobserver" version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "js-sys" -version = "0.3.35" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1363,7 +1435,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libc" -version = "0.2.66" +version = "0.2.69" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1371,33 +1443,33 @@ name = "libgit2-sys" version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "curl-sys 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "libssh2-sys 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "curl-sys 0.4.31+curl-7.70.0 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "libssh2-sys 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libnghttp2-sys" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "libssh2-sys" -version = "0.2.14" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1407,8 +1479,8 @@ name = "libz-sys" version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + 
"cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1419,7 +1491,7 @@ version = "0.8.0" source = "git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8#06bdfbfc6348f6804127176e561843f214fc17f8" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "lmdb-sys 0.8.0 (git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)", ] @@ -1428,8 +1500,8 @@ name = "lmdb-sys" version = "0.8.0" source = "git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8#06bdfbfc6348f6804127176e561843f214fc17f8" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1457,7 +1529,7 @@ name = "lock_api" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "owning_ref 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1482,13 +1554,13 @@ name = "logging" version = "0.0.1" dependencies = [ "cargo 0.34.0 (registry+https://github.com/rust-lang/crates.io-index)", - "chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "num_enum 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "simplelog 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "simplelog 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1510,7 +1582,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "memchr" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1520,7 +1592,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "mime_guess" -version = "2.0.1" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1537,7 +1609,7 @@ dependencies = [ [[package]] name = "mio" -version = "0.6.21" +version = "0.6.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 
(registry+https://github.com/rust-lang/crates.io-index)", @@ -1545,33 +1617,44 @@ dependencies = [ "fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "mio-extras" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", + "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "mio-named-pipes" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", + "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "mio-uds" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1580,7 +1663,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)", + "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1590,7 +1673,7 @@ name = "miow" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)", + "socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1623,34 +1706,46 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "nails" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.5.4 
(registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "net2" -version = "0.2.33" +version = "0.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "nom" -version = "4.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "notify" +version = "5.0.0-pre.1" +source = "git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd#fba00891d9105e2f581c69fbe415a58cb7966fdd" dependencies = [ - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "chashmap 2.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", + "mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1726,11 +1821,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "hermit-abi 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1743,11 +1838,36 @@ dependencies = [ "syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "num_enum" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "derivative 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_enum_derive 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "num_enum_derive" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "proc-macro-crate 0.1.4 
(registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "numtoa" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "once_cell" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "opaque-debug" version = "0.2.3" @@ -1758,22 +1878,22 @@ name = "opener" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "failure_derive 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "openssl" -version = "0.10.28" +version = "0.10.29" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1783,12 +1903,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "openssl-sys" -version = "0.9.54" +version = "0.9.55" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", "vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1800,12 +1920,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "owning_ref" -version = "0.4.0" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "owning_ref" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "parking_lot" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "parking_lot" version = "0.6.4" @@ -1815,12 
+1952,23 @@ dependencies = [ "parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "parking_lot_core" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "parking_lot_core" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1848,20 +1996,20 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.8" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "pin-project-internal 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-internal 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "pin-project-internal" -version = "0.4.8" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1871,7 +2019,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "pin-utils" -version = "0.1.0-alpha.4" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1885,18 +2033,21 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] -name = "proc-macro-hack" -version = "0.5.11" +name = "proc-macro-crate" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "proc-macro-hack" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "proc-macro-nested" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -1909,7 +2060,7 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.8" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1925,7 +2076,7 @@ dependencies = [ "bytes 0.4.12 
(registry+https://github.com/rust-lang/crates.io-index)", "concrete_time 0.0.1", "copy_dir 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "derivative 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "derivative 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "digest 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1934,14 +2085,14 @@ dependencies = [ "hashing 0.0.1", "itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "mock 0.0.1", - "nails 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "nails 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", - "regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)", "sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "sharded_lmdb 0.0.1", "spectral 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1949,7 +2100,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "uname 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1967,7 +2118,7 @@ dependencies = [ "process_execution 0.0.1", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -2003,7 +2154,7 @@ name = "prost-derive" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2038,10 +2189,10 @@ dependencies = [ [[package]] name = "protoc" -version = "2.10.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2049,12 +2200,12 @@ name = "protoc-grpcio" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 
(registry+https://github.com/rust-lang/crates.io-index)", "grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "mktemp 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", "protobuf-codegen 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", - "protoc 2.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "protoc 2.14.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2072,10 +2223,10 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2083,7 +2234,7 @@ name = "rand" version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2093,7 +2244,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2106,7 +2257,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2117,7 +2268,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2135,8 +2286,8 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "getrandom 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", 
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2152,10 +2303,10 @@ dependencies = [ [[package]] name = "rand_chacha" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2209,7 +2360,7 @@ name = "rand_jitter" version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2221,7 +2372,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2277,18 +2428,18 @@ dependencies = [ [[package]] name = "regex" -version = "1.3.4" +version = "1.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "aho-corasick 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)", + "aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)", "thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "regex-syntax" -version = "0.6.14" +version = "0.6.17" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2309,42 +2460,42 @@ dependencies = [ "encoding_rs 0.8.22 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)", + "hyper 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)", "hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", - "mime_guess 
2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-futures 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-futures 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", "webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)", "winreg 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "ring" -version = "0.16.11" +version = "0.16.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)", - "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2360,7 +2511,7 @@ dependencies = [ "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "blake2b_simd 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", "constant_time_eq 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2391,10 +2542,10 @@ name = "rustfix" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", 
- "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2404,7 +2555,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)", + "ring 0.16.13 (registry+https://github.com/rust-lang/crates.io-index)", "sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2416,13 +2567,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", - "schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "security-framework 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "schannel 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "ryu" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -2430,12 +2581,12 @@ name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "schannel" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2452,28 +2603,29 @@ name = "sct" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)", - "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ring 0.16.13 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "security-framework" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "security-framework-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "security-framework-sys 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "security-framework-sys" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 
(registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2482,7 +2634,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2492,20 +2644,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "serde" -version = "1.0.104" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_derive" -version = "1.0.104" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2513,25 +2665,25 @@ name = "serde_ignored" version = "0.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_json" -version = "1.0.48" +version = "1.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", - "ryu 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "ryu 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "serde_test" -version = "1.0.104" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2541,7 +2693,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "dtoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2555,7 +2707,7 @@ dependencies = [ "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2593,16 +2745,16 @@ name = "signal-hook-registry" version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "arc-swap 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "arc-swap 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "simplelog" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", + "chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2612,7 +2764,7 @@ name = "sized-chunks" version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", + "typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2630,25 +2782,20 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "socket2" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "sourcefile" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "spectral" version = "0.6.0" @@ -2690,15 +2837,15 @@ dependencies = [ "mock 0.0.1", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "serverset 0.0.1", "sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "sharded_lmdb 0.0.1", "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", @@ -2759,11 +2906,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.14" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 
(registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2772,9 +2919,9 @@ name = "synstructure" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2783,8 +2930,8 @@ name = "tar" version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "xattr 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2793,7 +2940,7 @@ dependencies = [ name = "tar_api" version = "0.0.1" dependencies = [ - "flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)", + "flate2 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", "tar 0.4.26 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", @@ -2805,7 +2952,7 @@ version = "0.0.1" dependencies = [ "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -2824,7 +2971,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2845,7 +2992,7 @@ name = "termcolor" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2853,7 +3000,7 @@ name = "termion" version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "numtoa 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2894,33 +3041,33 @@ 
dependencies = [ [[package]] name = "time" -version = "0.1.42" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "tokio" -version = "0.2.13" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", + "memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", "mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)", + "mio-uds 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", "signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-macros 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-macros 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2933,6 +3080,24 @@ dependencies = [ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tokio-current-thread" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tokio-executor" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "tokio-io" version = "0.1.13" @@ -2945,12 +3110,12 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 
(registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2960,7 +3125,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2974,7 +3139,20 @@ dependencies = [ "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "tokio-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2982,7 +3160,15 @@ name = "toml" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "toml" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3088,7 +3274,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "typenum" -version = "1.11.2" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -3105,7 +3291,7 @@ name = "uname" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3129,7 +3315,7 @@ name = "unicode-normalization" version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "smallvec 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "smallvec 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3154,7 +3340,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "untrusted" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -3182,7 +3368,7 @@ name = "url_serde" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 
1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3213,11 +3399,6 @@ name = "vec_map" version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "version_check" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "version_check" version = "0.9.1" @@ -3239,7 +3420,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3258,91 +3439,73 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "wasm-bindgen" -version = "0.2.58" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)", + "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.58" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bumpalo 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bumpalo 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.8" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", - "web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "web-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.58" +version = "0.2.62" source = 
"registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-macro-support 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-macro-support 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.58" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-shared 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-backend 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen-shared 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.58" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "wasm-bindgen-webidl" -version = "0.2.58" +version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "anyhow 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)", - "heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-backend 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", - "weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "web-sys" -version = "0.3.35" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "anyhow 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)", - "js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", - "sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", - "wasm-bindgen-webidl 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)", + "js-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)", + "wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3350,8 +3513,8 @@ name = "webpki" version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)", - "untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ring 0.16.13 (registry+https://github.com/rust-lang/crates.io-index)", + "untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3362,21 +3525,13 @@ dependencies = [ "webpki 0.21.2 
(registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "weedle" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "which" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3405,7 +3560,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "winapi-util" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3431,7 +3586,7 @@ dependencies = [ "concrete_time 0.0.1", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3448,41 +3603,41 @@ name = "xattr" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", ] [metadata] "checksum adler32 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5d2e7343e7fc9de883d1b0341e0b13970f764c14101234857d2ddafa1cb1cac2" -"checksum aho-corasick 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "743ad5a418686aad3b87fd14c43badd828cf26e214a00f92a384291cf22e1811" +"checksum aho-corasick 0.7.10 (registry+https://github.com/rust-lang/crates.io-index)" = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada" "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -"checksum anyhow 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)" = "7825f6833612eb2414095684fcf6c635becf3ce97fe48cf6421321e93bfbd53c" -"checksum arc-swap 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d7b8a9123b8027467bce0099fe556c628a53c8d83df0507084c31e9ba2e39aff" +"checksum anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)" = "33954243bd79057c2de7338850b85983a44588021f8a5fee574a8888c6de4344" +"checksum arc-swap 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b585a98a234c46fc563103e9278c9391fde1f4e6850334da895d27edb9580f62" "checksum arrayref 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" "checksum arrayvec 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8" -"checksum async-trait 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "750b1c38a1dfadd108da0f01c08f4cdc7ff1bb39b325f9c82cc972361780a6e1" +"checksum async-trait 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "da71fef07bc806586090247e971229289f64c210a278ee5ae419314eb386b31d" "checksum atty 0.2.14 
(registry+https://github.com/rust-lang/crates.io-index)" = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" "checksum autocfg 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" "checksum autocfg 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d" -"checksum backtrace 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "e4036b9bf40f3cf16aba72a3d65e8a520fc4bafcdc7079aea8f848c58c5b5536" -"checksum backtrace-sys 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6575f128516de27e3ce99689419835fce9643a9b215a14d2b5b685be018491" +"checksum backtrace 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "b1e692897359247cc6bb902933361652380af0f1b7651ae5c5013407f30e109e" +"checksum backtrace-sys 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "18fbebbe1c9d1f383a9cc7e8ccdb471b91c8d024ee9c2ca5b5346121fe8b4399" "checksum base64 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" "checksum bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" "checksum blake2b_simd 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d8fb2d74254a3a0b5cac33ac9f8ed0e44aa50378d9dbb2e5d83bd21ed1dc2c8a" "checksum block-buffer 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" "checksum block-padding 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -"checksum bstr 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "502ae1441a0a5adb8fbd38a5955a6416b9493e92b465de5e4a9bde6a539c2c48" -"checksum bumpalo 3.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1f359dc14ff8911330a51ef78022d376f25ed00248912803b58f00cb1c27f742" +"checksum bstr 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "2889e6d50f394968c8bf4240dc3f2a7eb4680844d27308f798229ac9d4725f41" +"checksum bumpalo 3.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "12ae9db68ad7fac5fe51304d20f016c911539251075a214f8e663babefa35187" "checksum byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" "checksum byteorder 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" "checksum bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "130aac562c0dd69c56b3b1cc8ffd2e17be31d0b6c25b61c96b76231aa23e39e1" "checksum bytesize 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "716960a18f978640f25101b5cbf1c6f6b0d3192fab36a2d98ca96f0ecbe41010" -"checksum c2-chacha 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb" "checksum cargo 0.34.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f7e90b5f23ae79af3ec0e4dc670349167fd47d6c1134f139cf0627817a4792bf" "checksum cbindgen 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" 
= "1f861ef68cabbb271d373a7795014052bff37edce22c620d95e395e8719d7dc5" -"checksum cc 1.0.50 (registry+https://github.com/rust-lang/crates.io-index)" = "95e28fa049fda1c330bcf9d723be7663a899c4679724b34c81e9f5a326aab8cd" +"checksum cc 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)" = "c3d87b23d6a92cd03af510a5ade527033f6aa6fa92161e2d5863a907d4c5e31d" "checksum cfg-if 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" -"checksum chrono 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "31850b4a4d6bae316f7a09e691c944c28299298837edc0a03f755618c23cbc01" +"checksum chashmap 2.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ff41a3c2c1e39921b9003de14bf0439c7b63a9039637c291e1a64925d8ddfa45" +"checksum chrono 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "80094f509cf8b5ae86a4966a39b3ff66cd7e2a3e594accec3743ff3fabeab5b2" "checksum clap 2.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5067f5bb2d80ef5d68b4c87db81601f0b75bca627bc2ef76b141d7b846a3c6d9" "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" "checksum cmake 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "81fb25b677f8bf1eb325017cb6bb8452f87969db0fedb4f757b297bee78a7c62" @@ -3497,14 +3652,16 @@ dependencies = [ "checksum core-foundation-sys 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac" "checksum crates-io 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)" = "091018c3f5e8109d82d94b648555f0d4a308d15626da2fb22c76f32117e24569" "checksum crc32fast 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" -"checksum crossbeam-channel 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c" +"checksum crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c8ec7fcd21571dc78f96cc96243cab8d8f035247c3efd16c687be154c3fa9efa" +"checksum crossbeam-channel 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cced8691919c02aac3cb0a1bc2e9b73d89e832bf9a06fc579d4e71b68a2da061" "checksum crossbeam-utils 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)" = "04973fa96e96579258a5091af6003abde64af786b860f18622b82e026cca60e6" -"checksum crossbeam-utils 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce446db02cdc3165b94ae73111e570793400d0794e46125cc4056c81cbb039f4" +"checksum crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" "checksum crypto-hash 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8a77162240fd97248d19a564a565eb563a3f592b386e4136fb300909e67dddca" "checksum ct-logs 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4d3686f5fa27dbc1d76c751300376e167c5a43387f44bb451fd1c24776e49113" -"checksum curl 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "06aa71e9208a54def20792d877bc663d6aae0732b9852e612c4a933177c31283" -"checksum curl-sys 0.4.25 (registry+https://github.com/rust-lang/crates.io-index)" = "0c38ca47d60b86d0cc9d42caa90a0885669c2abc9791f871c81f58cdf39e979b" -"checksum derivative 1.0.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "942ca430eef7a3806595a6737bc388bf51adb888d3fc0dd1b50f1c170167ee3a" +"checksum curl 0.4.29 (registry+https://github.com/rust-lang/crates.io-index)" = "762e34611d2d5233a506a79072be944fddd057db2f18e04c0d6fa79e3fd466fd" +"checksum curl-sys 0.4.31+curl-7.70.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dcd62757cc4f5ab9404bc6ca9f0ae447e729a1403948ce5106bd588ceac6a3b0" +"checksum derivative 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3c6d883546668a3e2011b6a716a7330b82eabb0151b138217f632c8243e17135" +"checksum derivative 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cb582b60359da160a9477ee80f15c8d784c477e69c217ef2cdd4169c24ea380f" "checksum digest 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" "checksum dir-diff 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2860407d7d7e2e004bb2128510ad9e8d669e76fa005ccf567977b5d71b8b4a0b" "checksum dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" @@ -3515,18 +3672,20 @@ dependencies = [ "checksum encoding_rs 0.8.22 (registry+https://github.com/rust-lang/crates.io-index)" = "cd8d03faa7fe0c1431609dfad7bbe827af30f82e1e2ae6f7ee4fca6bd764bc28" "checksum env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)" = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38" "checksum env_logger 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aafcde04e90a5226a6443b7aabdb016ba2f8307c847d524724bd9b346dd1a2d3" -"checksum errno 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c2a071601ed01b988f896ab14b95e67335d1eeb50190932a1320f7fe3cadc84e" +"checksum errno 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b480f641ccf0faf324e20c1d3e53d81b7484c698b42ea677f6907ae4db195371" "checksum errno-dragonfly 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "14ca354e36190500e1e1fb267c647932382b54053c50b14970856c0b00a35067" -"checksum failure 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f8273f13c977665c5db7eb2b99ae520952fe5ac831ae4cd09d80c4c7042b5ed9" -"checksum failure_derive 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0bc225b78e0391e4b8683440bf2e63c2deeeb2ce5189eab46e2b68c6d3725d08" +"checksum failure 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" +"checksum failure_derive 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" "checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" -"checksum filetime 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d" +"checksum filetime 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f59efc38004c988e4201d11d263b8171f49a2e7ec0bdbb71773433f271504a5e" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" -"checksum flate2 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6bd6d6f4752952feb71363cffc9ebac9411b75b87c6ab6058c40c8900cf43c0f" +"checksum flate2 
1.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2cfff41391129e0a856d6d822600b8d71179d46879e310417eb9c762eb178b42" "checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" "checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +"checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" +"checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" @@ -3537,6 +3696,7 @@ dependencies = [ "checksum futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f25592f769825e89b92358db00d26f965761e094951ac44d3663ef25b7ac464a" "checksum futures-executor 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f674f3e1bcb15b37284a90cedf55afdba482ab061c407a9c0ebbd0f3109741ba" "checksum futures-io 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a638959aa96152c7a4cddf50fcb1e3fede0583b27157c26e67d6f99904090dc6" +"checksum futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bd5658075ca5ae3918993c5bc95b43fcf22f927227660556a947da598f9f8981" "checksum futures-macro 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5081aa3de1f7542a794a397cde100ed903b0630152d0973479018fd85423a7" "checksum futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3466821b4bc114d95b087b850a724c6f83115e929bc88f1fa98a3304a944c8a6" "checksum futures-task 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0a34e53cf6cdcd0178aa573aed466b646eb3db769570841fda0c7ede375a27" @@ -3548,41 +3708,43 @@ dependencies = [ "checksum git2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7339329bfa14a00223244311560d11f8f489b453fb90092af97f267a6090ab0" "checksum git2-curl 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d58551e903ed7e2d6fe3a2f3c7efa3a784ec29b19d0fbb035aaf0497c183fbdd" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" -"checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2" +"checksum globset 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7ad1da430bd7281dde2576f44c84cc3f0f7b475e7202cd503042dff01a8c8120" "checksum grpcio 0.3.0 
(git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)" = "" "checksum grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a63ccc27b0099347d2bea2c3d0f1c79c018a13cfd08b814a1992e341b645d5e1" "checksum grpcio-sys 0.2.3 (git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)" = "" "checksum h2 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" -"checksum h2 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9d5c295d1c0c68e4e42003d75f908f5e16a1edd1cbe0b0d02e4dc2006a384f47" +"checksum h2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "377038bf3c89d18d6ca1431e7a5027194fbd724ca10592b9487ede5e8e144f42" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" -"checksum hermit-abi 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eff2656d88f158ce120947499e971d743c05dbcbed62e5bd2f38f1698bbc3772" +"checksum hermit-abi 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "61565ff7aaace3525556587bd2dc31d4a07071957be715e63ce7b1eccf51a8f4" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" "checksum home 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "29302b90cfa76231a757a887d1e3153331a63c7f80b6c75f86366334cbe70708" "checksum http 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" -"checksum http 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b708cc7f06493459026f53b9a61a7a121a5d1ec6238dee58ea4941132b30156b" +"checksum http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "28d569972648b2c512421b5f2a405ad6ac9666547189d0c5477a3f200f3e02f9" "checksum http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" "checksum httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "cd179ae861f0c2e53da70d892f5f3029f9594be0c41dc5269cd371691b1dc2f9" "checksum humantime 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "df004cfca50ef23c36850aaaa59ad52cc70d0e90243c3c7737a4dd32dc7a3c4f" -"checksum hyper 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ed6081100e960d9d74734659ffc9cc91daf1c0fc7aceb8eaa94ee1a3f5046f2e" +"checksum hyper 0.13.5 (registry+https://github.com/rust-lang/crates.io-index)" = "96816e1d921eca64d208a85aab4f7798455a8e34229ee5a88c935bdee1b78b14" "checksum hyper-rustls 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08" "checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" "checksum idna 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "02e2673c30ee86b5b96a9cb52ad15718aa1f966f5ab9ad54a8b95d5ca33120a9" -"checksum ignore 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "522daefc3b69036f80c7d2990b28ff9e0471c683bad05ca258e0a01dd22c5a1e" +"checksum ignore 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)" = "ddf60d063dbe6b75388eec66cfc07781167ae3d34a09e0c433e6c5de0511f7fb" "checksum im-rc 12.3.4 
(registry+https://github.com/rust-lang/crates.io-index)" = "e882e6e7cd335baacae574b56aa3ce74844ec82fc6777def7c0ac368837dc3d5" "checksum indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "076f042c5b7b98f31d205f1249267e12a6518c1481e9dae9764af19b707d2292" +"checksum inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24e40d6fd5d64e2082e0c796495c8ef5ad667a96d03e5aaa0becfd9d47bcbfb8" +"checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" "checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" "checksum itertools 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" "checksum itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b8b7a7c0c47db5545ed3fef7468ee7bb5b74691498139e4b3f6a20685dc6dd8e" "checksum jobserver 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" -"checksum js-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)" = "7889c7c36282151f6bf465be4700359318aef36baa951462382eae49e9577cf9" +"checksum js-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)" = "fa5a448de267e7358beaf4a5d849518fe9a0c13fce7afd44b06e68550e5562a7" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" "checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f" -"checksum libc 0.2.66 (registry+https://github.com/rust-lang/crates.io-index)" = "d515b1f41455adea1313a4a2ac8a8a477634fbae63cc6100e3aebb207ce61558" +"checksum libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)" = "99e85c08494b21a9054e7fe1374a732aeadaff3980b6990b94bfd3a70f690005" "checksum libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "48441cb35dc255da8ae72825689a95368bf510659ae1ad55dc4aa88cb1789bf1" -"checksum libnghttp2-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02254d44f4435dd79e695f2c2b83cd06a47919adea30216ceaf0c57ca0a72463" -"checksum libssh2-sys 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "36aa6e813339d3a063292b77091dfbbb6152ff9006a459895fa5bebed7d34f10" +"checksum libnghttp2-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b359f5ec8106bc297694c9a562ace312be2cfd17a5fc68dc12249845aa144b11" +"checksum libssh2-sys 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "d45f516b9b19ea6c940b9f36d36734062a153a2b4cc9ef31d82c54bb9780f525" "checksum libz-sys 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb5e43362e38e2bca2fd5f5134c4d4564a23a5c28e9b95411652021a8675ebe" "checksum lmdb 0.8.0 (git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)" = "" "checksum lmdb-sys 0.8.0 
(git+https://github.com/pantsbuild/lmdb-rs.git?rev=06bdfbfc6348f6804127176e561843f214fc17f8)" = "" @@ -3592,20 +3754,21 @@ dependencies = [ "checksum maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" "checksum maybe-uninit 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" -"checksum memchr 2.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "53445de381a1f436797497c61d851644d0e8e88e6140f22872ad33a704933978" +"checksum memchr 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" "checksum mime 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" -"checksum mime_guess 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1a0ed03949aef72dbdf3116a383d7b38b4768e6f960528cd6a6044aa9ed68599" +"checksum mime_guess 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2684d4c2e97d99848d30b324b00c8fcc7e5c897b7cbb5819b09e7c90e8baf212" "checksum miniz_oxide 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "aa679ff6578b1cddee93d7e82e263b94a575e0bfced07284eb0c037c1d2416a5" -"checksum mio 0.6.21 (registry+https://github.com/rust-lang/crates.io-index)" = "302dec22bcf6bae6dfb69c647187f4b4d0fb6f535521f7bc022430ce8e12008f" +"checksum mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)" = "fce347092656428bc8eaf6201042cb551b8d67855af7374542a92a0fbfcac430" +"checksum mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" "checksum mio-named-pipes 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "f5e374eff525ce1c5b7687c4cef63943e7686524a387933ad27ca7ec43779cb3" -"checksum mio-uds 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "966257a94e196b11bb43aca423754d87429960a768de9414f3691d6957abf125" +"checksum mio-uds 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0" "checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919" "checksum miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "396aa0f2003d7df8395cb93e09871561ccc3e785f0acb369170e8cc74ddf9226" "checksum mktemp 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "77001ceb9eed65439f3dc2a2543f9ba1417d912686bf224a7738d0966e6dcd69" "checksum multimap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb04b9f127583ed176e163fb9ec6f3e793b87e21deedd5734a69386a18a0151" -"checksum nails 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bbf022f659381fd767684f3f1d46b55a20b3d0902d4c722f9f78589d8afa4156" -"checksum net2 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "42550d9fb7b6684a6d404d9fa7250c2eb2646df731d1c06afc06dcee9e1bcf88" -"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6" +"checksum nails 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"7d0a901335354f0d61e36d04729f735b38d88d7c3dd9c09a02c66501fc6d7c0d" +"checksum net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)" = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7" +"checksum notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)" = "" "checksum num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e" "checksum num-bigint 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1" "checksum num-complex 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656" @@ -3613,49 +3776,56 @@ dependencies = [ "checksum num-iter 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "dfb0800a0291891dd9f4fe7bd9c19384f98f7fbe0cd0f39a2c6b88b9868bbc00" "checksum num-rational 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "ee314c74bd753fc86b4780aa9475da469155f3848473a261d2d18e35245a784e" "checksum num-traits 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "c62be47e61d1842b9170f0fdeec8eba98e60e90e5446449a0545e5152acd7096" -"checksum num_cpus 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "46203554f085ff89c235cd12f7075f3233af9b11ed7c9e16dfe2560d03313ce6" +"checksum num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" "checksum num_enum 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4c7147f4bb0e36282bf8e78c85721eb30c1c449afedceef660e7b996fc9c34fa" +"checksum num_enum 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca565a7df06f3d4b485494f25ba05da1435950f4dc263440eda7a6fa9b8e36e4" +"checksum num_enum_derive 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ffa5a33ddddfee04c0283a7653987d634e880347e96b5b2ed64de07efb59db9d" "checksum numtoa 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b8f8bdf33df195859076e54ab11ee78a1b208382d3a26ec40d142ffc1ecc49ef" +"checksum once_cell 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1c601810575c99596d4afc46f78a678c80105117c379eb3650cf99b8a21ce5b" "checksum opaque-debug 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" "checksum opener 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "04b1d6b086d9b3009550f9b6f81b10ad9428cf14f404b8e1a3a06f6f012c8ec9" -"checksum openssl 0.10.28 (registry+https://github.com/rust-lang/crates.io-index)" = "973293749822d7dd6370d6da1e523b0d1db19f06c459134c658b2a4261378b52" +"checksum openssl 0.10.29 (registry+https://github.com/rust-lang/crates.io-index)" = "cee6d85f4cb4c4f59a6a85d5b68a233d280c82e29e822913b9c8b129fbf20bdd" "checksum openssl-probe 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" -"checksum openssl-sys 0.9.54 (registry+https://github.com/rust-lang/crates.io-index)" = "1024c0a59774200a555087a6da3f253a9095a5f344e353b212ac4c8b8e450986" +"checksum openssl-sys 0.9.55 (registry+https://github.com/rust-lang/crates.io-index)" = "7717097d810a0f2e2323f9e5d11e71608355e24828410b55b9d4f18aa5f9a5d8" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = 
"a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" -"checksum owning_ref 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49a4b8ea2179e6a2e27411d3bca09ca6dd630821cf6894c6c7c8467a8ee7ef13" +"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" +"checksum owning_ref 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6ff55baddef9e4ad00f88b6c743a2a8062d4c6ade126c2a528644b8e444d52ce" +"checksum parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "149d8f5b97f3c1133e3cfcd8886449959e856b557ff281e292b733d7c69e005e" "checksum parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f0802bff09003b291ba756dc7e79313e51cc31667e94afbe847def490424cde5" +"checksum parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa" "checksum parking_lot_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad7f7e6ebdc79edff6fdcb87a55b620174f7a989e3eb31b65231f4af57f00b8c" "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" "checksum percent-encoding 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" "checksum petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9c3659d1ee90221741f65dd128d9998311b0e40c5d3c23a62445938214abce4f" -"checksum pin-project 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7804a463a8d9572f13453c516a5faea534a2403d7ced2f0c7e100eeff072772c" -"checksum pin-project-internal 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "385322a45f2ecf3410c68d2a549a4a2685e8051d0f278e39743ff4e451cb9b3f" +"checksum pin-project 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "36e3dcd42688c05a66f841d22c5d8390d9a5d4c9aaf57b9285eae4900a080063" +"checksum pin-project-internal 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "f4d7346ac577ff1296e06a418e7618e22655bae834d4970cb6e39d6da8119969" "checksum pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "237844750cfbb86f67afe27eee600dfbbcb6188d734139b534cbfbf4f96792ae" -"checksum pin-utils 0.1.0-alpha.4 (registry+https://github.com/rust-lang/crates.io-index)" = "5894c618ce612a3fa23881b152b608bafb8c56cfc22f434a3ba3120b40f7b587" +"checksum pin-utils 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" "checksum pkg-config 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)" = "05da548ad6865900e60eaba7f589cc0783590a92e940c26953ff81ddbab2d677" "checksum ppv-lite86 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" -"checksum proc-macro-hack 0.5.11 (registry+https://github.com/rust-lang/crates.io-index)" = "ecd45702f76d6d3c75a80564378ae228a85f0b59d2f3ed43c91b4a69eb2ebfc5" -"checksum proc-macro-nested 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "369a6ed065f249a159e06c45752c780bda2fb53c995718f9e484d08daa9eb42e" +"checksum proc-macro-crate 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e10d4b51f154c8a7fb96fd6dad097cb74b863943ec010ac94b9fd1be8861fe1e" +"checksum 
proc-macro-hack 0.5.15 (registry+https://github.com/rust-lang/crates.io-index)" = "0d659fe7c6d27f25e9d80a1a094c223f5246f6a6596453e09d7229bf42750b63" +"checksum proc-macro-nested 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8e946095f9d3ed29ec38de908c22f95d9ac008e424c7bcae54c75a79c527c694" "checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -"checksum proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548" +"checksum proc-macro2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "8872cf6f48eee44265156c111456a700ab3483686b3f96df4cf5481c89157319" "checksum prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f36c478cd43382388dfc3a3679af175c03d19ed8039e79a3e4447e944cd3f3" "checksum prost-build 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6325275b85605f58f576456a47af44417edf5956a6f670bb59fbe12aff69597" "checksum prost-derive 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9787d1977ea72e8066d58e46ae66100324a2815e677897fe78dfe54958f48252" "checksum prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5644c57d56bc085f9570e113495c1f08d7185beca700dcc296cb4672f380a679" "checksum protobuf 2.0.6 (git+https://github.com/pantsbuild/rust-protobuf?rev=171611c33ec92f07e1b7107327f6d0139a7afebf)" = "" "checksum protobuf-codegen 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c12a571137dc99703cb46fa21f185834fc5578a65836573fcff127f7b53f41e1" -"checksum protoc 2.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9fd83d2547a9e2c8bc6016607281b3ec7ef4871c55be6930915481d80350ab88" +"checksum protoc 2.14.0 (registry+https://github.com/rust-lang/crates.io-index)" = "349d80967ee438cd03ccd236d548d4dcd5f2d9349acda206bef1490a826165d3" "checksum protoc-grpcio 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b0292d93a536174ff6bafe8b5e8534aeeb2b039146bae59770c07f4d2c2458c9" "checksum quick-error 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" -"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" +"checksum quote 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4c1f4b0efa5fc5e8ceb705136bfee52cfdb6a4e3509f770b478cd6ed434232a7" "checksum rand 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" "checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" "checksum rand 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c618c47cd3ebd209790115ab837de41425723956ad3ce2e6a7f09890947cacb9" "checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" "checksum rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" "checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" +"checksum rand_chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" "checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" @@ -3670,11 +3840,11 @@ dependencies = [ "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" "checksum redox_users 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "09b23093265f8d200fa7b4c2c76297f47e681c655f6f1285a8780d6a022f7431" -"checksum regex 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "322cf97724bea3ee221b78fe25ac9c46114ebb51747ad5babd51a2fc6a8235a8" -"checksum regex-syntax 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)" = "b28dfe3fe9badec5dbf0a79a9cccad2cfc2ab5484bdb3e44cbd1ae8b3ba2be06" +"checksum regex 1.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a6020f034922e3194c711b82a627453881bc4682166cabb07134a10c26ba7692" +"checksum regex-syntax 0.6.17 (registry+https://github.com/rust-lang/crates.io-index)" = "7fe5bd57d1d7414c6b5ed48563a2c855d995ff777729dcd91c369ec7fea395ae" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum reqwest 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "02b81e49ddec5109a9dcfc5f2a317ff53377c915e9ae9d4f2fb50914b85614e2" -"checksum ring 0.16.11 (registry+https://github.com/rust-lang/crates.io-index)" = "741ba1704ae21999c00942f9f5944f801e977f54302af346b596287599ad1862" +"checksum ring 0.16.13 (registry+https://github.com/rust-lang/crates.io-index)" = "703516ae74571f24b465b4a1431e81e2ad51336cb0ded733a55a1aa3eccac196" "checksum rust-argon2 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2bc8af4bda8e1ff4932523b94d3dd20ee30a87232323eda55903ffd71d2fb017" "checksum rustc-demangle 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)" = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" @@ -3683,31 +3853,30 @@ dependencies = [ "checksum rustfix 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7150ac777a2931a53489f5a41eb0937b84e3092a20cd0e73ad436b65b507f607" "checksum rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1" "checksum rustls-native-certs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a75ffeb84a6bd9d014713119542ce415db3a3e4748f0bfce1e1416cd224a23a5" -"checksum ryu 
1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfa8506c1de11c9c4e4c38863ccbe02a305c8188e85a05a784c9e11e1c3910c8" +"checksum ryu 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ed3d612bc64430efeb3f7ee6ef26d590dce0c43249217bddc62112540c7941e1" "checksum same-file 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -"checksum schannel 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "507a9e6e8ffe0a4e0ebb9a10293e62fdf7657c06f1b8bb07a8fcf697d2abf295" +"checksum schannel 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "039c25b130bd8c1321ee2d7de7fde2659fa9c2744e4bb29711cfc852ea53cd19" "checksum scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "94258f53601af11e6a49f722422f6e3425c52b06245a5cf9bc09908b174f5e27" "checksum sct 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" -"checksum security-framework 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "97bbedbe81904398b6ebb054b3e912f99d55807125790f3198ac990d98def5b0" -"checksum security-framework-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "06fd2f23e31ef68dd2328cc383bd493142e46107a3a0e24f7d734e3f3b80fe4c" +"checksum security-framework 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3f331b9025654145cd425b9ded0caf8f5ae0df80d418b326e2dc1c3dc5eb0620" +"checksum security-framework-sys 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "17bf11d99252f512695eb468de5516e5cf75455521e69dfe343f3b74e4748405" "checksum semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" -"checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449" -"checksum serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64" +"checksum serde 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)" = "36df6ac6412072f67cf767ebbde4133a5b2e88e76dc6187fa7104cd16f783399" +"checksum serde_derive 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)" = "9e549e3abf4fb8621bd1609f11dfc9f5e50320802273b12f3811a67e6716ea6c" "checksum serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "190e9765dcedb56be63b6e0993a006c7e3b071a016a304736e4a315dc01fb142" -"checksum serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "9371ade75d4c2d6cb154141b9752cf3781ec9c05e0e5cf35060e1e70ee7b9c25" -"checksum serde_test 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "33f96dff8c3744387b53404ea33e834073b0791dcc1ea9c85b805745f9324704" +"checksum serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)" = "a7894c8ed05b7a3a279aeb79025fdec1d3158080b75b98a08faf2806bb799edd" +"checksum serde_test 1.0.106 (registry+https://github.com/rust-lang/crates.io-index)" = "48f80694c19c256c75e68ef3240d078e73d7ed56e0f14300656f8b7b000f3852" "checksum serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9ec5d77e2d4c73717816afac02670d5c4f534ea95ed430442cad02e7a6e32c97" 
"checksum sha2 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "27044adfd2e1f077f649f59deb9490d3941d674002f7d062870a60ebe9bd47a0" "checksum shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "170a13e64f2a51b77a45702ba77287f5c6829375b04a69cf2222acd17d0cfab9" "checksum signal-hook-registry 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94f478ede9f64724c5d173d7bb56099ec3e2d9fc2774aac65d34b8b890405f41" -"checksum simplelog 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "05a3e303ace6adb0a60a9e9e2fbc6a33e1749d1e43587e2125f7efa9c5e107c5" +"checksum simplelog 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "3cf9a002ccce717d066b3ccdb8a28829436249867229291e91b25d99bd723f0d" "checksum sized-chunks 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9d3e7f23bad2d6694e0f46f5e470ec27eb07b8f3e8b309a4b0dc17501928b9f2" "checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" "checksum smallvec 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0758c52e15a8b5e3691eae6cc559f08eee9406e548a4477ba4e67770a82b6" -"checksum smallvec 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5c2fb2ec9bcd216a5b0d0ccf31ab17b5ed1d627960edff65bbe95d3ce221cefc" -"checksum socket2 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "e8b74de517221a2cb01a53349cf54182acdc31a074727d3079068448c0676d85" -"checksum sourcefile 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "4bf77cb82ba8453b42b6ae1d692e4cdc92f9a47beaf89a847c8be83f4e328ad3" +"checksum smallvec 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7cb5678e1615754284ec264d9bb5b4c27d2018577fd90ac0ceb578591ed5ee4" +"checksum socket2 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "03088793f677dce356f3ccc2edb1b314ad191ab702a5de3faf49304f7e104918" "checksum spectral 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae3c15181f4b14e52eeaac3efaeec4d2764716ce9c86da0c934c3e318649c5ba" "checksum spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" "checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8" @@ -3717,7 +3886,7 @@ dependencies = [ "checksum structopt-derive 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "53010261a84b37689f9ed7d395165029f9cc7abb9f56bbfe86bee2597ed25107" "checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" "checksum syn 0.15.44 (registry+https://github.com/rust-lang/crates.io-index)" = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" -"checksum syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "af6f3550d8dff9ef7dc34d384ac6f107e5d31c8f57d9f28e0081503f547ac8f5" +"checksum syn 1.0.18 (registry+https://github.com/rust-lang/crates.io-index)" = "410a7488c0a728c7ceb4ad59b9567eb4053d02e8cc7f5c0e0eeeb39518369213" "checksum synstructure 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "67656ea1dc1b41b1451851562ea232ec2e5a80242139f7e679ceccfb5d61f545" "checksum tar 0.4.26 (registry+https://github.com/rust-lang/crates.io-index)" = "b3196bfbffbba3e57481b6ea32249fbaf590396a52505a2615adbb79d9d826d3" "checksum 
tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" @@ -3728,14 +3897,18 @@ dependencies = [ "checksum textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" "checksum thread-scoped 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bcbb6aa301e5d3b0b5ef639c9a9c7e2f1c944f177b460c04dc24c69b1fa2bd99" "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" -"checksum time 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "db8dcfca086c1143c9270ac42a2bbd8a7ee477b78ac8e45b19abfb0cbede4b6f" -"checksum tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "0fa5e81d6bc4e67fe889d5783bd2a128ab2e0cfa487e0be16b6a8d177b101616" +"checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" +"checksum tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "05c1d570eb1a36f0345a5ce9c6c6e665b70b73d11236912c0b477616aeec47b1" "checksum tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)" = "" +"checksum tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e" +"checksum tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" "checksum tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" -"checksum tokio-macros 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f4b1e7ed7d5d4c2af3d999904b0eebe76544897cdbfb2b9684bed2174ab20f7c" +"checksum tokio-macros 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c3acc6aa564495a0f2e1d59fab677cd7f81a19994cfc7f3ad0e64301560389" "checksum tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4adb8b3e5f86b707f1b54e7c15b6de52617a823608ccda98a15d3a24222f265a" "checksum tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "571da51182ec208780505a32528fc5512a8fe1443ab960b3f2f3ef093cd16930" +"checksum tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be8242891f2b6cbef26a2d7e8605133c2c554cd35b3e4948ea892d6d68436499" "checksum toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "758664fc71a3a69038656bee8b6be6477d2a6c315a6b81f7081f591bffa4111f" +"checksum toml 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ffc92d160b1eef40665be3a05630d003936a3bc7da7421277846c2613e92c71a" "checksum tower-add-origin 0.1.0 (git+https://github.com/pantsbuild/tower-http?rev=56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00)" = "" "checksum tower-direct-service 0.1.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)" = "" "checksum tower-grpc 0.1.0 (git+https://github.com/pantsbuild/tower-grpc.git?rev=ef19f2e1715f415ecb699e8f17f5845ad2b45daf)" = "" @@ -3746,7 +3919,7 @@ dependencies = [ "checksum tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" "checksum tower-util 0.1.0 
(git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)" = "" "checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" -"checksum typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6d2783fe2d6b8c1101136184eb41be8b1ad379e4657050b8aaff0c79ee7575f9" +"checksum typenum 1.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" "checksum uname 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b72f89f0ca32e4db1c04e2a72f5345d59796d4866a1ee0609084569f73683dc8" "checksum unicase 2.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" @@ -3755,7 +3928,7 @@ dependencies = [ "checksum unicode-width 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "caaa9d531767d1ff2150b9332433f32a24622147e5ebb1f26409d5da67afd479" "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" "checksum unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e7639553986605ec5979c7dd957c7895e93eabed50ab2ffa7f6128a75097c" -"checksum untrusted 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60369ef7a31de49bcb3f6ca728d4ba7300d9a1658f94c727d4cab8c8d9f4aece" +"checksum untrusted 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" "checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" "checksum url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "829d4a8476c35c9bf0bbce5a3b23f4106f79728039b726d292bb93bc106787cb" "checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" @@ -3763,29 +3936,26 @@ dependencies = [ "checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3fc439f2794e98976c88a2a2dafce96b930fe8010b0a256b3c2199a773933168" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" -"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" "checksum walkdir 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c66c0b9792f0a765345452775f3adbd28dde9d33f30d13e5dcc5ae17cf6f3780" "checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" "checksum want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" "checksum wasi 0.9.0+wasi-snapshot-preview1 
(registry+https://github.com/rust-lang/crates.io-index)" = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -"checksum wasm-bindgen 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "5205e9afdf42282b192e2310a5b463a6d1c1d774e30dc3c791ac37ab42d2616c" -"checksum wasm-bindgen-backend 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "11cdb95816290b525b32587d76419facd99662a07e59d3cdb560488a819d9a45" -"checksum wasm-bindgen-futures 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8bbdd49e3e28b40dec6a9ba8d17798245ce32b019513a845369c641b275135d9" -"checksum wasm-bindgen-macro 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "574094772ce6921576fb6f2e3f7497b8a76273b6db092be18fc48a082de09dc3" -"checksum wasm-bindgen-macro-support 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "e85031354f25eaebe78bb7db1c3d86140312a911a106b2e29f9cc440ce3e7668" -"checksum wasm-bindgen-shared 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "f5e7e61fc929f4c0dddb748b102ebf9f632e2b8d739f2016542b4de2965a9601" -"checksum wasm-bindgen-webidl 0.2.58 (registry+https://github.com/rust-lang/crates.io-index)" = "ef012a0d93fc0432df126a8eaf547b2dce25a8ce9212e1d3cbeef5c11157975d" -"checksum web-sys 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)" = "aaf97caf6aa8c2b1dac90faf0db529d9d63c93846cca4911856f78a83cebf53b" +"checksum wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "e3c7d40d09cdbf0f4895ae58cf57d92e1e57a9dd8ed2e8390514b54a47cc5551" +"checksum wasm-bindgen-backend 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "c3972e137ebf830900db522d6c8fd74d1900dcfc733462e9a12e942b00b4ac94" +"checksum wasm-bindgen-futures 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "8a369c5e1dfb7569e14d62af4da642a3cbc2f9a3652fe586e26ac22222aa4b04" +"checksum wasm-bindgen-macro 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "2cd85aa2c579e8892442954685f0d801f9129de24fa2136b2c6a539c76b65776" +"checksum wasm-bindgen-macro-support 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "8eb197bd3a47553334907ffd2f16507b4f4f01bbec3ac921a7719e0decdfe72a" +"checksum wasm-bindgen-shared 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)" = "a91c2916119c17a8e316507afaaa2dd94b47646048014bbdf6bef098c1bb58ad" +"checksum web-sys 0.3.39 (registry+https://github.com/rust-lang/crates.io-index)" = "8bc359e5dd3b46cb9687a051d50a2fdd228e4ba7cf6fcf861a5365c3d671a642" "checksum webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1f50e1972865d6b1adb54167d1c8ed48606004c2c9d0ea5f1eeb34d95e863ef" "checksum webpki-roots 0.18.0 (registry+https://github.com/rust-lang/crates.io-index)" = "91cd5736df7f12a964a5067a12c62fa38e1bd8080aff1f80bc29be7c80d19ab4" -"checksum weedle 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3bb43f70885151e629e2a19ce9e50bd730fd436cfd4b666894c9ce4de9141164" "checksum which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b57acb10231b9493c8472b20cb57317d0679a49e0bdbee44b3b803a6473af164" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "8093091eeb260906a183e6ae1abdba2ef5ef2257a21801128899c3fc699229c6" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfbf554c6ad11084fb7517daca16cfdcaccbdadba4fc336f032a8b12c2ad80" +"checksum winapi-util 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" "checksum winreg 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b2986deb581c4fe11b621998a5e53361efe6b48a151178d0cd9eeffa4dc6acc9" "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" diff --git a/src/rust/engine/Cargo.toml b/src/rust/engine/Cargo.toml index 232d4856993..4b32107f994 100644 --- a/src/rust/engine/Cargo.toml +++ b/src/rust/engine/Cargo.toml @@ -81,14 +81,17 @@ default-members = [ ] [dependencies] +async_semaphore = { path = "async_semaphore" } async-trait = "0.1" boxfuture = { path = "boxfuture" } bytes = "0.4.5" concrete_time = { path = "concrete_time" } +crossbeam-channel = "0.3" fnv = "1.0.5" fs = { path = "fs" } futures01 = { package = "futures", version = "0.1" } futures = { version = "0.3", features = ["compat"] } +futures-locks = "0.3.0" graph = { path = "graph" } hashing = { path = "hashing" } indexmap = "1.0.2" @@ -98,6 +101,11 @@ log = "0.4" logging = { path = "logging" } num_cpus = "1" num_enum = "0.4" +# notify is currently an experimental API, we are pinning to https://docs.rs/notify/5.0.0-pre.1/notify/ +# because the latest prerelease at time of writing has removed the debounced watcher which we would like to use. +# The author suggests they will add the debounced watcher back into the stable 5.0.0 release. When that happens +# we can move to it. +notify = { git = "https://github.com/notify-rs/notify", rev = "fba00891d9105e2f581c69fbe415a58cb7966fdd" } parking_lot = "0.6" process_execution = { path = "process_execution" } rand = "0.6" @@ -115,6 +123,11 @@ url = "2.1" uuid = { version = "0.7", features = ["v4"] } workunit_store = { path = "workunit_store" } +[dev-dependencies] +testutil = { path = "./testutil" } +fs = { path = "./fs" } +env_logger = "0.5.4" + [patch.crates-io] # TODO: Remove patch when we can upgrade to an official released version of protobuf with a fix. # See: https://github.com/pantsbuild/pants/issues/7760 for context. diff --git a/src/rust/engine/graph/src/entry.rs b/src/rust/engine/graph/src/entry.rs index 5f2627c3b8b..c8254a73edd 100644 --- a/src/rust/engine/graph/src/entry.rs +++ b/src/rust/engine/graph/src/entry.rs @@ -18,10 +18,10 @@ use boxfuture::{BoxFuture, Boxable}; /// the Node was `cleared`), the work is discarded. See `Entry::complete` for more information. /// #[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub(crate) struct RunToken(u32); +pub struct RunToken(u32); impl RunToken { - fn initial() -> RunToken { + pub fn initial() -> RunToken { RunToken(0) } @@ -40,10 +40,10 @@ impl RunToken { /// incremented when the output of a node has changed. 
/// #[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub(crate) struct Generation(u32); +pub struct Generation(u32); impl Generation { - fn initial() -> Generation { + pub fn initial() -> Generation { Generation(0) } @@ -65,7 +65,7 @@ impl Generation { /// If the value is Clean, the consumer can simply use the value as-is. /// #[derive(Clone, Debug)] -pub(crate) enum EntryResult { +pub enum EntryResult { Clean(Result), Dirty(Result), Uncacheable( @@ -118,7 +118,7 @@ impl AsRef> for EntryResult { #[allow(clippy::type_complexity)] #[derive(Debug)] -pub(crate) enum EntryState { +pub enum EntryState { // A node that has either been explicitly cleared, or has not yet started Running. In this state // there is no need for a dirty bit because the RunToken is either in its initial state, or has // been explicitly incremented when the node was cleared. @@ -174,7 +174,7 @@ pub struct Entry { // maps is painful. node: N, - state: Arc>>, + pub state: Arc>>, } impl Entry { diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs index 139968308f4..51a60a546d8 100644 --- a/src/rust/engine/graph/src/lib.rs +++ b/src/rust/engine/graph/src/lib.rs @@ -29,10 +29,12 @@ use hashing; use petgraph; -mod entry; +// make the entry module public for testing purposes. We use it to contruct mock +// graph entries in the notify watch tests. +pub mod entry; mod node; -pub use crate::entry::Entry; +pub use crate::entry::{Entry, EntryState}; use crate::entry::{Generation, RunToken}; use std::collections::binary_heap::BinaryHeap; @@ -1009,6 +1011,43 @@ impl Graph { } } +// This module provides a trait which contains functions that +// should only be used in tests. A user must explicitly import the trait +// to use the extra test functions, and they should only be imported into +// test modules. +pub mod test_support { + use super::{EntryId, EntryState, Graph, Node}; + pub trait TestGraph { + fn set_fixture_entry_state_for_id(&self, id: EntryId, state: EntryState); + fn add_fixture_entry(&self, node: N) -> EntryId; + fn entry_state(&self, id: EntryId) -> &str; + } + impl TestGraph for Graph { + fn set_fixture_entry_state_for_id(&self, id: EntryId, state: EntryState) { + let mut inner = self.inner.lock(); + let entry = inner.entry_for_id_mut(id).unwrap(); + let mut entry_state = entry.state.lock(); + *entry_state = state; + } + + fn add_fixture_entry(&self, node: N) -> EntryId { + let mut inner = self.inner.lock(); + inner.ensure_entry(node) + } + + fn entry_state(&self, id: EntryId) -> &str { + let mut inner = self.inner.lock(); + let entry = inner.entry_for_id_mut(id).unwrap(); + let entry_state = entry.state.lock(); + match *entry_state { + EntryState::Completed { .. } => "completed", + EntryState::Running { .. } => "running", + EntryState::NotStarted { .. } => "not started", + } + } + } +} + /// /// Represents the state of a particular walk through a Graph. Implements Iterator and has the same /// lifetime as the Graph itself. 
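For illustration, a minimal sketch of how a test is expected to consume the new `graph::test_support::TestGraph` trait added above (the module name, test name, and watched path are hypothetical; the real usage appears in `watch_tests.rs` later in this series, and the sketch assumes it lives in the `engine` crate, where `NodeKey` is defined):

```rust
#[cfg(test)]
mod test_support_example {
  // The trait must be imported explicitly; it is intentionally not re-exported
  // from the crate root so production code does not pick it up by accident.
  use graph::test_support::TestGraph;
  use graph::Graph;

  use crate::nodes::{DigestFile, NodeKey};
  use fs::File;
  use std::path::PathBuf;

  #[test]
  fn fixture_entry_starts_not_started() {
    let graph = Graph::new();
    // A DigestFile node for a hypothetical path, mirroring the fixtures used in
    // the notify watch tests.
    let node = NodeKey::DigestFile(DigestFile(File {
      path: PathBuf::from("some/watched_file.txt"),
      is_executable: false,
    }));
    let entry_id = graph.add_fixture_entry(node);
    // A freshly added fixture entry is in the NotStarted state until a state is
    // installed via `set_fixture_entry_state_for_id`.
    assert_eq!(graph.entry_state(entry_id), "not started");
  }
}
```
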
diff --git a/src/rust/engine/sharded_lmdb/src/lib.rs b/src/rust/engine/sharded_lmdb/src/lib.rs index e73f20e1e54..f45953db1d8 100644 --- a/src/rust/engine/sharded_lmdb/src/lib.rs +++ b/src/rust/engine/sharded_lmdb/src/lib.rs @@ -174,7 +174,7 @@ impl ShardedLmdb { /// pub fn store_bytes<'a, 'b>( &'a self, - fingerprint: Fingerprint, + key: Fingerprint, bytes: Bytes, initial_lease: bool, ) -> BoxFuture<'b, Result<(), String>> { diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index a07c1a499e7..dbd62eaf000 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -17,6 +17,7 @@ use crate::nodes::{NodeKey, WrappedNode}; use crate::scheduler::Session; use crate::tasks::{Rule, Tasks}; use crate::types::Types; +use crate::watch::InvalidationWatcher; use boxfuture::{BoxFuture, Boxable}; use core::clone::Clone; use fs::{safe_create_dir_all_ioerror, PosixFS}; @@ -43,7 +44,7 @@ const GIGABYTES: usize = 1024 * 1024 * 1024; /// https://github.com/tokio-rs/tokio/issues/369 is resolved. /// pub struct Core { - pub graph: Graph, + pub graph: Arc>, pub tasks: Tasks, pub rule_graph: RuleGraph, pub types: Types, @@ -53,6 +54,7 @@ pub struct Core { pub command_runner: Box, pub http_client: reqwest::Client, pub vfs: PosixFS, + pub watcher: InvalidationWatcher, pub build_root: PathBuf, } @@ -232,12 +234,15 @@ impl Core { metadata: process_execution_metadata, }) } + let graph = Arc::new(Graph::new()); + let watcher = + InvalidationWatcher::new(Arc::downgrade(&graph), executor.clone(), build_root.clone())?; let http_client = reqwest::Client::new(); let rule_graph = RuleGraph::new(tasks.as_map(), root_subject_types); Ok(Core { - graph: Graph::new(), + graph: graph, tasks: tasks, rule_graph: rule_graph, types: types, @@ -250,7 +255,8 @@ impl Core { // exceptions, rather than as panics. vfs: PosixFS::new(&build_root, &ignore_patterns, executor) .map_err(|e| format!("Could not initialize VFS: {:?}", e))?, - build_root: build_root, + build_root, + watcher, }) } diff --git a/src/rust/engine/src/core.rs b/src/rust/engine/src/core.rs index 2eb2faa27cb..ae343980a35 100644 --- a/src/rust/engine/src/core.rs +++ b/src/rust/engine/src/core.rs @@ -289,6 +289,7 @@ pub enum Failure { Invalidated, /// A rule raised an exception. 
Throw(Value, String), + FileWatch(String), } impl fmt::Display for Failure { @@ -296,6 +297,7 @@ impl fmt::Display for Failure { match self { Failure::Invalidated => write!(f, "Exhausted retries due to changed files."), Failure::Throw(exc, _) => write!(f, "{}", externs::val_to_str(exc)), + Failure::FileWatch(failure) => write!(f, "{}", failure), } } } diff --git a/src/rust/engine/src/externs.rs b/src/rust/engine/src/externs.rs index 33c9b745a6f..68fcf11e404 100644 --- a/src/rust/engine/src/externs.rs +++ b/src/rust/engine/src/externs.rs @@ -462,6 +462,7 @@ impl From> for PyResult { let val = match f { f @ Failure::Invalidated => create_exception(&format!("{}", f)), Failure::Throw(exc, _) => exc, + Failure::FileWatch(failure) => create_exception(&failure), }; PyResult { is_throw: true, diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index 90cb5465c49..0d6efd13dd4 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -40,6 +40,7 @@ mod scheduler; mod selectors; mod tasks; mod types; +mod watch; pub use crate::context::Core; pub use crate::core::{Function, Key, Params, TypeId, Value}; @@ -49,3 +50,6 @@ pub use crate::scheduler::{ }; pub use crate::tasks::{Rule, Tasks}; pub use crate::types::Types; + +#[cfg(test)] +mod watch_tests; diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index 258fd0f9e51..911a2ba37d9 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -941,7 +941,7 @@ impl NodeVisualizer for Visualizer { let max_colors = 12; match entry.peek(context) { None => "white".to_string(), - Some(Err(Failure::Throw(..))) => "4".to_string(), + Some(Err(Failure::Throw(..))) | Some(Err(Failure::FileWatch(..))) => "4".to_string(), Some(Err(Failure::Invalidated)) => "12".to_string(), Some(Ok(_)) => { let viz_colors_len = self.viz_colors.len(); @@ -962,6 +962,7 @@ impl NodeTracer for Tracer { match result { Some(Err(Failure::Invalidated)) => false, Some(Err(Failure::Throw(..))) => false, + Some(Err(Failure::FileWatch(..))) => false, Some(Ok(_)) => true, None => { // A Node with no state is either still running, or effectively cancelled @@ -986,6 +987,7 @@ impl NodeTracer for Tracer { .join("\n") ), Some(Err(Failure::Invalidated)) => "Invalidated".to_string(), + Some(Err(Failure::FileWatch(failure))) => format!("FileWatch failed: {}", failure), } } } @@ -1067,17 +1069,32 @@ impl Node for NodeKey { scope_task_parent_id(maybe_span_id, async move { let context2 = context.clone(); - let result = match self { - NodeKey::DigestFile(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::DownloadedFile(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::MultiPlatformExecuteProcess(n) => { - n.run(context).map(NodeResult::from).compat().await - } - NodeKey::ReadLink(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::Scandir(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::Select(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::Snapshot(n) => n.run(context).map(NodeResult::from).compat().await, - NodeKey::Task(n) => n.run(context).map(NodeResult::from).compat().await, + let maybe_watch = if let Some(path) = self.fs_subject() { + let abs_path = context.core.build_root.join(path); + context + .core + .watcher + .watch(abs_path) + .map_err(|e| Failure::FileWatch(format!("{:?}", e))) + .await + } else { + Ok(()) + }; + + let result = match maybe_watch { + Ok(()) => match self { + NodeKey::DigestFile(n) => 
n.run(context).map(NodeResult::from).compat().await, + NodeKey::DownloadedFile(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::MultiPlatformExecuteProcess(n) => { + n.run(context).map(NodeResult::from).compat().await + } + NodeKey::ReadLink(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Scandir(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Select(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Snapshot(n) => n.run(context).map(NodeResult::from).compat().await, + NodeKey::Task(n) => n.run(context).map(NodeResult::from).compat().await, + }, + Err(e) => Err(e), }; if let Some(started_workunit) = maybe_started_workunit { let workunit: WorkUnit = started_workunit.finish(); diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index a6374c547dd..3b9b202808c 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -14,6 +14,7 @@ use futures01::future::{self, Future}; use crate::context::{Context, Core}; use crate::core::{Failure, Params, TypeId, Value}; use crate::nodes::{NodeKey, Select, Tracer, Visualizer}; +use crate::watch::InvalidationWatcher; use graph::{Graph, InvalidationResult}; use hashing; use indexmap::IndexMap; @@ -228,22 +229,7 @@ impl Scheduler { /// Invalidate the invalidation roots represented by the given Paths. /// pub fn invalidate(&self, paths: &HashSet) -> usize { - let InvalidationResult { cleared, dirtied } = - self.core.graph.invalidate_from_roots(move |node| { - if let Some(fs_subject) = node.fs_subject() { - paths.contains(fs_subject) - } else { - false - } - }); - // TODO: The rust log level is not currently set correctly in a pantsd context. To ensure that - // we see this even at `info` level, we set it to warn. #6004 should address this by making - // rust logging re-configuration an explicit step in `src/python/pants/init/logging.py`. - warn!( - "invalidation: cleared {} and dirtied {} nodes for: {:?}", - cleared, dirtied, paths - ); - cleared + dirtied + InvalidationWatcher::invalidate(&self.core.graph, paths, "watchman") } /// diff --git a/src/rust/engine/src/watch.rs b/src/rust/engine/src/watch.rs new file mode 100644 index 00000000000..be0b64701f0 --- /dev/null +++ b/src/rust/engine/src/watch.rs @@ -0,0 +1,198 @@ +// Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +// Licensed under the Apache License, Version 2.0 (see LICENSE). + +use std::collections::HashSet; +use std::path::PathBuf; +use std::sync::{Arc, Weak}; +use std::thread; +use std::time::Duration; + +use crossbeam_channel::{self, Receiver, RecvTimeoutError, TryRecvError}; +use log::{debug, error, info, warn}; +use notify::{RecommendedWatcher, RecursiveMode, Watcher}; +//use parking_lot::Mutex; +use futures::compat::Future01CompatExt; +use futures_locks::Mutex; +use process_execution::PlatformConstraint; +use task_executor::Executor; + +use graph::{Graph, InvalidationResult}; +use logging; + +use crate::nodes::NodeKey; + +/// +/// An InvalidationWatcher maintains a Thread that receives events from a notify Watcher. +/// +/// If the spawned Thread exits for any reason, InvalidationWatcher::running() will return False, +/// and the caller should create a new InvalidationWatcher (or shut down, in some cases). Generally +/// this will mean polling. +/// +/// TODO: Need the above polling +/// +/// TODO: To simplify testing the InvalidationWatcher we could create a trait which +/// has an `invalidate_from_roots` method and impl it on the Graph. 
Then we could make the InvalidationWatcher +/// take an argument that implements the trait. +/// Then we wouldn't have to mock out a Graph object in watch_tests.rs. This will probably +/// only be possible when we remove watchman invalidation, when the one code path for invaldation will be +/// the notify background thread. +/// Potential impl here: https://github.com/pantsbuild/pants/pull/9318#discussion_r396005978 +/// +pub struct InvalidationWatcher { + watcher: Arc>, + executor: Executor, + liveness: Receiver<()>, + current_platform: PlatformConstraint, +} + +impl InvalidationWatcher { + pub fn new( + graph: Weak>, + executor: Executor, + build_root: PathBuf, + ) -> Result { + // Inotify events contain canonical paths to the files being watched. + // If the build_root contains a symlink the paths returned in notify events + // wouldn't have the build_root as a prefix, and so we would miss invalidating certain nodes. + // We canonicalize the build_root once so this isn't a problem. + let canonical_build_root = + std::fs::canonicalize(build_root.as_path()).map_err(|e| format!("{:?}", e))?; + let current_platform = PlatformConstraint::current_platform_constraint()?; + let (watch_sender, watch_receiver) = crossbeam_channel::unbounded(); + let mut watcher: RecommendedWatcher = Watcher::new(watch_sender, Duration::from_millis(50)) + .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; + // On darwin the notify API is much more efficient if you watch the build root + // recursively, so we set up that watch here and then return early when watch() is + // called by nodes that are running. On Linux the notify crate handles adding paths to watch + // much more efficiently so we do that instead on Linux. + if current_platform == PlatformConstraint::Darwin { + watcher + .watch(canonical_build_root.clone(), RecursiveMode::Recursive) + .map_err(|e| { + format!( + "Failed to begin recursively watching files in the build root: {}", + e + ) + })? + } + let wrapped_watcher = Arc::new(Mutex::new(watcher)); + + let (thread_liveness_sender, thread_liveness_receiver) = crossbeam_channel::unbounded(); + thread::spawn(move || { + logging::set_thread_destination(logging::Destination::Pantsd); + loop { + let event_res = watch_receiver.recv_timeout(Duration::from_millis(100)); + let graph = if let Some(g) = graph.upgrade() { + g + } else { + // The Graph has been dropped: we're done. + break; + }; + match event_res { + Ok(Ok(ev)) => { + let paths: HashSet<_> = ev + .paths + .into_iter() + .map(|path| { + // relativize paths to build root. + let mut paths_to_invalidate: Vec = vec![]; + let path_relative_to_build_root = { + if path.starts_with(&canonical_build_root) { + path.strip_prefix(&canonical_build_root).unwrap().into() + } else { + path + } + }; + paths_to_invalidate.push(path_relative_to_build_root.clone()); + if let Some(parent_dir) = path_relative_to_build_root.parent() { + paths_to_invalidate.push(parent_dir.to_path_buf()); + } + paths_to_invalidate + }) + .flatten() + .collect(); + debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); + InvalidationWatcher::invalidate(&graph, &paths, "notify"); + } + Ok(Err(err)) => { + if let notify::ErrorKind::PathNotFound = err.kind { + warn!("Path(s) did not exist: {:?}", err.paths); + continue; + } else { + error!("File watcher failing with: {}", err); + break; + } + } + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => { + // The Watcher is gone: we're done. 
+ break; + } + }; + } + debug!("Watch thread exiting."); + // Signal that we're exiting (which we would also do by just dropping the channel). + let _ = thread_liveness_sender.send(()); + }); + + Ok(InvalidationWatcher { + watcher: wrapped_watcher, + executor, + liveness: thread_liveness_receiver, + current_platform, + }) + } + + /// + /// Watch the given path non-recursively. + /// + pub async fn watch(&self, path: PathBuf) -> Result<(), notify::Error> { + // Short circuit here if we are on a Darwin platform because we should be watching + // the entire build root recursively already. + if self.current_platform == PlatformConstraint::Darwin { + Ok(()) + } else { + // Using a futurized mutex here because for some reason using a regular mutex + // to block the io pool causes the v2 ui to not update which nodes its working + // on properly. + let watcher_lock = self.watcher.lock().compat().await; + match watcher_lock { + Ok(mut watcher_lock) => { + self + .executor + .spawn_blocking(move || watcher_lock.watch(path, RecursiveMode::NonRecursive)) + .await + } + Err(()) => Err(notify::Error::new(notify::ErrorKind::Generic( + "Couldn't lock mutex for invalidation watcher".to_string(), + ))), + } + } + } + + /// + /// Returns true if this InvalidationWatcher is still valid: if it is not valid, it will have + /// already logged some sort of error, and will never restart on its own. + /// + pub fn running(&self) -> bool { + match self.liveness.try_recv() { + Ok(()) | Err(TryRecvError::Disconnected) => false, + Err(TryRecvError::Empty) => true, + } + } + + pub fn invalidate(graph: &Graph, paths: &HashSet, caller: &str) -> usize { + let InvalidationResult { cleared, dirtied } = graph.invalidate_from_roots(move |node| { + if let Some(fs_subject) = node.fs_subject() { + paths.contains(fs_subject) + } else { + false + } + }); + info!( + "{} invalidation: cleared {} and dirtied {} nodes for: {:?}", + caller, cleared, dirtied, paths + ); + cleared + dirtied + } +} diff --git a/src/rust/engine/src/watch_tests.rs b/src/rust/engine/src/watch_tests.rs new file mode 100644 index 00000000000..7b6cb28987e --- /dev/null +++ b/src/rust/engine/src/watch_tests.rs @@ -0,0 +1,66 @@ +use crate::nodes::{DigestFile, NodeKey, NodeResult}; +use crate::watch::InvalidationWatcher; +use fs::File; +use graph::entry::{EntryResult, EntryState, Generation, RunToken}; +use graph::{test_support::TestGraph, Graph}; +use hashing::EMPTY_DIGEST; +use std::path::PathBuf; +use std::sync::Arc; +use std::thread::sleep; +use std::time::Duration; +use task_executor::Executor; +use testutil::{append_to_exisiting_file, make_file}; + +#[test] +fn receive_watch_event_on_file_change() { + env_logger::init(); + // setup a build_root with a file in it to watch. + let build_root = tempfile::TempDir::new().unwrap(); + let content = "contents".as_bytes().to_vec(); + let file_path = build_root.path().join("watch_me.txt"); + make_file(&file_path, &content, 0o600); + + // set up a node in the graph to check that it gets cleared by the invalidation watcher. 
+ let node = NodeKey::DigestFile(DigestFile(File { + path: PathBuf::from("watch_me.txt"), + is_executable: false, + })); + let graph = Arc::new(Graph::new()); + let entry_id = graph.add_fixture_entry(node); + let completed_state = EntryState::Completed { + run_token: RunToken::initial(), + generation: Generation::initial(), + result: EntryResult::Clean(Ok(NodeResult::Digest(EMPTY_DIGEST))), + dep_generations: vec![], + }; + graph.set_fixture_entry_state_for_id(entry_id, completed_state); + // Assert the nodes initial state is completed + assert!(graph.entry_state(entry_id) == "completed"); + // Instantiate a watcher and watch the file in question. + let mut rt = tokio::runtime::Runtime::new().unwrap(); + let executor = Executor::new(rt.handle().clone()); + let watcher = InvalidationWatcher::new( + Arc::downgrade(&graph), + executor, + build_root.path().to_path_buf(), + ) + .expect("Couldn't create InvalidationWatcher"); + rt.block_on(watcher.watch(file_path.clone())).unwrap(); + // Update the content of the file being watched. + let new_content = "stnetonc".as_bytes().to_vec(); + append_to_exisiting_file(&file_path, &new_content); + // Wait for watcher background thread to trigger a node invalidation, + // by checking the entry state for the node. It will be reset to EntryState::NotStarted + // when Graph::invalidate_from_roots calls clear on the node. + for _ in 0..10 { + sleep(Duration::from_millis(100)); + if graph.entry_state(entry_id) == "not started" { + return; + } + } + // If we didn't find a new state fail the test. + assert!( + false, + "Nodes EntryState was not invalidated, or reset to NotStarted." + ) +} diff --git a/src/rust/engine/testutil/src/lib.rs b/src/rust/engine/testutil/src/lib.rs index 075623877ee..342a5b0c24a 100644 --- a/src/rust/engine/testutil/src/lib.rs +++ b/src/rust/engine/testutil/src/lib.rs @@ -53,3 +53,8 @@ pub fn make_file(path: &Path, contents: &[u8], mode: u32) { permissions.set_mode(mode); file.set_permissions(permissions).unwrap(); } + +pub fn append_to_exisiting_file(path: &Path, contents: &[u8]) { + let mut file = std::fs::OpenOptions::new().write(true).open(&path).unwrap(); + file.write_all(contents).unwrap(); +} diff --git a/tests/python/pants_test/engine/test_fs.py b/tests/python/pants_test/engine/test_fs.py index af185438cd0..1fbda3c0ab7 100644 --- a/tests/python/pants_test/engine/test_fs.py +++ b/tests/python/pants_test/engine/test_fs.py @@ -4,13 +4,17 @@ import hashlib import logging import os +import shutil import tarfile +import time import unittest from abc import ABCMeta from contextlib import contextmanager from http.server import BaseHTTPRequestHandler from pathlib import Path +from typing import Callable +from pants.base.file_system_project_tree import FileSystemProjectTree from pants.engine.fs import ( EMPTY_DIRECTORY_DIGEST, Digest, @@ -55,6 +59,17 @@ def path_globs(globs) -> PathGlobs: return globs return PathGlobs(globs) + def read_file_content(self, scheduler, filespecs_or_globs): + """Helper method for reading the content of some files from an existing scheduler + session.""" + snapshot = self.execute_expecting_one_result( + scheduler, Snapshot, self.path_globs(filespecs_or_globs) + ).value + result = self.execute_expecting_one_result( + scheduler, FilesContent, snapshot.directory_digest + ).value + return {f.path: f.content for f in result.dependencies} + def assert_walk_dirs(self, filespecs_or_globs, paths, **kwargs): self.assert_walk_snapshot("dirs", filespecs_or_globs, paths, **kwargs) @@ -74,13 +89,7 @@ def 
assert_walk_snapshot( def assert_content(self, filespecs_or_globs, expected_content): with self.mk_project_tree() as project_tree: scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) - snapshot = self.execute_expecting_one_result( - scheduler, Snapshot, self.path_globs(filespecs_or_globs) - ).value - result = self.execute_expecting_one_result( - scheduler, FilesContent, snapshot.directory_digest - ).value - actual_content = {f.path: f.content for f in result.dependencies} + actual_content = self.read_file_content(scheduler, filespecs_or_globs) self.assertEqual(expected_content, actual_content) def assert_digest(self, filespecs_or_globs, expected_files): @@ -134,7 +143,7 @@ def test_walk_parent_link(self): def test_walk_escaping_symlink(self): link = "subdir/escaping" - dest = "../../this-is-probably-nonexistent" + dest = "../../" def prepare(project_tree): link_path = os.path.join(project_tree.build_root, link) @@ -708,6 +717,111 @@ def test_nonexistent_filename_globs(self) -> None: subset_digest = self.request_single_product(Digest, subset_input) assert subset_snapshot.directory_digest == subset_digest + def test_file_content_invalidated(self) -> None: + """Test that we can update files and have the native engine invalidate previous operations + on those files.""" + + with self.mk_project_tree() as project_tree: + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + fname = "4.txt" + new_data = "rouf" + # read the original file so we have a cached value. + self.read_file_content(scheduler, [fname]) + path_to_fname = os.path.join(project_tree.build_root, fname) + with open(path_to_fname, "w") as f: + f.write(new_data) + + def assertion_fn(): + new_content = self.read_file_content(scheduler, [fname]) + if new_content[fname].decode("utf-8") == new_data: + # successfully read new data + return True + return False + + if not self.try_with_backoff(assertion_fn): + raise AssertionError( + f"New content {new_data} was not found in the FilesContent of the " + "modified file {path_to_fname}, instead we found {new_content[fname]}" + ) + + def test_file_content_invalidated_after_parent_deletion(self) -> None: + """Test that FileContent is invalidated after deleting parent directory.""" + + with self.mk_project_tree() as project_tree: + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + fname = "a/b/1.txt" + # read the original file so we have nodes to invalidate. + original_content = self.read_file_content(scheduler, [fname]) + self.assertIn(fname, original_content) + path_to_parent_dir = os.path.join(project_tree.build_root, "a/b/") + shutil.rmtree(path_to_parent_dir) + + def assertion_fn(): + new_content = self.read_file_content(scheduler, [fname]) + if new_content.get(fname) is None: + return True + return False + + if not self.try_with_backoff(assertion_fn): + raise AssertionError( + f"Deleting parent dir and could still read file from original snapshot." 
+ ) + + def assert_mutated_directory_digest( + self, mutation_function: Callable[[FileSystemProjectTree, str], Exception] + ): + with self.mk_project_tree() as project_tree: + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + dir_path = "a/" + dir_glob = dir_path + "*" + initial_snapshot = self.execute_expecting_one_result( + scheduler, Snapshot, self.path_globs([dir_glob]) + ).value + assert not initial_snapshot.is_empty + assertion_error = mutation_function(project_tree, dir_path) + + def assertion_fn(): + new_snapshot = self.execute_expecting_one_result( + scheduler, Snapshot, self.path_globs([dir_glob]) + ).value + assert not new_snapshot.is_empty + if initial_snapshot.directory_digest != new_snapshot.directory_digest: + # successfully invalidated snapshot and got a new digest + return True + return False + + if not self.try_with_backoff(assertion_fn): + raise assertion_error + + @staticmethod + def try_with_backoff(assertion_fn: Callable[[], bool]) -> bool: + for i in range(4): + time.sleep(0.1 * i) + if assertion_fn(): + return True + return False + + def test_directory_digest_invalidated_by_child_removal(self): + def mutation_function(project_tree, dir_path): + removed_path = os.path.join(project_tree.build_root, dir_path, "3.txt") + os.remove(removed_path) + return AssertionError( + f"Did not find a new directory snapshot after adding file {removed_path}." + ) + + self.assert_mutated_directory_digest(mutation_function) + + def test_directory_digest_invalidated_by_child_change(self): + def mutation_function(project_tree, dir_path): + new_file_path = os.path.join(project_tree.build_root, dir_path, "new_file.txt") + with open(new_file_path, "w") as f: + f.write("new file") + return AssertionError( + f"Did not find a new directory snapshot after adding file {new_file_path}." + ) + + self.assert_mutated_directory_digest(mutation_function) + class StubHandler(BaseHTTPRequestHandler): response_text = b"www.pantsbuild.org" From fd5c22ab5e426f053f828ef5c2dda3cdbb7dd183 Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Thu, 26 Mar 2020 18:27:58 -0700 Subject: [PATCH 03/15] Ignore notify events for pants_ignore patterns. (#9406) * Create a git ignorer on the context object. Adjust all call sites which create a posix fs to pass in an ignorer. * Ignore fsevents from files that match pants_ignore patterns. * Always pass is_dir = false to ignorer to avoid stat-ing every path the event watch thread sees. --- src/rust/engine/fs/fs_util/src/main.rs | 8 +- src/rust/engine/fs/src/lib.rs | 23 ++-- src/rust/engine/fs/src/posixfs_tests.rs | 9 +- src/rust/engine/fs/store/src/snapshot.rs | 6 +- .../engine/fs/store/src/snapshot_tests.rs | 7 +- .../engine/process_execution/src/local.rs | 3 +- src/rust/engine/src/context.rs | 20 +++- src/rust/engine/src/watch.rs | 56 +++++---- src/rust/engine/src/watch_tests.rs | 110 +++++++++++++++--- 9 files changed, 178 insertions(+), 64 deletions(-) diff --git a/src/rust/engine/fs/fs_util/src/main.rs b/src/rust/engine/fs/fs_util/src/main.rs index 786304898be..c6c6c66777a 100644 --- a/src/rust/engine/fs/fs_util/src/main.rs +++ b/src/rust/engine/fs/fs_util/src/main.rs @@ -662,7 +662,13 @@ fn expand_files_helper( } fn make_posix_fs>(executor: task_executor::Executor, root: P) -> fs::PosixFS { - fs::PosixFS::new(&root, &[], executor).unwrap() + // Unwrapping the output of creating the git ignorer with no patterns is infallible. 
+ fs::PosixFS::new( + &root, + fs::GitignoreStyleExcludes::create(&[]).unwrap(), + executor, + ) + .unwrap() } fn ensure_uploaded_to_remote( diff --git a/src/rust/engine/fs/src/lib.rs b/src/rust/engine/fs/src/lib.rs index 6e9e3ec16c5..3fff711e915 100644 --- a/src/rust/engine/fs/src/lib.rs +++ b/src/rust/engine/fs/src/lib.rs @@ -131,7 +131,7 @@ pub struct GitignoreStyleExcludes { } impl GitignoreStyleExcludes { - fn create(patterns: &[String]) -> Result, String> { + pub fn create(patterns: &[String]) -> Result, String> { if patterns.is_empty() { return Ok(EMPTY_IGNORE.clone()); } @@ -171,6 +171,13 @@ impl GitignoreStyleExcludes { ::ignore::Match::Ignore(_) => true, } } + + pub fn is_ignored_or_child_of_ignored_path(&self, path: &Path, is_dir: bool) -> bool { + match self.gitignore.matched_path_or_any_parents(path, is_dir) { + ::ignore::Match::None | ::ignore::Match::Whitelist(_) => false, + ::ignore::Match::Ignore(_) => true, + } + } } lazy_static! { @@ -587,15 +594,15 @@ pub struct PosixFS { impl PosixFS { pub fn new>( root: P, - ignore_patterns: &[String], + ignorer: Arc, executor: task_executor::Executor, ) -> Result { - Self::new_with_symlink_behavior(root, ignore_patterns, executor, SymlinkBehavior::Aware) + Self::new_with_symlink_behavior(root, ignorer, executor, SymlinkBehavior::Aware) } pub fn new_with_symlink_behavior>( root: P, - ignore_patterns: &[String], + ignorer: Arc, executor: task_executor::Executor, symlink_behavior: SymlinkBehavior, ) -> Result { @@ -616,15 +623,9 @@ impl PosixFS { }) .map_err(|e| format!("Could not canonicalize root {:?}: {:?}", root, e))?; - let ignore = GitignoreStyleExcludes::create(&ignore_patterns).map_err(|e| { - format!( - "Could not parse build ignore inputs {:?}: {:?}", - ignore_patterns, e - ) - })?; Ok(PosixFS { root: canonical_root, - ignore: ignore, + ignore: ignorer, executor: executor, symlink_behavior: symlink_behavior, }) diff --git a/src/rust/engine/fs/src/posixfs_tests.rs b/src/rust/engine/fs/src/posixfs_tests.rs index 984b6be4ab2..df858a24cb7 100644 --- a/src/rust/engine/fs/src/posixfs_tests.rs +++ b/src/rust/engine/fs/src/posixfs_tests.rs @@ -2,8 +2,9 @@ use tempfile; use testutil; use crate::{ - Dir, DirectoryListing, File, GlobExpansionConjunction, GlobMatching, Link, PathGlobs, PathStat, - PathStatGetter, PosixFS, Stat, StrictGlobMatching, SymlinkBehavior, VFS, + Dir, DirectoryListing, File, GitignoreStyleExcludes, GlobExpansionConjunction, GlobMatching, + Link, PathGlobs, PathStat, PathStatGetter, PosixFS, Stat, StrictGlobMatching, SymlinkBehavior, + VFS, }; use async_trait::async_trait; @@ -384,7 +385,7 @@ async fn assert_only_file_is_executable(path: &Path, want_is_executable: bool) { fn new_posixfs>(dir: P) -> PosixFS { PosixFS::new( dir.as_ref(), - &[], + GitignoreStyleExcludes::create(&[]).unwrap(), task_executor::Executor::new(Handle::current()), ) .unwrap() @@ -393,7 +394,7 @@ fn new_posixfs>(dir: P) -> PosixFS { fn new_posixfs_symlink_oblivious>(dir: P) -> PosixFS { PosixFS::new_with_symlink_behavior( dir.as_ref(), - &[], + GitignoreStyleExcludes::create(&[]).unwrap(), task_executor::Executor::new(Handle::current()), SymlinkBehavior::Oblivious, ) diff --git a/src/rust/engine/fs/store/src/snapshot.rs b/src/rust/engine/fs/store/src/snapshot.rs index 9ddf8f27805..3a1fc1f308f 100644 --- a/src/rust/engine/fs/store/src/snapshot.rs +++ b/src/rust/engine/fs/store/src/snapshot.rs @@ -4,7 +4,9 @@ use crate::Store; use bazel_protos; use boxfuture::{try_future, BoxFuture, Boxable}; -use fs::{Dir, File, GlobMatching, PathGlobs, 
PathStat, PosixFS, SymlinkBehavior}; +use fs::{ + Dir, File, GitignoreStyleExcludes, GlobMatching, PathGlobs, PathStat, PosixFS, SymlinkBehavior, +}; use futures::future::TryFutureExt; use futures01::future::{self, join_all, Future}; use hashing::{Digest, EMPTY_DIGEST}; @@ -540,7 +542,7 @@ impl Snapshot { .or_else(|_| { let posix_fs = Arc::new(try_future!(PosixFS::new_with_symlink_behavior( root_path, - &[], + try_future!(GitignoreStyleExcludes::create(&[])), executor, SymlinkBehavior::Oblivious ))); diff --git a/src/rust/engine/fs/store/src/snapshot_tests.rs b/src/rust/engine/fs/store/src/snapshot_tests.rs index 4a698646b5c..d18204a176c 100644 --- a/src/rust/engine/fs/store/src/snapshot_tests.rs +++ b/src/rust/engine/fs/store/src/snapshot_tests.rs @@ -8,8 +8,8 @@ use tokio::runtime::Handle; use crate::{OneOffStoreFileByDigest, Snapshot, Store}; use fs::{ - Dir, File, GlobExpansionConjunction, GlobMatching, PathGlobs, PathStat, PosixFS, - StrictGlobMatching, + Dir, File, GitignoreStyleExcludes, GlobExpansionConjunction, GlobMatching, PathGlobs, PathStat, + PosixFS, StrictGlobMatching, }; use std; @@ -36,7 +36,8 @@ fn setup() -> ( ) .unwrap(); let dir = tempfile::Builder::new().prefix("root").tempdir().unwrap(); - let posix_fs = Arc::new(PosixFS::new(dir.path(), &[], executor).unwrap()); + let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); + let posix_fs = Arc::new(PosixFS::new(dir.path(), ignorer, executor).unwrap()); let file_saver = OneOffStoreFileByDigest::new(store.clone(), posix_fs.clone()); (store, dir, posix_fs, file_saver) } diff --git a/src/rust/engine/process_execution/src/local.rs b/src/rust/engine/process_execution/src/local.rs index b6647a248cf..1493c7675f8 100644 --- a/src/rust/engine/process_execution/src/local.rs +++ b/src/rust/engine/process_execution/src/local.rs @@ -382,7 +382,8 @@ pub trait CapturedWorkdir { future::ok(store::Snapshot::empty()).to_boxed() } else { // Use no ignore patterns, because we are looking for explicitly listed paths. 
- future::done(fs::PosixFS::new(workdir_path2, &[], executor)) + future::done(fs::GitignoreStyleExcludes::create(&[])) + .and_then(|ignorer| future::done(fs::PosixFS::new(workdir_path2, ignorer, executor))) .map_err(|err| { format!( "Error making posix_fs to fetch local process execution output files: {}", diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index dbd62eaf000..db0e72c0034 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -20,7 +20,7 @@ use crate::types::Types; use crate::watch::InvalidationWatcher; use boxfuture::{BoxFuture, Boxable}; use core::clone::Clone; -use fs::{safe_create_dir_all_ioerror, PosixFS}; +use fs::{safe_create_dir_all_ioerror, GitignoreStyleExcludes, PosixFS}; use graph::{EntryId, Graph, NodeContext}; use process_execution::{ self, speculate::SpeculatingCommandRunner, BoundedCommandRunner, ExecuteProcessRequestMetadata, @@ -235,12 +235,24 @@ impl Core { }) } let graph = Arc::new(Graph::new()); - let watcher = - InvalidationWatcher::new(Arc::downgrade(&graph), executor.clone(), build_root.clone())?; let http_client = reqwest::Client::new(); let rule_graph = RuleGraph::new(tasks.as_map(), root_subject_types); + let ignorer = GitignoreStyleExcludes::create(&ignore_patterns).map_err(|e| { + format!( + "Could not parse build ignore inputs {:?}: {:?}", + ignore_patterns, e + ) + })?; + + let watcher = InvalidationWatcher::new( + Arc::downgrade(&graph), + executor.clone(), + build_root.clone(), + ignorer.clone(), + )?; + Ok(Core { graph: graph, tasks: tasks, @@ -253,7 +265,7 @@ impl Core { http_client, // TODO: Errors in initialization should definitely be exposed as python // exceptions, rather than as panics. - vfs: PosixFS::new(&build_root, &ignore_patterns, executor) + vfs: PosixFS::new(&build_root, ignorer, executor) .map_err(|e| format!("Could not initialize VFS: {:?}", e))?, build_root, watcher, diff --git a/src/rust/engine/src/watch.rs b/src/rust/engine/src/watch.rs index be0b64701f0..a2278178795 100644 --- a/src/rust/engine/src/watch.rs +++ b/src/rust/engine/src/watch.rs @@ -8,14 +8,13 @@ use std::thread; use std::time::Duration; use crossbeam_channel::{self, Receiver, RecvTimeoutError, TryRecvError}; -use log::{debug, error, info, warn}; -use notify::{RecommendedWatcher, RecursiveMode, Watcher}; -//use parking_lot::Mutex; use futures::compat::Future01CompatExt; use futures_locks::Mutex; -use process_execution::PlatformConstraint; +use log::{debug, error, info, warn}; +use notify::{RecommendedWatcher, RecursiveMode, Watcher}; use task_executor::Executor; +use fs::GitignoreStyleExcludes; use graph::{Graph, InvalidationResult}; use logging; @@ -42,7 +41,6 @@ pub struct InvalidationWatcher { watcher: Arc>, executor: Executor, liveness: Receiver<()>, - current_platform: PlatformConstraint, } impl InvalidationWatcher { @@ -50,6 +48,7 @@ impl InvalidationWatcher { graph: Weak>, executor: Executor, build_root: PathBuf, + ignorer: Arc, ) -> Result { // Inotify events contain canonical paths to the files being watched. // If the build_root contains a symlink the paths returned in notify events @@ -57,7 +56,6 @@ impl InvalidationWatcher { // We canonicalize the build_root once so this isn't a problem. 
let canonical_build_root = std::fs::canonicalize(build_root.as_path()).map_err(|e| format!("{:?}", e))?; - let current_platform = PlatformConstraint::current_platform_constraint()?; let (watch_sender, watch_receiver) = crossbeam_channel::unbounded(); let mut watcher: RecommendedWatcher = Watcher::new(watch_sender, Duration::from_millis(50)) .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; @@ -65,7 +63,7 @@ impl InvalidationWatcher { // recursively, so we set up that watch here and then return early when watch() is // called by nodes that are running. On Linux the notify crate handles adding paths to watch // much more efficiently so we do that instead on Linux. - if current_platform == PlatformConstraint::Darwin { + if cfg!(target_os = "macos") { watcher .watch(canonical_build_root.clone(), RecursiveMode::Recursive) .map_err(|e| { @@ -93,26 +91,45 @@ impl InvalidationWatcher { let paths: HashSet<_> = ev .paths .into_iter() - .map(|path| { + .filter_map(|path| { // relativize paths to build root. - let mut paths_to_invalidate: Vec = vec![]; - let path_relative_to_build_root = { - if path.starts_with(&canonical_build_root) { - path.strip_prefix(&canonical_build_root).unwrap().into() - } else { - path - } + let path_relative_to_build_root = if path.starts_with(&canonical_build_root) { + // Unwrapping is fine because we check that the path starts with + // the build root above. + path.strip_prefix(&canonical_build_root).unwrap().into() + } else { + path }; - paths_to_invalidate.push(path_relative_to_build_root.clone()); + // To avoid having to stat paths for events we will eventually ignore we "lie" to the ignorer + // to say that no path is a directory, they could be if someone chmod's or creates a dir. + // This maintains correctness by ensuring that at worst we have false negative events, where a directory + // only glob (one that ends in `/` ) was supposed to ignore a directory path, but didn't because we claimed it was a file. That + // directory path will be used to invalidate nodes, but won't invalidate anything because its path is somewhere + // out of our purview. + if ignorer.is_ignored_or_child_of_ignored_path( + &path_relative_to_build_root, + /* is_dir */ false, + ) { + None + } else { + Some(path_relative_to_build_root) + } + }) + .map(|path_relative_to_build_root| { + let mut paths_to_invalidate: Vec = vec![]; if let Some(parent_dir) = path_relative_to_build_root.parent() { paths_to_invalidate.push(parent_dir.to_path_buf()); } + paths_to_invalidate.push(path_relative_to_build_root); paths_to_invalidate }) .flatten() .collect(); - debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); - InvalidationWatcher::invalidate(&graph, &paths, "notify"); + // Only invalidate stuff if we have paths that weren't filtered out by gitignore. + if !paths.is_empty() { + debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); + InvalidationWatcher::invalidate(&graph, &paths, "notify"); + }; } Ok(Err(err)) => { if let notify::ErrorKind::PathNotFound = err.kind { @@ -139,7 +156,6 @@ impl InvalidationWatcher { watcher: wrapped_watcher, executor, liveness: thread_liveness_receiver, - current_platform, }) } @@ -149,7 +165,7 @@ impl InvalidationWatcher { pub async fn watch(&self, path: PathBuf) -> Result<(), notify::Error> { // Short circuit here if we are on a Darwin platform because we should be watching // the entire build root recursively already. 
- if self.current_platform == PlatformConstraint::Darwin { + if cfg!(target_os = "macos") { Ok(()) } else { // Using a futurized mutex here because for some reason using a regular mutex diff --git a/src/rust/engine/src/watch_tests.rs b/src/rust/engine/src/watch_tests.rs index 7b6cb28987e..37807029f0b 100644 --- a/src/rust/engine/src/watch_tests.rs +++ b/src/rust/engine/src/watch_tests.rs @@ -1,9 +1,10 @@ use crate::nodes::{DigestFile, NodeKey, NodeResult}; use crate::watch::InvalidationWatcher; -use fs::File; +use fs::{File, GitignoreStyleExcludes}; use graph::entry::{EntryResult, EntryState, Generation, RunToken}; -use graph::{test_support::TestGraph, Graph}; +use graph::{test_support::TestGraph, EntryId, Graph}; use hashing::EMPTY_DIGEST; +use std::fs::create_dir; use std::path::PathBuf; use std::sync::Arc; use std::thread::sleep; @@ -11,18 +12,27 @@ use std::time::Duration; use task_executor::Executor; use testutil::{append_to_exisiting_file, make_file}; -#[test] -fn receive_watch_event_on_file_change() { - env_logger::init(); +fn init_logger() -> () { + match env_logger::try_init() { + Ok(()) => (), + Err(_) => (), + } +} + +fn setup_fs() -> (tempfile::TempDir, PathBuf) { // setup a build_root with a file in it to watch. - let build_root = tempfile::TempDir::new().unwrap(); + let tempdir = tempfile::TempDir::new().unwrap(); + let build_root = tempdir.path(); let content = "contents".as_bytes().to_vec(); - let file_path = build_root.path().join("watch_me.txt"); + create_dir(build_root.join("foo")).unwrap(); + let file_path = build_root.join("foo/watch_me.txt"); make_file(&file_path, &content, 0o600); + (tempdir, file_path) +} - // set up a node in the graph to check that it gets cleared by the invalidation watcher. +fn setup_graph(fs_subject: PathBuf) -> (Arc>, EntryId) { let node = NodeKey::DigestFile(DigestFile(File { - path: PathBuf::from("watch_me.txt"), + path: fs_subject, is_executable: false, })); let graph = Arc::new(Graph::new()); @@ -36,18 +46,47 @@ fn receive_watch_event_on_file_change() { graph.set_fixture_entry_state_for_id(entry_id, completed_state); // Assert the nodes initial state is completed assert!(graph.entry_state(entry_id) == "completed"); - // Instantiate a watcher and watch the file in question. + (graph, entry_id) +} + +fn setup_watch( + ignorer: Arc, + graph: Arc>, + build_root: PathBuf, + file_path: PathBuf, +) -> InvalidationWatcher { let mut rt = tokio::runtime::Runtime::new().unwrap(); let executor = Executor::new(rt.handle().clone()); - let watcher = InvalidationWatcher::new( - Arc::downgrade(&graph), - executor, - build_root.path().to_path_buf(), - ) - .expect("Couldn't create InvalidationWatcher"); - rt.block_on(watcher.watch(file_path.clone())).unwrap(); + let watcher = InvalidationWatcher::new(Arc::downgrade(&graph), executor, build_root, ignorer) + .expect("Couldn't create InvalidationWatcher"); + rt.block_on(watcher.watch(file_path)).unwrap(); + watcher +} + +#[test] +fn receive_watch_event_on_file_change() { + // set up a node in the graph to check that it gets cleared by the invalidation watcher. + // Instantiate a watcher and watch the file in question. 
+ init_logger(); + let (tempdir, file_path) = setup_fs(); + let build_root = tempdir.path().to_path_buf(); + let (graph, entry_id) = setup_graph( + file_path + .clone() + .strip_prefix(build_root.clone()) + .unwrap() + .to_path_buf(), + ); + + let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); + let _watcher = setup_watch( + ignorer, + graph.clone(), + build_root.clone(), + file_path.clone(), + ); // Update the content of the file being watched. - let new_content = "stnetonc".as_bytes().to_vec(); + let new_content = "stnetnoc".as_bytes().to_vec(); append_to_exisiting_file(&file_path, &new_content); // Wait for watcher background thread to trigger a node invalidation, // by checking the entry state for the node. It will be reset to EntryState::NotStarted @@ -64,3 +103,38 @@ fn receive_watch_event_on_file_change() { "Nodes EntryState was not invalidated, or reset to NotStarted." ) } + +#[test] +fn ignore_file_events_matching_patterns_in_pants_ignore() { + init_logger(); + let (tempdir, file_path) = setup_fs(); + let build_root = tempdir.path().to_path_buf(); + let (graph, entry_id) = setup_graph( + file_path + .clone() + .strip_prefix(build_root.clone()) + .unwrap() + .to_path_buf(), + ); + + let ignorer = GitignoreStyleExcludes::create(&["/foo".to_string()]).unwrap(); + let _watcher = setup_watch( + ignorer, + graph.clone(), + build_root.clone(), + file_path.clone(), + ); + // Update the content of the file being watched. + let new_content = "stnetnoc".as_bytes().to_vec(); + append_to_exisiting_file(&file_path, &new_content); + // Wait for watcher background thread to trigger a node invalidation, + // by checking the entry state for the node. It will be reset to EntryState::NotStarted + // when Graph::invalidate_from_roots calls clear on the node. + for _ in 0..10 { + sleep(Duration::from_millis(100)); + // If the state changed the node was invalidated so fail. + if graph.entry_state(entry_id) != "completed" { + assert!(false, "Node was invalidated even though it was ignored") + } + } +} From b2bab03a2d768601438dbf4f901a4002f25f76fc Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Sun, 29 Mar 2020 21:42:19 -0700 Subject: [PATCH 04/15] Add a feature gate to disable the engine fs watcher introduced in #9318 (#9416) * Add a feature gate to disable the engine fs watcher introduced in #9318 by default, to mitigate issues seen in #9415 until a fix is in place. 
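For illustration, a standalone sketch of the gating shape this patch introduces (simplified, hypothetical type and method bodies; the real change threads the new `--experimental-fs-watcher` option through `scheduler_create` and `Core::new` into `InvalidationWatcher::new` as an `enabled` flag, as shown in the diff below):

```rust
use std::path::Path;

// Hypothetical, simplified stand-in for InvalidationWatcher.
struct GatedWatcher {
  enabled: bool,
}

impl GatedWatcher {
  fn new(enabled: bool) -> GatedWatcher {
    if enabled {
      // Only when the gate is on: register the recursive build-root watch on
      // macOS and spawn the background event thread, as in the real constructor.
    }
    GatedWatcher { enabled }
  }

  fn watch(&self, _path: &Path) -> Result<(), String> {
    if !self.enabled {
      // With the gate off, per-node watch requests become no-ops, so node
      // execution proceeds as it did before the watcher existed (presumed
      // behavior; simplified here).
      return Ok(());
    }
    // ...register a non-recursive watch for `_path`...
    Ok(())
  }
}

fn main() {
  // Gate off by default, matching the new option's `default=False`.
  let watcher = GatedWatcher::new(false);
  assert!(watcher.watch(Path::new("src/some/file.rs")).is_ok());
}
```
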
--- src/python/pants/engine/native.py | 1 + src/python/pants/option/global_options.py | 11 ++ src/rust/engine/engine_cffi/src/lib.rs | 4 + src/rust/engine/src/context.rs | 2 + src/rust/engine/src/watch.rs | 187 +++++++++--------- src/rust/engine/src/watch_tests.rs | 10 +- .../pants_test/engine/scheduler_test_base.py | 10 +- tests/python/pants_test/engine/test_fs.py | 18 +- 8 files changed, 145 insertions(+), 98 deletions(-) diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py index 6914c0684d2..30b32a31081 100644 --- a/src/python/pants/engine/native.py +++ b/src/python/pants/engine/native.py @@ -999,6 +999,7 @@ def ti(type_obj): execution_options.process_execution_use_local_cache, self.context.utf8_dict(execution_options.remote_execution_headers), execution_options.process_execution_local_enable_nailgun, + execution_options.experimental_fs_watcher, ) if scheduler_result.is_throw: value = self.context.from_value(scheduler_result.throw_handle) diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index fff2d18bdaa..a3079b214ca 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -110,6 +110,7 @@ class ExecutionOptions: remote_execution_extra_platform_properties: Any remote_execution_headers: Any process_execution_local_enable_nailgun: bool + experimental_fs_watcher: bool @classmethod def from_bootstrap_options(cls, bootstrap_options): @@ -135,6 +136,7 @@ def from_bootstrap_options(cls, bootstrap_options): remote_execution_extra_platform_properties=bootstrap_options.remote_execution_extra_platform_properties, remote_execution_headers=bootstrap_options.remote_execution_headers, process_execution_local_enable_nailgun=bootstrap_options.process_execution_local_enable_nailgun, + experimental_fs_watcher=bootstrap_options.experimental_fs_watcher, ) @@ -160,6 +162,7 @@ def from_bootstrap_options(cls, bootstrap_options): remote_execution_extra_platform_properties=[], remote_execution_headers={}, process_execution_local_enable_nailgun=False, + experimental_fs_watcher=False, ) @@ -934,6 +937,14 @@ def register_bootstrap_options(cls, register): help="Whether or not to use nailgun to run the requests that are marked as nailgunnable.", advanced=True, ) + register( + "--experimental-fs-watcher", + type=bool, + default=False, + advanced=True, + help="Whether to use the engine filesystem watcher which registers the workspace" + " for kernel file change events", + ) @classmethod def register_options(cls, register): diff --git a/src/rust/engine/engine_cffi/src/lib.rs b/src/rust/engine/engine_cffi/src/lib.rs index 56425fc0472..e6bfaa8643b 100644 --- a/src/rust/engine/engine_cffi/src/lib.rs +++ b/src/rust/engine/engine_cffi/src/lib.rs @@ -207,6 +207,7 @@ pub extern "C" fn scheduler_create( process_execution_use_local_cache: bool, remote_execution_headers_buf: BufferBuffer, process_execution_local_enable_nailgun: bool, + experimental_fs_watcher: bool, ) -> RawResult { match make_core( tasks_ptr, @@ -236,6 +237,7 @@ pub extern "C" fn scheduler_create( process_execution_use_local_cache, remote_execution_headers_buf, process_execution_local_enable_nailgun, + experimental_fs_watcher, ) { Ok(core) => RawResult { is_throw: false, @@ -278,6 +280,7 @@ fn make_core( process_execution_use_local_cache: bool, remote_execution_headers_buf: BufferBuffer, process_execution_local_enable_nailgun: bool, + experimental_fs_watcher: bool, ) -> Result { let root_type_ids = root_type_ids.to_vec(); let 
ignore_patterns = ignore_patterns_buf @@ -386,6 +389,7 @@ fn make_core( process_execution_use_local_cache, remote_execution_headers, process_execution_local_enable_nailgun, + experimental_fs_watcher, ) } diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index db0e72c0034..10ebe3affd2 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -87,6 +87,7 @@ impl Core { process_execution_use_local_cache: bool, remote_execution_headers: BTreeMap, process_execution_local_enable_nailgun: bool, + experimental_fs_watcher: bool, ) -> Result { // Randomize CAS address order to avoid thundering herds from common config. let mut remote_store_servers = remote_store_servers; @@ -251,6 +252,7 @@ impl Core { executor.clone(), build_root.clone(), ignorer.clone(), + experimental_fs_watcher, )?; Ok(Core { diff --git a/src/rust/engine/src/watch.rs b/src/rust/engine/src/watch.rs index a2278178795..2a5d8b106ef 100644 --- a/src/rust/engine/src/watch.rs +++ b/src/rust/engine/src/watch.rs @@ -41,6 +41,7 @@ pub struct InvalidationWatcher { watcher: Arc>, executor: Executor, liveness: Receiver<()>, + enabled: bool, } impl InvalidationWatcher { @@ -49,6 +50,7 @@ impl InvalidationWatcher { executor: Executor, build_root: PathBuf, ignorer: Arc, + enabled: bool, ) -> Result { // Inotify events contain canonical paths to the files being watched. // If the build_root contains a symlink the paths returned in notify events @@ -59,103 +61,106 @@ impl InvalidationWatcher { let (watch_sender, watch_receiver) = crossbeam_channel::unbounded(); let mut watcher: RecommendedWatcher = Watcher::new(watch_sender, Duration::from_millis(50)) .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; - // On darwin the notify API is much more efficient if you watch the build root - // recursively, so we set up that watch here and then return early when watch() is - // called by nodes that are running. On Linux the notify crate handles adding paths to watch - // much more efficiently so we do that instead on Linux. - if cfg!(target_os = "macos") { - watcher - .watch(canonical_build_root.clone(), RecursiveMode::Recursive) - .map_err(|e| { - format!( - "Failed to begin recursively watching files in the build root: {}", - e - ) - })? - } - let wrapped_watcher = Arc::new(Mutex::new(watcher)); let (thread_liveness_sender, thread_liveness_receiver) = crossbeam_channel::unbounded(); - thread::spawn(move || { - logging::set_thread_destination(logging::Destination::Pantsd); - loop { - let event_res = watch_receiver.recv_timeout(Duration::from_millis(100)); - let graph = if let Some(g) = graph.upgrade() { - g - } else { - // The Graph has been dropped: we're done. - break; - }; - match event_res { - Ok(Ok(ev)) => { - let paths: HashSet<_> = ev - .paths - .into_iter() - .filter_map(|path| { - // relativize paths to build root. - let path_relative_to_build_root = if path.starts_with(&canonical_build_root) { - // Unwrapping is fine because we check that the path starts with - // the build root above. - path.strip_prefix(&canonical_build_root).unwrap().into() - } else { - path - }; - // To avoid having to stat paths for events we will eventually ignore we "lie" to the ignorer - // to say that no path is a directory, they could be if someone chmod's or creates a dir. 
- // This maintains correctness by ensuring that at worst we have false negative events, where a directory - // only glob (one that ends in `/` ) was supposed to ignore a directory path, but didn't because we claimed it was a file. That - // directory path will be used to invalidate nodes, but won't invalidate anything because its path is somewhere - // out of our purview. - if ignorer.is_ignored_or_child_of_ignored_path( - &path_relative_to_build_root, - /* is_dir */ false, - ) { - None - } else { - Some(path_relative_to_build_root) - } - }) - .map(|path_relative_to_build_root| { - let mut paths_to_invalidate: Vec = vec![]; - if let Some(parent_dir) = path_relative_to_build_root.parent() { - paths_to_invalidate.push(parent_dir.to_path_buf()); - } - paths_to_invalidate.push(path_relative_to_build_root); - paths_to_invalidate - }) - .flatten() - .collect(); - // Only invalidate stuff if we have paths that weren't filtered out by gitignore. - if !paths.is_empty() { - debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); - InvalidationWatcher::invalidate(&graph, &paths, "notify"); - }; - } - Ok(Err(err)) => { - if let notify::ErrorKind::PathNotFound = err.kind { - warn!("Path(s) did not exist: {:?}", err.paths); - continue; - } else { - error!("File watcher failing with: {}", err); + if enabled { + // On darwin the notify API is much more efficient if you watch the build root + // recursively, so we set up that watch here and then return early when watch() is + // called by nodes that are running. On Linux the notify crate handles adding paths to watch + // much more efficiently so we do that instead on Linux. + if cfg!(target_os = "macos") { + watcher + .watch(canonical_build_root.clone(), RecursiveMode::Recursive) + .map_err(|e| { + format!( + "Failed to begin recursively watching files in the build root: {}", + e + ) + })? + } + + thread::spawn(move || { + logging::set_thread_destination(logging::Destination::Pantsd); + loop { + let event_res = watch_receiver.recv_timeout(Duration::from_millis(100)); + let graph = if let Some(g) = graph.upgrade() { + g + } else { + // The Graph has been dropped: we're done. + break; + }; + match event_res { + Ok(Ok(ev)) => { + let paths: HashSet<_> = ev + .paths + .into_iter() + .filter_map(|path| { + // relativize paths to build root. + let path_relative_to_build_root = if path.starts_with(&canonical_build_root) { + // Unwrapping is fine because we check that the path starts with + // the build root above. + path.strip_prefix(&canonical_build_root).unwrap().into() + } else { + path + }; + // To avoid having to stat paths for events we will eventually ignore we "lie" to the ignorer + // to say that no path is a directory, they could be if someone chmod's or creates a dir. + // This maintains correctness by ensuring that at worst we have false negative events, where a directory + // only glob (one that ends in `/` ) was supposed to ignore a directory path, but didn't because we claimed it was a file. That + // directory path will be used to invalidate nodes, but won't invalidate anything because its path is somewhere + // out of our purview. 
+ if ignorer.is_ignored_or_child_of_ignored_path( + &path_relative_to_build_root, + /* is_dir */ false, + ) { + None + } else { + Some(path_relative_to_build_root) + } + }) + .map(|path_relative_to_build_root| { + let mut paths_to_invalidate: Vec = vec![]; + if let Some(parent_dir) = path_relative_to_build_root.parent() { + paths_to_invalidate.push(parent_dir.to_path_buf()); + } + paths_to_invalidate.push(path_relative_to_build_root); + paths_to_invalidate + }) + .flatten() + .collect(); + // Only invalidate stuff if we have paths that weren't filtered out by gitignore. + if !paths.is_empty() { + debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); + InvalidationWatcher::invalidate(&graph, &paths, "notify"); + }; + } + Ok(Err(err)) => { + if let notify::ErrorKind::PathNotFound = err.kind { + warn!("Path(s) did not exist: {:?}", err.paths); + continue; + } else { + error!("File watcher failing with: {}", err); + break; + } + } + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => { + // The Watcher is gone: we're done. break; } - } - Err(RecvTimeoutError::Timeout) => continue, - Err(RecvTimeoutError::Disconnected) => { - // The Watcher is gone: we're done. - break; - } - }; - } - debug!("Watch thread exiting."); - // Signal that we're exiting (which we would also do by just dropping the channel). - let _ = thread_liveness_sender.send(()); - }); + }; + } + debug!("Watch thread exiting."); + // Signal that we're exiting (which we would also do by just dropping the channel). + let _ = thread_liveness_sender.send(()); + }); + }; Ok(InvalidationWatcher { - watcher: wrapped_watcher, + watcher: Arc::new(Mutex::new(watcher)), executor, liveness: thread_liveness_receiver, + enabled, }) } @@ -164,8 +169,8 @@ impl InvalidationWatcher { /// pub async fn watch(&self, path: PathBuf) -> Result<(), notify::Error> { // Short circuit here if we are on a Darwin platform because we should be watching - // the entire build root recursively already. - if cfg!(target_os = "macos") { + // the entire build root recursively already, or if we are not enabled. 
+ if cfg!(target_os = "macos") || !self.enabled { Ok(()) } else { // Using a futurized mutex here because for some reason using a regular mutex diff --git a/src/rust/engine/src/watch_tests.rs b/src/rust/engine/src/watch_tests.rs index 37807029f0b..61e4c1aba5d 100644 --- a/src/rust/engine/src/watch_tests.rs +++ b/src/rust/engine/src/watch_tests.rs @@ -57,8 +57,14 @@ fn setup_watch( ) -> InvalidationWatcher { let mut rt = tokio::runtime::Runtime::new().unwrap(); let executor = Executor::new(rt.handle().clone()); - let watcher = InvalidationWatcher::new(Arc::downgrade(&graph), executor, build_root, ignorer) - .expect("Couldn't create InvalidationWatcher"); + let watcher = InvalidationWatcher::new( + Arc::downgrade(&graph), + executor, + build_root, + ignorer, + /*enabled*/ true, + ) + .expect("Couldn't create InvalidationWatcher"); rt.block_on(watcher.watch(file_path)).unwrap(); watcher } diff --git a/tests/python/pants_test/engine/scheduler_test_base.py b/tests/python/pants_test/engine/scheduler_test_base.py index 74053825009..85e927cf0ec 100644 --- a/tests/python/pants_test/engine/scheduler_test_base.py +++ b/tests/python/pants_test/engine/scheduler_test_base.py @@ -3,11 +3,12 @@ import os import shutil +from dataclasses import asdict from pants.base.file_system_project_tree import FileSystemProjectTree from pants.engine.nodes import Throw from pants.engine.scheduler import Scheduler -from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS +from pants.option.global_options import DEFAULT_EXECUTION_OPTIONS, ExecutionOptions from pants.testutil.engine.util import init_native from pants.util.contextutil import temporary_file_path from pants.util.dirutil import safe_mkdtemp, safe_rmtree @@ -48,19 +49,24 @@ def mk_scheduler( work_dir=None, include_trace_on_error=True, should_report_workunits=False, + execution_options=None, ): """Creates a SchedulerSession for a Scheduler with the given Rules installed.""" rules = rules or [] work_dir = work_dir or self._create_work_dir() project_tree = project_tree or self.mk_fs_tree(work_dir=work_dir) local_store_dir = os.path.realpath(safe_mkdtemp()) + if execution_options is not None: + eo = asdict(DEFAULT_EXECUTION_OPTIONS) + eo.update(execution_options) + execution_options = ExecutionOptions(**eo) scheduler = Scheduler( self._native, project_tree, local_store_dir, rules, union_rules, - DEFAULT_EXECUTION_OPTIONS, + execution_options=execution_options or DEFAULT_EXECUTION_OPTIONS, include_trace_on_error=include_trace_on_error, ) return scheduler.new_session( diff --git a/tests/python/pants_test/engine/test_fs.py b/tests/python/pants_test/engine/test_fs.py index 1fbda3c0ab7..5883953b214 100644 --- a/tests/python/pants_test/engine/test_fs.py +++ b/tests/python/pants_test/engine/test_fs.py @@ -722,7 +722,11 @@ def test_file_content_invalidated(self) -> None: on those files.""" with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + scheduler = self.mk_scheduler( + rules=create_fs_rules(), + project_tree=project_tree, + execution_options={"experimental_fs_watcher": True}, + ) fname = "4.txt" new_data = "rouf" # read the original file so we have a cached value. 
@@ -748,7 +752,11 @@ def test_file_content_invalidated_after_parent_deletion(self) -> None: """Test that FileContent is invalidated after deleting parent directory.""" with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + scheduler = self.mk_scheduler( + rules=create_fs_rules(), + project_tree=project_tree, + execution_options={"experimental_fs_watcher": True}, + ) fname = "a/b/1.txt" # read the original file so we have nodes to invalidate. original_content = self.read_file_content(scheduler, [fname]) @@ -771,7 +779,11 @@ def assert_mutated_directory_digest( self, mutation_function: Callable[[FileSystemProjectTree, str], Exception] ): with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) + scheduler = self.mk_scheduler( + rules=create_fs_rules(), + project_tree=project_tree, + execution_options={"experimental_fs_watcher": True}, + ) dir_path = "a/" dir_glob = dir_path + "*" initial_snapshot = self.execute_expecting_one_result( From 0960c8882628af4e45687cc16da9cf4dabd439d5 Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Fri, 3 Apr 2020 10:53:59 -0700 Subject: [PATCH 05/15] Don't rerun uncachable nodes if they are dirtied while running. (#9452) * Don't rerun uncachable nodes if they are dirtied while running. - Retry dependencies of uncacheable nodes a few times to get a result until we are exhausted from trying too many times. - Bubble uncacheable node retry errors up to the user, tell them things were changing too much. - Don't propagate dirtiness past uncacheable nodes when invalidating from changed roots. Otherwise dirty dependents of uncacheable nodes will need to re-run. * enable the engine fs watcher by default, now that it won't cause issues. Remove execution option override from tests. 
* use reference to self in stop_walk_predicate closure * invalidate often enough that test doesn't flake --- src/python/pants/option/global_options.py | 4 +- src/rust/engine/Cargo.lock | 2 + src/rust/engine/graph/Cargo.toml | 2 + src/rust/engine/graph/src/entry.rs | 4 +- src/rust/engine/graph/src/lib.rs | 81 +++++++-- src/rust/engine/graph/src/node.rs | 12 +- src/rust/engine/graph/src/tests.rs | 207 +++++++++++++++++----- src/rust/engine/src/core.rs | 4 +- src/rust/engine/src/externs.rs | 1 - src/rust/engine/src/nodes.rs | 14 +- tests/python/pants_test/engine/test_fs.py | 18 +- 11 files changed, 261 insertions(+), 88 deletions(-) diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index a3079b214ca..aa671752d6c 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -162,7 +162,7 @@ def from_bootstrap_options(cls, bootstrap_options): remote_execution_extra_platform_properties=[], remote_execution_headers={}, process_execution_local_enable_nailgun=False, - experimental_fs_watcher=False, + experimental_fs_watcher=True, ) @@ -940,7 +940,7 @@ def register_bootstrap_options(cls, register): register( "--experimental-fs-watcher", type=bool, - default=False, + default=True, advanced=True, help="Whether to use the engine filesystem watcher which registers the workspace" " for kernel file change events", diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index fd0b560d907..10961b8add7 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -1094,8 +1094,10 @@ name = "graph" version = "0.0.1" dependencies = [ "boxfuture 0.0.1", + "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/src/rust/engine/graph/Cargo.toml b/src/rust/engine/graph/Cargo.toml index 905cae65965..758ea40c616 100644 --- a/src/rust/engine/graph/Cargo.toml +++ b/src/rust/engine/graph/Cargo.toml @@ -9,6 +9,7 @@ publish = false boxfuture = { path = "../boxfuture" } fnv = "1.0.5" futures01 = { package = "futures", version = "0.1" } +futures = { version = "0.3", features = ["compat"] } hashing = { path = "../hashing" } indexmap = "1.0.2" log = "0.4" @@ -17,3 +18,4 @@ petgraph = "0.4.5" [dev-dependencies] rand = "0.6" +env_logger = "0.5.4" diff --git a/src/rust/engine/graph/src/entry.rs b/src/rust/engine/graph/src/entry.rs index c8254a73edd..bf957c72fb6 100644 --- a/src/rust/engine/graph/src/entry.rs +++ b/src/rust/engine/graph/src/entry.rs @@ -370,7 +370,7 @@ impl Entry { entry_id, run_token, generation, - if self.node.cacheable(context) { + if self.node.cacheable() { Some(dep_generations) } else { None @@ -473,7 +473,7 @@ impl Entry { } else { // If the new result does not match the previous result, the generation increments. 
let (generation, next_result) = if let Some(result) = result { - let next_result = if !self.node.cacheable(context) { + let next_result = if !self.node.cacheable() { EntryResult::Uncacheable(result, context.session_id().clone()) } else if has_dirty_dependencies { EntryResult::Dirty(result) diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs index 51a60a546d8..f2c4e6eea55 100644 --- a/src/rust/engine/graph/src/lib.rs +++ b/src/rust/engine/graph/src/lib.rs @@ -47,6 +47,8 @@ use std::time::{Duration, Instant}; use fnv::FnvHasher; +use futures::compat::Future01CompatExt; +use futures::future::{FutureExt, TryFutureExt}; use futures01::future::{self, Future}; use indexmap::IndexSet; use log::{debug, trace, warn}; @@ -166,7 +168,9 @@ impl InnerGraph { // Search for an existing path from dst to src. let mut roots = VecDeque::new(); roots.push_back(root); - self.walk(roots, direction).any(|eid| eid == needle) + self + .walk(roots, direction, |_| false) + .any(|eid| eid == needle) } /// @@ -287,12 +291,18 @@ impl InnerGraph { /// The Walk will iterate over all nodes that descend from the roots in the direction of /// traversal but won't necessarily be in topological order. /// - fn walk(&self, roots: VecDeque, direction: Direction) -> Walk<'_, N> { + fn walk bool>( + &self, + roots: VecDeque, + direction: Direction, + stop_walking_predicate: F, + ) -> Walk<'_, N, F> { Walk { graph: self, direction: direction, deque: roots, walked: HashSet::default(), + stop_walking_predicate, } } @@ -325,7 +335,11 @@ impl InnerGraph { .collect(); // And their transitive dependencies, which will be dirtied. let transitive_ids: Vec<_> = self - .walk(root_ids.iter().cloned().collect(), Direction::Incoming) + .walk( + root_ids.iter().cloned().collect(), + Direction::Incoming, + |id| !self.entry_for_id(*id).unwrap().node().cacheable(), + ) .filter(|eid| !root_ids.contains(eid)) .collect(); @@ -386,7 +400,7 @@ impl InnerGraph { .cloned() .collect(); - for eid in self.walk(root_entries, Direction::Outgoing) { + for eid in self.walk(root_entries, Direction::Outgoing, |_| false) { let entry = self.unsafe_entry_for_id(eid); let node_str = entry.format(context); @@ -602,7 +616,9 @@ impl InnerGraph { .collect(); self .digests_internal( - self.walk(root_ids, Direction::Outgoing).collect(), + self + .walk(root_ids, Direction::Outgoing, |_| false) + .collect(), context.clone(), ) .count() @@ -663,7 +679,7 @@ impl Graph { context: &N::Context, dst_node: N, ) -> BoxFuture { - let maybe_entry_and_id = { + let maybe_entries_and_id = { // Get or create the destination, and then insert the dep and return its state. let mut inner = self.inner.lock(); if inner.draining { @@ -695,16 +711,44 @@ impl Graph { // All edges get a weight of 1.0 so that we can Bellman-Ford over the graph, treating each // edge as having equal weight. inner.pg.add_edge(src_id, dst_id, 1.0); + let src_entry = inner.entry_for_id(src_id).cloned().unwrap(); inner .entry_for_id(dst_id) .cloned() - .map(|entry| (entry, dst_id)) + .map(|dst_entry| (src_entry, dst_entry, dst_id)) } }; // Declare the dep, and return the state of the destination. - if let Some((mut entry, entry_id)) = maybe_entry_and_id { - entry.get(context, entry_id).map(|(res, _)| res).to_boxed() + if let Some((src_entry, mut entry, entry_id)) = maybe_entries_and_id { + if src_entry.node().cacheable() { + entry.get(context, entry_id).map(|(res, _)| res).to_boxed() + } else { + // Src node is uncacheable, which means it is side-effecting, and can only be allowed to run once. 
+ // We retry its dependencies a number of times here in case a side effect of the Node invalidated + // some of its dependencies, or another (external) process causes invalidation. + let context2 = context.clone(); + let mut counter: usize = 8; + let uncached_node = async move { + loop { + counter -= 1; + if counter == 0 { + break Err(N::Error::exhausted()); + } + let dep_res = entry + .get(&context2, entry_id) + .map(|(res, _)| res) + .compat() + .await; + match dep_res { + Ok(r) => break Ok(r), + Err(err) if err == N::Error::invalidated() => continue, + Err(other_err) => break Err(other_err), + } + } + }; + uncached_node.boxed().compat().to_boxed() + } } else { future::err(N::Error::invalidated()).to_boxed() } @@ -869,7 +913,7 @@ impl Graph { /// TODO: We don't track which generation actually added which edges, so over time nodes will end /// up with spurious dependencies. This is mostly sound, but may lead to over-invalidation and /// doing more work than is necessary. - /// As an example, if generation 0 or X depends on A and B, and generation 1 of X depends on C, + /// As an example, if generation 0 of X depends on A and B, and generation 1 of X depends on C, /// nothing will prune the dependencies from X onto A and B, so generation 1 of X will have /// dependencies on A, B, and C in the graph, even though running it only depends on C. /// At some point we should address this, but we must be careful with how we do so; anything which @@ -904,7 +948,7 @@ impl Graph { // If a dependency is uncacheable or currently dirty, this Node should complete as dirty, // independent of matching Generation values. This is to allow for the behaviour that an // uncacheable Node should always have dirty dependents, transitively. - if !entry.node().cacheable(context) || !entry.is_clean(context) { + if !entry.node().cacheable() || !entry.is_clean(context) { has_dirty_dependencies = true; } entry.generation() @@ -1052,23 +1096,30 @@ pub mod test_support { /// Represents the state of a particular walk through a Graph. Implements Iterator and has the same /// lifetime as the Graph itself. /// -struct Walk<'a, N: Node> { +struct Walk<'a, N: Node, F> +where + F: Fn(&EntryId) -> bool, +{ graph: &'a InnerGraph, direction: Direction, deque: VecDeque, walked: HashSet, + stop_walking_predicate: F, } -impl<'a, N: Node + 'a> Iterator for Walk<'a, N> { +impl<'a, N: Node + 'a, F: Fn(&EntryId) -> bool> Iterator for Walk<'a, N, F> { type Item = EntryId; fn next(&mut self) -> Option { while let Some(id) = self.deque.pop_front() { - if !self.walked.insert(id) { + // Visit this node and it neighbors if this node has not yet be visited and we aren't + // stopping our walk at this node, based on if it satifies the stop_walking_predicate. + // This mechanism gives us a way to selectively dirty parts of the graph respecting node boundaries + // like uncacheable nodes, which sholdn't be dirtied. + if !self.walked.insert(id) || (self.stop_walking_predicate)(&id) { continue; } - // Queue the neighbors of the entry and then return it. self .deque .extend(self.graph.pg.neighbors_directed(id, self.direction)); diff --git a/src/rust/engine/graph/src/node.rs b/src/rust/engine/graph/src/node.rs index 26fe3e5066e..a7670dc38a3 100644 --- a/src/rust/engine/graph/src/node.rs +++ b/src/rust/engine/graph/src/node.rs @@ -37,7 +37,7 @@ pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static { /// /// If the node result is cacheable, return true. 
/// - fn cacheable(&self, context: &Self::Context) -> bool; + fn cacheable(&self) -> bool; /// Nodes optionally have a user-facing name (distinct from their Debug and Display /// implementations). This user-facing name is intended to provide high-level information @@ -55,6 +55,12 @@ pub trait NodeError: Clone + Debug + Eq + Send { /// Graph (generally while running). /// fn invalidated() -> Self; + /// + /// Creates an instance that represents an uncacheable node failing from + /// retrying its dependencies too many times, but never able to resolve them, + /// usually because they were invalidated too many times while running. + /// + fn exhausted() -> Self; /// /// Creates an instance that represents that a Node dependency was cyclic along the given path. @@ -100,7 +106,7 @@ pub trait NodeTracer { /// /// A context passed between Nodes that also stores an EntryId to uniquely identify them. /// -pub trait NodeContext: Clone + Send + 'static { +pub trait NodeContext: Clone + Send + Sync + 'static { /// /// The type generated when this Context is cloned for another Node. /// @@ -111,7 +117,7 @@ pub trait NodeContext: Clone + Send + 'static { /// have Session-specific semantics. More than one context object might be associated with a /// single caller "session". /// - type SessionId: Clone + Debug + Eq; + type SessionId: Clone + Debug + Eq + Send; /// /// Creates a clone of this NodeContext to be used for a different Node. diff --git a/src/rust/engine/graph/src/tests.rs b/src/rust/engine/graph/src/tests.rs index fc06da46e64..df56f8cd420 100644 --- a/src/rust/engine/graph/src/tests.rs +++ b/src/rust/engine/graph/src/tests.rs @@ -3,6 +3,7 @@ use rand; use std::cmp; use std::collections::{HashMap, HashSet}; +use std::hash::{Hash, Hasher}; use std::sync::{mpsc, Arc}; use std::thread; use std::time::Duration; @@ -21,7 +22,7 @@ fn create() { let graph = Arc::new(Graph::new()); let context = TContext::new(graph.clone()); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); } @@ -33,14 +34,17 @@ fn invalidate_and_clean() { // Create three nodes. assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - assert_eq!(context.runs(), vec![TNode(2), TNode(1), TNode(0)]); + assert_eq!( + context.runs(), + vec![TNode::new(2), TNode::new(1), TNode::new(0)] + ); // Clear the middle Node, which dirties the upper node. assert_eq!( - graph.invalidate_from_roots(|&TNode(n)| n == 1), + graph.invalidate_from_roots(|&TNode(n, _)| n == 1), InvalidationResult { cleared: 1, dirtied: 1 @@ -49,10 +53,13 @@ fn invalidate_and_clean() { // Confirm that the cleared Node re-runs, and the upper node is cleaned without re-running. assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - assert_eq!(context.runs(), vec![TNode(2), TNode(1), TNode(0), TNode(1)]); + assert_eq!( + context.runs(), + vec![TNode::new(2), TNode::new(1), TNode::new(0), TNode::new(1)] + ); } #[test] @@ -62,14 +69,17 @@ fn invalidate_and_rerun() { // Create three nodes. 
assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - assert_eq!(context.runs(), vec![TNode(2), TNode(1), TNode(0)]); + assert_eq!( + context.runs(), + vec![TNode::new(2), TNode::new(1), TNode::new(0)] + ); // Clear the middle Node, which dirties the upper node. assert_eq!( - graph.invalidate_from_roots(|&TNode(n)| n == 1), + graph.invalidate_from_roots(|&TNode(n, _)| n == 1), InvalidationResult { cleared: 1, dirtied: 1 @@ -80,10 +90,10 @@ fn invalidate_and_rerun() { // their input values have changed. let context = context.new_session(1).with_salt(1); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 1), T(2, 1)]) ); - assert_eq!(context.runs(), vec![TNode(1), TNode(2)]); + assert_eq!(context.runs(), vec![TNode::new(1), TNode::new(2)]); } #[test] @@ -93,13 +103,13 @@ fn invalidate_with_changed_dependencies() { // Create three nodes. assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); // Clear the middle Node, which dirties the upper node. assert_eq!( - graph.invalidate_from_roots(|&TNode(n)| n == 1), + graph.invalidate_from_roots(|&TNode(n, _)| n == 1), InvalidationResult { cleared: 1, dirtied: 1 @@ -107,17 +117,17 @@ fn invalidate_with_changed_dependencies() { ); // Request with a new context that truncates execution at the middle Node. - let context = - TContext::new(graph.clone()).with_dependencies(vec![(TNode(1), None)].into_iter().collect()); + let context = TContext::new(graph.clone()) + .with_dependencies(vec![(TNode::new(1), None)].into_iter().collect()); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(1, 0), T(2, 0)]) ); // Confirm that dirtying the bottom Node does not affect the middle/upper Nodes, which no // longer depend on it. assert_eq!( - graph.invalidate_from_roots(|&TNode(n)| n == 0), + graph.invalidate_from_roots(|&TNode(n, _)| n == 0), InvalidationResult { cleared: 1, dirtied: 0, @@ -145,7 +155,7 @@ fn invalidate_randomly() { // Invalidate a random node in the graph. let candidate = rng.gen_range(0, range); - graph2.invalidate_from_roots(|&TNode(n)| n == candidate); + graph2.invalidate_from_roots(|&TNode(n, _)| n == candidate); thread::sleep(sleep_per_invalidation); } @@ -160,7 +170,7 @@ fn invalidate_randomly() { let context = TContext::new(graph.clone()).with_salt(iterations); // Compute the root, and validate its output. - let node_output = match graph.create(TNode(range), &context).wait() { + let node_output = match graph.create(TNode::new(range), &context).wait() { Ok(output) => output, Err(TError::Invalidated) => { // Some amnount of concurrent invalidation is expected: retry. @@ -198,33 +208,116 @@ fn dirty_dependents_of_uncacheable_node() { // Create a context for which the bottommost Node is not cacheable. let context = { let mut uncacheable = HashSet::new(); - uncacheable.insert(TNode(0)); + uncacheable.insert(TNode::new(0)); TContext::new(graph.clone()).with_uncacheable(uncacheable) }; // Create three nodes. 
assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - assert_eq!(context.runs(), vec![TNode(2), TNode(1), TNode(0)]); + assert_eq!( + context.runs(), + vec![TNode::new(2), TNode::new(1), TNode::new(0)] + ); // Re-request the root in a new session and confirm that only the bottom node re-runs. let context = context.new_session(1); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - assert_eq!(context.runs(), vec![TNode(0)]); + assert_eq!(context.runs(), vec![TNode::new(0)]); // Re-request with a new session and different salt, and confirm that everything re-runs bottom // up (the order of node cleaning). let context = context.new_session(2).with_salt(1); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 1), T(1, 1), T(2, 1)]) ); - assert_eq!(context.runs(), vec![TNode(0), TNode(1), TNode(2)]); + assert_eq!( + context.runs(), + vec![TNode::new(0), TNode::new(1), TNode::new(2)] + ); +} + +#[test] +fn uncachable_node_only_runs_once() { + let _logger = env_logger::try_init(); + let graph = Arc::new(Graph::new()); + + let context = { + let mut uncacheable = HashSet::new(); + uncacheable.insert(TNode::new(1)); + let delay_for_root = Duration::from_millis(1000); + let mut delays = HashMap::new(); + delays.insert(TNode::new(0), delay_for_root); + TContext::new(graph.clone()) + .with_uncacheable(uncacheable) + .with_delays(delays) + }; + + let graph2 = graph.clone(); + let (send, recv) = mpsc::channel::<()>(); + let _join = thread::spawn(move || { + recv.recv_timeout(Duration::from_millis(100)).unwrap(); + thread::sleep(Duration::from_millis(50)); + graph2.invalidate_from_roots(|&TNode(n, _)| n == 0); + }); + + send.send(()).unwrap(); + assert_eq!( + graph.create(TNode::new(2), &context).wait(), + Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) + ); + // TNode(0) was cleared by the invalidation while all nodes were running, + // but the uncacheable node TNode(1) reties it directly, so it runs twice. + assert_eq!( + context.runs(), + vec![TNode::new(2), TNode::new(1), TNode::new(0), TNode::new(0)] + ); +} + +#[test] +fn exhaust_uncacheable_retries() { + let _logger = env_logger::try_init(); + let graph = Arc::new(Graph::new()); + + let context = { + let mut uncacheable = HashSet::new(); + uncacheable.insert(TNode::new(1)); + let delay_for_root = Duration::from_millis(100); + let mut delays = HashMap::new(); + delays.insert(TNode::new(0), delay_for_root); + TContext::new(graph.clone()) + .with_uncacheable(uncacheable) + .with_delays(delays) + }; + + let sleep_per_invalidation = Duration::from_millis(10); + let graph2 = graph.clone(); + let (send, recv) = mpsc::channel(); + let _join = thread::spawn(move || loop { + if let Ok(_) = recv.try_recv() { + break; + }; + thread::sleep(sleep_per_invalidation); + graph2.invalidate_from_roots(|&TNode(n, _)| n == 0); + }); + let (assertion, subject) = match graph.create(TNode::new(2), &context).wait() { + Err(TError::Throw) => (true, None), + Err(e) => (false, Some(Err(e))), + other => (false, Some(other)), + }; + send.send(()).unwrap(); + assert!( + assertion, + "expected {:?} found {:?}", + Err::<(), TError>(TError::Throw), + subject + ); } #[test] @@ -240,7 +333,7 @@ fn drain_and_resume() { // requesting TNode(0). 
let context = { let mut delays = HashMap::new(); - delays.insert(TNode(1), delay_in_task); + delays.insert(TNode::new(1), delay_in_task); TContext::new(graph.clone()).with_delays(delays) }; @@ -256,7 +349,7 @@ fn drain_and_resume() { // Request a TNode(1) in the "delayed" context, and expect it to be interrupted by the // drain. assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Err(TError::Invalidated), ); @@ -266,7 +359,7 @@ fn drain_and_resume() { .mark_draining(false) .expect("Should already be draining."); assert_eq!( - graph.create(TNode(2), &context).wait(), + graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); } @@ -275,13 +368,16 @@ fn drain_and_resume() { fn cyclic_failure() { // Confirms that an attempt to create a cycle fails. let graph = Arc::new(Graph::new()); - let top = TNode(2); + let top = TNode::new(2); let context = TContext::new(graph.clone()).with_dependencies( // Request creation of a cycle by sending the bottom most node to the top. - vec![(TNode(0), Some(top))].into_iter().collect(), + vec![(TNode::new(0), Some(top))].into_iter().collect(), ); - assert_eq!(graph.create(TNode(2), &context).wait(), Err(TError::Cyclic)); + assert_eq!( + graph.create(TNode::new(2), &context).wait(), + Err(TError::Cyclic) + ); } #[test] @@ -289,8 +385,8 @@ fn cyclic_dirtying() { // Confirms that a dirtied path between two nodes is able to reverse direction while being // cleaned. let graph = Arc::new(Graph::new()); - let initial_top = TNode(2); - let initial_bot = TNode(0); + let initial_top = TNode::new(2); + let initial_bot = TNode::new(0); // Request with a context that creates a path downward. let context_down = TContext::new(graph.clone()); @@ -303,7 +399,7 @@ fn cyclic_dirtying() { graph.invalidate_from_roots(|n| n == &initial_bot); let context_up = context_down.with_salt(1).with_dependencies( // Reverse the path from bottom to top. - vec![(TNode(1), None), (TNode(0), Some(TNode(1)))] + vec![(TNode::new(1), None), (TNode::new(0), Some(TNode::new(1)))] .into_iter() .collect(), ); @@ -348,7 +444,7 @@ fn critical_path() { // Describe a few transformations to navigate between our readable data and the actual types // needed for the graph. let tnode = |node: &str| { - TNode( + TNode::new( nodes .iter() .map(|(k, _)| k) @@ -427,8 +523,24 @@ struct T(usize, usize); /// A node that builds a Vec of tokens by recursively requesting itself and appending its value /// to the result. 
/// -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -struct TNode(usize); +#[derive(Clone, Debug)] +struct TNode(usize, bool /*cacheability*/); +impl TNode { + fn new(id: usize) -> Self { + TNode(id, true) + } +} +impl PartialEq for TNode { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } +} +impl Eq for TNode {} +impl Hash for TNode { + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} impl Node for TNode { type Context = TContext; type Item = Vec; @@ -437,8 +549,8 @@ impl Node for TNode { fn run(self, context: TContext) -> BoxFuture, TError> { context.ran(self.clone()); let token = T(self.0, context.salt()); + context.maybe_delay(&self); if let Some(dep) = context.dependency_of(&self) { - context.maybe_delay(&self); context .get(dep) .map(move |mut v| { @@ -455,8 +567,8 @@ impl Node for TNode { None } - fn cacheable(&self, context: &Self::Context) -> bool { - !context.uncacheable.contains(self) + fn cacheable(&self) -> bool { + self.1 } } @@ -639,7 +751,13 @@ impl TContext { match self.edges.get(node) { Some(Some(ref dep)) => Some(dep.clone()), Some(None) => None, - None if node.0 > 0 => Some(TNode(node.0 - 1)), + None if node.0 > 0 => { + let new_node_id = node.0 - 1; + Some(TNode( + new_node_id, + !self.uncacheable.contains(&TNode::new(new_node_id)), + )) + } None => None, } } @@ -653,12 +771,17 @@ impl TContext { enum TError { Cyclic, Invalidated, + Throw, } impl NodeError for TError { fn invalidated() -> Self { TError::Invalidated } + fn exhausted() -> Self { + TError::Throw + } + fn cyclic(_path: Vec) -> Self { TError::Cyclic } diff --git a/src/rust/engine/src/core.rs b/src/rust/engine/src/core.rs index ae343980a35..e51e3683ed0 100644 --- a/src/rust/engine/src/core.rs +++ b/src/rust/engine/src/core.rs @@ -289,15 +289,13 @@ pub enum Failure { Invalidated, /// A rule raised an exception. 
Throw(Value, String), - FileWatch(String), } impl fmt::Display for Failure { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Failure::Invalidated => write!(f, "Exhausted retries due to changed files."), + Failure::Invalidated => write!(f, "Giving up on retrying due to changed files."), Failure::Throw(exc, _) => write!(f, "{}", externs::val_to_str(exc)), - Failure::FileWatch(failure) => write!(f, "{}", failure), } } } diff --git a/src/rust/engine/src/externs.rs b/src/rust/engine/src/externs.rs index 68fcf11e404..33c9b745a6f 100644 --- a/src/rust/engine/src/externs.rs +++ b/src/rust/engine/src/externs.rs @@ -462,7 +462,6 @@ impl From> for PyResult { let val = match f { f @ Failure::Invalidated => create_exception(&format!("{}", f)), Failure::Throw(exc, _) => exc, - Failure::FileWatch(failure) => create_exception(&failure), }; PyResult { is_throw: true, diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index 911a2ba37d9..39fde2d912f 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -941,7 +941,7 @@ impl NodeVisualizer for Visualizer { let max_colors = 12; match entry.peek(context) { None => "white".to_string(), - Some(Err(Failure::Throw(..))) | Some(Err(Failure::FileWatch(..))) => "4".to_string(), + Some(Err(Failure::Throw(..))) => "4".to_string(), Some(Err(Failure::Invalidated)) => "12".to_string(), Some(Ok(_)) => { let viz_colors_len = self.viz_colors.len(); @@ -962,7 +962,6 @@ impl NodeTracer for Tracer { match result { Some(Err(Failure::Invalidated)) => false, Some(Err(Failure::Throw(..))) => false, - Some(Err(Failure::FileWatch(..))) => false, Some(Ok(_)) => true, None => { // A Node with no state is either still running, or effectively cancelled @@ -987,7 +986,6 @@ impl NodeTracer for Tracer { .join("\n") ), Some(Err(Failure::Invalidated)) => "Invalidated".to_string(), - Some(Err(Failure::FileWatch(failure))) => format!("FileWatch failed: {}", failure), } } } @@ -1075,7 +1073,7 @@ impl Node for NodeKey { .core .watcher .watch(abs_path) - .map_err(|e| Failure::FileWatch(format!("{:?}", e))) + .map_err(|e| Context::mk_error(&format!("{:?}", e))) .await } else { Ok(()) @@ -1118,7 +1116,7 @@ impl Node for NodeKey { } } - fn cacheable(&self, _context: &Self::Context) -> bool { + fn cacheable(&self) -> bool { match self { &NodeKey::Task(ref s) => s.task.cacheable, _ => true, @@ -1161,6 +1159,12 @@ impl NodeError for Failure { Failure::Invalidated } + fn exhausted() -> Failure { + Context::mk_error( + "Exhausted retries for uncacheable node. The filesystem was changing too much.", + ) + } + fn cyclic(mut path: Vec) -> Failure { let path_len = path.len(); if path_len > 1 { diff --git a/tests/python/pants_test/engine/test_fs.py b/tests/python/pants_test/engine/test_fs.py index 5883953b214..b5deaf4c5b9 100644 --- a/tests/python/pants_test/engine/test_fs.py +++ b/tests/python/pants_test/engine/test_fs.py @@ -722,11 +722,7 @@ def test_file_content_invalidated(self) -> None: on those files.""" with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler( - rules=create_fs_rules(), - project_tree=project_tree, - execution_options={"experimental_fs_watcher": True}, - ) + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree,) fname = "4.txt" new_data = "rouf" # read the original file so we have a cached value. 
@@ -752,11 +748,7 @@ def test_file_content_invalidated_after_parent_deletion(self) -> None: """Test that FileContent is invalidated after deleting parent directory.""" with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler( - rules=create_fs_rules(), - project_tree=project_tree, - execution_options={"experimental_fs_watcher": True}, - ) + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree,) fname = "a/b/1.txt" # read the original file so we have nodes to invalidate. original_content = self.read_file_content(scheduler, [fname]) @@ -779,11 +771,7 @@ def assert_mutated_directory_digest( self, mutation_function: Callable[[FileSystemProjectTree, str], Exception] ): with self.mk_project_tree() as project_tree: - scheduler = self.mk_scheduler( - rules=create_fs_rules(), - project_tree=project_tree, - execution_options={"experimental_fs_watcher": True}, - ) + scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree,) dir_path = "a/" dir_glob = dir_path + "*" initial_snapshot = self.execute_expecting_one_result( From 312a3a0199345f53fc94a84df4cc39a8e199fd29 Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Tue, 7 Apr 2020 23:09:19 -0700 Subject: [PATCH 06/15] Add a flag to prevent the FsEventService and watchman from starting (#9487) * add --watchman-enable flag * disable watchman when flag is false * Don't wait for the initial watchman event if we aren't using watchman. * check invalidation watcher liveness from scheduler service --- src/python/pants/engine/scheduler.py | 5 +- src/python/pants/option/global_options.py | 8 + src/python/pants/pantsd/pants_daemon.py | 16 +- .../pants/pantsd/service/scheduler_service.py | 57 +++--- src/python/pants/pantsd/watchman_launcher.py | 35 ++-- src/rust/engine/engine_cffi/src/lib.rs | 5 + src/rust/engine/src/watch.rs | 179 ++++++++++-------- src/rust/engine/src/watch_tests.rs | 35 ++++ 8 files changed, 226 insertions(+), 114 deletions(-) diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 251dc509fed..2b97715efee 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -9,7 +9,7 @@ import traceback from dataclasses import dataclass from textwrap import dedent -from typing import TYPE_CHECKING, Any, Dict, Tuple +from typing import TYPE_CHECKING, Any, Dict, Tuple, cast from pants.base.exception_sink import ExceptionSink from pants.base.exiter import PANTS_FAILED_EXIT_CODE @@ -242,6 +242,9 @@ def invalidate_files(self, direct_filenames): def invalidate_all_files(self): return self._native.lib.graph_invalidate_all_paths(self._scheduler) + def check_invalidation_watcher_liveness(self) -> bool: + return cast(bool, self._native.lib.check_invalidation_watcher_liveness(self._scheduler)) + def graph_len(self): return self._native.lib.graph_len(self._scheduler) diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index aa671752d6c..60b7e07c33f 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -726,6 +726,14 @@ def register_bootstrap_options(cls, register): ) # Watchman options. + register( + "--watchman-enable", + type=bool, + advanced=True, + default=True, + help="Use the watchman daemon filesystem event watcher to watch for changes " + "in the buildroot. 
Disable this to rely solely on the experimental pants engine filesystem watcher.", + ) register( "--watchman-version", advanced=True, default="4.9.0-pants1", help="Watchman version." ) diff --git a/src/python/pants/pantsd/pants_daemon.py b/src/python/pants/pantsd/pants_daemon.py index 79b7e909825..6286f764e7d 100644 --- a/src/python/pants/pantsd/pants_daemon.py +++ b/src/python/pants/pantsd/pants_daemon.py @@ -203,7 +203,9 @@ def _setup_services( :returns: A PantsServices instance. """ should_shutdown_after_run = bootstrap_options.shutdown_pantsd_after_run - fs_event_service = FSEventService(watchman, build_root,) + fs_event_service = ( + FSEventService(watchman, build_root,) if bootstrap_options.watchman_enable else None + ) pidfile_absolute = PantsDaemon.metadata_file_path( "pantsd", "pid", bootstrap_options.pants_subprocessdir @@ -218,6 +220,7 @@ def _setup_services( "pantsd processes." ) + # TODO make SchedulerService handle fs_event_service_being None scheduler_service = SchedulerService( fs_event_service=fs_event_service, legacy_graph_scheduler=legacy_graph_scheduler, @@ -239,7 +242,16 @@ def _setup_services( store_gc_service = StoreGCService(legacy_graph_scheduler.scheduler) return PantsServices( - services=(fs_event_service, scheduler_service, pailgun_service, store_gc_service), + services=tuple( + service + for service in ( + fs_event_service, + scheduler_service, + pailgun_service, + store_gc_service, + ) + if service is not None + ), port_map=dict(pailgun=pailgun_service.pailgun_port), ) diff --git a/src/python/pants/pantsd/service/scheduler_service.py b/src/python/pants/pantsd/service/scheduler_service.py index c8f30e37142..d5b58505675 100644 --- a/src/python/pants/pantsd/service/scheduler_service.py +++ b/src/python/pants/pantsd/service/scheduler_service.py @@ -6,6 +6,7 @@ import queue import sys import threading +import time from typing import List, Optional, Set, Tuple, cast from pants.base.exiter import PANTS_SUCCEEDED_EXIT_CODE @@ -28,11 +29,12 @@ class SchedulerService(PantsService): """ QUEUE_SIZE = 64 + INVALIDATION_WATCHER_LIVENESS_CHECK_INTERVAL = 1 def __init__( self, *, - fs_event_service: FSEventService, + fs_event_service: Optional[FSEventService], legacy_graph_scheduler: LegacyGraphScheduler, build_root: str, invalidation_globs: List[str], @@ -79,21 +81,25 @@ def setup(self, services): """Service setup.""" super().setup(services) # Register filesystem event handlers on an FSEventService instance. - self._fs_event_service.register_all_files_handler( - self._enqueue_fs_event, self._fs_event_service.PANTS_ALL_FILES_SUBSCRIPTION_NAME - ) + if self._fs_event_service is not None: + self._fs_event_service.register_all_files_handler( + self._enqueue_fs_event, self._fs_event_service.PANTS_ALL_FILES_SUBSCRIPTION_NAME + ) # N.B. We compute the invalidating fileset eagerly at launch with an assumption that files # that exist at startup are the only ones that can affect the running daemon. 
- if self._invalidation_globs: - self._invalidating_snapshot = self._get_snapshot() - self._invalidating_files = self._invalidating_snapshot.files - self._logger.info("watching invalidating files: {}".format(self._invalidating_files)) - - if self._pantsd_pidfile: - self._fs_event_service.register_pidfile_handler( - self._pantsd_pidfile, self._enqueue_fs_event - ) + if self._fs_event_service is not None: + if self._invalidation_globs: + self._invalidating_snapshot = self._get_snapshot() + self._invalidating_files = self._invalidating_snapshot.files + self._logger.info( + "watching invalidating files: {}".format(self._invalidating_files) + ) + + if self._pantsd_pidfile: + self._fs_event_service.register_pidfile_handler( + self._pantsd_pidfile, self._enqueue_fs_event + ) def _enqueue_fs_event(self, event): """Watchman filesystem event handler for BUILD/requirements.txt updates. @@ -178,6 +184,7 @@ def _process_event_queue(self): # The first watchman event for all_files is a listing of all files - ignore it. if ( not is_initial_event + and self._fs_event_service is not None and subscription == self._fs_event_service.PANTS_ALL_FILES_SUBSCRIPTION_NAME ): self._handle_batch_event(files) @@ -191,12 +198,14 @@ def _process_event_queue(self): self._event_queue.task_done() - def product_graph_len(self): - """Provides the size of the captive product graph. - - :returns: The node count for the captive product graph. - """ - return self._scheduler.graph_len() + def _check_invalidation_watcher_liveness(self): + time.sleep(self.INVALIDATION_WATCHER_LIVENESS_CHECK_INTERVAL) + if not self._scheduler.check_invalidation_watcher_liveness(): + # Watcher failed for some reason + self._logger.critical( + "The graph invalidation watcher failed, so we are shutting down. Check the pantsd.log for details" + ) + self.terminate() def prepare_v1_graph_run_v2( self, options: Options, options_bootstrapper: OptionsBootstrapper, @@ -208,10 +217,9 @@ def prepare_v1_graph_run_v2( """ # If any nodes exist in the product graph, wait for the initial watchman event to avoid # racing watchman startup vs invalidation events. 
- graph_len = self._scheduler.graph_len() - if graph_len > 0: + if self._fs_event_service is not None and self._scheduler.graph_len() > 0: self._logger.debug( - "graph len was {}, waiting for initial watchman event".format(graph_len) + f"fs event service is running and graph_len > 0: waiting for initial watchman event" ) self._watchman_is_running.wait() build_id = RunTracker.global_instance().run_id @@ -289,7 +297,10 @@ def _body( def run(self): """Main service entrypoint.""" while not self._state.is_terminating: - self._process_event_queue() + if self._fs_event_service is not None: + self._process_event_queue() + else: + self._check_invalidation_watcher_liveness() self._state.maybe_pause() diff --git a/src/python/pants/pantsd/watchman_launcher.py b/src/python/pants/pantsd/watchman_launcher.py index 8304232b634..c1b54dcef1a 100644 --- a/src/python/pants/pantsd/watchman_launcher.py +++ b/src/python/pants/pantsd/watchman_launcher.py @@ -34,6 +34,7 @@ def create(cls, bootstrap_options): bootstrap_options.watchman_supportdir, bootstrap_options.watchman_startup_timeout, bootstrap_options.watchman_socket_timeout, + bootstrap_options.watchman_enable, bootstrap_options.watchman_socket_path, bootstrap_options.pants_subprocessdir, ) @@ -46,6 +47,7 @@ def __init__( watchman_supportdir, startup_timeout, socket_timeout, + watchman_enable, socket_path_override=None, metadata_base_dir=None, ): @@ -56,6 +58,7 @@ def __init__( :param watchman_supportdir: The supportdir for BinaryUtil. :param socket_timeout: The watchman client socket timeout (in seconds). :param socket_path_override: The overridden target path of the watchman socket, if any. + :param watchman_enable: Whether to start watchman when asked to maybe launch. :param metadata_base_dir: The ProcessManager metadata base directory. 
""" self._binary_util = binary_util @@ -64,6 +67,7 @@ def __init__( self._startup_timeout = startup_timeout self._socket_timeout = socket_timeout self._socket_path_override = socket_path_override + self._watchman_enable = watchman_enable self._log_level = log_level self._logger = logging.getLogger(__name__) self._metadata_base_dir = metadata_base_dir @@ -92,21 +96,28 @@ def watchman(self): ) def maybe_launch(self): - if not self.watchman.is_alive(): - self._logger.debug("launching watchman") - try: - self.watchman.launch() - except (Watchman.ExecutionError, Watchman.InvalidCommandOutput) as e: - self._logger.fatal("failed to launch watchman: {!r})".format(e)) - raise + if self._watchman_enable: + if not self.watchman.is_alive(): + self._logger.debug("launching watchman") + try: + self.watchman.launch() + except (Watchman.ExecutionError, Watchman.InvalidCommandOutput) as e: + self._logger.critical("failed to launch watchman: {!r})".format(e)) + raise - self._logger.debug( - "watchman is running, pid={pid} socket={socket}".format( - pid=self.watchman.pid, socket=self.watchman.socket + self._logger.debug( + "watchman is running, pid={pid} socket={socket}".format( + pid=self.watchman.pid, socket=self.watchman.socket + ) ) - ) + return self.watchman + else: + self.maybe_terminate() - return self.watchman + def maybe_terminate(self) -> None: + if not self._watchman_enable and self.watchman.is_alive(): + self._logger.info("Watchman was running, but will be killed because it was disabled.") + self.terminate() def terminate(self): self.watchman.terminate() diff --git a/src/rust/engine/engine_cffi/src/lib.rs b/src/rust/engine/engine_cffi/src/lib.rs index e6bfaa8643b..6f872f5c0f2 100644 --- a/src/rust/engine/engine_cffi/src/lib.rs +++ b/src/rust/engine/engine_cffi/src/lib.rs @@ -613,6 +613,11 @@ pub extern "C" fn graph_invalidate_all_paths(scheduler_ptr: *mut Scheduler) -> u }) } +#[no_mangle] +pub extern "C" fn check_invalidation_watcher_liveness(scheduler_ptr: *mut Scheduler) -> bool { + with_scheduler(scheduler_ptr, |scheduler| scheduler.core.watcher.is_alive()) +} + #[no_mangle] pub extern "C" fn graph_len(scheduler_ptr: *mut Scheduler) -> u64 { with_scheduler(scheduler_ptr, |scheduler| scheduler.core.graph.len() as u64) diff --git a/src/rust/engine/src/watch.rs b/src/rust/engine/src/watch.rs index 2a5d8b106ef..7752ae07ec0 100644 --- a/src/rust/engine/src/watch.rs +++ b/src/rust/engine/src/watch.rs @@ -78,83 +78,15 @@ impl InvalidationWatcher { ) })? } + } - thread::spawn(move || { - logging::set_thread_destination(logging::Destination::Pantsd); - loop { - let event_res = watch_receiver.recv_timeout(Duration::from_millis(100)); - let graph = if let Some(g) = graph.upgrade() { - g - } else { - // The Graph has been dropped: we're done. - break; - }; - match event_res { - Ok(Ok(ev)) => { - let paths: HashSet<_> = ev - .paths - .into_iter() - .filter_map(|path| { - // relativize paths to build root. - let path_relative_to_build_root = if path.starts_with(&canonical_build_root) { - // Unwrapping is fine because we check that the path starts with - // the build root above. - path.strip_prefix(&canonical_build_root).unwrap().into() - } else { - path - }; - // To avoid having to stat paths for events we will eventually ignore we "lie" to the ignorer - // to say that no path is a directory, they could be if someone chmod's or creates a dir. 
- // This maintains correctness by ensuring that at worst we have false negative events, where a directory - // only glob (one that ends in `/` ) was supposed to ignore a directory path, but didn't because we claimed it was a file. That - // directory path will be used to invalidate nodes, but won't invalidate anything because its path is somewhere - // out of our purview. - if ignorer.is_ignored_or_child_of_ignored_path( - &path_relative_to_build_root, - /* is_dir */ false, - ) { - None - } else { - Some(path_relative_to_build_root) - } - }) - .map(|path_relative_to_build_root| { - let mut paths_to_invalidate: Vec = vec![]; - if let Some(parent_dir) = path_relative_to_build_root.parent() { - paths_to_invalidate.push(parent_dir.to_path_buf()); - } - paths_to_invalidate.push(path_relative_to_build_root); - paths_to_invalidate - }) - .flatten() - .collect(); - // Only invalidate stuff if we have paths that weren't filtered out by gitignore. - if !paths.is_empty() { - debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); - InvalidationWatcher::invalidate(&graph, &paths, "notify"); - }; - } - Ok(Err(err)) => { - if let notify::ErrorKind::PathNotFound = err.kind { - warn!("Path(s) did not exist: {:?}", err.paths); - continue; - } else { - error!("File watcher failing with: {}", err); - break; - } - } - Err(RecvTimeoutError::Timeout) => continue, - Err(RecvTimeoutError::Disconnected) => { - // The Watcher is gone: we're done. - break; - } - }; - } - debug!("Watch thread exiting."); - // Signal that we're exiting (which we would also do by just dropping the channel). - let _ = thread_liveness_sender.send(()); - }); - }; + InvalidationWatcher::start_background_thread( + graph, + ignorer, + canonical_build_root, + thread_liveness_sender, + watch_receiver, + ); Ok(InvalidationWatcher { watcher: Arc::new(Mutex::new(watcher)), @@ -164,6 +96,101 @@ impl InvalidationWatcher { }) } + // Public for testing purposes. + pub(crate) fn start_background_thread( + graph: Weak>, + ignorer: Arc, + canonical_build_root: PathBuf, + liveness_sender: crossbeam_channel::Sender<()>, + watch_receiver: Receiver>, + ) { + thread::spawn(move || { + logging::set_thread_destination(logging::Destination::Pantsd); + loop { + let event_res = watch_receiver.recv_timeout(Duration::from_millis(10)); + let graph = if let Some(g) = graph.upgrade() { + g + } else { + // The Graph has been dropped: we're done. + break; + }; + match event_res { + Ok(Ok(ev)) => { + let paths: HashSet<_> = ev + .paths + .into_iter() + .filter_map(|path| { + // relativize paths to build root. + let path_relative_to_build_root = if path.starts_with(&canonical_build_root) { + // Unwrapping is fine because we check that the path starts with + // the build root above. + path.strip_prefix(&canonical_build_root).unwrap().into() + } else { + path + }; + // To avoid having to stat paths for events we will eventually ignore we "lie" to the ignorer + // to say that no path is a directory, they could be if someone chmod's or creates a dir. + // This maintains correctness by ensuring that at worst we have false negative events, where a directory + // only glob (one that ends in `/` ) was supposed to ignore a directory path, but didn't because we claimed it was a file. That + // directory path will be used to invalidate nodes, but won't invalidate anything because its path is somewhere + // out of our purview. 
+ if ignorer.is_ignored_or_child_of_ignored_path( + &path_relative_to_build_root, + /* is_dir */ false, + ) { + None + } else { + Some(path_relative_to_build_root) + } + }) + .map(|path_relative_to_build_root| { + let mut paths_to_invalidate: Vec = vec![]; + if let Some(parent_dir) = path_relative_to_build_root.parent() { + paths_to_invalidate.push(parent_dir.to_path_buf()); + } + paths_to_invalidate.push(path_relative_to_build_root); + paths_to_invalidate + }) + .flatten() + .collect(); + // Only invalidate stuff if we have paths that weren't filtered out by gitignore. + if !paths.is_empty() { + debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); + InvalidationWatcher::invalidate(&graph, &paths, "notify"); + }; + } + Ok(Err(err)) => { + if let notify::ErrorKind::PathNotFound = err.kind { + warn!("Path(s) did not exist: {:?}", err.paths); + continue; + } else { + error!("File watcher failing with: {}", err); + break; + } + } + Err(RecvTimeoutError::Timeout) => continue, + Err(RecvTimeoutError::Disconnected) => { + // The Watcher is gone: we're done. + break; + } + }; + } + debug!("Watch thread exiting."); + // Signal that we're exiting (which we would also do by just dropping the channel). + let _ = liveness_sender.send(()); + }); + } + + pub fn is_alive(&self) -> bool { + if let Ok(()) = self.liveness.try_recv() { + // The watcher background thread set the exit condition. Return false to signal that + // the watcher is not alive. + false + } else { + true + } + } + /// /// Watch the given path non-recursively. /// diff --git a/src/rust/engine/src/watch_tests.rs b/src/rust/engine/src/watch_tests.rs index 61e4c1aba5d..14b4e938bb8 100644 --- a/src/rust/engine/src/watch_tests.rs +++ b/src/rust/engine/src/watch_tests.rs @@ -1,9 +1,11 @@ use crate::nodes::{DigestFile, NodeKey, NodeResult}; use crate::watch::InvalidationWatcher; +use crossbeam_channel; use fs::{File, GitignoreStyleExcludes}; use graph::entry::{EntryResult, EntryState, Generation, RunToken}; use graph::{test_support::TestGraph, EntryId, Graph}; use hashing::EMPTY_DIGEST; +use notify; use std::fs::create_dir; use std::path::PathBuf; use std::sync::Arc; @@ -144,3 +146,36 @@ fn ignore_file_events_matching_patterns_in_pants_ignore() { } } } + +#[test] +fn test_liveness() { + init_logger(); + let (tempdir, file_path) = setup_fs(); + let build_root = tempdir.path().to_path_buf(); + let (graph, _entry_id) = setup_graph( + file_path + .clone() + .strip_prefix(build_root.clone()) + .unwrap() + .to_path_buf(), + ); + + let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); + let (liveness_sender, liveness_receiver) = crossbeam_channel::unbounded(); + let (event_sender, event_receiver) = crossbeam_channel::unbounded(); + InvalidationWatcher::start_background_thread( + Arc::downgrade(&graph), + ignorer, + build_root, + liveness_sender, + event_receiver, + ); + event_sender + .send(Err(notify::Error::generic( + "This should kill the background thread", + ))) + .unwrap(); + assert!(liveness_receiver + .recv_timeout(Duration::from_millis(100)) + .is_ok()); +} From 6bb1801bd2815ccdf0f4fbfd4d4dafae301120c3 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Sun, 26 Apr 2020 11:48:46 -0700 Subject: [PATCH 07/15] Extract a `watch` crate. (#9635) The `watch` module directly accesses the `engine` crate's `Graph`, which makes it more challenging to test. Extract a `watch` crate which is used via an `trait Invalidatable` which is implemented for the engine's `Graph`, as well as independently in tests. 
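A minimal sketch of the seam this patch introduces, for orientation. The trait signature mirrors the one added in `watch/src/lib.rs` below; `RecordingInvalidatable` is an invented stand-in in the spirit of the diff's own `TestInvalidatable` test double:

```
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::Mutex;

// The seam the `watch` crate depends on (mirrors the trait added in watch/src/lib.rs).
pub trait Invalidatable: Send + Sync + 'static {
  fn invalidate(&self, paths: &HashSet<PathBuf>, caller: &str) -> usize;
}

// Invented test double: records which paths were invalidated, much like the
// `TestInvalidatable` added in watch/src/tests.rs.
#[derive(Default)]
struct RecordingInvalidatable {
  calls: Mutex<Vec<HashSet<PathBuf>>>,
}

impl Invalidatable for RecordingInvalidatable {
  fn invalidate(&self, paths: &HashSet<PathBuf>, _caller: &str) -> usize {
    let invalidated = paths.len();
    self.calls.lock().unwrap().push(paths.clone());
    invalidated
  }
}
```

The engine implements the same trait for its `Graph` via the `InvalidatableGraph` newtype in `src/context.rs` later in this patch, so the watch background thread never needs to know about `NodeKey`.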
[ci skip-jvm-tests] --- src/rust/engine/Cargo.lock | 25 +++- src/rust/engine/Cargo.toml | 10 +- src/rust/engine/graph/src/lib.rs | 37 ----- .../process_execution/src/local_tests.rs | 2 +- src/rust/engine/src/context.rs | 40 ++++- src/rust/engine/src/lib.rs | 4 - src/rust/engine/src/scheduler.rs | 5 +- src/rust/engine/testutil/src/lib.rs | 2 +- src/rust/engine/watch/Cargo.toml | 29 ++++ .../engine/{src/watch.rs => watch/src/lib.rs} | 75 +++++----- .../watch_tests.rs => watch/src/tests.rs} | 141 ++++++++---------- 11 files changed, 197 insertions(+), 173 deletions(-) create mode 100644 src/rust/engine/watch/Cargo.toml rename src/rust/engine/{src/watch.rs => watch/src/lib.rs} (83%) rename src/rust/engine/{src/watch_tests.rs => watch/src/tests.rs} (52%) diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 10961b8add7..44cc5affee8 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -647,13 +647,11 @@ dependencies = [ "boxfuture 0.0.1", "bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)", "concrete_time 0.0.1", - "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "graph 0.0.1", "hashing 0.0.1", "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -661,7 +659,6 @@ dependencies = [ "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)", "num_cpus 1.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_enum 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -680,6 +677,7 @@ dependencies = [ "ui 0.0.1", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", + "watch 0.0.1", "workunit_store 0.0.1", ] @@ -3501,6 +3499,27 @@ name = "wasm-bindgen-shared" version = "0.2.62" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "watch" +version = "0.0.1" +dependencies = [ + "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "fs 0.0.1", + "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", + "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", + "futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", + "graph 0.0.1", + "hashing 0.0.1", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "logging 0.0.1", + "notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)", + "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", + "task_executor 0.0.1", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "testutil 0.0.1", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "web-sys" version = "0.3.39" diff --git 
a/src/rust/engine/Cargo.toml b/src/rust/engine/Cargo.toml index 4b32107f994..6df346f3dc4 100644 --- a/src/rust/engine/Cargo.toml +++ b/src/rust/engine/Cargo.toml @@ -42,6 +42,7 @@ members = [ "testutil/local_cas", "testutil/local_execution_server", "ui", + "watch", "workunit_store" ] @@ -77,6 +78,7 @@ default-members = [ "testutil/local_cas", "testutil/local_execution_server", "ui", + "watch", "workunit_store" ] @@ -86,12 +88,10 @@ async-trait = "0.1" boxfuture = { path = "boxfuture" } bytes = "0.4.5" concrete_time = { path = "concrete_time" } -crossbeam-channel = "0.3" fnv = "1.0.5" fs = { path = "fs" } futures01 = { package = "futures", version = "0.1" } futures = { version = "0.3", features = ["compat"] } -futures-locks = "0.3.0" graph = { path = "graph" } hashing = { path = "hashing" } indexmap = "1.0.2" @@ -101,11 +101,6 @@ log = "0.4" logging = { path = "logging" } num_cpus = "1" num_enum = "0.4" -# notify is currently an experimental API, we are pinning to https://docs.rs/notify/5.0.0-pre.1/notify/ -# because the latest prerelease at time of writing has removed the debounced watcher which we would like to use. -# The author suggests they will add the debounced watcher back into the stable 5.0.0 release. When that happens -# we can move to it. -notify = { git = "https://github.com/notify-rs/notify", rev = "fba00891d9105e2f581c69fbe415a58cb7966fdd" } parking_lot = "0.6" process_execution = { path = "process_execution" } rand = "0.6" @@ -121,6 +116,7 @@ tokio = { version = "0.2", features = ["rt-threaded"] } ui = { path = "ui" } url = "2.1" uuid = { version = "0.7", features = ["v4"] } +watch = { path = "watch" } workunit_store = { path = "workunit_store" } [dev-dependencies] diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs index f2c4e6eea55..9103c1f4c90 100644 --- a/src/rust/engine/graph/src/lib.rs +++ b/src/rust/engine/graph/src/lib.rs @@ -1055,43 +1055,6 @@ impl Graph { } } -// This module provides a trait which contains functions that -// should only be used in tests. A user must explicitly import the trait -// to use the extra test functions, and they should only be imported into -// test modules. -pub mod test_support { - use super::{EntryId, EntryState, Graph, Node}; - pub trait TestGraph { - fn set_fixture_entry_state_for_id(&self, id: EntryId, state: EntryState); - fn add_fixture_entry(&self, node: N) -> EntryId; - fn entry_state(&self, id: EntryId) -> &str; - } - impl TestGraph for Graph { - fn set_fixture_entry_state_for_id(&self, id: EntryId, state: EntryState) { - let mut inner = self.inner.lock(); - let entry = inner.entry_for_id_mut(id).unwrap(); - let mut entry_state = entry.state.lock(); - *entry_state = state; - } - - fn add_fixture_entry(&self, node: N) -> EntryId { - let mut inner = self.inner.lock(); - inner.ensure_entry(node) - } - - fn entry_state(&self, id: EntryId) -> &str { - let mut inner = self.inner.lock(); - let entry = inner.entry_for_id_mut(id).unwrap(); - let entry_state = entry.state.lock(); - match *entry_state { - EntryState::Completed { .. } => "completed", - EntryState::Running { .. } => "running", - EntryState::NotStarted { .. } => "not started", - } - } - } -} - /// /// Represents the state of a particular walk through a Graph. Implements Iterator and has the same /// lifetime as the Graph itself. 
diff --git a/src/rust/engine/process_execution/src/local_tests.rs b/src/rust/engine/process_execution/src/local_tests.rs index 33f4bcedc7e..72771d43c34 100644 --- a/src/rust/engine/process_execution/src/local_tests.rs +++ b/src/rust/engine/process_execution/src/local_tests.rs @@ -795,7 +795,7 @@ async fn run_command_locally_in_dir_with_cleanup( dir: PathBuf, ) -> Result { run_command_locally_in_dir(req, dir, true, None, None).await - +} async fn run_command_locally_in_dir( req: ExecuteProcessRequest, diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index 10ebe3affd2..96af1d9c207 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -2,8 +2,9 @@ // Licensed under the Apache License, Version 2.0 (see LICENSE). use std; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashSet}; use std::convert::{Into, TryInto}; +use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; @@ -17,11 +18,12 @@ use crate::nodes::{NodeKey, WrappedNode}; use crate::scheduler::Session; use crate::tasks::{Rule, Tasks}; use crate::types::Types; -use crate::watch::InvalidationWatcher; + use boxfuture::{BoxFuture, Boxable}; use core::clone::Clone; use fs::{safe_create_dir_all_ioerror, GitignoreStyleExcludes, PosixFS}; -use graph::{EntryId, Graph, NodeContext}; +use graph::{EntryId, Graph, InvalidationResult, NodeContext}; +use log::info; use process_execution::{ self, speculate::SpeculatingCommandRunner, BoundedCommandRunner, ExecuteProcessRequestMetadata, PlatformConstraint, @@ -32,6 +34,7 @@ use rule_graph::RuleGraph; use sharded_lmdb::ShardedLmdb; use store::Store; use tokio::runtime::{Builder, Runtime}; +use watch::{Invalidatable, InvalidationWatcher}; const GIGABYTES: usize = 1024 * 1024 * 1024; @@ -44,7 +47,7 @@ const GIGABYTES: usize = 1024 * 1024 * 1024; /// https://github.com/tokio-rs/tokio/issues/369 is resolved. 
/// pub struct Core { - pub graph: Arc>, + pub graph: Arc, pub tasks: Tasks, pub rule_graph: RuleGraph, pub types: Types, @@ -235,7 +238,7 @@ impl Core { metadata: process_execution_metadata, }) } - let graph = Arc::new(Graph::new()); + let graph = Arc::new(InvalidatableGraph(Graph::new())); let http_client = reqwest::Client::new(); let rule_graph = RuleGraph::new(tasks.as_map(), root_subject_types); @@ -279,6 +282,33 @@ impl Core { } } +pub struct InvalidatableGraph(Graph); + +impl Invalidatable for InvalidatableGraph { + fn invalidate(&self, paths: &HashSet, caller: &str) -> usize { + let InvalidationResult { cleared, dirtied } = self.invalidate_from_roots(move |node| { + if let Some(fs_subject) = node.fs_subject() { + paths.contains(fs_subject) + } else { + false + } + }); + info!( + "{} invalidation: cleared {} and dirtied {} nodes for: {:?}", + caller, cleared, dirtied, paths + ); + cleared + dirtied + } +} + +impl Deref for InvalidatableGraph { + type Target = Graph; + + fn deref(&self) -> &Graph { + &self.0 + } +} + #[derive(Clone)] pub struct Context { entry_id: Option, diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index 0d6efd13dd4..90cb5465c49 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -40,7 +40,6 @@ mod scheduler; mod selectors; mod tasks; mod types; -mod watch; pub use crate::context::Core; pub use crate::core::{Function, Key, Params, TypeId, Value}; @@ -50,6 +49,3 @@ pub use crate::scheduler::{ }; pub use crate::tasks::{Rule, Tasks}; pub use crate::types::Types; - -#[cfg(test)] -mod watch_tests; diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index 3b9b202808c..6b4b49bb5c5 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -14,7 +14,7 @@ use futures01::future::{self, Future}; use crate::context::{Context, Core}; use crate::core::{Failure, Params, TypeId, Value}; use crate::nodes::{NodeKey, Select, Tracer, Visualizer}; -use crate::watch::InvalidationWatcher; + use graph::{Graph, InvalidationResult}; use hashing; use indexmap::IndexMap; @@ -22,6 +22,7 @@ use log::{debug, info, warn}; use logging::logger::LOGGER; use parking_lot::Mutex; use ui::{EngineDisplay, KeyboardCommand}; +use watch::Invalidatable; use workunit_store::WorkUnitStore; pub enum ExecutionTermination { @@ -229,7 +230,7 @@ impl Scheduler { /// Invalidate the invalidation roots represented by the given Paths. 
/// pub fn invalidate(&self, paths: &HashSet) -> usize { - InvalidationWatcher::invalidate(&self.core.graph, paths, "watchman") + self.core.graph.invalidate(paths, "watchman") } /// diff --git a/src/rust/engine/testutil/src/lib.rs b/src/rust/engine/testutil/src/lib.rs index 342a5b0c24a..5a0d712d9f2 100644 --- a/src/rust/engine/testutil/src/lib.rs +++ b/src/rust/engine/testutil/src/lib.rs @@ -54,7 +54,7 @@ pub fn make_file(path: &Path, contents: &[u8], mode: u32) { file.set_permissions(permissions).unwrap(); } -pub fn append_to_exisiting_file(path: &Path, contents: &[u8]) { +pub fn append_to_existing_file(path: &Path, contents: &[u8]) { let mut file = std::fs::OpenOptions::new().write(true).open(&path).unwrap(); file.write_all(contents).unwrap(); } diff --git a/src/rust/engine/watch/Cargo.toml b/src/rust/engine/watch/Cargo.toml new file mode 100644 index 00000000000..0b4b43b5799 --- /dev/null +++ b/src/rust/engine/watch/Cargo.toml @@ -0,0 +1,29 @@ +[package] +version = "0.0.1" +edition = "2018" +name = "watch" +authors = [ "Pants Build " ] +publish = false + +[dependencies] +crossbeam-channel = "0.3" +fs = { path = "../fs" } +futures = { version = "0.3", features = ["compat"] } +futures-locks = "0.3.0" +futures01 = { package = "futures", version = "0.1" } +graph = { path = "../graph" } +log = "0.4" +logging = { path = "../logging" } +# notify is currently an experimental API, we are pinning to https://docs.rs/notify/5.0.0-pre.1/notify/ +# because the latest prerelease at time of writing has removed the debounced watcher which we would like to use. +# The author suggests they will add the debounced watcher back into the stable 5.0.0 release. When that happens +# we can move to it. +notify = { git = "https://github.com/notify-rs/notify", rev = "fba00891d9105e2f581c69fbe415a58cb7966fdd" } +task_executor = { path = "../task_executor" } + +[dev-dependencies] +hashing = { path = "../hashing" } +parking_lot = "0.6" +tempfile = "3" +testutil = { path = "../testutil" } +tokio = { version = "0.2", features = ["rt-core", "macros"] } diff --git a/src/rust/engine/src/watch.rs b/src/rust/engine/watch/src/lib.rs similarity index 83% rename from src/rust/engine/src/watch.rs rename to src/rust/engine/watch/src/lib.rs index 7752ae07ec0..48e65110adf 100644 --- a/src/rust/engine/src/watch.rs +++ b/src/rust/engine/watch/src/lib.rs @@ -1,6 +1,33 @@ -// Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +// Copyright 2020 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). +#![deny(warnings)] +// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source. +#![deny( + clippy::all, + clippy::default_trait_access, + clippy::expl_impl_clone_on_copy, + clippy::if_not_else, + clippy::needless_continue, + clippy::unseparated_literal_suffix, + clippy::used_underscore_binding +)] +// It is often more clear to show that nothing is being moved. +#![allow(clippy::match_ref_pats)] +// Subjective style. +#![allow( + clippy::len_without_is_empty, + clippy::redundant_field_names, + clippy::too_many_arguments +)] +// Default isn't as big a deal as people seem to think it is. 
+#![allow(clippy::new_without_default, clippy::new_ret_no_self)] +// Arc can be more clear than needing to grok Orderings: +#![allow(clippy::mutex_atomic)] + +#[cfg(test)] +mod tests; + use std::collections::HashSet; use std::path::PathBuf; use std::sync::{Arc, Weak}; @@ -10,16 +37,13 @@ use std::time::Duration; use crossbeam_channel::{self, Receiver, RecvTimeoutError, TryRecvError}; use futures::compat::Future01CompatExt; use futures_locks::Mutex; -use log::{debug, error, info, warn}; +use log::{debug, error, warn}; use notify::{RecommendedWatcher, RecursiveMode, Watcher}; use task_executor::Executor; use fs::GitignoreStyleExcludes; -use graph::{Graph, InvalidationResult}; use logging; -use crate::nodes::NodeKey; - /// /// An InvalidationWatcher maintains a Thread that receives events from a notify Watcher. /// @@ -29,14 +53,6 @@ use crate::nodes::NodeKey; /// /// TODO: Need the above polling /// -/// TODO: To simplify testing the InvalidationWatcher we could create a trait which -/// has an `invalidate_from_roots` method and impl it on the Graph. Then we could make the InvalidationWatcher -/// take an argument that implements the trait. -/// Then we wouldn't have to mock out a Graph object in watch_tests.rs. This will probably -/// only be possible when we remove watchman invalidation, when the one code path for invaldation will be -/// the notify background thread. -/// Potential impl here: https://github.com/pantsbuild/pants/pull/9318#discussion_r396005978 -/// pub struct InvalidationWatcher { watcher: Arc>, executor: Executor, @@ -45,8 +61,8 @@ pub struct InvalidationWatcher { } impl InvalidationWatcher { - pub fn new( - graph: Weak>, + pub fn new( + invalidatable: Weak, executor: Executor, build_root: PathBuf, ignorer: Arc, @@ -81,7 +97,7 @@ impl InvalidationWatcher { } InvalidationWatcher::start_background_thread( - graph, + invalidatable, ignorer, canonical_build_root, thread_liveness_sender, @@ -97,8 +113,8 @@ impl InvalidationWatcher { } // Public for testing purposes. - pub(crate) fn start_background_thread( - graph: Weak>, + pub(crate) fn start_background_thread( + invalidatable: Weak, ignorer: Arc, canonical_build_root: PathBuf, liveness_sender: crossbeam_channel::Sender<()>, @@ -108,10 +124,10 @@ impl InvalidationWatcher { logging::set_thread_destination(logging::Destination::Pantsd); loop { let event_res = watch_receiver.recv_timeout(Duration::from_millis(10)); - let graph = if let Some(g) = graph.upgrade() { + let invalidatable = if let Some(g) = invalidatable.upgrade() { g } else { - // The Graph has been dropped: we're done. + // The Invalidatable has been dropped: we're done. break; }; match event_res { @@ -156,7 +172,7 @@ impl InvalidationWatcher { // Only invalidate stuff if we have paths that weren't filtered out by gitignore. 
if !paths.is_empty() { debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); - InvalidationWatcher::invalidate(&graph, &paths, "notify"); + invalidatable.invalidate(&paths, "notify"); }; } Ok(Err(err)) => { @@ -228,19 +244,8 @@ impl InvalidationWatcher { Err(TryRecvError::Empty) => true, } } +} - pub fn invalidate(graph: &Graph, paths: &HashSet, caller: &str) -> usize { - let InvalidationResult { cleared, dirtied } = graph.invalidate_from_roots(move |node| { - if let Some(fs_subject) = node.fs_subject() { - paths.contains(fs_subject) - } else { - false - } - }); - info!( - "{} invalidation: cleared {} and dirtied {} nodes for: {:?}", - caller, cleared, dirtied, paths - ); - cleared + dirtied - } +pub trait Invalidatable: Send + Sync + 'static { + fn invalidate(&self, paths: &HashSet, caller: &str) -> usize; } diff --git a/src/rust/engine/src/watch_tests.rs b/src/rust/engine/watch/src/tests.rs similarity index 52% rename from src/rust/engine/src/watch_tests.rs rename to src/rust/engine/watch/src/tests.rs index 14b4e938bb8..7655253a974 100644 --- a/src/rust/engine/src/watch_tests.rs +++ b/src/rust/engine/watch/src/tests.rs @@ -1,25 +1,18 @@ -use crate::nodes::{DigestFile, NodeKey, NodeResult}; -use crate::watch::InvalidationWatcher; -use crossbeam_channel; -use fs::{File, GitignoreStyleExcludes}; -use graph::entry::{EntryResult, EntryState, Generation, RunToken}; -use graph::{test_support::TestGraph, EntryId, Graph}; -use hashing::EMPTY_DIGEST; -use notify; +use crate::{Invalidatable, InvalidationWatcher}; + +use std::collections::HashSet; use std::fs::create_dir; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::sync::Arc; use std::thread::sleep; use std::time::Duration; -use task_executor::Executor; -use testutil::{append_to_exisiting_file, make_file}; -fn init_logger() -> () { - match env_logger::try_init() { - Ok(()) => (), - Err(_) => (), - } -} +use crossbeam_channel; +use fs::GitignoreStyleExcludes; +use notify; +use parking_lot::Mutex; +use task_executor::Executor; +use testutil::{append_to_existing_file, make_file}; fn setup_fs() -> (tempfile::TempDir, PathBuf) { // setup a build_root with a file in it to watch. @@ -32,35 +25,16 @@ fn setup_fs() -> (tempfile::TempDir, PathBuf) { (tempdir, file_path) } -fn setup_graph(fs_subject: PathBuf) -> (Arc>, EntryId) { - let node = NodeKey::DigestFile(DigestFile(File { - path: fs_subject, - is_executable: false, - })); - let graph = Arc::new(Graph::new()); - let entry_id = graph.add_fixture_entry(node); - let completed_state = EntryState::Completed { - run_token: RunToken::initial(), - generation: Generation::initial(), - result: EntryResult::Clean(Ok(NodeResult::Digest(EMPTY_DIGEST))), - dep_generations: vec![], - }; - graph.set_fixture_entry_state_for_id(entry_id, completed_state); - // Assert the nodes initial state is completed - assert!(graph.entry_state(entry_id) == "completed"); - (graph, entry_id) -} - fn setup_watch( ignorer: Arc, - graph: Arc>, + invalidatable: Arc, build_root: PathBuf, file_path: PathBuf, ) -> InvalidationWatcher { let mut rt = tokio::runtime::Runtime::new().unwrap(); let executor = Executor::new(rt.handle().clone()); let watcher = InvalidationWatcher::new( - Arc::downgrade(&graph), + Arc::downgrade(&invalidatable), executor, build_root, ignorer, @@ -73,35 +47,34 @@ fn setup_watch( #[test] fn receive_watch_event_on_file_change() { - // set up a node in the graph to check that it gets cleared by the invalidation watcher. // Instantiate a watcher and watch the file in question. 
- init_logger(); let (tempdir, file_path) = setup_fs(); let build_root = tempdir.path().to_path_buf(); - let (graph, entry_id) = setup_graph( - file_path - .clone() - .strip_prefix(build_root.clone()) - .unwrap() - .to_path_buf(), - ); + let file_path_rel = file_path + .clone() + .strip_prefix(build_root.clone()) + .unwrap() + .to_path_buf(); + let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); let _watcher = setup_watch( ignorer, - graph.clone(), + invalidatable.clone(), build_root.clone(), file_path.clone(), ); + // Update the content of the file being watched. let new_content = "stnetnoc".as_bytes().to_vec(); - append_to_exisiting_file(&file_path, &new_content); - // Wait for watcher background thread to trigger a node invalidation, - // by checking the entry state for the node. It will be reset to EntryState::NotStarted - // when Graph::invalidate_from_roots calls clear on the node. + append_to_existing_file(&file_path, &new_content); + + // Wait for the watcher background thread to trigger a node invalidation, which will cause the + // new salt to be used. for _ in 0..10 { sleep(Duration::from_millis(100)); - if graph.entry_state(entry_id) == "not started" { + if invalidatable.was_invalidated(&file_path_rel) { + // Observed invalidation. return; } } @@ -114,34 +87,32 @@ fn receive_watch_event_on_file_change() { #[test] fn ignore_file_events_matching_patterns_in_pants_ignore() { - init_logger(); let (tempdir, file_path) = setup_fs(); let build_root = tempdir.path().to_path_buf(); - let (graph, entry_id) = setup_graph( - file_path - .clone() - .strip_prefix(build_root.clone()) - .unwrap() - .to_path_buf(), - ); + let file_path_rel = file_path + .clone() + .strip_prefix(build_root.clone()) + .unwrap() + .to_path_buf(); + let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&["/foo".to_string()]).unwrap(); let _watcher = setup_watch( ignorer, - graph.clone(), + invalidatable.clone(), build_root.clone(), file_path.clone(), ); + // Update the content of the file being watched. let new_content = "stnetnoc".as_bytes().to_vec(); - append_to_exisiting_file(&file_path, &new_content); - // Wait for watcher background thread to trigger a node invalidation, - // by checking the entry state for the node. It will be reset to EntryState::NotStarted - // when Graph::invalidate_from_roots calls clear on the node. + append_to_existing_file(&file_path, &new_content); + + // Wait for the watcher background thread to trigger a node invalidation, which would cause the + // new salt to be used. for _ in 0..10 { sleep(Duration::from_millis(100)); - // If the state changed the node was invalidated so fail. 
- if graph.entry_state(entry_id) != "completed" { + if invalidatable.was_invalidated(&file_path_rel) { assert!(false, "Node was invalidated even though it was ignored") } } @@ -149,22 +120,15 @@ fn ignore_file_events_matching_patterns_in_pants_ignore() { #[test] fn test_liveness() { - init_logger(); - let (tempdir, file_path) = setup_fs(); + let (tempdir, _) = setup_fs(); let build_root = tempdir.path().to_path_buf(); - let (graph, _entry_id) = setup_graph( - file_path - .clone() - .strip_prefix(build_root.clone()) - .unwrap() - .to_path_buf(), - ); + let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); let (liveness_sender, liveness_receiver) = crossbeam_channel::unbounded(); let (event_sender, event_receiver) = crossbeam_channel::unbounded(); InvalidationWatcher::start_background_thread( - Arc::downgrade(&graph), + Arc::downgrade(&invalidatable), ignorer, build_root, liveness_sender, @@ -179,3 +143,24 @@ fn test_liveness() { .recv_timeout(Duration::from_millis(100)) .is_ok()); } + +#[derive(Default)] +struct TestInvalidatable { + pub calls: Mutex>>, +} + +impl TestInvalidatable { + fn was_invalidated(&self, path: &Path) -> bool { + let calls = self.calls.lock(); + calls.iter().any(|call| call.contains(path)) + } +} + +impl Invalidatable for TestInvalidatable { + fn invalidate(&self, paths: &HashSet, _caller: &str) -> usize { + let invalidated = paths.len(); + let mut calls = self.calls.lock(); + calls.push(paths.clone()); + invalidated + } +} From 698b82a0d507e3be4b95e192f6a4bb4c06198c49 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Fri, 1 May 2020 12:26:18 -0700 Subject: [PATCH 08/15] Simplify Scheduler::execute and unify Graph retry (#9674) Both the `Graph` and the `Scheduler` implemented retry for requested Nodes, but the `Scheduler` implementation was pre-async-await and much more complicated. Unify the retry implementations into `Graph::get` (either roots or uncacheable nodes are retried), and simplify the `Scheduler`'s loop down to: ``` let maybe_display_handle = Self::maybe_display_initialize(&session); let result = loop { if let Ok(res) = receiver.recv_timeout(refresh_interval) { break Ok(res); } else if let Err(e) = Self::maybe_display_render(&session, &mut tasks) { break Err(e); } }; Self::maybe_display_teardown(session, maybe_display_handle); result ``` A single, more modern retry implementation (thanks @hrfuller!), and a cleaner `Scheduler::execute` loop. --- build-support/githooks/pre-commit | 4 +- src/rust/engine/Cargo.lock | 12 +- src/rust/engine/graph/src/lib.rs | 159 ++++++++++++------------ src/rust/engine/graph/src/tests.rs | 12 +- src/rust/engine/src/context.rs | 10 +- src/rust/engine/src/scheduler.rs | 189 ++++++++++++++--------------- 6 files changed, 181 insertions(+), 205 deletions(-) diff --git a/build-support/githooks/pre-commit b/build-support/githooks/pre-commit index e0149cb2091..7bd37ad9144 100755 --- a/build-support/githooks/pre-commit +++ b/build-support/githooks/pre-commit @@ -66,8 +66,6 @@ if git rev-parse --verify "${MERGE_BASE}" &>/dev/null; then ./build-support/bin/mypy.py || exit 1 if git diff "${MERGE_BASE}" --name-only | grep '\.rs$' > /dev/null; then - echo "* Checking formatting of Rust files" - ./build-support/bin/check_rust_formatting.sh || exit 1 # Clippy happens on a different Travis CI shard because of separate caching concerns. 
# The TRAVIS env var is documented here: # https://docs.travis-ci.com/user/environment-variables/#default-environment-variables @@ -75,6 +73,8 @@ if git rev-parse --verify "${MERGE_BASE}" &>/dev/null; then echo "* Running cargo clippy" ./build-support/bin/check_clippy.sh || exit 1 fi + echo "* Checking formatting of Rust files" + ./build-support/bin/check_rust_formatting.sh || exit 1 echo "* Checking Rust target headers" build-support/bin/check_rust_target_headers.sh || exit 1 fi diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 44cc5affee8..f854b8d37ae 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -356,7 +356,7 @@ dependencies = [ "strsim 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "textwrap 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-width 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", + "vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1155,7 +1155,7 @@ dependencies = [ [[package]] name = "h2" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1264,7 +1264,7 @@ dependencies = [ "futures-channel 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures-util 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "h2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", + "h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "http 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "http-body 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "httparse 1.3.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3396,7 +3396,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "vec_map" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -3734,7 +3734,7 @@ dependencies = [ "checksum grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a63ccc27b0099347d2bea2c3d0f1c79c018a13cfd08b814a1992e341b645d5e1" "checksum grpcio-sys 0.2.3 (git+https://github.com/pantsbuild/grpc-rs.git?rev=b582ef3dc4e8c7289093c8febff8dadf0997b532)" = "" "checksum h2 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" -"checksum h2 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "377038bf3c89d18d6ca1431e7a5027194fbd724ca10592b9487ede5e8e144f42" +"checksum h2 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "79b7246d7e4b979c03fa093da39cfb3617a96bbeee6310af63991668d7e843ff" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum hermit-abi 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "61565ff7aaace3525556587bd2dc31d4a07071957be715e63ce7b1eccf51a8f4" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" @@ -3956,7 +3956,7 @@ dependencies = [ "checksum uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = 
"78c590b5bd79ed10aad8fb75f078a59d8db445af6c743e55c4a53227fc01c13f" "checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a" "checksum vcpkg 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3fc439f2794e98976c88a2a2dafce96b930fe8010b0a256b3c2199a773933168" -"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" +"checksum vec_map 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" "checksum version_check 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" "checksum walkdir 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c66c0b9792f0a765345452775f3adbd28dde9d33f30d13e5dcc5ae17cf6f3780" "checksum walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs index 9103c1f4c90..bd66937f8d9 100644 --- a/src/rust/engine/graph/src/lib.rs +++ b/src/rust/engine/graph/src/lib.rs @@ -670,90 +670,101 @@ impl Graph { } /// - /// In the context of the given src Node, declare a dependency on the given dst Node and - /// begin its execution if it has not already started. + /// Request the given dst Node, optionally in the context of the given src Node. + /// + /// If there is no src Node, or the src Node is not cacheable, this method will retry for + /// invalidation until the Node completes. /// pub fn get( &self, - src_id: EntryId, + src_id: Option, context: &N::Context, dst_node: N, ) -> BoxFuture { - let maybe_entries_and_id = { + // Compute information about the dst under the Graph lock, and then release it. + let (dst_retry, mut entry, entry_id) = { // Get or create the destination, and then insert the dep and return its state. let mut inner = self.inner.lock(); if inner.draining { - None - } else { - let dst_id = { - // TODO: doing cycle detection under the lock... unfortunate, but probably unavoidable - // without a much more complicated algorithm. - let potential_dst_id = inner.ensure_entry(dst_node); - if let Some(cycle_path) = - Self::report_cycle(src_id, potential_dst_id, &mut inner, context) - { - // Cyclic dependency: render an error. - let path_strs = cycle_path - .into_iter() - .map(|e| e.node().to_string()) - .collect(); - return future::err(N::Error::cyclic(path_strs)).to_boxed(); - } else { - // Valid dependency. - trace!( - "Adding dependency from {:?} to {:?}", - inner.entry_for_id(src_id).unwrap().node(), - inner.entry_for_id(potential_dst_id).unwrap().node() - ); - potential_dst_id - } - }; + return future::err(N::Error::invalidated()).to_boxed(); + } + + // TODO: doing cycle detection under the lock... unfortunate, but probably unavoidable + // without a much more complicated algorithm. + let dst_id = inner.ensure_entry(dst_node); + let dst_retry = if let Some(src_id) = src_id { + if let Some(cycle_path) = Self::report_cycle(src_id, dst_id, &mut inner, context) { + // Cyclic dependency: render an error. + let path_strs = cycle_path + .into_iter() + .map(|e| e.node().to_string()) + .collect(); + return future::err(N::Error::cyclic(path_strs)).to_boxed(); + } + + // Valid dependency. 
+ trace!( + "Adding dependency from {:?} to {:?}", + inner.entry_for_id(src_id).unwrap().node(), + inner.entry_for_id(dst_id).unwrap().node() + ); // All edges get a weight of 1.0 so that we can Bellman-Ford over the graph, treating each // edge as having equal weight. inner.pg.add_edge(src_id, dst_id, 1.0); - let src_entry = inner.entry_for_id(src_id).cloned().unwrap(); - inner - .entry_for_id(dst_id) - .cloned() - .map(|dst_entry| (src_entry, dst_entry, dst_id)) - } - }; - // Declare the dep, and return the state of the destination. - if let Some((src_entry, mut entry, entry_id)) = maybe_entries_and_id { - if src_entry.node().cacheable() { - entry.get(context, entry_id).map(|(res, _)| res).to_boxed() + // We can retry the dst Node if the src Node is not cacheable. If the src is not cacheable, + // it only be allowed to run once, and so Node invalidation does not pass through it. + !inner.entry_for_id(src_id).unwrap().node().cacheable() } else { - // Src node is uncacheable, which means it is side-effecting, and can only be allowed to run once. - // We retry its dependencies a number of times here in case a side effect of the Node invalidated - // some of its dependencies, or another (external) process causes invalidation. - let context2 = context.clone(); + // Otherwise, this is an external request: always retry. + trace!( + "Requesting node {:?}", + inner.entry_for_id(dst_id).unwrap().node() + ); + true + }; + + let dst_entry = inner.entry_for_id(dst_id).cloned().unwrap(); + (dst_retry, dst_entry, dst_id) + }; + + // Return the state of the destination. + if dst_retry { + // Retry the dst a number of times to handle Node invalidation. + let context = context.clone(); + let uncached_node = async move { let mut counter: usize = 8; - let uncached_node = async move { - loop { - counter -= 1; - if counter == 0 { - break Err(N::Error::exhausted()); - } - let dep_res = entry - .get(&context2, entry_id) - .map(|(res, _)| res) - .compat() - .await; - match dep_res { - Ok(r) => break Ok(r), - Err(err) if err == N::Error::invalidated() => continue, - Err(other_err) => break Err(other_err), - } + loop { + counter -= 1; + if counter == 0 { + break Err(N::Error::exhausted()); } - }; - uncached_node.boxed().compat().to_boxed() - } + let dep_res = entry + .get(&context, entry_id) + .map(|(res, _)| res) + .compat() + .await; + match dep_res { + Ok(r) => break Ok(r), + Err(err) if err == N::Error::invalidated() => continue, + Err(other_err) => break Err(other_err), + } + } + }; + uncached_node.boxed().compat().to_boxed() } else { - future::err(N::Error::invalidated()).to_boxed() + // Not retriable. + entry.get(context, entry_id).map(|(res, _)| res).to_boxed() } } + /// + /// Return the value of the given Node. Shorthand for `self.get(None, context, node)`. + /// + pub fn create(&self, node: N, context: &N::Context) -> BoxFuture { + self.get(None, context, node) + } + fn report_cycle( src_id: EntryId, potential_dst_id: EntryId, @@ -820,26 +831,6 @@ impl Graph { self.inner.lock().critical_path(roots, duration) } - /// - /// Create the given Node if it does not already exist. 
- /// - pub fn create(&self, node: N, context: &N::Context) -> BoxFuture { - let maybe_entry_and_id = { - let mut inner = self.inner.lock(); - if inner.draining { - None - } else { - let id = inner.ensure_entry(node); - inner.entry_for_id(id).cloned().map(|entry| (entry, id)) - } - }; - if let Some((mut entry, entry_id)) = maybe_entry_and_id { - entry.get(context, entry_id).map(|(res, _)| res).to_boxed() - } else { - future::err(N::Error::invalidated()).to_boxed() - } - } - /// /// Gets the generations of the dependencies of the given EntryId, (re)computing or cleaning /// them first if necessary. diff --git a/src/rust/engine/graph/src/tests.rs b/src/rust/engine/graph/src/tests.rs index df56f8cd420..4438bfa95a7 100644 --- a/src/rust/engine/graph/src/tests.rs +++ b/src/rust/engine/graph/src/tests.rs @@ -307,7 +307,7 @@ fn exhaust_uncacheable_retries() { graph2.invalidate_from_roots(|&TNode(n, _)| n == 0); }); let (assertion, subject) = match graph.create(TNode::new(2), &context).wait() { - Err(TError::Throw) => (true, None), + Err(TError::Exhausted) => (true, None), Err(e) => (false, Some(Err(e))), other => (false, Some(other)), }; @@ -315,7 +315,7 @@ fn exhaust_uncacheable_retries() { assert!( assertion, "expected {:?} found {:?}", - Err::<(), TError>(TError::Throw), + Err::<(), TError>(TError::Exhausted), subject ); } @@ -350,7 +350,7 @@ fn drain_and_resume() { // drain. assert_eq!( graph.create(TNode::new(2), &context).wait(), - Err(TError::Invalidated), + Err(TError::Exhausted), ); // Unmark the Graph draining, and try again: we expect the `Invalidated` result we saw before @@ -730,7 +730,7 @@ impl TContext { } fn get(&self, dst: TNode) -> BoxFuture, TError> { - self.graph.get(self.entry_id.unwrap(), self, dst) + self.graph.get(self.entry_id, self, dst) } fn ran(&self, node: TNode) { @@ -770,8 +770,8 @@ impl TContext { #[derive(Clone, Debug, Eq, PartialEq)] enum TError { Cyclic, + Exhausted, Invalidated, - Throw, } impl NodeError for TError { fn invalidated() -> Self { @@ -779,7 +779,7 @@ impl NodeError for TError { } fn exhausted() -> Self { - TError::Throw + TError::Exhausted } fn cyclic(_path: Vec) -> Self { diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index 96af1d9c207..b173ccacbb9 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -331,12 +331,10 @@ impl Context { pub fn get(&self, node: N) -> BoxFuture { // TODO: Odd place for this... could do it periodically in the background? 
maybe_drop_handles(); - let result = if let Some(entry_id) = self.entry_id { - self.core.graph.get(entry_id, self, node.into()).to_boxed() - } else { - self.core.graph.create(node.into(), self).to_boxed() - }; - result + self + .core + .graph + .get(self.entry_id, self, node.into()) .map(|node_result| { node_result .try_into() diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index 6b4b49bb5c5..3432f4935ae 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -9,7 +9,8 @@ use std::sync::{mpsc, Arc}; use std::time::Duration; use futures::compat::Future01CompatExt; -use futures01::future::{self, Future}; +use futures::future::{self as future03}; +use futures01::future::Future; use crate::context::{Context, Core}; use crate::core::{Failure, Params, TypeId, Value}; @@ -22,6 +23,7 @@ use log::{debug, info, warn}; use logging::logger::LOGGER; use parking_lot::Mutex; use ui::{EngineDisplay, KeyboardCommand}; +use uuid::Uuid; use watch::Invalidatable; use workunit_store::WorkUnitStore; @@ -278,64 +280,35 @@ impl Scheduler { } /// - /// Attempts to complete all of the given roots, retrying the entire set (up to `count` - /// times) if any of them fail with `Failure::Invalidated`. Sends the result on the given - /// mpsc Sender, which allows the caller to poll a channel for the result without blocking - /// uninterruptibly on a Future. - /// - /// In common usage, graph entries won't be repeatedly invalidated, but in a case where they - /// were (say by an automated process changing files under pants), we'd want to eventually - /// give up. + /// Attempts to complete all of the given roots, and send the result on the given mpsc Sender, + /// which allows the caller to poll a channel for the result without blocking uninterruptibly + /// on a Future. /// fn execute_helper( context: Context, sender: mpsc::Sender>>, roots: Vec, - count: usize, ) { let core = context.core.clone(); - // Attempt all roots in parallel, failing fast to retry for `Invalidated`. - let roots_res = future::join_all( - roots - .clone() - .into_iter() - .map(|root| { - context - .core - .graph - .create(root.clone().into(), &context) - .then::<_, Result, Failure>>(move |r| { - match r { - Err(Failure::Invalidated) if count > 0 => { - // A node was invalidated: fail quickly so that all roots can be retried. - Err(Failure::Invalidated) - } - other => { - // Otherwise (if it is a success, some other type of Failure, or if we've run - // out of retries) recover to complete the join, which will cause the results to - // propagate to the user. - debug!("Root {} completed.", NodeKey::Select(Box::new(root))); - Ok(other.map(|res| { - res - .try_into() - .unwrap_or_else(|_| panic!("A Node implementation was ambiguous.")) - })) - } - } - }) - }) - .collect::>(), - ); - - // If the join failed (due to `Invalidated`, since that is the only error we propagate), retry - // the entire set of roots. 
core.executor.spawn_and_ignore(async move { - let res = roots_res.compat().await; - if let Ok(res) = res { - let _ = sender.send(res); - } else { - Scheduler::execute_helper(context, sender, roots, count - 1); - } + let res = future03::join_all( + roots + .into_iter() + .map(|root| { + context + .core + .graph + .create(root.into(), &context) + .map(|nr| { + nr.try_into() + .unwrap_or_else(|e| panic!("A Node implementation was ambiguous: {:?}", e)) + }) + .compat() + }) + .collect::>(), + ) + .await; + let _ = sender.send(res); }); } @@ -357,7 +330,7 @@ impl Scheduler { let context = Context::new(self.core.clone(), session.clone()); let (sender, receiver) = mpsc::channel(); - Scheduler::execute_helper(context, sender, request.roots.clone(), 8); + Scheduler::execute_helper(context, sender, request.roots.clone()); let roots: Vec = request .roots .clone() @@ -367,62 +340,61 @@ impl Scheduler { // This map keeps the k most relevant jobs in assigned possitions. // Keys are positions in the display (display workers) and the values are the actual jobs to print. - let mut tasks_to_display = IndexMap::new(); + let mut tasks = IndexMap::new(); let refresh_interval = Duration::from_millis(100); - Ok(match session.maybe_display() { - Some(display) => { - { - let mut display = display.lock(); - display.start(); - } - let unique_handle = LOGGER.register_engine_display(display.clone()); - - let results = loop { - if let Ok(res) = receiver.recv_timeout(refresh_interval) { - break res; - } else { - let render_result = Scheduler::display_ongoing_tasks( - &self.core.graph, - &roots, - display, - &mut tasks_to_display, - ); - match render_result { - Err(e) => warn!("{}", e), - Ok(KeyboardCommand::CtrlC) => { - info!("Exiting early in response to Ctrl-C"); - { - let mut display = display.lock(); - display.finish(); - } - return Err(ExecutionTermination::KeyboardInterrupt); - } - Ok(KeyboardCommand::None) => (), - }; - } - }; - LOGGER.deregister_engine_display(unique_handle); - { - let mut display = display.lock(); - display.finish(); - } - results + let maybe_display_handle = Self::maybe_display_initialize(&session); + let result = loop { + if let Ok(res) = receiver.recv_timeout(refresh_interval) { + break Ok(res); + } else if let Err(e) = Self::maybe_display_render( + &self.core.graph, + &roots, + &session, + &mut tasks) + { + break Err(e); } - None => loop { - if let Ok(res) = receiver.recv_timeout(refresh_interval) { - break res; - } - }, - }) + }; + Self::maybe_display_teardown(session, maybe_display_handle); + + result + } + + fn maybe_display_initialize(session: &Session) -> Option { + if let Some(display) = session.maybe_display() { + { + let mut display = display.lock(); + display.start(); + } + Some(LOGGER.register_engine_display(display.clone())) + } else { + None + } + } + + fn maybe_display_teardown(session: &Session, handle: Option) { + if let Some(handle) = handle { + LOGGER.deregister_engine_display(handle); + } + if let Some(display) = session.maybe_display() { + let mut display = display.lock(); + display.finish(); + } } - fn display_ongoing_tasks( + fn maybe_display_render( graph: &Graph, roots: &[NodeKey], - display: &Mutex, + session: &Session, tasks_to_display: &mut IndexMap, - ) -> Result { + ) -> Result<(), ExecutionTermination> { + let display = if let Some(display) = session.maybe_display() { + display + } else { + return Ok(()); + }; + // Update the graph. To do that, we iterate over heavy hitters. 
let worker_count = { @@ -462,7 +434,22 @@ impl Scheduler { for i in tasks_to_display.len()..worker_count { d.update(i.to_string(), "".to_string()); } - d.render() + + match d.render() { + Err(e) => { + warn!("{}", e); + Ok(()) + } + Ok(KeyboardCommand::CtrlC) => { + info!("Exiting early in response to Ctrl-C"); + { + let mut display = display.lock(); + display.finish(); + } + Err(ExecutionTermination::KeyboardInterrupt) + } + Ok(KeyboardCommand::None) => Ok(()), + } } } From 2106871fccbb887c86fe7435a615a58935b61af3 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Wed, 6 May 2020 16:19:21 -0700 Subject: [PATCH 09/15] Move file invalidation handling to rust (#9636) A few different kinds of file watching span the boundary between the `SchedulerService` and `FSEventService`: 1. pantsd invalidation globs - how `pantsd` detects that its implementing code or config has changed 2. pidfile - watches `pantsd`'s pidfile to ensure that the daemon exits if it loses exclusivity 3. graph invalidation - any files changing in the workspace should invalidate the engine's `Graph` 4. `--loop` - implemented directly in the `SchedulerService` Because of the semi-cyclic nature of the relationship between the `SchedulerService` and `FSEventService`, it's challenging to understand the interplay of these usecases. And, unsurprisingly, that lead to the `notify` crate implementation only satisfying one of them. The fundamental change in this PR is to add support for two new parameters to engine executions which are implemented by the `Graph`: * `poll: bool` - When `poll` is enabled, a `product_request` will wait for the requested Nodes to have changed from their last-observed values before returning. When a poll waits, an optional `poll_delay` is applied before it returns to "debounce" polls. * `timeout: Optional[Duration]` - When a `timeout` is set, a `product_request` will wait up to the given duration for the requested Node(s) to complete (including any time `poll`ing). These features are then used by: * `--loop`: uses `poll` (with a `poll_delay`, but without a `timeout`) to immediately re-run a `Goal` when its inputs have changed. * invalidation globs and pidfile watching: use `poll` (with no `poll_delay`) and `timeout` to block their `SchedulerService` thread and watch for changes to those files. The `FSEventService` and `SchedulerService` are decoupled, and each now interacts only with the `Scheduler`: `FSEventService` to push `watchman` events to the `Graph`, and the `SchedulerService` to pull invalidation information from the `Graph`. Because all events now flow through the `Graph`, the `notify` crate has reached feature parity with `watchman`. In followup changes we can remove the experimental flag, disable `watchman` (and thus the `FSEventService`) by default, and remove the dependency between `--loop` and `pantsd`. 
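To make the new knobs concrete, here is a rough, self-contained sketch of the semantics described above. It is not the engine's implementation: `ExecutionSettings`, `poll_request`, and the two closures are invented for illustration, and in the real engine the `timeout` bounds the entire request (including computation), not just the wait for a change.

```
use std::time::{Duration, Instant};

// Invented types: they model the request parameters, not the engine's actual structs.
struct ExecutionSettings {
  poll: bool,
  poll_delay: Option<Duration>,
  timeout: Option<Duration>,
}

enum Outcome<T> {
  Completed(T),
  TimedOut,
}

fn poll_request<T>(
  settings: &ExecutionSettings,
  mut has_changed: impl FnMut() -> bool, // true once the roots differ from their last-observed values.
  mut compute: impl FnMut() -> T,        // (re)computes the requested value.
) -> Outcome<T> {
  let deadline = settings.timeout.map(|t| Instant::now() + t);
  if settings.poll {
    // `poll`: wait until the requested roots have changed since the last observation...
    while !has_changed() {
      if let Some(deadline) = deadline {
        if Instant::now() >= deadline {
          return Outcome::TimedOut;
        }
      }
      // Arbitrary re-check interval, purely for the sketch.
      std::thread::sleep(Duration::from_millis(50));
    }
    // ...then apply the optional `poll_delay` to debounce before recomputing.
    if let Some(delay) = settings.poll_delay {
      std::thread::sleep(delay);
    }
  }
  Outcome::Completed(compute())
}
```

On the Python side these surface as the `poll`, `poll_delay`, and `timeout` keyword arguments added to `SchedulerSession.execution_request` and `product_request` in the `scheduler.py` diff below.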
--- pants.toml | 9 +- rust-toolchain | 2 +- src/python/pants/bin/local_pants_runner.py | 4 +- src/python/pants/engine/scheduler.py | 118 +++++-- src/python/pants/init/engine_initializer.py | 33 +- src/python/pants/init/options_initializer.py | 39 ++- src/python/pants/option/global_options.py | 2 +- src/python/pants/pantsd/pants_daemon.py | 79 +++-- .../pants/pantsd/service/fs_event_service.py | 106 +++--- .../pants/pantsd/service/scheduler_service.py | 323 +++++++----------- src/rust/engine/Cargo.lock | 34 +- src/rust/engine/engine_cffi/src/lib.rs | 78 ++++- src/rust/engine/graph/Cargo.toml | 4 +- src/rust/engine/graph/src/entry.rs | 140 +++++++- src/rust/engine/graph/src/lib.rs | 105 ++++-- src/rust/engine/graph/src/node.rs | 14 +- src/rust/engine/graph/src/tests.rs | 106 +++++- src/rust/engine/logging/src/logger.rs | 2 +- src/rust/engine/src/context.rs | 22 +- src/rust/engine/src/lib.rs | 6 +- src/rust/engine/src/scheduler.rs | 210 +++++++++--- src/rust/engine/watch/Cargo.toml | 8 +- src/rust/engine/watch/src/lib.rs | 180 +++++----- src/rust/engine/watch/src/tests.rs | 75 ++-- ...est_deferred_sources_mapper_integration.py | 13 +- .../test_prep_command_integration.py | 43 ++- .../legacy/test_goal_rule_integration.py | 19 +- .../pants_test/engine/test_build_files.py | 6 +- .../pantsd/pantsd_integration_test_base.py | 17 +- .../pantsd/service/test_fs_event_service.py | 44 +-- .../pantsd/test_pantsd_integration.py | 4 +- 31 files changed, 1124 insertions(+), 721 deletions(-) diff --git a/pants.toml b/pants.toml index 78ece7a7bb8..ce929d9f7df 100644 --- a/pants.toml +++ b/pants.toml @@ -78,8 +78,13 @@ backend_packages2 = [ "internal_backend.rules_for_testing", ] -# The pants script in this repo consumes these files to run pants -pantsd_invalidation_globs.add = ["src/python/**/*.py"] +# The invalidation globs cover the PYTHONPATH by default, but we additionally add the rust code. +pantsd_invalidation_globs.add = [ + "!*_test.py", + # NB: The `target` directory is ignored via `pants_ignore` below. + "src/rust/engine/**/*.rs", + "src/rust/engine/**/*.toml", +] # Path patterns to ignore for filesystem operations on top of the builtin patterns. pants_ignore.add = [ # venv directories under build-support. diff --git a/rust-toolchain b/rust-toolchain index 32b7211cb61..a50908ca3da 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.40.0 +1.42.0 diff --git a/src/python/pants/bin/local_pants_runner.py b/src/python/pants/bin/local_pants_runner.py index 7f41b10c9ac..d3597bbdf83 100644 --- a/src/python/pants/bin/local_pants_runner.py +++ b/src/python/pants/bin/local_pants_runner.py @@ -127,6 +127,7 @@ def _maybe_init_graph_session( ) v2_ui = options.for_global_scope().get("v2_ui", False) + use_colors = options.for_global_scope().get("colors", True) zipkin_trace_v2 = options.for_scope("reporting").zipkin_trace_v2 # TODO(#8658) This should_report_workunits flag must be set to True for # StreamingWorkunitHandler to receive WorkUnits. 
It should eventually @@ -137,7 +138,8 @@ def _maybe_init_graph_session( graph_session = graph_scheduler_helper.new_session( zipkin_trace_v2, RunTracker.global_instance().run_id, - v2_ui, + v2_ui=v2_ui, + use_colors=use_colors, should_report_workunits=stream_workunits, ) return graph_session, graph_session.scheduler_session diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 2b97715efee..4540d14e496 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -9,7 +9,7 @@ import traceback from dataclasses import dataclass from textwrap import dedent -from typing import TYPE_CHECKING, Any, Dict, Tuple, cast +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Type, Union, cast from pants.base.exception_sink import ExceptionSink from pants.base.exiter import PANTS_FAILED_EXIT_CODE @@ -59,6 +59,10 @@ def end_user_messages(self): return [str(exc) for exc in self.wrapped_exceptions] +class ExecutionTimeoutError(ExecutionError): + """An ExecutionRequest specified a timeout which elapsed before the request completed.""" + + class Scheduler: def __init__( self, @@ -242,13 +246,14 @@ def invalidate_files(self, direct_filenames): def invalidate_all_files(self): return self._native.lib.graph_invalidate_all_paths(self._scheduler) - def check_invalidation_watcher_liveness(self) -> bool: - return cast(bool, self._native.lib.check_invalidation_watcher_liveness(self._scheduler)) + def check_invalidation_watcher_liveness(self): + res = self._native.lib.check_invalidation_watcher_liveness(self._scheduler) + self._raise_or_return(res) def graph_len(self): return self._native.lib.graph_len(self._scheduler) - def add_root_selection(self, execution_request, subject_or_params, product): + def execution_add_root_select(self, execution_request, subject_or_params, product): if isinstance(subject_or_params, Params): params = subject_or_params.params else: @@ -258,6 +263,17 @@ def add_root_selection(self, execution_request, subject_or_params, product): ) self._raise_or_return(res) + def execution_set_timeout(self, execution_request, timeout: float): + timeout_in_ms = int(timeout * 1000) + self._native.lib.execution_set_timeout(execution_request, timeout_in_ms) + + def execution_set_poll(self, execution_request, poll: bool): + self._native.lib.execution_set_poll(execution_request, poll) + + def execution_set_poll_delay(self, execution_request, poll_delay: float): + poll_delay_in_ms = int(poll_delay * 1000) + self._native.lib.execution_set_poll_delay(execution_request, poll_delay_in_ms) + @property def visualize_to_dir(self): return self._visualize_to_dir @@ -278,8 +294,14 @@ def with_fork_context(self, func): def _run_and_return_roots(self, session, execution_request): raw_roots = self._native.lib.scheduler_execute(self._scheduler, session, execution_request) - if raw_roots == self._native.ffi.NULL: + if raw_roots.err == self._native.lib.NoError: + pass + elif raw_roots.err == self._native.lib.KeyboardInterrupt: raise KeyboardInterrupt + elif raw_roots.err == self._native.lib.Timeout: + raise ExecutionTimeoutError("Timed out") + else: + raise Exception(f"Unrecognized error type from native execution: {raw_roots.err}") remaining_runtime_exceptions_to_capture = list( self._native.consume_cffi_extern_method_runtime_exceptions() @@ -357,8 +379,6 @@ class SchedulerSession: Session. 
""" - execution_error_type = ExecutionError - def __init__(self, scheduler, session): self._scheduler = scheduler self._session = session @@ -375,6 +395,15 @@ def poll_workunits(self) -> Tuple[Dict[str, Any], ...]: def graph_len(self): return self._scheduler.graph_len() + def new_run_id(self): + """Assigns a new "run id" to this Session, without creating a new Session. + + Usually each Session corresponds to one end user "run", but there are exceptions: notably, + the `--loop` feature uses one Session, but would like to observe new values for uncacheable + nodes in each iteration of its loop. + """ + self._scheduler._native.lib.session_new_run_id(self._session) + def trace(self, execution_request): """Yields a stringified 'stacktrace' starting from the scheduler's roots.""" for line in self._scheduler.graph_trace(self._session, execution_request.native): @@ -390,30 +419,41 @@ def visualize_graph_to_file(self, filename): def visualize_rule_graph_to_file(self, filename): self._scheduler.visualize_rule_graph_to_file(filename) - def execution_request_literal(self, request_specs): - native_execution_request = self._scheduler._native.new_execution_request() - for subject, product in request_specs: - self._scheduler.add_root_selection(native_execution_request, subject, product) - return ExecutionRequest(request_specs, native_execution_request) - - def execution_request(self, products, subjects): + def execution_request( + self, + products: Sequence[Type], + subjects: Sequence[Union[Any, Params]], + poll: bool = False, + poll_delay: Optional[float] = None, + timeout: Optional[float] = None, + ) -> ExecutionRequest: """Create and return an ExecutionRequest for the given products and subjects. The resulting ExecutionRequest object will contain keys tied to this scheduler's product Graph, and so it will not be directly usable with other scheduler instances without being re-created. - NB: This method does a "cross product", mapping all subjects to all products. To create a - request for just the given list of subject -> product tuples, use `execution_request_literal()`! + NB: This method does a "cross product", mapping all subjects to all products. :param products: A list of product types to request for the roots. - :type products: list of types - :param subjects: A list of AddressSpec and/or PathGlobs objects. - :type subject: list of :class:`pants.base.specs.AddressSpec`, `pants.build_graph.Address`, and/or - :class:`pants.engine.fs.PathGlobs` objects. + :param subjects: A list of singleton input parameters or Params instances. + :param poll: True to wait for _all_ of the given roots to + have changed since their last observed values in this SchedulerSession. + :param poll_delay: A delay (in seconds) to wait after observing a change, and before + beginning to compute a new value. + :param timeout: An optional timeout to wait for the request to complete (in seconds). If the + request has not completed before the timeout has elapsed, ExecutionTimeoutError is raised. :returns: An ExecutionRequest for the given products and subjects. 
""" - roots = tuple((s, p) for s in subjects for p in products) - return self.execution_request_literal(roots) + request_specs = tuple((s, p) for s in subjects for p in products) + native_execution_request = self._scheduler._native.new_execution_request() + for subject, product in request_specs: + self._scheduler.execution_add_root_select(native_execution_request, subject, product) + if timeout: + self._scheduler.execution_set_timeout(native_execution_request, timeout) + if poll_delay: + self._scheduler.execution_set_poll_delay(native_execution_request, poll_delay) + self._scheduler.execution_set_poll(native_execution_request, poll) + return ExecutionRequest(request_specs, native_execution_request) def invalidate_files(self, direct_filenames): """Invalidates the given filenames in an internal product Graph instance.""" @@ -447,7 +487,7 @@ def _maybe_visualize(self): self._run_count += 1 self.visualize_graph_to_file(os.path.join(self._scheduler.visualize_to_dir, name)) - def execute(self, execution_request): + def execute(self, execution_request: ExecutionRequest): """Invoke the engine for the given ExecutionRequest, returning Return and Throw states. :return: A tuple of (root, Return) tuples and (root, Throw) tuples. @@ -473,7 +513,7 @@ def execute(self, execution_request): returns = tuple((root, state) for root, state in roots if type(state) is Return) throws = tuple((root, state) for root, state in roots if type(state) is Throw) - return returns, throws + return cast(Tuple[Tuple[Return, ...], Tuple[Throw, ...]], (returns, throws)) def _trace_on_error(self, unique_exceptions, request): exception_noun = pluralize(len(unique_exceptions), "Exception") @@ -493,13 +533,21 @@ def _trace_on_error(self, unique_exceptions, request): unique_exceptions, ) - def run_goal_rule(self, product, subject): + def run_goal_rule( + self, + product: Type, + subject: Union[Any, Params], + poll: bool = False, + poll_delay: Optional[float] = None, + ) -> int: """ :param product: A Goal subtype. :param subject: subject for the request. + :param poll: See self.execution_request. + :param poll_delay: See self.execution_request. :returns: An exit_code for the given Goal. """ - request = self.execution_request([product], [subject]) + request = self.execution_request([product], [subject], poll=poll, poll_delay=poll_delay) returns, throws = self.execute(request) if throws: @@ -510,17 +558,25 @@ def run_goal_rule(self, product, subject): _, state = returns[0] return state.value.exit_code - def product_request(self, product, subjects): + def product_request( + self, + product: Type, + subjects: Sequence[Union[Any, Params]], + poll: bool = False, + timeout: Optional[float] = None, + ): """Executes a request for a single product for some subjects, and returns the products. - :param class product: A product type for the request. - :param list subjects: A list of subjects or Params instances for the request. + :param product: A product type for the request. + :param subjects: A list of subjects or Params instances for the request. + :param poll: See self.execution_request. + :param timeout: See self.execution_request. :returns: A list of the requested products, with length match len(subjects). 
""" request = None raised_exception = None try: - request = self.execution_request([product], subjects) + request = self.execution_request([product], subjects, poll=poll, timeout=timeout) except: # noqa: T803 # If there are any exceptions during CFFI extern method calls, we want to return an error with # them and whatever failure results from it. This typically results from unhashable types. @@ -579,7 +635,7 @@ def product_request(self, product, subjects): ) ) - returns, throws = self.execute(request) + returns, throws = self.execute(cast(ExecutionRequest, request)) # Throw handling. if throws: diff --git a/src/python/pants/init/engine_initializer.py b/src/python/pants/init/engine_initializer.py index 7d4bc2a79ff..8bd03537827 100644 --- a/src/python/pants/init/engine_initializer.py +++ b/src/python/pants/init/engine_initializer.py @@ -177,12 +177,18 @@ class LegacyGraphScheduler: goal_map: Any def new_session( - self, zipkin_trace_v2, build_id, v2_ui=False, should_report_workunits=False + self, + zipkin_trace_v2, + build_id, + v2_ui=False, + use_colors=True, + should_report_workunits=False, ) -> "LegacyGraphSession": session = self.scheduler.new_session( zipkin_trace_v2, build_id, v2_ui, should_report_workunits ) - return LegacyGraphSession(session, self.build_file_aliases, self.goal_map) + console = Console(use_colors=use_colors, session=session if v2_ui else None,) + return LegacyGraphSession(session, console, self.build_file_aliases, self.goal_map) @dataclass(frozen=True) @@ -190,6 +196,7 @@ class LegacyGraphSession: """A thin wrapper around a SchedulerSession configured with @rules for a symbol table.""" scheduler_session: SchedulerSession + console: Console build_file_aliases: Any goal_map: Any @@ -210,7 +217,9 @@ def run_goal_rules( options: Options, goals: Iterable[str], specs: Specs, - ): + poll: bool = False, + poll_delay: Optional[float] = None, + ) -> int: """Runs @goal_rules sequentially and interactively by requesting their implicit Goal products. @@ -219,12 +228,6 @@ def run_goal_rules( :returns: An exit code. 
""" - global_options = options.for_global_scope() - - console = Console( - use_colors=global_options.colors, - session=self.scheduler_session if global_options.get("v2_ui") else None, - ) workspace = Workspace(self.scheduler_session) interactive_runner = InteractiveRunner(self.scheduler_session) @@ -240,13 +243,19 @@ def run_goal_rules( if not is_implemented: continue params = Params( - specs.provided_specs, options_bootstrapper, console, workspace, interactive_runner, + specs.provided_specs, + options_bootstrapper, + self.console, + workspace, + interactive_runner, ) logger.debug(f"requesting {goal_product} to satisfy execution of `{goal}` goal") try: - exit_code = self.scheduler_session.run_goal_rule(goal_product, params) + exit_code = self.scheduler_session.run_goal_rule( + goal_product, params, poll=poll, poll_delay=poll_delay + ) finally: - console.flush() + self.console.flush() if exit_code != PANTS_SUCCEEDED_EXIT_CODE: return exit_code diff --git a/src/python/pants/init/options_initializer.py b/src/python/pants/init/options_initializer.py index 2f62273b67d..eda28a4ac6d 100644 --- a/src/python/pants/init/options_initializer.py +++ b/src/python/pants/init/options_initializer.py @@ -3,7 +3,6 @@ import logging import os -import re import sys import pkg_resources @@ -108,18 +107,21 @@ def compute_pants_ignore(buildroot, global_options): """ pants_ignore = list(global_options.pants_ignore) - def add_ignore(absolute_path): + def add(absolute_path, include=False): # To ensure that the path is ignored regardless of whether it is a symlink or a directory, we # strip trailing slashes (which would signal that we wanted to ignore only directories). maybe_rel_path = fast_relpath_optional(absolute_path, buildroot) - # Exclude temp workdir from . - # temp workdir is /path/to//tmp/tmp.pants.d - if maybe_rel_path and not re.search("tmp/tmp(.+).pants.d", maybe_rel_path): + if maybe_rel_path: rel_path = maybe_rel_path.rstrip(os.path.sep) - pants_ignore.append(f"/{rel_path}") + prefix = "!" if include else "" + pants_ignore.append(f"{prefix}/{rel_path}") + + add(global_options.pants_workdir) + add(global_options.pants_distdir) + # NB: We punch a hole in the ignore patterns to allow pants to directly watch process + # metadata that is written to disk. + add(global_options.pants_subprocessdir, include=True) - add_ignore(global_options.pants_workdir) - add_ignore(global_options.pants_distdir) return pants_ignore @staticmethod @@ -129,23 +131,28 @@ def compute_pantsd_invalidation_globs(buildroot, bootstrap_options): Combines --pythonpath and --pants-config-files files that are in {buildroot} dir with those invalidation_globs provided by users. """ - invalidation_globs = [] - globs = ( - bootstrap_options.pythonpath + invalidation_globs = set() + globs = set( + sys.path + + bootstrap_options.pythonpath + bootstrap_options.pants_config_files + bootstrap_options.pantsd_invalidation_globs ) for glob in globs: - glob_relpath = os.path.relpath(glob, buildroot) - if glob_relpath and (not glob_relpath.startswith("../")): - invalidation_globs.extend([glob_relpath, glob_relpath + "/**"]) + if glob.startswith("!"): + invalidation_globs.add(glob) + continue + + glob_relpath = fast_relpath_optional(glob, buildroot) if os.path.isabs(glob) else glob + if glob_relpath: + invalidation_globs.update([glob_relpath, glob_relpath + "/**"]) else: - logging.getLogger(__name__).warning( + logger.debug( f"Changes to {glob}, outside of the buildroot, will not be invalidated." 
) - return invalidation_globs + return list(sorted(invalidation_globs)) @classmethod def create(cls, options_bootstrapper, build_configuration, init_subsystems=True): diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 60b7e07c33f..2decd98ccbb 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -722,7 +722,7 @@ def register_bootstrap_options(cls, register): type=list, default=[], help="Filesystem events matching any of these globs will trigger a daemon restart. " - "The `--pythonpath` and `--pants-config-files` are inherently invalidated.", + "Pants' own code, plugins, and `--pants-config-files` are inherently invalidated.", ) # Watchman options. diff --git a/src/python/pants/pantsd/pants_daemon.py b/src/python/pants/pantsd/pants_daemon.py index 6286f764e7d..c4d20fe682a 100644 --- a/src/python/pants/pantsd/pants_daemon.py +++ b/src/python/pants/pantsd/pants_daemon.py @@ -190,6 +190,12 @@ def create(cls, options_bootstrapper, full_init=True): bootstrap_options=bootstrap_options, ) + @classmethod + def absolute_pidfile(cls): + return PantsDaemon.metadata_file_path( + "pantsd", "pid", bootstrap_options.pants_subprocessdir + ) + @staticmethod def _setup_services( build_root, @@ -204,31 +210,22 @@ def _setup_services( """ should_shutdown_after_run = bootstrap_options.shutdown_pantsd_after_run fs_event_service = ( - FSEventService(watchman, build_root,) if bootstrap_options.watchman_enable else None + FSEventService( + watchman, scheduler=legacy_graph_scheduler.scheduler, build_root=build_root + ) + if bootstrap_options.watchman_enable + else None ) - pidfile_absolute = PantsDaemon.metadata_file_path( - "pantsd", "pid", bootstrap_options.pants_subprocessdir + invalidation_globs = OptionsInitializer.compute_pantsd_invalidation_globs( + build_root, bootstrap_options ) - if pidfile_absolute.startswith(build_root): - pidfile = os.path.relpath(pidfile_absolute, build_root) - else: - pidfile = None - logging.getLogger(__name__).warning( - "Not watching pantsd pidfile because subprocessdir is outside of buildroot. Having " - "subprocessdir be a child of buildroot (as it is by default) may help avoid stray " - "pantsd processes." - ) - # TODO make SchedulerService handle fs_event_service_being None scheduler_service = SchedulerService( fs_event_service=fs_event_service, legacy_graph_scheduler=legacy_graph_scheduler, build_root=build_root, - invalidation_globs=OptionsInitializer.compute_pantsd_invalidation_globs( - build_root, bootstrap_options - ), - pantsd_pidfile=pidfile, + invalidation_globs=invalidation_globs, union_membership=union_membership, ) @@ -402,11 +399,9 @@ def _run_services(self, pants_services): f"service {service} failed to start, shutting down!" ) - # Once all services are started, write our pid. - self.write_pid() - self.write_metadata_by_name( - "pantsd", self.FINGERPRINT_KEY, ensure_text(self.options_fingerprint) - ) + # Once all services are started, write our pid and notify the SchedulerService to start + # watching it. + self._initialize_pid() # Monitor services. while not self.is_killed: @@ -425,6 +420,46 @@ def _write_named_sockets(self, socket_map): for socket_name, socket_info in socket_map.items(): self.write_named_socket(socket_name, socket_info) + def _initialize_pid(self): + """Writes out our pid and metadata, and begin watching it for validity. + + Once written and watched, does a one-time read of the pid to confirm that we haven't raced + another process starting. 
+ + All services must already have been initialized before this is called. + """ + + # Write the pidfile. + self.write_pid() + self.write_metadata_by_name( + "pantsd", self.FINGERPRINT_KEY, ensure_text(self.options_fingerprint) + ) + + # Add the pidfile to watching via the scheduler. + pidfile_absolute = self._metadata_file_path("pantsd", "pid") + if pidfile_absolute.startswith(self._build_root): + scheduler_service = next( + s for s in self._services.services if isinstance(s, SchedulerService) + ) + scheduler_service.add_invalidation_glob( + os.path.relpath(pidfile_absolute, self._build_root) + ) + else: + logging.getLogger(__name__).warning( + "Not watching pantsd pidfile because subprocessdir is outside of buildroot. Having " + "subprocessdir be a child of buildroot (as it is by default) may help avoid stray " + "pantsd processes." + ) + + # Finally, once watched, confirm that we didn't race another process. + try: + with open(pidfile_absolute, "r") as f: + pid_from_file = f.read() + except IOError: + raise Exception(f"Could not read pants pidfile at {pidfile_absolute}.") + if int(pid_from_file) != os.getpid(): + raise Exception(f"Another instance of pantsd is running at {pid_from_file}") + def run_sync(self): """Synchronously run pantsd.""" os.environ.pop("PYTHONPATH") diff --git a/src/python/pants/pantsd/service/fs_event_service.py b/src/python/pants/pantsd/service/fs_event_service.py index 0e9ccf5324b..3f22d7242f7 100644 --- a/src/python/pants/pantsd/service/fs_event_service.py +++ b/src/python/pants/pantsd/service/fs_event_service.py @@ -3,7 +3,9 @@ import logging import os +import threading +from pants.engine.scheduler import Scheduler from pants.pantsd.service.pants_service import PantsService from pants.pantsd.watchman import Watchman @@ -19,28 +21,27 @@ class FSEventService(PantsService): ZERO_DEPTH = ["depth", "eq", 0] PANTS_ALL_FILES_SUBSCRIPTION_NAME = "all_files" - PANTS_PID_SUBSCRIPTION_NAME = "pantsd_pid" - def __init__(self, watchman, build_root): + def __init__( + self, watchman: Watchman, scheduler: Scheduler, build_root: str, + ): """ - :param Watchman watchman: The Watchman instance as provided by the WatchmanLauncher subsystem. - :param str build_root: The current build root. + :param watchman: The Watchman instance as provided by the WatchmanLauncher subsystem. + :param session: A SchedulerSession to invalidate for. + :param build_root: The current build root. """ super().__init__() self._logger = logging.getLogger(__name__) self._watchman = watchman self._build_root = os.path.realpath(build_root) - self._handlers = {} - - def register_all_files_handler(self, callback, name): - """Registers a subscription for all files under a given watch path. + self._watchman_is_running = threading.Event() + self._scheduler_session = scheduler.new_session( + zipkin_trace_v2=False, build_id="fs_event_service_session" + ) - :param func callback: the callback to execute on each filesystem event - :param str name: the subscription name as used by watchman - """ - self.register_handler( - name, - dict( + self._handler = Watchman.EventHandler( + name=self.PANTS_ALL_FILES_SUBSCRIPTION_NAME, + metadata=dict( fields=["name"], # Request events for all file types. # NB: Touching a file invalidates its parent directory due to: @@ -61,49 +62,30 @@ def register_all_files_handler(self, callback, name): # Related: https://github.com/pantsbuild/pants/issues/2956 ], ), - callback, + # NB: We stream events from Watchman in `self.run`, so we don't need a callback. 
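+                # `run` forwards each batch of changed files to
+                # `self._scheduler_session.invalidate_files`, so this callback is never invoked.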
+ callback=lambda: None, ) - def register_pidfile_handler(self, pidfile_path, callback): - """ - - :param pidfile_path: Path to the pidfile, relative to the build root - :param callback: - :return: - """ - self.register_handler( - self.PANTS_PID_SUBSCRIPTION_NAME, - dict( - fields=["name"], - expression=[ - "allof", - ["dirname", os.path.dirname(pidfile_path)], - ["name", os.path.basename(pidfile_path)], - ], - ), - callback, - ) - - def register_handler(self, name, metadata, callback): - """Register subscriptions and their event handlers. - - :param str name: the subscription name as used by watchman - :param dict metadata: a dictionary of metadata to be serialized and passed to the watchman - subscribe command. this should include the match expression as well - as any required callback fields. - :param func callback: the callback to execute on each matching filesystem event - """ - assert name not in self._handlers, "duplicate handler name: {}".format(name) - assert ( - isinstance(metadata, dict) and "fields" in metadata and "expression" in metadata - ), "invalid handler metadata!" - self._handlers[name] = Watchman.EventHandler( - name=name, metadata=metadata, callback=callback - ) - - def fire_callback(self, handler_name, event_data): - """Fire an event callback for a given handler.""" - return self._handlers[handler_name].callback(event_data) + def await_started(self): + return self._watchman_is_running.wait() + + def _handle_all_files_event(self, event): + """File event notification queue processor.""" + try: + is_initial_event, files = ( + event["is_fresh_instance"], + event["files"], + ) + except (KeyError, UnicodeDecodeError) as e: + self._logger.warning("%r raised by invalid watchman event: %s", e, event) + return + + # The first watchman event for all_files is a listing of all files - ignore it. + if is_initial_event: + self._logger.debug(f"watchman now watching {len(files)} files") + else: + self._logger.debug(f"handling change event for: {len(files)}") + self._scheduler_session.invalidate_files(files) def run(self): """Main service entrypoint. @@ -114,17 +96,17 @@ def run(self): if not (self._watchman and self._watchman.is_alive()): raise PantsService.ServiceError("watchman is not running, bailing!") - # Enable watchman for the build root. + # Enable watchman for the build root and register our all_files handler. self._watchman.watch_project(self._build_root) - subscriptions = list(self._handlers.values()) - # Setup subscriptions and begin the main event firing loop. - for handler_name, event_data in self._watchman.subscribed(self._build_root, subscriptions): + for _, event_data in self._watchman.subscribed(self._build_root, [self._handler]): self._state.maybe_pause() if self._state.is_terminating: break + if not event_data: + continue - if event_data: - # As we receive events from watchman, trigger the relevant handlers. - self.fire_callback(handler_name, event_data) + self._handle_all_files_event(event_data) + if not self._watchman_is_running.is_set(): + self._watchman_is_running.set() diff --git a/src/python/pants/pantsd/service/scheduler_service.py b/src/python/pants/pantsd/service/scheduler_service.py index d5b58505675..2ded354c0a5 100644 --- a/src/python/pants/pantsd/service/scheduler_service.py +++ b/src/python/pants/pantsd/service/scheduler_service.py @@ -2,17 +2,13 @@ # Licensed under the Apache License, Version 2.0 (see LICENSE). 
import logging -import os -import queue -import sys -import threading -import time -from typing import List, Optional, Set, Tuple, cast +from typing import List, Optional, Tuple, cast from pants.base.exiter import PANTS_SUCCEEDED_EXIT_CODE from pants.base.specs import Specs from pants.engine.fs import PathGlobs, Snapshot from pants.engine.rules import UnionMembership +from pants.engine.scheduler import ExecutionError, ExecutionTimeoutError from pants.goal.run_tracker import RunTracker from pants.init.engine_initializer import LegacyGraphScheduler, LegacyGraphSession from pants.init.specs_calculator import SpecsCalculator @@ -28,8 +24,10 @@ class SchedulerService(PantsService): This service holds an online Scheduler instance that is primed via watchman filesystem events. """ - QUEUE_SIZE = 64 - INVALIDATION_WATCHER_LIVENESS_CHECK_INTERVAL = 1 + # The interval on which we will long-poll the invalidation globs. If a glob changes, the poll + # will return immediately, so this value primarily affects how frequently the `run` method + # will check the terminated condition. + INVALIDATION_POLL_INTERVAL = 0.5 def __init__( self, @@ -38,7 +36,6 @@ def __init__( legacy_graph_scheduler: LegacyGraphScheduler, build_root: str, invalidation_globs: List[str], - pantsd_pidfile: Optional[str], union_membership: UnionMembership, ) -> None: """ @@ -47,14 +44,11 @@ def __init__( :param build_root: The current build root. :param invalidation_globs: A list of `globs` that when encountered in filesystem event subscriptions will tear down the daemon. - :param pantsd_pidfile: The path to the pantsd pidfile for fs event monitoring. """ super().__init__() self._fs_event_service = fs_event_service self._graph_helper = legacy_graph_scheduler - self._invalidation_globs = invalidation_globs self._build_root = build_root - self._pantsd_pidfile = pantsd_pidfile self._union_membership = union_membership self._scheduler = legacy_graph_scheduler.scheduler @@ -64,147 +58,89 @@ def __init__( zipkin_trace_v2=False, build_id="scheduler_service_session", ) self._logger = logging.getLogger(__name__) - self._event_queue: queue.Queue = queue.Queue(maxsize=self.QUEUE_SIZE) - self._watchman_is_running = threading.Event() - self._invalidating_snapshot = None - self._invalidating_files: Set[str] = set() - self._loop_condition = LoopCondition() + # NB: We declare these as a single field so that they can be changed atomically + # by add_invalidation_glob. + self._invalidation_globs_and_snapshot: Tuple[Tuple[str, ...], Optional[Snapshot]] = ( + tuple(invalidation_globs), + None, + ) - def _get_snapshot(self): - """Returns a Snapshot of the input globs.""" - return self._scheduler_session.product_request( - Snapshot, subjects=[PathGlobs(self._invalidation_globs)] - )[0] + def _get_snapshot(self, globs: Tuple[str, ...], poll: bool) -> Optional[Snapshot]: + """Returns a Snapshot of the input globs. + + If poll=True, will wait for up to INVALIDATION_POLL_INTERVAL for the globs to have changed, + and will return None if they have not changed. + """ + timeout = self.INVALIDATION_POLL_INTERVAL if poll else None + try: + snapshot = self._scheduler_session.product_request( + Snapshot, subjects=[PathGlobs(globs)], poll=poll, timeout=timeout, + )[0] + return cast(Snapshot, snapshot) + except ExecutionTimeoutError: + if poll: + return None + raise def setup(self, services): """Service setup.""" super().setup(services) - # Register filesystem event handlers on an FSEventService instance. 
- if self._fs_event_service is not None: - self._fs_event_service.register_all_files_handler( - self._enqueue_fs_event, self._fs_event_service.PANTS_ALL_FILES_SUBSCRIPTION_NAME - ) # N.B. We compute the invalidating fileset eagerly at launch with an assumption that files # that exist at startup are the only ones that can affect the running daemon. - if self._fs_event_service is not None: - if self._invalidation_globs: - self._invalidating_snapshot = self._get_snapshot() - self._invalidating_files = self._invalidating_snapshot.files - self._logger.info( - "watching invalidating files: {}".format(self._invalidating_files) - ) - - if self._pantsd_pidfile: - self._fs_event_service.register_pidfile_handler( - self._pantsd_pidfile, self._enqueue_fs_event - ) - - def _enqueue_fs_event(self, event): - """Watchman filesystem event handler for BUILD/requirements.txt updates. - - Called via a thread. - """ - self._logger.info( - "enqueuing {} changes for subscription {}".format( - len(event["files"]), event["subscription"] - ) - ) - self._event_queue.put(event) - - def _maybe_invalidate_scheduler_batch(self): - new_snapshot = self._get_snapshot() - if ( - self._invalidating_snapshot - and new_snapshot.directory_digest != self._invalidating_snapshot.directory_digest - ): - self._logger.fatal( - "saw file events covered by invalidation globs [{}], terminating the daemon.".format( - self._invalidating_files - ) - ) - self.terminate() + globs, _ = self._invalidation_globs_and_snapshot + self._invalidation_globs_and_snapshot = (globs, self._get_snapshot(globs, poll=False)) + self._logger.info("watching invalidation patterns: {}".format(globs)) - def _maybe_invalidate_scheduler_pidfile(self): - new_pid = self._check_pid_changed() - if new_pid is not False: - self._logger.fatal( - "{} says pantsd PID is {} but my PID is: {}: terminating".format( - self._pantsd_pidfile, new_pid, os.getpid(), - ) - ) - self.terminate() + def add_invalidation_glob(self, glob: str): + """Add an invalidation glob to monitoring after startup. - def _check_pid_changed(self): - """Reads pidfile and returns False if its PID is ours, else a printable (maybe falsey) - value.""" - try: - with open(os.path.join(self._build_root, self._pantsd_pidfile), "r") as f: - pid_from_file = f.read() - except IOError: - return "[no file could be read]" - if int(pid_from_file) != os.getpid(): - return pid_from_file - else: - return False + NB: This exists effectively entirely because pantsd needs to be fully started before writing + its pid file: all other globs should be passed via the constructor. + """ + self._logger.info("adding invalidation pattern: {}".format(glob)) - def _handle_batch_event(self, files): - self._logger.debug("handling change event for: %s", files) + # Check one more time synchronously with our current set of globs. + self._check_invalidation_globs(poll=False) - invalidated = self._scheduler.invalidate_files(files) - if invalidated: - self._loop_condition.notify_all() + # Synchronously invalidate the path on disk to prevent races with async invalidation, which + # might otherwise take time to notice that the file had been created. + self._scheduler.invalidate_files([glob]) - self._maybe_invalidate_scheduler_batch() + # Swap out the globs and snapshot. 
+ globs, _ = self._invalidation_globs_and_snapshot + globs = globs + (glob,) + self._invalidation_globs_and_snapshot = (globs, self._get_snapshot(globs, poll=False)) - def _process_event_queue(self): - """File event notification queue processor.""" - try: - event = self._event_queue.get(timeout=0.05) - except queue.Empty: - return + def _check_invalidation_globs(self, poll: bool): + """Check the digest of our invalidation Snapshot and exit if it has changed.""" + globs, invalidation_snapshot = self._invalidation_globs_and_snapshot + assert invalidation_snapshot is not None, "Service.setup was not called." - try: - subscription, is_initial_event, files = ( - event["subscription"], - event["is_fresh_instance"], - event["files"], - ) - except (KeyError, UnicodeDecodeError) as e: - self._logger.warning("%r raised by invalid watchman event: %s", e, event) + snapshot = self._get_snapshot(globs, poll=poll) + if snapshot is None or snapshot.directory_digest == invalidation_snapshot.directory_digest: return - self._logger.debug( - "processing {} files for subscription {} (first_event={})".format( - len(files), subscription, is_initial_event - ) + before = set(invalidation_snapshot.files + invalidation_snapshot.dirs) + after = set(snapshot.files + snapshot.dirs) + added = after - before + removed = before - after + if added or removed: + description = f"+{added or '{}'}, -{removed or '{}'}" + else: + description = f"content changed ({snapshot.directory_digest} fs {invalidation_snapshot.directory_digest})" + self._logger.critical( + f"saw filesystem changes covered by invalidation globs: {description}. terminating the daemon." ) - - # The first watchman event for all_files is a listing of all files - ignore it. - if ( - not is_initial_event - and self._fs_event_service is not None - and subscription == self._fs_event_service.PANTS_ALL_FILES_SUBSCRIPTION_NAME - ): - self._handle_batch_event(files) - - # However, we do want to check for the initial event in the pid file creation. - if subscription == self._fs_event_service.PANTS_PID_SUBSCRIPTION_NAME: - self._maybe_invalidate_scheduler_pidfile() - - if not self._watchman_is_running.is_set(): - self._watchman_is_running.set() - - self._event_queue.task_done() + self.terminate() def _check_invalidation_watcher_liveness(self): - time.sleep(self.INVALIDATION_WATCHER_LIVENESS_CHECK_INTERVAL) - if not self._scheduler.check_invalidation_watcher_liveness(): + try: + self._scheduler.check_invalidation_watcher_liveness() + except Exception as e: # Watcher failed for some reason - self._logger.critical( - "The graph invalidation watcher failed, so we are shutting down. 
Check the pantsd.log for details" - ) + self._logger.critical(f"The scheduler was invalidated: {e}") self.terminate() def prepare_v1_graph_run_v2( @@ -221,114 +157,83 @@ def prepare_v1_graph_run_v2( self._logger.debug( f"fs event service is running and graph_len > 0: waiting for initial watchman event" ) - self._watchman_is_running.wait() + self._fs_event_service.await_started() + + global_options = options.for_global_scope() build_id = RunTracker.global_instance().run_id v2_ui = options.for_global_scope().get("v2_ui", False) + use_colors = global_options.get("colors", True) zipkin_trace_v2 = options.for_scope("reporting").zipkin_trace_v2 - session = self._graph_helper.new_session(zipkin_trace_v2, build_id, v2_ui) + session = self._graph_helper.new_session( + zipkin_trace_v2, build_id, v2_ui=v2_ui, use_colors=use_colors + ) - if options.for_global_scope().get("loop", False): - fn = self._loop - else: - fn = self._body + specs = SpecsCalculator.create( + options=options, + session=session.scheduler_session, + exclude_patterns=tuple(global_options.exclude_target_regexp) + if global_options.exclude_target_regexp + else tuple(), + tags=tuple(global_options.tag) if global_options.tag else tuple(), + ) - specs, exit_code = fn(session, options, options_bootstrapper) - return session, specs, exit_code + v2 = global_options.v2 + perform_loop = global_options.get("loop", False) + if not perform_loop: + return ( + session, + specs, + self._body(session, options, options_bootstrapper, specs, v2, poll=False), + ) - def _loop( - self, - session: LegacyGraphSession, - options: Options, - options_bootstrapper: OptionsBootstrapper, - ) -> Tuple[Specs, int]: - # TODO: See https://github.com/pantsbuild/pants/issues/6288 regarding Ctrl+C handling. - iterations = options.for_global_scope().loop_max - specs = None + iterations = global_options.loop_max exit_code = PANTS_SUCCEEDED_EXIT_CODE while iterations and not self._state.is_terminating: + # NB: We generate a new "run id" per iteration of the loop in order to allow us to + # observe fresh values for Goals. See notes in `scheduler.rs`. + session.scheduler_session.new_run_id() try: - specs, exit_code = self._body(session, options, options_bootstrapper) - except session.scheduler_session.execution_error_type as e: - # Render retryable exceptions raised by the Scheduler. 
- print(e, file=sys.stderr) - + exit_code = self._body(session, options, options_bootstrapper, specs, v2, poll=True) + except ExecutionError as e: + self._logger.warning(e) iterations -= 1 - while ( - iterations - and not self._state.is_terminating - and not self._loop_condition.wait(timeout=1) - ): - continue - return cast(Specs, specs), exit_code + + return session, specs, exit_code def _body( self, session: LegacyGraphSession, options: Options, options_bootstrapper: OptionsBootstrapper, - ) -> Tuple[Specs, int]: - global_options = options.for_global_scope() - specs = SpecsCalculator.create( - options=options, - session=session.scheduler_session, - exclude_patterns=tuple(global_options.exclude_target_regexp) - if global_options.exclude_target_regexp - else tuple(), - tags=tuple(global_options.tag) if global_options.tag else tuple(), - ) + specs: Specs, + v2: bool, + poll: bool, + ) -> int: exit_code = PANTS_SUCCEEDED_EXIT_CODE - v1_goals, ambiguous_goals, v2_goals = options.goals_by_version + _, ambiguous_goals, v2_goals = options.goals_by_version - if v2_goals or (ambiguous_goals and global_options.v2): - goals = v2_goals + (ambiguous_goals if global_options.v2 else tuple()) + if v2_goals or (ambiguous_goals and v2): + goals = v2_goals + (ambiguous_goals if v2 else tuple()) - # N.B. @goal_rules run pre-fork in order to cache the products they request during execution. + # When polling we use a delay (only applied in cases where we have waited for something + # to do) in order to avoid re-running too quickly when changes arrive in clusters. exit_code = session.run_goal_rules( options_bootstrapper=options_bootstrapper, union_membership=self._union_membership, options=options, goals=goals, specs=specs, + poll=poll, + poll_delay=(0.1 if poll else None), ) - return specs, exit_code + return exit_code def run(self): """Main service entrypoint.""" while not self._state.is_terminating: - if self._fs_event_service is not None: - self._process_event_queue() - else: - self._check_invalidation_watcher_liveness() self._state.maybe_pause() - - -class LoopCondition: - """A wrapped condition variable to handle deciding when loop consumers should re-run. - - Any number of threads may wait and/or notify the condition. - """ - - def __init__(self): - super().__init__() - self._condition = threading.Condition(threading.Lock()) - self._iteration = 0 - - def notify_all(self): - """Notifies all threads waiting for the condition.""" - with self._condition: - self._iteration += 1 - self._condition.notify_all() - - def wait(self, timeout): - """Waits for the condition for at most the given timeout and returns True if the condition - triggered. - - Generally called in a loop until the condition triggers. - """ - - with self._condition: - previous_iteration = self._iteration - self._condition.wait(timeout) - return previous_iteration != self._iteration + self._check_invalidation_watcher_liveness() + # NB: This is a long poll that will keep us from looping too quickly here. 
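+            # If the digest of the invalidation globs has changed, `_check_invalidation_globs`
+            # terminates the service; otherwise it returns after at most
+            # INVALIDATION_POLL_INTERVAL seconds.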
+ self._check_invalidation_globs(poll=True) diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index f854b8d37ae..35766778b12 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -963,16 +963,6 @@ name = "futures-io" version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "futures-locks" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "futures-macro" version = "0.3.4" @@ -1102,6 +1092,7 @@ dependencies = [ "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3080,24 +3071,6 @@ dependencies = [ "tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "tokio-current-thread" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "tokio-executor" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "crossbeam-utils 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "tokio-io" version = "0.1.13" @@ -3505,9 +3478,7 @@ version = "0.0.1" dependencies = [ "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "fs 0.0.1", - "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "graph 0.0.1", "hashing 0.0.1", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3717,7 +3688,6 @@ dependencies = [ "checksum futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f25592f769825e89b92358db00d26f965761e094951ac44d3663ef25b7ac464a" "checksum futures-executor 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f674f3e1bcb15b37284a90cedf55afdba482ab061c407a9c0ebbd0f3109741ba" "checksum futures-io 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a638959aa96152c7a4cddf50fcb1e3fede0583b27157c26e67d6f99904090dc6" -"checksum futures-locks 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bd5658075ca5ae3918993c5bc95b43fcf22f927227660556a947da598f9f8981" "checksum futures-macro 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9a5081aa3de1f7542a794a397cde100ed903b0630152d0973479018fd85423a7" "checksum futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3466821b4bc114d95b087b850a724c6f83115e929bc88f1fa98a3304a944c8a6" "checksum futures-task 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0a34e53cf6cdcd0178aa573aed466b646eb3db769570841fda0c7ede375a27" @@ -3921,8 +3891,6 @@ 
dependencies = [ "checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" "checksum tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "05c1d570eb1a36f0345a5ce9c6c6e665b70b73d11236912c0b477616aeec47b1" "checksum tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)" = "" -"checksum tokio-current-thread 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e" -"checksum tokio-executor 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" "checksum tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" "checksum tokio-macros 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c3acc6aa564495a0f2e1d59fab677cd7f81a19994cfc7f3ad0e64301560389" "checksum tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4adb8b3e5f86b707f1b54e7c15b6de52617a823608ccda98a15d3a24222f265a" diff --git a/src/rust/engine/engine_cffi/src/lib.rs b/src/rust/engine/engine_cffi/src/lib.rs index 6f872f5c0f2..f94d822586a 100644 --- a/src/rust/engine/engine_cffi/src/lib.rs +++ b/src/rust/engine/engine_cffi/src/lib.rs @@ -40,8 +40,8 @@ mod cffi_externs; use engine::externs::*; use engine::{ - externs, nodes, Core, ExecutionRequest, ExecutionTermination, Function, Handle, Key, Params, - RootResult, Rule, Scheduler, Session, Tasks, TypeId, Types, Value, + externs, nodes, Core, ExecutionRequest, ExecutionTermination, Failure, Function, Handle, + Key, Params, Rule, Scheduler, Session, Tasks, TypeId, Types, Value, }; use futures::compat::Future01CompatExt; use futures01::{future, Future}; @@ -66,18 +66,40 @@ use workunit_store::WorkUnit; #[cfg(test)] mod tests; +/// +/// A clone of ExecutionTermination with a "no error" case in order to handle the fact that +/// cbindgen cannot handle Options. +/// +#[repr(u8)] +pub enum RawExecutionTermination { + KeyboardInterrupt, + Timeout, + NoError, +} + +impl From for RawExecutionTermination { + fn from(et: ExecutionTermination) -> Self { + match et { + ExecutionTermination::KeyboardInterrupt => RawExecutionTermination::KeyboardInterrupt, + ExecutionTermination::Timeout => RawExecutionTermination::Timeout, + } + } +} + // TODO: Consider renaming and making generic for collections of PyResults. 
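+// The Python side inspects `err` before touching the node results: anything other than `NoError`
+// is surfaced as an exception, and `create_for_error` leaves `nodes` empty.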
#[repr(C)] pub struct RawNodes { + err: RawExecutionTermination, nodes_ptr: *const PyResult, nodes_len: u64, nodes: Vec, } impl RawNodes { - fn create(node_states: Vec) -> Box { + fn create(node_states: Vec>) -> Box { let nodes = node_states.into_iter().map(PyResult::from).collect(); let mut raw_nodes = Box::new(RawNodes { + err: RawExecutionTermination::NoError, nodes_ptr: Vec::new().as_ptr(), nodes_len: 0, nodes: nodes, @@ -87,6 +109,15 @@ impl RawNodes { raw_nodes.nodes_len = raw_nodes.nodes.len() as u64; raw_nodes } + + fn create_for_error(err: ExecutionTermination) -> Box { + Box::new(RawNodes { + err: err.into(), + nodes_ptr: Vec::new().as_ptr(), + nodes_len: 0, + nodes: Vec::new(), + }) + } } #[no_mangle] @@ -507,10 +538,7 @@ pub extern "C" fn scheduler_execute( with_session(session_ptr, |session| { match scheduler.execute(execution_request, session) { Ok(raw_results) => Box::into_raw(RawNodes::create(raw_results)), - //TODO: Passing a raw null pointer to Python is a less-than-ideal way - //of noting an error condition. When we have a better way to send complicated - //error-signaling values over the FFI boundary, we should revisit this. - Err(ExecutionTermination::KeyboardInterrupt) => std::ptr::null(), + Err(e) => Box::into_raw(RawNodes::create_for_error(e)), } }) }) @@ -540,6 +568,33 @@ pub extern "C" fn execution_add_root_select( }) } +#[no_mangle] +pub extern "C" fn execution_set_poll(execution_request_ptr: *mut ExecutionRequest, poll: bool) { + with_execution_request(execution_request_ptr, |execution_request| { + execution_request.poll = poll; + }) +} + +#[no_mangle] +pub extern "C" fn execution_set_poll_delay( + execution_request_ptr: *mut ExecutionRequest, + poll_delay_in_ms: u64, +) { + with_execution_request(execution_request_ptr, |execution_request| { + execution_request.poll_delay = Some(Duration::from_millis(poll_delay_in_ms)); + }) +} + +#[no_mangle] +pub extern "C" fn execution_set_timeout( + execution_request_ptr: *mut ExecutionRequest, + timeout_in_ms: u64, +) { + with_execution_request(execution_request_ptr, |execution_request| { + execution_request.timeout = Some(Duration::from_millis(timeout_in_ms)); + }) +} + #[no_mangle] pub extern "C" fn tasks_create() -> *const Tasks { // Allocate on the heap via `Box` and return a raw pointer to the boxed value. 
@@ -614,8 +669,8 @@ pub extern "C" fn graph_invalidate_all_paths(scheduler_ptr: *mut Scheduler) -> u } #[no_mangle] -pub extern "C" fn check_invalidation_watcher_liveness(scheduler_ptr: *mut Scheduler) -> bool { - with_scheduler(scheduler_ptr, |scheduler| scheduler.core.watcher.is_alive()) +pub extern "C" fn check_invalidation_watcher_liveness(scheduler_ptr: *mut Scheduler) -> PyResult { + with_scheduler(scheduler_ptr, |scheduler| scheduler.is_valid().into()) } #[no_mangle] @@ -720,6 +775,11 @@ pub extern "C" fn session_create( }) } +#[no_mangle] +pub extern "C" fn session_new_run_id(session_ptr: *mut Session) { + with_session(session_ptr, |session| session.new_run_id()) +} + #[no_mangle] pub extern "C" fn session_destroy(ptr: *mut Session) { let _ = unsafe { Box::from_raw(ptr) }; diff --git a/src/rust/engine/graph/Cargo.toml b/src/rust/engine/graph/Cargo.toml index 758ea40c616..868fa26f524 100644 --- a/src/rust/engine/graph/Cargo.toml +++ b/src/rust/engine/graph/Cargo.toml @@ -8,14 +8,16 @@ publish = false [dependencies] boxfuture = { path = "../boxfuture" } fnv = "1.0.5" -futures01 = { package = "futures", version = "0.1" } futures = { version = "0.3", features = ["compat"] } +futures01 = { package = "futures", version = "0.1" } hashing = { path = "../hashing" } indexmap = "1.0.2" log = "0.4" parking_lot = "0.6" petgraph = "0.4.5" +tokio = { version = "0.2", features = ["time"] } [dev-dependencies] rand = "0.6" env_logger = "0.5.4" +tokio = { version = "0.2", features = ["macros", "rt-threaded", "time"] } diff --git a/src/rust/engine/graph/src/entry.rs b/src/rust/engine/graph/src/entry.rs index bf957c72fb6..5162cb0fc3f 100644 --- a/src/rust/engine/graph/src/entry.rs +++ b/src/rust/engine/graph/src/entry.rs @@ -59,18 +59,24 @@ impl Generation { /// same values as they did when this Node was last run; if so, the value can be re-used /// (and should be marked "Clean"). /// -/// If the value is Uncacheable it may only be consumed in the Session that produced it, and should -/// be recomputed in a new Session. +/// If the value is Uncacheable it may only be consumed in the same Run that produced it, and should +/// be recomputed in a new Run. +/// +/// A value of type UncacheableDependencies has Uncacheable dependencies, and is treated as +/// equivalent to Dirty in all cases except when `poll`d: since `poll` requests are waiting for +/// meaningful work to do, they need to differentiate between a truly invalidated/changed (Dirty) +/// Node and a Node that would be re-cleaned once per session. /// /// If the value is Clean, the consumer can simply use the value as-is. /// #[derive(Clone, Debug)] pub enum EntryResult { Clean(Result), + UncacheableDependencies(Result), Dirty(Result), Uncacheable( Result, - <::Context as NodeContext>::SessionId, + <::Context as NodeContext>::RunId, ), } @@ -78,8 +84,26 @@ impl EntryResult { fn is_clean(&self, context: &N::Context) -> bool { match self { EntryResult::Clean(..) => true, - EntryResult::Uncacheable(_, session_id) => context.session_id() == session_id, + EntryResult::Uncacheable(_, run_id) => context.run_id() == run_id, + EntryResult::Dirty(..) => false, + EntryResult::UncacheableDependencies(..) => false, + } + } + + fn has_uncacheable_deps(&self) -> bool { + match self { + EntryResult::Uncacheable(_, _) | EntryResult::UncacheableDependencies(_) => true, + EntryResult::Clean(..) | EntryResult::Dirty(..) => false, + } + } + + /// Returns true if this result should block for polling (because there is no work to do + /// currently to clean it). 
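+  /// Clean and UncacheableDependencies results wait; a Dirty result never waits, and an
+  /// Uncacheable result waits only within the Run that produced it.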
+ fn poll_should_wait(&self, context: &N::Context) -> bool { + match self { + EntryResult::Uncacheable(_, run_id) => context.run_id() == run_id, EntryResult::Dirty(..) => false, + EntryResult::UncacheableDependencies(_) | EntryResult::Clean(..) => true, } } @@ -91,19 +115,29 @@ impl EntryResult { } } - /// Iff the value is Clean, mark it Dirty. + /// If the value is in a Clean state, mark it Dirty. fn dirty(&mut self) { - if let EntryResult::Clean(value) = self { - *self = EntryResult::Dirty(value.clone()) + match self { + EntryResult::Clean(v) | EntryResult::UncacheableDependencies(v) => { + *self = EntryResult::Dirty(v.clone()); + } + EntryResult::Dirty(_) | EntryResult::Uncacheable(_, _) => {} } } - /// Iff the value is Dirty, mark it Clean. + /// If the value is Dirty, mark it Clean. fn clean(&mut self) { if let EntryResult::Dirty(value) = self { *self = EntryResult::Clean(value.clone()) } } + + /// If the value is Dirty, mark it UncacheableDependencies. + fn uncacheable_deps(&mut self) { + if let EntryResult::Dirty(value) = self { + *self = EntryResult::UncacheableDependencies(value.clone()) + } + } } impl AsRef> for EntryResult { @@ -112,6 +146,7 @@ impl AsRef> for EntryResult { EntryResult::Clean(v) => v, EntryResult::Dirty(v) => v, EntryResult::Uncacheable(v, _) => v, + EntryResult::UncacheableDependencies(v) => v, } } } @@ -146,9 +181,13 @@ pub enum EntryState { // A node that has completed, and then possibly been marked dirty. Because marking a node // dirty does not eagerly re-execute any logic, it will stay this way until a caller moves it // back to Running. + // + // A Completed entry can have "pollers" whom are waiting for the Node to either be dirtied or + // otherwise invalidated. Completed { run_token: RunToken, generation: Generation, + pollers: Vec>, result: EntryResult, dep_generations: Vec, }, @@ -194,6 +233,42 @@ impl Entry { &self.node } + /// + /// If this Node is currently complete and clean with the given Generation, then return a Future + /// that will be satisfied when it is changed in any way. If the node is not clean, or the + /// generation mismatches, returns immediately. + /// + /// NB: The returned Future is infalliable. + /// + pub fn poll(&self, context: &N::Context, last_seen_generation: Generation) -> BoxFuture<(), ()> { + let mut state = self.state.lock(); + match *state { + EntryState::Completed { + ref result, + generation, + ref mut pollers, + .. + } => { + if generation == last_seen_generation && result.poll_should_wait(context) { + // The Node is currently clean with the observed generation: add a poller on the + // Completed node that will be notified when it is dirtied or dropped. If the Node moves + // to another state, the received will be notified that the sender was dropped, and it + // will be converted into a successful result. + let (send, recv) = oneshot::channel(); + pollers.push(send); + recv.then(|_| Ok(())).to_boxed() + } else { + // The Node is not clean, or the generation has changed. + future::ok(()).to_boxed() + } + } + _ => { + // The Node is not Completed, and should be requested. + future::ok(()).to_boxed() + } + } + } + /// /// If the Future for this Node has already completed, returns a clone of its result. /// @@ -345,6 +420,7 @@ impl Entry { EntryState::Completed { run_token, generation, + pollers, result, dep_generations, } => { @@ -358,12 +434,14 @@ impl Entry { "A clean Node should not reach this point: {:?}", result ); + // NB: Explicitly drop the pollers: would happen anyway, but avoids an unused variable. 
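+        // Dropping the senders wakes any registered pollers: their receivers resolve with a
+        // "sender dropped" error, which `poll` converts into a successful (changed) result.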
+ mem::drop(pollers); // The Node has already completed but needs to re-run. If the Node is dirty, we are the // first caller to request it since it was marked dirty. We attempt to clean it (which // will cause it to re-run if the dep_generations mismatch). // // On the other hand, if the Node is uncacheable, we store the previous result as - // Uncacheable, which allows its value to be used only within the current Session. + // Uncacheable, which allows its value to be used only within the current Run. Self::run( context, &self.node, @@ -409,7 +487,7 @@ impl Entry { result_run_token: RunToken, dep_generations: Vec, result: Option>, - has_dirty_dependencies: bool, + has_uncacheable_deps: bool, _graph: &mut super::InnerGraph, ) { let mut state = self.state.lock(); @@ -474,9 +552,9 @@ impl Entry { // If the new result does not match the previous result, the generation increments. let (generation, next_result) = if let Some(result) = result { let next_result = if !self.node.cacheable() { - EntryResult::Uncacheable(result, context.session_id().clone()) - } else if has_dirty_dependencies { - EntryResult::Dirty(result) + EntryResult::Uncacheable(result, context.run_id().clone()) + } else if has_uncacheable_deps { + EntryResult::UncacheableDependencies(result) } else { EntryResult::Clean(result) }; @@ -487,12 +565,12 @@ impl Entry { (generation.next(), next_result) } } else { - // Node was marked clean. + // Node was clean. // NB: The `expect` here avoids a clone and a comparison: see the method docs. let mut result = previous_result.expect("A Node cannot be marked clean without a previous result."); - if has_dirty_dependencies { - result.dirty(); + if has_uncacheable_deps { + result.uncacheable_deps(); } else { result.clean(); } @@ -514,6 +592,7 @@ impl Entry { } EntryState::Completed { result: next_result, + pollers: Vec::new(), dep_generations, run_token, generation, @@ -633,15 +712,33 @@ impl Entry { trace!("Dirtying node {:?}", self.node); match state { &mut EntryState::Running { ref mut dirty, .. } => { - *dirty = true; + // An uncacheable node can never be marked dirty. + if self.node.cacheable() { + *dirty = true; + } } - &mut EntryState::Completed { ref mut result, .. } => { + &mut EntryState::Completed { + ref mut result, + ref mut pollers, + .. + } => { + // Notify all pollers (ignoring any that have gone away.) + for poller in pollers.drain(..) { + let _ = poller.send(()); + } result.dirty(); } &mut EntryState::NotStarted { .. } => {} } } + pub fn is_started(&self) -> bool { + match *self.state.lock() { + EntryState::NotStarted { .. } => false, + EntryState::Completed { .. } | EntryState::Running { .. } => true, + } + } + pub fn is_clean(&self, context: &N::Context) -> bool { match *self.state.lock() { EntryState::NotStarted { @@ -662,6 +759,13 @@ impl Entry { } } + pub fn has_uncacheable_deps(&self) -> bool { + match *self.state.lock() { + EntryState::Completed { ref result, .. } => result.has_uncacheable_deps(), + EntryState::NotStarted { .. } | EntryState::Running { .. 
} => false, + } + } + pub(crate) fn format(&self, context: &N::Context) -> String { let state = match self.peek(context) { Some(Ok(ref nr)) => format!("{:?}", nr), diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs index bd66937f8d9..44b626bda50 100644 --- a/src/rust/engine/graph/src/lib.rs +++ b/src/rust/engine/graph/src/lib.rs @@ -56,6 +56,7 @@ use parking_lot::Mutex; use petgraph::graph::DiGraph; use petgraph::visit::EdgeRef; use petgraph::Direction; +use tokio::time::delay_for; pub use crate::node::{EntryId, Node, NodeContext, NodeError, NodeTracer, NodeVisualizer}; use boxfuture::{BoxFuture, Boxable}; @@ -325,8 +326,11 @@ impl InnerGraph { let root_ids: HashSet<_, FNV> = self .nodes .iter() - .filter_map(|(entry, &entry_id)| { - if predicate(entry) { + .filter_map(|(node, &entry_id)| { + // A NotStarted entry does not need clearing, and we can assume that its dependencies are + // either already dirtied, or have never observed a value for it. Filtering these redundant + // events helps to "debounce" invalidation (ie, avoid redundent re-dirtying of dependencies). + if predicate(node) && self.unsafe_entry_for_id(entry_id).is_started() { Some(entry_id) } else { None @@ -338,7 +342,7 @@ impl InnerGraph { .walk( root_ids.iter().cloned().collect(), Direction::Incoming, - |id| !self.entry_for_id(*id).unwrap().node().cacheable(), + |_| false, ) .filter(|eid| !root_ids.contains(eid)) .collect(); @@ -669,18 +673,12 @@ impl Graph { inner.nodes.len() } - /// - /// Request the given dst Node, optionally in the context of the given src Node. - /// - /// If there is no src Node, or the src Node is not cacheable, this method will retry for - /// invalidation until the Node completes. - /// - pub fn get( + fn get_inner( &self, src_id: Option, context: &N::Context, dst_node: N, - ) -> BoxFuture { + ) -> BoxFuture<(N::Item, Generation), N::Error> { // Compute information about the dst under the Graph lock, and then release it. let (dst_retry, mut entry, entry_id) = { // Get or create the destination, and then insert the dep and return its state. @@ -739,11 +737,7 @@ impl Graph { if counter == 0 { break Err(N::Error::exhausted()); } - let dep_res = entry - .get(&context, entry_id) - .map(|(res, _)| res) - .compat() - .await; + let dep_res = entry.get(&context, entry_id).compat().await; match dep_res { Ok(r) => break Ok(r), Err(err) if err == N::Error::invalidated() => continue, @@ -754,10 +748,28 @@ impl Graph { uncached_node.boxed().compat().to_boxed() } else { // Not retriable. - entry.get(context, entry_id).map(|(res, _)| res).to_boxed() + entry.get(context, entry_id) } } + /// + /// Request the given dst Node, optionally in the context of the given src Node. + /// + /// If there is no src Node, or the src Node is not cacheable, this method will retry for + /// invalidation until the Node completes. + /// + pub fn get( + &self, + src_id: Option, + context: &N::Context, + dst_node: N, + ) -> BoxFuture { + self + .get_inner(src_id, context, dst_node) + .map(|(res, _generation)| res) + .to_boxed() + } + /// /// Return the value of the given Node. Shorthand for `self.get(None, context, node)`. /// @@ -765,6 +777,40 @@ impl Graph { self.get(None, context, node) } + /// + /// Gets the value of the given Node (optionally waiting for it to have changed since the given + /// LastObserved token), and then returns its new value and a new LastObserved token. 
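+  /// If a LastObserved token is provided along with a `delay`, the delay is applied after the
+  /// wait resolves and before re-requesting the Node, to avoid re-running too quickly when
+  /// changes arrive in clusters.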
+ /// + pub async fn poll( + &self, + node: N, + token: Option, + delay: Option, + context: &N::Context, + ) -> Result<(N::Item, LastObserved), N::Error> { + // If the node is currently clean at the given token, Entry::poll will delay until it has + // changed in some way. + if let Some(LastObserved(generation)) = token { + let entry = { + let mut inner = self.inner.lock(); + let entry_id = inner.ensure_entry(node.clone()); + inner.unsafe_entry_for_id(entry_id).clone() + }; + entry + .poll(context, generation) + .compat() + .await + .expect("Polling is infalliable"); + if let Some(delay) = delay { + delay_for(delay).await; + } + }; + + // Re-request the Node. + let (res, generation) = self.get_inner(None, context, node).compat().await?; + Ok((res, LastObserved(generation))) + } + fn report_cycle( src_id: EntryId, potential_dst_id: EntryId, @@ -926,9 +972,9 @@ impl Graph { run_token: RunToken, result: Option>, ) { - let (entry, has_dirty_dependencies, dep_generations) = { + let (entry, has_uncacheable_deps, dep_generations) = { let inner = self.inner.lock(); - let mut has_dirty_dependencies = false; + let mut has_uncacheable_deps = false; // Get the Generations of all dependencies of the Node. We can trust that these have not changed // since we began executing, as long as we are not currently marked dirty (see the method doc). let dep_generations = inner @@ -936,18 +982,19 @@ impl Graph { .neighbors_directed(entry_id, Direction::Outgoing) .filter_map(|dep_id| inner.entry_for_id(dep_id)) .map(|entry| { - // If a dependency is uncacheable or currently dirty, this Node should complete as dirty, - // independent of matching Generation values. This is to allow for the behaviour that an - // uncacheable Node should always have dirty dependents, transitively. - if !entry.node().cacheable() || !entry.is_clean(context) { - has_dirty_dependencies = true; + // If a dependency is itself uncacheable or has uncacheable deps, this Node should + // also complete as having uncacheable dpes, independent of matching Generation values. + // This is to allow for the behaviour that an uncacheable Node should always have "dirty" + // (marked as UncacheableDependencies) dependents, transitively. + if !entry.node().cacheable() || entry.has_uncacheable_deps() { + has_uncacheable_deps = true; } entry.generation() }) .collect(); ( inner.entry_for_id(entry_id).cloned(), - has_dirty_dependencies, + has_uncacheable_deps, dep_generations, ) }; @@ -959,7 +1006,7 @@ impl Graph { run_token, dep_generations, result, - has_dirty_dependencies, + has_uncacheable_deps, &mut inner, ); } @@ -1046,6 +1093,12 @@ impl Graph { } } +/// +/// An opaque token that represents a particular observed "version" of a Node. +/// +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub struct LastObserved(Generation); + /// /// Represents the state of a particular walk through a Graph. Implements Iterator and has the same /// lifetime as the Graph itself. diff --git a/src/rust/engine/graph/src/node.rs b/src/rust/engine/graph/src/node.rs index a7670dc38a3..f640023683a 100644 --- a/src/rust/engine/graph/src/node.rs +++ b/src/rust/engine/graph/src/node.rs @@ -113,11 +113,11 @@ pub trait NodeContext: Clone + Send + Sync + 'static { type Node: Node; /// - /// The Session ID type for this Context. Some Node behaviours (in particular: Node::cacheable) - /// have Session-specific semantics. More than one context object might be associated with a - /// single caller "session". + /// The Run ID type for this Context. 
Some Node behaviours have Run-specific semantics. In + /// particular: an uncacheable (Node::cacheable) Node will execute once per Run, regardless + /// of other invalidation. /// - type SessionId: Clone + Debug + Eq + Send; + type RunId: Clone + Debug + Eq + Send; /// /// Creates a clone of this NodeContext to be used for a different Node. @@ -127,10 +127,10 @@ pub trait NodeContext: Clone + Send + Sync + 'static { fn clone_for(&self, entry_id: EntryId) -> ::Context; /// - /// Returns the SessionId for this Context, which should uniquely identify a caller's run for the - /// purposes of "once per Session" behaviour. + /// Returns the RunId for this Context, which should uniquely identify a caller's run for the + /// purposes of "once per Run" behaviour. /// - fn session_id(&self) -> &Self::SessionId; + fn run_id(&self) -> &Self::RunId; /// /// Returns a reference to the Graph for this Context. diff --git a/src/rust/engine/graph/src/tests.rs b/src/rust/engine/graph/src/tests.rs index 4438bfa95a7..9edf9ef429d 100644 --- a/src/rust/engine/graph/src/tests.rs +++ b/src/rust/engine/graph/src/tests.rs @@ -12,6 +12,7 @@ use boxfuture::{BoxFuture, Boxable}; use futures01::future::{self, Future}; use hashing::Digest; use parking_lot::Mutex; +use tokio::time::{timeout, Elapsed}; use rand::Rng; @@ -88,7 +89,7 @@ fn invalidate_and_rerun() { // Request with a different salt, which will cause both the middle and upper nodes to rerun since // their input values have changed. - let context = context.new_session(1).with_salt(1); + let context = context.new_run(1).with_salt(1); assert_eq!( graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 1), T(2, 1)]) @@ -201,6 +202,77 @@ fn invalidate_randomly() { ); } +#[tokio::test] +async fn poll_cacheable() { + let graph = Arc::new(Graph::new()); + let context = TContext::new(graph.clone()); + + // Poll with an empty graph should succeed. + let (result, token1) = graph + .poll(TNode::new(2), None, None, &context) + .await + .unwrap(); + assert_eq!(result, vec![T(0, 0), T(1, 0), T(2, 0)]); + + // Re-polling on a non-empty graph but with no LastObserved token should return immediately with + // the same value, and the same token. + let (result, token2) = graph + .poll(TNode::new(2), None, None, &context) + .await + .unwrap(); + assert_eq!(result, vec![T(0, 0), T(1, 0), T(2, 0)]); + assert_eq!(token1, token2); + + // But polling with the previous token should wait, since nothing has changed. + let request = graph.poll(TNode::new(2), Some(token2), None, &context); + match timeout(Duration::from_millis(1000), request).await { + Err(Elapsed { .. }) => (), + e => panic!("Should have timed out, instead got: {:?}", e), + } + + // Invalidating something and re-polling should re-compute. + graph.invalidate_from_roots(|&TNode(n, _)| n == 0); + let (result, _) = graph + .poll(TNode::new(2), Some(token2), None, &context) + .await + .unwrap(); + assert_eq!(result, vec![T(0, 0), T(1, 0), T(2, 0)]); +} + +#[tokio::test] +async fn poll_uncacheable() { + let _logger = env_logger::try_init(); + let graph = Arc::new(Graph::new()); + // Create a context where the middle node is uncacheable. + let context = { + let mut uncacheable = HashSet::new(); + uncacheable.insert(TNode::new(1)); + TContext::new(graph.clone()).with_uncacheable(uncacheable) + }; + + // Poll with an empty graph should succeed. 
+ let (result, token1) = graph + .poll(TNode::new(2), None, None, &context) + .await + .unwrap(); + assert_eq!(result, vec![T(0, 0), T(1, 0), T(2, 0)]); + + // Polling with the previous token (in the same session) should wait, since nothing has changed. + let request = graph.poll(TNode::new(2), Some(token1), None, &context); + match timeout(Duration::from_millis(1000), request).await { + Err(Elapsed { .. }) => (), + e => panic!("Should have timed out, instead got: {:?}", e), + } + + // Invalidating something and re-polling should re-compute. + graph.invalidate_from_roots(|&TNode(n, _)| n == 0); + let (result, _) = graph + .poll(TNode::new(2), Some(token1), None, &context) + .await + .unwrap(); + assert_eq!(result, vec![T(0, 0), T(1, 0), T(2, 0)]); +} + #[test] fn dirty_dependents_of_uncacheable_node() { let graph = Arc::new(Graph::new()); @@ -223,7 +295,7 @@ fn dirty_dependents_of_uncacheable_node() { ); // Re-request the root in a new session and confirm that only the bottom node re-runs. - let context = context.new_session(1); + let context = context.new_run(1); assert_eq!( graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) @@ -232,7 +304,7 @@ fn dirty_dependents_of_uncacheable_node() { // Re-request with a new session and different salt, and confirm that everything re-runs bottom // up (the order of node cleaning). - let context = context.new_session(2).with_salt(1); + let context = context.new_run(2).with_salt(1); assert_eq!( graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 1), T(1, 1), T(2, 1)]) @@ -272,11 +344,17 @@ fn uncachable_node_only_runs_once() { graph.create(TNode::new(2), &context).wait(), Ok(vec![T(0, 0), T(1, 0), T(2, 0)]) ); - // TNode(0) was cleared by the invalidation while all nodes were running, - // but the uncacheable node TNode(1) reties it directly, so it runs twice. + // TNode(0) and TNode(2) are cleared and dirtied (respectively) before completing, and + // so run twice each. But the uncacheable node runs once. assert_eq!( context.runs(), - vec![TNode::new(2), TNode::new(1), TNode::new(0), TNode::new(0)] + vec![ + TNode::new(2), + TNode::new(1), + TNode::new(0), + TNode::new(0), + TNode::new(2) + ] ); } @@ -630,7 +708,7 @@ impl TNode { /// #[derive(Clone)] struct TContext { - session_id: usize, + run_id: usize, // A value that is included in every value computed by this context. Stands in for "the state of the // outside world". A test that wants to "change the outside world" and observe its effect on the // graph should change the salt to do so. 
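The polling contract exercised by the tests above is: a call with no LastObserved token resolves as soon as the Node completes, while a call that passes the token from a previous observation blocks until the Node has been invalidated and recomputed. A minimal caller-side sketch of that contract follows (illustrative only, not part of this change; it assumes the TNode/TContext fixtures defined in this test module and the Graph::poll signature added above, and the watch_node helper itself is hypothetical):

use std::sync::Arc;
use std::time::Duration;

// Hypothetical helper: drives Graph::poll in a loop, threading the LastObserved token
// forward so that each subsequent iteration waits for the Node's value to change.
// Assumes the TNode/TContext fixtures from this test module are in scope.
async fn watch_node(graph: Arc<Graph<TNode>>, context: TContext) {
  let mut token = None;
  for _ in 0..3 {
    // With token == None this resolves as soon as the Node is computed; with Some(token)
    // it waits for an invalidation, applies the optional delay, and then re-requests.
    let (value, new_token) = graph
      .poll(TNode::new(2), token, Some(Duration::from_millis(100)), &context)
      .await
      .unwrap();
    println!("observed: {:?}", value);
    token = Some(new_token);
  }
}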
@@ -648,11 +726,11 @@ struct TContext { } impl NodeContext for TContext { type Node = TNode; - type SessionId = usize; + type RunId = usize; fn clone_for(&self, entry_id: EntryId) -> TContext { TContext { - session_id: self.session_id, + run_id: self.run_id, salt: self.salt, edges: self.edges.clone(), delays: self.delays.clone(), @@ -663,8 +741,8 @@ impl NodeContext for TContext { } } - fn session_id(&self) -> &usize { - &self.session_id + fn run_id(&self) -> &usize { + &self.run_id } fn graph(&self) -> &Graph { @@ -685,7 +763,7 @@ impl NodeContext for TContext { impl TContext { fn new(graph: Arc>) -> TContext { TContext { - session_id: 0, + run_id: 0, salt: 0, edges: Arc::default(), delays: Arc::default(), @@ -716,8 +794,8 @@ impl TContext { self } - fn new_session(mut self, new_session_id: usize) -> TContext { - self.session_id = new_session_id; + fn new_run(mut self, new_run_id: usize) -> TContext { + self.run_id = new_run_id; { let mut runs = self.runs.lock(); runs.clear(); diff --git a/src/rust/engine/logging/src/logger.rs b/src/rust/engine/logging/src/logger.rs index 1b2f86cb611..c22e90e6ccd 100644 --- a/src/rust/engine/logging/src/logger.rs +++ b/src/rust/engine/logging/src/logger.rs @@ -23,7 +23,7 @@ use tokio::task_local; use ui::EngineDisplay; use uuid::Uuid; -const TIME_FORMAT_STR: &str = "%H:%M:%S"; +const TIME_FORMAT_STR: &str = "%H:%M:%S:%3f"; lazy_static! { pub static ref LOGGER: Logger = Logger::new(); diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index b173ccacbb9..d785e71e55d 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -1,7 +1,6 @@ // Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). 
-use std; use std::collections::{BTreeMap, HashSet}; use std::convert::{Into, TryInto}; use std::ops::Deref; @@ -9,9 +8,6 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; -use futures::compat::Future01CompatExt; -use futures01::Future; - use crate::core::{Failure, TypeId}; use crate::handles::maybe_drop_handles; use crate::nodes::{NodeKey, WrappedNode}; @@ -20,8 +16,9 @@ use crate::tasks::{Rule, Tasks}; use crate::types::Types; use boxfuture::{BoxFuture, Boxable}; -use core::clone::Clone; use fs::{safe_create_dir_all_ioerror, GitignoreStyleExcludes, PosixFS}; +use futures::compat::Future01CompatExt; +use futures01::Future; use graph::{EntryId, Graph, InvalidationResult, NodeContext}; use log::info; use process_execution::{ @@ -34,6 +31,7 @@ use rule_graph::RuleGraph; use sharded_lmdb::ShardedLmdb; use store::Store; use tokio::runtime::{Builder, Runtime}; +use uuid::Uuid; use watch::{Invalidatable, InvalidationWatcher}; const GIGABYTES: usize = 1024 * 1024 * 1024; @@ -57,7 +55,7 @@ pub struct Core { pub command_runner: Box, pub http_client: reqwest::Client, pub vfs: PosixFS, - pub watcher: InvalidationWatcher, + pub watcher: Arc, pub build_root: PathBuf, } @@ -251,12 +249,12 @@ impl Core { })?; let watcher = InvalidationWatcher::new( - Arc::downgrade(&graph), executor.clone(), build_root.clone(), ignorer.clone(), experimental_fs_watcher, )?; + watcher.start(&graph); Ok(Core { graph: graph, @@ -314,14 +312,17 @@ pub struct Context { entry_id: Option, pub core: Arc, pub session: Session, + run_id: Uuid, } impl Context { pub fn new(core: Arc, session: Session) -> Context { + let run_id = session.run_id(); Context { entry_id: None, core, session, + run_id, } } @@ -346,7 +347,7 @@ impl Context { impl NodeContext for Context { type Node = NodeKey; - type SessionId = String; + type RunId = Uuid; /// /// Clones this Context for a new EntryId. 
Because the Core of the context is an Arc, this @@ -357,11 +358,12 @@ impl NodeContext for Context { entry_id: Some(entry_id), core: self.core.clone(), session: self.session.clone(), + run_id: self.run_id, } } - fn session_id(&self) -> &Self::SessionId { - self.session.build_id() + fn run_id(&self) -> &Self::RunId { + &self.run_id } fn graph(&self) -> &Graph { diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index 90cb5465c49..3a2df865eca 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -42,10 +42,8 @@ mod tasks; mod types; pub use crate::context::Core; -pub use crate::core::{Function, Key, Params, TypeId, Value}; +pub use crate::core::{Failure, Function, Key, Params, TypeId, Value}; pub use crate::handles::Handle; -pub use crate::scheduler::{ - ExecutionRequest, ExecutionTermination, RootResult, Scheduler, Session, -}; +pub use crate::scheduler::{ExecutionRequest, ExecutionTermination, Scheduler, Session}; pub use crate::tasks::{Rule, Tasks}; pub use crate::types::Types; diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index 3432f4935ae..55194da8c6a 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -6,17 +6,16 @@ use std::convert::TryInto; use std::io; use std::path::{Path, PathBuf}; use std::sync::{mpsc, Arc}; -use std::time::Duration; +use std::time::{Duration, Instant}; use futures::compat::Future01CompatExt; -use futures::future::{self as future03}; -use futures01::future::Future; +use futures::future; use crate::context::{Context, Core}; use crate::core::{Failure, Params, TypeId, Value}; use crate::nodes::{NodeKey, Select, Tracer, Visualizer}; -use graph::{Graph, InvalidationResult}; +use graph::{Graph, InvalidationResult, LastObserved}; use hashing; use indexmap::IndexMap; use log::{debug, info, warn}; @@ -29,6 +28,7 @@ use workunit_store::WorkUnitStore; pub enum ExecutionTermination { KeyboardInterrupt, + Timeout, } /// @@ -41,8 +41,9 @@ pub enum ExecutionTermination { struct InnerSession { // The total size of the graph at Session-creation time. preceding_graph_size: usize, - // The set of roots that have been requested within this session. - roots: Mutex>, + // The set of roots that have been requested within this session, with associated LastObserved + // times if they were polled. + roots: Mutex>>, // If enabled, the display that will render the progress of the V2 engine. This is only // Some(_) if the --v2-ui option is enabled. display: Option>>, @@ -50,8 +51,16 @@ struct InnerSession { should_record_zipkin_spans: bool, // A place to store info about workunits in rust part workunit_store: WorkUnitStore, - // The unique id for this run. Used as the id of the session, and for metrics gathering purposes. + // The unique id for this Session: used for metrics gathering purposes. build_id: String, + // An id used to control the visibility of uncacheable rules. Generally this is identical for an + // entire Session, but in some cases (in particular, a `--loop`) the caller wants to retain the + // same Session while still observing new values for uncacheable rules like Goals. + // + // TODO: Figure out how the `--loop` interplays with metrics. It's possible that for metrics + // purposes, each iteration of a loop should be considered to be a new Session, but for now the + // Session/build_id would be stable. 
+ run_id: Mutex, should_report_workunits: bool, } @@ -77,24 +86,36 @@ impl Session { let inner_session = InnerSession { preceding_graph_size: scheduler.core.graph.len(), - roots: Mutex::new(HashSet::new()), + roots: Mutex::new(HashMap::new()), display, should_record_zipkin_spans, workunit_store: WorkUnitStore::new(), build_id, + run_id: Mutex::new(Uuid::new_v4()), should_report_workunits, }; Session(Arc::new(inner_session)) } - fn extend(&self, new_roots: &[Root]) { + fn extend(&self, new_roots: Vec<(Root, Option)>) { let mut roots = self.0.roots.lock(); - roots.extend(new_roots.iter().cloned()); + roots.extend(new_roots); + } + + fn zip_last_observed(&self, inputs: &[Root]) -> Vec<(Root, Option)> { + let roots = self.0.roots.lock(); + inputs + .iter() + .map(|root| { + let last_observed = roots.get(root).cloned().unwrap_or(None); + (root.clone(), last_observed) + }) + .collect() } fn root_nodes(&self) -> Vec { let roots = self.0.roots.lock(); - roots.iter().map(|r| r.clone().into()).collect() + roots.keys().map(|r| r.clone().into()).collect() } pub fn preceding_graph_size(&self) -> usize { @@ -121,6 +142,16 @@ impl Session { &self.0.build_id } + pub fn run_id(&self) -> Uuid { + let run_id = self.0.run_id.lock(); + *run_id + } + + pub fn new_run_id(&self) { + let mut run_id = self.0.run_id.lock(); + *run_id = Uuid::new_v4(); + } + pub fn write_stdout(&self, msg: &str) { if let Some(display) = self.maybe_display() { let mut d = display.lock(); @@ -160,11 +191,34 @@ impl Session { pub struct ExecutionRequest { // Set of roots for an execution, in the order they were declared. pub roots: Vec, + // An ExecutionRequest with `poll` set will wait for _all_ of the given roots to have changed + // since their previous observed value in this Session before returning them. + // + // Example: if an ExecutionRequest is made twice in a row for roots within the same Session, + // and this value is set, the first run will request the roots and return immediately when they + // complete. The second request will check whether the roots have changed, and if they haven't + // changed, will wait until they have (or until the timeout elapses) before re-requesting them. + // + // TODO: The `poll`, `poll_delay`, and `timeout` parameters exist to support a coarse-grained API + // for synchronous Node-watching to Python. Rather than further expanding this `execute` API, we + // should likely port those usecases to rust. + pub poll: bool, + // If poll is set, a delay to apply after having noticed that Nodes have changed and before + // requesting them. + pub poll_delay: Option, + // A timeout applied globally to the request. When a request times out, work is _not_ cancelled, + // and will continue to completion in the background. + pub timeout: Option, } impl ExecutionRequest { pub fn new() -> ExecutionRequest { - ExecutionRequest { roots: Vec::new() } + ExecutionRequest { + roots: Vec::new(), + poll: false, + poll_delay: None, + timeout: None, + } } /// @@ -271,6 +325,18 @@ impl Scheduler { m } + /// + /// Return unit if the Scheduler is still valid, or an error string if something has invalidated + /// the Scheduler, indicating that it should re-initialize. See InvalidationWatcher. + /// + pub fn is_valid(&self) -> Result<(), String> { + let core = self.core.clone(); + self.core.executor.block_on(async move { + // Confirm that our InvalidationWatcher is still alive. + core.watcher.is_valid().await + }) + } + /// /// Return all Digests currently in memory in this Scheduler. 
/// @@ -279,31 +345,60 @@ impl Scheduler { self.core.graph.all_digests(&context) } + async fn poll_or_create( + context: &Context, + root: Root, + last_observed: Option, + poll: bool, + poll_delay: Option, + ) -> ObservedValueResult { + let (result, last_observed) = if poll { + let (result, last_observed) = context + .core + .graph + .poll(root.into(), last_observed, poll_delay, &context) + .await?; + (result, Some(last_observed)) + } else { + let result = context + .core + .graph + .create(root.into(), &context) + .compat() + .await?; + (result, None) + }; + + Ok(( + result + .try_into() + .unwrap_or_else(|e| panic!("A Node implementation was ambiguous: {:?}", e)), + last_observed, + )) + } + /// /// Attempts to complete all of the given roots, and send the result on the given mpsc Sender, /// which allows the caller to poll a channel for the result without blocking uninterruptibly /// on a Future. /// fn execute_helper( - context: Context, - sender: mpsc::Sender>>, - roots: Vec, + &self, + request: &ExecutionRequest, + session: &Session, + sender: mpsc::Sender>, ) { + let context = Context::new(self.core.clone(), session.clone()); + let roots = session.zip_last_observed(&request.roots); + let poll = request.poll; + let poll_delay = request.poll_delay; let core = context.core.clone(); core.executor.spawn_and_ignore(async move { - let res = future03::join_all( + let res = future::join_all( roots .into_iter() - .map(|root| { - context - .core - .graph - .create(root.into(), &context) - .map(|nr| { - nr.try_into() - .unwrap_or_else(|e| panic!("A Node implementation was ambiguous: {:?}", e)) - }) - .compat() + .map(|(root, last_observed)| { + Self::poll_or_create(&context, root, last_observed, poll, poll_delay) }) .collect::>(), ) @@ -312,6 +407,32 @@ impl Scheduler { }); } + fn execute_record_results( + roots: &[Root], + session: &Session, + results: Vec, + ) -> Vec> { + // Store the roots that were operated on and their LastObserved values. + session.extend( + results + .iter() + .zip(roots.iter()) + .map(|(result, root)| { + let last_observed = result + .as_ref() + .ok() + .and_then(|(_value, last_observed)| *last_observed); + (root.clone(), last_observed) + }) + .collect::>(), + ); + + results + .into_iter() + .map(|res| res.map(|(value, _last_observed)| value)) + .collect() + } + /// /// Compute the results for roots in the given request. /// @@ -319,18 +440,17 @@ impl Scheduler { &self, request: &ExecutionRequest, session: &Session, - ) -> Result, ExecutionTermination> { - // Bootstrap tasks for the roots, and then wait for all of them. - debug!("Launching {} roots.", request.roots.len()); - - session.extend(&request.roots); + ) -> Result>, ExecutionTermination> { + debug!( + "Launching {} roots (poll={}).", + request.roots.len(), + request.poll + ); - // Wait for all roots to complete. Failure here should be impossible, because each + // Spawn and wait for all roots to complete. Failure here should be impossible, because each // individual Future in the join was (eventually) mapped into success. - let context = Context::new(self.core.clone(), session.clone()); let (sender, receiver) = mpsc::channel(); - - Scheduler::execute_helper(context, sender, request.roots.clone()); + self.execute_helper(request, session, sender); let roots: Vec = request .roots .clone() @@ -341,12 +461,13 @@ impl Scheduler { // This map keeps the k most relevant jobs in assigned possitions. // Keys are positions in the display (display workers) and the values are the actual jobs to print. 
let mut tasks = IndexMap::new(); - let refresh_interval = Duration::from_millis(100); + let deadline = request.timeout.map(|timeout| Instant::now() + timeout); let maybe_display_handle = Self::maybe_display_initialize(&session); let result = loop { - if let Ok(res) = receiver.recv_timeout(refresh_interval) { - break Ok(res); + if let Ok(res) = receiver.recv_timeout(Self::compute_refresh_delay(deadline)) { + // Completed successfully. + break Ok(Self::execute_record_results(&request.roots, &session, res)); } else if let Err(e) = Self::maybe_display_render( &self.core.graph, &roots, @@ -354,6 +475,9 @@ impl Scheduler { &mut tasks) { break Err(e); + } else if deadline.map(|d| d < Instant::now()).unwrap_or(false) { + // The timeout on the request has been exceeded. + break Err(ExecutionTermination::Timeout); } }; Self::maybe_display_teardown(session, maybe_display_handle); @@ -361,6 +485,14 @@ impl Scheduler { result } + fn compute_refresh_delay(deadline: Option) -> Duration { + let refresh_interval = Duration::from_millis(100); + deadline + .and_then(|deadline| deadline.checked_duration_since(Instant::now())) + .map(|duration_till_deadline| std::cmp::min(refresh_interval, duration_till_deadline)) + .unwrap_or(refresh_interval) + } + fn maybe_display_initialize(session: &Session) -> Option { if let Some(display) = session.maybe_display() { { @@ -466,4 +598,4 @@ impl Drop for Scheduler { /// type Root = Select; -pub type RootResult = Result; +pub type ObservedValueResult = Result<(Value, Option), Failure>; diff --git a/src/rust/engine/watch/Cargo.toml b/src/rust/engine/watch/Cargo.toml index 0b4b43b5799..c64dccefc3e 100644 --- a/src/rust/engine/watch/Cargo.toml +++ b/src/rust/engine/watch/Cargo.toml @@ -8,10 +8,9 @@ publish = false [dependencies] crossbeam-channel = "0.3" fs = { path = "../fs" } -futures = { version = "0.3", features = ["compat"] } -futures-locks = "0.3.0" -futures01 = { package = "futures", version = "0.1" } +futures = "0.3" graph = { path = "../graph" } +hashing = { path = "../hashing" } log = "0.4" logging = { path = "../logging" } # notify is currently an experimental API, we are pinning to https://docs.rs/notify/5.0.0-pre.1/notify/ @@ -19,11 +18,10 @@ logging = { path = "../logging" } # The author suggests they will add the debounced watcher back into the stable 5.0.0 release. When that happens # we can move to it. notify = { git = "https://github.com/notify-rs/notify", rev = "fba00891d9105e2f581c69fbe415a58cb7966fdd" } +parking_lot = "0.6" task_executor = { path = "../task_executor" } [dev-dependencies] -hashing = { path = "../hashing" } -parking_lot = "0.6" tempfile = "3" testutil = { path = "../testutil" } tokio = { version = "0.2", features = ["rt-core", "macros"] } diff --git a/src/rust/engine/watch/src/lib.rs b/src/rust/engine/watch/src/lib.rs index 48e65110adf..e2e35aa4277 100644 --- a/src/rust/engine/watch/src/lib.rs +++ b/src/rust/engine/watch/src/lib.rs @@ -35,14 +35,12 @@ use std::thread; use std::time::Duration; use crossbeam_channel::{self, Receiver, RecvTimeoutError, TryRecvError}; -use futures::compat::Future01CompatExt; -use futures_locks::Mutex; -use log::{debug, error, warn}; -use notify::{RecommendedWatcher, RecursiveMode, Watcher}; -use task_executor::Executor; - use fs::GitignoreStyleExcludes; +use log::{debug, trace, warn}; use logging; +use notify::{RecommendedWatcher, RecursiveMode, Watcher}; +use parking_lot::Mutex; +use task_executor::Executor; /// /// An InvalidationWatcher maintains a Thread that receives events from a notify Watcher. 
@@ -51,23 +49,33 @@ use logging; /// and the caller should create a new InvalidationWatcher (or shut down, in some cases). Generally /// this will mean polling. /// -/// TODO: Need the above polling -/// -pub struct InvalidationWatcher { - watcher: Arc>, +struct Inner { + watcher: RecommendedWatcher, executor: Executor, - liveness: Receiver<()>, + liveness: Receiver, enabled: bool, + // Until the background task has started, contains the relevant inputs to launch it via + // start_background_thread. The decoupling of creating the `InvalidationWatcher` and starting it + // is to allow for testing of the background thread. + background_task_inputs: Option, } +type WatcherTaskInputs = ( + Arc, + PathBuf, + crossbeam_channel::Sender, + Receiver>, +); + +pub struct InvalidationWatcher(Mutex); + impl InvalidationWatcher { - pub fn new( - invalidatable: Weak, + pub fn new( executor: Executor, build_root: PathBuf, ignorer: Arc, enabled: bool, - ) -> Result { + ) -> Result, String> { // Inotify events contain canonical paths to the files being watched. // If the build_root contains a symlink the paths returned in notify events // wouldn't have the build_root as a prefix, and so we would miss invalidating certain nodes. @@ -78,7 +86,7 @@ impl InvalidationWatcher { let mut watcher: RecommendedWatcher = Watcher::new(watch_sender, Duration::from_millis(50)) .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; - let (thread_liveness_sender, thread_liveness_receiver) = crossbeam_channel::unbounded(); + let (liveness_sender, liveness_receiver) = crossbeam_channel::unbounded(); if enabled { // On darwin the notify API is much more efficient if you watch the build root // recursively, so we set up that watch here and then return early when watch() is @@ -96,20 +104,37 @@ impl InvalidationWatcher { } } + Ok(Arc::new(InvalidationWatcher(Mutex::new(Inner { + watcher, + executor, + liveness: liveness_receiver, + enabled, + background_task_inputs: Some(( + ignorer, + canonical_build_root, + liveness_sender, + watch_receiver, + )), + })))) + } + + /// + /// Starts the background task that monitors watch events. Panics if called more than once. + /// + pub fn start(&self, invalidatable: &Arc) { + let mut inner = self.0.lock(); + let (ignorer, canonical_build_root, liveness_sender, watch_receiver) = inner + .background_task_inputs + .take() + .expect("An InvalidationWatcher can only be started once."); + InvalidationWatcher::start_background_thread( - invalidatable, + Arc::downgrade(&invalidatable), ignorer, canonical_build_root, - thread_liveness_sender, + liveness_sender, watch_receiver, ); - - Ok(InvalidationWatcher { - watcher: Arc::new(Mutex::new(watcher)), - executor, - liveness: thread_liveness_receiver, - enabled, - }) } // Public for testing purposes. @@ -117,18 +142,18 @@ impl InvalidationWatcher { invalidatable: Weak, ignorer: Arc, canonical_build_root: PathBuf, - liveness_sender: crossbeam_channel::Sender<()>, + liveness_sender: crossbeam_channel::Sender, watch_receiver: Receiver>, - ) { + ) -> thread::JoinHandle<()> { thread::spawn(move || { logging::set_thread_destination(logging::Destination::Pantsd); - loop { + let exit_msg = loop { let event_res = watch_receiver.recv_timeout(Duration::from_millis(10)); let invalidatable = if let Some(g) = invalidatable.upgrade() { g } else { // The Invalidatable has been dropped: we're done. 
- break; + break "The watcher was shut down.".to_string(); }; match event_res { Ok(Ok(ev)) => { @@ -154,12 +179,13 @@ impl InvalidationWatcher { &path_relative_to_build_root, /* is_dir */ false, ) { + trace!("notify ignoring {:?}", path_relative_to_build_root); None } else { Some(path_relative_to_build_root) } }) - .map(|path_relative_to_build_root| { + .flat_map(|path_relative_to_build_root| { let mut paths_to_invalidate: Vec = vec![]; if let Some(parent_dir) = path_relative_to_build_root.parent() { paths_to_invalidate.push(parent_dir.to_path_buf()); @@ -167,8 +193,8 @@ impl InvalidationWatcher { paths_to_invalidate.push(path_relative_to_build_root); paths_to_invalidate }) - .flatten() .collect(); + // Only invalidate stuff if we have paths that weren't filtered out by gitignore. if !paths.is_empty() { debug!("notify invalidating {:?} because of {:?}", paths, ev.kind); @@ -180,69 +206,71 @@ impl InvalidationWatcher { warn!("Path(s) did not exist: {:?}", err.paths); continue; } else { - error!("File watcher failing with: {}", err); - break; + break format!("Watch error: {}", err); } } Err(RecvTimeoutError::Timeout) => continue, Err(RecvTimeoutError::Disconnected) => { - // The Watcher is gone: we're done. - break; + break "The watch provider exited.".to_owned(); } }; - } - debug!("Watch thread exiting."); - // Signal that we're exiting (which we would also do by just dropping the channel). - let _ = liveness_sender.send(()); - }); - } + }; - pub fn is_alive(&self) -> bool { - if let Ok(()) = self.liveness.try_recv() { - // The watcher background thread set the exit condition. Return false to signal that - // the watcher is not alive. - false - } else { - true - } + // Log and send the exit code. + warn!("File watcher exiting with: {}", exit_msg); + let _ = liveness_sender.send(exit_msg); + }) } /// - /// Watch the given path non-recursively. + /// An InvalidationWatcher will never restart on its own: a consumer should re-initialize if this + /// method returns an error. + /// + /// NB: This is currently polled by pantsd, but it could be long-polled or a callback. /// - pub async fn watch(&self, path: PathBuf) -> Result<(), notify::Error> { - // Short circuit here if we are on a Darwin platform because we should be watching - // the entire build root recursively already, or if we are not enabled. - if cfg!(target_os = "macos") || !self.enabled { - Ok(()) - } else { - // Using a futurized mutex here because for some reason using a regular mutex - // to block the io pool causes the v2 ui to not update which nodes its working - // on properly. - let watcher_lock = self.watcher.lock().compat().await; - match watcher_lock { - Ok(mut watcher_lock) => { - self - .executor - .spawn_blocking(move || watcher_lock.watch(path, RecursiveMode::NonRecursive)) - .await - } - Err(()) => Err(notify::Error::new(notify::ErrorKind::Generic( - "Couldn't lock mutex for invalidation watcher".to_string(), - ))), + pub async fn is_valid(&self) -> Result<(), String> { + // Confirm that the Watcher itself is still alive. + let watcher = self.0.lock(); + match watcher.liveness.try_recv() { + Ok(msg) => { + // The watcher background task set the exit condition. + Err(msg) + } + Err(TryRecvError::Disconnected) => { + // The watcher background task died (panic, possible?). + Err( + "The filesystem watcher exited abnormally: please see the log for more information." + .to_owned(), + ) + } + Err(TryRecvError::Empty) => { + // Still alive. 
+ Ok(()) } } } /// - /// Returns true if this InvalidationWatcher is still valid: if it is not valid, it will have - /// already logged some sort of error, and will never restart on its own. + /// Add a path to the set of paths being watched by this invalidation watcher, non-recursively. /// - pub fn running(&self) -> bool { - match self.liveness.try_recv() { - Ok(()) | Err(TryRecvError::Disconnected) => false, - Err(TryRecvError::Empty) => true, - } + pub async fn watch(self: &Arc, path: PathBuf) -> Result<(), notify::Error> { + let executor = { + let inner = self.0.lock(); + if cfg!(target_os = "macos") || !inner.enabled { + // Short circuit here if we are on a Darwin platform because we should be watching + // the entire build root recursively already, or if we are not enabled. + return Ok(()); + } + inner.executor.clone() + }; + + let watcher = self.clone(); + executor + .spawn_blocking(move || { + let mut inner = watcher.0.lock(); + inner.watcher.watch(path, RecursiveMode::NonRecursive) + }) + .await } } diff --git a/src/rust/engine/watch/src/tests.rs b/src/rust/engine/watch/src/tests.rs index 7655253a974..ab22cf482ec 100644 --- a/src/rust/engine/watch/src/tests.rs +++ b/src/rust/engine/watch/src/tests.rs @@ -7,12 +7,13 @@ use std::sync::Arc; use std::thread::sleep; use std::time::Duration; -use crossbeam_channel; +use crossbeam_channel::{self, RecvTimeoutError}; use fs::GitignoreStyleExcludes; use notify; use parking_lot::Mutex; use task_executor::Executor; use testutil::{append_to_existing_file, make_file}; +use tokio::runtime::Handle; fn setup_fs() -> (tempfile::TempDir, PathBuf) { // setup a build_root with a file in it to watch. @@ -25,28 +26,21 @@ fn setup_fs() -> (tempfile::TempDir, PathBuf) { (tempdir, file_path) } -fn setup_watch( +/// Create (but don't start) an InvalidationWatcher. +async fn setup_watch( ignorer: Arc, - invalidatable: Arc, build_root: PathBuf, file_path: PathBuf, -) -> InvalidationWatcher { - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let executor = Executor::new(rt.handle().clone()); - let watcher = InvalidationWatcher::new( - Arc::downgrade(&invalidatable), - executor, - build_root, - ignorer, - /*enabled*/ true, - ) - .expect("Couldn't create InvalidationWatcher"); - rt.block_on(watcher.watch(file_path)).unwrap(); +) -> Arc { + let executor = Executor::new(Handle::current()); + let watcher = InvalidationWatcher::new(executor, build_root, ignorer, /*enabled*/ true) + .expect("Couldn't create InvalidationWatcher"); + watcher.watch(file_path).await.unwrap(); watcher } -#[test] -fn receive_watch_event_on_file_change() { +#[tokio::test] +async fn receive_watch_event_on_file_change() { // Instantiate a watcher and watch the file in question. let (tempdir, file_path) = setup_fs(); let build_root = tempdir.path().to_path_buf(); @@ -58,12 +52,8 @@ fn receive_watch_event_on_file_change() { let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); - let _watcher = setup_watch( - ignorer, - invalidatable.clone(), - build_root.clone(), - file_path.clone(), - ); + let watcher = setup_watch(ignorer, build_root.clone(), file_path.clone()).await; + watcher.start(&invalidatable); // Update the content of the file being watched. let new_content = "stnetnoc".as_bytes().to_vec(); @@ -79,14 +69,11 @@ fn receive_watch_event_on_file_change() { } } // If we didn't find a new state fail the test. - assert!( - false, - "Nodes EntryState was not invalidated, or reset to NotStarted." 
- ) + assert!(false, "Did not observe invalidation.") } -#[test] -fn ignore_file_events_matching_patterns_in_pants_ignore() { +#[tokio::test] +async fn ignore_file_events_matching_patterns_in_pants_ignore() { let (tempdir, file_path) = setup_fs(); let build_root = tempdir.path().to_path_buf(); let file_path_rel = file_path @@ -97,12 +84,8 @@ fn ignore_file_events_matching_patterns_in_pants_ignore() { let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&["/foo".to_string()]).unwrap(); - let _watcher = setup_watch( - ignorer, - invalidatable.clone(), - build_root.clone(), - file_path.clone(), - ); + let watcher = setup_watch(ignorer, build_root, file_path.clone()).await; + watcher.start(&invalidatable); // Update the content of the file being watched. let new_content = "stnetnoc".as_bytes().to_vec(); @@ -118,30 +101,42 @@ fn ignore_file_events_matching_patterns_in_pants_ignore() { } } -#[test] -fn test_liveness() { - let (tempdir, _) = setup_fs(); +#[tokio::test] +async fn liveness_watch_error() { + let (tempdir, file_path) = setup_fs(); let build_root = tempdir.path().to_path_buf(); let invalidatable = Arc::new(TestInvalidatable::default()); let ignorer = GitignoreStyleExcludes::create(&[]).unwrap(); + // NB: We create this watcher, but we don't call start: instead we create the background thread + // directly. + let _watcher = setup_watch(ignorer.clone(), build_root.clone(), file_path.clone()).await; let (liveness_sender, liveness_receiver) = crossbeam_channel::unbounded(); let (event_sender, event_receiver) = crossbeam_channel::unbounded(); - InvalidationWatcher::start_background_thread( + let join_handle = InvalidationWatcher::start_background_thread( Arc::downgrade(&invalidatable), ignorer, build_root, liveness_sender, event_receiver, ); + + // Should not exit. + assert_eq!( + Err(RecvTimeoutError::Timeout), + liveness_receiver.recv_timeout(Duration::from_millis(100)) + ); event_sender .send(Err(notify::Error::generic( "This should kill the background thread", ))) .unwrap(); + + // Should exit. 
assert!(liveness_receiver - .recv_timeout(Duration::from_millis(100)) + .recv_timeout(Duration::from_millis(1000)) .is_ok()); + join_handle.join().unwrap(); } #[derive(Default)] diff --git a/tests/python/pants_test/core_tasks/test_deferred_sources_mapper_integration.py b/tests/python/pants_test/core_tasks/test_deferred_sources_mapper_integration.py index 461d2404a11..9f8c6cdcd82 100644 --- a/tests/python/pants_test/core_tasks/test_deferred_sources_mapper_integration.py +++ b/tests/python/pants_test/core_tasks/test_deferred_sources_mapper_integration.py @@ -12,8 +12,8 @@ class DeferredSourcesMapperIntegration(PantsRunIntegrationTest): @classmethod - def _emit_targets(cls, workdir): - with safe_open(os.path.join(workdir, "BUILD"), "w") as f: + def _emit_targets(cls, buildroot): + with safe_open(os.path.join(buildroot, "BUILD"), "w") as f: f.write( dedent( """ @@ -81,7 +81,7 @@ def _emit_targets(cls, workdir): ) ) return [ - f"{os.path.relpath(workdir, get_buildroot())}:proto-{suffix}" + f"{os.path.relpath(buildroot, get_buildroot())}:proto-{suffix}" for suffix in (8, 9, "other") ] @@ -90,7 +90,6 @@ def _configured_pants_run(self, command, workdir): command=command, workdir=workdir, config={ - "GLOBAL": {"build_ignore": [], "pants_ignore": []}, "jvm-platform": { "default_platform": "java8", "platforms": { @@ -105,12 +104,14 @@ def _configured_pants_run(self, command, workdir): def test_deferred_sources_gen_successfully(self): with self.temporary_workdir() as workdir: - pants_run = self._configured_pants_run(["gen", self._emit_targets(workdir)[0]], workdir) + pants_run = self._configured_pants_run( + ["gen", self._emit_targets(os.getcwd())[0]], workdir + ) self.assert_success(pants_run) def test_deferred_sources_export_successfully(self): with self.temporary_workdir() as workdir: - proto8, proto9, proto_other = self._emit_targets(workdir) + proto8, proto9, proto_other = self._emit_targets(os.getcwd()) pants_run = self._configured_pants_run(["export", proto8, proto9, proto_other], workdir) self.assert_success(pants_run) diff --git a/tests/python/pants_test/core_tasks/test_prep_command_integration.py b/tests/python/pants_test/core_tasks/test_prep_command_integration.py index a7a1a463058..62a93ed1ff1 100644 --- a/tests/python/pants_test/core_tasks/test_prep_command_integration.py +++ b/tests/python/pants_test/core_tasks/test_prep_command_integration.py @@ -6,6 +6,7 @@ from textwrap import dedent from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest +from pants.util.contextutil import temporary_dir from pants.util.dirutil import safe_open @@ -18,8 +19,8 @@ class PrepCommandIntegrationTest(PantsRunIntegrationTest): } @classmethod - def _emit_targets(cls, workdir): - prep_command_path = os.path.join(workdir, "src/java/org/pantsbuild/prepcommand") + def _emit_targets(cls, buildroot): + prep_command_path = os.path.join(buildroot, "src/java/org/pantsbuild/prepcommand") with safe_open(os.path.join(prep_command_path, "BUILD"), "w") as fp: for name, touch_target in cls._SENTINELS.items(): fp.write( @@ -32,7 +33,7 @@ def _emit_targets(cls, workdir): prep_args=['{tmpdir}/{touch_target}'], ) """.format( - name=name, goal=name, tmpdir=workdir, touch_target=touch_target + name=name, goal=name, tmpdir=buildroot, touch_target=touch_target ) ) ) @@ -50,30 +51,26 @@ def _assert_goal_did_not_run(self, basedir, goal): @contextmanager def _execute_pants(self, goal): - with self.temporary_workdir() as workdir: - prep_commands_specs = self._emit_targets(workdir) - # Make sure the emitted BUILD 
under .pants.d is not ignored. - config = {"GLOBAL": {"build_ignore": [], "pants_ignore": []}} - pants_run = self.run_pants_with_workdir( - [goal] + prep_commands_specs, workdir, config=config - ) + with temporary_dir(os.getcwd()) as buildroot, self.temporary_workdir(buildroot) as workdir: + prep_commands_specs = self._emit_targets(buildroot) + pants_run = self.run_pants_with_workdir([goal] + prep_commands_specs, workdir) self.assert_success(pants_run) - yield workdir + yield buildroot def test_prep_command_in_compile(self): - with self._execute_pants("compile") as workdir: - self._assert_goal_ran(workdir, "compile") - self._assert_goal_did_not_run(workdir, "test") - self._assert_goal_did_not_run(workdir, "binary") + with self._execute_pants("compile") as buildroot: + self._assert_goal_ran(buildroot, "compile") + self._assert_goal_did_not_run(buildroot, "test") + self._assert_goal_did_not_run(buildroot, "binary") def test_prep_command_in_test(self): - with self._execute_pants("test") as workdir: - self._assert_goal_ran(workdir, "compile") - self._assert_goal_ran(workdir, "test") - self._assert_goal_did_not_run(workdir, "binary") + with self._execute_pants("test") as buildroot: + self._assert_goal_ran(buildroot, "compile") + self._assert_goal_ran(buildroot, "test") + self._assert_goal_did_not_run(buildroot, "binary") def test_prep_command_in_binary(self): - with self._execute_pants("binary") as workdir: - self._assert_goal_ran(workdir, "compile") - self._assert_goal_ran(workdir, "binary") - self._assert_goal_did_not_run(workdir, "test") + with self._execute_pants("binary") as buildroot: + self._assert_goal_ran(buildroot, "compile") + self._assert_goal_ran(buildroot, "binary") + self._assert_goal_did_not_run(buildroot, "test") diff --git a/tests/python/pants_test/engine/legacy/test_goal_rule_integration.py b/tests/python/pants_test/engine/legacy/test_goal_rule_integration.py index c9d3247ac2c..6c28068f3cb 100644 --- a/tests/python/pants_test/engine/legacy/test_goal_rule_integration.py +++ b/tests/python/pants_test/engine/legacy/test_goal_rule_integration.py @@ -59,18 +59,29 @@ def dump(content): # Launch the loop as a background process. handle = self.run_pants_with_workdir_without_waiting( - ["--no-v1", "--v2", "--loop", "--loop-max=3", "list", f"{tmpdir}:",], + # NB: We disable watchman here because in the context of `--loop`, the total count + # of invalidations matters, and with both `notify` and `watchman` enabled we get + # twice as many. + [ + "--no-v1", + "--v2", + "--no-watchman-enable", + "--loop", + "--loop-max=3", + "list", + f"{tmpdir}:", + ], workdir, config, ) - # Wait for the loop to stabilize. - time.sleep(10) + # Wait for pantsd to come up and for the loop to stabilize. checker.assert_started() + time.sleep(10) # Replace the BUILD file content twice. dump('target(name="two")') - time.sleep(5) + time.sleep(10) dump('target(name="three")') # Verify that the three different target states were listed, and that the process exited. 
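Taken together, the InvalidationWatcher changes earlier in this patch define a four step lifecycle: construct the watcher, register paths with watch(), attach an Invalidatable via start(), and then periodically call is_valid() to learn whether the background thread has exited (in which case the caller should re-initialize rather than expect a restart). A rough sketch under those assumptions; the run_watcher helper and the watched path are illustrative only, and TestInvalidatable is the type from the watch tests above:

use std::path::PathBuf;
use std::sync::Arc;

use fs::GitignoreStyleExcludes;
use task_executor::Executor;
use tokio::runtime::Handle;

// Illustrative lifecycle: create -> watch -> start -> is_valid.
// Assumes this lives alongside the watch tests, so InvalidationWatcher and
// TestInvalidatable are in scope.
async fn run_watcher(build_root: PathBuf, invalidatable: Arc<TestInvalidatable>) -> Result<(), String> {
  let executor = Executor::new(Handle::current());
  let ignorer = GitignoreStyleExcludes::create(&[]).unwrap();
  let watcher = InvalidationWatcher::new(executor, build_root.clone(), ignorer, /*enabled*/ true)?;
  // Register a single path non-recursively (on macOS the build root is already watched
  // recursively, so this is a no-op there).
  watcher
    .watch(build_root.join("BUILD"))
    .await
    .map_err(|e| format!("Failed to watch: {:?}", e))?;
  // Hand the watcher its Invalidatable and launch the background thread.
  watcher.start(&invalidatable);
  // Later (e.g. once per pantsd health check): Err carries the exit message if the
  // background thread has shut down, signalling that re-initialization is needed.
  watcher.is_valid().await
}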
diff --git a/tests/python/pants_test/engine/test_build_files.py b/tests/python/pants_test/engine/test_build_files.py index a43f25d82b6..6f9b5f56ab9 100644 --- a/tests/python/pants_test/engine/test_build_files.py +++ b/tests/python/pants_test/engine/test_build_files.py @@ -24,7 +24,7 @@ from pants.engine.nodes import Return, State, Throw from pants.engine.parser import HydratedStruct, SymbolTable from pants.engine.rules import rule -from pants.engine.scheduler import SchedulerSession +from pants.engine.scheduler import ExecutionRequest, SchedulerSession from pants.engine.struct import Struct, StructWithDeps from pants.testutil.engine.util import MockGet, Target, run_rule from pants.util.objects import Exactly @@ -164,7 +164,7 @@ def test_exclude_pattern_with_single_address(self) -> None: address_specs = AddressSpecs( [SingleAddress("root", "not_me")], exclude_patterns=tuple(["root.*"]) ) - address_family = AddressFamily("root", {"not_me": ("root/BUILD", TargetAdaptor()),}) + address_family = AddressFamily("root", {"not_me": ("root/BUILD", TargetAdaptor())}) targets = self._resolve_addresses( address_specs, address_family, self._snapshot(), self._address_mapper() @@ -239,7 +239,7 @@ def create_json(self) -> SchedulerSession: def _populate( self, scheduler: SchedulerSession, address: Address, - ) -> Tuple[HydratedStruct, State]: + ) -> Tuple[ExecutionRequest, State]: """Perform an ExecutionRequest to parse the given Address into a Struct.""" request = scheduler.execution_request([HydratedStruct], [address]) returns, throws = scheduler.execute(request) diff --git a/tests/python/pants_test/pantsd/pantsd_integration_test_base.py b/tests/python/pants_test/pantsd/pantsd_integration_test_base.py index a7ff7a25896..ea41df1ab78 100644 --- a/tests/python/pants_test/pantsd/pantsd_integration_test_base.py +++ b/tests/python/pants_test/pantsd/pantsd_integration_test_base.py @@ -13,6 +13,7 @@ from pants.testutil.pants_run_integration_test import PantsRunIntegrationTest, read_pantsd_log from pants.testutil.process_test_util import no_lingering_process_by_command from pants.util.collections import recursively_update +from pants.util.contextutil import temporary_dir def banner(s): @@ -50,8 +51,9 @@ def assert_pantsd_runner_started(self, client_pid, timeout=12): def _check_pantsd_is_alive(self): self._log() assert ( - self._pid is not None and self.is_alive() + self._pid is not None ), "cannot assert that pantsd is running. Try calling assert_started before calling this method." + assert self.is_alive(), "pantsd was not alive." return self._pid def current_memory_usage(self): @@ -86,7 +88,7 @@ def use_pantsd_env_var(cls): @contextmanager def pantsd_test_context(self, log_level="info", extra_config=None): with no_lingering_process_by_command("pantsd") as runner_process_context: - with self.temporary_workdir() as workdir_base: + with temporary_dir(root_dir=os.getcwd()) as workdir_base: pid_dir = os.path.join(workdir_base, ".pids") workdir = os.path.join(workdir_base, ".workdir.pants.d") print(f"\npantsd log is {workdir}/pantsd/pantsd.log") @@ -182,7 +184,11 @@ def assert_runner( elapsed = time.time() - start_time print(bold(cyan(f"\ncompleted in {elapsed} seconds"))) - # TODO: uncomment this and add an issue link! 
+ if success: + self.assert_success(run) + else: + self.assert_failure(run) + runs_created = self._run_count(workdir) - run_count self.assertEqual( runs_created, @@ -191,8 +197,5 @@ def assert_runner( expected_runs, runs_created, ), ) - if success: - self.assert_success(run) - else: - self.assert_failure(run) + return run diff --git a/tests/python/pants_test/pantsd/service/test_fs_event_service.py b/tests/python/pants_test/pantsd/service/test_fs_event_service.py index 01183a7fcd5..5f0b208d1d2 100644 --- a/tests/python/pants_test/pantsd/service/test_fs_event_service.py +++ b/tests/python/pants_test/pantsd/service/test_fs_event_service.py @@ -12,40 +12,22 @@ class TestFSEventService(TestBase): BUILD_ROOT = "/build_root" EMPTY_EVENT = (None, None) - FAKE_EVENT = ("test", dict(subscription="test", files=["a/BUILD", "b/BUILD"])) - FAKE_EVENT_STREAM = [FAKE_EVENT, EMPTY_EVENT, EMPTY_EVENT, FAKE_EVENT, EMPTY_EVENT] + FAKE_EVENT = dict(subscription="test", files=["a/BUILD", "b/BUILD"]) + FAKE_EVENT_STREAM = [ + ("ignored", ev) for ev in [FAKE_EVENT, EMPTY_EVENT, EMPTY_EVENT, FAKE_EVENT, EMPTY_EVENT] + ] WORKER_COUNT = 1 def setUp(self): super().setUp() self.mock_watchman = unittest.mock.create_autospec(Watchman, spec_set=True) - self.service = FSEventService(self.mock_watchman, self.BUILD_ROOT) + self.service = FSEventService(self.mock_watchman, self.scheduler.scheduler, self.BUILD_ROOT) self.service.setup(None) - self.service.register_all_files_handler(lambda x: True, name="test") - self.service.register_all_files_handler(lambda x: False, name="test2") - - def test_registration(self): - # N.B. This test implicitly tests register_handler; no need to duplicate work. - self.assertTrue("test" in self.service._handlers) - self.assertTrue("test2" in self.service._handlers) - self.assertIsInstance(self.service._handlers["test"], Watchman.EventHandler) - self.assertIsInstance(self.service._handlers["test2"], Watchman.EventHandler) - - def test_register_handler_duplicate(self): - with self.assertRaises(AssertionError): - self.service.register_handler("test", "test", lambda x: True) - - with self.assertRaises(AssertionError): - self.service.register_handler("test", dict(test=1), lambda x: True) - - def test_fire_callback(self): - self.assertTrue(self.service.fire_callback("test", {})) - self.assertFalse(self.service.fire_callback("test2", {})) @contextmanager def mocked_run(self, asserts=True): - self.service.fire_callback = unittest.mock.Mock() - yield self.service.fire_callback + self.service._handle_all_files_event = unittest.mock.Mock() + yield self.service._handle_all_files_event if asserts: self.mock_watchman.watch_project.assert_called_once_with(self.BUILD_ROOT) @@ -59,17 +41,7 @@ def test_run(self): self.mock_watchman.subscribed.return_value = self.FAKE_EVENT_STREAM self.service.run() mock_callback.assert_has_calls( - [unittest.mock.call(*self.FAKE_EVENT), unittest.mock.call(*self.FAKE_EVENT)], - any_order=True, - ) - - def test_run_failed_callback(self): - with self.mocked_run() as mock_callback: - self.mock_watchman.subscribed.return_value = self.FAKE_EVENT_STREAM - mock_callback.side_effect = [False, True] - self.service.run() - mock_callback.assert_has_calls( - [unittest.mock.call(*self.FAKE_EVENT), unittest.mock.call(*self.FAKE_EVENT)], + [unittest.mock.call(self.FAKE_EVENT), unittest.mock.call(self.FAKE_EVENT)], any_order=True, ) diff --git a/tests/python/pants_test/pantsd/test_pantsd_integration.py b/tests/python/pants_test/pantsd/test_pantsd_integration.py index 6ea5a40caca..70709725ec3 
100644 --- a/tests/python/pants_test/pantsd/test_pantsd_integration.py +++ b/tests/python/pants_test/pantsd/test_pantsd_integration.py @@ -316,7 +316,7 @@ def full_pantsd_log(): # Check the logs. self.assertRegex( - full_pantsd_log(), r"watching invalidating files:.*{}".format(test_dir) + full_pantsd_log(), r"watching invalidation patterns:.*{}".format(test_dir) ) checker.assert_running() @@ -329,7 +329,7 @@ def full_pantsd_log(): time.sleep(10) checker.assert_stopped() - self.assertIn("saw file events covered by invalidation globs", full_pantsd_log()) + self.assertIn("saw filesystem changes covered by invalidation globs", full_pantsd_log()) def test_pantsd_invalidation_pants_toml_file(self): # Test tmp_pants_toml (--pants-config-files=$tmp_pants_toml)'s removal From adeb6cfb56c819b3fed96d7dda8d48e6ce765522 Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Fri, 8 May 2020 20:06:13 -0700 Subject: [PATCH 10/15] pin tokio at exactly 0.2.13 --- src/rust/engine/Cargo.lock | 48 +++++++++++++++++++------------------- src/rust/engine/Cargo.toml | 2 +- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 35766778b12..0369f0ade39 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -58,7 +58,7 @@ dependencies = [ "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -194,7 +194,7 @@ dependencies = [ "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -673,7 +673,7 @@ dependencies = [ "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -831,7 +831,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -863,7 +863,7 @@ dependencies = [ "serde_json 1.0.52 (registry+https://github.com/rust-lang/crates.io-index)", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -1092,7 +1092,7 @@ dependencies = [ "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + 
"tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1158,7 +1158,7 @@ dependencies = [ "indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-util 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1264,7 +1264,7 @@ dependencies = [ "net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tower-service 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "want 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1281,7 +1281,7 @@ dependencies = [ "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustls-native-certs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1551,7 +1551,7 @@ dependencies = [ "num_enum 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "simplelog 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "ui 0.0.1", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1704,7 +1704,7 @@ dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2091,7 +2091,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-util 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "uname 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2109,7 +2109,7 @@ dependencies = [ "process_execution 0.0.1", "store 0.1.0", "task_executor 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -2466,7 +2466,7 @@ dependencies = [ "serde 1.0.106 
(registry+https://github.com/rust-lang/crates.io-index)", "serde_urlencoded 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-rustls 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "wasm-bindgen 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2698,7 +2698,7 @@ dependencies = [ "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2836,7 +2836,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)", "walkdir 2.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", @@ -2943,7 +2943,7 @@ version = "0.0.1" dependencies = [ "futures 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "workunit_store 0.0.1", ] @@ -3041,7 +3041,7 @@ dependencies = [ [[package]] name = "tokio" -version = "0.2.20" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bytes 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3098,7 +3098,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures-core 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "rustls 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.21.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3112,7 +3112,7 @@ dependencies = [ "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3125,7 +3125,7 @@ dependencies = [ "futures-sink 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "pin-project-lite 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3488,7 +3488,7 @@ dependencies = [ "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", - "tokio 0.2.20 
(registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3578,7 +3578,7 @@ dependencies = [ "concrete_time 0.0.1", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)", + "tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -3889,7 +3889,7 @@ dependencies = [ "checksum thread-scoped 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bcbb6aa301e5d3b0b5ef639c9a9c7e2f1c944f177b460c04dc24c69b1fa2bd99" "checksum thread_local 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14" "checksum time 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "ca8a50ef2360fbd1eeb0ecd46795a87a19024eb4b53c5dc916ca1fd95fe62438" -"checksum tokio 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "05c1d570eb1a36f0345a5ce9c6c6e665b70b73d11236912c0b477616aeec47b1" +"checksum tokio 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "0fa5e81d6bc4e67fe889d5783bd2a128ab2e0cfa487e0be16b6a8d177b101616" "checksum tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)" = "" "checksum tokio-io 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" "checksum tokio-macros 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c3acc6aa564495a0f2e1d59fab677cd7f81a19994cfc7f3ad0e64301560389" diff --git a/src/rust/engine/Cargo.toml b/src/rust/engine/Cargo.toml index 6df346f3dc4..0b83383be63 100644 --- a/src/rust/engine/Cargo.toml +++ b/src/rust/engine/Cargo.toml @@ -112,7 +112,7 @@ store = { path = "fs/store" } task_executor = { path = "task_executor" } tempfile = "3" time = "0.1.40" -tokio = { version = "0.2", features = ["rt-threaded"] } +tokio = { version = "=0.2.13", features = ["rt-threaded"] } ui = { path = "ui" } url = "2.1" uuid = { version = "0.7", features = ["v4"] } From d08894f289544be7c56f8aaaa995605646f6060d Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Mon, 11 May 2020 13:34:56 -0700 Subject: [PATCH 11/15] fix lint issues --- .../backend/python/subsystems/ipex/ipex_launcher.py | 9 ++++----- .../backend/python/subsystems/python_native_code.py | 2 +- src/python/pants/engine/scheduler.py | 2 +- src/python/pants/pantsd/pants_daemon.py | 6 ------ 4 files changed, 6 insertions(+), 13 deletions(-) diff --git a/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py b/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py index ddbfcd9b554..e8cac439171 100644 --- a/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py +++ b/src/python/pants/backend/python/subsystems/ipex/ipex_launcher.py @@ -67,11 +67,10 @@ def _hydrate_pex_file(self, hydrated_pex_file): # Perform a fully pinned intransitive resolve to hydrate the install cache. 
resolver_settings = ipex_info["resolver_settings"] - fetchers = ( - [Fetcher([url]) for url in resolver_settings.pop('find_links')] + - [PyPIFetcher(url) for url in resolver_settings.pop('indexes')] - ) - resolver_settings['fetchers'] = fetchers + fetchers = [Fetcher([url]) for url in resolver_settings.pop("find_links")] + [ + PyPIFetcher(url) for url in resolver_settings.pop("indexes") + ] + resolver_settings["fetchers"] = fetchers resolved_distributions = resolver.resolve( requirements=bootstrap_info.requirements, diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py index da17fa3ccce..640713fc40f 100644 --- a/src/python/pants/backend/python/subsystems/python_native_code.py +++ b/src/python/pants/backend/python/subsystems/python_native_code.py @@ -10,11 +10,11 @@ from pants.backend.native.subsystems.native_toolchain import NativeToolchain from pants.backend.native.targets.native_library import NativeLibrary from pants.backend.python.subsystems.executable_pex_tool import ExecutablePexTool -from pants.python.python_requirement import PythonRequirement from pants.backend.python.targets.python_distribution import PythonDistribution from pants.base.exceptions import IncompatiblePlatformsError from pants.engine.rules import rule, subsystem_rule from pants.python import pex_build_util +from pants.python.python_requirement import PythonRequirement from pants.python.python_setup import PythonSetup from pants.subsystem.subsystem import Subsystem from pants.util.memo import memoized_property diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 4540d14e496..32217ca2f70 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -9,7 +9,7 @@ import traceback from dataclasses import dataclass from textwrap import dedent -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Type, Union, cast +from typing import TYPE_CHECKING, Any, Dict, Optional, Sequence, Tuple, Type, Union, cast from pants.base.exception_sink import ExceptionSink from pants.base.exiter import PANTS_FAILED_EXIT_CODE diff --git a/src/python/pants/pantsd/pants_daemon.py b/src/python/pants/pantsd/pants_daemon.py index c4d20fe682a..1ef3666e98c 100644 --- a/src/python/pants/pantsd/pants_daemon.py +++ b/src/python/pants/pantsd/pants_daemon.py @@ -190,12 +190,6 @@ def create(cls, options_bootstrapper, full_init=True): bootstrap_options=bootstrap_options, ) - @classmethod - def absolute_pidfile(cls): - return PantsDaemon.metadata_file_path( - "pantsd", "pid", bootstrap_options.pants_subprocessdir - ) - @staticmethod def _setup_services( build_root, From dbd54575e098dc3707e8dca2e2bf9576075d2cba Mon Sep 17 00:00:00 2001 From: Henry Fuller Date: Wed, 13 May 2020 12:07:59 -0700 Subject: [PATCH 12/15] fix mypy typing issues --- src/python/pants/engine/scheduler.py | 2 +- src/python/pants/python/pex_build_util.py | 1 - tests/python/pants_test/init/test_util.py | 3 ++- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 32217ca2f70..3300a8ce275 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -556,7 +556,7 @@ def run_goal_rule( self._trace_on_error([exc], request) return PANTS_FAILED_EXIT_CODE _, state = returns[0] - return state.value.exit_code + return cast(int, state.value.exit_code) def product_request( self, diff 
--git a/src/python/pants/python/pex_build_util.py b/src/python/pants/python/pex_build_util.py index cb4b8271555..a9a35af7058 100644 --- a/src/python/pants/python/pex_build_util.py +++ b/src/python/pants/python/pex_build_util.py @@ -317,7 +317,6 @@ def _resolve_multi( find_links = list(find_links) if find_links else [] find_links.extend(python_repos.repos) - distributions = {} fetchers = python_repos.get_fetchers() fetchers.extend(Fetcher([path]) for path in find_links) diff --git a/tests/python/pants_test/init/test_util.py b/tests/python/pants_test/init/test_util.py index ae33bf8b5ee..55f97917fe5 100644 --- a/tests/python/pants_test/init/test_util.py +++ b/tests/python/pants_test/init/test_util.py @@ -3,6 +3,7 @@ import os from contextlib import contextmanager +from typing import Generator from pants.fs.fs import safe_filename_from_path from pants.init.util import init_workdir @@ -13,7 +14,7 @@ class UtilTest(TestBase): @contextmanager - def physical_workdir_base(self) -> OptionValueContainer: + def physical_workdir_base(self) -> Generator[OptionValueContainer, None, None]: with temporary_dir(cleanup=False) as physical_workdir_base: bootstrap_options = self.get_bootstrap_options( [f"--pants-physical-workdir-base={physical_workdir_base}"] From f518032b3f9af93ac47cea41d7d89b50036f6cb5 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Wed, 13 May 2020 11:45:56 -0700 Subject: [PATCH 13/15] Move away from the debounced notify watcher #9754 --- src/rust/engine/Cargo.lock | 31 +++++++++---------- src/rust/engine/engine_cffi/src/lib.rs | 4 +-- .../engine/process_execution/src/cache.rs | 6 +++- .../process_execution/src/cache_tests.rs | 5 ++- .../process_execution/src/local_tests.rs | 4 ++- src/rust/engine/src/scheduler.rs | 7 ++--- src/rust/engine/watch/Cargo.toml | 6 +--- src/rust/engine/watch/src/lib.rs | 10 ++++-- 8 files changed, 40 insertions(+), 33 deletions(-) diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 0369f0ade39..e3a58ec2d69 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -869,16 +869,16 @@ dependencies = [ [[package]] name = "fsevent" -version = "0.4.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent-sys 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "fsevent-sys" -version = "2.0.1" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1344,7 +1344,7 @@ dependencies = [ [[package]] name = "inotify" -version = "0.7.0" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1720,18 +1720,17 @@ dependencies = [ [[package]] name = "notify" -version = "5.0.0-pre.1" -source = "git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd#fba00891d9105e2f581c69fbe415a58cb7966fdd" +version = "5.0.0-pre.2" +source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "anymap 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "chashmap 2.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "crossbeam-channel 
0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "crossbeam-channel 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", - "fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fsevent-sys 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)", + "inotify 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.69 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.22 (registry+https://github.com/rust-lang/crates.io-index)", "mio-extras 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3483,7 +3482,7 @@ dependencies = [ "hashing 0.0.1", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "logging 0.0.1", - "notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)", + "notify 5.0.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", "task_executor 0.0.1", "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3676,8 +3675,8 @@ dependencies = [ "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" "checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -"checksum fsevent 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6" -"checksum fsevent-sys 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0" +"checksum fsevent 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1616e68919f49d311720c3cf316e0a3522d8f2bd08f8da35f6b8a0fa12f9234b" +"checksum fsevent-sys 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a41f1722e9bf862f62429d192f37d0c82c589aa18783aa06f0c4e5c3c90649fb" "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" @@ -3721,7 +3720,7 @@ dependencies = [ "checksum ignore 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)" = "ddf60d063dbe6b75388eec66cfc07781167ae3d34a09e0c433e6c5de0511f7fb" "checksum im-rc 12.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e882e6e7cd335baacae574b56aa3ce74844ec82fc6777def7c0ac368837dc3d5" "checksum indexmap 1.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = 
"076f042c5b7b98f31d205f1249267e12a6518c1481e9dae9764af19b707d2292" -"checksum inotify 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24e40d6fd5d64e2082e0c796495c8ef5ad667a96d03e5aaa0becfd9d47bcbfb8" +"checksum inotify 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bc39ee997811267bf8aa0b10e1674c5bea6caacc1957eede5ea45251fe33c6d5" "checksum inotify-sys 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e74a1aa87c59aeff6ef2cc2fa62d41bc43f54952f55652656b18a02fd5e356c0" "checksum iovec 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" "checksum itertools 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "0d47946d458e94a1b7bcabbf6521ea7c037062c81f534615abcad76e84d4970d" @@ -3759,7 +3758,7 @@ dependencies = [ "checksum multimap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb04b9f127583ed176e163fb9ec6f3e793b87e21deedd5734a69386a18a0151" "checksum nails 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7d0a901335354f0d61e36d04729f735b38d88d7c3dd9c09a02c66501fc6d7c0d" "checksum net2 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)" = "2ba7c918ac76704fb42afcbbb43891e72731f3dcca3bef2a19786297baf14af7" -"checksum notify 5.0.0-pre.1 (git+https://github.com/notify-rs/notify?rev=fba00891d9105e2f581c69fbe415a58cb7966fdd)" = "" +"checksum notify 5.0.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7b00c0b65188bffb5598c302e19b062feb94adef02c31f15622a163c95d673c3" "checksum num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)" = "4703ad64153382334aa8db57c637364c322d3372e097840c72000dabdcf6156e" "checksum num-bigint 0.1.44 (registry+https://github.com/rust-lang/crates.io-index)" = "e63899ad0da84ce718c14936262a41cee2c79c981fc0a0e7c7beb47d5a07e8c1" "checksum num-complex 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "b288631d7878aaf59442cffd36910ea604ecd7745c36054328595114001c9656" diff --git a/src/rust/engine/engine_cffi/src/lib.rs b/src/rust/engine/engine_cffi/src/lib.rs index f94d822586a..0cae2120913 100644 --- a/src/rust/engine/engine_cffi/src/lib.rs +++ b/src/rust/engine/engine_cffi/src/lib.rs @@ -40,8 +40,8 @@ mod cffi_externs; use engine::externs::*; use engine::{ - externs, nodes, Core, ExecutionRequest, ExecutionTermination, Failure, Function, Handle, - Key, Params, Rule, Scheduler, Session, Tasks, TypeId, Types, Value, + externs, nodes, Core, ExecutionRequest, ExecutionTermination, Failure, Function, Handle, Key, + Params, Rule, Scheduler, Session, Tasks, TypeId, Types, Value, }; use futures::compat::Future01CompatExt; use futures01::{future, Future}; diff --git a/src/rust/engine/process_execution/src/cache.rs b/src/rust/engine/process_execution/src/cache.rs index dfbd48289ad..d19ac91c104 100644 --- a/src/rust/engine/process_execution/src/cache.rs +++ b/src/rust/engine/process_execution/src/cache.rs @@ -147,6 +147,10 @@ impl CommandRunner { .map(Bytes::from) .map_err(|err| format!("Error serializing execute process result to cache: {}", err)) }) - .and_then(move |bytes| process_execution_store.store_bytes(fingerprint, bytes, false).compat()) + .and_then(move |bytes| { + process_execution_store + .store_bytes(fingerprint, bytes, false) + .compat() + }) } } diff --git a/src/rust/engine/process_execution/src/cache_tests.rs b/src/rust/engine/process_execution/src/cache_tests.rs index cb76b0719a6..5275e54527e 100644 --- 
a/src/rust/engine/process_execution/src/cache_tests.rs +++ b/src/rust/engine/process_execution/src/cache_tests.rs @@ -86,7 +86,10 @@ async fn run_roundtrip(script_exit_code: i8) -> RoundtripResults { }, }; - let uncached_result = caching.run(request.clone().into(), Context::default()).compat().await; + let uncached_result = caching + .run(request.clone().into(), Context::default()) + .compat() + .await; assert_eq!(local_result, uncached_result); diff --git a/src/rust/engine/process_execution/src/local_tests.rs b/src/rust/engine/process_execution/src/local_tests.rs index 72771d43c34..963a28ee347 100644 --- a/src/rust/engine/process_execution/src/local_tests.rs +++ b/src/rust/engine/process_execution/src/local_tests.rs @@ -785,7 +785,9 @@ async fn working_directory() { ); } -async fn run_command_locally(req: ExecuteProcessRequest) -> Result { +async fn run_command_locally( + req: ExecuteProcessRequest, +) -> Result { let work_dir = TempDir::new().unwrap(); run_command_locally_in_dir_with_cleanup(req, work_dir.path().to_owned()).await } diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs index 55194da8c6a..c7c60f958aa 100644 --- a/src/rust/engine/src/scheduler.rs +++ b/src/rust/engine/src/scheduler.rs @@ -468,11 +468,8 @@ impl Scheduler { if let Ok(res) = receiver.recv_timeout(Self::compute_refresh_delay(deadline)) { // Completed successfully. break Ok(Self::execute_record_results(&request.roots, &session, res)); - } else if let Err(e) = Self::maybe_display_render( - &self.core.graph, - &roots, - &session, - &mut tasks) + } else if let Err(e) = + Self::maybe_display_render(&self.core.graph, &roots, &session, &mut tasks) { break Err(e); } else if deadline.map(|d| d < Instant::now()).unwrap_or(false) { diff --git a/src/rust/engine/watch/Cargo.toml b/src/rust/engine/watch/Cargo.toml index c64dccefc3e..3d7fe8ff5b5 100644 --- a/src/rust/engine/watch/Cargo.toml +++ b/src/rust/engine/watch/Cargo.toml @@ -13,11 +13,7 @@ graph = { path = "../graph" } hashing = { path = "../hashing" } log = "0.4" logging = { path = "../logging" } -# notify is currently an experimental API, we are pinning to https://docs.rs/notify/5.0.0-pre.1/notify/ -# because the latest prerelease at time of writing has removed the debounced watcher which we would like to use. -# The author suggests they will add the debounced watcher back into the stable 5.0.0 release. When that happens -# we can move to it. -notify = { git = "https://github.com/notify-rs/notify", rev = "fba00891d9105e2f581c69fbe415a58cb7966fdd" } +notify = "5.0.0-pre.2" parking_lot = "0.6" task_executor = { path = "../task_executor" } diff --git a/src/rust/engine/watch/src/lib.rs b/src/rust/engine/watch/src/lib.rs index e2e35aa4277..3f2f4ec8826 100644 --- a/src/rust/engine/watch/src/lib.rs +++ b/src/rust/engine/watch/src/lib.rs @@ -83,8 +83,14 @@ impl InvalidationWatcher { let canonical_build_root = std::fs::canonicalize(build_root.as_path()).map_err(|e| format!("{:?}", e))?; let (watch_sender, watch_receiver) = crossbeam_channel::unbounded(); - let mut watcher: RecommendedWatcher = Watcher::new(watch_sender, Duration::from_millis(50)) - .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; + let mut watcher: RecommendedWatcher = Watcher::new_immediate(move |ev| { + if watch_sender.send(ev).is_err() { + // The watch thread shutting down first is ok, because it can exit when the Invalidatable + // is dropped. 
+ debug!("Watch thread has shutdown, but Watcher is still running."); + } + }) + .map_err(|e| format!("Failed to begin watching the filesystem: {}", e))?; let (liveness_sender, liveness_receiver) = crossbeam_channel::unbounded(); if enabled { From 20f39e7979c77605aff5a7c7fcff858a322ad387 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Wed, 13 May 2020 13:39:34 -0700 Subject: [PATCH 14/15] Remove test that has raced file invalidation ever since the notify backend was added, but which will now fairly consistently lose that race. # Delete this line to force CI to run the JVM tests. [ci skip-jvm-tests] --- tests/python/pants_test/engine/test_mapper.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/python/pants_test/engine/test_mapper.py b/tests/python/pants_test/engine/test_mapper.py index 6b91472fe56..fd43d2bfec7 100644 --- a/tests/python/pants_test/engine/test_mapper.py +++ b/tests/python/pants_test/engine/test_mapper.py @@ -191,11 +191,6 @@ def test_no_address_no_family(self) -> None: build_file = os.path.join(self.build_root, "a/c", "c.BUILD.json") with safe_open(build_file, "w") as fp: fp.write('{"type_alias": "struct", "name": "c"}') - - # Exists on disk, but not yet in memory. - with self.assertRaises(Exception): - self.resolve(spec) - self.scheduler.invalidate_files(["a/c"]) # Success. From 1f98eb61d1c6be5cbf93055d0efa52da791a9bf3 Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Thu, 14 May 2020 12:00:37 -0700 Subject: [PATCH 15/15] As explained in the comment: we can no longer create duplicate parallel BUILD files and hope that pants does not notice them before we scan the directory again! # Delete this line to force CI to run the JVM tests. [ci skip-jvm-tests] --- .../pants/contrib/go/tasks/go_buildgen.py | 29 +++++++++++-------- src/python/pants/option/global_options.py | 2 +- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py b/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py index c8dd5575ed4..07f136b7bac 100644 --- a/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py +++ b/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py @@ -307,14 +307,21 @@ def _materialize(self, generation_result): remote = self.get_options().remote existing_go_buildfiles = set() - def gather_go_buildfiles(rel_path): - address_mapper = self.context.address_mapper - for build_file in address_mapper.scan_build_files(base_path=rel_path): - existing_go_buildfiles.add(build_file) + # We scan for existing BUILD files containing Go targets before we begin to materialize + # things, because once we have created files (possibly next to existing files), collisions + # between the existing definitions and the new definitions are possible. 
+ def gather_go_buildfiles(rel_path, is_relevant_target): + for build_file in self.context.address_mapper.scan_build_files(rel_path): + spec_path = os.path.dirname(build_file) + for address in self.context.address_mapper.addresses_in_spec_path(spec_path): + if is_relevant_target(self.context.build_graph.resolve_address(address)): + existing_go_buildfiles.add(address.rel_path) - gather_go_buildfiles(generation_result.local_root) + gather_go_buildfiles(generation_result.local_root, lambda t: isinstance(t, GoLocalSource)) if remote and generation_result.remote_root != generation_result.local_root: - gather_go_buildfiles(generation_result.remote_root) + gather_go_buildfiles( + generation_result.remote_root, lambda t: isinstance(t, GoRemoteLibrary) + ) targets = set(self.context.build_graph.targets(self.is_go)) if remote and generation_result.remote_root: @@ -324,6 +331,7 @@ def gather_go_buildfiles(rel_path): remote_root = os.path.join(get_buildroot(), generation_result.remote_root) targets.update(self.context.scan(remote_root).targets(self.is_remote_lib)) + # Generate targets, and discard any BUILD files that were overwritten. failed_results = [] for result in self.generate_build_files(targets): existing_go_buildfiles.discard(result.build_file_path) @@ -331,15 +339,12 @@ def gather_go_buildfiles(rel_path): if result.failed: failed_results.append(result) + # Finally, unlink any BUILD files that were invalidated but not otherwise overwritten. if existing_go_buildfiles: deleted = [] for existing_go_buildfile in existing_go_buildfiles: - spec_path = os.path.dirname(existing_go_buildfile) - for address in self.context.address_mapper.addresses_in_spec_path(spec_path): - target = self.context.build_graph.resolve_address(address) - if isinstance(target, GoLocalSource): - os.unlink(os.path.join(get_buildroot(), existing_go_buildfile)) - deleted.append(existing_go_buildfile) + os.unlink(os.path.join(get_buildroot(), existing_go_buildfile)) + deleted.append(existing_go_buildfile) if deleted: self.context.log.info( "Deleted the following obsolete BUILD files:\n\t{}".format( diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 2decd98ccbb..826ff8e9b0b 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -948,7 +948,7 @@ def register_bootstrap_options(cls, register): register( "--experimental-fs-watcher", type=bool, - default=True, + default=False, advanced=True, help="Whether to use the engine filesystem watcher which registers the workspace" " for kernel file change events",