Skip to content

Commit

Permalink
Revert D26973911: Implement public API InferenceMode and its error handling
Browse files Browse the repository at this point in the history

Test Plan: revert-hammer

Differential Revision:
D26973911 (pytorch@7caa464)

Original commit changeset: 0ebdac7a3cd5

fbshipit-source-id: afd37a3785bc694e8ffbd679eba1cfed89ef2273
  • Loading branch information
Ailing Zhang authored and facebook-github-bot committed Mar 29, 2021
1 parent 1551bcc commit 263180d
Show file tree
Hide file tree
Showing 19 changed files with 13 additions and 699 deletions.
10 changes: 0 additions & 10 deletions c10/core/InferenceMode.cpp

This file was deleted.

65 changes: 0 additions & 65 deletions c10/core/InferenceMode.h

This file was deleted.

14 changes: 4 additions & 10 deletions c10/core/TensorImpl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
#include <c10/core/WrapDimMinimal.h>
#include <c10/core/impl/LocalDispatchKeySet.h>
#include <c10/util/Optional.h>
#include <c10/core/InferenceMode.h>

C10_DEFINE_bool(
caffe2_keep_on_shrink,
Expand Down Expand Up @@ -85,15 +84,10 @@ TensorImpl::TensorImpl(Storage&& storage, DispatchKeySet key_set, const caffe2::
// a backend DispatchKey and an AutogradBackend key.
// We automatically add the corresponding autograd key to key_set_ so that backends can stay
// in the old way of only registering with backend key like DispatchKey::CPU.
if (c10::InferenceMode::is_enabled()) {
// See Note [Expected TLS state in InferenceMode] for why we don't add Autograd & InplaceOrView keys.
key_set_ = key_set;
} else {
// TODO: Ideally we only add AutogradBackend key when the tensor requires grad.
// See Note [Dream: skip VariableType kernel when requires_grad=false]
DispatchKey k = key_set.highestPriorityBackendTypeId();
key_set_ = key_set | getAutogradRelatedKeySetFromBackend(k);
}
// TODO: Ideally this logic fits best in Variable/Autograd layer so that we only
// add AutogradBackend key when the tensor requires grad.
DispatchKey k = key_set.highestPriorityBackendTypeId();
key_set_ = key_set | getAutogradRelatedKeySetFromBackend(k);

// we would also like to check that non-cpu devices have an index, but some Caffe2 operators create
// Storages with default devices.
Expand Down
9 changes: 0 additions & 9 deletions c10/core/TensorImpl.h
Original file line number Diff line number Diff line change
Expand Up @@ -563,15 +563,6 @@ struct C10_API TensorImpl : public c10::intrusive_ptr_target {
key_set_ = key_set_ - autograd_dispatch_keyset;
}

// Inference tensor doesn't have autograd or InplaceOrView key.
bool is_inference_tensor() {
bool no_InplaceOrView = !key_set_.has(c10::DispatchKey::InplaceOrView);
bool no_Autograd = (key_set_ & c10::autograd_dispatch_keyset).empty();
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(no_InplaceOrView == no_Autograd,
"InplaceOrView and Autograd keys must be on/off at the same time.");
return no_InplaceOrView && no_Autograd;
}

int64_t get_device() const {
TORCH_CHECK(
device_opt_.has_value(),
Expand Down
10 changes: 2 additions & 8 deletions c10/core/impl/LocalDispatchKeySet.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
namespace c10 {
namespace impl {

thread_local PODLocalDispatchKeySet raw_local_dispatch_key_set {DispatchKeySet(DispatchKey::InplaceOrView).raw_repr()};
// NB: POD, zero initialized!
thread_local PODLocalDispatchKeySet raw_local_dispatch_key_set;

#if defined(_MSC_VER) || defined(C10_ANDROID)
LocalDispatchKeySet tls_local_dispatch_key_set() {
Expand Down Expand Up @@ -100,11 +101,4 @@ void tls_set_dispatch_key_included(DispatchKey x, bool desired_state) {
}
}

bool tls_is_dispatch_keyset_excluded(DispatchKeySet ks) {
return raw_local_dispatch_key_set.excluded().isSupersetOf(ks);
}

bool tls_is_dispatch_keyset_included(DispatchKeySet ks) {
return raw_local_dispatch_key_set.included().isSupersetOf(ks);
}
}} // namespace c10::impl
2 changes: 0 additions & 2 deletions c10/core/impl/LocalDispatchKeySet.h
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,5 @@ C10_API bool tls_is_dispatch_key_excluded(DispatchKey x);
C10_API void tls_set_dispatch_key_excluded(DispatchKey x, bool desired_state);
C10_API bool tls_is_dispatch_key_included(DispatchKey x);
C10_API void tls_set_dispatch_key_included(DispatchKey x, bool desired_state);
C10_API bool tls_is_dispatch_keyset_excluded(DispatchKeySet ks);
C10_API bool tls_is_dispatch_keyset_included(DispatchKeySet ks);

}} // namespace c10::impl
2 changes: 0 additions & 2 deletions test/cpp/api/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,8 +36,6 @@ set(TORCH_API_TEST_SOURCES
${TORCH_API_TEST_DIR}/tensor_options.cpp
${TORCH_API_TEST_DIR}/tensor.cpp
${TORCH_API_TEST_DIR}/torch_include.cpp
${TORCH_API_TEST_DIR}/inference_mode.cpp
${TORCH_API_TEST_DIR}/grad_mode.cpp
)

if(USE_CUDA)
Expand Down
70 changes: 0 additions & 70 deletions test/cpp/api/grad_mode.cpp

This file was deleted.

Loading

0 comments on commit 263180d

Please sign in to comment.