From e6cfd99525529990ea69cef8a0f220072f3e9dea Mon Sep 17 00:00:00 2001
From: ydshieh
Date: Mon, 8 Aug 2022 10:06:42 +0200
Subject: [PATCH 1/2] unpin torch to use 1.12.1

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 52b7837a88456d..b2a7db7e65d8b6 100644
--- a/setup.py
+++ b/setup.py
@@ -162,7 +162,7 @@
     "timeout-decorator",
     "timm",
     "tokenizers>=0.11.1,!=0.11.3,<0.13",
-    "torch>=1.0,<1.12",
+    "torch>=1.0",
     "torchaudio",
     "pyctcdecode>=0.3.0",
     "tqdm>=4.27",

From 7fc01e29942cf5ac5c2f718f082ca0366e50cc6a Mon Sep 17 00:00:00 2001
From: ydshieh
Date: Mon, 8 Aug 2022 12:29:30 +0200
Subject: [PATCH 2/2] try fix

---
 setup.py                                                   | 2 +-
 src/transformers/models/deberta/modeling_deberta.py        | 2 ++
 src/transformers/models/deberta_v2/modeling_deberta_v2.py  | 2 ++
 src/transformers/models/sew_d/modeling_sew_d.py            | 2 ++
 4 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index b2a7db7e65d8b6..52b7837a88456d 100644
--- a/setup.py
+++ b/setup.py
@@ -162,7 +162,7 @@
     "timeout-decorator",
     "timm",
     "tokenizers>=0.11.1,!=0.11.3,<0.13",
-    "torch>=1.0",
+    "torch>=1.0,<1.12",
     "torchaudio",
     "pyctcdecode>=0.3.0",
     "tqdm>=4.27",
diff --git a/src/transformers/models/deberta/modeling_deberta.py b/src/transformers/models/deberta/modeling_deberta.py
index 2d9e647c130cab..0e4614714b3fee 100644
--- a/src/transformers/models/deberta/modeling_deberta.py
+++ b/src/transformers/models/deberta/modeling_deberta.py
@@ -187,6 +187,8 @@ def backward(ctx, grad_output):
 
     @staticmethod
     def symbolic(g: torch._C.Graph, input: torch._C.Value, local_ctx: Union[float, DropoutContext]) -> torch._C.Value:
+        from torch.onnx import symbolic_opset12
+
         dropout_p = local_ctx
         if isinstance(local_ctx, DropoutContext):
             dropout_p = local_ctx.dropout
diff --git a/src/transformers/models/deberta_v2/modeling_deberta_v2.py b/src/transformers/models/deberta_v2/modeling_deberta_v2.py
index 738981648af956..c6e2b25dc0e812 100644
--- a/src/transformers/models/deberta_v2/modeling_deberta_v2.py
+++ b/src/transformers/models/deberta_v2/modeling_deberta_v2.py
@@ -193,6 +193,8 @@ def backward(ctx, grad_output):
 
     @staticmethod
     def symbolic(g: torch._C.Graph, input: torch._C.Value, local_ctx: Union[float, DropoutContext]) -> torch._C.Value:
+        from torch.onnx import symbolic_opset12
+
         dropout_p = local_ctx
         if isinstance(local_ctx, DropoutContext):
             dropout_p = local_ctx.dropout
diff --git a/src/transformers/models/sew_d/modeling_sew_d.py b/src/transformers/models/sew_d/modeling_sew_d.py
index e582705ab09424..da6fd8d168dc2a 100644
--- a/src/transformers/models/sew_d/modeling_sew_d.py
+++ b/src/transformers/models/sew_d/modeling_sew_d.py
@@ -597,6 +597,8 @@ def backward(ctx, grad_output):
 
     @staticmethod
     def symbolic(g: torch._C.Graph, input: torch._C.Value, local_ctx: Union[float, DropoutContext]) -> torch._C.Value:
+        from torch.onnx import symbolic_opset12
+
         dropout_p = local_ctx
         if isinstance(local_ctx, DropoutContext):
             dropout_p = local_ctx.dropout
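
Illustrative sketch (not part of the patch): the second commit adds "from torch.onnx import symbolic_opset12" inside the symbolic staticmethod of each XDropout, so torch.onnx internals are only imported when an ONNX graph is actually being traced, never at plain module import time. The minimal example below shows that deferred-import pattern on a toy autograd.Function; the DeferredDropout class, its forward/backward bodies, and the final symbolic_opset12.dropout(...) call are assumptions modeled on the surrounding transformers code, not lines taken from this diff.

import torch


class DeferredDropout(torch.autograd.Function):
    """Toy stand-in for transformers' XDropout, reduced to the deferred-import pattern."""

    @staticmethod
    def forward(ctx, input, dropout_p):
        # Plain inverted dropout so the op has a well-defined eager behaviour.
        mask = (torch.rand_like(input) > dropout_p).to(input.dtype)
        ctx.save_for_backward(mask)
        ctx.scale = 1.0 / (1.0 - dropout_p)
        return input * mask * ctx.scale

    @staticmethod
    def backward(ctx, grad_output):
        (mask,) = ctx.saved_tensors
        # Gradient w.r.t. input only; dropout_p is a non-tensor argument.
        return grad_output * mask * ctx.scale, None

    @staticmethod
    def symbolic(g, input, dropout_p):
        # Deferred import: torch.onnx is only touched when this op is traced
        # for ONNX export, mirroring what the patch does in the modeling files.
        from torch.onnx import symbolic_opset12

        return symbolic_opset12.dropout(g, input, dropout_p, True)

When a module that calls DeferredDropout.apply(x, 0.1) is passed to torch.onnx.export with opset_version=12, the exporter should pick up the symbolic staticmethod and the import runs only at that point.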