diff --git a/CHANGELOG.md b/CHANGELOG.md index a94a93e6f19..75c57b692c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -28,6 +28,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - LZMA compression is now supported. - Added a way to give JSON blobs as input to dataset readers in the `evaluate` command. - Added the argument `sub_module` in `PretrainedTransformerMismatchedEmbedder` +- Updated the docs for `PytorchSeq2VecWrapper` to clarify that a `mask` is required rather than sequence lengths. ### Changed diff --git a/allennlp/modules/seq2vec_encoders/pytorch_seq2vec_wrapper.py b/allennlp/modules/seq2vec_encoders/pytorch_seq2vec_wrapper.py index a52445fa8c1..4f3eac15924 100644 --- a/allennlp/modules/seq2vec_encoders/pytorch_seq2vec_wrapper.py +++ b/allennlp/modules/seq2vec_encoders/pytorch_seq2vec_wrapper.py @@ -35,9 +35,10 @@ class PytorchSeq2VecWrapper(Seq2VecEncoder): This is what pytorch's RNN's look like - just make sure your class looks like those, and it should work. - Note that we *require* you to pass sequence lengths when you call this module, to avoid subtle - bugs around masking. If you already have a `PackedSequence` you can pass `None` as the - second parameter. + Note that we *require* you to pass a binary `mask` of shape + (batch_size, sequence_length) when you call this module, to avoid subtle + bugs around masking. If you already have a `PackedSequence` you can pass + `None` as the second parameter. """ def __init__(self, module: torch.nn.modules.RNNBase) -> None: