
Commit

Update CI to build without defaults (guillaume-be#299)
* Avoid attention copy, fix remote feature

* Revert conversation change

* Updated CI for build without defaults
guillaume-be committed Nov 20, 2022
1 parent 05367b4 commit a93b406
Showing 22 changed files with 42 additions and 24 deletions.
15 changes: 15 additions & 0 deletions .github/workflows/continuous-integration.yml
@@ -21,6 +21,21 @@ jobs:
         with:
           command: build
 
+  build-no-defaults:
+    name: Build no defaults
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - uses: actions-rs/cargo@v1
+        with:
+          command: build
+          args: --no-default-features
+
   build-windows:
     name: Build Windows
     runs-on: windows-latest
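
Note: the new job reuses the same stable-toolchain setup as the existing build job and simply passes --no-default-features through actions-rs/cargo, so the local equivalent of what CI now verifies is:

    cargo build --no-default-features

This surfaces any code that unconditionally references items gated behind a default feature, such as the remote-only import fixed in src/pipelines/keywords_extraction/pipeline.rs below.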
2 changes: 1 addition & 1 deletion src/albert/encoder.rs
@@ -138,7 +138,7 @@ impl AlbertLayerGroup {
             hidden_state = temp.0;
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
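
This hunk is what the "Avoid attention copy" bullet refers to, and the same change recurs in most of the files below: the old code cloned the attention tensor with .copy() before pushing it into the accumulator, while the new code moves the tensor out of its Option with std::mem::take, leaving a default tensor behind instead of duplicating the weights. A minimal standalone sketch of the semantics, with a plain struct standing in for tch::Tensor (which must implement Default for mem::take to apply):

    #[derive(Default, Debug)]
    struct Tensor(Vec<f32>); // stand-in for tch::Tensor

    fn main() {
        let attention_weights: Option<Tensor> = Some(Tensor(vec![0.1, 0.2]));
        let mut attentions: Vec<Tensor> = Vec::new();

        // Old: attentions.push(attention_weights.as_ref().unwrap().copy());
        // duplicated the tensor's storage. Here unwrap() moves the tensor
        // out of the Option, and mem::take swaps a Tensor::default() into
        // the temporary, so the data is moved rather than copied.
        attentions.push(std::mem::take(&mut attention_weights.unwrap()));
        println!("{:?}", attentions);
    }

This is safe because the surrounding loop reassigns attention_weights from the layer output on every iteration, so nothing reads the Option again after the move.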
2 changes: 1 addition & 1 deletion src/bart/decoder.rs
@@ -323,7 +323,7 @@ impl BartDecoder {
                 hidden_states.push(hidden_state.as_ref().copy());
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(value) = &mut next_decoder_cache {
                 value[layer_idx] = temp.2
2 changes: 1 addition & 1 deletion src/bart/encoder.rs
@@ -233,7 +233,7 @@ impl BartEncoder {
             hidden_state = temp.0;
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
2 changes: 1 addition & 1 deletion src/bert/encoder.rs
@@ -292,7 +292,7 @@ impl BertEncoder {
             hidden_state = Some(layer_output.hidden_state);
             attention_weights = layer_output.attention_weights;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().unwrap().copy());
2 changes: 1 addition & 1 deletion src/deberta/encoder.rs
@@ -287,7 +287,7 @@ impl DebertaEncoder {
             hidden_state = Some(layer_output.0);
             attention_weights = layer_output.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().unwrap().copy());
2 changes: 1 addition & 1 deletion src/deberta_v2/encoder.rs
@@ -333,7 +333,7 @@ impl DebertaV2Encoder {
             }
             attention_weights = layer_output.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(output_states.as_ref().unwrap().copy());
2 changes: 1 addition & 1 deletion src/distilbert/transformer.rs
@@ -164,7 +164,7 @@ impl Transformer {
             hidden_state = Some(temp.0);
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().unwrap().copy());
4 changes: 2 additions & 2 deletions src/gpt2/gpt2_model.rs
@@ -470,10 +470,10 @@ impl Gpt2Model {
                 layer.forward_t(&hidden_state, past.as_ref(), attention_mask.as_ref(), train);
             hidden_state = temp.0;
             if let Some(presents) = all_presents.borrow_mut() {
-                presents.push(temp.1.as_ref().copy());
+                presents.push(temp.1);
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(temp.2.as_ref().unwrap().copy());
+                attentions.push(temp.2.unwrap());
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
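
GPT-2 takes an even simpler route: the layer output tuple temp is not used again after these pushes, so the present (key/value cache) and attention tensors are moved straight out of the tuple rather than copied; the same direct move appears in src/openai_gpt/openai_gpt_model.rs below. A small sketch of the move-out-of-a-tuple pattern, with simplified stand-in types:

    fn main() {
        let mut presents: Vec<Vec<f32>> = Vec::new();
        let mut attentions: Vec<Vec<f32>> = Vec::new();

        // Stand-in for the (hidden_state, present, attention_weights)
        // tuple returned by layer.forward_t.
        let temp = (1u8, vec![0.5f32], Some(vec![0.25f32]));

        presents.push(temp.1); // moves the cached tensor, no copy
        attentions.push(temp.2.unwrap()); // moves the attention weights
        println!("{} {}", presents.len(), attentions.len());
    }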
2 changes: 1 addition & 1 deletion src/gpt_neo/gpt_neo_model.rs
@@ -425,7 +425,7 @@ impl GptNeoModel {
             attention_weights = temp.1;
             next_cache[layer_idx] = temp.2;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(x.as_ref().unwrap().copy());
5 changes: 3 additions & 2 deletions src/longformer/encoder.rs
@@ -340,10 +340,11 @@ impl LongformerEncoder {
             attention_weights = temp.1;
             global_attention_weights = temp.2;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().transpose(1, 2));
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()).transpose(1, 2));
             };
             if let Some(global_attentions) = all_global_attentions.borrow_mut() {
-                global_attentions.push(global_attention_weights.as_ref().unwrap().transpose(2, 3));
+                global_attentions
+                    .push(std::mem::take(&mut global_attention_weights.unwrap()).transpose(2, 3));
             };
             if let Some(all_hidden_states) = all_hidden_states.borrow_mut() {
                 all_hidden_states.push(x.as_ref().unwrap().copy());
2 changes: 1 addition & 1 deletion src/m2m_100/decoder.rs
@@ -174,7 +174,7 @@ impl M2M100Decoder {
                 hidden_states.push(hidden_state.as_ref().copy());
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(value) = &mut next_decoder_cache {
                 value[layer_idx] = temp.2
2 changes: 1 addition & 1 deletion src/m2m_100/encoder.rs
@@ -108,7 +108,7 @@ impl M2M100Encoder {
             hidden_state = temp.0;
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
2 changes: 1 addition & 1 deletion src/mbart/decoder.rs
@@ -308,7 +308,7 @@ impl MBartDecoder {
                 hidden_states.push(hidden_state.as_ref().copy());
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(value) = &mut next_decoder_cache {
                 value[layer_idx] = temp.2
2 changes: 1 addition & 1 deletion src/mbart/encoder.rs
@@ -217,7 +217,7 @@ impl MBartEncoder {
             hidden_state = temp.0;
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
2 changes: 1 addition & 1 deletion src/mobilebert/encoder.rs
@@ -491,7 +491,7 @@ impl MobileBertEncoder {
             x = Some(temp.0);
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(x.as_ref().unwrap().copy());
2 changes: 1 addition & 1 deletion src/openai_gpt/openai_gpt_model.rs
@@ -260,7 +260,7 @@ impl OpenAiGptModel {
             let temp = layer.forward_t(&hidden_state, attention_mask.as_ref(), train);
             hidden_state = temp.0;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(temp.1.as_ref().unwrap().copy());
+                attentions.push(temp.1.unwrap());
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
2 changes: 1 addition & 1 deletion src/pegasus/decoder.rs
@@ -168,7 +168,7 @@ impl PegasusDecoder {
                 hidden_states.push(hidden_state.as_ref().copy());
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(value) = &mut next_decoder_cache {
                 value[layer_idx] = temp.2
2 changes: 1 addition & 1 deletion src/pegasus/encoder.rs
@@ -110,7 +110,7 @@ impl PegasusEncoder {
             hidden_state = temp.0;
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(hidden_state.as_ref().copy());
6 changes: 4 additions & 2 deletions src/pipelines/keywords_extraction/pipeline.rs
@@ -21,9 +21,11 @@
 /// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 /// SOFTWARE.
 use crate::pipelines::keywords_extraction::tokenizer::StopWordsTokenizer;
+#[cfg(feature = "remote")]
+use crate::pipelines::sentence_embeddings::SentenceEmbeddingsModelType;
 use crate::pipelines::sentence_embeddings::{
-    SentenceEmbeddingsConfig, SentenceEmbeddingsModel, SentenceEmbeddingsModelType,
-    SentenceEmbeddingsSentenceBertConfig, SentenceEmbeddingsTokenizerConfig,
+    SentenceEmbeddingsConfig, SentenceEmbeddingsModel, SentenceEmbeddingsSentenceBertConfig,
+    SentenceEmbeddingsTokenizerConfig,
 };
 use crate::{Config, RustBertError};
 use regex::Regex;
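
This hunk is the "fix remote feature" part of the commit: SentenceEmbeddingsModelType only exists when the remote feature is enabled, so importing it unconditionally broke cargo build --no-default-features. Splitting it out into a #[cfg(feature = "remote")] import leaves the remaining sentence-embeddings imports unconditional. A minimal standalone sketch of the gating pattern (hypothetical names, assuming a "remote" feature declared in Cargo.toml):

    // Only exists when the "remote" feature is enabled; a stand-in for
    // a remote-only type such as SentenceEmbeddingsModelType.
    #[cfg(feature = "remote")]
    #[derive(Debug, Default)]
    pub struct RemoteModelType;

    // Anything whose signature names the remote-only type must be gated too.
    #[cfg(feature = "remote")]
    #[allow(dead_code)]
    pub fn default_remote_model() -> RemoteModelType {
        RemoteModelType
    }

    fn main() {
        // cfg! evaluates at compile time, but both arms still type-check,
        // so the remote-only type is only named inside the gated items above.
        if cfg!(feature = "remote") {
            println!("remote feature enabled");
        } else {
            println!("built without default features");
        }
    }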
2 changes: 1 addition & 1 deletion src/prophetnet/encoder.rs
@@ -195,7 +195,7 @@ impl ProphetNetEncoder {
             x = Some(temp.0);
             attention_weights = temp.1;
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(hidden_states) = all_hidden_states.borrow_mut() {
                 hidden_states.push(x.as_ref().unwrap().transpose(0, 1));
2 changes: 1 addition & 1 deletion src/t5/encoder.rs
@@ -509,7 +509,7 @@ impl T5Stack {
                 hidden_states.push(hidden_state.as_ref().copy().transpose(0, 1));
             };
             if let Some(attentions) = all_attentions.borrow_mut() {
-                attentions.push(attention_weights.as_ref().unwrap().copy());
+                attentions.push(std::mem::take(&mut attention_weights.unwrap()));
             };
             if let Some(value) = &mut next_cache {
                 value[layer_idx] = block_output.cache
