diff --git a/src/pipelines/sentence_embeddings/mod.rs b/src/pipelines/sentence_embeddings/mod.rs
index 974c33de..2c4f275b 100644
--- a/src/pipelines/sentence_embeddings/mod.rs
+++ b/src/pipelines/sentence_embeddings/mod.rs
@@ -42,7 +42,7 @@ pub use config::{
 };
 pub use pipeline::{
     SentenceEmbeddingsModel, SentenceEmbeddingsModelOutput, SentenceEmbeddingsOption,
-    SentenceEmbeddingsTokenizerOuput,
+    SentenceEmbeddingsTokenizerOutput,
 };
 
 pub use resources::{
diff --git a/src/pipelines/sentence_embeddings/pipeline.rs b/src/pipelines/sentence_embeddings/pipeline.rs
index 24915185..c5b148d1 100644
--- a/src/pipelines/sentence_embeddings/pipeline.rs
+++ b/src/pipelines/sentence_embeddings/pipeline.rs
@@ -283,7 +283,7 @@ impl SentenceEmbeddingsModel {
     }
 
     /// Tokenizes the inputs
-    pub fn tokenize<S>(&self, inputs: &[S]) -> SentenceEmbeddingsTokenizerOuput
+    pub fn tokenize<S>(&self, inputs: &[S]) -> SentenceEmbeddingsTokenizerOutput
     where
         S: AsRef<str> + Sync,
     {
@@ -327,7 +327,7 @@ impl SentenceEmbeddingsModel {
             .map(|input| Tensor::of_slice(&(input)))
             .collect::<Vec<_>>();
 
-        SentenceEmbeddingsTokenizerOuput {
+        SentenceEmbeddingsTokenizerOutput {
             tokens_ids,
             tokens_masks,
         }
@@ -341,7 +341,7 @@ impl SentenceEmbeddingsModel {
     where
         S: AsRef<str> + Sync,
     {
-        let SentenceEmbeddingsTokenizerOuput {
+        let SentenceEmbeddingsTokenizerOutput {
             tokens_ids,
             tokens_masks,
         } = self.tokenize(inputs);
@@ -457,7 +457,7 @@ impl SentenceEmbeddingsModel {
 }
 
 /// Container for the SentenceEmbeddings tokenizer output.
-pub struct SentenceEmbeddingsTokenizerOuput {
+pub struct SentenceEmbeddingsTokenizerOutput {
     pub tokens_ids: Vec<Tensor>,
     pub tokens_masks: Vec<Tensor>,
 }