From 14d7aea4b12a32e2fe5e2eee1e263763feb100aa Mon Sep 17 00:00:00 2001 From: tpoisonooo Date: Wed, 8 Nov 2023 12:38:15 +0800 Subject: [PATCH] style(pytorch_poc): add code ref --- lmdeploy/pytorch_poc/models/functional.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lmdeploy/pytorch_poc/models/functional.py b/lmdeploy/pytorch_poc/models/functional.py index 2f7000c2d..98d419b69 100644 --- a/lmdeploy/pytorch_poc/models/functional.py +++ b/lmdeploy/pytorch_poc/models/functional.py @@ -269,7 +269,7 @@ def attention_forward_with_rerope( rotary_emb_context_fn (Callable): rotary embedding context callback. rotary_emb_generate_fn (Callable): rotary embedding generate callback. bias_type (str): type of attention bias. support ['default']. - training_lenght (int): model sequence length during trainning. + training_length (int): model sequence length during training. window (int): ReRoPE window size, default value is 512. """ hidden_size = -1