Skip to content

Commit

Permalink
Fix max macro in LoraFullyConnectedLayer.cpp (#1000)
Browse files · Browse the repository at this point in the history
Signed-off-by: Valery Fedyunin <valery.fedyunin@abbyy.com>
  • Branch information
Valeriy Fedyunin committed Nov 29, 2023
1 parent ae945a7 commit 1509b6b
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions NeoML/src/Dnn/Layers/LoraFullyConnectedLayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ void CLoraFullyConnectedLayer::RunOnce()
const int tempAxBSize = inputHeight * BHeight;

const bool inference = !IsBackwardPerformed() && !IsLearningPerformed();
CFloatHandleStackVar temp( MathEngine(), inference ? 0 : std::max( inputSize, tempAxBSize ) );
CFloatHandleStackVar temp( MathEngine(), inference ? 0 : max( inputSize, tempAxBSize ) );
CFloatHandle tempAxB = temp.GetHandle();

/* +---------+ +--------+ \
Expand Down Expand Up @@ -250,7 +250,7 @@ void CLoraFullyConnectedLayer::BackwardOnce()
const int BWidth = WeightsB()->GetObjectSize();
const int tempBDiffSize = outputDiffHeight * BWidth;
NeoPresume( inputDiffSize == outputDiffHeight * weightsWidth );
CFloatHandleStackVar temp( MathEngine(), std::max( inputDiffSize, tempBDiffSize ) );
CFloatHandleStackVar temp( MathEngine(), max( inputDiffSize, tempBDiffSize ) );

CFloatHandle tempBDiff = temp.GetHandle();
MathEngine().MultiplyMatrixByMatrix( /*batchSize*/1,
Expand Down Expand Up @@ -306,7 +306,7 @@ void CLoraFullyConnectedLayer::LearnOnce()

const int tempBDiffSize = outputDiffHeight * BWidth;
const int tempInputASize = inputHeight * AHeight;
CFloatHandleStackVar temp( MathEngine(), std::max( tempBDiffSize, tempInputASize ) );
CFloatHandleStackVar temp( MathEngine(), max( tempBDiffSize, tempInputASize ) );

{
CFloatHandle tempBDiff = temp.GetHandle();
Expand Down

0 comments on commit 1509b6b

Please sign in to comment.