Skip to content

Commit

Permalink
[NeoML] total clean-up for BaseLayer
Browse files Browse the repository at this point in the history
Signed-off-by: Kirill Golikov <kirill.golikov@abbyy.com>
  • Loading branch information
favorart committed Apr 23, 2024
1 parent f1e59a2 commit 2e1fad5
Show file tree
Hide file tree
Showing 6 changed files with 21 additions and 8 deletions.
4 changes: 2 additions & 2 deletions NeoML/include/NeoML/Dnn/Dnn.h
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ class NEOML_API CBaseLayer : public virtual IObject {
virtual size_t GetOutputBlobsSize() const;

// Releases all temporary resources allocated for the layer
virtual void CleanUp();
virtual void CleanUp( bool totalCleanUp = false );

// Returns the total size of trainable parameters in this layer
// Returns the total size of trainable parameters of its internal layers, if layer is composite or recurrent
Expand Down Expand Up @@ -529,7 +529,7 @@ class NEOML_API CDnn : public CDnnLayerGraph {
void RunAndLearnOnce();

// Releases all temporary resources allocated for RunAndBackwardOnce()
void CleanUp();
void CleanUp( bool totalCleanUp = false );

// Gets the maximum sequence length
int GetMaxSequenceLength() const { return maxSequenceLength; }
Expand Down
2 changes: 1 addition & 1 deletion NeoML/include/NeoML/Dnn/Layers/CompositeLayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ class NEOML_API CCompositeLayer : public CBaseLayer, public CDnnLayerGraph {
size_t GetOutputBlobsSize() const override;

// Releases all temporary resources allocated for the layer
void CleanUp() override;
void CleanUp( bool totalCleanUp = false ) override;

// Returns the total size of trainable parameters
size_t GetTrainableParametersSize() const override;
Expand Down
2 changes: 2 additions & 0 deletions NeoML/include/NeoML/Dnn/Layers/SinkLayer.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,8 @@ class NEOML_API CSinkLayer : public CBaseLayer {
// After each call to RunOnce this blob contains the results
const CPtr<CDnnBlob>& GetBlob() const;

// Releases temporary resources, then drops the cached result blob so GetBlob()
// no longer holds the last run's output.
// NOTE(review): blob is reset even when totalCleanUp == false — presumably
// intentional (the sink's blob is a run-time result, not a parameter); confirm.
void CleanUp( bool totalCleanUp = false ) override { CBaseLayer::CleanUp( totalCleanUp ); blob = nullptr; }

protected:
CPtr<CDnnBlob> blob;

Expand Down
12 changes: 11 additions & 1 deletion NeoML/src/Dnn/BaseLayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -272,12 +272,22 @@ size_t CBaseLayer::GetOutputBlobsSize() const
return result;
}

void CBaseLayer::CleanUp()
// Releases the temporary resources allocated for the layer.
// @param totalCleanUp  false: only the run-time input/output blobs are freed
//                      (the arrays stay sized to the current descriptors);
//                      true: the backward-pass buffers are freed as well and
//                      the layer is forced to reshape before the next run.
void CBaseLayer::CleanUp( bool totalCleanUp )
{
	// Drop the run-time blobs but keep the arrays sized to the descriptors,
	// so the per-connection bookkeeping stays consistent.
	inputBlobs.DeleteAll();
	inputBlobs.SetSize(inputDescs.Size());
	outputBlobs.DeleteAll();
	outputBlobs.SetSize(outputDescs.Size());

	if ( totalCleanUp ) {
		// Free everything used by RunAndBackwardOnce(): gradient blobs for the
		// inputs/outputs, accumulated parameter diffs, and pending output diffs.
		inputDiffBlobs.DeleteAll();
		outputDiffBlobs.DeleteAll();
		paramDiffBlobs.DeleteAll();
		readyOutputDiffs.DeleteAll();
		clearAllRuntimeBlobs();

		// Blob sizes can no longer be trusted after the total clean-up:
		// make the next run recompute the descriptors from scratch.
		ForceReshape();
	}
}

size_t CBaseLayer::GetTrainableParametersSize() const
Expand Down
4 changes: 2 additions & 2 deletions NeoML/src/Dnn/Dnn.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -678,10 +678,10 @@ void CDnn::RunAndLearnOnce()
solver->Train();
}

void CDnn::CleanUp()
// Releases the temporary resources of every layer in the network.
// When totalCleanUp is true, each layer additionally drops its
// training-time buffers (see CBaseLayer::CleanUp).
void CDnn::CleanUp( bool totalCleanUp )
{
	const int layerCount = layers.Size();
	for( int pos = 0; pos < layerCount; ++pos ) {
		layers[pos]->CleanUp( totalCleanUp );
	}
}

Expand Down
5 changes: 3 additions & 2 deletions NeoML/src/Dnn/Layers/CompositeLayer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -499,10 +499,11 @@ size_t CCompositeLayer::GetOutputBlobsSize() const
return result;
}

void CCompositeLayer::CleanUp()
void CCompositeLayer::CleanUp( bool totalCleanUp )
{
CBaseLayer::CleanUp( totalCleanUp );
for( int i = 0; i < internalDnn->layers.Size(); i++ ) {
internalDnn->layers[i]->CleanUp();
internalDnn->layers[i]->CleanUp( totalCleanUp );
}
}

Expand Down

0 comments on commit 2e1fad5

Please sign in to comment.