Allocate activation on heap

2024-04-22 18:59:16 +02:00
parent 26cea9b12c
commit a32c737785
10 changed files with 17 additions and 15 deletions

@@ -38,7 +38,7 @@ class AvgPooling2D : public SequentialLayer {
     float* d_output;
-    Activation activation;
+    Activation* activation;
 };
 } // namespace CUDANet::Layers

@@ -120,8 +120,7 @@ class Conv2d : public WeightedLayer {
     float* d_weights;
     float* d_biases;
-    // Kernels
-    Activation activation;
+    Activation* activation;
    /**
     * @brief Initialize weights of the convolutional layer with zeros

@@ -77,7 +77,7 @@ class Dense : public WeightedLayer {
     std::vector<float> weights;
     std::vector<float> biases;
-    Layers::Activation activation;
+    Layers::Activation* activation;
     // Precompute kernel launch parameters
     unsigned int forwardGridSize;

@@ -38,7 +38,7 @@ class MaxPooling2D : public SequentialLayer {
     float* d_output;
-    Activation activation;
+    Activation* activation;
 };
 } // namespace CUDANet::Layers
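
For context, a minimal sketch of what switching the member from a value to a heap-allocated pointer implies for a layer's lifetime management. The Activation constructor arguments, the enum, and the simplified MaxPooling2D shown here are hypothetical stand-ins, not the actual CUDANet headers:

namespace CUDANet::Layers {

enum class ActivationType { RELU, SIGMOID, NONE };  // hypothetical

class Activation {
  public:
    Activation(ActivationType type, int length) : type(type), length(length) {}
  private:
    ActivationType type;
    int length;
};

class MaxPooling2D {
  public:
    MaxPooling2D(int outputSize, ActivationType activationType)
        // The activation now lives on the heap instead of inside the layer object.
        : activation(new Activation(activationType, outputSize)) {}

    // The owning layer releases it; copying is disabled to avoid a double delete.
    ~MaxPooling2D() { delete activation; }
    MaxPooling2D(const MaxPooling2D&) = delete;
    MaxPooling2D& operator=(const MaxPooling2D&) = delete;

  private:
    float* d_output = nullptr;
    Activation* activation;  // was: Activation activation;
};

}  // namespace CUDANet::Layers

A std::unique_ptr<Activation> member would give the same heap allocation without the manual delete; the raw pointer above simply mirrors what the diff declares.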