Refactor layers

2024-03-19 21:35:05 +01:00
parent 8d14b74f66
commit b6c4b7d2ae
12 changed files with 87 additions and 67 deletions


@@ -13,6 +13,10 @@ namespace CUDANet::Layers {
  */
 enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };
+/**
+ * @brief Utility class that performs activation
+ *
+ */
 class Activation {
   public:
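
For reference, the four ActivationType values correspond to the usual element-wise functions, with softmax taken over the whole vector. The sketch below is a plain host-side restatement of that math for illustration only; the applyActivation function is made up for this example and is not part of the CUDANet API, which runs these operations in device kernels.

#include <algorithm>
#include <cmath>
#include <vector>

// Illustrative reference only: what each ActivationType computes.
// The real library applies these on the GPU; this is plain host C++.
enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };

void applyActivation(ActivationType type, std::vector<float>& x) {
    switch (type) {
        case SIGMOID:
            for (float& v : x) v = 1.0f / (1.0f + std::exp(-v));
            break;
        case RELU:
            for (float& v : x) v = v > 0.0f ? v : 0.0f;
            break;
        case SOFTMAX: {
            float maxVal = x.empty() ? 0.0f : x[0];
            for (float v : x) maxVal = std::max(maxVal, v);
            float sum = 0.0f;
            for (float& v : x) { v = std::exp(v - maxVal); sum += v; }
            for (float& v : x) v /= sum;
            break;
        }
        case NONE:
            break;  // identity, leave values untouched
    }
}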


@@ -19,14 +19,13 @@ class Add {
     ~Add();
     /**
-     * @brief Adds the two inputs
+     * @brief Adds first input to second input
      *
      * @param d_inputA Device pointer to the first input
      * @param d_inputB Device pointer to the second input
      *
-     * @return Device pointer to the output
      */
-    float* forward(const float* d_inputA, const float* d_inputB);
+    void forward(const float* d_inputA, const float* d_inputB);
   private:
     int inputSize;
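
Both pointer parameters stay const after this change, so dropping the returned pointer suggests the sum now lands in a device buffer owned by the layer rather than being handed back directly. The kernel below is only a generic sketch of the element-wise add such a layer would launch; the vecAdd name, the d_output buffer, and the launch configuration are assumptions for illustration, not CUDANet's actual implementation.

// Generic element-wise add sketch; d_output stands in for a device buffer
// the Add layer would own internally (an assumption, not shown in this diff).
__global__ void vecAdd(const float* d_inputA, const float* d_inputB,
                       float* d_output, int n) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        d_output[i] = d_inputA[i] + d_inputB[i];
    }
}

// Hypothetical launch from Add::forward, over inputSize elements:
//   int block = 256;
//   int grid  = (inputSize + block - 1) / block;
//   vecAdd<<<grid, block>>>(d_inputA, d_inputB, d_output, inputSize);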


@@ -6,7 +6,7 @@
 #include "activation.cuh"
 #include "convolution.cuh"
-#include "weighted_layer.cuh"
+#include "layer.cuh"
 namespace CUDANet::Layers {


@@ -5,7 +5,7 @@
 #include <string>
 #include <vector>
-#include "weighted_layer.cuh"
+#include "layer.cuh"
 namespace CUDANet::Layers {


@@ -8,7 +8,7 @@ namespace CUDANet::Layers {
 /**
  * @brief Padding types
- *
+ *
  * SAME: Zero padding such that the output size is the same as the input
  * VALID: No padding
  *
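
For concreteness, these two modes are conventionally defined by the output-size arithmetic below (the standard definitions; the exact formula CUDANet uses is not shown in this diff, and the helper names are made up for illustration).

// Standard output-size arithmetic for the two padding modes (illustrative).
int outputSizeValid(int inputSize, int kernelSize, int stride) {
    return (inputSize - kernelSize) / stride + 1;   // VALID: no padding
}

int outputSizeSame(int inputSize, int stride) {
    return (inputSize + stride - 1) / stride;       // SAME: ceil(inputSize / stride)
}

// Example: a 28-wide input with a 3-wide kernel and stride 1
// gives 26 under VALID and stays 28 under SAME.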
@@ -16,40 +16,60 @@ namespace CUDANet::Layers {
 enum Padding { SAME, VALID };
 /**
- * @brief Base class for all layers
+ * @brief Basic Sequential Layer
  *
  */
-class WeightedLayer {
+class SequentialLayer {
   public:
+    /**
+     * @brief Destroy the Sequential Layer
+     *
+     */
+    virtual ~SequentialLayer() {};
+    /**
+     * @brief Forward propagation virtual function
+     *
+     * @param input Device pointer to the input
+     * @return float* Device pointer to the output
+     */
+    virtual float* forward(const float* input) = 0;
+};
+/**
+ * @brief Base class for layers with weights and biases
+ */
+class WeightedLayer : public SequentialLayer {
+  public:
     /**
      * @brief Destroy the ILayer object
-     *
+     *
      */
-    virtual ~WeightedLayer() {}
+    virtual ~WeightedLayer() {};
     /**
      * @brief Virtual function for forward pass
-     *
+     *
      * @param input (Device) Pointer to the input
      * @return float* Device pointer to the output
      */
-    virtual float* forward(const float* input) = 0;
+    virtual float* forward(const float* input) = 0;
     /**
      * @brief Virtual function for setting weights
-     *
+     *
      * @param weights Pointer to the weights
      */
-    virtual void setWeights(const float* weights) = 0;
+    virtual void setWeights(const float* weights) = 0;
     /**
      * @brief Virtual function for setting biases
-     *
+     *
      * @param biases Pointer to the biases
      */
-    virtual void setBiases(const float* biases) = 0;
+    virtual void setBiases(const float* biases) = 0;
   private:
     /**
      * @brief Initialize the weights
      */
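
The net effect of this hunk is a two-level hierarchy: SequentialLayer declares only the forward interface, and WeightedLayer adds the weight/bias hooks on top of it. Below is a minimal sketch of a layer that needs no weights and therefore derives straight from SequentialLayer; the ReLULayer class, its members, and the include path are hypothetical and only illustrate how the new base class is subclassed.

#include "layer.cuh"  // header introduced by this refactor (path assumed)

// Hypothetical weight-free layer built on the new SequentialLayer interface.
class ReLULayer : public CUDANet::Layers::SequentialLayer {
  public:
    explicit ReLULayer(int size) : size(size) {
        // cudaMalloc(&d_output, sizeof(float) * size);  // allocate device buffer
    }
    ~ReLULayer() override {
        // cudaFree(d_output);
    }

    float* forward(const float* d_input) override {
        // launch an element-wise ReLU kernel that writes into d_output ...
        return d_output;  // device pointer, as the interface requires
    }

  private:
    int    size;
    float* d_output = nullptr;  // device buffer owned by the layer
};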
@@ -58,7 +78,7 @@ class WeightedLayer {
     /**
      * @brief Initialize the biases
      */
-    virtual void initializeBiases() = 0;
+    virtual void initializeBiases() = 0;
     /**
      * @brief Copy the weights and biases to the device