Implement concat layer

2024-03-17 21:38:29 +01:00
parent 9aab7cdb10
commit aac0c3a826
9 changed files with 121 additions and 33 deletions

include/layers/concat.cuh Normal file

@@ -0,0 +1,45 @@
#ifndef CUDANET_CONCAT_LAYER_H
#define CUDANET_CONCAT_LAYER_H
#include <ilayer.cuh>
namespace CUDANet::Layers {
/**
 * @brief Concatenates two inputs into a single output
*
*/
class Concat {
public:
/**
* @brief Create a new Concat layer
*
 * @param inputASize Size of the first input
 * @param inputBSize Size of the second input
*/
Concat(const unsigned int inputASize, const unsigned int inputBSize);
/**
* @brief Destroy the Concat layer
*
*/
~Concat();
/**
* @brief Forward pass of the concat layer
*
* @param d_input_A Device pointer to the first input
* @param d_input_B Device pointer to the second input
* @return Device pointer to the output
*/
float* forward(const float* d_input_A, const float* d_input_B);
private:
unsigned int inputASize;
unsigned int inputBSize;
float* d_output;
};
} // namespace CUDANet::Layers
#endif // CUDANET_CONCAT_LAYER_H
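For reference, a minimal sketch of what a matching concat.cu implementation could look like, assuming the output buffer is allocated once in the constructor and the two inputs are copied back to back on the device. The include style and the omission of error checking are assumptions for illustration, not the commit's actual implementation:

#include <concat.cuh>
#include <cuda_runtime.h>

namespace CUDANet::Layers {

// Allocate one device buffer large enough to hold both inputs back to back.
Concat::Concat(const unsigned int inputASize, const unsigned int inputBSize)
    : inputASize(inputASize), inputBSize(inputBSize) {
    cudaMalloc((void**)&d_output, sizeof(float) * (inputASize + inputBSize));
}

Concat::~Concat() {
    cudaFree(d_output);
}

float* Concat::forward(const float* d_input_A, const float* d_input_B) {
    // Copy A into the front of the output buffer and B directly behind it.
    cudaMemcpy(d_output, d_input_A, sizeof(float) * inputASize,
               cudaMemcpyDeviceToDevice);
    cudaMemcpy(d_output + inputASize, d_input_B, sizeof(float) * inputBSize,
               cudaMemcpyDeviceToDevice);
    return d_output;
}

}  // namespace CUDANet::Layers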

include/layers/ilayer.cuh

@@ -64,18 +64,6 @@ class ILayer {
* @brief Copy the weights and biases to the device
*/
virtual void toCuda() = 0;
int inputSize;
int outputSize;
float* d_output;
float* d_weights;
float* d_biases;
std::vector<float> weights;
std::vector<float> biases;
};
} // namespace CUDANet::Layers

include/layers/input.cuh

@@ -9,7 +9,7 @@ namespace CUDANet::Layers {
* @brief Input layer, just copies the input to the device
*
*/
class Input : public ILayer {
class Input {
public:
/**
* @brief Create a new Input layer
@@ -32,15 +32,7 @@ class Input : public ILayer {
*/
float* forward(const float* input);
void setWeights(const float* weights);
void setBiases(const float* biases);
private:
void initializeWeights();
void initializeBiases();
void toCuda();
int inputSize;
float* d_output;
};
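
Taken together, the new Concat layer and the now stand-alone Input layer could be used roughly as follows. This is a hypothetical usage sketch, not code from this commit; the include paths and the assumption that Input's constructor takes only the input size are inferred from the headers shown above.

#include <concat.cuh>
#include <input.cuh>
#include <cuda_runtime.h>
#include <vector>

int main() {
    const unsigned int sizeA = 64;
    const unsigned int sizeB = 32;
    std::vector<float> a(sizeA, 1.0f);
    std::vector<float> b(sizeB, 2.0f);

    // Input::forward copies the host data to the device and returns a device pointer.
    CUDANet::Layers::Input inputA(sizeA);
    CUDANet::Layers::Input inputB(sizeB);
    float* d_a = inputA.forward(a.data());
    float* d_b = inputB.forward(b.data());

    // Concat::forward joins the two device buffers into one of size sizeA + sizeB.
    CUDANet::Layers::Concat concat(sizeA, sizeB);
    float* d_out = concat.forward(d_a, d_b);

    // Copy the concatenated result back to the host for inspection.
    std::vector<float> out(sizeA + sizeB);
    cudaMemcpy(out.data(), d_out, out.size() * sizeof(float),
               cudaMemcpyDeviceToHost);
    return 0;
}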