From 33d4a43dcafee32272c3cd3ad86244336791744d Mon Sep 17 00:00:00 2001
From: LordMathis
Date: Sun, 19 May 2024 20:21:13 +0200
Subject: [PATCH] Update concat layer

---
 include/layers/concat.cuh | 8 +++++---
 src/layers/concat.cu      | 8 +++++---
 2 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/include/layers/concat.cuh b/include/layers/concat.cuh
index 71e0fe5..753be48 100644
--- a/include/layers/concat.cuh
+++ b/include/layers/concat.cuh
@@ -15,7 +15,7 @@ class Concat {
      * @param inputASize Size of the first input
      * @param inputBSize Size of the second input
      */
-    Concat(const unsigned int inputASize, const unsigned int inputBSize);
+    Concat(const int inputASize, const int inputBSize);
 
     /**
      * @brief Destroy the Concat layer
@@ -33,9 +33,11 @@ class Concat {
      */
     float* forward(const float* d_input_A, const float* d_input_B);
 
+    int getOutputSize() const;
+
   private:
-    unsigned int inputASize;
-    unsigned int inputBSize;
+    int inputASize;
+    int inputBSize;
 
     float* d_output;
 };
diff --git a/src/layers/concat.cu b/src/layers/concat.cu
index f353f7f..f7ef037 100644
--- a/src/layers/concat.cu
+++ b/src/layers/concat.cu
@@ -4,21 +4,19 @@
 
 using namespace CUDANet::Layers;
 
-Concat::Concat(const unsigned int inputASize, const unsigned int inputBSize)
+Concat::Concat(const int inputASize, const int inputBSize)
     : inputASize(inputASize), inputBSize(inputBSize) {
     d_output = nullptr;
     CUDA_CHECK(cudaMalloc(
         (void**)&d_output, sizeof(float) * (inputASize + inputBSize)
     ));
-
 }
 
 Concat::~Concat() {
     cudaFree(d_output);
 }
 
-
 float* Concat::forward(const float* d_input_A, const float* d_input_B) {
     CUDA_CHECK(cudaMemcpy(
         d_output, d_input_A, sizeof(float) * inputASize, cudaMemcpyDeviceToDevice
    ));
@@ -33,3 +31,7 @@ float* Concat::forward(const float* d_input_A, const float* d_input_B) {
 
     return d_output;
 }
+
+int Concat::getOutputSize() const {
+    return inputASize + inputBSize;
+}