Update concat layer

commit 33d4a43dca
parent e8bffe22d5
Date:   2024-05-19 20:21:13 +02:00
2 changed files with 10 additions and 6 deletions

Header file (Concat class declaration):

@@ -15,7 +15,7 @@ class Concat {
      * @param inputASize Size of the first input
      * @param inputBSize Size of the second input
      */
-    Concat(const unsigned int inputASize, const unsigned int inputBSize);
+    Concat(const int inputASize, const int inputBSize);
     /**
      * @brief Destroy the Concat layer
@@ -33,9 +33,11 @@ class Concat {
      */
     float* forward(const float* d_input_A, const float* d_input_B);
+    int getOutputSize();
   private:
-    unsigned int inputASize;
-    unsigned int inputBSize;
+    int inputASize;
+    int inputBSize;
     float* d_output;
 };
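
For readability, here is a rough sketch of the class declaration as it stands after this commit, pieced together from the hunks above (doc comments abridged; the exact access-specifier layout and destructor signature are assumptions based on the diff context):

class Concat {
  public:
    // Construct the layer from the sizes of its two inputs (now plain int).
    Concat(const int inputASize, const int inputBSize);

    // Destroy the Concat layer and release the device output buffer.
    ~Concat();

    // Concatenate the two device input buffers into the layer's output buffer.
    float* forward(const float* d_input_A, const float* d_input_B);

    // New in this commit: total number of elements in the concatenated output.
    int getOutputSize();

  private:
    int inputASize;
    int inputBSize;
    float* d_output;
};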

Source file (Concat implementation):

@@ -4,21 +4,19 @@
 using namespace CUDANet::Layers;

-Concat::Concat(const unsigned int inputASize, const unsigned int inputBSize)
+Concat::Concat(const int inputASize, const int inputBSize)
     : inputASize(inputASize), inputBSize(inputBSize) {
-    d_output = nullptr;
     CUDA_CHECK(cudaMalloc(
         (void**)&d_output, sizeof(float) * (inputASize + inputBSize)
     ));
 }

 Concat::~Concat() {
     cudaFree(d_output);
 }

 float* Concat::forward(const float* d_input_A, const float* d_input_B) {
     CUDA_CHECK(cudaMemcpy(
         d_output, d_input_A, sizeof(float) * inputASize, cudaMemcpyDeviceToDevice
@@ -33,3 +31,7 @@ float* Concat::forward(const float* d_input_A, const float* d_input_B) {
     return d_output;
 }
+
+int Concat::getOutputSize() {
+    return inputASize + inputBSize;
+};
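
A minimal usage sketch (not part of the commit), showing how the updated layer and the new getOutputSize() getter might be driven. The include path, input sizes, and host data here are hypothetical, and the project's CUDA_CHECK macro is replaced by bare CUDA runtime calls:

#include <cuda_runtime.h>
#include <vector>
// #include "concat.cuh"  // hypothetical include path for CUDANet::Layers::Concat

int main() {
    const int sizeA = 4, sizeB = 6;  // hypothetical input sizes

    // Fill two host vectors and copy them into device input buffers.
    std::vector<float> hostA(sizeA, 1.0f), hostB(sizeB, 2.0f);
    float *d_A = nullptr, *d_B = nullptr;
    cudaMalloc((void**)&d_A, sizeof(float) * sizeA);
    cudaMalloc((void**)&d_B, sizeof(float) * sizeB);
    cudaMemcpy(d_A, hostA.data(), sizeof(float) * sizeA, cudaMemcpyHostToDevice);
    cudaMemcpy(d_B, hostB.data(), sizeof(float) * sizeB, cudaMemcpyHostToDevice);

    // Concatenate on the device; the output buffer is owned by the layer.
    CUDANet::Layers::Concat concat(sizeA, sizeB);
    float* d_out = concat.forward(d_A, d_B);

    // The new getter gives the combined size, so the caller no longer has to
    // track sizeA + sizeB separately when copying the result back.
    std::vector<float> hostOut(concat.getOutputSize());
    cudaMemcpy(hostOut.data(), d_out, sizeof(float) * hostOut.size(),
               cudaMemcpyDeviceToHost);

    cudaFree(d_A);
    cudaFree(d_B);
    return 0;
}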