From d9c6c663c8bb023c45508135e41e272db5618d3b Mon Sep 17 00:00:00 2001
From: LordMathis
Date: Mon, 18 Mar 2024 20:36:52 +0100
Subject: [PATCH] Rename ILayer to WeightedLayer

---
 include/layers/concat.cuh                         | 8 ++++----
 include/layers/conv2d.cuh                         | 4 ++--
 include/layers/dense.cuh                          | 4 ++--
 include/layers/input.cuh                          | 2 --
 include/layers/{ilayer.cuh => weighted_layer.cuh} | 4 ++--
 5 files changed, 10 insertions(+), 12 deletions(-)
 rename include/layers/{ilayer.cuh => weighted_layer.cuh} (96%)

diff --git a/include/layers/concat.cuh b/include/layers/concat.cuh
index 4b5513f..71e0fe5 100644
--- a/include/layers/concat.cuh
+++ b/include/layers/concat.cuh
@@ -1,8 +1,6 @@
 #ifndef CUDANET_CONCAT_LAYER_H
 #define CUDANET_CONCAT_LAYER_H
 
-#include
-
 namespace CUDANet::Layers {
 
 /**
@@ -14,7 +12,8 @@ class Concat {
     /**
      * @brief Create a new Concat layer
      *
-     * @param layers Layers to concatenate
+     * @param inputASize Size of the first input
+     * @param inputBSize Size of the second input
      */
     Concat(const unsigned int inputASize, const unsigned int inputBSize);
 
@@ -25,10 +24,11 @@ class Concat {
     ~Concat();
 
     /**
-     * @brief Forward pass of the concat layer
+     * @brief Concatenates the two inputs
      *
      * @param d_input_A Device pointer to the first input
      * @param d_input_B Device pointer to the second input
+     *
      * @return Device pointer to the output
      */
     float* forward(const float* d_input_A, const float* d_input_B);
diff --git a/include/layers/conv2d.cuh b/include/layers/conv2d.cuh
index ab80e64..b74109c 100644
--- a/include/layers/conv2d.cuh
+++ b/include/layers/conv2d.cuh
@@ -6,7 +6,7 @@
 
 #include "activation.cuh"
 #include "convolution.cuh"
-#include "ilayer.cuh"
+#include "weighted_layer.cuh"
 
 namespace CUDANet::Layers {
 
@@ -14,7 +14,7 @@
  * @brief 2D convolutional layer
  *
  */
-class Conv2d : public ILayer {
+class Conv2d : public WeightedLayer {
   public:
     /**
      * @brief Construct a new Conv 2d layer
diff --git a/include/layers/dense.cuh b/include/layers/dense.cuh
index b8fb207..7bb4a71 100644
--- a/include/layers/dense.cuh
+++ b/include/layers/dense.cuh
@@ -5,7 +5,7 @@
 
 #include
 #include
-#include "ilayer.cuh"
+#include "weighted_layer.cuh"
 
 namespace CUDANet::Layers {
 
@@ -13,7 +13,7 @@
  * @brief Dense (fully connected) layer
  *
  */
-class Dense : public ILayer {
+class Dense : public WeightedLayer {
  public:
     /**
      * @brief Construct a new Dense layer
diff --git a/include/layers/input.cuh b/include/layers/input.cuh
index fda2722..33fdb1d 100644
--- a/include/layers/input.cuh
+++ b/include/layers/input.cuh
@@ -1,8 +1,6 @@
 #ifndef CUDANET_INPUT_LAYER_H
 #define CUDANET_INPUT_LAYER_H
 
-#include
-
 namespace CUDANet::Layers {
 
 /**
diff --git a/include/layers/ilayer.cuh b/include/layers/weighted_layer.cuh
similarity index 96%
rename from include/layers/ilayer.cuh
rename to include/layers/weighted_layer.cuh
index 3ab2109..f538dad 100644
--- a/include/layers/ilayer.cuh
+++ b/include/layers/weighted_layer.cuh
@@ -18,13 +18,13 @@ enum Padding { SAME, VALID };
 /**
  * @brief Base class for all layers
  */
-class ILayer {
+class WeightedLayer {
   public:
     /**
      * @brief Destroy the ILayer object
      *
      */
-    virtual ~ILayer() {}
+    virtual ~WeightedLayer() {}
 
     /**
      * @brief Virtual function for forward pass
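
Note, not part of the patch: after this rename a concrete layer only swaps the base
class name and the include. The sketch below is illustrative; the diff shows only the
virtual destructor and mentions a virtual forward pass, so the exact forward() signature
and the Identity layer itself are assumptions, not confirmed CUDANet API.

// Hypothetical layer built on the renamed base class; assumes the base declares
// something like: virtual float* forward(const float* d_input) = 0;
#include "weighted_layer.cuh"

namespace CUDANet::Layers {

class Identity : public WeightedLayer {
  public:
    // Size is kept only to mirror how other layers take their dimensions
    explicit Identity(unsigned int size) : size(size) {}
    ~Identity() override = default;

    // Forward pass that simply hands the input back (illustration only)
    float* forward(const float* d_input) override {
        return const_cast<float*>(d_input);
    }

  private:
    unsigned int size;
};

}  // namespace CUDANet::Layers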
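
The updated Concat doc comments also pin down its two-input API: construct the layer with
the two input sizes, then forward() takes two device pointers and returns a device pointer
to the concatenated output. A usage sketch under stated assumptions (the concat.cuh include
name and all host/device buffer handling are mine; only the constructor and forward()
signatures come from this patch):

#include <cuda_runtime.h>
#include <vector>

#include "concat.cuh"

int main() {
    const unsigned int sizeA = 64;
    const unsigned int sizeB = 32;
    std::vector<float> a(sizeA, 1.0f), b(sizeB, 2.0f);

    // Copy both inputs to the device
    float *d_a = nullptr, *d_b = nullptr;
    cudaMalloc((void**)&d_a, sizeA * sizeof(float));
    cudaMalloc((void**)&d_b, sizeB * sizeof(float));
    cudaMemcpy(d_a, a.data(), sizeA * sizeof(float), cudaMemcpyHostToDevice);
    cudaMemcpy(d_b, b.data(), sizeB * sizeof(float), cudaMemcpyHostToDevice);

    // Concatenate: the result is a device pointer of length sizeA + sizeB
    // (assumed to be owned by the layer, so it is not freed here)
    CUDANet::Layers::Concat concat(sizeA, sizeB);
    float* d_out = concat.forward(d_a, d_b);

    // Copy the concatenated result back to the host
    std::vector<float> out(sizeA + sizeB);
    cudaMemcpy(out.data(), d_out, out.size() * sizeof(float), cudaMemcpyDeviceToHost);

    cudaFree(d_a);
    cudaFree(d_b);
    return 0;
}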