Mirror of https://github.com/lordmathis/CUDANet.git, last synced 2025-11-06 01:34:22 +00:00.
Add documentation comments
This commit is contained in:
@@ -9,17 +9,47 @@
|
||||
|
||||
namespace Layers {
|
||||
|
||||
/**
|
||||
* @brief Dense (fully connected) layer
|
||||
*
|
||||
*/
|
||||
class Dense : public ILayer {
|
||||
public:
|
||||
Dense(
|
||||
int inputSize,
|
||||
int outputSize,
|
||||
Layers::Activation activation
|
||||
);
|
||||
/**
|
||||
* @brief Construct a new Dense layer
|
||||
*
|
||||
* @param inputSize Size of the input vector
|
||||
* @param outputSize Size of the output vector
|
||||
* @param activation Activation function ('RELU', 'SIGMOID' or 'NONE')
|
||||
*/
|
||||
Dense(int inputSize, int outputSize, Layers::Activation activation);
|
||||
|
||||
/**
|
||||
* @brief Destroy the Dense layer
|
||||
*
|
||||
*/
|
||||
~Dense();
|
||||
|
||||
/**
|
||||
* @brief Forward pass of the dense layer
|
||||
*
|
||||
* @param d_input Device pointer to the input vector
|
||||
* @return Device pointer to the output vector
|
||||
*/
|
||||
float* forward(const float* d_input);
|
||||
|
||||
/**
|
||||
* @brief Set the weights of the layer
|
||||
*
|
||||
* @param weights Pointer to vector of weights
|
||||
*/
|
||||
void setWeights(const float* weights);
|
||||
|
||||
/**
|
||||
* @brief Set the biases of the layer
|
||||
*
|
||||
* @param biases Pointer to vector of biases
|
||||
*/
|
||||
void setBiases(const float* biases);
|
||||
|
||||
private:
|
||||
@@ -36,8 +66,22 @@ class Dense : public ILayer {
|
||||
|
||||
Layers::Activation activation;
|
||||
|
||||
/**
|
||||
* @brief Initialize the weights to zeros
|
||||
*
|
||||
*/
|
||||
void initializeWeights();
|
||||
|
||||
/**
|
||||
* @brief Initialize the biases to zeros
|
||||
*
|
||||
*/
|
||||
void initializeBiases();
|
||||
|
||||
/**
|
||||
* @brief Copy the weights and biases to the device
|
||||
*
|
||||
*/
|
||||
void toCuda();
|
||||
};
|
||||
|
||||
|
||||
Reference in New Issue
Block a user