Abstract activation and implement softmax

2024-03-17 18:37:15 +01:00
parent b1621819ca
commit 42d646750b
19 changed files with 370 additions and 205 deletions


@@ -0,0 +1,55 @@
#ifndef CUDANET_ACTIVATION_H
#define CUDANET_ACTIVATION_H

namespace CUDANet::Layers {

/**
 * @brief Activation functions
 *
 * SIGMOID: Sigmoid
 * RELU:    Rectified Linear Unit
 * SOFTMAX: Softmax
 * NONE:    No activation (identity)
 */
enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };

class Activation {
  public:
    Activation() = default;

    /**
     * @brief Construct a new Activation object
     *
     * @param activation Type of activation
     * @param length     Length of the input vector
     */
    Activation(ActivationType activation, const unsigned int length);

    /**
     * @brief Destroy the Activation object
     */
    ~Activation();

    /**
     * @brief Run the activation function on the input vector in place
     *
     * @param d_input Pointer to the input vector on the device
     */
    void activate(float* d_input);

  private:
    ActivationType activationType;
    unsigned int   length;
    unsigned int   gridSize;

    // Device scalar holding the softmax normalization sum
    float* d_softmax_sum;
};

}  // namespace CUDANet::Layers

#endif  // CUDANET_ACTIVATION_H
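The header only declares the interface; the kernels backing it live elsewhere in this commit. As a rough sketch of how `d_softmax_sum`, `length`, and `gridSize` could fit together in the SOFTMAX path (the kernel names, `BLOCK_SIZE`, and the atomicAdd-based reduction below are illustrative assumptions, not the commit's actual implementation):

```cpp
#include <cuda_runtime.h>

static constexpr unsigned int BLOCK_SIZE = 256;  // assumed launch block size

// Hypothetical kernel: exponentiate each element in place and
// atomically accumulate the total into the device scalar d_sum.
__global__ void softmax_exp_sum(float* d_input, float* d_sum, unsigned int length) {
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < length) {
        float e = expf(d_input[i]);
        d_input[i] = e;
        atomicAdd(d_sum, e);  // simple; a block-level reduction would scale better
    }
}

// Hypothetical kernel: normalize each exponentiated element by the sum.
__global__ void softmax_div(float* d_input, const float* d_sum, unsigned int length) {
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < length) {
        d_input[i] /= *d_sum;
    }
}

// Roughly what the SOFTMAX branch of Activation::activate might do,
// using the gridSize and d_softmax_sum members declared above.
void runSoftmax(float* d_input, float* d_softmax_sum, unsigned int length, unsigned int gridSize) {
    cudaMemset(d_softmax_sum, 0, sizeof(float));
    softmax_exp_sum<<<gridSize, BLOCK_SIZE>>>(d_input, d_softmax_sum, length);
    softmax_div<<<gridSize, BLOCK_SIZE>>>(d_input, d_softmax_sum, length);
    cudaDeviceSynchronize();
}
```

Note that this naive form skips the usual max-subtraction step: subtracting the input's maximum before exponentiating keeps `expf` from overflowing on large logits, at the cost of one extra reduction pass.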