Abstract activation and implement softmax

2024-03-17 18:37:15 +01:00
parent b1621819ca
commit 42d646750b
19 changed files with 370 additions and 205 deletions

@@ -6,15 +6,6 @@
 namespace CUDANet::Layers {
-/**
- * @brief Activation functions
- *
- * SIGMOID: Sigmoid
- * RELU: Rectified Linear Unit
- *
- */
-enum Activation { SIGMOID, RELU, NONE };
 /**
  * @brief Padding types
  *
@@ -85,7 +76,6 @@ class ILayer {
     std::vector<float> weights;
     std::vector<float> biases;
-    Layers::Activation activation;
 };
 } // namespace CUDANet::Layers
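
Judging from the commit title and the removals above, the Activation enum presumably moves out of the ILayer header into a dedicated activation abstraction that also gains a softmax option, so ILayer no longer stores a Layers::Activation member. The following is a minimal sketch of what such an abstraction and a naive softmax pass might look like; the identifier names (ActivationType, softmaxExpKernel, softmaxScaleKernel), the host-side reduction, and the launch configuration are assumptions for illustration, not the commit's actual code.

// Minimal sketch, not the commit's actual code: a relocated activation enum
// with a SOFTMAX member and a naive two-pass softmax over a flat device buffer.
#include <cuda_runtime.h>
#include <cmath>
#include <vector>

namespace CUDANet::Layers {

// Hypothetical relocated enum; SOFTMAX is assumed to be the new member.
enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };

// Pass 1: exponentiate every element in place (softmax numerator).
// Subtracting the per-vector max before expf would improve numerical
// stability; it is omitted here to keep the sketch short.
__global__ void softmaxExpKernel(float* d_data, unsigned int len) {
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) {
        d_data[i] = expf(d_data[i]);
    }
}

// Pass 2: scale every element by 1 / sum (softmax denominator).
__global__ void softmaxScaleKernel(float* d_data, float invSum, unsigned int len) {
    unsigned int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) {
        d_data[i] *= invSum;
    }
}

// Host-side driver: exp on the device, reduce on the host for simplicity,
// then scale on the device. A real layer would reduce on the device instead.
inline void softmax(float* d_data, unsigned int len) {
    const unsigned int block = 256;
    const unsigned int grid  = (len + block - 1) / block;

    softmaxExpKernel<<<grid, block>>>(d_data, len);

    std::vector<float> h(len);
    cudaMemcpy(h.data(), d_data, len * sizeof(float), cudaMemcpyDeviceToHost);
    float sum = 0.0f;
    for (float v : h) sum += v;

    softmaxScaleKernel<<<grid, block>>>(d_data, 1.0f / sum, len);
    cudaDeviceSynchronize();
}

} // namespace CUDANet::Layers

In the library itself this logic would presumably live behind the new activation abstraction rather than a free function, which is consistent with the Layers::Activation member being dropped from ILayer in the second hunk.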