Mirror of https://github.com/lordmathis/CUDANet.git
Abstract activation and implement softmax
@@ -6,15 +6,6 @@
 
 namespace CUDANet::Layers {
 
-/**
- * @brief Activation functions
- *
- * SIGMOID: Sigmoid
- * RELU: Rectified Linear Unit
- *
- */
-enum Activation { SIGMOID, RELU, NONE };
-
 /**
  * @brief Padding types
  *
@@ -85,7 +76,6 @@ class ILayer {
     std::vector<float> weights;
     std::vector<float> biases;
 
-    Layers::Activation activation;
 };
 
 } // namespace CUDANet::Layers
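
The hunks above remove the Activation enum and the per-layer activation member from the ILayer header, consistent with the commit title: activation becomes its own abstraction that also gains softmax. Below is a minimal host-side C++ sketch of what such an abstraction could look like; the class name, the SOFTMAX enumerator, and the activate signature are illustrative assumptions rather than the actual CUDANet API, which runs these operations as CUDA kernels.

#include <algorithm>
#include <cmath>
#include <vector>

// Illustrative sketch only: an activation object selected by type,
// applied in place to a host-side vector of floats.
enum class ActivationType { SIGMOID, RELU, SOFTMAX, NONE };

class Activation {
  public:
    explicit Activation(ActivationType type) : type(type) {}

    // Apply the selected activation in place.
    void activate(std::vector<float>& x) const {
        if (x.empty()) return;
        switch (type) {
            case ActivationType::SIGMOID:
                for (float& v : x) v = 1.0f / (1.0f + std::exp(-v));
                break;
            case ActivationType::RELU:
                for (float& v : x) v = std::max(v, 0.0f);
                break;
            case ActivationType::SOFTMAX: {
                // Shift by the max for numerical stability, exponentiate,
                // then normalize so the outputs sum to one.
                const float maxVal = *std::max_element(x.begin(), x.end());
                float sum = 0.0f;
                for (float& v : x) {
                    v = std::exp(v - maxVal);
                    sum += v;
                }
                for (float& v : x) v /= sum;
                break;
            }
            case ActivationType::NONE:
                break;
        }
    }

  private:
    ActivationType type;
};

With this shape, a layer holds an activation object (or none) instead of switching on an enum itself, which is the kind of decoupling the commit title suggests.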