Mirror of https://github.com/lordmathis/CUDANet.git (synced 2025-11-06 17:54:27 +00:00)
Abstract activation and implement softmax
include/kernels/activation_functions.cuh (new file, 74 lines)
@@ -0,0 +1,74 @@
+#ifndef CUDANET_ACTIVATION_FUNCTIONS_H
+#define CUDANET_ACTIVATION_FUNCTIONS_H
+
+namespace CUDANet::Kernels {
+
+/**
+ * @brief Sigmoid activation function kernel
+ *
+ * @param src Pointer to the source array
+ * @param dst Pointer to the destination array
+ * @param len Length of the arrays
+ */
+__global__ void sigmoid(
+    const float* __restrict__ src,
+    float* __restrict__ dst,
+    const unsigned int len
+);
+
+/**
+ * @brief Relu activation function kernel
+ *
+ * @param src Pointer to the source array
+ * @param dst Pointer to the destination array
+ * @param len Length of the arrays
+ */
+__global__ void relu(
+    const float* __restrict__ src,
+    float* __restrict__ dst,
+    const unsigned int len
+);
+
+/**
+ * @brief Softmax activation exponentiation kernel
+ *
+ * @param src Pointer to the source array
+ * @param dst Pointer to the destination array
+ * @param len Length of the arrays
+ */
+__global__ void softmax_exp(
+    const float* __restrict__ src,
+    float* __restrict__ dst,
+    const unsigned int len
+);
+
+/**
+ * @brief Softmax activation sum reduction kernel
+ *
+ * @param d_vector Device pointer to vector
+ * @param d_output Device pointer to output vector
+ * @param w Length of the vector
+ */
+__global__ void softmax_sum(
+    const float* __restrict__ d_vector,
+    float* __restrict__ d_output,
+    const unsigned int w
+);
+
+/**
+ * @brief Softmax activation division kernel (dst = src / *sum)
+ *
+ * @param src Pointer to the source array
+ * @param dst Pointer to the destination array
+ * @param len Length of the arrays
+ */
+__global__ void softmax_div(
+    const float* __restrict__ src,
+    float* __restrict__ dst,
+    const float* __restrict__ sum,
+    const unsigned int len
+);
+
+} // namespace CUDANet::Kernels
+
+#endif // CUDANET_ACTIVATION_FUNCTIONS_H
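The softmax is decomposed into three kernels: softmax_exp exponentiates the input, softmax_sum reduces the exponentials to a denominator, and softmax_div normalizes. A minimal host-side sketch of how a caller might chain them, given only the declarations above (launch dimensions, the scratch buffers, and the single-block reduction are assumptions; the real orchestration lives in the new Layers::Activation class):

#include <cuda_runtime.h>
#include "kernels/activation_functions.cuh"

// Hypothetical driver: applies softmax in place to d_vec of length len.
// d_tmp and d_sum are caller-provided device scratch buffers.
void softmax(float* d_vec, float* d_tmp, float* d_sum, unsigned int len) {
    const unsigned int block = 256;
    const unsigned int grid  = (len + block - 1) / block;

    // 1. tmp[i] = exp(vec[i])
    CUDANet::Kernels::softmax_exp<<<grid, block>>>(d_vec, d_tmp, len);

    // 2. Reduce the exponentials to a single sum (assumed here to fit in
    //    one block; larger inputs would need a second pass over the
    //    per-block partial sums)
    CUDANet::Kernels::softmax_sum<<<1, block>>>(d_tmp, d_sum, len);

    // 3. vec[i] = tmp[i] / sum
    CUDANet::Kernels::softmax_div<<<grid, block>>>(d_tmp, d_vec, d_sum, len);
    cudaDeviceSynchronize();
}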
@@ -1,28 +0,0 @@
-#ifndef CUDANET_ACTIVATIONS_H
-#define CUDANET_ACTIVATIONS_H
-
-namespace CUDANet::Kernels {
-
-/**
- * @brief Sigmoid activation function kernel
- *
- * @param src Pointer to the source array
- * @param dst Pointer to the destination array
- * @param len Length of the arrays
- */
-__global__ void
-sigmoid(const float* __restrict__ src, float* __restrict__ dst, int len);
-
-/**
- * @brief Relu activation function kernel
- *
- * @param src Pointer to the source array
- * @param dst Pointer to the destination array
- * @param len Length of the arrays
- */
-__global__ void
-relu(const float* __restrict__ src, float* __restrict__ dst, int len);
-
-} // namespace CUDANet::Kernels
-
-#endif // CUDANET_ACTIVATIONS_H
@@ -35,19 +35,6 @@ __global__ void vec_vec_add(
     const unsigned int w
 );
 
-/**
- * @brief
- *
- * @param d_vector Device pointer to vector
- * @param d_output Device pointer to output vector
- * @param w Length of the vector
- */
-__global__ void reduce_sum(
-    const float* __restrict__ d_vector,
-    float* __restrict__ d_output,
-    const unsigned int w
-);
-
 } // namespace CUDANet::Kernels
 
 #endif // CUDANET_MATMUL_H
include/layers/activation.cuh (new file, 55 lines)
@@ -0,0 +1,55 @@
+#ifndef CUDANET_ACTIVATION_H
+#define CUDANET_ACTIVATION_H
+
+namespace CUDANet::Layers {
+
+/**
+ * @brief Activation functions
+ *
+ * SIGMOID: Sigmoid
+ * RELU: Rectified Linear Unit
+ * SOFTMAX: Softmax
+ * NONE: No activation
+ */
+enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };
+
+class Activation {
+  public:
+
+    Activation() = default;
+
+    /**
+     * @brief Construct a new Activation object
+     *
+     * @param activation Type of activation
+     * @param length Length of the input
+     */
+    Activation(ActivationType activation, const unsigned int length);
+
+    /**
+     * @brief Destroy the Activation object
+     *
+     */
+    ~Activation();
+
+    /**
+     * @brief Run the activation function on the input
+     *
+     * @param d_input Pointer to the input vector on the device
+     */
+    void activate(float* d_input);
+
+
+  private:
+    ActivationType activationType;
+    unsigned int length;
+    unsigned int gridSize;
+
+    float* d_softmax_sum;
+
+};
+
+
+} // namespace CUDANet::Layers
+
+#endif // CUDANET_ACTIVATION_H
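For orientation, a minimal usage sketch of the layer declared above (the include path, buffer setup, and values are illustrative, not from the repository):

#include <cuda_runtime.h>
#include "layers/activation.cuh"

int main() {
    const unsigned int len = 4;
    float h_logits[len] = {1.0f, 2.0f, 3.0f, 4.0f};

    float* d_logits;
    cudaMalloc(&d_logits, len * sizeof(float));
    cudaMemcpy(d_logits, h_logits, len * sizeof(float), cudaMemcpyHostToDevice);

    // The layer owns its softmax scratch buffer (d_softmax_sum) and
    // derives gridSize from the input length
    CUDANet::Layers::Activation softmax(CUDANet::Layers::SOFTMAX, len);
    softmax.activate(d_logits);  // in place: d_logits now holds probabilities

    cudaMemcpy(h_logits, d_logits, len * sizeof(float), cudaMemcpyDeviceToHost);
    cudaFree(d_logits);
    return 0;
}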
@@ -4,7 +4,7 @@
 #include <string>
 #include <vector>
 
-#include "activations.cuh"
+#include "activation.cuh"
 #include "convolution.cuh"
 #include "ilayer.cuh"
 
@@ -23,18 +23,18 @@ class Conv2d : public ILayer {
      * @param inputChannels Number of channels in the input matrix
      * @param kernelSize Width and height of the convolution kernel
      * @param stride Convolution stride
-     * @param padding Padding type ('SAME' or 'VALID')
      * @param numFilters Number of output filters
-     * @param activation Activation function ('RELU', 'SIGMOID' or 'NONE')
+     * @param padding Padding type ('SAME' or 'VALID')
+     * @param activationType Activation function type ('RELU', 'SIGMOID', 'SOFTMAX' or 'NONE')
      */
     Conv2d(
-        int inputSize,
-        int inputChannels,
-        int kernelSize,
-        int stride,
-        Layers::Padding padding,
-        int numFilters,
-        Layers::Activation activation
+        int inputSize,
+        int inputChannels,
+        int kernelSize,
+        int stride,
+        int numFilters,
+        Layers::Padding padding,
+        Layers::ActivationType activationType
     );
 
     /**
@@ -67,17 +67,21 @@ class Conv2d : public ILayer {
 
     /**
      * @brief Get the output width (/ height) of the layer
-     *
-     * @return int
+     *
+     * @return int
      */
-    int getOutputSize() { return outputSize; }
+    int getOutputSize() {
+        return outputSize;
+    }
 
     /**
      * @brief Get the padding size of the layer
-     *
-     * @return int
+     *
+     * @return int
      */
-    int getPaddingSize() { return paddingSize; }
+    int getPaddingSize() {
+        return paddingSize;
+    }
 
   private:
     // Inputs
@@ -20,9 +20,9 @@ class Dense : public ILayer {
      *
      * @param inputSize Size of the input vector
      * @param outputSize Size of the output vector
-     * @param activation Activation function ('RELU', 'SIGMOID' or 'NONE')
+     * @param activationType Activation function type ('RELU', 'SIGMOID', 'SOFTMAX' or 'NONE')
      */
-    Dense(int inputSize, int outputSize, Layers::Activation activation);
+    Dense(int inputSize, int outputSize, Layers::ActivationType activationType);
 
     /**
      * @brief Destroy the Dense layer
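Call sites change accordingly: numFilters now precedes padding in Conv2d, and both layers take the new Layers::ActivationType enum in place of the removed Layers::Activation. A sketch under assumed header paths and Padding enumerator spelling (layer sizes are illustrative):

#include "layers/conv2d.cuh"
#include "layers/dense.cuh"

// 28x28 single-channel input, 3x3 kernel, stride 1,
// 32 filters, SAME padding, ReLU activation
CUDANet::Layers::Conv2d conv(
    28, 1, 3, 1, 32, CUDANet::Layers::SAME, CUDANet::Layers::RELU
);

// 128 -> 10 fully connected layer using the new SOFTMAX option
CUDANet::Layers::Dense fc(128, 10, CUDANet::Layers::SOFTMAX);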
@@ -6,15 +6,6 @@
 
 namespace CUDANet::Layers {
 
-/**
- * @brief Activation functions
- *
- * SIGMOID: Sigmoid
- * RELU: Rectified Linear Unit
- *
- */
-enum Activation { SIGMOID, RELU, NONE };
-
 /**
  * @brief Padding types
  *
@@ -85,7 +76,6 @@ class ILayer {
     std::vector<float> weights;
     std::vector<float> biases;
 
-    Layers::Activation activation;
 };
 
 } // namespace CUDANet::Layers