Mirror of https://github.com/lordmathis/CUDANet.git, synced 2025-11-06 01:34:22 +00:00
Add activations enum
@@ -10,4 +10,10 @@ relu_kernel(const float* __restrict__ src, float* __restrict__ dst, int len);
 __global__ void
 linear_kernel(const float* __restrict__ src, float* __restrict__ dst, int len);
 
+enum Activation {
+    SIGMOID,
+    RELU,
+    LINEAR
+};
+
 #endif // ACTIVATIONS_H
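The new Activation enum gives the element-wise activation kernels declared in this header a typed selector. Only the declarations live in the header; as a rough sketch of what one such kernel plausibly looks like (the body below is an assumption for illustration, not code from this commit), each thread handles one element of the array:

    __global__ void
    relu_kernel(const float* __restrict__ src, float* __restrict__ dst, int len) {
        // One thread per element; the caller sizes the grid to cover len.
        int i = blockDim.x * blockIdx.x + threadIdx.x;
        if (i < len) {
            dst[i] = fmaxf(src[i], 0.0f);  // ReLU: clamp negatives to zero
        }
    }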
@@ -16,7 +16,7 @@ class Dense : public ILayer {
     Dense(
         int inputSize,
         int outputSize,
-        std::string activation,
+        Activation activation,
         cublasHandle_t cublasHandle
     );
     ~Dense();
@@ -37,7 +37,7 @@ class Dense : public ILayer {
     std::vector<float> weights;
     std::vector<float> biases;
 
-    std::string activation;
+    Activation activation;
 
     void initializeWeights();
     void initializeBiases();
@@ -13,7 +13,7 @@
 Layers::Dense::Dense(
     int inputSize,
     int outputSize,
-    std::string activation,
+    Activation activation,
     cublasHandle_t cublasHandle
 )
     : inputSize(inputSize),
@@ -68,18 +68,24 @@ void Layers::Dense::forward(const float* d_input, float* d_output) {
     int threadsPerBlock = 256;
     int blocksPerGrid = (outputSize + threadsPerBlock - 1) / threadsPerBlock;
 
-    if (activation == "sigmoid") {
-        sigmoid_kernel<<<blocksPerGrid, threadsPerBlock>>>(
-            d_output, d_output, outputSize
-        );
-    } else if (activation == "relu") {
-        relu_kernel<<<blocksPerGrid, threadsPerBlock>>>(
-            d_output, d_output, outputSize
-        );
-    } else {
-        linear_kernel<<<blocksPerGrid, threadsPerBlock>>>(
-            d_output, d_output, outputSize
-        );
+    switch (activation) {
+        case SIGMOID:
+            sigmoid_kernel<<<blocksPerGrid, threadsPerBlock>>>(
+                d_output, d_output, outputSize
+            );
+            break;
+
+        case RELU:
+            relu_kernel<<<blocksPerGrid, threadsPerBlock>>>(
+                d_output, d_output, outputSize
+            );
+            break;
+
+        default:
+            linear_kernel<<<blocksPerGrid, threadsPerBlock>>>(
+                d_output, d_output, outputSize
+            );
+            break;
     }
 
     CUDA_CHECK(cudaDeviceSynchronize());
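Dispatching on the enum replaces per-call string comparisons with a compile-time-checked switch: a misspelled name like "reLu" no longer compiles, instead of silently falling through to the linear branch as it did with strings. A minimal usage sketch of the updated constructor, modeled on the tests below (d_input and d_output are assumed to be device buffers already allocated to the layer's sizes):

    cublasHandle_t cublasHandle;
    cublasCreate(&cublasHandle);

    Layers::Dense dense(4, 4, RELU, cublasHandle);  // was: "relu"
    dense.forward(d_input, d_output);               // the switch launches relu_kernel

    cublasDestroy(cublasHandle);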
@@ -17,7 +17,7 @@ class DenseLayerTest : public CublasTestFixture {
        std::vector<float>& biases,
        float*& d_input,
        float*& d_output,
-       std::string activation
+       Activation activation
    ) {
        // Create Dense layer
        Layers::Dense denseLayer(
@@ -63,7 +63,7 @@ TEST_F(DenseLayerTest, Init) {
        // std::cout << "Dense layer: input size = " << inputSize << ",
        // output size = " << outputSize << std::endl;
        Layers::Dense denseLayer(
-           inputSize, outputSize, "sigmoid", cublasHandle
+           inputSize, outputSize, SIGMOID, cublasHandle
        );
    }
 }
@@ -81,7 +81,7 @@ TEST_F(DenseLayerTest, setWeights) {
        {1.3f, 0.5f, 0.0f, 1.7f}
    };
 
-   Layers::Dense denseLayer(inputSize, outputSize, "sigmoid", cublasHandle);
+   Layers::Dense denseLayer(inputSize, outputSize, SIGMOID, cublasHandle);
 
    denseLayer.setWeights(weights);
 }
@@ -108,8 +108,7 @@ TEST_F(DenseLayerTest, ForwardUnitWeightMatrixLinear) {
    float* d_output;
 
    Layers::Dense denseLayer = commonTestSetup(
-       inputSize, outputSize, input, weights, biases, d_input, d_output,
-       "linear"
+       inputSize, outputSize, input, weights, biases, d_input, d_output, LINEAR
    );
    denseLayer.forward(d_input, d_output);
 
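As the test name suggests, a unit (identity) weight matrix combined with LINEAR reduces the forward pass to output[i] = input[i] + biases[i], so the activation launch should leave the GEMV result unchanged. Illustrative values (assumed for exposition, not the fixture's actual data):

    // With W = I and LINEAR activation: output = input + biases
    std::vector<float> input    = {1.0f, 2.0f, 3.0f};
    std::vector<float> biases   = {0.5f, 0.5f, 0.5f};
    std::vector<float> expected = {1.5f, 2.5f, 3.5f};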
@@ -145,7 +144,7 @@ TEST_F(DenseLayerTest, ForwardRandomWeightMatrixRelu) {
    float* d_output;
 
    Layers::Dense denseLayer = commonTestSetup(
-       inputSize, outputSize, input, weights, biases, d_input, d_output, "relu"
+       inputSize, outputSize, input, weights, biases, d_input, d_output, RELU
    );
 
    denseLayer.forward(d_input, d_output);
@@ -188,7 +187,7 @@ TEST_F(DenseLayerTest, ForwardRandomWeightMatrixSigmoid) {
 
    Layers::Dense denseLayer = commonTestSetup(
        inputSize, outputSize, input, weights, biases, d_input, d_output,
-       "sigmoid"
+       SIGMOID
    );
 
    denseLayer.forward(d_input, d_output);