Refactor layers

commit b6c4b7d2ae
parent 8d14b74f66
2024-03-19 21:35:05 +01:00
12 changed files with 87 additions and 67 deletions


@@ -3,9 +3,9 @@
 #include "cuda_helper.cuh"
 #include "activation_functions.cuh"
 
-using namespace CUDANet;
+using namespace CUDANet::Layers;
 
-Layers::Activation::Activation(ActivationType activation, const unsigned int length)
+Activation::Activation(ActivationType activation, const unsigned int length)
     : activationType(activation), length(length) {
     if (activationType == SOFTMAX) {
@@ -16,13 +16,13 @@ Layers::Activation::Activation(ActivationType activation, const unsigned int len
     gridSize = (length + BLOCK_SIZE - 1) / BLOCK_SIZE;
 }
 
-Layers::Activation::~Activation() {
+Activation::~Activation() {
     if (activationType == SOFTMAX) {
         cudaFree(d_softmax_sum);
     }
 }
 
-void Layers::Activation::activate(float* __restrict__ d_input) {
+void Activation::activate(float* __restrict__ d_input) {
     switch (activationType) {
         case SIGMOID:
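
Why the shorter definitions still compile: C++ allows an out-of-line member definition to use an unqualified class name once the class's enclosing namespace has been pulled in with a using-directive, so swapping `using namespace CUDANet;` for `using namespace CUDANet::Layers;` makes the `Layers::` prefix redundant. A minimal sketch of the idiom; the `mynet`/`Scale` names are invented stand-ins, not CUDANet's actual headers:

// scale.cuh (hypothetical header)
#pragma once
#include <cuda_runtime.h>

namespace mynet::layers {

class Scale {
  public:
    explicit Scale(unsigned int length);
    ~Scale();

  private:
    unsigned int length;
    float*       d_buf;
};

}  // namespace mynet::layers

// scale.cu (hypothetical implementation file)
// Opening the class's own namespace lets every definition below drop
// the mynet::layers:: qualifier, mirroring the rename in this commit.
using namespace mynet::layers;

Scale::Scale(unsigned int length) : length(length), d_buf(nullptr) {
    cudaMalloc(&d_buf, sizeof(float) * length);  // device buffer, released in ~Scale
}

Scale::~Scale() {
    cudaFree(d_buf);
}

The usual trade-off applies: the using-directive brings every name in the namespace into scope, which is reasonable inside a single implementation file like this but would be risky in a header.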