diff --git a/include/layers/activation.hpp b/include/layers/activation.hpp
index 49787cf..b1aa3d7 100644
--- a/include/layers/activation.hpp
+++ b/include/layers/activation.hpp
@@ -30,7 +30,7 @@ class Activation {
      * @param activation Type of activation
      * @param length Length of the input
      */
-    Activation(ActivationType activation, const int length);
+    Activation(CUDANet::Backend::IBackend* backend, ActivationType activation, const int length);
 
     /**
      * @brief Destroy the Activation object
diff --git a/src/layers/activation.cpp b/src/layers/activation.cpp
index 64f7590..892ce8f 100644
--- a/src/layers/activation.cpp
+++ b/src/layers/activation.cpp
@@ -6,13 +6,13 @@
 
 using namespace CUDANet::Layers;
 
-Activation::Activation(ActivationType activation, const int length)
-    : activationType(activation), length(length) {
+Activation::Activation(CUDANet::Backend::IBackend* backend, ActivationType activation, const int length)
+    : backend(backend), activationType(activation), length(length) {
 
     if (activationType == SOFTMAX) {
-        softmax_sum = CUDANet::Backend::Tensor({static_cast<size_t>(length)}, CUDANet::Backend::DType::FLOAT32, nullptr);
-        tensor_max = CUDANet::Backend::Tensor({static_cast<size_t>(length)}, CUDANet::Backend::DType::FLOAT32, nullptr);
+        softmax_sum = CUDANet::Backend::Tensor({static_cast<size_t>(length)}, CUDANet::Backend::DType::FLOAT32, backend);
+        tensor_max = CUDANet::Backend::Tensor({static_cast<size_t>(length)}, CUDANet::Backend::DType::FLOAT32, backend);
     }
 }
 
@@ -23,10 +23,10 @@ void Activation::activate(CUDANet::Backend::Tensor input) {
             backend->sigmoid(input);
             break;
         case ActivationType::RELU:
-            /* code */
+            backend->relu(input);
            break;
         case ActivationType::SOFTMAX:
-            /* code */
+            backend->softmax(input, tensor_max, softmax_sum);
             break;
         default:
             break;
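
For context, a minimal sketch of how the updated constructor would be called, assuming some concrete IBackend implementation; the function name, include path, and length value below are placeholders, not part of this diff:

    // Hypothetical usage sketch: only the Activation / IBackend / Tensor
    // signatures come from the diff above; everything else is assumed.
    #include "layers/activation.hpp"   // assumed include path (include/ on the include path)

    using namespace CUDANet;

    void apply_relu(Backend::IBackend* backend, Backend::Tensor input) {
        // Length should match the number of elements in the tensor being activated.
        Layers::Activation relu(backend, Layers::ActivationType::RELU, /*length=*/1024);
        relu.activate(input);  // dispatches to backend->relu(input)
    }

For SOFTMAX the constructor now also allocates its softmax_sum / tensor_max scratch tensors through the same backend instead of nullptr, so the backend pointer presumably has to outlive the layer.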