Mirror of https://github.com/lordmathis/CUDANet.git (synced 2025-12-22 22:34:22 +00:00)
WIP Refactor Layer and Activation classes
layer.hpp:

@@ -26,9 +26,9 @@ class Layer {
 
     virtual CUDANet::Shape output_shape() = 0;
 
-    virtual int input_size() = 0;
+    virtual size_t input_size() = 0;
 
-    virtual int output_size() = 0;
+    virtual size_t output_size() = 0;
 
     virtual void set_weights(CUDANet::Tensor &input) = 0;
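For orientation, the sketch below shows what the full Layer interface plausibly looks like once this hunk is applied. Only output_shape(), input_size(), output_size() and set_weights() come from the diff itself; the remaining pure virtuals, the virtual destructor, and the include are inferred from the overrides Activation declares further down, so treat them as assumptions rather than the actual header.

#pragma once

#include "tensor.hpp"  // assumed to provide CUDANet::Tensor and CUDANet::Shape

namespace CUDANet::Layers {

// Sketch of the refactored Layer interface (not the real header).
class Layer {
  public:
    virtual ~Layer() = default;                                     // assumed

    virtual CUDANet::Tensor& forward(CUDANet::Tensor &input) = 0;   // inferred from Activation
    virtual CUDANet::Shape   input_shape()  = 0;                    // inferred from Activation
    virtual CUDANet::Shape   output_shape() = 0;                    // shown in the hunk
    virtual size_t           input_size()   = 0;                    // int -> size_t in this commit
    virtual size_t           output_size()  = 0;                    // int -> size_t in this commit

    virtual void             set_weights(CUDANet::Tensor &input) = 0;  // shown in the hunk
    virtual CUDANet::Tensor& get_weights() = 0;                        // inferred from Activation
    virtual void             set_biases(CUDANet::Tensor &input) = 0;   // inferred from Activation
    virtual CUDANet::Tensor& get_biases()  = 0;                        // inferred from Activation
};

}  // namespace CUDANet::Layers

The int-to-size_t change also keeps the layer sizes consistent with Tensor::size(), which already returns size_t in the tensor.hpp hunk below.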
activation.hpp:

@@ -1,8 +1,8 @@
 #pragma once
 
-#include "backend/tensor.hpp"
-#include "backend/backend.hpp"
-#include "layers/layer.hpp"
+#include "tensor.hpp"
+#include "backend.hpp"
+#include "layer.hpp"
 
 namespace CUDANet::Layers {
@@ -20,40 +20,41 @@ enum ActivationType { SIGMOID, RELU, SOFTMAX, NONE };
  * @brief Utility class that performs activation
  *
  */
-class Activation : Layer {
+class Activation : public Layer {
   public:
     Activation() = default;
 
     /**
      * @brief Construct a new Activation object
      *
      * @param activation Type of activation
      * @param length Length of the input
      */
-    Activation(CUDANet::Backend::IBackend* backend, ActivationType activation, const int length);
+    Activation(CUDANet::Backend* backend, ActivationType activation, const CUDANet::Shape &shape);
 
     /**
      * @brief Destroy the Activation object
      *
      */
-    ~Activation();
+    ~Activation() = default;
 
     /**
      * @brief Run the activation function on the input
      *
      * @param d_input Pointer to the input vector on the device
      */
-    void activate(CUDANet::Backend::Tensor input);
+    CUDANet::Tensor& forward(CUDANet::Tensor &input);
 
     CUDANet::Shape input_shape();
 
     CUDANet::Shape output_shape();
 
     size_t input_size();
 
     size_t output_size();
 
     void set_weights(CUDANet::Tensor &input);
 
     CUDANet::Tensor& get_weights();
 
     void set_biases(CUDANet::Tensor &input);
 
     CUDANet::Tensor& get_biases();
 
   private:
-    CUDANet::Backend::IBackend* backend;
+    CUDANet::Backend* backend;
     ActivationType activationType;
-    int length;
+    CUDANet::Shape shape;
 
-    CUDANet::Backend::Tensor softmax_sum;
-    CUDANet::Backend::Tensor tensor_max;
+    CUDANet::Tensor softmax_sum;
+    CUDANet::Tensor tensor_max;
 };
 
 } // namespace CUDANet::Layers
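A minimal usage sketch of the refactored Activation follows. The header names mirror the flattened includes in this commit, but how a concrete Backend* is obtained, the initializer-list construction of Shape, and the DType::FLOAT32 enumerator are assumptions for illustration only; none of them appear in this diff.

#include "activation.hpp"
#include "backend.hpp"
#include "tensor.hpp"

using namespace CUDANet;

// The caller supplies the backend; creating one is not covered by this diff.
void run_relu(Backend* backend) {
    // Assumed Shape syntax; the diff only shows that the constructor now
    // takes a const CUDANet::Shape& instead of an int length.
    Shape shape = {1, 1024};

    // Tensor constructor from the tensor.hpp hunk; DType::FLOAT32 is an
    // assumed enumerator name.
    Tensor input(shape, DType::FLOAT32, backend);

    // New-style construction: backend + activation type + shape.
    Layers::Activation relu(backend, Layers::RELU, shape);

    // forward() replaces the old activate() and returns a Tensor reference.
    Tensor& output = relu.forward(input);
    (void)output;
}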
tensor.hpp:

@@ -21,7 +21,7 @@ class Tensor
   public:
     Tensor() = default;
-    Tensor(Shape shape, DType dtype, CUDANet::Backend::IBackend* backend);
+    Tensor(Shape shape, DType dtype, CUDANet::Backend* backend);
     ~Tensor();
 
     size_t size() const;

@@ -40,7 +40,7 @@ private:
     size_t total_elms;
     size_t total_size;
 
-    CUDANet::Backend::IBackend* backend;
+    CUDANet::Backend* backend;
     void* d_ptr;
 };
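The private fields in this hunk suggest the usual split between element count (total_elms) and byte count (total_size). The sketch below spells out that relationship under stated assumptions: Shape treated as a vector of dimensions and a fixed 4-byte element width standing in for whatever DType encodes; neither is confirmed by the diff.

#include <cstddef>
#include <functional>
#include <numeric>
#include <vector>

// Stand-in for CUDANet::Shape; the real alias is not shown here (assumption).
using Shape = std::vector<size_t>;

// Assumed element width, e.g. a 32-bit float; the real value would come from DType.
constexpr size_t kDtypeSizeBytes = 4;

// total_elms: product of all dimensions in the shape.
size_t element_count(const Shape& shape) {
    return std::accumulate(shape.begin(), shape.end(), size_t{1},
                           std::multiplies<size_t>());
}

// total_size: element count times the element width; this is the number of
// bytes the backend would be asked to allocate behind d_ptr.
size_t byte_count(const Shape& shape) {
    return element_count(shape) * kDtypeSizeBytes;
}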