Mirror of https://github.com/lordmathis/CUDANet.git, synced 2025-11-05 17:34:21 +00:00.
Cleanup and refactor
@@ -6,10 +6,11 @@
 #include "activations.cuh"
 #include "padding.cuh"
+#include "ilayer.cuh"

 namespace Layers {

-class Conv2d {
+class Conv2d : public ILayer {
   public:
     Conv2d(
         int inputSize,
@@ -26,8 +27,8 @@ class Conv2d {
     int outputSize;

     void forward(const float* d_input, float* d_output);
-    void setKernels(const std::vector<float>& kernels_input);
-
+    void setWeights(const float* weights_input);
+    void setBiases(const float* biases_input);
     void host_conv(const float* input, float* output);

   private:
@@ -42,18 +43,18 @@ class Conv2d {
     int numFilters;

     // Kernels
-    std::vector<float> kernels;
+    std::vector<float> weights;
+    std::vector<float> biases;

     // Cuda
-    float* d_kernels;
+    float* d_weights;
+    float* d_biases;
     float* d_padded;

     // Kernels
     Activation activation;

-    void initializeKernels();
+    void initializeWeights();
+    void initializeBiases();
     void toCuda();
 };
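Since setKernels(const std::vector<float>&) is gone, callers now hand Conv2d one contiguous buffer of filter weights and a separate buffer of biases. A minimal host-side sketch of that call pattern, assuming a filter bank of numFilters kernels of size kernelSize x kernelSize over inputChannels channels and a row-major flattening; the layout the CUDA kernels actually expect, and the constructor arguments, are not visible in this diff:

#include <vector>

int main() {
    // Assumed shapes; in the real code these come from the Conv2d constructor arguments.
    const int inputChannels = 3;
    const int numFilters    = 16;
    const int kernelSize    = 3;

    // One contiguous buffer for all filter weights, one for the per-filter biases.
    std::vector<float> weights(numFilters * inputChannels * kernelSize * kernelSize, 0.01f);
    std::vector<float> biases(numFilters, 0.0f);

    // Hypothetical usage against the refactored header (constructor arguments elided):
    // Layers::Conv2d conv(/* inputSize, inputChannels, kernelSize, ... */);
    // conv.setWeights(weights.data());  // replaces the old setKernels(std::vector<float>)
    // conv.setBiases(biases.data());
    return 0;
}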
@@ -19,8 +19,8 @@ class Dense : public ILayer {
     ~Dense();

     void forward(const float* d_input, float* d_output);
-    void setWeights(const std::vector<std::vector<float>>& weights);
-    void setBiases(const std::vector<float>& biases);
+    void setWeights(const float* weights);
+    void setBiases(const float* biases);

   private:
     int inputSize;
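Dense callers that previously built a std::vector<std::vector<float>> for setWeights now need a single contiguous array. A small migration sketch, assuming row-major flattening (whether the layer expects row- or column-major order is not shown in this diff):

#include <vector>

// Flatten a 2-D weight matrix into one contiguous row-major buffer.
std::vector<float> flatten(const std::vector<std::vector<float>>& weights2d) {
    std::vector<float> flat;
    for (const auto& row : weights2d) {
        flat.insert(flat.end(), row.begin(), row.end());
    }
    return flat;
}

int main() {
    std::vector<std::vector<float>> weights2d = {{0.1f, 0.2f}, {0.3f, 0.4f}};
    std::vector<float> flat = flatten(weights2d);

    // Hypothetical usage against the refactored header:
    // Layers::Dense dense(/* inputSize, outputSize, ... */);
    // dense.setWeights(flat.data());
    return 0;
}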
@@ -10,9 +10,26 @@ class ILayer {
   public:
     virtual ~ILayer() {}

-    virtual void forward(const float* input, float* output) = 0;
-    virtual void setWeights(const std::vector<std::vector<float>>& weights) = 0;
-    virtual void setBiases(const std::vector<float>& biases) = 0;
+    virtual void forward(const float* input, float* output) = 0;
+    virtual void setWeights(const float* weights) = 0;
+    virtual void setBiases(const float* biases) = 0;
+
+  private:
+    virtual void initializeWeights() = 0;
+    virtual void initializeBiases() = 0;
+
+    virtual void toCuda() = 0;
+
+    int inputSize;
+    int outputSize;
+
+    float* d_weights;
+    float* d_biases;
+
+    std::vector<float> weights;
+    std::vector<float> biases;
+
+    Activation activation;
 };

 } // namespace Layers
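To make the expanded contract concrete, here is a hypothetical layer written against the public surface shown above. The class (PassThrough) is not part of CUDANet, and this trimmed interface omits the private pure virtuals and host/device members listed in the hunk:

#include <cstddef>
#include <vector>

// Trimmed copy of the public surface shown in the hunk above; the real
// ilayer.cuh also declares the private pure virtuals (initializeWeights,
// initializeBiases, toCuda) and the shared host/device members.
class ILayer {
  public:
    virtual ~ILayer() {}

    virtual void forward(const float* input, float* output) = 0;
    virtual void setWeights(const float* weights) = 0;
    virtual void setBiases(const float* biases) = 0;
};

// Hypothetical layer, not part of CUDANet: copies its input straight through.
class PassThrough : public ILayer {
  public:
    explicit PassThrough(std::size_t size) : size(size), weights(size), biases(size) {}

    void forward(const float* input, float* output) override {
        for (std::size_t i = 0; i < size; ++i) {
            output[i] = input[i];
        }
    }

    void setWeights(const float* w) override { weights.assign(w, w + size); }
    void setBiases(const float* b) override { biases.assign(b, b + size); }

  private:
    std::size_t size;
    std::vector<float> weights;
    std::vector<float> biases;
};

Presumably the point of the raw const float* setters is that Conv2d, Dense, and any future layer now share one setter signature, leaving the parameter layout to the caller.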