Initial activations implementation

2024-02-27 00:24:57 +01:00
parent 6e99525ad0
commit 5e1e0ed1d1
9 changed files with 104 additions and 24 deletions

@@ -0,0 +1,14 @@
+#include <functional>
+#ifndef ACTIVATIONS_H
+#define ACTIVATIONS_H
+__device__ float sigmoid(float a);
+__device__ float relu(float a);
+__device__ float linear(float a);
+__global__ void sigmoid_kernel(const float* __restrict__ src, float* __restrict__ dst, int len);
+__global__ void relu_kernel(const float* __restrict__ src, float* __restrict__ dst, int len);
+__global__ void linear_kernel(const float* __restrict__ src, float* __restrict__ dst, int len);
+#endif // ACTIVATIONS_H
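
The activation definitions themselves are not part of this excerpt; only the header is shown. A minimal sketch of what the matching .cu file could look like, assuming one thread per element and using only the signatures declared above (the "activations.cuh" include name is a guess, since the commit does not show file paths):

// Sketch only, not the commit's code. The device functions compute the
// activation for a single value; each kernel applies it elementwise.
#include "activations.cuh"   // assumed header name

__device__ float sigmoid(float a) { return 1.0f / (1.0f + expf(-a)); }
__device__ float relu(float a)    { return a > 0.0f ? a : 0.0f; }
__device__ float linear(float a)  { return a; }

// One thread per element; threads beyond `len` do nothing.
__global__ void sigmoid_kernel(const float* __restrict__ src, float* __restrict__ dst, int len) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) dst[i] = sigmoid(src[i]);
}

__global__ void relu_kernel(const float* __restrict__ src, float* __restrict__ dst, int len) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) dst[i] = relu(src[i]);
}

__global__ void linear_kernel(const float* __restrict__ src, float* __restrict__ dst, int len) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < len) dst[i] = linear(src[i]);
}

A launch for len elements would then look like sigmoid_kernel<<<(len + 255) / 256, 256>>>(d_src, d_dst, len);, where d_src and d_dst are device pointers (names illustrative).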

@@ -1,5 +1,3 @@
-// fully_connected_layer.h
#ifndef CONV_LAYER_H
#define CONV_LAYER_H

@@ -1,15 +1,17 @@
#ifndef DENSE_LAYER_H
#define DENSE_LAYER_H
+#include <functional>
#include <vector>
#include <cublas_v2.h>
-#include <ilayer.cuh>
+#include <string>
+#include "ilayer.cuh"
namespace Layers {
class Dense : public ILayer {
public:
-Dense(int inputSize, int outputSize, cublasHandle_t cublasHandle);
+Dense(int inputSize, int outputSize, std::string activation, cublasHandle_t cublasHandle);
~Dense();
void forward(const float* input, float* output);
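
The new constructor is the visible API change: the caller now selects the activation by name. A hedged usage sketch; the "dense_layer.cuh" file name, the 784/128 sizes, and the "relu" choice are assumptions, not taken from the commit:

#include <cublas_v2.h>
#include "dense_layer.cuh"   // assumed file name; the diff only shows the DENSE_LAYER_H guard

int main() {
    cublasHandle_t handle;
    cublasCreate(&handle);

    // 784 -> 128 dense layer whose output is passed through ReLU.
    Layers::Dense hidden(784, 128, "relu", handle);

    cublasDestroy(handle);
    return 0;
}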
@@ -28,6 +30,8 @@ namespace Layers {
std::vector<float> weights;
std::vector<float> biases;
+std::string activation;
void initializeWeights();
void initializeBiases();
void toCuda();
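
The hunk is cut off before Dense::forward, so how the stored activation string reaches the kernels is not visible here. One possible wiring, purely as a sketch: a host-side dispatcher that picks the kernel by comparing the name (applyActivation, the 256-thread block size, and the header name are assumptions, not part of the commit):

#include <string>
#include "activations.cuh"   // assumed header name for the kernel declarations above

// Hypothetical helper: applies the activation selected by name to `len`
// elements, reading from the device buffer d_src and writing into d_dst.
void applyActivation(const std::string& activation, const float* d_src, float* d_dst, int len) {
    const int block = 256;                        // arbitrary block size
    const int grid  = (len + block - 1) / block;  // enough blocks to cover len elements

    if (activation == "sigmoid") {
        sigmoid_kernel<<<grid, block>>>(d_src, d_dst, len);
    } else if (activation == "relu") {
        relu_kernel<<<grid, block>>>(d_src, d_dst, len);
    } else {                                      // "linear" or anything unrecognised
        linear_kernel<<<grid, block>>>(d_src, d_dst, len);
    }
}

Parsing the string once in the constructor into an enum or function pointer would avoid a string comparison on every forward pass, but a plain std::string member like the one shown above keeps a first version simple.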