Add epsilon param to batch norm

commit 4a1c4a5f91
parent c84f58b97c
Date: 2024-05-19 15:13:22 +02:00

3 changed files with 3 additions and 3 deletions

@@ -10,7 +10,7 @@ namespace CUDANet::Layers {
 class BatchNorm : public WeightedLayer {
   public:
-    BatchNorm(int inputSize, int inputChannels, ActivationType activationType);
+    BatchNorm(int inputSize, int inputChannels, float epsilon, ActivationType activationType);
     ~BatchNorm();

@@ -12,6 +12,7 @@ using namespace CUDANet::Layers;
 BatchNorm::BatchNorm(
     int inputSize,
     int inputChannels,
+    float epsilon,
     ActivationType activationType
 )
     : inputSize(inputSize), inputChannels(inputChannels) {
@@ -47,7 +48,6 @@ BatchNorm::BatchNorm(
     CUDA_CHECK(cudaMemcpy(d_length, &length, sizeof(float), cudaMemcpyHostToDevice));
     d_epsilon = nullptr;
-    float epsilon = 1e-5f;
     CUDA_CHECK(cudaMalloc((void **)&d_epsilon, sizeof(float)));
     CUDA_CHECK(cudaMemcpy(d_epsilon, &epsilon, sizeof(float), cudaMemcpyHostToDevice));

@@ -13,7 +13,7 @@ TEST(BatchNormLayerTest, BatchNormSmallForwardTest) {
     cudaError_t cudaStatus;
     CUDANet::Layers::BatchNorm batchNorm(
-        inputSize, nChannels, CUDANet::Layers::ActivationType::NONE
+        inputSize, nChannels, 1e-5f, CUDANet::Layers::ActivationType::NONE
     );
     std::vector<float> weights = {0.63508f, 0.64903f};
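
For reference, epsilon is the small constant added to the variance before the square root, so the normalization y = gamma * (x - mean) / sqrt(var + epsilon) + beta never divides by zero. A minimal sketch of where the value passed through the new constructor parameter would typically end up; this kernel is illustrative only (the kernel name, single-channel layout, and parameter names are assumptions, not CUDANet's actual implementation):

// Hypothetical single-channel batch norm kernel, shown only to illustrate the
// role of the epsilon value that the BatchNorm constructor now copies to device
// memory (d_epsilon in the diff above).
__global__ void batchNormForward(
    const float *x, float *y,
    const float *mean, const float *var,
    const float *gamma, const float *beta,
    const float *epsilon, int n
) {
    int i = blockIdx.x * blockDim.x + threadIdx.x;
    if (i < n) {
        // epsilon keeps the denominator strictly positive when the variance is ~0
        y[i] = gamma[0] * (x[i] - mean[0]) * rsqrtf(var[0] + epsilon[0]) + beta[0];
    }
}

Callers now choose the value explicitly, e.g. the common default 1e-5f used in the updated test above.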