From 58af95eb250aff1b3353d0ef95a358ac8971b2ca Mon Sep 17 00:00:00 2001
From: LordMathis
Date: Sun, 21 Apr 2024 22:47:12 +0200
Subject: [PATCH] Remove cudaDeviceReset calls from tests

---
 src/layers/activation.cu                  | 13 +++++--------
 test/kernels/test_activation_functions.cu |  2 +-
 test/kernels/test_matmul.cu               |  8 ++++----
 test/layers/test_activation.cu            |  9 +++------
 test/layers/test_avg_pooling.cu           |  2 --
 test/layers/test_concat.cu                |  2 --
 test/layers/test_conv2d.cu                |  2 +-
 test/layers/test_dense.cu                 |  2 +-
 test/layers/test_input.cu                 |  2 +-
 test/layers/test_max_pooling.cu           |  2 +-
 test/layers/test_output.cu                |  2 +-
 11 files changed, 18 insertions(+), 28 deletions(-)

diff --git a/src/layers/activation.cu b/src/layers/activation.cu
index a78248a..0991fcf 100644
--- a/src/layers/activation.cu
+++ b/src/layers/activation.cu
@@ -12,13 +12,11 @@ using namespace CUDANet::Layers;
 Activation::Activation(ActivationType activation, const int length)
     : activationType(activation), length(length) {
     if (activationType == SOFTMAX) {
-        d_max = nullptr;
-        CUDA_CHECK(cudaMalloc((void**)&d_max, sizeof(float) * length));
-
         d_softmax_sum = nullptr;
         CUDA_CHECK(cudaMalloc((void**)&d_softmax_sum, sizeof(float) * length));

-        std::cout << "Activation: Softmax " << length << std::endl;
+        d_max = nullptr;
+        CUDA_CHECK(cudaMalloc((void**)&d_max, sizeof(float) * length));
     }

     gridSize = (length + BLOCK_SIZE - 1) / BLOCK_SIZE;
@@ -26,8 +24,8 @@ Activation::Activation(ActivationType activation, const int length)

 Activation::~Activation() {
     if (activationType == SOFTMAX) {
-        cudaFree(d_softmax_sum);
-        cudaFree(d_max);
+        CUDA_CHECK(cudaFree(d_softmax_sum));
+        CUDA_CHECK(cudaFree(d_max));
     }
 }

@@ -63,7 +61,7 @@ void Activation::activate(float* d_input) {
                 d_input, d_input, length
             );
             CUDA_CHECK(cudaGetLastError());
-            
+
             // Find sum
             Utils::sum(d_input, d_softmax_sum, length);

@@ -71,7 +69,6 @@ void Activation::activate(float* d_input) {
                 d_input, d_input, d_softmax_sum, length
             );
             CUDA_CHECK(cudaGetLastError());
-
             break;

         default:
diff --git a/test/kernels/test_activation_functions.cu b/test/kernels/test_activation_functions.cu
index d52f7e7..327e608 100644
--- a/test/kernels/test_activation_functions.cu
+++ b/test/kernels/test_activation_functions.cu
@@ -45,5 +45,5 @@ TEST(ActivationFunctionsTest, SigmoidSanityCheck) {

     cudaFree(d_input);
     cudaFree(d_output);
-    cudaDeviceReset();
+
 }
diff --git a/test/kernels/test_matmul.cu b/test/kernels/test_matmul.cu
index de2fd0d..abef1b5 100644
--- a/test/kernels/test_matmul.cu
+++ b/test/kernels/test_matmul.cu
@@ -68,7 +68,7 @@ TEST(MatMulTest, MatVecMulTest) {

     cudaFree(d_vector);
     cudaFree(d_output);
-    cudaDeviceReset();
+
 }

 TEST(MatMulTest, MaxReduceTest) {
@@ -113,7 +113,7 @@ TEST(MatMulTest, MaxReduceTest) {

     cudaFree(d_input);
     cudaFree(d_output);
-    cudaDeviceReset();
+
 }

 TEST(MatMulTest, VecExpTest) {
@@ -157,7 +157,7 @@ TEST(MatMulTest, VecExpTest) {

     cudaFree(d_input);
     cudaFree(d_output);
-    cudaDeviceReset();
+
 }

 TEST(MatMulTest, SumReduceTest) {
@@ -210,5 +210,5 @@ TEST(MatMulTest, SumReduceTest) {

     cudaFree(d_input);
     cudaFree(d_sum);
-    cudaDeviceReset();
+
 }
\ No newline at end of file
diff --git a/test/layers/test_activation.cu b/test/layers/test_activation.cu
index 7ed6509..7452fb4 100644
--- a/test/layers/test_activation.cu
+++ b/test/layers/test_activation.cu
@@ -38,10 +38,7 @@ TEST(ActivationTest, SoftmaxTest1) {

     EXPECT_NEAR(sum, 1.0f, 1e-5f);

-    cudaFree(d_input);
-    cudaDeviceReset();
-
-    cudaStatus = cudaGetLastError();
+    cudaStatus = cudaFree(d_input);
     EXPECT_EQ(cudaStatus, cudaSuccess);
 }

@@ -82,6 +79,6 @@ TEST(ActivationTest, SoftmaxTest2) {
     EXPECT_NEAR(sum, 1.0f, 1e-5f);

     // Cleanup
-    cudaFree(d_input);
-    cudaDeviceReset();
+    cudaStatus = cudaFree(d_input);
+    EXPECT_EQ(cudaStatus, cudaSuccess);
 }
\ No newline at end of file
diff --git a/test/layers/test_avg_pooling.cu b/test/layers/test_avg_pooling.cu
index 5e1401d..2fcacec 100644
--- a/test/layers/test_avg_pooling.cu
+++ b/test/layers/test_avg_pooling.cu
@@ -67,6 +67,4 @@ TEST(AvgPoolingLayerTest, AvgPoolForwardTest) {

     cudaFree(d_input);
     cudaFree(d_output);
-
-    cudaDeviceReset();
 }
diff --git a/test/layers/test_concat.cu b/test/layers/test_concat.cu
index bb14134..a5adc97 100644
--- a/test/layers/test_concat.cu
+++ b/test/layers/test_concat.cu
@@ -34,6 +34,4 @@ TEST(ConcatLayerTest, Init) {
         EXPECT_EQ(output[i + 5], inputB[i]);
     }
     cudaFree(d_output);
-
-    cudaDeviceReset();
 }
\ No newline at end of file
diff --git a/test/layers/test_conv2d.cu b/test/layers/test_conv2d.cu
index 4057c7f..d054e95 100644
--- a/test/layers/test_conv2d.cu
+++ b/test/layers/test_conv2d.cu
@@ -47,7 +47,7 @@ class Conv2dTest : public ::testing::Test {
     void commonTestTeardown(float* d_input) {
         // Free device memory
         cudaFree(d_input);
-        cudaDeviceReset();
+
     }

     cudaError_t cudaStatus;
diff --git a/test/layers/test_dense.cu b/test/layers/test_dense.cu
index 1491915..bae4ae3 100644
--- a/test/layers/test_dense.cu
+++ b/test/layers/test_dense.cu
@@ -41,7 +41,7 @@ class DenseLayerTest : public ::testing::Test {
     void commonTestTeardown(float* d_input) {
         // Free device memory
         cudaFree(d_input);
-        cudaDeviceReset();
+
     }

     cudaError_t cudaStatus;
diff --git a/test/layers/test_input.cu b/test/layers/test_input.cu
index 6b09559..e10ebb3 100644
--- a/test/layers/test_input.cu
+++ b/test/layers/test_input.cu
@@ -15,5 +15,5 @@ TEST(InputLayerTest, InputForward) {
     EXPECT_EQ(cudaStatus, cudaSuccess);

     EXPECT_EQ(input, output);
-    cudaDeviceReset();
+
 }
\ No newline at end of file
diff --git a/test/layers/test_max_pooling.cu b/test/layers/test_max_pooling.cu
index e9b59a9..703b82b 100644
--- a/test/layers/test_max_pooling.cu
+++ b/test/layers/test_max_pooling.cu
@@ -68,5 +68,5 @@ TEST(MaxPoolingLayerTest, MaxPoolForwardTest) {

     cudaFree(d_input);
     cudaFree(d_output);
-    cudaDeviceReset();
+
 }
diff --git a/test/layers/test_output.cu b/test/layers/test_output.cu
index bae4ca4..8cd1947 100644
--- a/test/layers/test_output.cu
+++ b/test/layers/test_output.cu
@@ -23,5 +23,5 @@ TEST(OutputLayerTest, OutputForward) {
     }

     cudaFree(d_input);
-    cudaDeviceReset();
+
 }
\ No newline at end of file
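
Note for reviewers (not part of the patch): the sketch below illustrates the cleanup pattern the tests move to, freeing each allocation and asserting on the returned cudaError_t instead of calling cudaDeviceReset(), which destroys the primary CUDA context shared by every test running in the same process. It assumes GoogleTest and the CUDA runtime API as used in the test suite; the test name and buffer here are hypothetical, not code from the repository.

#include <cuda_runtime.h>
#include <gtest/gtest.h>

// Hypothetical test showing per-test cleanup without cudaDeviceReset().
TEST(CleanupPatternExample, FreeWithStatusCheck) {
    float* d_buffer = nullptr;
    cudaError_t cudaStatus = cudaMalloc((void**)&d_buffer, 16 * sizeof(float));
    EXPECT_EQ(cudaStatus, cudaSuccess);

    // ... launch kernels and copy results back here ...

    // Release only what this test allocated; the cudaFree return code can
    // also surface errors left over from earlier asynchronous work.
    cudaStatus = cudaFree(d_buffer);
    EXPECT_EQ(cudaStatus, cudaSuccess);
}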