From 82a0e7c19d8f0b6067014e36af6fba6650458625 Mon Sep 17 00:00:00 2001
From: LordMathis
Date: Sun, 23 Nov 2025 18:50:57 +0100
Subject: [PATCH] Fix some compilation errors

---
 include/backend.hpp                               | 2 ++
 include/backend/cpu.hpp                           | 2 +-
 include/backend/cuda.cuh                          | 2 +-
 include/kernels/activation_functions.cuh          | 5 +----
 include/kernels/matmul.cuh                        | 5 +----
 include/layers/activation.hpp                     | 2 +-
 include/layers/conv2d.hpp                         | 2 +-
 include/utils/imagenet.hpp                        | 5 +----
 src/backends/cuda/cuda_backend.cu                 | 2 +-
 src/backends/cuda/kernels/activation_functions.cu | 1 -
 src/backends/cuda/kernels/matmul.cu               | 2 +-
 src/backends/cuda/layer_ops.cu                    | 2 +-
 src/backends/cuda/tensor_ops.cu                   | 2 +-
 src/layers/activation.cpp                         | 4 ++--
 14 files changed, 15 insertions(+), 23 deletions(-)

diff --git a/include/backend.hpp b/include/backend.hpp
index 88b2887..07359e2 100644
--- a/include/backend.hpp
+++ b/include/backend.hpp
@@ -2,6 +2,8 @@
 
 #include
 
+#include "shape.hpp"
+
 namespace CUDANet {
 
 // Forward declaration
diff --git a/include/backend/cpu.hpp b/include/backend/cpu.hpp
index ad261bb..e6b63c7 100644
--- a/include/backend/cpu.hpp
+++ b/include/backend/cpu.hpp
@@ -3,7 +3,7 @@
 #include "backend.hpp"
 #include "tensor.hpp"
 
-namespace CUDANet::Backend {
+namespace CUDANet::Backends {
 
 class CPU : public Backend {
     public:
diff --git a/include/backend/cuda.cuh b/include/backend/cuda.cuh
index 783dca0..e60f374 100644
--- a/include/backend/cuda.cuh
+++ b/include/backend/cuda.cuh
@@ -24,7 +24,7 @@ do { \
     } \
 } while (0)
 
-namespace CUDANet::Backend {
+namespace CUDANet::Backends {
 
 class CUDA : public Backend {
     public:
diff --git a/include/kernels/activation_functions.cuh b/include/kernels/activation_functions.cuh
index db2809d..e5f2def 100644
--- a/include/kernels/activation_functions.cuh
+++ b/include/kernels/activation_functions.cuh
@@ -1,5 +1,4 @@
-#ifndef CUDANET_ACTIVATION_FUNCTIONS_H
-#define CUDANET_ACTIVATION_FUNCTIONS_H
+#pragma once
 
 #include
 
@@ -32,5 +31,3 @@ __global__ void relu(
 );
 
 } // namespace CUDANet::Kernels
-
-#endif // CUDANET_ACTIVATION_FUNCTIONS_H
\ No newline at end of file
diff --git a/include/kernels/matmul.cuh b/include/kernels/matmul.cuh
index 0c0bfef..55b5856 100644
--- a/include/kernels/matmul.cuh
+++ b/include/kernels/matmul.cuh
@@ -1,5 +1,4 @@
-#ifndef CUDANET_MATMUL_H
-#define CUDANET_MATMUL_H
+#pragma once
 
 #include
 
@@ -191,5 +190,3 @@ __global__ void sum_reduce(
 );
 
 } // namespace CUDANet::Kernels
-
-#endif // CUDANET_MATMUL_H
\ No newline at end of file
diff --git a/include/layers/activation.hpp b/include/layers/activation.hpp
index 323ec22..dfa25a6 100644
--- a/include/layers/activation.hpp
+++ b/include/layers/activation.hpp
@@ -25,7 +25,7 @@ class Activation : public Layer {
 
     Activation() = default;
 
-    Activation(CUDANet::Backend* backend, ActivationType activation, const CUDANet::Shape &shape);
+    Activation(ActivationType activation, const CUDANet::Shape &shape, CUDANet::Backend* backend);
 
     ~Activation() = default;
 
diff --git a/include/layers/conv2d.hpp b/include/layers/conv2d.hpp
index d520cf2..e3eff27 100644
--- a/include/layers/conv2d.hpp
+++ b/include/layers/conv2d.hpp
@@ -18,7 +18,7 @@ class Conv2d : public Layer {
         CUDANet::Backend* backend
     );
 
-    ~Conv2d() {};
+    ~Conv2d();
 
     CUDANet::Tensor& forward(CUDANet::Tensor& input) override;
 
diff --git a/include/utils/imagenet.hpp b/include/utils/imagenet.hpp
index 476841c..0c55636 100644
--- a/include/utils/imagenet.hpp
+++ b/include/utils/imagenet.hpp
@@ -1,5 +1,4 @@
-#ifndef CUDANET_IMAGENET_H
-#define CUDANET_IMAGENET_H
+#pragma once
 
 #include
 #include
@@ -1012,5 +1011,3 @@ const std::map IMAGENET_CLASS_MAP = {
 // clang-format on
 
 }
-
-#endif // CUDANET_IMAGENET_H
\ No newline at end of file
diff --git a/src/backends/cuda/cuda_backend.cu b/src/backends/cuda/cuda_backend.cu
index f178579..8bc4fd9 100644
--- a/src/backends/cuda/cuda_backend.cu
+++ b/src/backends/cuda/cuda_backend.cu
@@ -25,7 +25,7 @@ cudaDeviceProp initializeCUDA() {
     return deviceProp;
 }
 
-using namespace CUDANet::Backend;
+using namespace CUDANet::Backends;
 
 void* CUDA::allocate(size_t bytes) {
     void* d_ptr = nullptr;
diff --git a/src/backends/cuda/kernels/activation_functions.cu b/src/backends/cuda/kernels/activation_functions.cu
index 72bca66..1512ec5 100644
--- a/src/backends/cuda/kernels/activation_functions.cu
+++ b/src/backends/cuda/kernels/activation_functions.cu
@@ -1,5 +1,4 @@
 #include "activation_functions.cuh"
-#include "cuda_helper.cuh"
 
 using namespace CUDANet;
 
diff --git a/src/backends/cuda/kernels/matmul.cu b/src/backends/cuda/kernels/matmul.cu
index 31c68d5..f3ad0f1 100644
--- a/src/backends/cuda/kernels/matmul.cu
+++ b/src/backends/cuda/kernels/matmul.cu
@@ -1,4 +1,4 @@
-#include "cuda_helper.cuh"
+#include "backend/cuda.cuh"
 #include "matmul.cuh"
 
 using namespace CUDANet;
diff --git a/src/backends/cuda/layer_ops.cu b/src/backends/cuda/layer_ops.cu
index 0f6b557..11210b2 100644
--- a/src/backends/cuda/layer_ops.cu
+++ b/src/backends/cuda/layer_ops.cu
@@ -4,7 +4,7 @@
 #include "kernels/matmul.cuh"
 #include "kernels/pool.cuh"
 
-using namespace CUDANet::Backend;
+using namespace CUDANet::Backends;
 
 void CUDA::relu(Tensor& tensor) {
     int gridSize = (tensor.numel() + BLOCK_SIZE - 1) / BLOCK_SIZE;
diff --git a/src/backends/cuda/tensor_ops.cu b/src/backends/cuda/tensor_ops.cu
index 7653ee9..ffd6e7a 100644
--- a/src/backends/cuda/tensor_ops.cu
+++ b/src/backends/cuda/tensor_ops.cu
@@ -4,7 +4,7 @@
 #include "backend/cuda.cuh"
 #include "kernels/matmul.cuh"
 
-using namespace CUDANet::Backend;
+using namespace CUDANet::Backends;
 
 void CUDA::print(const CUDANet::Tensor &input) {
     auto length = input.numel();
diff --git a/src/layers/activation.cpp b/src/layers/activation.cpp
index def0c67..f7661be 100644
--- a/src/layers/activation.cpp
+++ b/src/layers/activation.cpp
@@ -7,11 +7,11 @@
 
 using namespace CUDANet::Layers;
 
-Activation::Activation(CUDANet::Backend* backend, ActivationType activation, const CUDANet::Shape &shape)
+Activation::Activation(ActivationType activation, const CUDANet::Shape &shape, CUDANet::Backend* backend)
     : backend(backend), activationType(activation), shape(shape) {
 
     if (shape.size() != 1) {
-        throw std::runtime_error(std::format("Invalid shape. Expected [1], got {}", shape));
+        throw InvalidShapeException("input", 1, shape.size());
     }
 
     auto length = shape[0];
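
Note (not part of the commit): the Activation constructor now takes the
backend pointer as its last parameter, so call sites built against the old
signature need their arguments reordered. A minimal sketch of an updated
call site, assuming an ActivationType::RELU enumerator, a brace-constructible
Shape, and a default-constructible CUDA backend, none of which this patch
shows:

    #include "backend/cuda.cuh"
    #include "layers/activation.hpp"

    CUDANet::Backends::CUDA backend;  // hypothetical backend instance

    // Old order: Activation(&backend, type, shape)
    // New order: activation type and shape first, backend pointer last.
    CUDANet::Layers::Activation relu(
        CUDANet::Layers::ActivationType::RELU,  // assumed enumerator name
        CUDANet::Shape{64},                     // must be 1-D, per the ctor's shape check
        &backend
    );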
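A second note: ~Conv2d() is now only declared in conv2d.hpp, so an
out-of-line definition must exist or linking will fail. A one-line sketch,
assuming the implementation lives in src/layers/conv2d.cpp and Conv2d sits
in CUDANet::Layers (this patch shows neither):

    // src/layers/conv2d.cpp (assumed path): defaulted out-of-line destructor
    CUDANet::Layers::Conv2d::~Conv2d() = default;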