
Commit 28a1d72

Removed useless comments

1 parent cb3109c

5 files changed: +47 / -70 lines

include/Layers/dense.h

Lines changed: 6 additions & 7 deletions
@@ -22,13 +22,12 @@ typedef struct
     int input_size;
     int output_size;
 
-    // Add members for optimizers
-    float *rmsprop_cache_w; // Cache for RMSProp weights
-    float *rmsprop_cache_b; // Cache for RMSProp biases
-    float *adam_v_w;        // Adam first moment vector for weights
-    float *adam_v_b;        // Adam first moment vector for biases
-    float *adam_s_w;        // Adam second moment vector for weights
-    float *adam_s_b;        // Adam second moment vector for biases
+    float *rmsprop_cache_w;
+    float *rmsprop_cache_b;
+    float *adam_v_w;
+    float *adam_v_b;
+    float *adam_s_w;
+    float *adam_s_b;
 } DenseLayer;
 
 /**
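The deleted comments restated what the field names already say: rmsprop_cache_* is per-parameter RMSProp state, and adam_v_* / adam_s_* are Adam's first and second moment vectors. For context, a minimal sketch of how such moment vectors are consumed during an Adam step; the function name, hyperparameter values, and signature are illustrative, not this repo's API:

```c
#include <math.h>

/* Illustrative Adam update over a flat parameter array. v and s are the
 * first/second moment buffers (e.g. adam_v_w / adam_s_w), t the 1-based
 * timestep, lr the learning rate. */
static void adam_step(float *param, const float *grad, float *v, float *s,
                      int n, int t, float lr)
{
    const float beta1 = 0.9f, beta2 = 0.999f, eps = 1e-8f;
    for (int i = 0; i < n; i++)
    {
        v[i] = beta1 * v[i] + (1.0f - beta1) * grad[i];           /* first moment */
        s[i] = beta2 * s[i] + (1.0f - beta2) * grad[i] * grad[i]; /* second moment */
        float v_hat = v[i] / (1.0f - powf(beta1, (float)t));      /* bias correction */
        float s_hat = s[i] / (1.0f - powf(beta2, (float)t));
        param[i] -= lr * v_hat / (sqrtf(s_hat) + eps);
    }
}
```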

include/Metrics/r2_score.h

Lines changed: 1 addition & 1 deletion
@@ -11,4 +11,4 @@
  */
 float r2_score(const float *y_true, const float *y_pred, int size);
 
-#endif // R2_SCORE_H
+#endif
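For reference, r2_score conventionally returns R² = 1 - SS_res / SS_tot. A minimal sketch consistent with the declared signature (an assumption for illustration; the repo's actual implementation may guard edge cases such as a constant y_true differently):

```c
float r2_score(const float *y_true, const float *y_pred, int size)
{
    float mean = 0.0f;
    for (int i = 0; i < size; i++)
        mean += y_true[i];
    mean /= (float)size;

    float ss_res = 0.0f, ss_tot = 0.0f; /* residual and total sums of squares */
    for (int i = 0; i < size; i++)
    {
        float r = y_true[i] - y_pred[i];
        float d = y_true[i] - mean;
        ss_res += r * r;
        ss_tot += d * d;
    }
    return 1.0f - ss_res / ss_tot; /* undefined when ss_tot == 0 */
}
```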

src/Layers/dense.c

Lines changed: 2 additions & 10 deletions
@@ -7,8 +7,6 @@
 #include "../../include/Core/memory_management.h"
 #include "../../include/Core/logging.h"
 
-
-
 /**
  * @brief Initializes a Dense Layer with random weights and biases.
  *
@@ -30,17 +28,14 @@ int initialize_dense(DenseLayer *layer, int input_size, int output_size)
         LOG_ERROR("Invalid input size (%d) or output size (%d)", input_size, output_size);
         return CM_INVALID_PARAMETER_ERROR;
     }
-
-    // initialize struct members to NULL
-    // cm_safe_free can be called - even inadvertently - without crashing
+
    layer->weights = NULL;
     layer->biases = NULL;
     layer->adam_v_w = NULL;
     layer->adam_v_b = NULL;
     layer->adam_s_w = NULL;
     layer->adam_s_b = NULL;
 
-    // if we don't see this Log message, we had a prolem zero-ing out memory
     LOG_DEBUG("Initialized DenseLayer with input size (%d) and output size (%d)", input_size, output_size);
 
     layer->input_size = input_size;
@@ -66,7 +61,6 @@ int initialize_dense(DenseLayer *layer, int input_size, int output_size)
         layer->biases[i] = ((float)rand() / RAND_MAX) - 0.5;
     }
 
-    // Allocate and initialize Adam optimizer's moment vectors
     layer->adam_v_w = (float *)cm_safe_malloc(input_size * output_size * sizeof(float), __FILE__, __LINE__);
     layer->adam_v_b = (float *)cm_safe_malloc(output_size * sizeof(float), __FILE__, __LINE__);
     layer->adam_s_w = (float *)cm_safe_malloc(input_size * output_size * sizeof(float), __FILE__, __LINE__);
@@ -148,9 +142,7 @@ int backward_dense(DenseLayer *layer, float *input, float *output, float *d_outp
         d_input[i] = 0;
         for (int j = 0; j < layer->output_size; j++)
         {
-            // Breakpoint condition: Check for potential out-of-bounds access
-            // For GDB:
-            // break dense.c:108 if (i + j * layer->input_size) >= (layer->input_size * layer->output_size)
+
             d_input[i] += d_output[j] * layer->weights[i + j * layer->input_size];
         }
     }
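The removed lines in backward_dense were a GDB breakpoint recipe for catching out-of-bounds reads of the flat weights array. If that check is ever wanted back in code form, an assert at the access site expresses the same condition; weight_at below is a hypothetical helper, not part of this commit:

```c
#include <assert.h>

/* Bounds-checked read of a flat [output_size][input_size] weight matrix;
 * the assert is the removed GDB breakpoint condition, inverted. */
static float weight_at(const float *weights, int i, int j,
                       int input_size, int output_size)
{
    assert(i + j * input_size < input_size * output_size);
    return weights[i + j * input_size];
}
```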

test/Core/test_logging.c

Lines changed: 12 additions & 16 deletions
@@ -5,48 +5,44 @@
 
 /**
  * @brief Test the logging system
- * 
+ *
  * @return int 0 if all tests pass, non-zero otherwise
  */
-int main() {
+int main()
+{
     printf("Running logging tests...\n");
-
-    // Test setting log level
+
     set_log_level(LOG_LEVEL_DEBUG);
-
-    // Test different log levels
+
     LOG_DEBUG("This is a debug message: %d", 42);
     LOG_INFO("This is an info message: %s", "hello");
     LOG_WARNING("This is a warning message: %.2f", 3.14);
     LOG_ERROR("This is an error message: %c", 'X');
-
-    // Test changing log level
+
     printf("\nChanging log level to INFO (DEBUG messages should not appear):\n");
     set_log_level(LOG_LEVEL_INFO);
-
+
     LOG_DEBUG("This debug message should NOT appear");
     LOG_INFO("This info message should appear");
     LOG_WARNING("This warning message should appear");
     LOG_ERROR("This error message should appear");
-
-    // Test changing log level to WARNING
+
     printf("\nChanging log level to WARNING (DEBUG and INFO messages should not appear):\n");
     set_log_level(LOG_LEVEL_WARNING);
-
+
     LOG_DEBUG("This debug message should NOT appear");
     LOG_INFO("This info message should NOT appear");
     LOG_WARNING("This warning message should appear");
     LOG_ERROR("This error message should appear");
-
-    // Test changing log level to ERROR
+
     printf("\nChanging log level to ERROR (only ERROR messages should appear):\n");
     set_log_level(LOG_LEVEL_ERROR);
-
+
     LOG_DEBUG("This debug message should NOT appear");
     LOG_INFO("This info message should NOT appear");
     LOG_WARNING("This warning message should NOT appear");
     LOG_ERROR("This error message should appear");
-
+
     printf("\nLogging tests completed successfully!\n");
     return 0;
 }
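The test relies on threshold filtering: once set_log_level raises the level, lower-severity messages must not appear. A minimal sketch of one common way to implement such a filter, assuming the severity ordering implied by the test; the repo's actual logging.h may differ:

```c
#include <stdio.h>

typedef enum { LOG_LEVEL_DEBUG, LOG_LEVEL_INFO,
               LOG_LEVEL_WARNING, LOG_LEVEL_ERROR } LogLevel;

static LogLevel current_level = LOG_LEVEL_DEBUG;

void set_log_level(LogLevel level) { current_level = level; }

/* Discard any message whose severity is below the current threshold. */
#define LOG_AT(level, tag, ...)                                   \
    do {                                                          \
        if ((level) >= current_level) {                           \
            fprintf(stderr, "[%s] ", (tag));                      \
            fprintf(stderr, __VA_ARGS__);                         \
            fputc('\n', stderr);                                  \
        }                                                         \
    } while (0)

#define LOG_DEBUG(...)   LOG_AT(LOG_LEVEL_DEBUG, "DEBUG", __VA_ARGS__)
#define LOG_INFO(...)    LOG_AT(LOG_LEVEL_INFO, "INFO", __VA_ARGS__)
#define LOG_WARNING(...) LOG_AT(LOG_LEVEL_WARNING, "WARNING", __VA_ARGS__)
#define LOG_ERROR(...)   LOG_AT(LOG_LEVEL_ERROR, "ERROR", __VA_ARGS__)
```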

test/Core/test_training.c

Lines changed: 26 additions & 36 deletions
@@ -6,75 +6,65 @@
 #include "../../include/Core/error_codes.h"
 #include "../../include/Core/logging.h"
 
-void test_large_layer_sizes() {
+void test_large_layer_sizes()
+{
     set_log_level(LOG_LEVEL_DEBUG);
-    // Create a network with very large layer sizes
+
     NeuralNetwork *network = create_neural_network(1000);
-
-    // Add layers with increasing sizes to test buffer allocation
+
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 1000, 2000, 0.0f, 0, 0);
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 2000, 500, 0.0f, 0, 0);
-
-    // Test forward pass with large input
+
     float *input = (float *)cm_safe_malloc(1000 * sizeof(float), __FILE__, __LINE__);
     float *output = (float *)cm_safe_malloc(500 * sizeof(float), __FILE__, __LINE__);
-
-    // Fill input with test data
-    for (int i = 0; i < 1000; i++) input[i] = 0.1f;
-
-    // This would segfault without the bug fixes
+
+    for (int i = 0; i < 1000; i++)
+        input[i] = 0.1f;
+
     CM_Error error = forward_pass(network, input, output, 1000, 500, 0);
-
-    // Assert no error
+
     assert(error == CM_SUCCESS);
-
-    // Clean up
+
     free_neural_network(network);
     cm_safe_free((void **)&input);
     cm_safe_free((void **)&output);
 }
 
-void test_mismatched_layer_sizes() {
-    // Create network with input size 10
+void test_mismatched_layer_sizes()
+{
+
     NeuralNetwork *network = create_neural_network(10);
-
-    // Add layers with different input/output sizes
+
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 10, 20, 0.0f, 0, 0);
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 20, 5, 0.0f, 0, 0);
-
-    // Try to run forward pass with input size 15 (mismatch)
+
     float input[15] = {0};
     float output[5] = {0};
-
-    // This should return an error code, not segfault
+
     CM_Error error = forward_pass(network, input, output, 15, 5, 0);
-
-    // Assert proper error handling
+
     assert(error == CM_INVALID_LAYER_DIMENSIONS_ERROR);
-
+
     free_neural_network(network);
 }
 
-void test_optimizer_memory() {
-    // Create network
+void test_optimizer_memory()
+{
+
     NeuralNetwork *network = create_neural_network(10);
-
-    // Add layers
+
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 10, 100, 0.0f, 0, 0);
     model_add(network, LAYER_DENSE, ACTIVATION_RELU, 100, 50, 0.0f, 0, 0);
-
-    // Build with Adam optimizer (requires memory allocation)
+
     CM_Error error = build_network(network, OPTIMIZER_ADAM, 0.01f, LOSS_MSE, 0.0f, 0.0f);
     assert(error == CM_SUCCESS);
-
-    // Initialize optimizer params (would segfault without your fixes)
+
     error = initialize_optimizer_params(network);
     assert(error == CM_SUCCESS);
-
+
     free_neural_network(network);
 }
 
-
 int main()
 {
     printf("Testing Neural Network Training\n");
