Skip to content

Commit c2b6e5f

Browse files
committed
Small API cleanup: prefix public functions (`print_outputs`, `lr_scheduler_*`) with the `ann_` namespace
1 parent b308b6b commit c2b6e5f

File tree

4 files changed

+18
-18
lines changed

4 files changed

+18
-18
lines changed

README.md

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ ann_confusion_matrix | compute binary confusion matrix and MCC
138138
ann_print_confusion_matrix | print formatted confusion matrix
139139
ann_class_prediction | determine predicted class from output activations
140140
ann_print_props | print network properties and configuration
141-
print_outputs | print output layer activations (debug)
141+
ann_print_outputs | print output layer activations (debug)
142142
ann_strerror | convert error code to human-readable message
143143
ann_set_error_log_callback | install error logging callback
144144
ann_get_error_log_callback | get current error callback
@@ -150,22 +150,22 @@ Built-in schedulers adjust the learning rate during training:
150150

151151
| Scheduler | Function | Description |
152152
|-----------|----------|-------------|
153-
| Step decay | `lr_scheduler_step` | Multiply LR by gamma every N epochs |
154-
| Exponential | `lr_scheduler_exponential` | Multiply LR by gamma each epoch |
155-
| Cosine | `lr_scheduler_cosine` | Smooth decay from base LR to min LR |
153+
| Step decay | `ann_lr_scheduler_step` | Multiply LR by gamma every N epochs |
154+
| Exponential | `ann_lr_scheduler_exponential` | Multiply LR by gamma each epoch |
155+
| Cosine | `ann_lr_scheduler_cosine` | Smooth decay from base LR to min LR |
156156

157157
```c
158158
// Step decay: halve LR every 10 epochs
159159
LRStepParams step_params = { .step_size = 10, .gamma = 0.5f };
160-
ann_set_lr_scheduler(net, lr_scheduler_step, &step_params);
160+
ann_set_lr_scheduler(net, ann_lr_scheduler_step, &step_params);
161161

162162
// Exponential decay: 5% reduction per epoch
163163
LRExponentialParams exp_params = { .gamma = 0.95f };
164-
ann_set_lr_scheduler(net, lr_scheduler_exponential, &exp_params);
164+
ann_set_lr_scheduler(net, ann_lr_scheduler_exponential, &exp_params);
165165

166166
// Cosine annealing: decay to 0.0001 over 100 epochs
167167
LRCosineParams cos_params = { .T_max = 100, .min_lr = 0.0001f };
168-
ann_set_lr_scheduler(net, lr_scheduler_cosine, &cos_params);
168+
ann_set_lr_scheduler(net, ann_lr_scheduler_cosine, &cos_params);
169169

170170
// Custom scheduler
171171
real my_scheduler(unsigned epoch, real base_lr, void *data) {
@@ -681,7 +681,7 @@ int main(int argc, char *argv[])
681681
real outputs[1];
682682
ann_predict(pnet, &data[0], outputs);
683683

684-
print_outputs(pnet);
684+
ann_print_outputs(pnet);
685685

686686
// free resources
687687
ann_free_network(pnet);

ann.c

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -435,7 +435,7 @@ static void init_weights(PNetwork pnet)
435435
//--------------------------------
436436
// print nodes in the output layer
437437
//--------------------------------
438-
void print_outputs(const PNetwork pnet)
438+
void ann_print_outputs(const PNetwork pnet)
439439
{
440440
if (!pnet)
441441
return;
@@ -2481,7 +2481,7 @@ void ann_set_lr_scheduler(PNetwork pnet, LRSchedulerFunc scheduler, void *user_d
24812481
// Step decay LR scheduler
24822482
// LR = base_lr * (gamma ^ (epoch / step_size))
24832483
//------------------------------
2484-
real lr_scheduler_step(unsigned epoch, real base_lr, void *user_data)
2484+
real ann_lr_scheduler_step(unsigned epoch, real base_lr, void *user_data)
24852485
{
24862486
if (!user_data)
24872487
return base_lr;
@@ -2500,7 +2500,7 @@ real lr_scheduler_step(unsigned epoch, real base_lr, void *user_data)
25002500
// Exponential decay LR scheduler
25012501
// LR = base_lr * (gamma ^ epoch)
25022502
//------------------------------
2503-
real lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data)
2503+
real ann_lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data)
25042504
{
25052505
if (!user_data)
25062506
return base_lr;
@@ -2513,7 +2513,7 @@ real lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data)
25132513
// Cosine annealing LR scheduler
25142514
// LR = min_lr + (base_lr - min_lr) * (1 + cos(pi * epoch / T_max)) / 2
25152515
//------------------------------
2516-
real lr_scheduler_cosine(unsigned epoch, real base_lr, void *user_data)
2516+
real ann_lr_scheduler_cosine(unsigned epoch, real base_lr, void *user_data)
25172517
{
25182518
if (!user_data)
25192519
return base_lr;

ann.h

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -670,7 +670,7 @@ void ann_set_epoch_limit(PNetwork pnet, unsigned limit);
670670
* @param scheduler Scheduler callback function, or NULL to disable
671671
* @param user_data User-provided data passed to scheduler (e.g., parameters)
672672
*
673-
* @see lr_scheduler_step, lr_scheduler_exponential, lr_scheduler_cosine
673+
* @see ann_lr_scheduler_step, ann_lr_scheduler_exponential, ann_lr_scheduler_cosine
674674
*/
675675
void ann_set_lr_scheduler(PNetwork pnet, LRSchedulerFunc scheduler, void *user_data);
676676

@@ -709,7 +709,7 @@ typedef struct {
709709
* @param user_data Pointer to LRStepParams
710710
* @return Scheduled learning rate
711711
*/
712-
real lr_scheduler_step(unsigned epoch, real base_lr, void *user_data);
712+
real ann_lr_scheduler_step(unsigned epoch, real base_lr, void *user_data);
713713

714714
/**
715715
* Exponential decay scheduler: multiplies LR by gamma each epoch.
@@ -720,7 +720,7 @@ real lr_scheduler_step(unsigned epoch, real base_lr, void *user_data);
720720
* @param user_data Pointer to LRExponentialParams
721721
* @return Scheduled learning rate
722722
*/
723-
real lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data);
723+
real ann_lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data);
724724

725725
/**
726726
* Cosine annealing scheduler: smooth decay to min_lr.
@@ -731,7 +731,7 @@ real lr_scheduler_exponential(unsigned epoch, real base_lr, void *user_data);
731731
* @param user_data Pointer to LRCosineParams
732732
* @return Scheduled learning rate
733733
*/
734-
real lr_scheduler_cosine(unsigned epoch, real base_lr, void *user_data);
734+
real ann_lr_scheduler_cosine(unsigned epoch, real base_lr, void *user_data);
735735

736736
// ============================================================================
737737
// DROPOUT REGULARIZATION
@@ -826,7 +826,7 @@ void ann_print_props(const PNetwork pnet);
826826
*
827827
* @param pnet Network whose outputs to print (must not be NULL)
828828
*/
829-
void print_outputs(const PNetwork pnet);
829+
void ann_print_outputs(const PNetwork pnet);
830830

831831
// ============================================================================
832832
// ERROR HANDLING

logic.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ int main(int argc, char *argv[])
7474
real outputs[1];
7575
CHECK_OK(ann_predict(pnet, &data[0], outputs));
7676

77-
print_outputs(pnet);
77+
ann_print_outputs(pnet);
7878

7979
ann_free_network(pnet);
8080

0 commit comments

Comments (0)