@@ -21,6 +21,7 @@
 #include "../reftable/reftable-iterator.h"
 #include "../setup.h"
 #include "../strmap.h"
+#include "../trace2.h"
 #include "parse.h"
 #include "refs-internal.h"
 
@@ -447,10 +448,81 @@ struct reftable_ref_iterator {
 
 	const char *prefix;
 	size_t prefix_len;
+	char **exclude_patterns;
+	size_t exclude_patterns_index;
+	size_t exclude_patterns_strlen;
 	unsigned int flags;
 	int err;
 };
 
+/*
+ * Handle exclude patterns. Returns either `1`, which tells the caller that the
+ * current reference shall not be shown. Or `0`, which indicates that it should
+ * be shown.
+ */
+static int should_exclude_current_ref(struct reftable_ref_iterator *iter)
+{
+	while (iter->exclude_patterns[iter->exclude_patterns_index]) {
+		const char *pattern = iter->exclude_patterns[iter->exclude_patterns_index];
+		char *ref_after_pattern;
+		int cmp;
+
+		/*
+		 * Lazily cache the pattern length so that we don't have to
+		 * recompute it every time this function is called.
+		 */
+		if (!iter->exclude_patterns_strlen)
+			iter->exclude_patterns_strlen = strlen(pattern);
+
+		/*
+		 * When the reference name is lexicographically bigger than the
+		 * current exclude pattern we know that it won't ever match any
+		 * of the following references, either. We thus advance to the
+		 * next pattern and re-check whether it matches.
+		 *
+		 * Otherwise, if it's smaller, then we do not have a match and
+		 * thus want to show the current reference.
+		 */
+		cmp = strncmp(iter->ref.refname, pattern,
+			      iter->exclude_patterns_strlen);
+		if (cmp > 0) {
+			iter->exclude_patterns_index++;
+			iter->exclude_patterns_strlen = 0;
+			continue;
+		}
+		if (cmp < 0)
+			return 0;
+
+		/*
+		 * The reference shares a prefix with the exclude pattern and
+		 * shall thus be omitted. We skip all references that match the
+		 * pattern by seeking to the first reference after the block of
+		 * matches.
+		 *
+		 * This is done by appending the highest possible character to
+		 * the pattern. Consequently, all references that have the
+		 * pattern as prefix and whose suffix starts with anything in
+		 * the range [0x00, 0xfe] are skipped. And given that 0xff is a
+		 * non-printable character that shouldn't ever be in a ref name,
+		 * we'd not yield any such record, either.
+		 *
+		 * Note that the seeked-to reference may also be excluded. This
+		 * is not handled here though, but the caller is expected to
+		 * loop and re-verify the next reference for us.
+		 */
+		ref_after_pattern = xstrfmt("%s%c", pattern, 0xff);
+		iter->err = reftable_iterator_seek_ref(&iter->iter, ref_after_pattern);
+		iter->exclude_patterns_index++;
+		iter->exclude_patterns_strlen = 0;
+		trace2_counter_add(TRACE2_COUNTER_ID_REFTABLE_RESEEKS, 1);
+
+		free(ref_after_pattern);
+		return 1;
+	}
+
+	return 0;
+}
+
 static int reftable_ref_iterator_advance(struct ref_iterator *ref_iterator)
 {
 	struct reftable_ref_iterator *iter =
@@ -481,6 +553,9 @@ static int reftable_ref_iterator_advance(struct ref_iterator *ref_iterator)
 			break;
 		}
 
+		if (iter->exclude_patterns && should_exclude_current_ref(iter))
+			continue;
+
 		if (iter->flags & DO_FOR_EACH_PER_WORKTREE_ONLY &&
 		    parse_worktree_ref(iter->ref.refname, NULL, NULL, NULL) !=
 			    REF_WORKTREE_CURRENT)
@@ -570,6 +645,11 @@ static int reftable_ref_iterator_abort(struct ref_iterator *ref_iterator)
 		(struct reftable_ref_iterator *)ref_iterator;
 	reftable_ref_record_release(&iter->ref);
 	reftable_iterator_destroy(&iter->iter);
+	if (iter->exclude_patterns) {
+		for (size_t i = 0; iter->exclude_patterns[i]; i++)
+			free(iter->exclude_patterns[i]);
+		free(iter->exclude_patterns);
+	}
 	free(iter);
 	return ITER_DONE;
 }
@@ -580,9 +660,53 @@ static struct ref_iterator_vtable reftable_ref_iterator_vtable = {
 	.abort = reftable_ref_iterator_abort
 };
 
+static int qsort_strcmp(const void *va, const void *vb)
+{
+	const char *a = *(const char **)va;
+	const char *b = *(const char **)vb;
+	return strcmp(a, b);
+}
+
+static char **filter_exclude_patterns(const char **exclude_patterns)
+{
+	size_t filtered_size = 0, filtered_alloc = 0;
+	char **filtered = NULL;
+
+	if (!exclude_patterns)
+		return NULL;
+
+	for (size_t i = 0; ; i++) {
+		const char *exclude_pattern = exclude_patterns[i];
+		int has_glob = 0;
+
+		if (!exclude_pattern)
+			break;
+
+		for (const char *p = exclude_pattern; *p; p++) {
+			has_glob = is_glob_special(*p);
+			if (has_glob)
+				break;
+		}
+		if (has_glob)
+			continue;
+
+		ALLOC_GROW(filtered, filtered_size + 1, filtered_alloc);
+		filtered[filtered_size++] = xstrdup(exclude_pattern);
+	}
+
+	if (filtered_size) {
+		QSORT(filtered, filtered_size, qsort_strcmp);
+		ALLOC_GROW(filtered, filtered_size + 1, filtered_alloc);
+		filtered[filtered_size++] = NULL;
+	}
+
+	return filtered;
+}
+
 static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_store *refs,
							     struct reftable_stack *stack,
							     const char *prefix,
+							     const char **exclude_patterns,
							     int flags)
 {
 	struct reftable_ref_iterator *iter;
@@ -595,6 +719,7 @@ static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_
 	iter->base.oid = &iter->oid;
 	iter->flags = flags;
 	iter->refs = refs;
+	iter->exclude_patterns = filter_exclude_patterns(exclude_patterns);
 
 	ret = refs->err;
 	if (ret)
@@ -616,7 +741,7 @@ static struct reftable_ref_iterator *ref_iterator_for_stack(struct reftable_ref_
 
 static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_store,
						       const char *prefix,
-						       const char **exclude_patterns UNUSED,
+						       const char **exclude_patterns,
						       unsigned int flags)
 {
 	struct reftable_ref_iterator *main_iter, *worktree_iter;
@@ -627,7 +752,8 @@ static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_sto
 		required_flags |= REF_STORE_ODB;
 	refs = reftable_be_downcast(ref_store, required_flags, "ref_iterator_begin");
 
-	main_iter = ref_iterator_for_stack(refs, refs->main_stack, prefix, flags);
+	main_iter = ref_iterator_for_stack(refs, refs->main_stack, prefix,
+					   exclude_patterns, flags);
 
 	/*
 	 * The worktree stack is only set when we're in an actual worktree
@@ -641,7 +767,8 @@ static struct ref_iterator *reftable_be_iterator_begin(struct ref_store *ref_sto
 	 * Otherwise we merge both the common and the per-worktree refs into a
 	 * single iterator.
 	 */
-	worktree_iter = ref_iterator_for_stack(refs, refs->worktree_stack, prefix, flags);
+	worktree_iter = ref_iterator_for_stack(refs, refs->worktree_stack, prefix,
					       exclude_patterns, flags);
 	return merge_ref_iterator_begin(&worktree_iter->base, &main_iter->base,
					ref_iterator_select, NULL);
 }
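
Note, not part of the patch: the long comment in should_exclude_current_ref() describes how a whole block of matching references is skipped by seeking to the exclude pattern with the byte 0xff appended, which sorts after every valid ref name carrying that prefix. The standalone sketch below illustrates that ordering argument; it emulates the seek with a plain comparison over a small sorted array, and the ref names and the pattern are invented for the example.

/*
 * Illustrative sketch: skip every ref that starts with "refs/heads/skip/" by
 * comparing against the pattern with 0xff appended, the same value the patch
 * uses as a reftable seek target.
 */
#include <stdio.h>
#include <string.h>

int main(void)
{
	const char *refs[] = {
		"refs/heads/main",
		"refs/heads/skip/a",
		"refs/heads/skip/b",
		"refs/tags/v1.0",
	};
	const char *pattern = "refs/heads/skip/";
	char seek_target[64];

	/* Append the highest possible byte to the exclude pattern. */
	snprintf(seek_target, sizeof(seek_target), "%s%c", pattern, 0xff);

	for (size_t i = 0; i < sizeof(refs) / sizeof(*refs); i++) {
		/*
		 * Everything that sorts before the seek target and shares the
		 * pattern as a prefix belongs to the excluded block; a real
		 * iterator would seek past it instead of comparing each entry.
		 */
		if (strcmp(refs[i], seek_target) < 0 &&
		    !strncmp(refs[i], pattern, strlen(pattern)))
			continue;
		printf("%s\n", refs[i]); /* prints main and v1.0 only */
	}
	return 0;
}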
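
Note, not part of the patch: filter_exclude_patterns() keeps only literal patterns, since a pattern containing glob characters cannot be turned into a single seekable prefix, and sorts the survivors so the iterator can walk patterns and references in lockstep. The sketch below mimics that preprocessing; is_glob_special() here is a simplified stand-in for Git's helper of the same name, and the pattern list is invented for the example.

/*
 * Illustrative sketch: drop patterns that contain glob characters, then sort
 * the remaining literal prefixes.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int is_glob_special(char c)
{
	/* Simplified stand-in for Git's is_glob_special(). */
	return c == '*' || c == '?' || c == '[' || c == '\\';
}

static int cmp_str(const void *va, const void *vb)
{
	return strcmp(*(const char *const *)va, *(const char *const *)vb);
}

int main(void)
{
	const char *patterns[] = {
		"refs/tags/",
		"refs/heads/feature/*", /* contains a glob, gets dropped */
		"refs/heads/",
	};
	const char *filtered[3];
	size_t n = 0;

	for (size_t i = 0; i < sizeof(patterns) / sizeof(*patterns); i++) {
		int has_glob = 0;

		for (const char *p = patterns[i]; *p; p++)
			if ((has_glob = is_glob_special(*p)))
				break;
		if (!has_glob)
			filtered[n++] = patterns[i];
	}

	qsort(filtered, n, sizeof(*filtered), cmp_str);

	for (size_t i = 0; i < n; i++)
		printf("%s\n", filtered[i]); /* refs/heads/, refs/tags/ */
	return 0;
}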