@@ -2968,6 +2968,182 @@ foldRemove(garray_T *gap, linenr_T top, linenr_T bot)
     }
 }
 
+/* foldReverseOrder() {{{2 */
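+/*
+ * Reverse the order of the folds with index "start" to "end" (inclusive) in
+ * growarray "gap".
+ */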
+    static void
+foldReverseOrder(garray_T *gap, linenr_T start, linenr_T end)
+{
+    fold_T *left, *right;
+    fold_T tmp;
+
+    for (; start < end; start++, end--)
+    {
+        left = (fold_T *)gap->ga_data + start;
+        right = (fold_T *)gap->ga_data + end;
+        tmp = *left;
+        *left = *right;
+        *right = tmp;
+    }
+}
+
+/* foldMoveRange() {{{2 */
+/*
+ * Move folds within the inclusive range "line1" to "line2" to after "dest";
+ * requires "line1" <= "line2" <= "dest".
+ *
+ * There are the following situations for the first fold at or below line1 - 1.
+ *       1  2  3  4
+ *       1  2  3  4
+ * line1    2  3  4
+ *          2  3  4  5  6  7
+ * line2       3  4  5  6  7
+ *             3  4     6  7  8  9
+ * dest           4        7  8  9
+ *                4        7  8    10
+ *                4        7  8    10
+ *
+ * In the following descriptions, "moved" means moving in the buffer, *and* in
+ * the fold array.
+ * Meanwhile, "shifted" just means moving in the buffer.
+ * 1. not changed
+ * 2. truncated above line1
+ * 3. length reduced by "range_len"; folds starting between the end of 3 and
+ *    dest are truncated and shifted up
+ * 4. internal folds moved (from [line1, line2] to dest)
+ * 5. moved to dest
+ * 6. truncated below line2 and moved
+ * 7. length reduced by "move_len"; folds starting between line2 and dest are
+ *    removed and the top is moved down by "move_len"
+ * 8. truncated below dest and shifted up
+ * 9. shifted up
+ * 10. not changed
+ */
+
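+/*
+ * Shorten fold "fp" so that its last line becomes "end"; nested folds that
+ * reach past the new end are removed or truncated by foldRemove().
+ */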
+    static void
+truncate_fold(fold_T *fp, linenr_T end)
+{
+    foldRemove(&fp->fd_nested, end - fp->fd_top, MAXLNUM);
+    fp->fd_len = end - fp->fd_top + 1;
+}
+
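+/*
+ * fold_end():   last buffer line covered by fold "fp"
+ * valid_fold(): TRUE while "fp" still points inside growarray "gap"
+ * fold_index(): index of "fp" within growarray "gap"
+ */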
+#define fold_end(fp) ((fp)->fd_top + (fp)->fd_len - 1)
+#define valid_fold(fp, gap) ((fp) < ((fold_T *)(gap)->ga_data + (gap)->ga_len))
+#define fold_index(fp, gap) ((size_t)(fp - ((fold_T *)(gap)->ga_data)))
+
+    void
+foldMoveRange(garray_T *gap, linenr_T line1, linenr_T line2, linenr_T dest)
+{
+    fold_T *fp;
+    linenr_T range_len = line2 - line1 + 1;
+    linenr_T move_len = dest - line2;
+    int at_start = foldFind(gap, line1 - 1, &fp);
+    size_t move_start = 0, move_end = 0, dest_index = 0;
+
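+    /*
+     * foldFind() leaves "fp" at the first fold at or below "line1 - 1";
+     * "at_start" is TRUE when that fold actually contains "line1 - 1".
+     */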
+    if (at_start)
+    {
+        if (fold_end(fp) > dest)
+        {
+            /* Case 4
+             * don't have to change this fold, but have to move nested folds.
+             */
+            foldMoveRange(&fp->fd_nested, line1 - fp->fd_top,
+                    line2 - fp->fd_top, dest - fp->fd_top);
+            return;
+        }
+        else if (fold_end(fp) > line2)
+        {
+            /* Case 3
+             * Remove nested folds between line1 and line2 & reduce the
+             * length of fold by "range_len".
+             * Folds after this one must be dealt with.
+             */
+            foldMarkAdjustRecurse(&fp->fd_nested, line1 - fp->fd_top,
+                    line2 - fp->fd_top, MAXLNUM, -range_len);
+            fp->fd_len -= range_len;
+        }
+        else
+            /* Case 2 truncate fold, folds after this one must be dealt with. */
+            truncate_fold(fp, line1);
+
+        /* Look at the next fold, and treat that one as if it were the first
+         * after "line1" (because now it is). */
+        fp = fp + 1;
+    }
+
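+    /*
+     * From here on "fp" is the first fold that starts at or after "line1".
+     */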
+    if (!valid_fold(fp, gap) || fp->fd_top > dest)
+    {
+        /* Case 10
+         * No folds after "line1" and before "dest"
+         */
+        return;
+    }
+    else if (fp->fd_top > line2)
+    {
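+        /* No folds start inside [line1, line2]; only the folds between
+         * "line2" and "dest" are affected (cases 8 and 9). */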
+        for (; valid_fold(fp, gap) && fold_end(fp) < dest; fp++)
+            /* Case 9. (for all case 9's) -- shift up. */
+            fp->fd_top -= range_len;
+
+        if (valid_fold(fp, gap) && fp->fd_top < dest)
+        {
+            /* Case 8. -- ensure truncated at dest, shift up */
+            truncate_fold(fp, dest);
+            fp->fd_top -= range_len;
+        }
+        return;
+    }
+    else if (fold_end(fp) > dest)
+    {
+        /* Case 7 -- remove nested folds and shrink */
+        foldMarkAdjustRecurse(&fp->fd_nested, line2 + 1 - fp->fd_top,
+                dest - fp->fd_top, MAXLNUM, -move_len);
+        fp->fd_len -= move_len;
+        fp->fd_top += move_len;
+        return;
+    }
+
+    /* Case 5 or 6
+     * What happens depends on whether there are folds between the end of
+     * this fold and "dest".
+     */
+    move_start = fold_index(fp, gap);
+
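+    /*
+     * Move the folds that start at or below "line2" down by "move_len" and
+     * shift the folds that start between "line2" and "dest" up by
+     * "range_len"; "move_end" marks the boundary between the two groups.
+     */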
+    for (; valid_fold(fp, gap) && fp->fd_top <= dest; fp++)
+    {
+        if (fp->fd_top <= line2)
+        {
+            /* Case 5 or 6. */
+            if (fold_end(fp) > line2)
+                /* Case 6: truncate before moving. */
+                truncate_fold(fp, line2);
+
+            fp->fd_top += move_len;
+            continue;
+        }
+
+        /* Record index of the first fold after the moved range. */
+        if (move_end == 0)
+            move_end = fold_index(fp, gap);
+
+        if (fold_end(fp) > dest)
+            truncate_fold(fp, dest);
+
+        fp->fd_top -= range_len;
+    }
+
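+    /* "dest_index" is the index of the first fold past "dest" (or past the
+     * end of the array). */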
+    dest_index = fold_index(fp, gap);
+
+    /*
+     * All folds are now correct, but they are not necessarily in the correct
+     * order.  We have to swap folds in the range [move_end, dest_index) with
+     * those in the range [move_start, move_end).
+     */
+    if (move_end == 0)
+        /* There are no folds after the moved ones, so nothing needs to be
+         * reordered. */
+        return;
+
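+    /*
+     * The swap is done with three reversals: reverse the whole range, then
+     * reverse each of the two sub-ranges to restore their internal order.
+     */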
+    foldReverseOrder(gap, move_start, dest_index - 1);
+    foldReverseOrder(gap, move_start, move_start + dest_index - move_end - 1);
+    foldReverseOrder(gap, move_start + dest_index - move_end, dest_index - 1);
+}
+#undef fold_end
+#undef valid_fold
+#undef fold_index
+
 /* foldMerge() {{{2 */
 /*
  * Merge two adjacent folds (and the nested ones in them).