Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,23 @@ You can use custom fzf options by defining `@fzf-url-fzf-options`.
set -g @fzf-url-fzf-options '-w 50% -h 50% --multi -0 --no-preview --no-border'
```

You can control how URLs are sorted by setting `@fzf-url-sort-by`:

```tmux
# Sort URLs alphabetically (default)
set -g @fzf-url-sort-by 'alphabetical'

# Sort URLs by recency so the most recent URLs appear closest to the search bar
set -g @fzf-url-sort-by 'recency'
```

**Smart Layout Detection**: When using recency sorting, the plugin automatically detects your fzf layout and adjusts the sort order to keep the most recent URLs closest to the search bar:

- **Default layout** (search at bottom): Recent URLs appear first (top of list)
- **Reverse layout** (with `--reverse` option): Recent URLs appear last (bottom of list)

This ensures optimal accessibility regardless of your preferred fzf layout.

By default, `tmux-fzf-url` will try `xdg-open`, `open`, and the `BROWSER`
environment variable, in that order, to open the URL. If you want to use a
different command, you can set `@fzf-url-open` to the command you want to use.
Expand Down
46 changes: 35 additions & 11 deletions fzf-url.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ fzf_filter() {
}

custom_open=$3
sort_by=$4
open_url() {
if [[ -n $custom_open ]]; then
$custom_open "$@"
Expand All @@ -37,21 +38,44 @@ else
content="$(tmux capture-pane -J -p -e -S -"$limit" |sed -r 's/\x1B\[[0-9;]*[mK]//g'))"
fi

urls=$(echo "$content" |grep -oE '(https?|ftp|file):/?//[-A-Za-z0-9+&@#/%?=~_|!:,.;]*[-A-Za-z0-9+&@#/%=~_|]')
wwws=$(echo "$content" |grep -oE '(http?s://)?www\.[a-zA-Z](-?[a-zA-Z0-9])+\.[a-zA-Z]{2,}(/\S+)*' | grep -vE '^https?://' |sed 's/^\(.*\)$/http:\/\/\1/')
ips=$(echo "$content" |grep -oE '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}(:[0-9]{1,5})?(/\S+)*' |sed 's/^\(.*\)$/http:\/\/\1/')
gits=$(echo "$content" |grep -oE '(ssh://)?git@\S*' | sed 's/:/\//g' | sed 's/^\(ssh\/\/\/\)\{0,1\}git@\(.*\)$/https:\/\/\2/')
gh=$(echo "$content" |grep -oE "['\"]([_A-Za-z0-9-]*/[_.A-Za-z0-9-]*)['\"]" | sed "s/['\"]//g" | sed 's#.#https://github.com/&#')
# Extract URLs with line numbers to preserve position
urls=$(echo "$content" | grep -noE '(https?|ftp|file):/?//[-A-Za-z0-9+&@#/%?=~_|!:,.;]*[-A-Za-z0-9+&@#/%=~_|]')
wwws=$(echo "$content" | grep -noE '(http?s://)?www\.[a-zA-Z](-?[a-zA-Z0-9])+\.[a-zA-Z]{2,}(/\S+)*' | grep -vE ':[0-9]*:https?://' | sed 's/^\([0-9]*\):\(.*\)$/\1:http:\/\/\2/')
ips=$(echo "$content" | grep -noE '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}(:[0-9]{1,5})?(/\S+)*' | sed 's/^\([0-9]*\):\(.*\)$/\1:http:\/\/\2/')
gits=$(echo "$content" | grep -noE '(ssh://)?git@\S*' | sed 's/:/\//g' | sed 's/^\([0-9]*\)\/\/\(ssh\/\/\/\)\{0,1\}git@\(.*\)$/\1:https:\/\/\3/')
gh=$(echo "$content" | grep -noE "['\"]([_A-Za-z0-9-]*/[_.A-Za-z0-9-]*)['\"]" | sed "s/['\"]//g" | sed 's/^\([0-9]*\):\(.*\)$/\1:https:\/\/github.com\/\2/')

if [[ $# -ge 1 && "$1" != '' ]]; then
extras=$(echo "$content" |eval "$1")
extras=$(echo "$content" | nl -nln | eval "$1" | sed 's/^\([0-9]*\)\t\(.*\)$/\1:\2/')
fi

items=$(printf '%s\n' "${urls[@]}" "${wwws[@]}" "${gh[@]}" "${ips[@]}" "${gits[@]}" "${extras[@]}" |
grep -v '^$' |
sort -u |
nl -w3 -s ' '
)
# Combine all URLs with their line numbers
all_urls=$(printf '%s\n' "${urls[@]}" "${wwws[@]}" "${gh[@]}" "${ips[@]}" "${gits[@]}" "${extras[@]}" | grep -v '^$')

# Sort and deduplicate based on sort_by option
if [[ "$sort_by" == "recency" ]]; then
# Recency behavior: adjust sort order based on fzf layout
fzf_options="$(get_fzf_options)"
if [[ "$fzf_options" == *"--reverse"* ]]; then
# Reverse layout (search at top): oldest URLs first so recent ones are closest to search
sort_order="-n"
else
# Default layout (search at bottom): newest URLs first so recent ones are closest to search
sort_order="-nr"
fi

items=$(echo "$all_urls" | awk -F: '
{
url = substr($0, index($0, ":") + 1)
if (!seen[url]) {
seen[url] = 1
print $1 ":" url
}
Comment on lines +70 to +73
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If the same URL appears multiple times, shouldn’t we keep the line number of its last occurrence instead of the first?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@wfxr Ohhh, that's an interesting idea. wdyt? I could go either way

Copy link
Owner

@wfxr wfxr Jul 28, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@josephschmitt IMO it's more intuitive to sort by the last occurrence. However, if this is much more complicated to implement, the current solution is acceptable.

}' | sort $sort_order | cut -d: -f2- | nl -w3 -s ' ')
else
# Default alphabetical behavior: sort alphabetically and remove duplicates
items=$(echo "$all_urls" | cut -d: -f2- | sort -u | nl -w3 -s ' ')
fi
[ -z "$items" ] && tmux display 'tmux-fzf-url: no URLs found' && exit

fzf_filter <<< "$items" | awk '{print $2}' | \
Expand Down
3 changes: 2 additions & 1 deletion fzf-url.tmux
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ key="$(tmux_get '@fzf-url-bind' 'u')"
history_limit="$(tmux_get '@fzf-url-history-limit' 'screen')"
extra_filter="$(tmux_get '@fzf-url-extra-filter' '')"
custom_open="$(tmux_get '@fzf-url-open' '')"
sort_by="$(tmux_get '@fzf-url-sort-by' 'alphabetical')"
echo "$extra_filter" > /tmp/filter

tmux bind-key "$key" run -b "$SCRIPT_DIR/fzf-url.sh '$extra_filter' $history_limit '$custom_open'";
tmux bind-key "$key" run -b "$SCRIPT_DIR/fzf-url.sh '$extra_filter' $history_limit '$custom_open' '$sort_by'";