mirror of
https://github.com/junegunn/fzf.git
synced 2026-03-13 02:10:51 +08:00
Compare commits
3 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ce58d08ee3 | ||
|
|
997a7e5947 | ||
|
|
88e48619d6 |
2
.github/workflows/labeler.yml
vendored
2
.github/workflows/labeler.yml
vendored
@@ -12,6 +12,6 @@ jobs:
|
|||||||
label:
|
label:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/labeler@v6
|
- uses: actions/labeler@v5
|
||||||
with:
|
with:
|
||||||
configuration-path: .github/labeler.yml
|
configuration-path: .github/labeler.yml
|
||||||
|
|||||||
5
.github/workflows/linux.yml
vendored
5
.github/workflows/linux.yml
vendored
@@ -44,10 +44,5 @@ jobs:
|
|||||||
- name: Unit test
|
- name: Unit test
|
||||||
run: make test
|
run: make test
|
||||||
|
|
||||||
- name: Fuzz test
|
|
||||||
run: |
|
|
||||||
go test ./src/algo/ -fuzz=FuzzIndexByteTwo -fuzztime=5s
|
|
||||||
go test ./src/algo/ -fuzz=FuzzLastIndexByteTwo -fuzztime=5s
|
|
||||||
|
|
||||||
- name: Integration test
|
- name: Integration test
|
||||||
run: make install && ./install --all && tmux new-session -d && ruby test/runner.rb --verbose
|
run: make install && ./install --all && tmux new-session -d && ruby test/runner.rb --verbose
|
||||||
|
|||||||
56
CHANGELOG.md
56
CHANGELOG.md
@@ -1,62 +1,6 @@
|
|||||||
CHANGELOG
|
CHANGELOG
|
||||||
=========
|
=========
|
||||||
|
|
||||||
0.70.1
|
|
||||||
------
|
|
||||||
- Performance improvements
|
|
||||||
- The search performance now scales linearly with the number of CPU cores, as we dropped static partitioning to allow better load balancing across threads.
|
|
||||||
```
|
|
||||||
=== query: 'linux' ===
|
|
||||||
[all] baseline: 17.12ms current: 14.28ms (1.20x) matches: 179966 (12.79%)
|
|
||||||
[1T] baseline: 136.49ms current: 137.25ms (0.99x) matches: 179966 (12.79%)
|
|
||||||
[2T] baseline: 75.74ms current: 68.75ms (1.10x) matches: 179966 (12.79%)
|
|
||||||
[4T] baseline: 41.16ms current: 34.97ms (1.18x) matches: 179966 (12.79%)
|
|
||||||
[8T] baseline: 32.82ms current: 17.79ms (1.84x) matches: 179966 (12.79%)
|
|
||||||
```
|
|
||||||
- Improved the cache structure, reducing memory footprint per entry by 86x.
|
|
||||||
- With the reduced per-entry cost, the cache now has broader coverage.
|
|
||||||
- fish: Improved command history (CTRL-R) (#44703) (@bitraid)
|
|
||||||
- Bug fixes
|
|
||||||
- Fixed AWK tokenizer not treating a new line character as whitespace
|
|
||||||
- Fixed `--{accept,with}-nth` removing trailing whitespaces with a non-default `--delimiter`
|
|
||||||
- Fixed OSC8 hyperlinks being mangled when the URL contains unicode characters (#4707)
|
|
||||||
- Fixed `--with-shell` not handling quoted arguments correctly (#4709)
|
|
||||||
|
|
||||||
0.70.0
|
|
||||||
------
|
|
||||||
- Added `change-with-nth` action for dynamically changing the `--with-nth` option.
|
|
||||||
- Requires `--with-nth` to be set initially.
|
|
||||||
- Multiple options separated by `|` can be given to cycle through.
|
|
||||||
```sh
|
|
||||||
echo -e "a b c\nd e f\ng h i" | fzf --with-nth .. \
|
|
||||||
--bind 'space:change-with-nth(1|2|3|1,3|2,3|)'
|
|
||||||
```
|
|
||||||
- Added `change-header-lines` action for dynamically changing the `--header-lines` option
|
|
||||||
- Performance improvements (1.3x to 1.9x faster filtering depending on query)
|
|
||||||
```
|
|
||||||
=== query: 'l' ===
|
|
||||||
[all] baseline: 168.87ms current: 95.21ms (1.77x) matches: 5069891 (94.78%)
|
|
||||||
[1T] baseline: 1652.22ms current: 841.40ms (1.96x) matches: 5069891 (94.78%)
|
|
||||||
|
|
||||||
=== query: 'lin' ===
|
|
||||||
[all] baseline: 343.27ms current: 252.59ms (1.36x) matches: 3516507 (65.74%)
|
|
||||||
[1T] baseline: 3199.89ms current: 2230.64ms (1.43x) matches: 3516507 (65.74%)
|
|
||||||
|
|
||||||
=== query: 'linux' ===
|
|
||||||
[all] baseline: 85.47ms current: 63.72ms (1.34x) matches: 307229 (5.74%)
|
|
||||||
[1T] baseline: 774.64ms current: 589.32ms (1.31x) matches: 307229 (5.74%)
|
|
||||||
|
|
||||||
=== query: 'linuxlinux' ===
|
|
||||||
[all] baseline: 55.13ms current: 35.67ms (1.55x) matches: 12230 (0.23%)
|
|
||||||
[1T] baseline: 461.99ms current: 332.38ms (1.39x) matches: 12230 (0.23%)
|
|
||||||
|
|
||||||
=== query: 'linuxlinuxlinux' ===
|
|
||||||
[all] baseline: 51.77ms current: 32.53ms (1.59x) matches: 865 (0.02%)
|
|
||||||
[1T] baseline: 409.99ms current: 296.33ms (1.38x) matches: 865 (0.02%)
|
|
||||||
```
|
|
||||||
- Fixed `nth` attribute merge order to respect precedence hierarchy (#4697)
|
|
||||||
- bash: Replaced `printf` with builtin `printf` to bypass local indirections (#4684) (@DarrenBishop)
|
|
||||||
|
|
||||||
0.68.0
|
0.68.0
|
||||||
------
|
------
|
||||||
- Implemented word wrapping in the list section
|
- Implemented word wrapping in the list section
|
||||||
|
|||||||
2
install
2
install
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
set -u
|
set -u
|
||||||
|
|
||||||
version=0.70.0
|
version=0.68.0
|
||||||
auto_completion=
|
auto_completion=
|
||||||
key_bindings=
|
key_bindings=
|
||||||
update_config=2
|
update_config=2
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
$version="0.70.0"
|
$version="0.68.0"
|
||||||
|
|
||||||
$fzf_base=Split-Path -Parent $MyInvocation.MyCommand.Definition
|
$fzf_base=Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||||
|
|
||||||
|
|||||||
2
main.go
2
main.go
@@ -11,7 +11,7 @@ import (
|
|||||||
"github.com/junegunn/fzf/src/protector"
|
"github.com/junegunn/fzf/src/protector"
|
||||||
)
|
)
|
||||||
|
|
||||||
var version = "0.70"
|
var version = "0.68"
|
||||||
var revision = "devel"
|
var revision = "devel"
|
||||||
|
|
||||||
//go:embed shell/key-bindings.bash
|
//go:embed shell/key-bindings.bash
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
..
|
..
|
||||||
.TH fzf\-tmux 1 "Mar 2026" "fzf 0.70.0" "fzf\-tmux - open fzf in tmux split pane"
|
.TH fzf\-tmux 1 "Feb 2026" "fzf 0.68.0" "fzf\-tmux - open fzf in tmux split pane"
|
||||||
|
|
||||||
.SH NAME
|
.SH NAME
|
||||||
fzf\-tmux - open fzf in tmux split pane
|
fzf\-tmux - open fzf in tmux split pane
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
..
|
..
|
||||||
.TH fzf 1 "Mar 2026" "fzf 0.70.0" "fzf - a command-line fuzzy finder"
|
.TH fzf 1 "Feb 2026" "fzf 0.68.0" "fzf - a command-line fuzzy finder"
|
||||||
|
|
||||||
.SH NAME
|
.SH NAME
|
||||||
fzf - a command-line fuzzy finder
|
fzf - a command-line fuzzy finder
|
||||||
@@ -134,14 +134,6 @@ e.g.
|
|||||||
# Use template to rearrange fields
|
# Use template to rearrange fields
|
||||||
echo foo,bar,baz | fzf --delimiter , --with-nth '{n},{1},{3},{2},{1..2}'
|
echo foo,bar,baz | fzf --delimiter , --with-nth '{n},{1},{3},{2},{1..2}'
|
||||||
.RE
|
.RE
|
||||||
.RS
|
|
||||||
|
|
||||||
\fBchange\-with\-nth\fR action is only available when \fB\-\-with\-nth\fR is set.
|
|
||||||
When \fB\-\-with\-nth\fR is used, fzf retains the original input lines in memory
|
|
||||||
so they can be re\-transformed on the fly (e.g. \fB\-\-with\-nth ..\fR to keep
|
|
||||||
the original presentation). This increases memory usage, so only use
|
|
||||||
\fB\-\-with\-nth\fR when you actually need field transformation.
|
|
||||||
.RE
|
|
||||||
.TP
|
.TP
|
||||||
.BI "\-\-accept\-nth=" "N[,..] or TEMPLATE"
|
.BI "\-\-accept\-nth=" "N[,..] or TEMPLATE"
|
||||||
Define which fields to print on accept. The last delimiter is stripped from the
|
Define which fields to print on accept. The last delimiter is stripped from the
|
||||||
@@ -1237,18 +1229,6 @@ Here is an example script that uses a Unix socket instead of a TCP port.
|
|||||||
curl --unix-socket /tmp/fzf.sock http -d up
|
curl --unix-socket /tmp/fzf.sock http -d up
|
||||||
\fR
|
\fR
|
||||||
|
|
||||||
.TP
|
|
||||||
.BI "\-\-threads=" "N"
|
|
||||||
Number of matcher threads to use. The default value is
|
|
||||||
\fBmin(8 * NUM_CPU, 32)\fR.
|
|
||||||
.TP
|
|
||||||
.BI "\-\-bench=" "DURATION"
|
|
||||||
Repeatedly run \fB\-\-filter\fR for the given duration and print timing
|
|
||||||
statistics. Must be used with \fB\-\-filter\fR.
|
|
||||||
|
|
||||||
e.g.
|
|
||||||
\fBcat /usr/share/dict/words | fzf \-\-filter abc \-\-bench 10s\fR
|
|
||||||
|
|
||||||
.SS DIRECTORY TRAVERSAL
|
.SS DIRECTORY TRAVERSAL
|
||||||
.TP
|
.TP
|
||||||
.B "\-\-walker=[file][,dir][,follow][,hidden]"
|
.B "\-\-walker=[file][,dir][,follow][,hidden]"
|
||||||
@@ -1418,8 +1398,6 @@ fzf exports the following environment variables to its child processes.
|
|||||||
.br
|
.br
|
||||||
.BR FZF_NTH " Current \-\-nth option"
|
.BR FZF_NTH " Current \-\-nth option"
|
||||||
.br
|
.br
|
||||||
.BR FZF_WITH_NTH " Current \-\-with\-nth option"
|
|
||||||
.br
|
|
||||||
.BR FZF_PROMPT " Prompt string"
|
.BR FZF_PROMPT " Prompt string"
|
||||||
.br
|
.br
|
||||||
.BR FZF_GHOST " Ghost string"
|
.BR FZF_GHOST " Ghost string"
|
||||||
@@ -1910,7 +1888,6 @@ A key or an event can be bound to one or more of the following actions.
|
|||||||
\fBchange\-multi\fR (enable multi-select mode with no limit)
|
\fBchange\-multi\fR (enable multi-select mode with no limit)
|
||||||
\fBchange\-multi(...)\fR (enable multi-select mode with a limit or disable it with 0)
|
\fBchange\-multi(...)\fR (enable multi-select mode with a limit or disable it with 0)
|
||||||
\fBchange\-nth(...)\fR (change \fB\-\-nth\fR option; rotate through the multiple options separated by '|')
|
\fBchange\-nth(...)\fR (change \fB\-\-nth\fR option; rotate through the multiple options separated by '|')
|
||||||
\fBchange\-with\-nth(...)\fR (change \fB\-\-with\-nth\fR option; rotate through the multiple options separated by '|')
|
|
||||||
\fBchange\-pointer(...)\fR (change \fB\-\-pointer\fR option)
|
\fBchange\-pointer(...)\fR (change \fB\-\-pointer\fR option)
|
||||||
\fBchange\-preview(...)\fR (change \fB\-\-preview\fR option)
|
\fBchange\-preview(...)\fR (change \fB\-\-preview\fR option)
|
||||||
\fBchange\-preview\-label(...)\fR (change \fB\-\-preview\-label\fR to the given string)
|
\fBchange\-preview\-label(...)\fR (change \fB\-\-preview\-label\fR to the given string)
|
||||||
@@ -2016,7 +1993,6 @@ A key or an event can be bound to one or more of the following actions.
|
|||||||
\fBtransform\-input\-label(...)\fR (transform input label using an external command)
|
\fBtransform\-input\-label(...)\fR (transform input label using an external command)
|
||||||
\fBtransform\-list\-label(...)\fR (transform list label using an external command)
|
\fBtransform\-list\-label(...)\fR (transform list label using an external command)
|
||||||
\fBtransform\-nth(...)\fR (transform nth using an external command)
|
\fBtransform\-nth(...)\fR (transform nth using an external command)
|
||||||
\fBtransform\-with\-nth(...)\fR (transform with-nth using an external command)
|
|
||||||
\fBtransform\-pointer(...)\fR (transform pointer using an external command)
|
\fBtransform\-pointer(...)\fR (transform pointer using an external command)
|
||||||
\fBtransform\-preview\-label(...)\fR (transform preview label using an external command)
|
\fBtransform\-preview\-label(...)\fR (transform preview label using an external command)
|
||||||
\fBtransform\-prompt(...)\fR (transform prompt string using an external command)
|
\fBtransform\-prompt(...)\fR (transform prompt string using an external command)
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
__fzf_defaults() {
|
__fzf_defaults() {
|
||||||
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
# $1: Prepend to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||||
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
# $2: Append to FZF_DEFAULT_OPTS_FILE and FZF_DEFAULT_OPTS
|
||||||
builtin printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
builtin printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_exec_awk() {
|
__fzf_exec_awk() {
|
||||||
|
|||||||
@@ -38,9 +38,9 @@ if [[ $- =~ i ]]; then
|
|||||||
# the changes. See code comments in "common.sh" for the implementation details.
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
__fzf_defaults() {
|
__fzf_defaults() {
|
||||||
builtin printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
builtin printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_exec_awk() {
|
__fzf_exec_awk() {
|
||||||
@@ -81,7 +81,7 @@ __fzf_orig_completion() {
|
|||||||
f="${BASH_REMATCH[2]}"
|
f="${BASH_REMATCH[2]}"
|
||||||
cmd="${BASH_REMATCH[3]}"
|
cmd="${BASH_REMATCH[3]}"
|
||||||
[[ $f == _fzf_* ]] && continue
|
[[ $f == _fzf_* ]] && continue
|
||||||
builtin printf -v "_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}" "%s" "${comp} %s ${cmd} #${f}"
|
printf -v "_fzf_orig_completion_${cmd//[^A-Za-z0-9_]/_}" "%s" "${comp} %s ${cmd} #${f}"
|
||||||
if [[ $l == *" -o nospace "* ]] && [[ ${__fzf_nospace_commands-} != *" $cmd "* ]]; then
|
if [[ $l == *" -o nospace "* ]] && [[ ${__fzf_nospace_commands-} != *" $cmd "* ]]; then
|
||||||
__fzf_nospace_commands="${__fzf_nospace_commands-} $cmd "
|
__fzf_nospace_commands="${__fzf_nospace_commands-} $cmd "
|
||||||
fi
|
fi
|
||||||
@@ -111,7 +111,7 @@ __fzf_orig_completion_instantiate() {
|
|||||||
orig="${!orig_var-}"
|
orig="${!orig_var-}"
|
||||||
orig="${orig%#*}"
|
orig="${orig%#*}"
|
||||||
[[ $orig == *' %s '* ]] || return 1
|
[[ $orig == *' %s '* ]] || return 1
|
||||||
builtin printf -v REPLY "$orig" "$func"
|
printf -v REPLY "$orig" "$func"
|
||||||
}
|
}
|
||||||
|
|
||||||
_fzf_opts_completion() {
|
_fzf_opts_completion() {
|
||||||
@@ -376,7 +376,7 @@ __fzf_generic_path_completion() {
|
|||||||
eval "rest=(${FZF_COMPLETION_PATH_OPTS-})"
|
eval "rest=(${FZF_COMPLETION_PATH_OPTS-})"
|
||||||
fi
|
fi
|
||||||
if declare -F "$1" > /dev/null; then
|
if declare -F "$1" > /dev/null; then
|
||||||
eval "$1 $(builtin printf %q "$dir")" | __fzf_comprun "$4" -q "$leftover" "${rest[@]}"
|
eval "$1 $(printf %q "$dir")" | __fzf_comprun "$4" -q "$leftover" "${rest[@]}"
|
||||||
else
|
else
|
||||||
if [[ $1 =~ dir ]]; then
|
if [[ $1 =~ dir ]]; then
|
||||||
walker=dir,follow
|
walker=dir,follow
|
||||||
@@ -385,7 +385,7 @@ __fzf_generic_path_completion() {
|
|||||||
fi
|
fi
|
||||||
__fzf_comprun "$4" -q "$leftover" --walker "$walker" --walker-root="$dir" "${rest[@]}"
|
__fzf_comprun "$4" -q "$leftover" --walker "$walker" --walker-root="$dir" "${rest[@]}"
|
||||||
fi | while read -r item; do
|
fi | while read -r item; do
|
||||||
builtin printf "%q " "${item%$3}$3"
|
printf "%q " "${item%$3}$3"
|
||||||
done
|
done
|
||||||
)
|
)
|
||||||
matches=${matches% }
|
matches=${matches% }
|
||||||
@@ -395,9 +395,9 @@ __fzf_generic_path_completion() {
|
|||||||
else
|
else
|
||||||
COMPREPLY=("$cur")
|
COMPREPLY=("$cur")
|
||||||
fi
|
fi
|
||||||
# To redraw line after fzf closes (builtin printf '\e[5n')
|
# To redraw line after fzf closes (printf '\e[5n')
|
||||||
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
||||||
builtin printf '\e[5n'
|
printf '\e[5n'
|
||||||
return 0
|
return 0
|
||||||
fi
|
fi
|
||||||
dir=$(command dirname "$dir")
|
dir=$(command dirname "$dir")
|
||||||
@@ -455,7 +455,7 @@ _fzf_complete() {
|
|||||||
COMPREPLY=("$cur")
|
COMPREPLY=("$cur")
|
||||||
fi
|
fi
|
||||||
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
bind '"\e[0n": redraw-current-line' 2> /dev/null
|
||||||
builtin printf '\e[5n'
|
printf '\e[5n'
|
||||||
return 0
|
return 0
|
||||||
else
|
else
|
||||||
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
_fzf_handle_dynamic_completion "$cmd" "${rest[@]}"
|
||||||
@@ -527,7 +527,7 @@ _fzf_proc_completion_post() {
|
|||||||
# # Set the local attribute for any non-local variable that is set by _known_hosts_real()
|
# # Set the local attribute for any non-local variable that is set by _known_hosts_real()
|
||||||
# local COMPREPLY=()
|
# local COMPREPLY=()
|
||||||
# _known_hosts_real ''
|
# _known_hosts_real ''
|
||||||
# builtin printf '%s\n' "${COMPREPLY[@]}" | command sort -u --version-sort
|
# printf '%s\n' "${COMPREPLY[@]}" | command sort -u --version-sort
|
||||||
# }
|
# }
|
||||||
if ! declare -F __fzf_list_hosts > /dev/null; then
|
if ! declare -F __fzf_list_hosts > /dev/null; then
|
||||||
__fzf_list_hosts() {
|
__fzf_list_hosts() {
|
||||||
|
|||||||
@@ -102,9 +102,9 @@ if [[ -o interactive ]]; then
|
|||||||
# the changes. See code comments in "common.sh" for the implementation details.
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
__fzf_defaults() {
|
__fzf_defaults() {
|
||||||
builtin printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
builtin printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_exec_awk() {
|
__fzf_exec_awk() {
|
||||||
|
|||||||
@@ -25,9 +25,9 @@ if [[ $- =~ i ]]; then
|
|||||||
# the changes. See code comments in "common.sh" for the implementation details.
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
__fzf_defaults() {
|
__fzf_defaults() {
|
||||||
builtin printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
builtin printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_exec_awk() {
|
__fzf_exec_awk() {
|
||||||
|
|||||||
@@ -45,9 +45,9 @@ if [[ -o interactive ]]; then
|
|||||||
# the changes. See code comments in "common.sh" for the implementation details.
|
# the changes. See code comments in "common.sh" for the implementation details.
|
||||||
|
|
||||||
__fzf_defaults() {
|
__fzf_defaults() {
|
||||||
builtin printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
printf '%s\n' "--height ${FZF_TMUX_HEIGHT:-40%} --min-height 20+ --bind=ctrl-z:ignore $1"
|
||||||
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
command cat "${FZF_DEFAULT_OPTS_FILE-}" 2> /dev/null
|
||||||
builtin printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
printf '%s\n' "${FZF_DEFAULT_OPTS-} $2"
|
||||||
}
|
}
|
||||||
|
|
||||||
__fzf_exec_awk() {
|
__fzf_exec_awk() {
|
||||||
|
|||||||
@@ -38,159 +38,156 @@ func _() {
|
|||||||
_ = x[actChangeListLabel-27]
|
_ = x[actChangeListLabel-27]
|
||||||
_ = x[actChangeMulti-28]
|
_ = x[actChangeMulti-28]
|
||||||
_ = x[actChangeNth-29]
|
_ = x[actChangeNth-29]
|
||||||
_ = x[actChangeWithNth-30]
|
_ = x[actChangePointer-30]
|
||||||
_ = x[actChangePointer-31]
|
_ = x[actChangePreview-31]
|
||||||
_ = x[actChangePreview-32]
|
_ = x[actChangePreviewLabel-32]
|
||||||
_ = x[actChangePreviewLabel-33]
|
_ = x[actChangePreviewWindow-33]
|
||||||
_ = x[actChangePreviewWindow-34]
|
_ = x[actChangePrompt-34]
|
||||||
_ = x[actChangePrompt-35]
|
_ = x[actChangeQuery-35]
|
||||||
_ = x[actChangeQuery-36]
|
_ = x[actClearScreen-36]
|
||||||
_ = x[actClearScreen-37]
|
_ = x[actClearQuery-37]
|
||||||
_ = x[actClearQuery-38]
|
_ = x[actClearSelection-38]
|
||||||
_ = x[actClearSelection-39]
|
_ = x[actClose-39]
|
||||||
_ = x[actClose-40]
|
_ = x[actDeleteChar-40]
|
||||||
_ = x[actDeleteChar-41]
|
_ = x[actDeleteCharEof-41]
|
||||||
_ = x[actDeleteCharEof-42]
|
_ = x[actEndOfLine-42]
|
||||||
_ = x[actEndOfLine-43]
|
_ = x[actFatal-43]
|
||||||
_ = x[actFatal-44]
|
_ = x[actForwardChar-44]
|
||||||
_ = x[actForwardChar-45]
|
_ = x[actForwardWord-45]
|
||||||
_ = x[actForwardWord-46]
|
_ = x[actForwardSubWord-46]
|
||||||
_ = x[actForwardSubWord-47]
|
_ = x[actKillLine-47]
|
||||||
_ = x[actKillLine-48]
|
_ = x[actKillWord-48]
|
||||||
_ = x[actKillWord-49]
|
_ = x[actKillSubWord-49]
|
||||||
_ = x[actKillSubWord-50]
|
_ = x[actUnixLineDiscard-50]
|
||||||
_ = x[actUnixLineDiscard-51]
|
_ = x[actUnixWordRubout-51]
|
||||||
_ = x[actUnixWordRubout-52]
|
_ = x[actYank-52]
|
||||||
_ = x[actYank-53]
|
_ = x[actBackwardKillWord-53]
|
||||||
_ = x[actBackwardKillWord-54]
|
_ = x[actBackwardKillSubWord-54]
|
||||||
_ = x[actBackwardKillSubWord-55]
|
_ = x[actSelectAll-55]
|
||||||
_ = x[actSelectAll-56]
|
_ = x[actDeselectAll-56]
|
||||||
_ = x[actDeselectAll-57]
|
_ = x[actToggle-57]
|
||||||
_ = x[actToggle-58]
|
_ = x[actToggleSearch-58]
|
||||||
_ = x[actToggleSearch-59]
|
_ = x[actToggleAll-59]
|
||||||
_ = x[actToggleAll-60]
|
_ = x[actToggleDown-60]
|
||||||
_ = x[actToggleDown-61]
|
_ = x[actToggleUp-61]
|
||||||
_ = x[actToggleUp-62]
|
_ = x[actToggleIn-62]
|
||||||
_ = x[actToggleIn-63]
|
_ = x[actToggleOut-63]
|
||||||
_ = x[actToggleOut-64]
|
_ = x[actToggleTrack-64]
|
||||||
_ = x[actToggleTrack-65]
|
_ = x[actToggleTrackCurrent-65]
|
||||||
_ = x[actToggleTrackCurrent-66]
|
_ = x[actToggleHeader-66]
|
||||||
_ = x[actToggleHeader-67]
|
_ = x[actToggleWrap-67]
|
||||||
_ = x[actToggleWrap-68]
|
_ = x[actToggleWrapWord-68]
|
||||||
_ = x[actToggleWrapWord-69]
|
_ = x[actToggleMultiLine-69]
|
||||||
_ = x[actToggleMultiLine-70]
|
_ = x[actToggleHscroll-70]
|
||||||
_ = x[actToggleHscroll-71]
|
_ = x[actToggleRaw-71]
|
||||||
_ = x[actToggleRaw-72]
|
_ = x[actEnableRaw-72]
|
||||||
_ = x[actEnableRaw-73]
|
_ = x[actDisableRaw-73]
|
||||||
_ = x[actDisableRaw-74]
|
_ = x[actTrackCurrent-74]
|
||||||
_ = x[actTrackCurrent-75]
|
_ = x[actToggleInput-75]
|
||||||
_ = x[actToggleInput-76]
|
_ = x[actHideInput-76]
|
||||||
_ = x[actHideInput-77]
|
_ = x[actShowInput-77]
|
||||||
_ = x[actShowInput-78]
|
_ = x[actUntrackCurrent-78]
|
||||||
_ = x[actUntrackCurrent-79]
|
_ = x[actDown-79]
|
||||||
_ = x[actDown-80]
|
_ = x[actDownMatch-80]
|
||||||
_ = x[actDownMatch-81]
|
_ = x[actUp-81]
|
||||||
_ = x[actUp-82]
|
_ = x[actUpMatch-82]
|
||||||
_ = x[actUpMatch-83]
|
_ = x[actPageUp-83]
|
||||||
_ = x[actPageUp-84]
|
_ = x[actPageDown-84]
|
||||||
_ = x[actPageDown-85]
|
_ = x[actPosition-85]
|
||||||
_ = x[actPosition-86]
|
_ = x[actHalfPageUp-86]
|
||||||
_ = x[actHalfPageUp-87]
|
_ = x[actHalfPageDown-87]
|
||||||
_ = x[actHalfPageDown-88]
|
_ = x[actOffsetUp-88]
|
||||||
_ = x[actOffsetUp-89]
|
_ = x[actOffsetDown-89]
|
||||||
_ = x[actOffsetDown-90]
|
_ = x[actOffsetMiddle-90]
|
||||||
_ = x[actOffsetMiddle-91]
|
_ = x[actJump-91]
|
||||||
_ = x[actJump-92]
|
_ = x[actJumpAccept-92]
|
||||||
_ = x[actJumpAccept-93]
|
_ = x[actPrintQuery-93]
|
||||||
_ = x[actPrintQuery-94]
|
_ = x[actRefreshPreview-94]
|
||||||
_ = x[actRefreshPreview-95]
|
_ = x[actReplaceQuery-95]
|
||||||
_ = x[actReplaceQuery-96]
|
_ = x[actToggleSort-96]
|
||||||
_ = x[actToggleSort-97]
|
_ = x[actShowPreview-97]
|
||||||
_ = x[actShowPreview-98]
|
_ = x[actHidePreview-98]
|
||||||
_ = x[actHidePreview-99]
|
_ = x[actTogglePreview-99]
|
||||||
_ = x[actTogglePreview-100]
|
_ = x[actTogglePreviewWrap-100]
|
||||||
_ = x[actTogglePreviewWrap-101]
|
_ = x[actTogglePreviewWrapWord-101]
|
||||||
_ = x[actTogglePreviewWrapWord-102]
|
_ = x[actTransform-102]
|
||||||
_ = x[actTransform-103]
|
_ = x[actTransformBorderLabel-103]
|
||||||
_ = x[actTransformBorderLabel-104]
|
_ = x[actTransformGhost-104]
|
||||||
_ = x[actTransformGhost-105]
|
_ = x[actTransformHeader-105]
|
||||||
_ = x[actTransformHeader-106]
|
_ = x[actTransformHeaderLines-106]
|
||||||
_ = x[actTransformHeaderLines-107]
|
_ = x[actTransformFooter-107]
|
||||||
_ = x[actTransformFooter-108]
|
_ = x[actTransformHeaderLabel-108]
|
||||||
_ = x[actTransformHeaderLabel-109]
|
_ = x[actTransformFooterLabel-109]
|
||||||
_ = x[actTransformFooterLabel-110]
|
_ = x[actTransformInputLabel-110]
|
||||||
_ = x[actTransformInputLabel-111]
|
_ = x[actTransformListLabel-111]
|
||||||
_ = x[actTransformListLabel-112]
|
_ = x[actTransformNth-112]
|
||||||
_ = x[actTransformNth-113]
|
_ = x[actTransformPointer-113]
|
||||||
_ = x[actTransformWithNth-114]
|
_ = x[actTransformPreviewLabel-114]
|
||||||
_ = x[actTransformPointer-115]
|
_ = x[actTransformPrompt-115]
|
||||||
_ = x[actTransformPreviewLabel-116]
|
_ = x[actTransformQuery-116]
|
||||||
_ = x[actTransformPrompt-117]
|
_ = x[actTransformSearch-117]
|
||||||
_ = x[actTransformQuery-118]
|
_ = x[actTrigger-118]
|
||||||
_ = x[actTransformSearch-119]
|
_ = x[actBgTransform-119]
|
||||||
_ = x[actTrigger-120]
|
_ = x[actBgTransformBorderLabel-120]
|
||||||
_ = x[actBgTransform-121]
|
_ = x[actBgTransformGhost-121]
|
||||||
_ = x[actBgTransformBorderLabel-122]
|
_ = x[actBgTransformHeader-122]
|
||||||
_ = x[actBgTransformGhost-123]
|
_ = x[actBgTransformHeaderLines-123]
|
||||||
_ = x[actBgTransformHeader-124]
|
_ = x[actBgTransformFooter-124]
|
||||||
_ = x[actBgTransformHeaderLines-125]
|
_ = x[actBgTransformHeaderLabel-125]
|
||||||
_ = x[actBgTransformFooter-126]
|
_ = x[actBgTransformFooterLabel-126]
|
||||||
_ = x[actBgTransformHeaderLabel-127]
|
_ = x[actBgTransformInputLabel-127]
|
||||||
_ = x[actBgTransformFooterLabel-128]
|
_ = x[actBgTransformListLabel-128]
|
||||||
_ = x[actBgTransformInputLabel-129]
|
_ = x[actBgTransformNth-129]
|
||||||
_ = x[actBgTransformListLabel-130]
|
_ = x[actBgTransformPointer-130]
|
||||||
_ = x[actBgTransformNth-131]
|
_ = x[actBgTransformPreviewLabel-131]
|
||||||
_ = x[actBgTransformWithNth-132]
|
_ = x[actBgTransformPrompt-132]
|
||||||
_ = x[actBgTransformPointer-133]
|
_ = x[actBgTransformQuery-133]
|
||||||
_ = x[actBgTransformPreviewLabel-134]
|
_ = x[actBgTransformSearch-134]
|
||||||
_ = x[actBgTransformPrompt-135]
|
_ = x[actBgCancel-135]
|
||||||
_ = x[actBgTransformQuery-136]
|
_ = x[actSearch-136]
|
||||||
_ = x[actBgTransformSearch-137]
|
_ = x[actPreview-137]
|
||||||
_ = x[actBgCancel-138]
|
_ = x[actPreviewTop-138]
|
||||||
_ = x[actSearch-139]
|
_ = x[actPreviewBottom-139]
|
||||||
_ = x[actPreview-140]
|
_ = x[actPreviewUp-140]
|
||||||
_ = x[actPreviewTop-141]
|
_ = x[actPreviewDown-141]
|
||||||
_ = x[actPreviewBottom-142]
|
_ = x[actPreviewPageUp-142]
|
||||||
_ = x[actPreviewUp-143]
|
_ = x[actPreviewPageDown-143]
|
||||||
_ = x[actPreviewDown-144]
|
_ = x[actPreviewHalfPageUp-144]
|
||||||
_ = x[actPreviewPageUp-145]
|
_ = x[actPreviewHalfPageDown-145]
|
||||||
_ = x[actPreviewPageDown-146]
|
_ = x[actPrevHistory-146]
|
||||||
_ = x[actPreviewHalfPageUp-147]
|
_ = x[actPrevSelected-147]
|
||||||
_ = x[actPreviewHalfPageDown-148]
|
_ = x[actPrint-148]
|
||||||
_ = x[actPrevHistory-149]
|
_ = x[actPut-149]
|
||||||
_ = x[actPrevSelected-150]
|
_ = x[actNextHistory-150]
|
||||||
_ = x[actPrint-151]
|
_ = x[actNextSelected-151]
|
||||||
_ = x[actPut-152]
|
_ = x[actExecute-152]
|
||||||
_ = x[actNextHistory-153]
|
_ = x[actExecuteSilent-153]
|
||||||
_ = x[actNextSelected-154]
|
_ = x[actExecuteMulti-154]
|
||||||
_ = x[actExecute-155]
|
_ = x[actSigStop-155]
|
||||||
_ = x[actExecuteSilent-156]
|
_ = x[actBest-156]
|
||||||
_ = x[actExecuteMulti-157]
|
_ = x[actFirst-157]
|
||||||
_ = x[actSigStop-158]
|
_ = x[actLast-158]
|
||||||
_ = x[actBest-159]
|
_ = x[actReload-159]
|
||||||
_ = x[actFirst-160]
|
_ = x[actReloadSync-160]
|
||||||
_ = x[actLast-161]
|
_ = x[actDisableSearch-161]
|
||||||
_ = x[actReload-162]
|
_ = x[actEnableSearch-162]
|
||||||
_ = x[actReloadSync-163]
|
_ = x[actSelect-163]
|
||||||
_ = x[actDisableSearch-164]
|
_ = x[actDeselect-164]
|
||||||
_ = x[actEnableSearch-165]
|
_ = x[actUnbind-165]
|
||||||
_ = x[actSelect-166]
|
_ = x[actRebind-166]
|
||||||
_ = x[actDeselect-167]
|
_ = x[actToggleBind-167]
|
||||||
_ = x[actUnbind-168]
|
_ = x[actBecome-168]
|
||||||
_ = x[actRebind-169]
|
_ = x[actShowHeader-169]
|
||||||
_ = x[actToggleBind-170]
|
_ = x[actHideHeader-170]
|
||||||
_ = x[actBecome-171]
|
_ = x[actBell-171]
|
||||||
_ = x[actShowHeader-172]
|
_ = x[actExclude-172]
|
||||||
_ = x[actHideHeader-173]
|
_ = x[actExcludeMulti-173]
|
||||||
_ = x[actBell-174]
|
_ = x[actAsync-174]
|
||||||
_ = x[actExclude-175]
|
|
||||||
_ = x[actExcludeMulti-176]
|
|
||||||
_ = x[actAsync-177]
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const _actionType_name = "actIgnoreactStartactClickactInvalidactBracketedPasteBeginactBracketedPasteEndactCharactMouseactBeginningOfLineactAbortactAcceptactAcceptNonEmptyactAcceptOrPrintQueryactBackwardCharactBackwardDeleteCharactBackwardDeleteCharEofactBackwardWordactBackwardSubWordactCancelactChangeBorderLabelactChangeGhostactChangeHeaderactChangeHeaderLinesactChangeFooteractChangeHeaderLabelactChangeFooterLabelactChangeInputLabelactChangeListLabelactChangeMultiactChangeNthactChangeWithNthactChangePointeractChangePreviewactChangePreviewLabelactChangePreviewWindowactChangePromptactChangeQueryactClearScreenactClearQueryactClearSelectionactCloseactDeleteCharactDeleteCharEofactEndOfLineactFatalactForwardCharactForwardWordactForwardSubWordactKillLineactKillWordactKillSubWordactUnixLineDiscardactUnixWordRuboutactYankactBackwardKillWordactBackwardKillSubWordactSelectAllactDeselectAllactToggleactToggleSearchactToggleAllactToggleDownactToggleUpactToggleInactToggleOutactToggleTrackactToggleTrackCurrentactToggleHeaderactToggleWrapactToggleWrapWordactToggleMultiLineactToggleHscrollactToggleRawactEnableRawactDisableRawactTrackCurrentactToggleInputactHideInputactShowInputactUntrackCurrentactDownactDownMatchactUpactUpMatchactPageUpactPageDownactPositionactHalfPageUpactHalfPageDownactOffsetUpactOffsetDownactOffsetMiddleactJumpactJumpAcceptactPrintQueryactRefreshPreviewactReplaceQueryactToggleSortactShowPreviewactHidePreviewactTogglePreviewactTogglePreviewWrapactTogglePreviewWrapWordactTransformactTransformBorderLabelactTransformGhostactTransformHeaderactTransformHeaderLinesactTransformFooteractTransformHeaderLabelactTransformFooterLabelactTransformInputLabelactTransformListLabelactTransformNthactTransformWithNthactTransformPointeractTransformPreviewLabelactTransformPromptactTransformQueryactTransformSearchactTriggeractBgTransformactBgTransformBorderLabelactBgTransformGhostactBgTransformHeaderactBgTransformHeaderLinesactBgTransformFooteractBgTransformHeaderLabelactBgTransformFoote
rLabelactBgTransformInputLabelactBgTransformListLabelactBgTransformNthactBgTransformWithNthactBgTransformPointeractBgTransformPreviewLabelactBgTransformPromptactBgTransformQueryactBgTransformSearchactBgCancelactSearchactPreviewactPreviewTopactPreviewBottomactPreviewUpactPreviewDownactPreviewPageUpactPreviewPageDownactPreviewHalfPageUpactPreviewHalfPageDownactPrevHistoryactPrevSelectedactPrintactPutactNextHistoryactNextSelectedactExecuteactExecuteSilentactExecuteMultiactSigStopactBestactFirstactLastactReloadactReloadSyncactDisableSearchactEnableSearchactSelectactDeselectactUnbindactRebindactToggleBindactBecomeactShowHeaderactHideHeaderactBellactExcludeactExcludeMultiactAsync"
|
const _actionType_name = "actIgnoreactStartactClickactInvalidactBracketedPasteBeginactBracketedPasteEndactCharactMouseactBeginningOfLineactAbortactAcceptactAcceptNonEmptyactAcceptOrPrintQueryactBackwardCharactBackwardDeleteCharactBackwardDeleteCharEofactBackwardWordactBackwardSubWordactCancelactChangeBorderLabelactChangeGhostactChangeHeaderactChangeHeaderLinesactChangeFooteractChangeHeaderLabelactChangeFooterLabelactChangeInputLabelactChangeListLabelactChangeMultiactChangeNthactChangePointeractChangePreviewactChangePreviewLabelactChangePreviewWindowactChangePromptactChangeQueryactClearScreenactClearQueryactClearSelectionactCloseactDeleteCharactDeleteCharEofactEndOfLineactFatalactForwardCharactForwardWordactForwardSubWordactKillLineactKillWordactKillSubWordactUnixLineDiscardactUnixWordRuboutactYankactBackwardKillWordactBackwardKillSubWordactSelectAllactDeselectAllactToggleactToggleSearchactToggleAllactToggleDownactToggleUpactToggleInactToggleOutactToggleTrackactToggleTrackCurrentactToggleHeaderactToggleWrapactToggleWrapWordactToggleMultiLineactToggleHscrollactToggleRawactEnableRawactDisableRawactTrackCurrentactToggleInputactHideInputactShowInputactUntrackCurrentactDownactDownMatchactUpactUpMatchactPageUpactPageDownactPositionactHalfPageUpactHalfPageDownactOffsetUpactOffsetDownactOffsetMiddleactJumpactJumpAcceptactPrintQueryactRefreshPreviewactReplaceQueryactToggleSortactShowPreviewactHidePreviewactTogglePreviewactTogglePreviewWrapactTogglePreviewWrapWordactTransformactTransformBorderLabelactTransformGhostactTransformHeaderactTransformHeaderLinesactTransformFooteractTransformHeaderLabelactTransformFooterLabelactTransformInputLabelactTransformListLabelactTransformNthactTransformPointeractTransformPreviewLabelactTransformPromptactTransformQueryactTransformSearchactTriggeractBgTransformactBgTransformBorderLabelactBgTransformGhostactBgTransformHeaderactBgTransformHeaderLinesactBgTransformFooteractBgTransformHeaderLabelactBgTransformFooterLabelactBgTransformInputLabelactBg
TransformListLabelactBgTransformNthactBgTransformPointeractBgTransformPreviewLabelactBgTransformPromptactBgTransformQueryactBgTransformSearchactBgCancelactSearchactPreviewactPreviewTopactPreviewBottomactPreviewUpactPreviewDownactPreviewPageUpactPreviewPageDownactPreviewHalfPageUpactPreviewHalfPageDownactPrevHistoryactPrevSelectedactPrintactPutactNextHistoryactNextSelectedactExecuteactExecuteSilentactExecuteMultiactSigStopactBestactFirstactLastactReloadactReloadSyncactDisableSearchactEnableSearchactSelectactDeselectactUnbindactRebindactToggleBindactBecomeactShowHeaderactHideHeaderactBellactExcludeactExcludeMultiactAsync"
|
||||||
|
|
||||||
var _actionType_index = [...]uint16{0, 9, 17, 25, 35, 57, 77, 84, 92, 110, 118, 127, 144, 165, 180, 201, 225, 240, 258, 267, 287, 301, 316, 336, 351, 371, 391, 410, 428, 442, 454, 470, 486, 502, 523, 545, 560, 574, 588, 601, 618, 626, 639, 655, 667, 675, 689, 703, 720, 731, 742, 756, 774, 791, 798, 817, 839, 851, 865, 874, 889, 901, 914, 925, 936, 948, 962, 983, 998, 1011, 1028, 1046, 1062, 1074, 1086, 1099, 1114, 1128, 1140, 1152, 1169, 1176, 1188, 1193, 1203, 1212, 1223, 1234, 1247, 1262, 1273, 1286, 1301, 1308, 1321, 1334, 1351, 1366, 1379, 1393, 1407, 1423, 1443, 1467, 1479, 1502, 1519, 1537, 1560, 1578, 1601, 1624, 1646, 1667, 1682, 1701, 1720, 1744, 1762, 1779, 1797, 1807, 1821, 1846, 1865, 1885, 1910, 1930, 1955, 1980, 2004, 2027, 2044, 2065, 2086, 2112, 2132, 2151, 2171, 2182, 2191, 2201, 2214, 2230, 2242, 2256, 2272, 2290, 2310, 2332, 2346, 2361, 2369, 2375, 2389, 2404, 2414, 2430, 2445, 2455, 2462, 2470, 2477, 2486, 2499, 2515, 2530, 2539, 2550, 2559, 2568, 2581, 2590, 2603, 2616, 2623, 2633, 2648, 2656}
|
var _actionType_index = [...]uint16{0, 9, 17, 25, 35, 57, 77, 84, 92, 110, 118, 127, 144, 165, 180, 201, 225, 240, 258, 267, 287, 301, 316, 336, 351, 371, 391, 410, 428, 442, 454, 470, 486, 507, 529, 544, 558, 572, 585, 602, 610, 623, 639, 651, 659, 673, 687, 704, 715, 726, 740, 758, 775, 782, 801, 823, 835, 849, 858, 873, 885, 898, 909, 920, 932, 946, 967, 982, 995, 1012, 1030, 1046, 1058, 1070, 1083, 1098, 1112, 1124, 1136, 1153, 1160, 1172, 1177, 1187, 1196, 1207, 1218, 1231, 1246, 1257, 1270, 1285, 1292, 1305, 1318, 1335, 1350, 1363, 1377, 1391, 1407, 1427, 1451, 1463, 1486, 1503, 1521, 1544, 1562, 1585, 1608, 1630, 1651, 1666, 1685, 1709, 1727, 1744, 1762, 1772, 1786, 1811, 1830, 1850, 1875, 1895, 1920, 1945, 1969, 1992, 2009, 2030, 2056, 2076, 2095, 2115, 2126, 2135, 2145, 2158, 2174, 2186, 2200, 2216, 2234, 2254, 2276, 2290, 2305, 2313, 2319, 2333, 2348, 2358, 2374, 2389, 2399, 2406, 2414, 2421, 2430, 2443, 2459, 2474, 2483, 2494, 2503, 2512, 2525, 2534, 2547, 2560, 2567, 2577, 2592, 2600}
|
||||||
|
|
||||||
func (i actionType) String() string {
|
func (i actionType) String() string {
|
||||||
if i < 0 || i >= actionType(len(_actionType_index)-1) {
|
if i < 0 || i >= actionType(len(_actionType_index)-1) {
|
||||||
|
|||||||
@@ -1,99 +0,0 @@
|
|||||||
# SIMD byte search: `indexByteTwo` / `lastIndexByteTwo`
|
|
||||||
|
|
||||||
## What these functions do
|
|
||||||
|
|
||||||
`indexByteTwo(s []byte, b1, b2 byte) int` — returns the index of the
|
|
||||||
**first** occurrence of `b1` or `b2` in `s`, or `-1`.
|
|
||||||
|
|
||||||
`lastIndexByteTwo(s []byte, b1, b2 byte) int` — returns the index of the
|
|
||||||
**last** occurrence of `b1` or `b2` in `s`, or `-1`.
|
|
||||||
|
|
||||||
They are used by the fuzzy matching algorithm (`algo.go`) to skip ahead
|
|
||||||
during case-insensitive search. Instead of calling `bytes.IndexByte` twice
|
|
||||||
(once for lowercase, once for uppercase), a single SIMD pass finds both at
|
|
||||||
once.
|
|
||||||
|
|
||||||
## File layout
|
|
||||||
|
|
||||||
| File | Purpose |
|
|
||||||
| ------ | --------- |
|
|
||||||
| `indexbyte2_arm64.go` | Go declarations (`//go:noescape`) for ARM64 |
|
|
||||||
| `indexbyte2_arm64.s` | ARM64 NEON assembly (32-byte aligned blocks, syndrome extraction) |
|
|
||||||
| `indexbyte2_amd64.go` | Go declarations + AVX2 runtime detection for AMD64 |
|
|
||||||
| `indexbyte2_amd64.s` | AMD64 AVX2/SSE2 assembly with CPUID dispatch |
|
|
||||||
| `indexbyte2_other.go` | Pure Go fallback for all other architectures |
|
|
||||||
| `indexbyte2_test.go` | Unit tests, exhaustive tests, fuzz tests, and benchmarks |
|
|
||||||
|
|
||||||
## How the SIMD implementations work
|
|
||||||
|
|
||||||
**ARM64 (NEON):**
|
|
||||||
- Broadcasts both needle bytes into NEON registers (`VMOV`).
|
|
||||||
- Processes 32-byte aligned chunks. For each chunk, compares all bytes
|
|
||||||
against both needles (`VCMEQ`), ORs the results (`VORR`), and builds a
|
|
||||||
64-bit syndrome with 2 bits per byte.
|
|
||||||
- `indexByteTwo` uses `RBIT` + `CLZ` to find the lowest set bit (first match).
|
|
||||||
- `lastIndexByteTwo` scans backward and uses `CLZ` on the raw syndrome to
|
|
||||||
find the highest set bit (last match).
|
|
||||||
- Handles alignment and partial first/last blocks with bit masking.
|
|
||||||
- Adapted from Go's `internal/bytealg/indexbyte_arm64.s`.
|
|
||||||
|
|
||||||
**AMD64 (AVX2 with SSE2 fallback):**
|
|
||||||
- At init time, `cpuHasAVX2()` checks CPUID + XGETBV for AVX2 and OS YMM
|
|
||||||
support. The result is cached in `_useAVX2`.
|
|
||||||
- **AVX2 path** (inputs >= 32 bytes, when available):
|
|
||||||
- Broadcasts both needles via `VPBROADCASTB`.
|
|
||||||
- Processes 32-byte blocks: `VPCMPEQB` against both needles, `VPOR`, then
|
|
||||||
`VPMOVMSKB` to get a 32-bit mask.
|
|
||||||
- 5 instructions per loop iteration (vs 7 for SSE2) at 2x the throughput.
|
|
||||||
- `VZEROUPPER` before every return to avoid SSE/AVX transition penalties.
|
|
||||||
- **SSE2 fallback** (inputs < 32 bytes, or CPUs without AVX2):
|
|
||||||
- Broadcasts via `PUNPCKLBW` + `PSHUFL`.
|
|
||||||
- Processes 16-byte blocks: `PCMPEQB`, `POR`, `PMOVMSKB`.
|
|
||||||
- Small inputs (<16 bytes) are handled with page-boundary-safe loads.
|
|
||||||
- Both paths use `BSFL` (forward) / `BSRL` (reverse) for bit scanning.
|
|
||||||
- Adapted from Go's `internal/bytealg/indexbyte_amd64.s`.
|
|
||||||
|
|
||||||
**Fallback (other platforms):**
|
|
||||||
- `indexByteTwo` uses two `bytes.IndexByte` calls with scope-limiting
|
|
||||||
(search `b1` first, then limit the `b2` search to `s[:i1]`).
|
|
||||||
- `lastIndexByteTwo` uses a simple backward for loop.
|
|
||||||
|
|
||||||
## Running tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Unit + exhaustive tests
|
|
||||||
go test ./src/algo/ -run 'TestIndexByteTwo|TestLastIndexByteTwo' -v
|
|
||||||
|
|
||||||
# Fuzz tests (run for 10 seconds each)
|
|
||||||
go test ./src/algo/ -run '^$' -fuzz FuzzIndexByteTwo -fuzztime 10s
|
|
||||||
go test ./src/algo/ -run '^$' -fuzz FuzzLastIndexByteTwo -fuzztime 10s
|
|
||||||
|
|
||||||
# Cross-architecture: test amd64 on an arm64 Mac (via Rosetta)
|
|
||||||
GOARCH=amd64 go test ./src/algo/ -run 'TestIndexByteTwo|TestLastIndexByteTwo' -v
|
|
||||||
GOARCH=amd64 go test ./src/algo/ -run '^$' -fuzz FuzzIndexByteTwo -fuzztime 10s
|
|
||||||
GOARCH=amd64 go test ./src/algo/ -run '^$' -fuzz FuzzLastIndexByteTwo -fuzztime 10s
|
|
||||||
```
|
|
||||||
|
|
||||||
## Running micro-benchmarks
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# All indexByteTwo / lastIndexByteTwo benchmarks
|
|
||||||
go test ./src/algo/ -bench 'IndexByteTwo' -benchmem
|
|
||||||
|
|
||||||
# Specific size
|
|
||||||
go test ./src/algo/ -bench 'IndexByteTwo_1000'
|
|
||||||
```
|
|
||||||
|
|
||||||
Each benchmark compares the SIMD `asm` implementation against reference
|
|
||||||
implementations (`2xIndexByte` using `bytes.IndexByte`, and a simple `loop`).
|
|
||||||
|
|
||||||
## Correctness verification
|
|
||||||
|
|
||||||
The assembly is verified by three layers of testing:
|
|
||||||
|
|
||||||
1. **Table-driven tests** — known inputs with expected outputs.
|
|
||||||
2. **Exhaustive tests** — all lengths 0–256, every match position, no-match
|
|
||||||
cases, and both-bytes-present cases, compared against a simple loop
|
|
||||||
reference.
|
|
||||||
3. **Fuzz tests** — randomized inputs via `testing.F`, compared against the
|
|
||||||
same loop reference.
|
|
||||||
@@ -321,15 +321,22 @@ type Algo func(caseSensitive bool, normalize bool, forward bool, input *util.Cha
|
|||||||
|
|
||||||
func trySkip(input *util.Chars, caseSensitive bool, b byte, from int) int {
|
func trySkip(input *util.Chars, caseSensitive bool, b byte, from int) int {
|
||||||
byteArray := input.Bytes()[from:]
|
byteArray := input.Bytes()[from:]
|
||||||
// For case-insensitive search of a letter, search for both cases in one pass
|
|
||||||
if !caseSensitive && b >= 'a' && b <= 'z' {
|
|
||||||
idx := IndexByteTwo(byteArray, b, b-32)
|
|
||||||
if idx < 0 {
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
return from + idx
|
|
||||||
}
|
|
||||||
idx := bytes.IndexByte(byteArray, b)
|
idx := bytes.IndexByte(byteArray, b)
|
||||||
|
if idx == 0 {
|
||||||
|
// Can't skip any further
|
||||||
|
return from
|
||||||
|
}
|
||||||
|
// We may need to search for the uppercase letter again. We don't have to
|
||||||
|
// consider normalization as we can be sure that this is an ASCII string.
|
||||||
|
if !caseSensitive && b >= 'a' && b <= 'z' {
|
||||||
|
if idx > 0 {
|
||||||
|
byteArray = byteArray[:idx]
|
||||||
|
}
|
||||||
|
uidx := bytes.IndexByte(byteArray, b-32)
|
||||||
|
if uidx >= 0 {
|
||||||
|
idx = uidx
|
||||||
|
}
|
||||||
|
}
|
||||||
if idx < 0 {
|
if idx < 0 {
|
||||||
return -1
|
return -1
|
||||||
}
|
}
|
||||||
@@ -373,17 +380,14 @@ func asciiFuzzyIndex(input *util.Chars, pattern []rune, caseSensitive bool) (int
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Find the last appearance of the last character of the pattern to limit the search scope
|
// Find the last appearance of the last character of the pattern to limit the search scope
|
||||||
scope := input.Bytes()[lastIdx:]
|
bu := b
|
||||||
if len(scope) > 1 {
|
|
||||||
tail := scope[1:]
|
|
||||||
var end int
|
|
||||||
if !caseSensitive && b >= 'a' && b <= 'z' {
|
if !caseSensitive && b >= 'a' && b <= 'z' {
|
||||||
end = lastIndexByteTwo(tail, b, b-32)
|
bu = b - 32
|
||||||
} else {
|
|
||||||
end = bytes.LastIndexByte(tail, b)
|
|
||||||
}
|
}
|
||||||
if end >= 0 {
|
scope := input.Bytes()[lastIdx:]
|
||||||
return firstIdx, lastIdx + 1 + end + 1
|
for offset := len(scope) - 1; offset > 0; offset-- {
|
||||||
|
if scope[offset] == b || scope[offset] == bu {
|
||||||
|
return firstIdx, lastIdx + offset + 1
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return firstIdx, lastIdx + 1
|
return firstIdx, lastIdx + 1
|
||||||
|
|||||||
@@ -1,24 +0,0 @@
|
|||||||
//go:build amd64
|
|
||||||
|
|
||||||
package algo
|
|
||||||
|
|
||||||
var _useAVX2 bool
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
_useAVX2 = cpuHasAVX2()
|
|
||||||
}
|
|
||||||
|
|
||||||
//go:noescape
|
|
||||||
func cpuHasAVX2() bool
|
|
||||||
|
|
||||||
// indexByteTwo returns the index of the first occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present. Uses AVX2 when available, SSE2 otherwise.
|
|
||||||
//
|
|
||||||
//go:noescape
|
|
||||||
func IndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
|
|
||||||
// lastIndexByteTwo returns the index of the last occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present. Uses AVX2 when available, SSE2 otherwise.
|
|
||||||
//
|
|
||||||
//go:noescape
|
|
||||||
func lastIndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
@@ -1,377 +0,0 @@
|
|||||||
#include "textflag.h"
|
|
||||||
|
|
||||||
// func cpuHasAVX2() bool
|
|
||||||
//
|
|
||||||
// Checks CPUID and XGETBV for AVX2 + OS YMM support.
|
|
||||||
TEXT ·cpuHasAVX2(SB),NOSPLIT,$0-1
|
|
||||||
MOVQ BX, R8 // save BX (callee-saved, clobbered by CPUID)
|
|
||||||
|
|
||||||
// Check max CPUID leaf >= 7
|
|
||||||
MOVL $0, AX
|
|
||||||
CPUID
|
|
||||||
CMPL AX, $7
|
|
||||||
JL cpuid_no
|
|
||||||
|
|
||||||
// Check OSXSAVE (CPUID.1:ECX bit 27)
|
|
||||||
MOVL $1, AX
|
|
||||||
CPUID
|
|
||||||
TESTL $(1<<27), CX
|
|
||||||
JZ cpuid_no
|
|
||||||
|
|
||||||
// Check AVX2 (CPUID.7.0:EBX bit 5)
|
|
||||||
MOVL $7, AX
|
|
||||||
MOVL $0, CX
|
|
||||||
CPUID
|
|
||||||
TESTL $(1<<5), BX
|
|
||||||
JZ cpuid_no
|
|
||||||
|
|
||||||
// Check OS YMM state support via XGETBV
|
|
||||||
MOVL $0, CX
|
|
||||||
BYTE $0x0F; BYTE $0x01; BYTE $0xD0 // XGETBV → EDX:EAX
|
|
||||||
ANDL $6, AX // bits 1 (XMM) and 2 (YMM)
|
|
||||||
CMPL AX, $6
|
|
||||||
JNE cpuid_no
|
|
||||||
|
|
||||||
MOVQ R8, BX // restore BX
|
|
||||||
MOVB $1, ret+0(FP)
|
|
||||||
RET
|
|
||||||
|
|
||||||
cpuid_no:
|
|
||||||
MOVQ R8, BX
|
|
||||||
MOVB $0, ret+0(FP)
|
|
||||||
RET
|
|
||||||
|
|
||||||
// func IndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
//
|
|
||||||
// Returns the index of the first occurrence of b1 or b2 in s, or -1.
|
|
||||||
// Uses AVX2 (32 bytes/iter) when available, SSE2 (16 bytes/iter) otherwise.
|
|
||||||
TEXT ·IndexByteTwo(SB),NOSPLIT,$0-40
|
|
||||||
MOVQ s_base+0(FP), SI
|
|
||||||
MOVQ s_len+8(FP), BX
|
|
||||||
MOVBLZX b1+24(FP), AX
|
|
||||||
MOVBLZX b2+25(FP), CX
|
|
||||||
LEAQ ret+32(FP), R8
|
|
||||||
|
|
||||||
TESTQ BX, BX
|
|
||||||
JEQ fwd_failure
|
|
||||||
|
|
||||||
// Try AVX2 for inputs >= 32 bytes
|
|
||||||
CMPQ BX, $32
|
|
||||||
JLT fwd_sse2
|
|
||||||
CMPB ·_useAVX2(SB), $1
|
|
||||||
JNE fwd_sse2
|
|
||||||
|
|
||||||
// ====== AVX2 forward search ======
|
|
||||||
MOVD AX, X0
|
|
||||||
VPBROADCASTB X0, Y0 // Y0 = splat(b1)
|
|
||||||
MOVD CX, X1
|
|
||||||
VPBROADCASTB X1, Y1 // Y1 = splat(b2)
|
|
||||||
|
|
||||||
MOVQ SI, DI
|
|
||||||
LEAQ -32(SI)(BX*1), AX // AX = last valid 32-byte chunk
|
|
||||||
JMP fwd_avx2_entry
|
|
||||||
|
|
||||||
fwd_avx2_loop:
|
|
||||||
VMOVDQU (DI), Y2
|
|
||||||
VPCMPEQB Y0, Y2, Y3
|
|
||||||
VPCMPEQB Y1, Y2, Y4
|
|
||||||
VPOR Y3, Y4, Y3
|
|
||||||
VPMOVMSKB Y3, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JNZ fwd_avx2_success
|
|
||||||
ADDQ $32, DI
|
|
||||||
|
|
||||||
fwd_avx2_entry:
|
|
||||||
CMPQ DI, AX
|
|
||||||
JB fwd_avx2_loop
|
|
||||||
|
|
||||||
// Last 32-byte chunk (may overlap with previous)
|
|
||||||
MOVQ AX, DI
|
|
||||||
VMOVDQU (AX), Y2
|
|
||||||
VPCMPEQB Y0, Y2, Y3
|
|
||||||
VPCMPEQB Y1, Y2, Y4
|
|
||||||
VPOR Y3, Y4, Y3
|
|
||||||
VPMOVMSKB Y3, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JNZ fwd_avx2_success
|
|
||||||
|
|
||||||
MOVQ $-1, (R8)
|
|
||||||
VZEROUPPER
|
|
||||||
RET
|
|
||||||
|
|
||||||
fwd_avx2_success:
|
|
||||||
SUBQ SI, DI
|
|
||||||
ADDQ DX, DI
|
|
||||||
MOVQ DI, (R8)
|
|
||||||
VZEROUPPER
|
|
||||||
RET
|
|
||||||
|
|
||||||
// ====== SSE2 forward search (< 32 bytes or no AVX2) ======
|
|
||||||
|
|
||||||
fwd_sse2:
|
|
||||||
// Broadcast b1 into X0
|
|
||||||
MOVD AX, X0
|
|
||||||
PUNPCKLBW X0, X0
|
|
||||||
PUNPCKLBW X0, X0
|
|
||||||
PSHUFL $0, X0, X0
|
|
||||||
|
|
||||||
// Broadcast b2 into X4
|
|
||||||
MOVD CX, X4
|
|
||||||
PUNPCKLBW X4, X4
|
|
||||||
PUNPCKLBW X4, X4
|
|
||||||
PSHUFL $0, X4, X4
|
|
||||||
|
|
||||||
CMPQ BX, $16
|
|
||||||
JLT fwd_small
|
|
||||||
|
|
||||||
MOVQ SI, DI
|
|
||||||
LEAQ -16(SI)(BX*1), AX
|
|
||||||
JMP fwd_sseloopentry
|
|
||||||
|
|
||||||
fwd_sseloop:
|
|
||||||
MOVOU (DI), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JNZ fwd_ssesuccess
|
|
||||||
ADDQ $16, DI
|
|
||||||
|
|
||||||
fwd_sseloopentry:
|
|
||||||
CMPQ DI, AX
|
|
||||||
JB fwd_sseloop
|
|
||||||
|
|
||||||
// Search the last 16-byte chunk (may overlap)
|
|
||||||
MOVQ AX, DI
|
|
||||||
MOVOU (AX), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JNZ fwd_ssesuccess
|
|
||||||
|
|
||||||
fwd_failure:
|
|
||||||
MOVQ $-1, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
fwd_ssesuccess:
|
|
||||||
SUBQ SI, DI
|
|
||||||
ADDQ DX, DI
|
|
||||||
MOVQ DI, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
fwd_small:
|
|
||||||
// Check if loading 16 bytes from SI would cross a page boundary
|
|
||||||
LEAQ 16(SI), AX
|
|
||||||
TESTW $0xff0, AX
|
|
||||||
JEQ fwd_endofpage
|
|
||||||
|
|
||||||
MOVOU (SI), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JZ fwd_failure
|
|
||||||
CMPL DX, BX
|
|
||||||
JAE fwd_failure
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
fwd_endofpage:
|
|
||||||
MOVOU -16(SI)(BX*1), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
MOVL BX, CX
|
|
||||||
SHLL CX, DX
|
|
||||||
SHRL $16, DX
|
|
||||||
BSFL DX, DX
|
|
||||||
JZ fwd_failure
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
// func lastIndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
//
|
|
||||||
// Returns the index of the last occurrence of b1 or b2 in s, or -1.
|
|
||||||
// Uses AVX2 (32 bytes/iter) when available, SSE2 (16 bytes/iter) otherwise.
|
|
||||||
TEXT ·lastIndexByteTwo(SB),NOSPLIT,$0-40
|
|
||||||
MOVQ s_base+0(FP), SI
|
|
||||||
MOVQ s_len+8(FP), BX
|
|
||||||
MOVBLZX b1+24(FP), AX
|
|
||||||
MOVBLZX b2+25(FP), CX
|
|
||||||
LEAQ ret+32(FP), R8
|
|
||||||
|
|
||||||
TESTQ BX, BX
|
|
||||||
JEQ back_failure
|
|
||||||
|
|
||||||
// Try AVX2 for inputs >= 32 bytes
|
|
||||||
CMPQ BX, $32
|
|
||||||
JLT back_sse2
|
|
||||||
CMPB ·_useAVX2(SB), $1
|
|
||||||
JNE back_sse2
|
|
||||||
|
|
||||||
// ====== AVX2 backward search ======
|
|
||||||
MOVD AX, X0
|
|
||||||
VPBROADCASTB X0, Y0
|
|
||||||
MOVD CX, X1
|
|
||||||
VPBROADCASTB X1, Y1
|
|
||||||
|
|
||||||
// DI = start of last 32-byte chunk
|
|
||||||
LEAQ -32(SI)(BX*1), DI
|
|
||||||
|
|
||||||
back_avx2_loop:
|
|
||||||
CMPQ DI, SI
|
|
||||||
JBE back_avx2_first
|
|
||||||
|
|
||||||
VMOVDQU (DI), Y2
|
|
||||||
VPCMPEQB Y0, Y2, Y3
|
|
||||||
VPCMPEQB Y1, Y2, Y4
|
|
||||||
VPOR Y3, Y4, Y3
|
|
||||||
VPMOVMSKB Y3, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JNZ back_avx2_success
|
|
||||||
SUBQ $32, DI
|
|
||||||
JMP back_avx2_loop
|
|
||||||
|
|
||||||
back_avx2_first:
|
|
||||||
// First 32 bytes (DI <= SI, load from SI)
|
|
||||||
VMOVDQU (SI), Y2
|
|
||||||
VPCMPEQB Y0, Y2, Y3
|
|
||||||
VPCMPEQB Y1, Y2, Y4
|
|
||||||
VPOR Y3, Y4, Y3
|
|
||||||
VPMOVMSKB Y3, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JNZ back_avx2_firstsuccess
|
|
||||||
|
|
||||||
MOVQ $-1, (R8)
|
|
||||||
VZEROUPPER
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_avx2_success:
|
|
||||||
SUBQ SI, DI
|
|
||||||
ADDQ DX, DI
|
|
||||||
MOVQ DI, (R8)
|
|
||||||
VZEROUPPER
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_avx2_firstsuccess:
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
VZEROUPPER
|
|
||||||
RET
|
|
||||||
|
|
||||||
// ====== SSE2 backward search (< 32 bytes or no AVX2) ======
|
|
||||||
|
|
||||||
back_sse2:
|
|
||||||
// Broadcast b1 into X0
|
|
||||||
MOVD AX, X0
|
|
||||||
PUNPCKLBW X0, X0
|
|
||||||
PUNPCKLBW X0, X0
|
|
||||||
PSHUFL $0, X0, X0
|
|
||||||
|
|
||||||
// Broadcast b2 into X4
|
|
||||||
MOVD CX, X4
|
|
||||||
PUNPCKLBW X4, X4
|
|
||||||
PUNPCKLBW X4, X4
|
|
||||||
PSHUFL $0, X4, X4
|
|
||||||
|
|
||||||
CMPQ BX, $16
|
|
||||||
JLT back_small
|
|
||||||
|
|
||||||
// DI = start of last 16-byte chunk
|
|
||||||
LEAQ -16(SI)(BX*1), DI
|
|
||||||
|
|
||||||
back_sseloop:
|
|
||||||
CMPQ DI, SI
|
|
||||||
JBE back_ssefirst
|
|
||||||
|
|
||||||
MOVOU (DI), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JNZ back_ssesuccess
|
|
||||||
SUBQ $16, DI
|
|
||||||
JMP back_sseloop
|
|
||||||
|
|
||||||
back_ssefirst:
|
|
||||||
// First 16 bytes (DI <= SI, load from SI)
|
|
||||||
MOVOU (SI), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JNZ back_ssefirstsuccess
|
|
||||||
|
|
||||||
back_failure:
|
|
||||||
MOVQ $-1, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_ssesuccess:
|
|
||||||
SUBQ SI, DI
|
|
||||||
ADDQ DX, DI
|
|
||||||
MOVQ DI, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_ssefirstsuccess:
|
|
||||||
// DX = byte offset from base
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_small:
|
|
||||||
// Check page boundary
|
|
||||||
LEAQ 16(SI), AX
|
|
||||||
TESTW $0xff0, AX
|
|
||||||
JEQ back_endofpage
|
|
||||||
|
|
||||||
MOVOU (SI), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
// Mask to first BX bytes: keep bits 0..BX-1
|
|
||||||
MOVL $1, AX
|
|
||||||
MOVL BX, CX
|
|
||||||
SHLL CX, AX
|
|
||||||
DECL AX
|
|
||||||
ANDL AX, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JZ back_failure
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
back_endofpage:
|
|
||||||
// Load 16 bytes ending at base+n
|
|
||||||
MOVOU -16(SI)(BX*1), X1
|
|
||||||
MOVOU X1, X2
|
|
||||||
PCMPEQB X0, X1
|
|
||||||
PCMPEQB X4, X2
|
|
||||||
POR X2, X1
|
|
||||||
PMOVMSKB X1, DX
|
|
||||||
// Bits correspond to bytes [base+n-16, base+n).
|
|
||||||
// We want original bytes [0, n), which are bits [16-n, 16).
|
|
||||||
// Mask: keep bits (16-n) through 15.
|
|
||||||
MOVL $16, CX
|
|
||||||
SUBL BX, CX
|
|
||||||
SHRL CX, DX
|
|
||||||
SHLL CX, DX
|
|
||||||
BSRL DX, DX
|
|
||||||
JZ back_failure
|
|
||||||
// DX is the bit position in the loaded chunk.
|
|
||||||
// Original byte index = DX - (16 - n) = DX + n - 16
|
|
||||||
ADDL BX, DX
|
|
||||||
SUBL $16, DX
|
|
||||||
MOVQ DX, (R8)
|
|
||||||
RET
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
//go:build arm64
|
|
||||||
|
|
||||||
package algo
|
|
||||||
|
|
||||||
// indexByteTwo returns the index of the first occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present. Implemented in assembly using ARM64 NEON
|
|
||||||
// to search for both bytes in a single pass.
|
|
||||||
//
|
|
||||||
//go:noescape
|
|
||||||
func IndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
|
|
||||||
// lastIndexByteTwo returns the index of the last occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present. Implemented in assembly using ARM64 NEON,
|
|
||||||
// scanning backward.
|
|
||||||
//
|
|
||||||
//go:noescape
|
|
||||||
func lastIndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
@@ -1,249 +0,0 @@
|
|||||||
#include "textflag.h"
|
|
||||||
|
|
||||||
// func IndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
//
|
|
||||||
// Returns the index of the first occurrence of b1 or b2 in s, or -1.
|
|
||||||
// Uses ARM64 NEON to search for both bytes in a single pass over the data.
|
|
||||||
// Adapted from Go's internal/bytealg/indexbyte_arm64.s (single-byte version).
|
|
||||||
TEXT ·IndexByteTwo(SB),NOSPLIT,$0-40
|
|
||||||
MOVD s_base+0(FP), R0
|
|
||||||
MOVD s_len+8(FP), R2
|
|
||||||
MOVBU b1+24(FP), R1
|
|
||||||
MOVBU b2+25(FP), R7
|
|
||||||
MOVD $ret+32(FP), R8
|
|
||||||
|
|
||||||
// Core algorithm:
|
|
||||||
// For each 32-byte chunk we calculate a 64-bit syndrome value,
|
|
||||||
// with two bits per byte. We compare against both b1 and b2,
|
|
||||||
// OR the results, then use the same syndrome extraction as
|
|
||||||
// Go's IndexByte.
|
|
||||||
|
|
||||||
CBZ R2, fail
|
|
||||||
MOVD R0, R11
|
|
||||||
// Magic constant 0x40100401 allows us to identify which lane matches.
|
|
||||||
// Each byte in the group of 4 gets a distinct bit: 1, 4, 16, 64.
|
|
||||||
MOVD $0x40100401, R5
|
|
||||||
VMOV R1, V0.B16 // V0 = splat(b1)
|
|
||||||
VMOV R7, V7.B16 // V7 = splat(b2)
|
|
||||||
// Work with aligned 32-byte chunks
|
|
||||||
BIC $0x1f, R0, R3
|
|
||||||
VMOV R5, V5.S4
|
|
||||||
ANDS $0x1f, R0, R9
|
|
||||||
AND $0x1f, R2, R10
|
|
||||||
BEQ loop
|
|
||||||
|
|
||||||
// Input string is not 32-byte aligned. Process the first
|
|
||||||
// aligned 32-byte block and mask off bytes before our start.
|
|
||||||
VLD1.P (R3), [V1.B16, V2.B16]
|
|
||||||
SUB $0x20, R9, R4
|
|
||||||
ADDS R4, R2, R2
|
|
||||||
// Compare against both needles
|
|
||||||
VCMEQ V0.B16, V1.B16, V3.B16 // b1 vs first 16 bytes
|
|
||||||
VCMEQ V7.B16, V1.B16, V8.B16 // b2 vs first 16 bytes
|
|
||||||
VORR V8.B16, V3.B16, V3.B16 // combine
|
|
||||||
VCMEQ V0.B16, V2.B16, V4.B16 // b1 vs second 16 bytes
|
|
||||||
VCMEQ V7.B16, V2.B16, V9.B16 // b2 vs second 16 bytes
|
|
||||||
VORR V9.B16, V4.B16, V4.B16 // combine
|
|
||||||
// Build syndrome
|
|
||||||
VAND V5.B16, V3.B16, V3.B16
|
|
||||||
VAND V5.B16, V4.B16, V4.B16
|
|
||||||
VADDP V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.B16, V6.B16, V6.B16
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
// Clear the irrelevant lower bits
|
|
||||||
LSL $1, R9, R4
|
|
||||||
LSR R4, R6, R6
|
|
||||||
LSL R4, R6, R6
|
|
||||||
// The first block can also be the last
|
|
||||||
BLS masklast
|
|
||||||
// Have we found something already?
|
|
||||||
CBNZ R6, tail
|
|
||||||
|
|
||||||
loop:
|
|
||||||
VLD1.P (R3), [V1.B16, V2.B16]
|
|
||||||
SUBS $0x20, R2, R2
|
|
||||||
// Compare against both needles, OR results
|
|
||||||
VCMEQ V0.B16, V1.B16, V3.B16
|
|
||||||
VCMEQ V7.B16, V1.B16, V8.B16
|
|
||||||
VORR V8.B16, V3.B16, V3.B16
|
|
||||||
VCMEQ V0.B16, V2.B16, V4.B16
|
|
||||||
VCMEQ V7.B16, V2.B16, V9.B16
|
|
||||||
VORR V9.B16, V4.B16, V4.B16
|
|
||||||
// If we're out of data we finish regardless of the result
|
|
||||||
BLS end
|
|
||||||
// Fast check: OR both halves and check for any match
|
|
||||||
VORR V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.D2, V6.D2, V6.D2
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
CBZ R6, loop
|
|
||||||
|
|
||||||
end:
|
|
||||||
// Found something or out of data — build full syndrome
|
|
||||||
VAND V5.B16, V3.B16, V3.B16
|
|
||||||
VAND V5.B16, V4.B16, V4.B16
|
|
||||||
VADDP V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.B16, V6.B16, V6.B16
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
// Only mask for the last block
|
|
||||||
BHS tail
|
|
||||||
|
|
||||||
masklast:
|
|
||||||
// Clear irrelevant upper bits
|
|
||||||
ADD R9, R10, R4
|
|
||||||
AND $0x1f, R4, R4
|
|
||||||
SUB $0x20, R4, R4
|
|
||||||
NEG R4<<1, R4
|
|
||||||
LSL R4, R6, R6
|
|
||||||
LSR R4, R6, R6
|
|
||||||
|
|
||||||
tail:
|
|
||||||
CBZ R6, fail
|
|
||||||
RBIT R6, R6
|
|
||||||
SUB $0x20, R3, R3
|
|
||||||
CLZ R6, R6
|
|
||||||
ADD R6>>1, R3, R0
|
|
||||||
SUB R11, R0, R0
|
|
||||||
MOVD R0, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
fail:
|
|
||||||
MOVD $-1, R0
|
|
||||||
MOVD R0, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
// func lastIndexByteTwo(s []byte, b1, b2 byte) int
|
|
||||||
//
|
|
||||||
// Returns the index of the last occurrence of b1 or b2 in s, or -1.
|
|
||||||
// Scans backward using ARM64 NEON.
|
|
||||||
TEXT ·lastIndexByteTwo(SB),NOSPLIT,$0-40
|
|
||||||
MOVD s_base+0(FP), R0
|
|
||||||
MOVD s_len+8(FP), R2
|
|
||||||
MOVBU b1+24(FP), R1
|
|
||||||
MOVBU b2+25(FP), R7
|
|
||||||
MOVD $ret+32(FP), R8
|
|
||||||
|
|
||||||
CBZ R2, lfail
|
|
||||||
MOVD R0, R11 // save base
|
|
||||||
ADD R0, R2, R12 // R12 = end = base + len
|
|
||||||
MOVD $0x40100401, R5
|
|
||||||
VMOV R1, V0.B16 // V0 = splat(b1)
|
|
||||||
VMOV R7, V7.B16 // V7 = splat(b2)
|
|
||||||
VMOV R5, V5.S4
|
|
||||||
|
|
||||||
// Align: find the aligned block containing the last byte
|
|
||||||
SUB $1, R12, R3
|
|
||||||
BIC $0x1f, R3, R3 // R3 = start of aligned block containing last byte
|
|
||||||
|
|
||||||
// --- Process tail block ---
|
|
||||||
VLD1 (R3), [V1.B16, V2.B16]
|
|
||||||
VCMEQ V0.B16, V1.B16, V3.B16
|
|
||||||
VCMEQ V7.B16, V1.B16, V8.B16
|
|
||||||
VORR V8.B16, V3.B16, V3.B16
|
|
||||||
VCMEQ V0.B16, V2.B16, V4.B16
|
|
||||||
VCMEQ V7.B16, V2.B16, V9.B16
|
|
||||||
VORR V9.B16, V4.B16, V4.B16
|
|
||||||
VAND V5.B16, V3.B16, V3.B16
|
|
||||||
VAND V5.B16, V4.B16, V4.B16
|
|
||||||
VADDP V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.B16, V6.B16, V6.B16
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
|
|
||||||
// Mask upper bits (bytes past end of slice)
|
|
||||||
// tail_bytes = end - R3 (1..32)
|
|
||||||
SUB R3, R12, R10 // R10 = tail_bytes
|
|
||||||
MOVD $64, R4
|
|
||||||
SUB R10<<1, R4, R4 // R4 = 64 - 2*tail_bytes
|
|
||||||
LSL R4, R6, R6
|
|
||||||
LSR R4, R6, R6
|
|
||||||
|
|
||||||
// Is this also the head block?
|
|
||||||
CMP R11, R3 // R3 - R11
|
|
||||||
BLO lmaskfirst // R3 < base: head+tail in same block
|
|
||||||
BEQ ltailonly // R3 == base: single aligned block
|
|
||||||
|
|
||||||
// R3 > base: more blocks before this one
|
|
||||||
CBNZ R6, llast
|
|
||||||
B lbacksetup
|
|
||||||
|
|
||||||
ltailonly:
|
|
||||||
// Single block, already masked upper bits
|
|
||||||
CBNZ R6, llast
|
|
||||||
B lfail
|
|
||||||
|
|
||||||
lmaskfirst:
|
|
||||||
// Mask lower bits (bytes before start of slice)
|
|
||||||
SUB R3, R11, R4 // R4 = base - R3
|
|
||||||
LSL $1, R4, R4
|
|
||||||
LSR R4, R6, R6
|
|
||||||
LSL R4, R6, R6
|
|
||||||
CBNZ R6, llast
|
|
||||||
B lfail
|
|
||||||
|
|
||||||
lbacksetup:
|
|
||||||
SUB $0x20, R3
|
|
||||||
|
|
||||||
lbackloop:
|
|
||||||
VLD1 (R3), [V1.B16, V2.B16]
|
|
||||||
VCMEQ V0.B16, V1.B16, V3.B16
|
|
||||||
VCMEQ V7.B16, V1.B16, V8.B16
|
|
||||||
VORR V8.B16, V3.B16, V3.B16
|
|
||||||
VCMEQ V0.B16, V2.B16, V4.B16
|
|
||||||
VCMEQ V7.B16, V2.B16, V9.B16
|
|
||||||
VORR V9.B16, V4.B16, V4.B16
|
|
||||||
// Quick check: any match in this block?
|
|
||||||
VORR V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.D2, V6.D2, V6.D2
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
|
|
||||||
// Is this a head block? (R3 < base)
|
|
||||||
CMP R11, R3
|
|
||||||
BLO lheadblock
|
|
||||||
|
|
||||||
// Full block (R3 >= base)
|
|
||||||
CBNZ R6, lbackfound
|
|
||||||
// More blocks?
|
|
||||||
BEQ lfail // R3 == base, no more
|
|
||||||
SUB $0x20, R3
|
|
||||||
B lbackloop
|
|
||||||
|
|
||||||
lbackfound:
|
|
||||||
// Build full syndrome
|
|
||||||
VAND V5.B16, V3.B16, V3.B16
|
|
||||||
VAND V5.B16, V4.B16, V4.B16
|
|
||||||
VADDP V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.B16, V6.B16, V6.B16
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
B llast
|
|
||||||
|
|
||||||
lheadblock:
|
|
||||||
// R3 < base. Build full syndrome if quick check had a match.
|
|
||||||
CBZ R6, lfail
|
|
||||||
VAND V5.B16, V3.B16, V3.B16
|
|
||||||
VAND V5.B16, V4.B16, V4.B16
|
|
||||||
VADDP V4.B16, V3.B16, V6.B16
|
|
||||||
VADDP V6.B16, V6.B16, V6.B16
|
|
||||||
VMOV V6.D[0], R6
|
|
||||||
// Mask lower bits
|
|
||||||
SUB R3, R11, R4 // R4 = base - R3
|
|
||||||
LSL $1, R4, R4
|
|
||||||
LSR R4, R6, R6
|
|
||||||
LSL R4, R6, R6
|
|
||||||
CBZ R6, lfail
|
|
||||||
|
|
||||||
llast:
|
|
||||||
// Find last match: highest set bit in syndrome
|
|
||||||
// Syndrome has bit 2i set for matching byte i.
|
|
||||||
// CLZ gives leading zeros; byte_offset = (63 - CLZ) / 2.
|
|
||||||
CLZ R6, R6
|
|
||||||
MOVD $63, R4
|
|
||||||
SUB R6, R4, R6 // R6 = 63 - CLZ = bit position
|
|
||||||
LSR $1, R6 // R6 = byte offset within block
|
|
||||||
ADD R3, R6, R0 // R0 = absolute address
|
|
||||||
SUB R11, R0, R0 // R0 = slice index
|
|
||||||
MOVD R0, (R8)
|
|
||||||
RET
|
|
||||||
|
|
||||||
lfail:
|
|
||||||
MOVD $-1, R0
|
|
||||||
MOVD R0, (R8)
|
|
||||||
RET
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
//go:build !arm64 && !amd64
|
|
||||||
|
|
||||||
package algo
|
|
||||||
|
|
||||||
import "bytes"
|
|
||||||
|
|
||||||
// indexByteTwo returns the index of the first occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present.
|
|
||||||
func IndexByteTwo(s []byte, b1, b2 byte) int {
|
|
||||||
i1 := bytes.IndexByte(s, b1)
|
|
||||||
if i1 == 0 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
scope := s
|
|
||||||
if i1 > 0 {
|
|
||||||
scope = s[:i1]
|
|
||||||
}
|
|
||||||
if i2 := bytes.IndexByte(scope, b2); i2 >= 0 {
|
|
||||||
return i2
|
|
||||||
}
|
|
||||||
return i1
|
|
||||||
}
|
|
||||||
|
|
||||||
// lastIndexByteTwo returns the index of the last occurrence of b1 or b2 in s,
|
|
||||||
// or -1 if neither is present.
|
|
||||||
func lastIndexByteTwo(s []byte, b1, b2 byte) int {
|
|
||||||
for i := len(s) - 1; i >= 0; i-- {
|
|
||||||
if s[i] == b1 || s[i] == b2 {
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
@@ -1,259 +0,0 @@
|
|||||||
package algo
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestIndexByteTwo(t *testing.T) {
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
s string
|
|
||||||
b1 byte
|
|
||||||
b2 byte
|
|
||||||
want int
|
|
||||||
}{
|
|
||||||
{"empty", "", 'a', 'b', -1},
|
|
||||||
{"single_b1", "a", 'a', 'b', 0},
|
|
||||||
{"single_b2", "b", 'a', 'b', 0},
|
|
||||||
{"single_none", "c", 'a', 'b', -1},
|
|
||||||
{"b1_first", "xaxb", 'a', 'b', 1},
|
|
||||||
{"b2_first", "xbxa", 'a', 'b', 1},
|
|
||||||
{"same_byte", "xxa", 'a', 'a', 2},
|
|
||||||
{"at_end", "xxxxa", 'a', 'b', 4},
|
|
||||||
{"not_found", "xxxxxxxx", 'a', 'b', -1},
|
|
||||||
{"long_b1_at_3000", string(make([]byte, 3000)) + "a" + string(make([]byte, 1000)), 'a', 'b', 3000},
|
|
||||||
{"long_b2_at_3000", string(make([]byte, 3000)) + "b" + string(make([]byte, 1000)), 'a', 'b', 3000},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tt := range tests {
|
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
|
||||||
got := IndexByteTwo([]byte(tt.s), tt.b1, tt.b2)
|
|
||||||
if got != tt.want {
|
|
||||||
t.Errorf("IndexByteTwo(%q, %c, %c) = %d, want %d", tt.s[:min(len(tt.s), 40)], tt.b1, tt.b2, got, tt.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Exhaustive test: compare against loop reference for various lengths,
|
|
||||||
// including sizes around SIMD block boundaries (16, 32, 64).
|
|
||||||
for n := 0; n <= 256; n++ {
|
|
||||||
data := make([]byte, n)
|
|
||||||
for i := range data {
|
|
||||||
data[i] = byte('c' + (i % 20))
|
|
||||||
}
|
|
||||||
// Test with match at every position
|
|
||||||
for pos := 0; pos < n; pos++ {
|
|
||||||
for _, b := range []byte{'A', 'B'} {
|
|
||||||
data[pos] = b
|
|
||||||
got := IndexByteTwo(data, 'A', 'B')
|
|
||||||
want := loopIndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != want {
|
|
||||||
t.Fatalf("IndexByteTwo(len=%d, match=%c@%d) = %d, want %d", n, b, pos, got, want)
|
|
||||||
}
|
|
||||||
data[pos] = byte('c' + (pos % 20))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Test with no match
|
|
||||||
got := IndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != -1 {
|
|
||||||
t.Fatalf("IndexByteTwo(len=%d, no match) = %d, want -1", n, got)
|
|
||||||
}
|
|
||||||
// Test with both bytes present
|
|
||||||
if n >= 2 {
|
|
||||||
data[n/3] = 'A'
|
|
||||||
data[n*2/3] = 'B'
|
|
||||||
got := IndexByteTwo(data, 'A', 'B')
|
|
||||||
want := loopIndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != want {
|
|
||||||
t.Fatalf("IndexByteTwo(len=%d, both@%d,%d) = %d, want %d", n, n/3, n*2/3, got, want)
|
|
||||||
}
|
|
||||||
data[n/3] = byte('c' + ((n / 3) % 20))
|
|
||||||
data[n*2/3] = byte('c' + ((n * 2 / 3) % 20))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLastIndexByteTwo(t *testing.T) {
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
s string
|
|
||||||
b1 byte
|
|
||||||
b2 byte
|
|
||||||
want int
|
|
||||||
}{
|
|
||||||
{"empty", "", 'a', 'b', -1},
|
|
||||||
{"single_b1", "a", 'a', 'b', 0},
|
|
||||||
{"single_b2", "b", 'a', 'b', 0},
|
|
||||||
{"single_none", "c", 'a', 'b', -1},
|
|
||||||
{"b1_last", "xbxa", 'a', 'b', 3},
|
|
||||||
{"b2_last", "xaxb", 'a', 'b', 3},
|
|
||||||
{"same_byte", "axx", 'a', 'a', 0},
|
|
||||||
{"at_start", "axxxx", 'a', 'b', 0},
|
|
||||||
{"both_present", "axbx", 'a', 'b', 2},
|
|
||||||
{"not_found", "xxxxxxxx", 'a', 'b', -1},
|
|
||||||
{"long_b1_at_3000", string(make([]byte, 3000)) + "a" + string(make([]byte, 1000)), 'a', 'b', 3000},
|
|
||||||
{"long_b2_at_end", string(make([]byte, 4000)) + "b", 'a', 'b', 4000},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, tt := range tests {
|
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
|
||||||
got := lastIndexByteTwo([]byte(tt.s), tt.b1, tt.b2)
|
|
||||||
if got != tt.want {
|
|
||||||
t.Errorf("lastIndexByteTwo(%q, %c, %c) = %d, want %d", tt.s[:min(len(tt.s), 40)], tt.b1, tt.b2, got, tt.want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Exhaustive test against loop reference
|
|
||||||
for n := 0; n <= 256; n++ {
|
|
||||||
data := make([]byte, n)
|
|
||||||
for i := range data {
|
|
||||||
data[i] = byte('c' + (i % 20))
|
|
||||||
}
|
|
||||||
for pos := 0; pos < n; pos++ {
|
|
||||||
for _, b := range []byte{'A', 'B'} {
|
|
||||||
data[pos] = b
|
|
||||||
got := lastIndexByteTwo(data, 'A', 'B')
|
|
||||||
want := refLastIndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != want {
|
|
||||||
t.Fatalf("lastIndexByteTwo(len=%d, match=%c@%d) = %d, want %d", n, b, pos, got, want)
|
|
||||||
}
|
|
||||||
data[pos] = byte('c' + (pos % 20))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// No match
|
|
||||||
got := lastIndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != -1 {
|
|
||||||
t.Fatalf("lastIndexByteTwo(len=%d, no match) = %d, want -1", n, got)
|
|
||||||
}
|
|
||||||
// Both bytes present
|
|
||||||
if n >= 2 {
|
|
||||||
data[n/3] = 'A'
|
|
||||||
data[n*2/3] = 'B'
|
|
||||||
got := lastIndexByteTwo(data, 'A', 'B')
|
|
||||||
want := refLastIndexByteTwo(data, 'A', 'B')
|
|
||||||
if got != want {
|
|
||||||
t.Fatalf("lastIndexByteTwo(len=%d, both@%d,%d) = %d, want %d", n, n/3, n*2/3, got, want)
|
|
||||||
}
|
|
||||||
data[n/3] = byte('c' + ((n / 3) % 20))
|
|
||||||
data[n*2/3] = byte('c' + ((n * 2 / 3) % 20))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func FuzzIndexByteTwo(f *testing.F) {
|
|
||||||
f.Add([]byte("hello world"), byte('o'), byte('l'))
|
|
||||||
f.Add([]byte(""), byte('a'), byte('b'))
|
|
||||||
f.Add([]byte("aaa"), byte('a'), byte('a'))
|
|
||||||
f.Fuzz(func(t *testing.T, data []byte, b1, b2 byte) {
|
|
||||||
got := IndexByteTwo(data, b1, b2)
|
|
||||||
want := loopIndexByteTwo(data, b1, b2)
|
|
||||||
if got != want {
|
|
||||||
t.Errorf("IndexByteTwo(len=%d, b1=%d, b2=%d) = %d, want %d", len(data), b1, b2, got, want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func FuzzLastIndexByteTwo(f *testing.F) {
|
|
||||||
f.Add([]byte("hello world"), byte('o'), byte('l'))
|
|
||||||
f.Add([]byte(""), byte('a'), byte('b'))
|
|
||||||
f.Add([]byte("aaa"), byte('a'), byte('a'))
|
|
||||||
f.Fuzz(func(t *testing.T, data []byte, b1, b2 byte) {
|
|
||||||
got := lastIndexByteTwo(data, b1, b2)
|
|
||||||
want := refLastIndexByteTwo(data, b1, b2)
|
|
||||||
if got != want {
|
|
||||||
t.Errorf("lastIndexByteTwo(len=%d, b1=%d, b2=%d) = %d, want %d", len(data), b1, b2, got, want)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reference implementations for correctness checking
|
|
||||||
func refIndexByteTwo(s []byte, b1, b2 byte) int {
|
|
||||||
i1 := bytes.IndexByte(s, b1)
|
|
||||||
if i1 == 0 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
scope := s
|
|
||||||
if i1 > 0 {
|
|
||||||
scope = s[:i1]
|
|
||||||
}
|
|
||||||
if i2 := bytes.IndexByte(scope, b2); i2 >= 0 {
|
|
||||||
return i2
|
|
||||||
}
|
|
||||||
return i1
|
|
||||||
}
|
|
||||||
|
|
||||||
func loopIndexByteTwo(s []byte, b1, b2 byte) int {
|
|
||||||
for i, b := range s {
|
|
||||||
if b == b1 || b == b2 {
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
|
|
||||||
func refLastIndexByteTwo(s []byte, b1, b2 byte) int {
|
|
||||||
for i := len(s) - 1; i >= 0; i-- {
|
|
||||||
if s[i] == b1 || s[i] == b2 {
|
|
||||||
return i
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return -1
|
|
||||||
}
|
|
||||||
|
|
||||||
func benchIndexByteTwo(b *testing.B, size int, pos int) {
|
|
||||||
data := make([]byte, size)
|
|
||||||
for i := range data {
|
|
||||||
data[i] = byte('a' + (i % 20))
|
|
||||||
}
|
|
||||||
data[pos] = 'Z'
|
|
||||||
|
|
||||||
type impl struct {
|
|
||||||
name string
|
|
||||||
fn func([]byte, byte, byte) int
|
|
||||||
}
|
|
||||||
impls := []impl{
|
|
||||||
{"asm", IndexByteTwo},
|
|
||||||
{"2xIndexByte", refIndexByteTwo},
|
|
||||||
{"loop", loopIndexByteTwo},
|
|
||||||
}
|
|
||||||
for _, im := range impls {
|
|
||||||
b.Run(im.name, func(b *testing.B) {
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
im.fn(data, 'Z', 'z')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func benchLastIndexByteTwo(b *testing.B, size int, pos int) {
|
|
||||||
data := make([]byte, size)
|
|
||||||
for i := range data {
|
|
||||||
data[i] = byte('a' + (i % 20))
|
|
||||||
}
|
|
||||||
data[pos] = 'Z'
|
|
||||||
|
|
||||||
type impl struct {
|
|
||||||
name string
|
|
||||||
fn func([]byte, byte, byte) int
|
|
||||||
}
|
|
||||||
impls := []impl{
|
|
||||||
{"asm", lastIndexByteTwo},
|
|
||||||
{"loop", refLastIndexByteTwo},
|
|
||||||
}
|
|
||||||
for _, im := range impls {
|
|
||||||
b.Run(im.name, func(b *testing.B) {
|
|
||||||
for i := 0; i < b.N; i++ {
|
|
||||||
im.fn(data, 'Z', 'z')
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkIndexByteTwo_10(b *testing.B) { benchIndexByteTwo(b, 10, 8) }
|
|
||||||
func BenchmarkIndexByteTwo_100(b *testing.B) { benchIndexByteTwo(b, 100, 80) }
|
|
||||||
func BenchmarkIndexByteTwo_1000(b *testing.B) { benchIndexByteTwo(b, 1000, 800) }
|
|
||||||
func BenchmarkLastIndexByteTwo_10(b *testing.B) { benchLastIndexByteTwo(b, 10, 2) }
|
|
||||||
func BenchmarkLastIndexByteTwo_100(b *testing.B) { benchLastIndexByteTwo(b, 100, 20) }
|
|
||||||
func BenchmarkLastIndexByteTwo_1000(b *testing.B) { benchLastIndexByteTwo(b, 1000, 200) }
|
|
||||||
21
src/ansi.go
21
src/ansi.go
@@ -6,7 +6,6 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/algo"
|
|
||||||
"github.com/junegunn/fzf/src/tui"
|
"github.com/junegunn/fzf/src/tui"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -124,31 +123,31 @@ func toAnsiString(color tui.Color, offset int) string {
|
|||||||
return ret + ";"
|
return ret + ";"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func isPrint(c uint8) bool {
|
||||||
|
return '\x20' <= c && c <= '\x7e'
|
||||||
|
}
|
||||||
|
|
||||||
func matchOperatingSystemCommand(s string, start int) int {
|
func matchOperatingSystemCommand(s string, start int) int {
|
||||||
// `\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
// `\x1b][0-9][;:][[:print:]]+(?:\x1b\\\\|\x07)`
|
||||||
// ^ match starting here after the first printable character
|
// ^ match starting here after the first printable character
|
||||||
//
|
//
|
||||||
i := start // prefix matched in nextAnsiEscapeSequence()
|
i := start // prefix matched in nextAnsiEscapeSequence()
|
||||||
|
for ; i < len(s) && isPrint(s[i]); i++ {
|
||||||
// Find the terminator: BEL (\x07) or ESC (\x1b) for ST (\x1b\\)
|
|
||||||
idx := algo.IndexByteTwo(stringBytes(s[i:]), '\x07', '\x1b')
|
|
||||||
if idx < 0 {
|
|
||||||
return -1
|
|
||||||
}
|
}
|
||||||
i += idx
|
if i < len(s) {
|
||||||
|
|
||||||
if s[i] == '\x07' {
|
if s[i] == '\x07' {
|
||||||
return i + 1
|
return i + 1
|
||||||
}
|
}
|
||||||
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||||
// ------
|
// ------
|
||||||
if i < len(s)-1 && s[i+1] == '\\' {
|
if s[i] == '\x1b' && i < len(s)-1 && s[i+1] == '\\' {
|
||||||
return i + 2
|
return i + 2
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
// `\x1b]8;PARAMS;URI\x1b\\TITLE\x1b]8;;\x1b`
|
||||||
// ------------
|
// ------------
|
||||||
if s[:i+1] == "\x1b]8;;\x1b" {
|
if i < len(s) && s[:i+1] == "\x1b]8;;\x1b" {
|
||||||
return i + 1
|
return i + 1
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -234,7 +233,7 @@ Loop:
|
|||||||
|
|
||||||
// \x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)
|
// \x1b][0-9]+[;:][[:print:]]+(?:\x1b\\\\|\x07)
|
||||||
// ---------------
|
// ---------------
|
||||||
if j > 2 && i+j+1 < len(s) && (s[i+j] == ';' || s[i+j] == ':') && s[i+j+1] >= '\x20' {
|
if j > 2 && i+j+1 < len(s) && (s[i+j] == ';' || s[i+j] == ':') && isPrint(s[i+j+1]) {
|
||||||
if k := matchOperatingSystemCommand(s[i:], j+2); k != -1 {
|
if k := matchOperatingSystemCommand(s[i:], j+2); k != -1 {
|
||||||
return i, i + k
|
return i, i + k
|
||||||
}
|
}
|
||||||
|
|||||||
33
src/cache.go
33
src/cache.go
@@ -2,13 +2,10 @@ package fzf
|
|||||||
|
|
||||||
import "sync"
|
import "sync"
|
||||||
|
|
||||||
// ChunkBitmap is a bitmap with one bit per item in a chunk.
|
// queryCache associates strings to lists of items
|
||||||
type ChunkBitmap [chunkBitWords]uint64
|
type queryCache map[string][]Result
|
||||||
|
|
||||||
// queryCache associates query strings to bitmaps of matching items
|
// ChunkCache associates Chunk and query string to lists of items
|
||||||
type queryCache map[string]ChunkBitmap
|
|
||||||
|
|
||||||
// ChunkCache associates Chunk and query string to bitmaps
|
|
||||||
type ChunkCache struct {
|
type ChunkCache struct {
|
||||||
mutex sync.Mutex
|
mutex sync.Mutex
|
||||||
cache map[*Chunk]*queryCache
|
cache map[*Chunk]*queryCache
|
||||||
@@ -33,9 +30,9 @@ func (cc *ChunkCache) retire(chunk ...*Chunk) {
|
|||||||
cc.mutex.Unlock()
|
cc.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add stores the bitmap for the given chunk and key
|
// Add adds the list to the cache
|
||||||
func (cc *ChunkCache) Add(chunk *Chunk, key string, bitmap ChunkBitmap, matchCount int) {
|
func (cc *ChunkCache) Add(chunk *Chunk, key string, list []Result) {
|
||||||
if len(key) == 0 || !chunk.IsFull() || matchCount > queryCacheMax {
|
if len(key) == 0 || !chunk.IsFull() || len(list) > queryCacheMax {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -47,11 +44,11 @@ func (cc *ChunkCache) Add(chunk *Chunk, key string, bitmap ChunkBitmap, matchCou
|
|||||||
cc.cache[chunk] = &queryCache{}
|
cc.cache[chunk] = &queryCache{}
|
||||||
qc = cc.cache[chunk]
|
qc = cc.cache[chunk]
|
||||||
}
|
}
|
||||||
(*qc)[key] = bitmap
|
(*qc)[key] = list
|
||||||
}
|
}
|
||||||
|
|
||||||
// Lookup returns the bitmap for the exact key
|
// Lookup is called to lookup ChunkCache
|
||||||
func (cc *ChunkCache) Lookup(chunk *Chunk, key string) *ChunkBitmap {
|
func (cc *ChunkCache) Lookup(chunk *Chunk, key string) []Result {
|
||||||
if len(key) == 0 || !chunk.IsFull() {
|
if len(key) == 0 || !chunk.IsFull() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -61,15 +58,15 @@ func (cc *ChunkCache) Lookup(chunk *Chunk, key string) *ChunkBitmap {
|
|||||||
|
|
||||||
qc, ok := cc.cache[chunk]
|
qc, ok := cc.cache[chunk]
|
||||||
if ok {
|
if ok {
|
||||||
if bm, ok := (*qc)[key]; ok {
|
list, ok := (*qc)[key]
|
||||||
return &bm
|
if ok {
|
||||||
|
return list
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Search finds the bitmap for the longest prefix or suffix of the key
|
func (cc *ChunkCache) Search(chunk *Chunk, key string) []Result {
|
||||||
func (cc *ChunkCache) Search(chunk *Chunk, key string) *ChunkBitmap {
|
|
||||||
if len(key) == 0 || !chunk.IsFull() {
|
if len(key) == 0 || !chunk.IsFull() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -89,8 +86,8 @@ func (cc *ChunkCache) Search(chunk *Chunk, key string) *ChunkBitmap {
|
|||||||
prefix := key[:len(key)-idx]
|
prefix := key[:len(key)-idx]
|
||||||
suffix := key[idx:]
|
suffix := key[idx:]
|
||||||
for _, substr := range [2]string{prefix, suffix} {
|
for _, substr := range [2]string{prefix, suffix} {
|
||||||
if bm, found := (*qc)[substr]; found {
|
if cached, found := (*qc)[substr]; found {
|
||||||
return &bm
|
return cached
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,34 +6,34 @@ func TestChunkCache(t *testing.T) {
|
|||||||
cache := NewChunkCache()
|
cache := NewChunkCache()
|
||||||
chunk1p := &Chunk{}
|
chunk1p := &Chunk{}
|
||||||
chunk2p := &Chunk{count: chunkSize}
|
chunk2p := &Chunk{count: chunkSize}
|
||||||
bm1 := ChunkBitmap{1}
|
items1 := []Result{{}}
|
||||||
bm2 := ChunkBitmap{1, 2}
|
items2 := []Result{{}, {}}
|
||||||
cache.Add(chunk1p, "foo", bm1, 1)
|
cache.Add(chunk1p, "foo", items1)
|
||||||
cache.Add(chunk2p, "foo", bm1, 1)
|
cache.Add(chunk2p, "foo", items1)
|
||||||
cache.Add(chunk2p, "bar", bm2, 2)
|
cache.Add(chunk2p, "bar", items2)
|
||||||
|
|
||||||
{ // chunk1 is not full
|
{ // chunk1 is not full
|
||||||
cached := cache.Lookup(chunk1p, "foo")
|
cached := cache.Lookup(chunk1p, "foo")
|
||||||
if cached != nil {
|
if cached != nil {
|
||||||
t.Error("Cached disabled for non-full chunks", cached)
|
t.Error("Cached disabled for non-empty chunks", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached := cache.Lookup(chunk2p, "foo")
|
cached := cache.Lookup(chunk2p, "foo")
|
||||||
if cached == nil || cached[0] != 1 {
|
if cached == nil || len(cached) != 1 {
|
||||||
t.Error("Expected bitmap cached", cached)
|
t.Error("Expected 1 item cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached := cache.Lookup(chunk2p, "bar")
|
cached := cache.Lookup(chunk2p, "bar")
|
||||||
if cached == nil || cached[1] != 2 {
|
if cached == nil || len(cached) != 2 {
|
||||||
t.Error("Expected bitmap cached", cached)
|
t.Error("Expected 2 items cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
cached := cache.Lookup(chunk1p, "foobar")
|
cached := cache.Lookup(chunk1p, "foobar")
|
||||||
if cached != nil {
|
if cached != nil {
|
||||||
t.Error("Expected nil cached", cached)
|
t.Error("Expected 0 item cached", cached)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -99,21 +99,6 @@ func (cl *ChunkList) Clear() {
|
|||||||
cl.mutex.Unlock()
|
cl.mutex.Unlock()
|
||||||
}
|
}
|
||||||
|
|
||||||
// ForEachItem iterates all items and applies fn to each one.
|
|
||||||
// The done callback runs under the lock to safely update shared state.
|
|
||||||
func (cl *ChunkList) ForEachItem(fn func(*Item), done func()) {
|
|
||||||
cl.mutex.Lock()
|
|
||||||
for _, chunk := range cl.chunks {
|
|
||||||
for i := 0; i < chunk.count; i++ {
|
|
||||||
fn(&chunk.items[i])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if done != nil {
|
|
||||||
done()
|
|
||||||
}
|
|
||||||
cl.mutex.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Snapshot returns immutable snapshot of the ChunkList
|
// Snapshot returns immutable snapshot of the ChunkList
|
||||||
func (cl *ChunkList) Snapshot(tail int) ([]*Chunk, int, bool) {
|
func (cl *ChunkList) Snapshot(tail int) ([]*Chunk, int, bool) {
|
||||||
cl.mutex.Lock()
|
cl.mutex.Lock()
|
||||||
|
|||||||
@@ -34,18 +34,19 @@ const (
|
|||||||
maxBgProcessesPerAction = 3
|
maxBgProcessesPerAction = 3
|
||||||
|
|
||||||
// Matcher
|
// Matcher
|
||||||
|
numPartitionsMultiplier = 8
|
||||||
|
maxPartitions = 32
|
||||||
progressMinDuration = 200 * time.Millisecond
|
progressMinDuration = 200 * time.Millisecond
|
||||||
|
|
||||||
// Capacity of each chunk
|
// Capacity of each chunk
|
||||||
chunkSize int = 1024
|
chunkSize int = 1000
|
||||||
chunkBitWords = (chunkSize + 63) / 64
|
|
||||||
|
|
||||||
// Pre-allocated memory slices to minimize GC
|
// Pre-allocated memory slices to minimize GC
|
||||||
slab16Size int = 100 * 1024 // 200KB * 32 = 12.8MB
|
slab16Size int = 100 * 1024 // 200KB * 32 = 12.8MB
|
||||||
slab32Size int = 2048 // 8KB * 32 = 256KB
|
slab32Size int = 2048 // 8KB * 32 = 256KB
|
||||||
|
|
||||||
// Do not cache results of low selectivity queries
|
// Do not cache results of low selectivity queries
|
||||||
queryCacheMax int = chunkSize / 2
|
queryCacheMax int = chunkSize / 5
|
||||||
|
|
||||||
// Not to cache mergers with large lists
|
// Not to cache mergers with large lists
|
||||||
mergerCacheMax int = 100000
|
mergerCacheMax int = 100000
|
||||||
|
|||||||
72
src/core.go
72
src/core.go
@@ -113,9 +113,16 @@ func Run(opts *Options) (int, error) {
|
|||||||
cache := NewChunkCache()
|
cache := NewChunkCache()
|
||||||
var chunkList *ChunkList
|
var chunkList *ChunkList
|
||||||
var itemIndex int32
|
var itemIndex int32
|
||||||
// transformItem applies with-nth transformation to an item's raw data.
|
if opts.WithNth == nil {
|
||||||
// It handles ANSI token propagation using prevLineAnsiState for cross-line continuity.
|
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||||
transformItem := func(item *Item, data []byte, transformer func([]Token, int32) string, index int32) {
|
item.text, item.colors = ansiProcessor(data)
|
||||||
|
item.text.Index = itemIndex
|
||||||
|
itemIndex++
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
nthTransformer := opts.WithNth(opts.Delimiter)
|
||||||
|
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
||||||
tokens := Tokenize(byteString(data), opts.Delimiter)
|
tokens := Tokenize(byteString(data), opts.Delimiter)
|
||||||
if opts.Ansi && len(tokens) > 1 {
|
if opts.Ansi && len(tokens) > 1 {
|
||||||
var ansiState *ansiState
|
var ansiState *ansiState
|
||||||
@@ -133,7 +140,7 @@ func Run(opts *Options) (int, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
transformed := transformer(tokens, index)
|
transformed := nthTransformer(tokens, itemIndex)
|
||||||
item.text, item.colors = ansiProcessor(stringBytes(transformed))
|
item.text, item.colors = ansiProcessor(stringBytes(transformed))
|
||||||
|
|
||||||
// We should not trim trailing whitespaces with background colors
|
// We should not trim trailing whitespaces with background colors
|
||||||
@@ -146,24 +153,6 @@ func Run(opts *Options) (int, error) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
item.text.TrimTrailingWhitespaces(int(maxColorOffset))
|
item.text.TrimTrailingWhitespaces(int(maxColorOffset))
|
||||||
}
|
|
||||||
|
|
||||||
var nthTransformer func([]Token, int32) string
|
|
||||||
if opts.WithNth == nil {
|
|
||||||
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
|
||||||
item.text, item.colors = ansiProcessor(data)
|
|
||||||
item.text.Index = itemIndex
|
|
||||||
itemIndex++
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
nthTransformer = opts.WithNth(opts.Delimiter)
|
|
||||||
chunkList = NewChunkList(cache, func(item *Item, data []byte) bool {
|
|
||||||
if nthTransformer == nil {
|
|
||||||
item.text, item.colors = ansiProcessor(data)
|
|
||||||
} else {
|
|
||||||
transformItem(item, data, nthTransformer, itemIndex)
|
|
||||||
}
|
|
||||||
item.text.Index = itemIndex
|
item.text.Index = itemIndex
|
||||||
item.origText = &data
|
item.origText = &data
|
||||||
itemIndex++
|
itemIndex++
|
||||||
@@ -195,13 +184,11 @@ func Run(opts *Options) (int, error) {
|
|||||||
// Reader
|
// Reader
|
||||||
streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync && opts.Bench == 0
|
streamingFilter := opts.Filter != nil && !sort && !opts.Tac && !opts.Sync && opts.Bench == 0
|
||||||
var reader *Reader
|
var reader *Reader
|
||||||
var ingestionStart time.Time
|
|
||||||
if !streamingFilter {
|
if !streamingFilter {
|
||||||
reader = NewReader(func(data []byte) bool {
|
reader = NewReader(func(data []byte) bool {
|
||||||
return chunkList.Push(data)
|
return chunkList.Push(data)
|
||||||
}, eventBox, executor, opts.ReadZero, opts.Filter == nil)
|
}, eventBox, executor, opts.ReadZero, opts.Filter == nil)
|
||||||
|
|
||||||
ingestionStart = time.Now()
|
|
||||||
readyChan := make(chan bool)
|
readyChan := make(chan bool)
|
||||||
go reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv, readyChan)
|
go reader.ReadSource(opts.Input, opts.WalkerRoot, opts.WalkerOpts, opts.WalkerSkip, initialReload, initialEnv, readyChan)
|
||||||
<-readyChan
|
<-readyChan
|
||||||
@@ -285,7 +272,6 @@ func Run(opts *Options) (int, error) {
|
|||||||
} else {
|
} else {
|
||||||
eventBox.Unwatch(EvtReadNew)
|
eventBox.Unwatch(EvtReadNew)
|
||||||
eventBox.WaitFor(EvtReadFin)
|
eventBox.WaitFor(EvtReadFin)
|
||||||
ingestionTime := time.Since(ingestionStart)
|
|
||||||
|
|
||||||
// NOTE: Streaming filter is inherently not compatible with --tail
|
// NOTE: Streaming filter is inherently not compatible with --tail
|
||||||
snapshot, _, _ := chunkList.Snapshot(opts.Tail)
|
snapshot, _, _ := chunkList.Snapshot(opts.Tail)
|
||||||
@@ -319,14 +305,13 @@ func Run(opts *Options) (int, error) {
|
|||||||
}
|
}
|
||||||
avg := total / time.Duration(len(times))
|
avg := total / time.Duration(len(times))
|
||||||
selectivity := float64(matchCount) / float64(totalItems) * 100
|
selectivity := float64(matchCount) / float64(totalItems) * 100
|
||||||
fmt.Printf(" %d iterations avg: %.2fms min: %.2fms max: %.2fms total: %.2fs items: %d matches: %d (%.2f%%) ingestion: %.2fms\n",
|
fmt.Printf(" %d iterations avg: %.2fms min: %.2fms max: %.2fms total: %.2fs items: %d matches: %d (%.2f%%)\n",
|
||||||
len(times),
|
len(times),
|
||||||
float64(avg.Microseconds())/1000,
|
float64(avg.Microseconds())/1000,
|
||||||
float64(minD.Microseconds())/1000,
|
float64(minD.Microseconds())/1000,
|
||||||
float64(maxD.Microseconds())/1000,
|
float64(maxD.Microseconds())/1000,
|
||||||
total.Seconds(),
|
total.Seconds(),
|
||||||
totalItems, matchCount, selectivity,
|
totalItems, matchCount, selectivity)
|
||||||
float64(ingestionTime.Microseconds())/1000)
|
|
||||||
return ExitOk, nil
|
return ExitOk, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -476,7 +461,6 @@ func Run(opts *Options) (int, error) {
|
|||||||
var environ []string
|
var environ []string
|
||||||
var changed bool
|
var changed bool
|
||||||
headerLinesChanged := false
|
headerLinesChanged := false
|
||||||
withNthChanged := false
|
|
||||||
switch val := value.(type) {
|
switch val := value.(type) {
|
||||||
case searchRequest:
|
case searchRequest:
|
||||||
sort = val.sort
|
sort = val.sort
|
||||||
@@ -503,34 +487,6 @@ func Run(opts *Options) (int, error) {
|
|||||||
headerLinesChanged = true
|
headerLinesChanged = true
|
||||||
bump = true
|
bump = true
|
||||||
}
|
}
|
||||||
if val.withNth != nil {
|
|
||||||
newTransformer := val.withNth.fn
|
|
||||||
// Cancel any in-flight scan and block the terminal from reading
|
|
||||||
// items before mutating them in-place. Snapshot shares middle
|
|
||||||
// chunk pointers, so the matcher and terminal can race with us.
|
|
||||||
matcher.CancelScan()
|
|
||||||
terminal.PauseRendering()
|
|
||||||
// Reset cross-line ANSI state before re-processing all items
|
|
||||||
lineAnsiState = nil
|
|
||||||
prevLineAnsiState = nil
|
|
||||||
chunkList.ForEachItem(func(item *Item) {
|
|
||||||
origBytes := *item.origText
|
|
||||||
savedIndex := item.Index()
|
|
||||||
if newTransformer != nil {
|
|
||||||
transformItem(item, origBytes, newTransformer, savedIndex)
|
|
||||||
} else {
|
|
||||||
item.text, item.colors = ansiProcessor(origBytes)
|
|
||||||
}
|
|
||||||
item.text.Index = savedIndex
|
|
||||||
item.transformed = nil
|
|
||||||
}, func() {
|
|
||||||
nthTransformer = newTransformer
|
|
||||||
})
|
|
||||||
terminal.ResumeRendering()
|
|
||||||
matcher.ResumeScan()
|
|
||||||
withNthChanged = true
|
|
||||||
bump = true
|
|
||||||
}
|
|
||||||
if bump {
|
if bump {
|
||||||
patternCache = make(map[string]*Pattern)
|
patternCache = make(map[string]*Pattern)
|
||||||
cache.Clear()
|
cache.Clear()
|
||||||
@@ -574,8 +530,6 @@ func Run(opts *Options) (int, error) {
|
|||||||
} else {
|
} else {
|
||||||
terminal.UpdateHeader(nil)
|
terminal.UpdateHeader(nil)
|
||||||
}
|
}
|
||||||
} else if withNthChanged && headerLines > 0 {
|
|
||||||
terminal.UpdateHeader(GetItems(snapshot, int(headerLines)))
|
|
||||||
}
|
}
|
||||||
matcher.Reset(snapshot, input(), true, !reading, sort, snapshotRevision)
|
matcher.Reset(snapshot, input(), true, !reading, sort, snapshotRevision)
|
||||||
delay = false
|
delay = false
|
||||||
|
|||||||
@@ -4,7 +4,6 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"runtime"
|
"runtime"
|
||||||
"sync"
|
"sync"
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/util"
|
"github.com/junegunn/fzf/src/util"
|
||||||
@@ -46,8 +45,6 @@ type Matcher struct {
|
|||||||
sortBuf [][]Result
|
sortBuf [][]Result
|
||||||
mergerCache map[string]MatchResult
|
mergerCache map[string]MatchResult
|
||||||
revision revision
|
revision revision
|
||||||
scanMutex sync.Mutex
|
|
||||||
cancelScan *util.AtomicBool
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const (
|
const (
|
||||||
@@ -58,7 +55,7 @@ const (
|
|||||||
// NewMatcher returns a new Matcher
|
// NewMatcher returns a new Matcher
|
||||||
func NewMatcher(cache *ChunkCache, patternBuilder func([]rune) *Pattern,
|
func NewMatcher(cache *ChunkCache, patternBuilder func([]rune) *Pattern,
|
||||||
sort bool, tac bool, eventBox *util.EventBox, revision revision, threads int) *Matcher {
|
sort bool, tac bool, eventBox *util.EventBox, revision revision, threads int) *Matcher {
|
||||||
partitions := runtime.NumCPU()
|
partitions := min(numPartitionsMultiplier*runtime.NumCPU(), maxPartitions)
|
||||||
if threads > 0 {
|
if threads > 0 {
|
||||||
partitions = threads
|
partitions = threads
|
||||||
}
|
}
|
||||||
@@ -73,8 +70,7 @@ func NewMatcher(cache *ChunkCache, patternBuilder func([]rune) *Pattern,
|
|||||||
slab: make([]*util.Slab, partitions),
|
slab: make([]*util.Slab, partitions),
|
||||||
sortBuf: make([][]Result, partitions),
|
sortBuf: make([][]Result, partitions),
|
||||||
mergerCache: make(map[string]MatchResult),
|
mergerCache: make(map[string]MatchResult),
|
||||||
revision: revision,
|
revision: revision}
|
||||||
cancelScan: util.NewAtomicBool(false)}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Loop puts Matcher in action
|
// Loop puts Matcher in action
|
||||||
@@ -134,9 +130,7 @@ func (m *Matcher) Loop() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if result.merger == nil {
|
if result.merger == nil {
|
||||||
m.scanMutex.Lock()
|
|
||||||
result = m.scan(request)
|
result = m.scan(request)
|
||||||
m.scanMutex.Unlock()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if !result.cancelled {
|
if !result.cancelled {
|
||||||
@@ -149,6 +143,27 @@ func (m *Matcher) Loop() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (m *Matcher) sliceChunks(chunks []*Chunk) [][]*Chunk {
|
||||||
|
partitions := m.partitions
|
||||||
|
perSlice := len(chunks) / partitions
|
||||||
|
|
||||||
|
if perSlice == 0 {
|
||||||
|
partitions = len(chunks)
|
||||||
|
perSlice = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
slices := make([][]*Chunk, partitions)
|
||||||
|
for i := 0; i < partitions; i++ {
|
||||||
|
start := i * perSlice
|
||||||
|
end := start + perSlice
|
||||||
|
if i == partitions-1 {
|
||||||
|
end = len(chunks)
|
||||||
|
}
|
||||||
|
slices[i] = chunks[start:end]
|
||||||
|
}
|
||||||
|
return slices
|
||||||
|
}
|
||||||
|
|
||||||
type partialResult struct {
|
type partialResult struct {
|
||||||
index int
|
index int
|
||||||
matches []Result
|
matches []Result
|
||||||
@@ -172,37 +187,39 @@ func (m *Matcher) scan(request MatchRequest) MatchResult {
|
|||||||
maxIndex := request.chunks[numChunks-1].lastIndex(minIndex)
|
maxIndex := request.chunks[numChunks-1].lastIndex(minIndex)
|
||||||
cancelled := util.NewAtomicBool(false)
|
cancelled := util.NewAtomicBool(false)
|
||||||
|
|
||||||
numWorkers := min(m.partitions, numChunks)
|
slices := m.sliceChunks(request.chunks)
|
||||||
var nextChunk atomic.Int32
|
numSlices := len(slices)
|
||||||
resultChan := make(chan partialResult, numWorkers)
|
resultChan := make(chan partialResult, numSlices)
|
||||||
countChan := make(chan int, numChunks)
|
countChan := make(chan int, numChunks)
|
||||||
waitGroup := sync.WaitGroup{}
|
waitGroup := sync.WaitGroup{}
|
||||||
|
|
||||||
for idx := range numWorkers {
|
for idx, chunks := range slices {
|
||||||
waitGroup.Add(1)
|
waitGroup.Add(1)
|
||||||
if m.slab[idx] == nil {
|
if m.slab[idx] == nil {
|
||||||
m.slab[idx] = util.MakeSlab(slab16Size, slab32Size)
|
m.slab[idx] = util.MakeSlab(slab16Size, slab32Size)
|
||||||
}
|
}
|
||||||
go func(idx int, slab *util.Slab) {
|
go func(idx int, slab *util.Slab, chunks []*Chunk) {
|
||||||
defer waitGroup.Done()
|
defer func() { waitGroup.Done() }()
|
||||||
var matches []Result
|
count := 0
|
||||||
for {
|
allMatches := make([][]Result, len(chunks))
|
||||||
ci := int(nextChunk.Add(1)) - 1
|
for idx, chunk := range chunks {
|
||||||
if ci >= numChunks {
|
matches := request.pattern.Match(chunk, slab)
|
||||||
break
|
allMatches[idx] = matches
|
||||||
}
|
count += len(matches)
|
||||||
chunkMatches := request.pattern.Match(request.chunks[ci], slab)
|
|
||||||
matches = append(matches, chunkMatches...)
|
|
||||||
if cancelled.Get() {
|
if cancelled.Get() {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
countChan <- len(chunkMatches)
|
countChan <- len(matches)
|
||||||
|
}
|
||||||
|
sliceMatches := make([]Result, 0, count)
|
||||||
|
for _, matches := range allMatches {
|
||||||
|
sliceMatches = append(sliceMatches, matches...)
|
||||||
}
|
}
|
||||||
if m.sort && request.pattern.sortable {
|
if m.sort && request.pattern.sortable {
|
||||||
m.sortBuf[idx] = radixSortResults(matches, m.tac, m.sortBuf[idx])
|
m.sortBuf[idx] = radixSortResults(sliceMatches, m.tac, m.sortBuf[idx])
|
||||||
}
|
}
|
||||||
resultChan <- partialResult{idx, matches}
|
resultChan <- partialResult{idx, sliceMatches}
|
||||||
}(idx, m.slab[idx])
|
}(idx, m.slab[idx], chunks)
|
||||||
}
|
}
|
||||||
|
|
||||||
wait := func() bool {
|
wait := func() bool {
|
||||||
@@ -221,7 +238,7 @@ func (m *Matcher) scan(request MatchRequest) MatchResult {
|
|||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
if m.cancelScan.Get() || m.reqBox.Peek(reqReset) {
|
if m.reqBox.Peek(reqReset) {
|
||||||
return MatchResult{nil, nil, wait()}
|
return MatchResult{nil, nil, wait()}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -230,8 +247,8 @@ func (m *Matcher) scan(request MatchRequest) MatchResult {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
partialResults := make([][]Result, numWorkers)
|
partialResults := make([][]Result, numSlices)
|
||||||
for range numWorkers {
|
for range slices {
|
||||||
partialResult := <-resultChan
|
partialResult := <-resultChan
|
||||||
partialResults[partialResult.index] = partialResult.matches
|
partialResults[partialResult.index] = partialResult.matches
|
||||||
}
|
}
|
||||||
@@ -252,20 +269,6 @@ func (m *Matcher) Reset(chunks []*Chunk, patternRunes []rune, cancel bool, final
|
|||||||
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort, revision})
|
m.reqBox.Set(event, MatchRequest{chunks, pattern, final, sort, revision})
|
||||||
}
|
}
|
||||||
|
|
||||||
// CancelScan cancels any in-flight scan, waits for it to finish,
|
|
||||||
// and prevents new scans from starting until ResumeScan is called.
|
|
||||||
// This is used to safely mutate shared items (e.g., during with-nth changes).
|
|
||||||
func (m *Matcher) CancelScan() {
|
|
||||||
m.cancelScan.Set(true)
|
|
||||||
m.scanMutex.Lock()
|
|
||||||
m.cancelScan.Set(false)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResumeScan allows scans to proceed again after CancelScan.
|
|
||||||
func (m *Matcher) ResumeScan() {
|
|
||||||
m.scanMutex.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Matcher) Stop() {
|
func (m *Matcher) Stop() {
|
||||||
m.reqBox.Set(reqQuit, nil)
|
m.reqBox.Set(reqQuit, nil)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -588,7 +588,6 @@ type Options struct {
|
|||||||
FreezeLeft int
|
FreezeLeft int
|
||||||
FreezeRight int
|
FreezeRight int
|
||||||
WithNth func(Delimiter) func([]Token, int32) string
|
WithNth func(Delimiter) func([]Token, int32) string
|
||||||
WithNthExpr string
|
|
||||||
AcceptNth func(Delimiter) func([]Token, int32) string
|
AcceptNth func(Delimiter) func([]Token, int32) string
|
||||||
Delimiter Delimiter
|
Delimiter Delimiter
|
||||||
Sort int
|
Sort int
|
||||||
@@ -1630,7 +1629,7 @@ const (
|
|||||||
|
|
||||||
func init() {
|
func init() {
|
||||||
executeRegexp = regexp.MustCompile(
|
executeRegexp = regexp.MustCompile(
|
||||||
`(?si)[:+](become|execute(?:-multi|-silent)?|reload(?:-sync)?|preview|(?:change|bg-transform|transform)-(?:query|prompt|(?:border|list|preview|input|header|footer)-label|header-lines|header|footer|search|with-nth|nth|pointer|ghost)|bg-transform|transform|change-(?:preview-window|preview|multi)|(?:re|un|toggle-)bind|pos|put|print|search|trigger)`)
|
`(?si)[:+](become|execute(?:-multi|-silent)?|reload(?:-sync)?|preview|(?:change|bg-transform|transform)-(?:query|prompt|(?:border|list|preview|input|header|footer)-label|header-lines|header|footer|search|nth|pointer|ghost)|bg-transform|transform|change-(?:preview-window|preview|multi)|(?:re|un|toggle-)bind|pos|put|print|search|trigger)`)
|
||||||
splitRegexp = regexp.MustCompile("[,:]+")
|
splitRegexp = regexp.MustCompile("[,:]+")
|
||||||
actionNameRegexp = regexp.MustCompile("(?i)^[a-z-]+")
|
actionNameRegexp = regexp.MustCompile("(?i)^[a-z-]+")
|
||||||
}
|
}
|
||||||
@@ -2073,8 +2072,6 @@ func isExecuteAction(str string) actionType {
|
|||||||
return actChangeMulti
|
return actChangeMulti
|
||||||
case "change-nth":
|
case "change-nth":
|
||||||
return actChangeNth
|
return actChangeNth
|
||||||
case "change-with-nth":
|
|
||||||
return actChangeWithNth
|
|
||||||
case "pos":
|
case "pos":
|
||||||
return actPosition
|
return actPosition
|
||||||
case "execute":
|
case "execute":
|
||||||
@@ -2111,8 +2108,6 @@ func isExecuteAction(str string) actionType {
|
|||||||
return actTransformGhost
|
return actTransformGhost
|
||||||
case "transform-nth":
|
case "transform-nth":
|
||||||
return actTransformNth
|
return actTransformNth
|
||||||
case "transform-with-nth":
|
|
||||||
return actTransformWithNth
|
|
||||||
case "transform-pointer":
|
case "transform-pointer":
|
||||||
return actTransformPointer
|
return actTransformPointer
|
||||||
case "transform-prompt":
|
case "transform-prompt":
|
||||||
@@ -2145,8 +2140,6 @@ func isExecuteAction(str string) actionType {
|
|||||||
return actBgTransformGhost
|
return actBgTransformGhost
|
||||||
case "bg-transform-nth":
|
case "bg-transform-nth":
|
||||||
return actBgTransformNth
|
return actBgTransformNth
|
||||||
case "bg-transform-with-nth":
|
|
||||||
return actBgTransformWithNth
|
|
||||||
case "bg-transform-pointer":
|
case "bg-transform-pointer":
|
||||||
return actBgTransformPointer
|
return actBgTransformPointer
|
||||||
case "bg-transform-prompt":
|
case "bg-transform-prompt":
|
||||||
@@ -2788,7 +2781,6 @@ func parseOptions(index *int, opts *Options, allArgs []string) error {
|
|||||||
if opts.WithNth, err = nthTransformer(str); err != nil {
|
if opts.WithNth, err = nthTransformer(str); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
opts.WithNthExpr = str
|
|
||||||
case "--accept-nth":
|
case "--accept-nth":
|
||||||
str, err := nextString("nth expression required")
|
str, err := nextString("nth expression required")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|||||||
@@ -61,7 +61,7 @@ type Pattern struct {
|
|||||||
delimiter Delimiter
|
delimiter Delimiter
|
||||||
nth []Range
|
nth []Range
|
||||||
revision revision
|
revision revision
|
||||||
procFun [6]algo.Algo
|
procFun map[termType]algo.Algo
|
||||||
cache *ChunkCache
|
cache *ChunkCache
|
||||||
denylist map[int32]struct{}
|
denylist map[int32]struct{}
|
||||||
startIndex int32
|
startIndex int32
|
||||||
@@ -150,7 +150,7 @@ func BuildPattern(cache *ChunkCache, patternCache map[string]*Pattern, fuzzy boo
|
|||||||
cache: cache,
|
cache: cache,
|
||||||
denylist: denylist,
|
denylist: denylist,
|
||||||
startIndex: startIndex,
|
startIndex: startIndex,
|
||||||
}
|
procFun: make(map[termType]algo.Algo)}
|
||||||
|
|
||||||
ptr.cacheKey = ptr.buildCacheKey()
|
ptr.cacheKey = ptr.buildCacheKey()
|
||||||
ptr.directAlgo, ptr.directTerm = ptr.buildDirectAlgo(fuzzyAlgo)
|
ptr.directAlgo, ptr.directTerm = ptr.buildDirectAlgo(fuzzyAlgo)
|
||||||
@@ -300,87 +300,104 @@ func (p *Pattern) CacheKey() string {
|
|||||||
|
|
||||||
// Match returns the list of matches Items in the given Chunk
|
// Match returns the list of matches Items in the given Chunk
|
||||||
func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []Result {
|
func (p *Pattern) Match(chunk *Chunk, slab *util.Slab) []Result {
|
||||||
|
// ChunkCache: Exact match
|
||||||
cacheKey := p.CacheKey()
|
cacheKey := p.CacheKey()
|
||||||
|
|
||||||
// Bitmap cache: exact match or prefix/suffix
|
|
||||||
var cachedBitmap *ChunkBitmap
|
|
||||||
if p.cacheable {
|
if p.cacheable {
|
||||||
cachedBitmap = p.cache.Lookup(chunk, cacheKey)
|
if cached := p.cache.Lookup(chunk, cacheKey); cached != nil {
|
||||||
|
return cached
|
||||||
}
|
}
|
||||||
if cachedBitmap == nil {
|
|
||||||
cachedBitmap = p.cache.Search(chunk, cacheKey)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
matches, bitmap := p.matchChunk(chunk, cachedBitmap, slab)
|
// Prefix/suffix cache
|
||||||
|
space := p.cache.Search(chunk, cacheKey)
|
||||||
|
|
||||||
|
matches := p.matchChunk(chunk, space, slab)
|
||||||
|
|
||||||
if p.cacheable {
|
if p.cacheable {
|
||||||
p.cache.Add(chunk, cacheKey, bitmap, len(matches))
|
p.cache.Add(chunk, cacheKey, matches)
|
||||||
}
|
}
|
||||||
return matches
|
return matches
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *Pattern) matchChunk(chunk *Chunk, cachedBitmap *ChunkBitmap, slab *util.Slab) ([]Result, ChunkBitmap) {
|
func (p *Pattern) matchChunk(chunk *Chunk, space []Result, slab *util.Slab) []Result {
|
||||||
matches := []Result{}
|
matches := []Result{}
|
||||||
var bitmap ChunkBitmap
|
|
||||||
|
|
||||||
// Skip header items in chunks that contain them
|
// Skip header items in chunks that contain them
|
||||||
startIdx := 0
|
startIdx := 0
|
||||||
if p.startIndex > 0 && chunk.count > 0 && chunk.items[0].Index() < p.startIndex {
|
if p.startIndex > 0 && chunk.count > 0 && chunk.items[0].Index() < p.startIndex {
|
||||||
startIdx = int(p.startIndex - chunk.items[0].Index())
|
startIdx = int(p.startIndex - chunk.items[0].Index())
|
||||||
if startIdx >= chunk.count {
|
if startIdx >= chunk.count {
|
||||||
return matches, bitmap
|
return matches
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
hasCachedBitmap := cachedBitmap != nil
|
|
||||||
|
|
||||||
// Fast path: single fuzzy term, no nth, no denylist.
|
// Fast path: single fuzzy term, no nth, no denylist.
|
||||||
// Calls the algo function directly, bypassing MatchItem/extendedMatch/iter
|
// Calls the algo function directly, bypassing MatchItem/extendedMatch/iter
|
||||||
// and avoiding per-match []Offset heap allocation.
|
// and avoiding per-match []Offset heap allocation.
|
||||||
if p.directAlgo != nil && len(p.denylist) == 0 {
|
if p.directAlgo != nil && len(p.denylist) == 0 {
|
||||||
t := p.directTerm
|
t := p.directTerm
|
||||||
|
if space == nil {
|
||||||
for idx := startIdx; idx < chunk.count; idx++ {
|
for idx := startIdx; idx < chunk.count; idx++ {
|
||||||
if hasCachedBitmap && cachedBitmap[idx/64]&(uint64(1)<<(idx%64)) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
res, _ := p.directAlgo(t.caseSensitive, t.normalize, p.forward,
|
res, _ := p.directAlgo(t.caseSensitive, t.normalize, p.forward,
|
||||||
&chunk.items[idx].text, t.text, p.withPos, slab)
|
&chunk.items[idx].text, t.text, p.withPos, slab)
|
||||||
if res.Start >= 0 {
|
if res.Start >= 0 {
|
||||||
bitmap[idx/64] |= uint64(1) << (idx % 64)
|
|
||||||
matches = append(matches, buildResultFromBounds(
|
matches = append(matches, buildResultFromBounds(
|
||||||
&chunk.items[idx], res.Score,
|
&chunk.items[idx], res.Score,
|
||||||
int(res.Start), int(res.End), int(res.End), true))
|
int(res.Start), int(res.End), int(res.End), true))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return matches, bitmap
|
} else {
|
||||||
|
for _, result := range space {
|
||||||
|
res, _ := p.directAlgo(t.caseSensitive, t.normalize, p.forward,
|
||||||
|
&result.item.text, t.text, p.withPos, slab)
|
||||||
|
if res.Start >= 0 {
|
||||||
|
matches = append(matches, buildResultFromBounds(
|
||||||
|
result.item, res.Score,
|
||||||
|
int(res.Start), int(res.End), int(res.End), true))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matches
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(p.denylist) == 0 {
|
if len(p.denylist) == 0 {
|
||||||
|
// Huge code duplication for minimizing unnecessary map lookups
|
||||||
|
if space == nil {
|
||||||
for idx := startIdx; idx < chunk.count; idx++ {
|
for idx := startIdx; idx < chunk.count; idx++ {
|
||||||
if hasCachedBitmap && cachedBitmap[idx/64]&(uint64(1)<<(idx%64)) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match.item != nil {
|
if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match.item != nil {
|
||||||
bitmap[idx/64] |= uint64(1) << (idx % 64)
|
|
||||||
matches = append(matches, match)
|
matches = append(matches, match)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return matches, bitmap
|
} else {
|
||||||
|
for _, result := range space {
|
||||||
|
if match, _, _ := p.MatchItem(result.item, p.withPos, slab); match.item != nil {
|
||||||
|
matches = append(matches, match)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matches
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if space == nil {
|
||||||
for idx := startIdx; idx < chunk.count; idx++ {
|
for idx := startIdx; idx < chunk.count; idx++ {
|
||||||
if hasCachedBitmap && cachedBitmap[idx/64]&(uint64(1)<<(idx%64)) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if _, prs := p.denylist[chunk.items[idx].Index()]; prs {
|
if _, prs := p.denylist[chunk.items[idx].Index()]; prs {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match.item != nil {
|
if match, _, _ := p.MatchItem(&chunk.items[idx], p.withPos, slab); match.item != nil {
|
||||||
bitmap[idx/64] |= uint64(1) << (idx % 64)
|
|
||||||
matches = append(matches, match)
|
matches = append(matches, match)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return matches, bitmap
|
} else {
|
||||||
|
for _, result := range space {
|
||||||
|
if _, prs := p.denylist[result.item.Index()]; prs {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if match, _, _ := p.MatchItem(result.item, p.withPos, slab); match.item != nil {
|
||||||
|
matches = append(matches, match)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return matches
|
||||||
}
|
}
|
||||||
|
|
||||||
// MatchItem returns the match result if the Item is a match.
|
// MatchItem returns the match result if the Item is a match.
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ package fzf
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"reflect"
|
"reflect"
|
||||||
"runtime"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/junegunn/fzf/src/algo"
|
"github.com/junegunn/fzf/src/algo"
|
||||||
@@ -138,7 +137,7 @@ func TestOrigTextAndTransformed(t *testing.T) {
|
|||||||
origText: &origBytes,
|
origText: &origBytes,
|
||||||
transformed: &transformed{pattern.revision, trans}}
|
transformed: &transformed{pattern.revision, trans}}
|
||||||
pattern.extended = extended
|
pattern.extended = extended
|
||||||
matches, _ := pattern.matchChunk(&chunk, nil, slab) // No cache
|
matches := pattern.matchChunk(&chunk, nil, slab) // No cache
|
||||||
if !(matches[0].item.text.ToString() == "junegunn" &&
|
if !(matches[0].item.text.ToString() == "junegunn" &&
|
||||||
string(*matches[0].item.origText) == "junegunn.choi" &&
|
string(*matches[0].item.origText) == "junegunn.choi" &&
|
||||||
reflect.DeepEqual((*matches[0].item.transformed).tokens, trans)) {
|
reflect.DeepEqual((*matches[0].item.transformed).tokens, trans)) {
|
||||||
@@ -200,119 +199,3 @@ func TestCacheable(t *testing.T) {
|
|||||||
test(false, "foo 'bar", "foo", false)
|
test(false, "foo 'bar", "foo", false)
|
||||||
test(false, "foo !bar", "foo", false)
|
test(false, "foo !bar", "foo", false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func buildChunks(numChunks int) []*Chunk {
|
|
||||||
chunks := make([]*Chunk, numChunks)
|
|
||||||
words := []string{
|
|
||||||
"src/main/java/com/example/service/UserService.java",
|
|
||||||
"src/test/java/com/example/service/UserServiceTest.java",
|
|
||||||
"docs/api/reference/endpoints.md",
|
|
||||||
"lib/internal/utils/string_helper.go",
|
|
||||||
"pkg/server/http/handler/auth.go",
|
|
||||||
"build/output/release/app.exe",
|
|
||||||
"config/production/database.yml",
|
|
||||||
"scripts/deploy/kubernetes/setup.sh",
|
|
||||||
"vendor/github.com/junegunn/fzf/src/core.go",
|
|
||||||
"node_modules/.cache/babel/transform.js",
|
|
||||||
}
|
|
||||||
for ci := range numChunks {
|
|
||||||
chunks[ci] = &Chunk{count: chunkSize}
|
|
||||||
for i := range chunkSize {
|
|
||||||
text := words[(ci*chunkSize+i)%len(words)]
|
|
||||||
chunks[ci].items[i] = Item{text: util.ToChars([]byte(text))}
|
|
||||||
chunks[ci].items[i].text.Index = int32(ci*chunkSize + i)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return chunks
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildPatternWith(cache *ChunkCache, runes []rune) *Pattern {
|
|
||||||
return BuildPattern(cache, make(map[string]*Pattern),
|
|
||||||
true, algo.FuzzyMatchV2, true, CaseSmart, false, true,
|
|
||||||
false, true, []Range{}, Delimiter{}, revision{}, runes, nil, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBitmapCacheBenefit(t *testing.T) {
|
|
||||||
numChunks := 100
|
|
||||||
chunks := buildChunks(numChunks)
|
|
||||||
queries := []string{"s", "se", "ser", "serv", "servi"}
|
|
||||||
|
|
||||||
// 1. Run all queries with shared cache (simulates incremental typing)
|
|
||||||
cache := NewChunkCache()
|
|
||||||
for _, q := range queries {
|
|
||||||
pat := buildPatternWith(cache, []rune(q))
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
pat.Match(chunk, slab)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// 2. GC and measure memory with cache populated
|
|
||||||
runtime.GC()
|
|
||||||
runtime.GC()
|
|
||||||
var memWith runtime.MemStats
|
|
||||||
runtime.ReadMemStats(&memWith)
|
|
||||||
|
|
||||||
// 3. Clear cache, GC, measure again
|
|
||||||
cache.Clear()
|
|
||||||
runtime.GC()
|
|
||||||
runtime.GC()
|
|
||||||
var memWithout runtime.MemStats
|
|
||||||
runtime.ReadMemStats(&memWithout)
|
|
||||||
|
|
||||||
cacheMem := int64(memWith.Alloc) - int64(memWithout.Alloc)
|
|
||||||
t.Logf("Chunks: %d, Queries: %d", numChunks, len(queries))
|
|
||||||
t.Logf("Cache memory: %d bytes (%.1f KB)", cacheMem, float64(cacheMem)/1024)
|
|
||||||
t.Logf("Per-chunk-per-query: %.0f bytes", float64(cacheMem)/float64(numChunks*len(queries)))
|
|
||||||
|
|
||||||
// 4. Verify correctness: cached vs uncached produce same results
|
|
||||||
cache2 := NewChunkCache()
|
|
||||||
for _, q := range queries {
|
|
||||||
pat := buildPatternWith(cache2, []rune(q))
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
pat.Match(chunk, slab)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, q := range queries {
|
|
||||||
patCached := buildPatternWith(cache2, []rune(q))
|
|
||||||
patFresh := buildPatternWith(NewChunkCache(), []rune(q))
|
|
||||||
var countCached, countFresh int
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
countCached += len(patCached.Match(chunk, slab))
|
|
||||||
countFresh += len(patFresh.Match(chunk, slab))
|
|
||||||
}
|
|
||||||
if countCached != countFresh {
|
|
||||||
t.Errorf("query=%q: cached=%d, fresh=%d", q, countCached, countFresh)
|
|
||||||
}
|
|
||||||
t.Logf("query=%q: matches=%d", q, countCached)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func BenchmarkWithCache(b *testing.B) {
|
|
||||||
numChunks := 100
|
|
||||||
chunks := buildChunks(numChunks)
|
|
||||||
queries := []string{"s", "se", "ser", "serv", "servi"}
|
|
||||||
|
|
||||||
b.Run("cached", func(b *testing.B) {
|
|
||||||
for range b.N {
|
|
||||||
cache := NewChunkCache()
|
|
||||||
for _, q := range queries {
|
|
||||||
pat := buildPatternWith(cache, []rune(q))
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
pat.Match(chunk, slab)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
b.Run("uncached", func(b *testing.B) {
|
|
||||||
for range b.N {
|
|
||||||
for _, q := range queries {
|
|
||||||
cache := NewChunkCache()
|
|
||||||
pat := buildPatternWith(cache, []rune(q))
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
pat.Match(chunk, slab)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -6,5 +6,5 @@ import "golang.org/x/sys/unix"
|
|||||||
|
|
||||||
// Protect calls OS specific protections like pledge on OpenBSD
|
// Protect calls OS specific protections like pledge on OpenBSD
|
||||||
func Protect() {
|
func Protect() {
|
||||||
unix.PledgePromises("stdio cpath dpath wpath rpath tty proc exec inet")
|
unix.PledgePromises("stdio dpath wpath rpath tty proc exec inet tmppath")
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -128,7 +128,7 @@ func minRank() Result {
|
|||||||
return Result{item: &minItem, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
|
return Result{item: &minItem, points: [4]uint16{math.MaxUint16, 0, 0, 0}}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, theme *tui.ColorTheme, colBase tui.ColorPair, colMatch tui.ColorPair, attrNth tui.Attr, nthOverlay tui.Attr, hidden bool) []colorOffset {
|
func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, theme *tui.ColorTheme, colBase tui.ColorPair, colMatch tui.ColorPair, attrNth tui.Attr, hidden bool) []colorOffset {
|
||||||
itemColors := result.item.Colors()
|
itemColors := result.item.Colors()
|
||||||
|
|
||||||
// No ANSI codes
|
// No ANSI codes
|
||||||
@@ -213,10 +213,6 @@ func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, t
|
|||||||
}
|
}
|
||||||
return tui.NewColorPair(fg, bg, ansi.color.attr).WithUl(ansi.color.ul).MergeAttr(base)
|
return tui.NewColorPair(fg, bg, ansi.color.attr).WithUl(ansi.color.ul).MergeAttr(base)
|
||||||
}
|
}
|
||||||
fgAttr := tui.ColNormal.Attr()
|
|
||||||
nthAttrFinal := fgAttr.Merge(attrNth).Merge(nthOverlay)
|
|
||||||
nthBase := colBase.WithNewAttr(nthAttrFinal)
|
|
||||||
|
|
||||||
var colors []colorOffset
|
var colors []colorOffset
|
||||||
add := func(idx int) {
|
add := func(idx int) {
|
||||||
if curr.fbg >= 0 {
|
if curr.fbg >= 0 {
|
||||||
@@ -230,7 +226,7 @@ func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, t
|
|||||||
if curr.match {
|
if curr.match {
|
||||||
var color tui.ColorPair
|
var color tui.ColorPair
|
||||||
if curr.nth {
|
if curr.nth {
|
||||||
color = nthBase.Merge(colMatch)
|
color = colBase.WithAttr(attrNth).Merge(colMatch)
|
||||||
} else {
|
} else {
|
||||||
color = colBase.Merge(colMatch)
|
color = colBase.Merge(colMatch)
|
||||||
}
|
}
|
||||||
@@ -250,7 +246,7 @@ func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, t
|
|||||||
if color.Fg().IsDefault() && origColor.HasBg() {
|
if color.Fg().IsDefault() && origColor.HasBg() {
|
||||||
color = origColor
|
color = origColor
|
||||||
if curr.nth {
|
if curr.nth {
|
||||||
color = color.WithAttr((attrNth &^ tui.AttrRegular).Merge(nthOverlay))
|
color = color.WithAttr(attrNth &^ tui.AttrRegular)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
color = origColor.MergeNonDefault(color)
|
color = origColor.MergeNonDefault(color)
|
||||||
@@ -262,7 +258,7 @@ func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, t
|
|||||||
ansi := itemColors[curr.index]
|
ansi := itemColors[curr.index]
|
||||||
base := colBase
|
base := colBase
|
||||||
if curr.nth {
|
if curr.nth {
|
||||||
base = nthBase
|
base = base.WithAttr(attrNth)
|
||||||
}
|
}
|
||||||
if hidden {
|
if hidden {
|
||||||
base = base.WithFg(theme.Nomatch)
|
base = base.WithFg(theme.Nomatch)
|
||||||
@@ -274,7 +270,7 @@ func (result *Result) colorOffsets(matchOffsets []Offset, nthOffsets []Offset, t
|
|||||||
match: false,
|
match: false,
|
||||||
url: ansi.color.url})
|
url: ansi.color.url})
|
||||||
} else {
|
} else {
|
||||||
color := nthBase
|
color := colBase.WithAttr(attrNth)
|
||||||
if hidden {
|
if hidden {
|
||||||
color = color.WithFg(theme.Nomatch)
|
color = color.WithFg(theme.Nomatch)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -132,7 +132,7 @@ func TestColorOffset(t *testing.T) {
|
|||||||
|
|
||||||
colBase := tui.NewColorPair(89, 189, tui.AttrUndefined)
|
colBase := tui.NewColorPair(89, 189, tui.AttrUndefined)
|
||||||
colMatch := tui.NewColorPair(99, 199, tui.AttrUndefined)
|
colMatch := tui.NewColorPair(99, 199, tui.AttrUndefined)
|
||||||
colors := item.colorOffsets(offsets, nil, tui.Dark256, colBase, colMatch, tui.AttrUndefined, 0, false)
|
colors := item.colorOffsets(offsets, nil, tui.Dark256, colBase, colMatch, tui.AttrUndefined, false)
|
||||||
assert := func(idx int, b int32, e int32, c tui.ColorPair) {
|
assert := func(idx int, b int32, e int32, c tui.ColorPair) {
|
||||||
o := colors[idx]
|
o := colors[idx]
|
||||||
if o.offset[0] != b || o.offset[1] != e || o.color != c {
|
if o.offset[0] != b || o.offset[1] != e || o.color != c {
|
||||||
@@ -159,7 +159,7 @@ func TestColorOffset(t *testing.T) {
|
|||||||
|
|
||||||
nthOffsets := []Offset{{37, 39}, {42, 45}}
|
nthOffsets := []Offset{{37, 39}, {42, 45}}
|
||||||
for _, attr := range []tui.Attr{tui.AttrRegular, tui.StrikeThrough} {
|
for _, attr := range []tui.Attr{tui.AttrRegular, tui.StrikeThrough} {
|
||||||
colors = item.colorOffsets(offsets, nthOffsets, tui.Dark256, colRegular, colUnderline, attr, 0, false)
|
colors = item.colorOffsets(offsets, nthOffsets, tui.Dark256, colRegular, colUnderline, attr, false)
|
||||||
|
|
||||||
// [{[0 5] {1 5 0}} {[5 15] {1 5 8}} {[15 20] {1 5 0}}
|
// [{[0 5] {1 5 0}} {[5 15] {1 5 8}} {[15 20] {1 5 0}}
|
||||||
// {[22 25] {2 6 1}} {[25 27] {2 6 9}} {[27 30] {-1 -1 8}}
|
// {[22 25] {2 6 1}} {[25 27] {2 6 9}} {[27 30] {-1 -1 8}}
|
||||||
@@ -182,37 +182,6 @@ func TestColorOffset(t *testing.T) {
|
|||||||
assert(10, 37, 39, tui.NewColorPair(4, 8, expected))
|
assert(10, 37, 39, tui.NewColorPair(4, 8, expected))
|
||||||
assert(11, 39, 40, tui.NewColorPair(4, 8, tui.Bold))
|
assert(11, 39, 40, tui.NewColorPair(4, 8, tui.Bold))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Test nthOverlay: simulates nth:regular with current-fg:underline
|
|
||||||
// The overlay (underline) should survive even though nth:regular clears attrs.
|
|
||||||
// Precedence: fg < nth < current-fg
|
|
||||||
colors = item.colorOffsets(offsets, nthOffsets, tui.Dark256, colRegular, colUnderline, tui.AttrRegular, tui.Underline, false)
|
|
||||||
|
|
||||||
// nth regions should have Underline (from overlay), not cleared by AttrRegular
|
|
||||||
// Non-nth regions keep colBase attrs (AttrUndefined)
|
|
||||||
assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
|
||||||
assert(1, 5, 15, tui.NewColorPair(1, 5, tui.Underline))
|
|
||||||
assert(2, 15, 20, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
|
||||||
assert(3, 22, 25, tui.NewColorPair(2, 6, tui.Bold))
|
|
||||||
assert(4, 25, 27, tui.NewColorPair(2, 6, tui.Bold|tui.Underline))
|
|
||||||
assert(5, 27, 30, colUnderline)
|
|
||||||
assert(6, 30, 32, tui.NewColorPair(3, 7, tui.Underline))
|
|
||||||
assert(7, 32, 33, colUnderline)
|
|
||||||
assert(8, 33, 35, tui.NewColorPair(4, 8, tui.Bold|tui.Underline))
|
|
||||||
assert(9, 35, 37, tui.NewColorPair(4, 8, tui.Bold))
|
|
||||||
// nth region within ANSI bold: AttrRegular clears, overlay adds Underline back
|
|
||||||
assert(10, 37, 39, tui.NewColorPair(4, 8, tui.Bold|tui.Underline))
|
|
||||||
assert(11, 39, 40, tui.NewColorPair(4, 8, tui.Bold))
|
|
||||||
|
|
||||||
// Test nthOverlay with additive attrs: nth:strikethrough with selected-fg:bold
|
|
||||||
colors = item.colorOffsets(offsets, nthOffsets, tui.Dark256, colRegular, colUnderline, tui.StrikeThrough, tui.Bold, false)
|
|
||||||
|
|
||||||
// Non-nth entries unchanged from overlay=0 case
|
|
||||||
assert(0, 0, 5, tui.NewColorPair(1, 5, tui.AttrUndefined))
|
|
||||||
assert(5, 27, 30, colUnderline) // match only, no nth
|
|
||||||
assert(7, 32, 33, colUnderline) // match only, no nth
|
|
||||||
// nth region within ANSI bold: StrikeThrough|Bold merged with ANSI Bold
|
|
||||||
assert(10, 37, 39, tui.NewColorPair(4, 8, tui.Bold|tui.StrikeThrough))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestRadixSortResults(t *testing.T) {
|
func TestRadixSortResults(t *testing.T) {
|
||||||
|
|||||||
131
src/terminal.go
131
src/terminal.go
@@ -340,9 +340,6 @@ type Terminal struct {
|
|||||||
nthAttr tui.Attr
|
nthAttr tui.Attr
|
||||||
nth []Range
|
nth []Range
|
||||||
nthCurrent []Range
|
nthCurrent []Range
|
||||||
withNthDefault string
|
|
||||||
withNthExpr string
|
|
||||||
withNthEnabled bool
|
|
||||||
acceptNth func([]Token, int32) string
|
acceptNth func([]Token, int32) string
|
||||||
tabstop int
|
tabstop int
|
||||||
margin [4]sizeSpec
|
margin [4]sizeSpec
|
||||||
@@ -387,7 +384,6 @@ type Terminal struct {
|
|||||||
hasLoadActions bool
|
hasLoadActions bool
|
||||||
hasResizeActions bool
|
hasResizeActions bool
|
||||||
triggerLoad bool
|
triggerLoad bool
|
||||||
filterSelection bool
|
|
||||||
reading bool
|
reading bool
|
||||||
running *util.AtomicBool
|
running *util.AtomicBool
|
||||||
failed *string
|
failed *string
|
||||||
@@ -555,7 +551,6 @@ const (
|
|||||||
actChangeListLabel
|
actChangeListLabel
|
||||||
actChangeMulti
|
actChangeMulti
|
||||||
actChangeNth
|
actChangeNth
|
||||||
actChangeWithNth
|
|
||||||
actChangePointer
|
actChangePointer
|
||||||
actChangePreview
|
actChangePreview
|
||||||
actChangePreviewLabel
|
actChangePreviewLabel
|
||||||
@@ -641,7 +636,6 @@ const (
|
|||||||
actTransformInputLabel
|
actTransformInputLabel
|
||||||
actTransformListLabel
|
actTransformListLabel
|
||||||
actTransformNth
|
actTransformNth
|
||||||
actTransformWithNth
|
|
||||||
actTransformPointer
|
actTransformPointer
|
||||||
actTransformPreviewLabel
|
actTransformPreviewLabel
|
||||||
actTransformPrompt
|
actTransformPrompt
|
||||||
@@ -661,7 +655,6 @@ const (
|
|||||||
actBgTransformInputLabel
|
actBgTransformInputLabel
|
||||||
actBgTransformListLabel
|
actBgTransformListLabel
|
||||||
actBgTransformNth
|
actBgTransformNth
|
||||||
actBgTransformWithNth
|
|
||||||
actBgTransformPointer
|
actBgTransformPointer
|
||||||
actBgTransformPreviewLabel
|
actBgTransformPreviewLabel
|
||||||
actBgTransformPrompt
|
actBgTransformPrompt
|
||||||
@@ -728,7 +721,6 @@ func processExecution(action actionType) bool {
|
|||||||
actTransformInputLabel,
|
actTransformInputLabel,
|
||||||
actTransformListLabel,
|
actTransformListLabel,
|
||||||
actTransformNth,
|
actTransformNth,
|
||||||
actTransformWithNth,
|
|
||||||
actTransformPointer,
|
actTransformPointer,
|
||||||
actTransformPreviewLabel,
|
actTransformPreviewLabel,
|
||||||
actTransformPrompt,
|
actTransformPrompt,
|
||||||
@@ -745,7 +737,6 @@ func processExecution(action actionType) bool {
|
|||||||
actBgTransformInputLabel,
|
actBgTransformInputLabel,
|
||||||
actBgTransformListLabel,
|
actBgTransformListLabel,
|
||||||
actBgTransformNth,
|
actBgTransformNth,
|
||||||
actBgTransformWithNth,
|
|
||||||
actBgTransformPointer,
|
actBgTransformPointer,
|
||||||
actBgTransformPreviewLabel,
|
actBgTransformPreviewLabel,
|
||||||
actBgTransformPrompt,
|
actBgTransformPrompt,
|
||||||
@@ -775,15 +766,10 @@ type placeholderFlags struct {
|
|||||||
raw bool
|
raw bool
|
||||||
}
|
}
|
||||||
|
|
||||||
type withNthSpec struct {
|
|
||||||
fn func([]Token, int32) string // nil = clear (restore original)
|
|
||||||
}
|
|
||||||
|
|
||||||
type searchRequest struct {
|
type searchRequest struct {
|
||||||
sort bool
|
sort bool
|
||||||
sync bool
|
sync bool
|
||||||
nth *[]Range
|
nth *[]Range
|
||||||
withNth *withNthSpec
|
|
||||||
headerLines *int
|
headerLines *int
|
||||||
command *commandSpec
|
command *commandSpec
|
||||||
environ []string
|
environ []string
|
||||||
@@ -1094,9 +1080,6 @@ func NewTerminal(opts *Options, eventBox *util.EventBox, executor *util.Executor
|
|||||||
nthAttr: opts.Theme.Nth.Attr,
|
nthAttr: opts.Theme.Nth.Attr,
|
||||||
nth: opts.Nth,
|
nth: opts.Nth,
|
||||||
nthCurrent: opts.Nth,
|
nthCurrent: opts.Nth,
|
||||||
withNthDefault: opts.WithNthExpr,
|
|
||||||
withNthExpr: opts.WithNthExpr,
|
|
||||||
withNthEnabled: opts.WithNth != nil,
|
|
||||||
tabstop: opts.Tabstop,
|
tabstop: opts.Tabstop,
|
||||||
raw: opts.Raw,
|
raw: opts.Raw,
|
||||||
hasStartActions: false,
|
hasStartActions: false,
|
||||||
@@ -1368,9 +1351,6 @@ func (t *Terminal) environImpl(forPreview bool) []string {
|
|||||||
if len(t.nthCurrent) > 0 {
|
if len(t.nthCurrent) > 0 {
|
||||||
env = append(env, "FZF_NTH="+RangesToString(t.nthCurrent))
|
env = append(env, "FZF_NTH="+RangesToString(t.nthCurrent))
|
||||||
}
|
}
|
||||||
if len(t.withNthExpr) > 0 {
|
|
||||||
env = append(env, "FZF_WITH_NTH="+t.withNthExpr)
|
|
||||||
}
|
|
||||||
if t.raw {
|
if t.raw {
|
||||||
val := "0"
|
val := "0"
|
||||||
if t.isCurrentItemMatch() {
|
if t.isCurrentItemMatch() {
|
||||||
@@ -1554,7 +1534,7 @@ func (t *Terminal) ansiLabelPrinter(str string, color *tui.ColorPair, fill bool)
|
|||||||
printFn := func(window tui.Window, limit int) {
|
printFn := func(window tui.Window, limit int) {
|
||||||
if offsets == nil {
|
if offsets == nil {
|
||||||
// tui.Col* are not initialized until renderer.Init()
|
// tui.Col* are not initialized until renderer.Init()
|
||||||
offsets = result.colorOffsets(nil, nil, t.theme, *color, *color, t.nthAttr, 0, false)
|
offsets = result.colorOffsets(nil, nil, t.theme, *color, *color, t.nthAttr, false)
|
||||||
}
|
}
|
||||||
for limit > 0 {
|
for limit > 0 {
|
||||||
if length > limit {
|
if length > limit {
|
||||||
@@ -1617,7 +1597,7 @@ func (t *Terminal) parsePrompt(prompt string) (func(), int) {
|
|||||||
return 1
|
return 1
|
||||||
}
|
}
|
||||||
t.printHighlighted(
|
t.printHighlighted(
|
||||||
Result{item: item}, tui.ColPrompt, tui.ColPrompt, false, false, false, line, line, true, preTask, nil, 0)
|
Result{item: item}, tui.ColPrompt, tui.ColPrompt, false, false, false, line, line, true, preTask, nil)
|
||||||
})
|
})
|
||||||
t.wrap = wrap
|
t.wrap = wrap
|
||||||
}
|
}
|
||||||
@@ -1740,17 +1720,6 @@ func (t *Terminal) Input() (bool, []rune) {
|
|||||||
return paused, copySlice(src)
|
return paused, copySlice(src)
|
||||||
}
|
}
|
||||||
|
|
||||||
// PauseRendering blocks the terminal from reading items.
|
|
||||||
// Must be paired with ResumeRendering.
|
|
||||||
func (t *Terminal) PauseRendering() {
|
|
||||||
t.mutex.Lock()
|
|
||||||
}
|
|
||||||
|
|
||||||
// ResumeRendering releases the lock acquired by PauseRendering.
|
|
||||||
func (t *Terminal) ResumeRendering() {
|
|
||||||
t.mutex.Unlock()
|
|
||||||
}
|
|
||||||
|
|
||||||
// UpdateCount updates the count information
|
// UpdateCount updates the count information
|
||||||
func (t *Terminal) UpdateCount(cnt int, final bool, failedCommand *string) {
|
func (t *Terminal) UpdateCount(cnt int, final bool, failedCommand *string) {
|
||||||
t.mutex.Lock()
|
t.mutex.Lock()
|
||||||
@@ -1873,21 +1842,6 @@ func (t *Terminal) UpdateList(result MatchResult) {
|
|||||||
}
|
}
|
||||||
t.revision = newRevision
|
t.revision = newRevision
|
||||||
t.version++
|
t.version++
|
||||||
|
|
||||||
// Filter out selections that no longer match after with-nth change.
|
|
||||||
// Must be inside the revision check so we don't consume the flag
|
|
||||||
// on a stale EvtSearchFin from a previous search.
|
|
||||||
if t.filterSelection && t.multi > 0 && len(t.selected) > 0 {
|
|
||||||
matchMap := t.resultMerger.ToMap()
|
|
||||||
filtered := make(map[int32]selectedItem)
|
|
||||||
for k, v := range t.selected {
|
|
||||||
if _, matched := matchMap[k]; matched {
|
|
||||||
filtered[k] = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
t.selected = filtered
|
|
||||||
}
|
|
||||||
t.filterSelection = false
|
|
||||||
}
|
}
|
||||||
if t.triggerLoad {
|
if t.triggerLoad {
|
||||||
t.triggerLoad = false
|
t.triggerLoad = false
|
||||||
@@ -3185,7 +3139,7 @@ func (t *Terminal) printFooter() {
|
|||||||
func(markerClass) int {
|
func(markerClass) int {
|
||||||
t.footerWindow.Print(indent)
|
t.footerWindow.Print(indent)
|
||||||
return indentSize
|
return indentSize
|
||||||
}, nil, 0)
|
}, nil)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
t.wrap = wrap
|
t.wrap = wrap
|
||||||
@@ -3269,7 +3223,7 @@ func (t *Terminal) printHeaderImpl(window tui.Window, borderShape tui.BorderShap
|
|||||||
func(markerClass) int {
|
func(markerClass) int {
|
||||||
t.window.Print(indent)
|
t.window.Print(indent)
|
||||||
return indentSize
|
return indentSize
|
||||||
}, nil, 0)
|
}, nil)
|
||||||
}
|
}
|
||||||
t.wrap = wrap
|
t.wrap = wrap
|
||||||
}
|
}
|
||||||
@@ -3507,14 +3461,7 @@ func (t *Terminal) printItem(result Result, line int, maxLine int, index int, cu
|
|||||||
}
|
}
|
||||||
return indentSize
|
return indentSize
|
||||||
}
|
}
|
||||||
colCurrent := tui.ColCurrent
|
finalLineNum = t.printHighlighted(result, tui.ColCurrent, tui.ColCurrentMatch, true, true, !matched, line, maxLine, forceRedraw, preTask, postTask)
|
||||||
nthOverlay := t.theme.NthCurrentAttr
|
|
||||||
if selected {
|
|
||||||
nthOverlay = t.theme.NthSelectedAttr.Merge(t.theme.NthCurrentAttr)
|
|
||||||
baseAttr := tui.ColNormal.Attr().Merge(t.theme.NthSelectedAttr).Merge(t.theme.NthCurrentAttr)
|
|
||||||
colCurrent = colCurrent.WithNewAttr(baseAttr)
|
|
||||||
}
|
|
||||||
finalLineNum = t.printHighlighted(result, colCurrent, tui.ColCurrentMatch, true, true, !matched, line, maxLine, forceRedraw, preTask, postTask, nthOverlay)
|
|
||||||
} else {
|
} else {
|
||||||
preTask := func(marker markerClass) int {
|
preTask := func(marker markerClass) int {
|
||||||
w := t.window.Width() - t.pointerLen
|
w := t.window.Width() - t.pointerLen
|
||||||
@@ -3548,11 +3495,7 @@ func (t *Terminal) printItem(result Result, line int, maxLine int, index int, cu
|
|||||||
base = base.WithBg(altBg)
|
base = base.WithBg(altBg)
|
||||||
match = match.WithBg(altBg)
|
match = match.WithBg(altBg)
|
||||||
}
|
}
|
||||||
var nthOverlay tui.Attr
|
finalLineNum = t.printHighlighted(result, base, match, false, true, !matched, line, maxLine, forceRedraw, preTask, postTask)
|
||||||
if selected {
|
|
||||||
nthOverlay = t.theme.NthSelectedAttr
|
|
||||||
}
|
|
||||||
finalLineNum = t.printHighlighted(result, base, match, false, true, !matched, line, maxLine, forceRedraw, preTask, postTask, nthOverlay)
|
|
||||||
}
|
}
|
||||||
for i := 0; i < t.gap && finalLineNum < maxLine; i++ {
|
for i := 0; i < t.gap && finalLineNum < maxLine; i++ {
|
||||||
finalLineNum++
|
finalLineNum++
|
||||||
@@ -3653,7 +3596,7 @@ func (t *Terminal) overflow(runes []rune, max int) bool {
|
|||||||
return t.displayWidthWithLimit(runes, 0, max) > max
|
return t.displayWidthWithLimit(runes, 0, max) > max
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMatch tui.ColorPair, current bool, match bool, hidden bool, lineNum int, maxLineNum int, forceRedraw bool, preTask func(markerClass) int, postTask func(int, int, bool, bool, tui.ColorPair), nthOverlay tui.Attr) int {
|
func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMatch tui.ColorPair, current bool, match bool, hidden bool, lineNum int, maxLineNum int, forceRedraw bool, preTask func(markerClass) int, postTask func(int, int, bool, bool, tui.ColorPair)) int {
|
||||||
var displayWidth int
|
var displayWidth int
|
||||||
item := result.item
|
item := result.item
|
||||||
matchOffsets := []Offset{}
|
matchOffsets := []Offset{}
|
||||||
@@ -3694,9 +3637,7 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
// But if 'nth' is set to 'regular', it's a sign that you're applying
|
// But if 'nth' is set to 'regular', it's a sign that you're applying
|
||||||
// a different style to the rest of the string. e.g. 'nth:regular,fg:dim'
|
// a different style to the rest of the string. e.g. 'nth:regular,fg:dim'
|
||||||
// In this case, we still need to apply it to clear the style.
|
// In this case, we still need to apply it to clear the style.
|
||||||
fgAttr := tui.ColNormal.Attr()
|
colBase = colBase.WithAttr(t.nthAttr)
|
||||||
nthAttrFinal := fgAttr.Merge(t.nthAttr).Merge(nthOverlay)
|
|
||||||
colBase = colBase.WithNewAttr(nthAttrFinal)
|
|
||||||
}
|
}
|
||||||
if !wholeCovered && t.nthAttr > 0 {
|
if !wholeCovered && t.nthAttr > 0 {
|
||||||
var tokens []Token
|
var tokens []Token
|
||||||
@@ -3715,7 +3656,7 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
sort.Sort(ByOrder(nthOffsets))
|
sort.Sort(ByOrder(nthOffsets))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
allOffsets := result.colorOffsets(charOffsets, nthOffsets, t.theme, colBase, colMatch, t.nthAttr, nthOverlay, hidden)
|
allOffsets := result.colorOffsets(charOffsets, nthOffsets, t.theme, colBase, colMatch, t.nthAttr, hidden)
|
||||||
|
|
||||||
// Determine split offset for horizontal scrolling with freeze
|
// Determine split offset for horizontal scrolling with freeze
|
||||||
splitOffset1 := -1
|
splitOffset1 := -1
|
||||||
@@ -3905,7 +3846,6 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
frozenRight = line[splitOffsetRight:]
|
frozenRight = line[splitOffsetRight:]
|
||||||
}
|
}
|
||||||
displayWidthSum := 0
|
displayWidthSum := 0
|
||||||
displayWidthLeft := 0
|
|
||||||
todo := [3]func(){}
|
todo := [3]func(){}
|
||||||
for fidx, runes := range [][]rune{frozenLeft, frozenRight, middle} {
|
for fidx, runes := range [][]rune{frozenLeft, frozenRight, middle} {
|
||||||
if len(runes) == 0 {
|
if len(runes) == 0 {
|
||||||
@@ -3931,11 +3871,7 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
// For frozen parts, reserve space for the ellipsis in the middle part
|
// For frozen parts, reserve space for the ellipsis in the middle part
|
||||||
adjustedMaxWidth -= ellipsisWidth
|
adjustedMaxWidth -= ellipsisWidth
|
||||||
}
|
}
|
||||||
var prefixWidth int
|
displayWidth = t.displayWidthWithLimit(runes, 0, adjustedMaxWidth)
|
||||||
if fidx == 2 {
|
|
||||||
prefixWidth = displayWidthLeft
|
|
||||||
}
|
|
||||||
displayWidth = t.displayWidthWithLimit(runes, prefixWidth, adjustedMaxWidth)
|
|
||||||
if !t.wrap && displayWidth > adjustedMaxWidth {
|
if !t.wrap && displayWidth > adjustedMaxWidth {
|
||||||
maxe = util.Constrain(maxe+min(maxWidth/2-ellipsisWidth, t.hscrollOff), 0, len(runes))
|
maxe = util.Constrain(maxe+min(maxWidth/2-ellipsisWidth, t.hscrollOff), 0, len(runes))
|
||||||
transformOffsets := func(diff int32) {
|
transformOffsets := func(diff int32) {
|
||||||
@@ -3973,9 +3909,6 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
displayWidth = t.displayWidthWithLimit(runes, 0, maxWidth)
|
displayWidth = t.displayWidthWithLimit(runes, 0, maxWidth)
|
||||||
}
|
}
|
||||||
displayWidthSum += displayWidth
|
displayWidthSum += displayWidth
|
||||||
if fidx == 0 {
|
|
||||||
displayWidthLeft = displayWidth
|
|
||||||
}
|
|
||||||
|
|
||||||
if maxWidth > 0 {
|
if maxWidth > 0 {
|
||||||
color := colBase
|
color := colBase
|
||||||
@@ -3983,7 +3916,7 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
color = color.WithFg(t.theme.Nomatch)
|
color = color.WithFg(t.theme.Nomatch)
|
||||||
}
|
}
|
||||||
todo[fidx] = func() {
|
todo[fidx] = func() {
|
||||||
t.printColoredString(t.window, runes, offs, color, prefixWidth)
|
t.printColoredString(t.window, runes, offs, color)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
break
|
break
|
||||||
@@ -4010,13 +3943,10 @@ func (t *Terminal) printHighlighted(result Result, colBase tui.ColorPair, colMat
|
|||||||
return finalLineNum
|
return finalLineNum
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *Terminal) printColoredString(window tui.Window, text []rune, offsets []colorOffset, colBase tui.ColorPair, initialPrefixWidth ...int) {
|
func (t *Terminal) printColoredString(window tui.Window, text []rune, offsets []colorOffset, colBase tui.ColorPair) {
|
||||||
var index int32
|
var index int32
|
||||||
var substr string
|
var substr string
|
||||||
var prefixWidth int
|
var prefixWidth int
|
||||||
if len(initialPrefixWidth) > 0 {
|
|
||||||
prefixWidth = initialPrefixWidth[0]
|
|
||||||
}
|
|
||||||
maxOffset := int32(len(text))
|
maxOffset := int32(len(text))
|
||||||
var url *url
|
var url *url
|
||||||
for _, offset := range offsets {
|
for _, offset := range offsets {
|
||||||
@@ -4223,7 +4153,7 @@ func (t *Terminal) followOffset() int {
|
|||||||
for i := len(body) - 1; i >= 0; i-- {
|
for i := len(body) - 1; i >= 0; i-- {
|
||||||
h := t.previewLineHeight(body[i], maxWidth)
|
h := t.previewLineHeight(body[i], maxWidth)
|
||||||
if visualLines+h > height {
|
if visualLines+h > height {
|
||||||
return min(len(lines)-1, headerLines+i+1)
|
return headerLines + i + 1
|
||||||
}
|
}
|
||||||
visualLines += h
|
visualLines += h
|
||||||
}
|
}
|
||||||
@@ -4521,7 +4451,7 @@ Loop:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
t.previewer.scrollable = t.previewer.scrollable || t.pwindow.Y() == height-1 && t.pwindow.X() == t.pwindow.Width() || t.previewed.filled
|
t.previewer.scrollable = t.previewer.scrollable || t.pwindow.Y() == height-1 && t.pwindow.X() == t.pwindow.Width()
|
||||||
if fillRet == tui.FillNextLine {
|
if fillRet == tui.FillNextLine {
|
||||||
continue
|
continue
|
||||||
} else if fillRet == tui.FillSuspend {
|
} else if fillRet == tui.FillSuspend {
|
||||||
@@ -4544,7 +4474,7 @@ Loop:
|
|||||||
}
|
}
|
||||||
lineNo++
|
lineNo++
|
||||||
}
|
}
|
||||||
t.previewer.scrollable = t.previewer.scrollable || t.previewed.filled || index < len(lines)-1
|
t.previewer.scrollable = t.previewer.scrollable || index < len(lines)-1
|
||||||
t.previewed.image = image
|
t.previewed.image = image
|
||||||
t.previewed.wireframe = wireframe
|
t.previewed.wireframe = wireframe
|
||||||
}
|
}
|
||||||
@@ -5992,7 +5922,6 @@ func (t *Terminal) Loop() error {
|
|||||||
events := []util.EventType{}
|
events := []util.EventType{}
|
||||||
changed := false
|
changed := false
|
||||||
var newNth *[]Range
|
var newNth *[]Range
|
||||||
var newWithNth *withNthSpec
|
|
||||||
var newHeaderLines *int
|
var newHeaderLines *int
|
||||||
req := func(evts ...util.EventType) {
|
req := func(evts ...util.EventType) {
|
||||||
for _, event := range evts {
|
for _, event := range evts {
|
||||||
@@ -6010,7 +5939,6 @@ func (t *Terminal) Loop() error {
|
|||||||
events = []util.EventType{}
|
events = []util.EventType{}
|
||||||
changed = false
|
changed = false
|
||||||
newNth = nil
|
newNth = nil
|
||||||
newWithNth = nil
|
|
||||||
newHeaderLines = nil
|
newHeaderLines = nil
|
||||||
beof := false
|
beof := false
|
||||||
queryChanged := false
|
queryChanged := false
|
||||||
@@ -6402,33 +6330,6 @@ func (t *Terminal) Loop() error {
|
|||||||
t.forceRerenderList()
|
t.forceRerenderList()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
case actChangeWithNth, actTransformWithNth, actBgTransformWithNth:
|
|
||||||
if !t.withNthEnabled {
|
|
||||||
break Action
|
|
||||||
}
|
|
||||||
capture(true, func(expr string) {
|
|
||||||
tokens := strings.Split(expr, "|")
|
|
||||||
withNthExpr := tokens[0]
|
|
||||||
if len(tokens) > 1 {
|
|
||||||
a.a = strings.Join(append(tokens[1:], tokens[0]), "|")
|
|
||||||
}
|
|
||||||
// Empty value restores the default --with-nth
|
|
||||||
if len(withNthExpr) == 0 {
|
|
||||||
withNthExpr = t.withNthDefault
|
|
||||||
}
|
|
||||||
if withNthExpr != t.withNthExpr {
|
|
||||||
if factory, err := nthTransformer(withNthExpr); err == nil {
|
|
||||||
newWithNth = &withNthSpec{fn: factory(t.delimiter)}
|
|
||||||
} else {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
t.withNthExpr = withNthExpr
|
|
||||||
t.filterSelection = true
|
|
||||||
changed = true
|
|
||||||
t.clearNumLinesCache()
|
|
||||||
t.forceRerenderList()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
case actChangeQuery:
|
case actChangeQuery:
|
||||||
t.input = []rune(a.a)
|
t.input = []rune(a.a)
|
||||||
t.cx = len(t.input)
|
t.cx = len(t.input)
|
||||||
@@ -7576,7 +7477,7 @@ func (t *Terminal) Loop() error {
|
|||||||
reload := changed || newCommand != nil
|
reload := changed || newCommand != nil
|
||||||
var reloadRequest *searchRequest
|
var reloadRequest *searchRequest
|
||||||
if reload {
|
if reload {
|
||||||
reloadRequest = &searchRequest{sort: t.sort, sync: reloadSync, nth: newNth, withNth: newWithNth, headerLines: newHeaderLines, command: newCommand, environ: t.environ(), changed: changed, denylist: denylist, revision: t.resultMerger.Revision()}
|
reloadRequest = &searchRequest{sort: t.sort, sync: reloadSync, nth: newNth, headerLines: newHeaderLines, command: newCommand, environ: t.environ(), changed: changed, denylist: denylist, revision: t.resultMerger.Revision()}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Dispatch queued background requests
|
// Dispatch queued background requests
|
||||||
|
|||||||
@@ -161,7 +161,7 @@ func awkTokenizer(input string) ([]string, int) {
|
|||||||
end := 0
|
end := 0
|
||||||
for idx := 0; idx < len(input); idx++ {
|
for idx := 0; idx < len(input); idx++ {
|
||||||
r := input[idx]
|
r := input[idx]
|
||||||
white := r == 9 || r == 32 || r == 10
|
white := r == 9 || r == 32
|
||||||
switch state {
|
switch state {
|
||||||
case awkNil:
|
case awkNil:
|
||||||
if white {
|
if white {
|
||||||
@@ -218,12 +218,11 @@ func Tokenize(text string, delimiter Delimiter) []Token {
|
|||||||
return withPrefixLengths(tokens, 0)
|
return withPrefixLengths(tokens, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
// StripLastDelimiter removes the trailing delimiter
|
// StripLastDelimiter removes the trailing delimiter and whitespaces
|
||||||
func StripLastDelimiter(str string, delimiter Delimiter) string {
|
func StripLastDelimiter(str string, delimiter Delimiter) string {
|
||||||
if delimiter.str != nil {
|
if delimiter.str != nil {
|
||||||
return strings.TrimSuffix(str, *delimiter.str)
|
str = strings.TrimSuffix(str, *delimiter.str)
|
||||||
}
|
} else if delimiter.regex != nil {
|
||||||
if delimiter.regex != nil {
|
|
||||||
locs := delimiter.regex.FindAllStringIndex(str, -1)
|
locs := delimiter.regex.FindAllStringIndex(str, -1)
|
||||||
if len(locs) > 0 {
|
if len(locs) > 0 {
|
||||||
lastLoc := locs[len(locs)-1]
|
lastLoc := locs[len(locs)-1]
|
||||||
@@ -231,7 +230,6 @@ func StripLastDelimiter(str string, delimiter Delimiter) string {
|
|||||||
str = str[:lastLoc[0]]
|
str = str[:lastLoc[0]]
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return str
|
|
||||||
}
|
}
|
||||||
return strings.TrimRightFunc(str, unicode.IsSpace)
|
return strings.TrimRightFunc(str, unicode.IsSpace)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -56,9 +56,9 @@ func TestParseRange(t *testing.T) {
|
|||||||
|
|
||||||
func TestTokenize(t *testing.T) {
|
func TestTokenize(t *testing.T) {
|
||||||
// AWK-style
|
// AWK-style
|
||||||
input := " abc: \n\t def: ghi "
|
input := " abc: def: ghi "
|
||||||
tokens := Tokenize(input, Delimiter{})
|
tokens := Tokenize(input, Delimiter{})
|
||||||
if tokens[0].text.ToString() != "abc: \n\t " || tokens[0].prefixLength != 2 {
|
if tokens[0].text.ToString() != "abc: " || tokens[0].prefixLength != 2 {
|
||||||
t.Errorf("%s", tokens)
|
t.Errorf("%s", tokens)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -71,9 +71,9 @@ func TestTokenize(t *testing.T) {
|
|||||||
// With delimiter regex
|
// With delimiter regex
|
||||||
tokens = Tokenize(input, delimiterRegexp("\\s+"))
|
tokens = Tokenize(input, delimiterRegexp("\\s+"))
|
||||||
if tokens[0].text.ToString() != " " || tokens[0].prefixLength != 0 ||
|
if tokens[0].text.ToString() != " " || tokens[0].prefixLength != 0 ||
|
||||||
tokens[1].text.ToString() != "abc: \n\t " || tokens[1].prefixLength != 2 ||
|
tokens[1].text.ToString() != "abc: " || tokens[1].prefixLength != 2 ||
|
||||||
tokens[2].text.ToString() != "def: " || tokens[2].prefixLength != 10 ||
|
tokens[2].text.ToString() != "def: " || tokens[2].prefixLength != 8 ||
|
||||||
tokens[3].text.ToString() != "ghi " || tokens[3].prefixLength != 16 {
|
tokens[3].text.ToString() != "ghi " || tokens[3].prefixLength != 14 {
|
||||||
t.Errorf("%s", tokens)
|
t.Errorf("%s", tokens)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -447,12 +447,6 @@ func (p ColorPair) WithAttr(attr Attr) ColorPair {
|
|||||||
return dup
|
return dup
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p ColorPair) WithNewAttr(attr Attr) ColorPair {
|
|
||||||
dup := p
|
|
||||||
dup.attr = attr
|
|
||||||
return dup
|
|
||||||
}
|
|
||||||
|
|
||||||
func (p ColorPair) WithFg(fg ColorAttr) ColorPair {
|
func (p ColorPair) WithFg(fg ColorAttr) ColorPair {
|
||||||
dup := p
|
dup := p
|
||||||
fgPair := ColorPair{fg.Color, colUndefined, colUndefined, fg.Attr}
|
fgPair := ColorPair{fg.Color, colUndefined, colUndefined, fg.Attr}
|
||||||
@@ -526,8 +520,6 @@ type ColorTheme struct {
|
|||||||
ListLabel ColorAttr
|
ListLabel ColorAttr
|
||||||
ListBorder ColorAttr
|
ListBorder ColorAttr
|
||||||
GapLine ColorAttr
|
GapLine ColorAttr
|
||||||
NthCurrentAttr Attr // raw current-fg attr (before fg merge) for nth overlay
|
|
||||||
NthSelectedAttr Attr // raw selected-fg attr (before ListFg inherit) for nth overlay
|
|
||||||
}
|
}
|
||||||
|
|
||||||
type Event struct {
|
type Event struct {
|
||||||
@@ -1207,19 +1199,13 @@ func InitTheme(theme *ColorTheme, baseTheme *ColorTheme, boldify bool, forceBlac
|
|||||||
match.Attr = Underline
|
match.Attr = Underline
|
||||||
}
|
}
|
||||||
theme.Match = o(baseTheme.Match, match)
|
theme.Match = o(baseTheme.Match, match)
|
||||||
// These colors are not defined in the base themes.
|
// Inherit from 'fg', so that we don't have to write 'current-fg:dim'
|
||||||
// Resolve ListFg/ListBg early so Current and Selected can inherit from them.
|
|
||||||
theme.ListFg = o(theme.Fg, theme.ListFg)
|
|
||||||
theme.ListBg = o(theme.Bg, theme.ListBg)
|
|
||||||
// Inherit from 'list-fg', so that we don't have to write 'current-fg:dim'
|
|
||||||
// e.g. fzf --delimiter / --nth -1 --color fg:dim,nth:regular
|
// e.g. fzf --delimiter / --nth -1 --color fg:dim,nth:regular
|
||||||
current := theme.Current
|
current := theme.Current
|
||||||
if !baseTheme.Colored && current.IsUndefined() {
|
if !baseTheme.Colored && current.IsUndefined() {
|
||||||
current.Attr |= Reverse
|
current.Attr |= Reverse
|
||||||
}
|
}
|
||||||
resolvedCurrent := o(baseTheme.Current, current)
|
theme.Current = theme.Fg.Merge(o(baseTheme.Current, current))
|
||||||
theme.NthCurrentAttr = resolvedCurrent.Attr
|
|
||||||
theme.Current = theme.ListFg.Merge(resolvedCurrent)
|
|
||||||
currentMatch := theme.CurrentMatch
|
currentMatch := theme.CurrentMatch
|
||||||
if !baseTheme.Colored && currentMatch.IsUndefined() {
|
if !baseTheme.Colored && currentMatch.IsUndefined() {
|
||||||
currentMatch.Attr |= Reverse | Underline
|
currentMatch.Attr |= Reverse | Underline
|
||||||
@@ -1244,8 +1230,10 @@ func InitTheme(theme *ColorTheme, baseTheme *ColorTheme, boldify bool, forceBlac
|
|||||||
scrollbarDefined := theme.Scrollbar != undefined
|
scrollbarDefined := theme.Scrollbar != undefined
|
||||||
previewBorderDefined := theme.PreviewBorder != undefined
|
previewBorderDefined := theme.PreviewBorder != undefined
|
||||||
|
|
||||||
theme.NthSelectedAttr = theme.SelectedFg.Attr
|
// These colors are not defined in the base themes
|
||||||
theme.SelectedFg = theme.ListFg.Merge(theme.SelectedFg)
|
theme.ListFg = o(theme.Fg, theme.ListFg)
|
||||||
|
theme.ListBg = o(theme.Bg, theme.ListBg)
|
||||||
|
theme.SelectedFg = o(theme.ListFg, theme.SelectedFg)
|
||||||
theme.SelectedBg = o(theme.ListBg, theme.SelectedBg)
|
theme.SelectedBg = o(theme.ListBg, theme.SelectedBg)
|
||||||
theme.SelectedMatch = o(theme.Match, theme.SelectedMatch)
|
theme.SelectedMatch = o(theme.Match, theme.SelectedMatch)
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"syscall"
|
"syscall"
|
||||||
|
|
||||||
"github.com/junegunn/go-shellwords"
|
|
||||||
"golang.org/x/sys/unix"
|
"golang.org/x/sys/unix"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -21,8 +20,8 @@ type Executor struct {
|
|||||||
|
|
||||||
func NewExecutor(withShell string) *Executor {
|
func NewExecutor(withShell string) *Executor {
|
||||||
shell := os.Getenv("SHELL")
|
shell := os.Getenv("SHELL")
|
||||||
args, err := shellwords.Parse(withShell)
|
args := strings.Fields(withShell)
|
||||||
if err == nil && len(args) > 0 {
|
if len(args) > 0 {
|
||||||
shell = args[0]
|
shell = args[0]
|
||||||
args = args[1:]
|
args = args[1:]
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
@@ -1190,16 +1190,6 @@ class TestCore < TestInteractive
|
|||||||
tmux.until { |lines| assert lines.any_include?('9999␊10000') }
|
tmux.until { |lines| assert lines.any_include?('9999␊10000') }
|
||||||
end
|
end
|
||||||
|
|
||||||
def test_freeze_left_tabstop
|
|
||||||
writelines(%W[1\t2\t3])
|
|
||||||
# With --freeze-left 1 and --tabstop=2:
|
|
||||||
# Frozen left: "1" (width 1)
|
|
||||||
# Middle starts with "\t" at prefix width 1, tabstop 2 → 1 space
|
|
||||||
# Then "2" at column 2, next "\t" at column 3 → 1 space, then "3"
|
|
||||||
tmux.send_keys %(cat #{tempname} | #{FZF} --tabstop=2 --freeze-left 1), :Enter
|
|
||||||
tmux.until { |lines| assert_equal '> 1 2 3', lines[-3] }
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_freeze_left_keep_right
|
def test_freeze_left_keep_right
|
||||||
tmux.send_keys %(seq 10000 | #{FZF} --read0 --delimiter "\n" --freeze-left 3 --keep-right --ellipsis XX --no-multi-line --bind space:toggle-multi-line), :Enter
|
tmux.send_keys %(seq 10000 | #{FZF} --read0 --delimiter "\n" --freeze-left 3 --keep-right --ellipsis XX --no-multi-line --bind space:toggle-multi-line), :Enter
|
||||||
tmux.until { |lines| assert_match(/^> 1␊2␊3XX.*10000␊$/, lines[-3]) }
|
tmux.until { |lines| assert_match(/^> 1␊2␊3XX.*10000␊$/, lines[-3]) }
|
||||||
@@ -1755,191 +1745,6 @@ class TestCore < TestInteractive
|
|||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def test_change_with_nth
|
|
||||||
input = [
|
|
||||||
'foo bar baz',
|
|
||||||
'aaa bbb ccc',
|
|
||||||
'xxx yyy zzz'
|
|
||||||
]
|
|
||||||
writelines(input)
|
|
||||||
# Start with field 1 only, cycle through fields, verify $FZF_WITH_NTH via prompt
|
|
||||||
tmux.send_keys %(#{FZF} --with-nth 1 --bind 'space:change-with-nth(2|3|1),result:transform-prompt:echo "[$FZF_WITH_NTH]> "' < #{tempname}), :Enter
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert_equal 3, lines.item_count
|
|
||||||
assert lines.any_include?('[1]>')
|
|
||||||
assert lines.any_include?('foo')
|
|
||||||
refute lines.any_include?('bar')
|
|
||||||
end
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('[2]>')
|
|
||||||
assert lines.any_include?('bar')
|
|
||||||
refute lines.any_include?('foo')
|
|
||||||
end
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('[3]>')
|
|
||||||
assert lines.any_include?('baz')
|
|
||||||
refute lines.any_include?('bar')
|
|
||||||
end
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('[1]>')
|
|
||||||
assert lines.any_include?('foo')
|
|
||||||
refute lines.any_include?('bar')
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_change_with_nth_default
|
|
||||||
# Empty value restores the default --with-nth
|
|
||||||
tmux.send_keys %(echo -e 'a b c\nd e f' | #{FZF} --with-nth 1 --bind 'space:change-with-nth(2|)'), :Enter
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert_equal 2, lines.item_count
|
|
||||||
assert lines.any_include?('a')
|
|
||||||
refute lines.any_include?('b')
|
|
||||||
end
|
|
||||||
# Switch to field 2
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('b')
|
|
||||||
refute lines.any_include?('a')
|
|
||||||
end
|
|
||||||
# Empty restores default (field 1)
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('a')
|
|
||||||
refute lines.any_include?('b')
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_transform_with_nth_search
|
|
||||||
input = [
|
|
||||||
'alpha bravo charlie',
|
|
||||||
'delta echo foxtrot',
|
|
||||||
'golf hotel india'
|
|
||||||
]
|
|
||||||
writelines(input)
|
|
||||||
tmux.send_keys %(#{FZF} --with-nth 1 --bind 'space:transform-with-nth(echo 2)' -q '^bravo$' < #{tempname}), :Enter
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert_equal 0, lines.match_count
|
|
||||||
end
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert_equal 1, lines.match_count
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def test_bg_transform_with_nth_output
|
|
||||||
tmux.send_keys %(echo -e 'a b c\nd e f' | #{FZF} --with-nth 2 --bind 'space:bg-transform-with-nth(echo 3)'), :Enter
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert_equal 2, lines.item_count
|
|
||||||
assert lines.any_include?('b')
|
|
||||||
end
|
|
||||||
tmux.send_keys :Space
|
|
||||||
tmux.until do |lines|
|
|
||||||
assert lines.any_include?('c')
|
|
||||||
refute lines.any_include?('b')
|
|
||||||
end
|
|
||||||
tmux.send_keys :Enter
|
|
||||||
tmux.until { |lines| assert lines.any_include?('a b c') || lines.any_include?('d e f') }
|
|
||||||
end
|
|
||||||
|
|
||||||
# change-with-nth must re-run the current search against the new field set:
# the anchored query '^bravo$' only matches once field 2 is displayed.
def test_change_with_nth_search
  writelines([
    'alpha bravo charlie',
    'delta echo foxtrot',
    'golf hotel india'
  ])
  tmux.send_keys %(#{FZF} --with-nth 1 --bind 'space:change-with-nth(2)' -q '^bravo$' < #{tempname}), :Enter
  # Field 1 is shown, so nothing can match the anchored query
  tmux.until { |screen| assert_equal 0, screen.match_count }
  # Switch the display to field 2; exactly one item now matches
  tmux.send_keys :Space
  tmux.until { |screen| assert_equal 1, screen.match_count }
end
|
|
||||||
|
|
||||||
# change-with-nth should swap the displayed field, while accepting still
# outputs the original, untransformed line.
def test_change_with_nth_output
  tmux.send_keys %(echo -e 'a b c\nd e f' | #{FZF} --with-nth 2 --bind 'space:change-with-nth(3)'), :Enter
  tmux.until do |screen|
    assert_equal 2, screen.item_count
    assert screen.any_include?('b')
  end
  # Display field 3 instead of field 2
  tmux.send_keys :Space
  tmux.until do |screen|
    assert screen.any_include?('c')
    refute screen.any_include?('b')
  end
  # Accept: the full original line is printed, not the displayed field
  tmux.send_keys :Enter
  tmux.until do |screen|
    assert screen.any_include?('a b c') || screen.any_include?('d e f')
  end
end
|
|
||||||
|
|
||||||
# After change-with-nth, selected items that no longer match the query
# should lose their selection.
def test_change_with_nth_selection
  # Field 1 is unique per item; field 2 is either 'match' or 'miss'
  writelines([
    'one match x',
    'two miss y',
    'three match z'
  ])
  # Show field 2, query 'match'; bind select-all and a switch to field 3
  tmux.send_keys %(#{FZF} --with-nth 2 --multi --bind 'ctrl-a:select-all,space:change-with-nth(3)' -q match < #{tempname}), :Enter
  tmux.until { |screen| assert_equal 2, screen.match_count }
  # Mark every matching item
  tmux.send_keys 'C-a'
  tmux.until { |screen| assert screen.any_include?('(2)') }
  # Field 3 values ('x', 'z') don't contain 'match', so nothing matches
  # anymore and the stale selections must be cleared
  tmux.send_keys :Space
  tmux.until do |screen|
    assert_equal 0, screen.match_count
    assert screen.any_include?('(0)')
  end
end
|
|
||||||
|
|
||||||
# change-with-nth on multi-line (--read0) items: each item spans 3 lines
# ("N-a", "N-b", "N-c"); --with-nth 1 collapses it to a single line while
# 1..3 shows all three, and scrolling must stay consistent either way.
def test_change_with_nth_multiline
  tmux.send_keys %(seq 20 | xargs -I{} printf '{}-a\\n{}-b\\n{}-c\\0' | #{FZF} --read0 --delimiter "\n" --with-nth 1 --bind 'space:change-with-nth(1..3|1)' --no-sort), :Enter
  tmux.until do |screen|
    assert_equal 20, screen.item_count
    assert screen.any_include?('1-a')
    refute screen.any_include?('1-b')
  end
  # Expand every item to 3 lines
  tmux.send_keys :Space
  tmux.until do |screen|
    %w[1-a 1-b 1-c].each { |token| assert screen.any_include?(token) }
  end
  # Cursor movement must track the taller items
  5.times { tmux.send_keys :Down }
  tmux.until do |screen|
    %w[6-a 6-b 6-c].each { |token| assert screen.any_include?(token) }
  end
  # Collapse back to one line per item
  tmux.send_keys :Space
  tmux.until do |screen|
    assert screen.any_include?('6-a')
    refute screen.any_include?('6-b')
  end
  # Movement keeps working after collapsing
  5.times { tmux.send_keys :Down }
  tmux.until do |screen|
    assert screen.any_include?('11-a')
    refute screen.any_include?('11-b')
  end
end
|
|
||||||
|
|
||||||
def test_env_vars
|
def test_env_vars
|
||||||
def env_vars
|
def env_vars
|
||||||
return {} unless File.exist?(tempname)
|
return {} unless File.exist?(tempname)
|
||||||
@@ -2095,13 +1900,13 @@ class TestCore < TestInteractive
|
|||||||
tmux.send_keys %(echo "foo ,bar,baz" | #{FZF} -d, --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
|
tmux.send_keys %(echo "foo ,bar,baz" | #{FZF} -d, --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
|
||||||
wait do
|
wait do
|
||||||
assert_path_exists tempname
|
assert_path_exists tempname
|
||||||
# Last delimiter is removed
|
# Last delimiter and the whitespaces are removed
|
||||||
assert_equal ['bar,bar,foo ,bazfoo'], File.readlines(tempname, chomp: true)
|
assert_equal ['bar,bar,foo ,bazfoo'], File.readlines(tempname, chomp: true)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def test_accept_nth_regex_delimiter
|
def test_accept_nth_regex_delimiter
|
||||||
tmux.send_keys %(echo "foo :,:bar,baz" | #{FZF} --delimiter=' *[:,]+ *' --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
|
tmux.send_keys %(echo "foo :,:bar,baz" | #{FZF} --delimiter='[:,]+' --accept-nth 2,2,1,3,1 --sync --bind start:accept > #{tempname}), :Enter
|
||||||
wait do
|
wait do
|
||||||
assert_path_exists tempname
|
assert_path_exists tempname
|
||||||
# Last delimiter and the whitespaces are removed
|
# Last delimiter and the whitespaces are removed
|
||||||
@@ -2119,7 +1924,7 @@ class TestCore < TestInteractive
|
|||||||
end
|
end
|
||||||
|
|
||||||
def test_accept_nth_template
|
def test_accept_nth_template
|
||||||
tmux.send_keys %(echo "foo ,bar,baz" | #{FZF} -d " *, *" --accept-nth '[{n}] 1st: {1}, 3rd: {3}, 2nd: {2}' --sync --bind start:accept > #{tempname}), :Enter
|
tmux.send_keys %(echo "foo ,bar,baz" | #{FZF} -d, --accept-nth '[{n}] 1st: {1}, 3rd: {3}, 2nd: {2}' --sync --bind start:accept > #{tempname}), :Enter
|
||||||
wait do
|
wait do
|
||||||
assert_path_exists tempname
|
assert_path_exists tempname
|
||||||
# Last delimiter and the whitespaces are removed
|
# Last delimiter and the whitespaces are removed
|
||||||
|
|||||||
@@ -393,20 +393,6 @@ class TestPreview < TestInteractive
|
|||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# With follow + wrap, a single long wrapped preview line should keep the
# preview scrolled to the bottom ('3/3'), and preview-up / preview-down
# should move the offset indicator accordingly.
def test_preview_follow_wrap_long_line
  tmux.send_keys %(seq 1 | #{FZF} --preview "seq 2; yes yes | head -10000 | tr '\n' ' '" --preview-window follow,wrap --bind up:preview-up,down:preview-down), :Enter
  tmux.until do |screen|
    assert_equal 1, screen.match_count
    assert screen.any_include?('3/3 │')
  end
  # Step the preview offset up twice, then back down once
  [[:Up, '2/3 │'], [:Up, '1/3 │'], [:Down, '2/3 │']].each do |key, indicator|
    tmux.send_keys key
    tmux.until { |screen| assert screen.any_include?(indicator) }
  end
end
|
|
||||||
|
|
||||||
def test_close
|
def test_close
|
||||||
tmux.send_keys "seq 100 | #{FZF} --preview 'echo foo' --bind ctrl-c:close", :Enter
|
tmux.send_keys "seq 100 | #{FZF} --preview 'echo foo' --bind ctrl-c:close", :Enter
|
||||||
tmux.until { |lines| assert_equal 100, lines.match_count }
|
tmux.until { |lines| assert_equal 100, lines.match_count }
|
||||||
@@ -607,7 +593,7 @@ class TestPreview < TestInteractive
|
|||||||
end
|
end
|
||||||
|
|
||||||
def test_preview_wrap_sign_between_ansi_fragments_overflow
|
def test_preview_wrap_sign_between_ansi_fragments_overflow
|
||||||
tmux.send_keys %(seq 1 | #{FZF} --preview 'echo -e "\\x1b[33m123 \\x1b[mhi"; echo -e "\\x1b[33m123 \\x1b[mhi"' --preview-window 2,wrap-word,noinfo), :Enter
|
tmux.send_keys %(seq 1 | #{FZF} --preview 'echo -e "\\x1b[33m1234567890 \\x1b[mhello"; echo -e "\\x1b[33m1234567890 \\x1b[mhello"' --preview-window 2,wrap-word), :Enter
|
||||||
tmux.until do |lines|
|
tmux.until do |lines|
|
||||||
assert_equal 1, lines.match_count
|
assert_equal 1, lines.match_count
|
||||||
assert_equal(2, lines.count { |line| line.include?('│ 12 │') })
|
assert_equal(2, lines.count { |line| line.include?('│ 12 │') })
|
||||||
@@ -616,7 +602,7 @@ class TestPreview < TestInteractive
|
|||||||
end
|
end
|
||||||
|
|
||||||
def test_preview_wrap_sign_between_ansi_fragments_overflow2
|
def test_preview_wrap_sign_between_ansi_fragments_overflow2
|
||||||
tmux.send_keys %(seq 1 | #{FZF} --preview 'echo -e "\\x1b[33m123 \\x1b[mhi"; echo -e "\\x1b[33m123 \\x1b[mhi"' --preview-window 1,wrap-word,noinfo), :Enter
|
tmux.send_keys %(seq 1 | #{FZF} --preview 'echo -e "\\x1b[33m1234567890 \\x1b[mhello"; echo -e "\\x1b[33m1234567890 \\x1b[mhello"' --preview-window 1,wrap-word), :Enter
|
||||||
tmux.until do |lines|
|
tmux.until do |lines|
|
||||||
assert_equal 1, lines.match_count
|
assert_equal 1, lines.match_count
|
||||||
assert_equal(2, lines.count { |line| line.include?('│ 1 │') })
|
assert_equal(2, lines.count { |line| line.include?('│ 1 │') })
|
||||||
|
|||||||
@@ -7,4 +7,4 @@ tabe = "tabe"
|
|||||||
Iterm = "Iterm"
|
Iterm = "Iterm"
|
||||||
|
|
||||||
[files]
|
[files]
|
||||||
extend-exclude = ["README.md", "*.s"]
|
extend-exclude = ["README.md"]
|
||||||
|
|||||||
Reference in New Issue
Block a user