parent aa5dae391b
commit f092e4038f
@@ -34,6 +34,10 @@ CHANGELOG
     fzf --delimiter : --preview 'nl {1}' --preview-window +{2}-5
     ```
 - Added support for ANSI colors in `--prompt` string
+- Smart match of accented characters
+    - An unaccented character in the query string will match both accented and
+      unaccented characters, while an accented character will only match
+      accented characters. This is similar to how "smart-case" match works.
 - Vim plugin
     - `tmux` layout option for using fzf-tmux
         ```vim
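A minimal, self-contained sketch of the rule described in the new CHANGELOG entry. The `normalizeRune` table and `smartMatch` helper are illustrative stand-ins (they are not fzf's `algo.NormalizeRunes`, and the separate smart-case rule is ignored here): an unaccented query character matches both forms, while an accented one matches only the accented form.

```go
package main

import "fmt"

// normalizeRune is an illustrative stand-in for fzf's algo.NormalizeRunes:
// it folds a handful of accented Latin letters and leaves the rest alone.
func normalizeRune(r rune) rune {
	switch r {
	case 'á', 'à', 'â', 'ä':
		return 'a'
	case 'Á', 'À', 'Â', 'Ä':
		return 'A'
	}
	return r
}

// smartMatch applies the "smart" accent rule to a whole-string comparison:
// the candidate is normalized only when the query itself contains no
// accented characters.
func smartMatch(query, candidate string) bool {
	queryIsPlain := true
	for _, r := range query {
		if normalizeRune(r) != r {
			queryIsPlain = false
			break
		}
	}
	cand := []rune(candidate)
	if queryIsPlain {
		for i, r := range cand {
			cand[i] = normalizeRune(r)
		}
	}
	return string(cand) == query
}

func main() {
	fmt.Println(smartMatch("a", "á")) // true: unaccented query matches the accented form
	fmt.Println(smartMatch("á", "a")) // false: accented query requires the accent
	fmt.Println(smartMatch("á", "á")) // true
}
```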
@@ -33,6 +33,7 @@ type term struct {
     inv           bool
     text          []rune
     caseSensitive bool
+    normalize     bool
 }
 
 // String returns the string representation of a term.
@@ -128,6 +129,8 @@ func BuildPattern(fuzzy bool, fuzzyAlgo algo.Algo, extended bool, caseMode Case,
         }
     } else {
         lowerString := strings.ToLower(asString)
+        normalize = normalize &&
+            lowerString == string(algo.NormalizeRunes([]rune(lowerString)))
         caseSensitive = caseMode == CaseRespect ||
             caseMode == CaseSmart && lowerString != asString
         if !caseSensitive {
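The two added lines gate normalization on the query itself: it stays enabled only while the lowercased query is left unchanged by `algo.NormalizeRunes`, i.e. the user typed no accented characters, mirroring how smart-case turns off case folding when the query contains an uppercase letter. The same check reappears per term in `parseTerms` below. A standalone restatement of that predicate (the package name and the `normalizeRunes` parameter are illustrative, not fzf's actual API):

```go
package sketch

// shouldNormalize reports whether accent-insensitive matching should remain
// enabled for an already-lowercased query: only while normalizing the query
// is a no-op, meaning the user typed no accented characters themselves.
// normalizeRunes stands in for algo.NormalizeRunes.
func shouldNormalize(normalizeOption bool, lowerQuery string, normalizeRunes func([]rune) []rune) bool {
	return normalizeOption &&
		lowerQuery == string(normalizeRunes([]rune(lowerQuery)))
}
```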
@@ -173,6 +176,8 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet
         lowerText := strings.ToLower(text)
         caseSensitive := caseMode == CaseRespect ||
             caseMode == CaseSmart && text != lowerText
+        normalizeTerm := normalize &&
+            lowerText == string(algo.NormalizeRunes([]rune(lowerText)))
         if !caseSensitive {
             text = lowerText
         }
@@ -222,14 +227,15 @@ func parseTerms(fuzzy bool, caseMode Case, normalize bool, str string) []termSet
                 set = termSet{}
             }
             textRunes := []rune(text)
-            if normalize {
+            if normalizeTerm {
                 textRunes = algo.NormalizeRunes(textRunes)
             }
             set = append(set, term{
                 typ:           typ,
                 inv:           inv,
                 text:          textRunes,
-                caseSensitive: caseSensitive})
+                caseSensitive: caseSensitive,
+                normalize:     normalizeTerm})
             switchSet = true
         }
     }
@@ -360,7 +366,7 @@ func (p *Pattern) extendedMatch(item *Item, withPos bool, slab *util.Slab) ([]Of
         matched := false
         for _, term := range termSet {
             pfun := p.procFun[term.typ]
-            off, score, pos := p.iter(pfun, input, term.caseSensitive, p.normalize, p.forward, term.text, withPos, slab)
+            off, score, pos := p.iter(pfun, input, term.caseSensitive, term.normalize, p.forward, term.text, withPos, slab)
             if sidx := off[0]; sidx >= 0 {
                 if term.inv {
                     continue
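Because the flag now lives on each term rather than on the whole pattern, a single extended-search query can mix behaviors: a plain term is matched accent-insensitively while an accented term in the same query still requires its accents. A rough, self-contained illustration of that per-term lookup (the `demoTerm` type, the `stripAccents` helper, and the substring match are simplifications, not fzf's matcher):

```go
package main

import (
	"fmt"
	"strings"
)

// demoTerm keeps only the fields needed to show the per-term decision.
type demoTerm struct {
	text      string
	normalize bool
}

// stripAccents is a toy normalizer covering just 'é' for this example.
func stripAccents(s string) string {
	return strings.Map(func(r rune) rune {
		if r == 'é' {
			return 'e'
		}
		return r
	}, s)
}

func main() {
	// For a query like "resume café": the plain term carries normalize=true,
	// the accented term carries normalize=false.
	terms := []demoTerm{
		{text: "resume", normalize: true},
		{text: "café", normalize: false},
	}
	input := "café résumé"
	matched := true
	for _, t := range terms {
		haystack := input
		if t.normalize { // read per term, where p.normalize was used before
			haystack = stripAccents(haystack)
		}
		if !strings.Contains(haystack, t.text) {
			matched = false
			break
		}
	}
	fmt.Println(matched) // true: both terms match "café résumé"
}
```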
@@ -1805,6 +1805,14 @@ class TestGoFZF < TestBase
     tmux.until { |lines| lines.item_count == 1 }
     tmux.until { |lines| assert_match %r{121.*121/1000}, lines[1] }
   end
+
+  def test_normalized_match
+    echoes = '(echo a; echo á; echo A; echo Á;)'
+    assert_equal %w[a á A Á], `#{echoes} | #{FZF} -f a`.lines.map(&:chomp)
+    assert_equal %w[á Á], `#{echoes} | #{FZF} -f á`.lines.map(&:chomp)
+    assert_equal %w[A Á], `#{echoes} | #{FZF} -f A`.lines.map(&:chomp)
+    assert_equal %w[Á], `#{echoes} | #{FZF} -f Á`.lines.map(&:chomp)
+  end
 end
 
 module TestShell