Make --filter non-blocking when --no-sort (#132)
When fzf works in filtering mode (--filter) and sorting is disabled (--no-sort), there's no need to block until input is complete. This commit makes fzf print the matches on-the-fly when the following condition is met:

    --filter FILTER --no-sort [--no-tac --no-sync]

or simply:

    -f FILTER +s

This removes unnecessary delay in use cases like the following:

    fzf -f xxx +s | head -5

However, in this case, fzf processes the input lines sequentially, so it cannot utilize multiple cores, which makes it slightly slower than the previous mode of execution where filtering is done in parallel after the entire input is loaded. If the user is concerned about this performance problem, one can add the --sync option to re-enable buffering.
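To make the on-the-fly behavior described above concrete, here is a minimal, self-contained Go sketch of the same idea; it is not fzf's code. It reads stdin line by line and prints each match as soon as it arrives, unless buffering is requested. The --filter/--sync flag handling and the plain substring predicate are illustrative assumptions; in the actual commit the decision is the streamingFilter condition in core.go and the match test is Pattern.MatchItem (see the diff below).

    // Illustrative sketch only -- not fzf's implementation.
    // Streaming: print matches as lines arrive; buffered: collect input first.
    package main

    import (
        "bufio"
        "flag"
        "fmt"
        "os"
        "strings"
    )

    func main() {
        filter := flag.String("filter", "", "print lines containing this query")
        sync := flag.Bool("sync", false, "buffer all input before filtering")
        flag.Parse()

        scanner := bufio.NewScanner(os.Stdin)

        if *filter != "" && !*sync {
            // Streaming path: a match is printed the moment its line is read,
            // so a downstream consumer such as `head -5` sees output immediately.
            for scanner.Scan() {
                if line := scanner.Text(); strings.Contains(line, *filter) {
                    fmt.Println(line)
                }
            }
            return
        }

        // Buffered path: read everything first, then filter in one pass
        // (the point where fzf can also sort and parallelize the matching).
        var lines []string
        for scanner.Scan() {
            lines = append(lines, scanner.Text())
        }
        for _, line := range lines {
            if strings.Contains(line, *filter) {
                fmt.Println(line)
            }
        }
    }

The trade-off mentioned in the commit message shows up here as well: the streaming loop handles one line at a time, whereas the buffered variant has the whole input available and can fan the matching out across cores.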
This commit is contained in:
parent 4d2d18649c
commit 94e8e6419f

src/core.go (42 lines changed)
src/core.go

@@ -85,8 +85,11 @@ func Run(options *Options) {
     }
 
     // Reader
-    reader := Reader{func(str string) { chunkList.Push(str) }, eventBox}
-    go reader.ReadSource()
+    streamingFilter := opts.Filter != nil && opts.Sort == 0 && !opts.Tac && !opts.Sync
+    if !streamingFilter {
+        reader := Reader{func(str string) { chunkList.Push(str) }, eventBox}
+        go reader.ReadSource()
+    }
 
     // Matcher
     patternBuilder := func(runes []rune) *Pattern {
@@ -97,21 +100,32 @@ func Run(options *Options) {
 
     // Filtering mode
     if opts.Filter != nil {
-        pattern := patternBuilder([]rune(*opts.Filter))
-
-        eventBox.Unwatch(EvtReadNew)
-        eventBox.WaitFor(EvtReadFin)
-
-        snapshot, _ := chunkList.Snapshot()
-        merger, _ := matcher.scan(MatchRequest{
-            chunks:  snapshot,
-            pattern: pattern})
-
         if opts.PrintQuery {
             fmt.Println(*opts.Filter)
         }
-        for i := 0; i < merger.Length(); i++ {
-            fmt.Println(merger.Get(i).AsString())
+
+        pattern := patternBuilder([]rune(*opts.Filter))
+
+        if streamingFilter {
+            reader := Reader{
+                func(str string) {
+                    item := chunkList.trans(&str, 0)
+                    if pattern.MatchItem(item) {
+                        fmt.Println(*item.text)
+                    }
+                }, eventBox}
+            reader.ReadSource()
+        } else {
+            eventBox.Unwatch(EvtReadNew)
+            eventBox.WaitFor(EvtReadFin)
+
+            snapshot, _ := chunkList.Snapshot()
+            merger, _ := matcher.scan(MatchRequest{
+                chunks:  snapshot,
+                pattern: pattern})
+            for i := 0; i < merger.Length(); i++ {
+                fmt.Println(merger.Get(i).AsString())
+            }
         }
         os.Exit(0)
     }

src/pattern.go

@@ -219,12 +219,7 @@ Loop:
         }
     }
 
-    var matches []*Item
-    if p.mode == ModeFuzzy {
-        matches = p.fuzzyMatch(space)
-    } else {
-        matches = p.extendedMatch(space)
-    }
+    matches := p.matchChunk(space)
 
     if !p.hasInvTerm {
         _cache.Add(chunk, cacheKey, matches)
@@ -232,6 +227,35 @@ Loop:
     return matches
 }
 
+func (p *Pattern) matchChunk(chunk *Chunk) []*Item {
+    matches := []*Item{}
+    if p.mode == ModeFuzzy {
+        for _, item := range *chunk {
+            if sidx, eidx := p.fuzzyMatch(item); sidx >= 0 {
+                matches = append(matches,
+                    dupItem(item, []Offset{Offset{int32(sidx), int32(eidx)}}))
+            }
+        }
+    } else {
+        for _, item := range *chunk {
+            if offsets := p.extendedMatch(item); len(offsets) == len(p.terms) {
+                matches = append(matches, dupItem(item, offsets))
+            }
+        }
+    }
+    return matches
+}
+
+// MatchItem returns true if the Item is a match
+func (p *Pattern) MatchItem(item *Item) bool {
+    if p.mode == ModeFuzzy {
+        sidx, _ := p.fuzzyMatch(item)
+        return sidx >= 0
+    }
+    offsets := p.extendedMatch(item)
+    return len(offsets) == len(p.terms)
+}
+
 func dupItem(item *Item, offsets []Offset) *Item {
     sort.Sort(ByOrder(offsets))
     return &Item{
@@ -243,39 +267,26 @@ func dupItem(item *Item, offsets []Offset) *Item {
         rank:        Rank{0, 0, item.index}}
 }
 
-func (p *Pattern) fuzzyMatch(chunk *Chunk) []*Item {
-    matches := []*Item{}
-    for _, item := range *chunk {
-        input := p.prepareInput(item)
-        if sidx, eidx := p.iter(algo.FuzzyMatch, input, p.text); sidx >= 0 {
-            matches = append(matches,
-                dupItem(item, []Offset{Offset{int32(sidx), int32(eidx)}}))
-        }
-    }
-    return matches
+func (p *Pattern) fuzzyMatch(item *Item) (int, int) {
+    input := p.prepareInput(item)
+    return p.iter(algo.FuzzyMatch, input, p.text)
 }
 
-func (p *Pattern) extendedMatch(chunk *Chunk) []*Item {
-    matches := []*Item{}
-    for _, item := range *chunk {
-        input := p.prepareInput(item)
-        offsets := []Offset{}
-        for _, term := range p.terms {
-            pfun := p.procFun[term.typ]
-            if sidx, eidx := p.iter(pfun, input, term.text); sidx >= 0 {
-                if term.inv {
-                    break
-                }
-                offsets = append(offsets, Offset{int32(sidx), int32(eidx)})
-            } else if term.inv {
-                offsets = append(offsets, Offset{0, 0})
-            }
-        }
-        if len(offsets) == len(p.terms) {
-            matches = append(matches, dupItem(item, offsets))
-        }
-    }
-    return matches
+func (p *Pattern) extendedMatch(item *Item) []Offset {
+    input := p.prepareInput(item)
+    offsets := []Offset{}
+    for _, term := range p.terms {
+        pfun := p.procFun[term.typ]
+        if sidx, eidx := p.iter(pfun, input, term.text); sidx >= 0 {
+            if term.inv {
+                break
+            }
+            offsets = append(offsets, Offset{int32(sidx), int32(eidx)})
+        } else if term.inv {
+            offsets = append(offsets, Offset{0, 0})
+        }
+    }
+    return offsets
 }
 
 func (p *Pattern) prepareInput(item *Item) *Transformed {

src/pattern_test.go

@@ -98,14 +98,15 @@ func TestOrigTextAndTransformed(t *testing.T) {
     tokens := Tokenize(strptr("junegunn"), nil)
     trans := Transform(tokens, []Range{Range{1, 1}})
 
-    for _, fun := range []func(*Chunk) []*Item{pattern.fuzzyMatch, pattern.extendedMatch} {
+    for _, mode := range []Mode{ModeFuzzy, ModeExtended} {
         chunk := Chunk{
             &Item{
                 text:        strptr("junegunn"),
                 origText:    strptr("junegunn.choi"),
                 transformed: trans},
         }
-        matches := fun(&chunk)
+        pattern.mode = mode
+        matches := pattern.matchChunk(&chunk)
         if *matches[0].text != "junegunn" || *matches[0].origText != "junegunn.choi" ||
             matches[0].offsets[0][0] != 0 || matches[0].offsets[0][1] != 5 ||
             matches[0].transformed != trans {