Mirror of https://github.com/Llewellynvdm/fzf.git, synced 2024-11-29 16:23:57 +00:00
Minor refactoring
parent f401c42f9c
commit d303c5b3eb
@@ -90,7 +90,7 @@ func ExactMatchNaive(caseSensitive bool, input *string, pattern []rune) (int, int) {
 	runes := []rune(*input)
 	numRunes := len(runes)
 	plen := len(pattern)
-	if len(runes) < plen {
+	if numRunes < plen {
 		return -1, -1
 	}
 
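The hunk above only swaps len(runes) for the numRunes value computed two lines earlier, removing a redundant length call. Below is a minimal sketch of how the guard reads after the change; the package wrapper, the main function, and the elided matching logic are placeholders for illustration rather than fzf's actual code, and the (int, int) return signature is taken from the hunk header.

package main

import "fmt"

// ExactMatchNaive guard as it reads after this commit: the rune count is
// computed once and reused in the length check. The real scan that follows
// in fzf is omitted here.
func ExactMatchNaive(caseSensitive bool, input *string, pattern []rune) (int, int) {
	runes := []rune(*input)
	numRunes := len(runes)
	plen := len(pattern)
	if numRunes < plen { // was: if len(runes) < plen
		return -1, -1
	}
	// ... naive matching over runes elided ...
	return -1, -1
}

func main() {
	input := "abc"
	start, end := ExactMatchNaive(false, &input, []rune("abcdef"))
	fmt.Println(start, end) // -1 -1: the pattern is longer than the input
}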
@@ -118,14 +118,13 @@ func awkTokenizer(input *string) ([]string, int) {
 }
 
 func Tokenize(str *string, delimiter *regexp.Regexp) []Token {
-	prefixLength := 0
 	if delimiter == nil {
 		// AWK-style (\S+\s*)
 		tokens, prefixLength := awkTokenizer(str)
 		return withPrefixLengths(tokens, prefixLength)
 	} else {
 		tokens := delimiter.FindAllString(*str, -1)
-		return withPrefixLengths(tokens, prefixLength)
+		return withPrefixLengths(tokens, 0)
 	}
 }
 
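This hunk removes the outer prefixLength variable: the AWK branch already declares its own prefixLength with := from awkTokenizer, and the regexp branch only ever passed the initial 0, which is now written as a literal. Below is a sketch of the resulting function with hypothetical stand-ins for Token, withPrefixLengths, and awkTokenizer so it compiles on its own; only Tokenize mirrors the diff, while the helpers are illustrative guesses rather than fzf's implementations.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Hypothetical stand-ins so the sketch compiles; fzf's real definitions differ.
type Token struct {
	text         string
	prefixLength int
}

func withPrefixLengths(tokens []string, prefixLength int) []Token {
	result := make([]Token, len(tokens))
	for i, tok := range tokens {
		result[i] = Token{text: tok, prefixLength: prefixLength}
		prefixLength += len(tok)
	}
	return result
}

// Stand-in AWK-style tokenizer: whitespace-separated fields plus the offset
// of the first field.
func awkTokenizer(input *string) ([]string, int) {
	trimmed := strings.TrimLeft(*input, " ")
	return strings.Fields(trimmed), len(*input) - len(trimmed)
}

// Tokenize as it reads after this commit: no outer prefixLength, and the
// regexp branch passes the literal 0 it always ended up using.
func Tokenize(str *string, delimiter *regexp.Regexp) []Token {
	if delimiter == nil {
		// AWK-style (\S+\s*)
		tokens, prefixLength := awkTokenizer(str)
		return withPrefixLengths(tokens, prefixLength)
	} else {
		tokens := delimiter.FindAllString(*str, -1)
		return withPrefixLengths(tokens, 0)
	}
}

func main() {
	line := "  foo bar:baz"
	for _, tok := range Tokenize(&line, nil) {
		fmt.Printf("%q starts at %d\n", tok.text, tok.prefixLength)
	}
	colon := regexp.MustCompile("[^:]*:?")
	for _, tok := range Tokenize(&line, colon) {
		fmt.Printf("%q with prefix length %d\n", tok.text, tok.prefixLength)
	}
}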