lib/ignore: Replace lib/fnmatch with github.com/gobwas/glob
Because it's literally ten times faster:

benchmark                 old ns/op     new ns/op     delta
BenchmarkMatch-8          13842         1200          -91.33%
BenchmarkMatchCached-8    139           147           +5.76%

benchmark                 old allocs    new allocs    delta
BenchmarkMatch-8          0             0             +0.00%
BenchmarkMatchCached-8    0             0             +0.00%

benchmark                 old bytes     new bytes     delta
BenchmarkMatch-8          12            0             -100.00%
BenchmarkMatchCached-8    0             0             +0.00%
parent 46e913dc23
commit 4c3cd4c9e3
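As a rough illustration of the speedup (not part of this commit), the sketch below times a regexp built from a glob-style pattern against a gobwas/glob matcher on a made-up path. The pattern, fixture and iteration count are assumptions chosen for demonstration; absolute numbers will differ from the benchmark figures above.

// Illustrative only: compares regexp-based matching (the old lib/fnmatch
// approach converted patterns to regexps) with a compiled gobwas/glob matcher.
package main

import (
	"fmt"
	"regexp"
	"time"

	"github.com/gobwas/glob"
)

func main() {
	const fixture = "photos/2016/holiday/IMG_1234.jpg" // hypothetical path

	re := regexp.MustCompile(`^.*/IMG_.*\.jpg$`) // glob-style pattern hand-converted to a regexp
	g := glob.MustCompile("**/IMG_*.jpg", '/')   // the same intent, compiled by gobwas/glob

	const n = 100000

	start := time.Now()
	for i := 0; i < n; i++ {
		re.MatchString(fixture)
	}
	fmt.Println("regexp:", time.Since(start))

	start = time.Now()
	for i := 0; i < n; i++ {
		g.Match(fixture)
	}
	fmt.Println("glob:  ", time.Since(start))
}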
@@ -934,7 +934,7 @@ func (s *apiService) getDBIgnores(w http.ResponseWriter, r *http.Request) {
 	sendJSON(w, map[string][]string{
 		"ignore":   ignores,
-		"patterns": patterns,
+		"expanded": patterns,
 	})
 }
@@ -1,79 +0,0 @@
// Copyright (C) 2014 The Syncthing Authors.
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at http://mozilla.org/MPL/2.0/.

package fnmatch

import (
	"path/filepath"
	"regexp"
	"runtime"
	"strings"
)

const (
	NoEscape = (1 << iota)
	PathName
	CaseFold
)

func Convert(pattern string, flags int) (*regexp.Regexp, error) {
	any := "."

	switch runtime.GOOS {
	case "windows":
		flags |= NoEscape | CaseFold
		pattern = filepath.FromSlash(pattern)
		if flags&PathName != 0 {
			any = `[^\\]`
		}
	case "darwin":
		flags |= CaseFold
		fallthrough
	default:
		if flags&PathName != 0 {
			any = `[^/]`
		}
	}

	if flags&NoEscape != 0 {
		pattern = strings.Replace(pattern, `\`, `\\`, -1)
	} else {
		pattern = strings.Replace(pattern, `\*`, "[:escapedstar:]", -1)
		pattern = strings.Replace(pattern, `\?`, "[:escapedques:]", -1)
		pattern = strings.Replace(pattern, `\.`, "[:escapeddot:]", -1)
	}

	// Characters that are special in regexps but not in glob, must be
	// escaped.
	for _, char := range []string{`.`, `+`, `$`, `^`, `(`, `)`, `|`} {
		pattern = strings.Replace(pattern, char, `\`+char, -1)
	}

	pattern = strings.Replace(pattern, `**`, `[:doublestar:]`, -1)
	pattern = strings.Replace(pattern, `*`, any+`*`, -1)
	pattern = strings.Replace(pattern, `[:doublestar:]`, `.*`, -1)
	pattern = strings.Replace(pattern, `?`, any, -1)

	pattern = strings.Replace(pattern, `[:escapedstar:]`, `\*`, -1)
	pattern = strings.Replace(pattern, `[:escapedques:]`, `\?`, -1)
	pattern = strings.Replace(pattern, `[:escapeddot:]`, `\.`, -1)

	pattern = `^` + pattern + `$`
	if flags&CaseFold != 0 {
		pattern = `(?i)` + pattern
	}
	return regexp.Compile(pattern)
}

// Match matches the pattern against the string, with the given flags, and
// returns true if the match is successful.
func Match(pattern, s string, flags int) (bool, error) {
	exp, err := Convert(pattern, flags)
	if err != nil {
		return false, err
	}
	return exp.MatchString(s), nil
}
@@ -1,98 +0,0 @@
// Copyright (C) 2014 The Syncthing Authors.
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this file,
// You can obtain one at http://mozilla.org/MPL/2.0/.

package fnmatch

import (
	"path/filepath"
	"runtime"
	"testing"
)

type testcase struct {
	pat   string
	name  string
	flags int
	match bool
}

var testcases = []testcase{
	{"", "", 0, true},
	{"*", "", 0, true},
	{"*", "foo", 0, true},
	{"*", "bar", 0, true},
	{"*", "*", 0, true},
	{"**", "f", 0, true},
	{"**", "foo.txt", 0, true},
	{"*.*", "foo.txt", 0, true},
	{"foo*.txt", "foobar.txt", 0, true},
	{"foo.txt", "foo.txt", 0, true},

	{"foo.txt", "bar/foo.txt", 0, false},
	{"*/foo.txt", "bar/foo.txt", 0, true},
	{"f?o.txt", "foo.txt", 0, true},
	{"f?o.txt", "fooo.txt", 0, false},
	{"f[ab]o.txt", "foo.txt", 0, false},
	{"f[ab]o.txt", "fao.txt", 0, true},
	{"f[ab]o.txt", "fbo.txt", 0, true},
	{"f[ab]o.txt", "fco.txt", 0, false},
	{"f[ab]o.txt", "fabo.txt", 0, false},
	{"f[ab]o.txt", "f[ab]o.txt", 0, false},
	{"f\\[ab\\]o.txt", "f[ab]o.txt", NoEscape, false},

	{"*foo.txt", "bar/foo.txt", 0, true},
	{"*foo.txt", "bar/foo.txt", PathName, false},
	{"*/foo.txt", "bar/foo.txt", 0, true},
	{"*/foo.txt", "bar/foo.txt", PathName, true},
	{"*/foo.txt", "bar/baz/foo.txt", 0, true},
	{"*/foo.txt", "bar/baz/foo.txt", PathName, false},
	{"**/foo.txt", "bar/baz/foo.txt", 0, true},
	{"**/foo.txt", "bar/baz/foo.txt", PathName, true},

	{"foo.txt", "foo.TXT", CaseFold, true},

	// These characters are literals in glob, but not in regexp.
	{"hey$hello", "hey$hello", 0, true},
	{"hey^hello", "hey^hello", 0, true},
	{"hey{hello", "hey{hello", 0, true},
	{"hey}hello", "hey}hello", 0, true},
	{"hey(hello", "hey(hello", 0, true},
	{"hey)hello", "hey)hello", 0, true},
	{"hey|hello", "hey|hello", 0, true},
	{"hey|hello", "hey|other", 0, false},
}

func TestMatch(t *testing.T) {
	switch runtime.GOOS {
	case "windows":
		testcases = append(testcases, testcase{"foo.txt", "foo.TXT", 0, true})
	case "darwin":
		testcases = append(testcases, testcase{"foo.txt", "foo.TXT", 0, true})
		fallthrough
	default:
		testcases = append(testcases, testcase{"f\\[ab\\]o.txt", "f[ab]o.txt", 0, true})
		testcases = append(testcases, testcase{"foo\\.txt", "foo.txt", 0, true})
		testcases = append(testcases, testcase{"foo\\*.txt", "foo*.txt", 0, true})
		testcases = append(testcases, testcase{"foo\\.txt", "foo.txt", NoEscape, false})
		testcases = append(testcases, testcase{"f\\\\\\[ab\\\\\\]o.txt", "f\\[ab\\]o.txt", 0, true})
	}

	for _, tc := range testcases {
		if m, err := Match(tc.pat, filepath.FromSlash(tc.name), tc.flags); m != tc.match {
			if err != nil {
				t.Error(err)
			} else {
				t.Errorf("Match(%q, %q, %d) != %v", tc.pat, tc.name, tc.flags, tc.match)
			}
		}
	}
}

func TestInvalid(t *testing.T) {
	if _, err := Match("foo[bar", "...", 0); err == nil {
		t.Error("Unexpected nil error")
	}
}
@@ -14,24 +14,30 @@ import (
 	"io"
 	"os"
 	"path/filepath"
-	"regexp"
+	"runtime"
 	"strings"
 	"time"
 
-	"github.com/syncthing/syncthing/lib/fnmatch"
+	"github.com/gobwas/glob"
 	"github.com/syncthing/syncthing/lib/sync"
 )
 
 type Pattern struct {
-	match   *regexp.Regexp
-	include bool
+	pattern  string
+	match    glob.Glob
+	include  bool
+	foldCase bool
 }
 
 func (p Pattern) String() string {
-	if p.include {
-		return p.match.String()
+	ret := p.pattern
+	if !p.include {
+		ret = "!" + ret
 	}
-	return "(?exclude)" + p.match.String()
+	if p.foldCase {
+		ret = "(?i)" + ret
+	}
+	return ret
 }
 
 type Matcher struct {
@@ -119,9 +125,20 @@ func (m *Matcher) Match(file string) (result bool) {
 	}
 
 	// Check all the patterns for a match.
+	file = filepath.ToSlash(file)
+	var lowercaseFile string
 	for _, pattern := range m.patterns {
-		if pattern.match.MatchString(file) {
-			return pattern.include
+		if pattern.foldCase {
+			if lowercaseFile == "" {
+				lowercaseFile = strings.ToLower(file)
+			}
+			if pattern.match.Match(lowercaseFile) {
+				return pattern.include
+			}
+		} else {
+			if pattern.match.Match(file) {
+				return pattern.include
+			}
 		}
 	}
 
@@ -129,7 +146,7 @@ func (m *Matcher) Match(file string) (result bool) {
 	return false
 }
 
-// Patterns return a list of the loaded regexp patterns, as strings
+// Patterns return a list of the loaded patterns, as they've been parsed
 func (m *Matcher) Patterns() []string {
 	if m == nil {
 		return nil
@@ -200,38 +217,43 @@ func parseIgnoreFile(fd io.Reader, currentFile string, seen map[string]bool) ([]
 	var patterns []Pattern
 
 	addPattern := func(line string) error {
-		include := true
+		pattern := Pattern{
+			pattern:  line,
+			include:  true,
+			foldCase: runtime.GOOS == "darwin" || runtime.GOOS == "windows",
+		}
+
 		if strings.HasPrefix(line, "!") {
 			line = line[1:]
-			include = false
+			pattern.include = false
 		}
 
-		flags := fnmatch.PathName
 		if strings.HasPrefix(line, "(?i)") {
-			line = line[4:]
-			flags |= fnmatch.CaseFold
+			line = strings.ToLower(line[4:])
+			pattern.foldCase = true
 		}
 
+		var err error
 		if strings.HasPrefix(line, "/") {
 			// Pattern is rooted in the current dir only
-			exp, err := fnmatch.Convert(line[1:], flags)
+			pattern.match, err = glob.Compile(line[1:])
 			if err != nil {
 				return fmt.Errorf("invalid pattern %q in ignore file", line)
 			}
-			patterns = append(patterns, Pattern{exp, include})
+			patterns = append(patterns, pattern)
 		} else if strings.HasPrefix(line, "**/") {
 			// Add the pattern as is, and without **/ so it matches in current dir
-			exp, err := fnmatch.Convert(line, flags)
+			pattern.match, err = glob.Compile(line)
 			if err != nil {
 				return fmt.Errorf("invalid pattern %q in ignore file", line)
 			}
-			patterns = append(patterns, Pattern{exp, include})
+			patterns = append(patterns, pattern)
 
-			exp, err = fnmatch.Convert(line[3:], flags)
+			pattern.match, err = glob.Compile(line[3:])
 			if err != nil {
 				return fmt.Errorf("invalid pattern %q in ignore file", line)
 			}
-			patterns = append(patterns, Pattern{exp, include})
+			patterns = append(patterns, pattern)
 		} else if strings.HasPrefix(line, "#include ") {
 			includeRel := line[len("#include "):]
 			includeFile := filepath.Join(filepath.Dir(currentFile), includeRel)
@@ -243,17 +265,17 @@ func parseIgnoreFile(fd io.Reader, currentFile string, seen map[string]bool) ([]
 		} else {
 			// Path name or pattern, add it so it matches files both in
 			// current directory and subdirs.
-			exp, err := fnmatch.Convert(line, flags)
+			pattern.match, err = glob.Compile(line)
 			if err != nil {
 				return fmt.Errorf("invalid pattern %q in ignore file", line)
 			}
-			patterns = append(patterns, Pattern{exp, include})
+			patterns = append(patterns, pattern)
 
-			exp, err = fnmatch.Convert("**/"+line, flags)
+			pattern.match, err = glob.Compile("**/" + line)
 			if err != nil {
 				return fmt.Errorf("invalid pattern %q in ignore file", line)
 			}
-			patterns = append(patterns, Pattern{exp, include})
+			patterns = append(patterns, pattern)
 		}
 		return nil
 	}
@@ -267,6 +289,10 @@ func parseIgnoreFile(fd io.Reader, currentFile string, seen map[string]bool) ([]
 			continue
 		case strings.HasPrefix(line, "//"):
 			continue
 		}
 
+		line = filepath.ToSlash(line)
 		switch {
 		case strings.HasPrefix(line, "#"):
 			err = addPattern(line)
 		case strings.HasSuffix(line, "/**"):
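For reference, here is a standalone sketch (not the actual Matcher code) of the matching behaviour introduced above: patterns are compiled with gobwas/glob, a "(?i)" pattern is lower-cased when it is parsed, and at match time the path is lower-cased for fold-case patterns. The pattern and path below are invented for illustration.

package main

import (
	"fmt"
	"strings"

	"github.com/gobwas/glob"
)

// pattern mirrors the fields added to Pattern in the diff above.
type pattern struct {
	match    glob.Glob
	include  bool
	foldCase bool
}

func main() {
	// "(?i)Photos/**" from an ignore file: the glob is lower-cased at parse time.
	p := pattern{
		match:    glob.MustCompile(strings.ToLower("Photos/**")),
		include:  true,
		foldCase: true,
	}

	file := "PHOTOS/2016/IMG_1234.jpg"

	// At match time the path is lower-cased only for fold-case patterns.
	candidate := file
	if p.foldCase {
		candidate = strings.ToLower(file)
	}

	fmt.Println(p.match.Match(candidate), p.include) // true true
}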
@@ -509,3 +509,29 @@ func TestHashOfEmpty(t *testing.T) {
 		t.Error("there are more than zero patterns")
 	}
 }
+
+func TestWindowsPatterns(t *testing.T) {
+	// We should accept patterns as both a/b and a\b and match that against
+	// both kinds of slash as well.
+	if runtime.GOOS != "windows" {
+		t.Skip("Windows specific test")
+		return
+	}
+
+	stignore := `
+	a/b
+	c\d
+	`
+	pats := New(true)
+	err := pats.Parse(bytes.NewBufferString(stignore), ".stignore")
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	tests := []string{`a\b`, `c\d`}
+	for _, pat := range tests {
+		if !pats.Match(pat) {
+			t.Errorf("Should match %s", pat)
+		}
+	}
+}
vendor/github.com/gobwas/glob/LICENSE (new file, generated, vendored, 21 lines)
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Sergey Kamardin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
vendor/github.com/gobwas/glob/bench.sh (new file, generated, vendored, 26 lines)
@@ -0,0 +1,26 @@
|
||||
#! /bin/bash
|
||||
|
||||
bench() {
|
||||
filename="/tmp/$1-$2.bench"
|
||||
if test -e "${filename}";
|
||||
then
|
||||
echo "Already exists ${filename}"
|
||||
else
|
||||
backup=`git rev-parse --abbrev-ref HEAD`
|
||||
git checkout $1
|
||||
echo -n "Creating ${filename}... "
|
||||
go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
|
||||
echo "OK"
|
||||
git checkout ${backup}
|
||||
sleep 5
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
to=$1
|
||||
current=`git rev-parse --abbrev-ref HEAD`
|
||||
|
||||
bench ${to} $2
|
||||
bench ${current} $2
|
||||
|
||||
benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"
|
vendor/github.com/gobwas/glob/cmd/globdraw/main.go (new file, generated, vendored, 44 lines)
@@ -0,0 +1,44 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob"
|
||||
"github.com/gobwas/glob/match"
|
||||
"github.com/gobwas/glob/match/debug"
|
||||
"os"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
func main() {
|
||||
pattern := flag.String("p", "", "pattern to draw")
|
||||
sep := flag.String("s", "", "comma separated list of separators characters")
|
||||
flag.Parse()
|
||||
|
||||
if *pattern == "" {
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
var separators []rune
|
||||
if len(*sep) > 0 {
|
||||
for _, c := range strings.Split(*sep, ",") {
|
||||
if r, w := utf8.DecodeRuneInString(c); len(c) > w {
|
||||
fmt.Println("only single charactered separators are allowed")
|
||||
os.Exit(1)
|
||||
} else {
|
||||
separators = append(separators, r)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
glob, err := glob.Compile(*pattern, separators...)
|
||||
if err != nil {
|
||||
fmt.Println("could not compile pattern:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
matcher := glob.(match.Matcher)
|
||||
fmt.Fprint(os.Stdout, debug.Graphviz(*pattern, matcher))
|
||||
}
|
vendor/github.com/gobwas/glob/cmd/globtest/main.go (new file, generated, vendored, 82 lines)
@@ -0,0 +1,82 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
func benchString(r testing.BenchmarkResult) string {
|
||||
nsop := r.NsPerOp()
|
||||
ns := fmt.Sprintf("%10d ns/op", nsop)
|
||||
allocs := "0"
|
||||
if r.N > 0 {
|
||||
if nsop < 100 {
|
||||
// The format specifiers here make sure that
|
||||
// the ones digits line up for all three possible formats.
|
||||
if nsop < 10 {
|
||||
ns = fmt.Sprintf("%13.2f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
} else {
|
||||
ns = fmt.Sprintf("%12.1f ns/op", float64(r.T.Nanoseconds())/float64(r.N))
|
||||
}
|
||||
}
|
||||
|
||||
allocs = fmt.Sprintf("%d", r.MemAllocs/uint64(r.N))
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%8d\t%s\t%s allocs", r.N, ns, allocs)
|
||||
}
|
||||
|
||||
func main() {
|
||||
pattern := flag.String("p", "", "pattern to draw")
|
||||
sep := flag.String("s", "", "comma separated list of separators")
|
||||
fixture := flag.String("f", "", "fixture")
|
||||
verbose := flag.Bool("v", false, "verbose")
|
||||
flag.Parse()
|
||||
|
||||
if *pattern == "" {
|
||||
flag.Usage()
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
var separators []rune
|
||||
for _, c := range strings.Split(*sep, ",") {
|
||||
if r, w := utf8.DecodeRuneInString(c); len(c) > w {
|
||||
fmt.Println("only single charactered separators are allowed")
|
||||
os.Exit(1)
|
||||
} else {
|
||||
separators = append(separators, r)
|
||||
}
|
||||
}
|
||||
|
||||
g, err := glob.Compile(*pattern, separators...)
|
||||
if err != nil {
|
||||
fmt.Println("could not compile pattern:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if !*verbose {
|
||||
fmt.Println(g.Match(*fixture))
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Printf("result: %t\n", g.Match(*fixture))
|
||||
|
||||
cb := testing.Benchmark(func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
glob.Compile(*pattern, separators...)
|
||||
}
|
||||
})
|
||||
fmt.Println("compile:", benchString(cb))
|
||||
|
||||
mb := testing.Benchmark(func(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
g.Match(*fixture)
|
||||
}
|
||||
})
|
||||
fmt.Println("match: ", benchString(mb))
|
||||
}
|
vendor/github.com/gobwas/glob/compiler.go (new file, generated, vendored, 682 lines)
@@ -0,0 +1,682 @@
|
||||
package glob
|
||||
|
||||
// TODO use constructor with all matchers, and to their structs private
|
||||
// TODO glue multiple Text nodes (like after QuoteMeta)
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/match"
|
||||
"github.com/gobwas/glob/runes"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func optimize(matcher match.Matcher) match.Matcher {
|
||||
switch m := matcher.(type) {
|
||||
|
||||
case match.Any:
|
||||
if len(m.Separators) == 0 {
|
||||
return match.NewSuper()
|
||||
}
|
||||
|
||||
case match.AnyOf:
|
||||
if len(m.Matchers) == 1 {
|
||||
return m.Matchers[0]
|
||||
}
|
||||
|
||||
return m
|
||||
|
||||
case match.List:
|
||||
if m.Not == false && len(m.List) == 1 {
|
||||
return match.NewText(string(m.List))
|
||||
}
|
||||
|
||||
return m
|
||||
|
||||
case match.BTree:
|
||||
m.Left = optimize(m.Left)
|
||||
m.Right = optimize(m.Right)
|
||||
|
||||
r, ok := m.Value.(match.Text)
|
||||
if !ok {
|
||||
return m
|
||||
}
|
||||
|
||||
leftNil := m.Left == nil
|
||||
rightNil := m.Right == nil
|
||||
|
||||
if leftNil && rightNil {
|
||||
return match.NewText(r.Str)
|
||||
}
|
||||
|
||||
_, leftSuper := m.Left.(match.Super)
|
||||
lp, leftPrefix := m.Left.(match.Prefix)
|
||||
|
||||
_, rightSuper := m.Right.(match.Super)
|
||||
rs, rightSuffix := m.Right.(match.Suffix)
|
||||
|
||||
if leftSuper && rightSuper {
|
||||
return match.NewContains(r.Str, false)
|
||||
}
|
||||
|
||||
if leftSuper && rightNil {
|
||||
return match.NewSuffix(r.Str)
|
||||
}
|
||||
|
||||
if rightSuper && leftNil {
|
||||
return match.NewPrefix(r.Str)
|
||||
}
|
||||
|
||||
if leftNil && rightSuffix {
|
||||
return match.NewPrefixSuffix(r.Str, rs.Suffix)
|
||||
}
|
||||
|
||||
if rightNil && leftPrefix {
|
||||
return match.NewPrefixSuffix(lp.Prefix, r.Str)
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
return matcher
|
||||
}
|
||||
|
||||
func glueMatchers(matchers []match.Matcher) match.Matcher {
|
||||
var (
|
||||
glued []match.Matcher
|
||||
winner match.Matcher
|
||||
)
|
||||
maxLen := -1
|
||||
|
||||
if m := glueAsEvery(matchers); m != nil {
|
||||
glued = append(glued, m)
|
||||
return m
|
||||
}
|
||||
|
||||
if m := glueAsRow(matchers); m != nil {
|
||||
glued = append(glued, m)
|
||||
return m
|
||||
}
|
||||
|
||||
for _, g := range glued {
|
||||
if l := g.Len(); l > maxLen {
|
||||
maxLen = l
|
||||
winner = g
|
||||
}
|
||||
}
|
||||
|
||||
return winner
|
||||
}
|
||||
|
||||
func glueAsRow(matchers []match.Matcher) match.Matcher {
|
||||
if len(matchers) <= 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
c []match.Matcher
|
||||
l int
|
||||
)
|
||||
for _, matcher := range matchers {
|
||||
if ml := matcher.Len(); ml == -1 {
|
||||
return nil
|
||||
} else {
|
||||
c = append(c, matcher)
|
||||
l += ml
|
||||
}
|
||||
}
|
||||
|
||||
return match.NewRow(l, c...)
|
||||
}
|
||||
|
||||
func glueAsEvery(matchers []match.Matcher) match.Matcher {
|
||||
if len(matchers) <= 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
hasAny bool
|
||||
hasSuper bool
|
||||
hasSingle bool
|
||||
min int
|
||||
separator []rune
|
||||
)
|
||||
|
||||
for i, matcher := range matchers {
|
||||
var sep []rune
|
||||
|
||||
switch m := matcher.(type) {
|
||||
case match.Super:
|
||||
sep = []rune{}
|
||||
hasSuper = true
|
||||
|
||||
case match.Any:
|
||||
sep = m.Separators
|
||||
hasAny = true
|
||||
|
||||
case match.Single:
|
||||
sep = m.Separators
|
||||
hasSingle = true
|
||||
min++
|
||||
|
||||
case match.List:
|
||||
if !m.Not {
|
||||
return nil
|
||||
}
|
||||
sep = m.List
|
||||
hasSingle = true
|
||||
min++
|
||||
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
// initialize
|
||||
if i == 0 {
|
||||
separator = sep
|
||||
}
|
||||
|
||||
if runes.Equal(sep, separator) {
|
||||
continue
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
if hasSuper && !hasAny && !hasSingle {
|
||||
return match.NewSuper()
|
||||
}
|
||||
|
||||
if hasAny && !hasSuper && !hasSingle {
|
||||
return match.NewAny(separator)
|
||||
}
|
||||
|
||||
if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
|
||||
return match.NewMin(min)
|
||||
}
|
||||
|
||||
every := match.NewEveryOf()
|
||||
|
||||
if min > 0 {
|
||||
every.Add(match.NewMin(min))
|
||||
|
||||
if !hasAny && !hasSuper {
|
||||
every.Add(match.NewMax(min))
|
||||
}
|
||||
}
|
||||
|
||||
if len(separator) > 0 {
|
||||
every.Add(match.NewContains(string(separator), true))
|
||||
}
|
||||
|
||||
return every
|
||||
}
|
||||
|
||||
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
|
||||
var done match.Matcher
|
||||
var left, right, count int
|
||||
|
||||
for l := 0; l < len(matchers); l++ {
|
||||
for r := len(matchers); r > l; r-- {
|
||||
if glued := glueMatchers(matchers[l:r]); glued != nil {
|
||||
var swap bool
|
||||
|
||||
if done == nil {
|
||||
swap = true
|
||||
} else {
|
||||
cl, gl := done.Len(), glued.Len()
|
||||
swap = cl > -1 && gl > -1 && gl > cl
|
||||
swap = swap || count < r-l
|
||||
}
|
||||
|
||||
if swap {
|
||||
done = glued
|
||||
left = l
|
||||
right = r
|
||||
count = r - l
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if done == nil {
|
||||
return matchers
|
||||
}
|
||||
|
||||
next := append(append([]match.Matcher{}, matchers[:left]...), done)
|
||||
if right < len(matchers) {
|
||||
next = append(next, matchers[right:]...)
|
||||
}
|
||||
|
||||
if len(next) == len(matchers) {
|
||||
return next
|
||||
}
|
||||
|
||||
return minimizeMatchers(next)
|
||||
}
|
||||
|
||||
func minimizeAnyOf(children []node) node {
|
||||
var nodes [][]node
|
||||
var min int
|
||||
var idx int
|
||||
for i, desc := range children {
|
||||
pat, ok := desc.(*nodePattern)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
n := pat.children()
|
||||
ln := len(n)
|
||||
if len(nodes) == 0 || (ln < min) {
|
||||
min = ln
|
||||
idx = i
|
||||
}
|
||||
|
||||
nodes = append(nodes, pat.children())
|
||||
}
|
||||
|
||||
minNodes := nodes[idx]
|
||||
if idx+1 < len(nodes) {
|
||||
nodes = append(nodes[:idx], nodes[idx+1:]...)
|
||||
} else {
|
||||
nodes = nodes[:idx]
|
||||
}
|
||||
|
||||
var commonLeft []node
|
||||
var commonLeftCount int
|
||||
for i, n := range minNodes {
|
||||
has := true
|
||||
for _, t := range nodes {
|
||||
if !reflect.DeepEqual(n, t[i]) {
|
||||
has = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if has {
|
||||
commonLeft = append(commonLeft, n)
|
||||
commonLeftCount++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var commonRight []node
|
||||
var commonRightCount int
|
||||
for i := min - 1; i > commonLeftCount-1; i-- {
|
||||
n := minNodes[i]
|
||||
has := true
|
||||
for _, t := range nodes {
|
||||
if !reflect.DeepEqual(n, t[len(t)-(min-i)]) {
|
||||
has = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if has {
|
||||
commonRight = append(commonRight, n)
|
||||
commonRightCount++
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if commonLeftCount == 0 && commonRightCount == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
nodes = append(nodes, minNodes)
|
||||
nodes[len(nodes)-1], nodes[idx] = nodes[idx], nodes[len(nodes)-1]
|
||||
|
||||
var result []node
|
||||
if commonLeftCount > 0 {
|
||||
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: commonLeft}})
|
||||
}
|
||||
|
||||
var anyOf []node
|
||||
for _, n := range nodes {
|
||||
if commonLeftCount+commonRightCount == len(n) {
|
||||
anyOf = append(anyOf, nil)
|
||||
} else {
|
||||
anyOf = append(anyOf, &nodePattern{nodeImpl: nodeImpl{desc: n[commonLeftCount : len(n)-commonRightCount]}})
|
||||
}
|
||||
}
|
||||
|
||||
anyOf = uniqueNodes(anyOf)
|
||||
if len(anyOf) == 1 {
|
||||
if anyOf[0] != nil {
|
||||
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: anyOf}})
|
||||
}
|
||||
} else {
|
||||
result = append(result, &nodeAnyOf{nodeImpl: nodeImpl{desc: anyOf}})
|
||||
}
|
||||
|
||||
if commonRightCount > 0 {
|
||||
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: commonRight}})
|
||||
}
|
||||
|
||||
return &nodePattern{nodeImpl: nodeImpl{desc: result}}
|
||||
}
|
||||
|
||||
func uniqueNodes(nodes []node) (result []node) {
|
||||
head:
|
||||
for _, n := range nodes {
|
||||
for _, e := range result {
|
||||
if reflect.DeepEqual(e, n) {
|
||||
continue head
|
||||
}
|
||||
}
|
||||
|
||||
result = append(result, n)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
|
||||
if len(matchers) == 0 {
|
||||
return nil, fmt.Errorf("compile error: need at least one matcher")
|
||||
}
|
||||
|
||||
if len(matchers) == 1 {
|
||||
return matchers[0], nil
|
||||
}
|
||||
|
||||
if m := glueMatchers(matchers); m != nil {
|
||||
return m, nil
|
||||
}
|
||||
|
||||
var (
|
||||
val match.Matcher
|
||||
idx int
|
||||
)
|
||||
maxLen := -1
|
||||
for i, matcher := range matchers {
|
||||
l := matcher.Len()
|
||||
if l >= maxLen {
|
||||
maxLen = l
|
||||
idx = i
|
||||
val = matcher
|
||||
}
|
||||
}
|
||||
|
||||
left := matchers[:idx]
|
||||
var right []match.Matcher
|
||||
if len(matchers) > idx+1 {
|
||||
right = matchers[idx+1:]
|
||||
}
|
||||
|
||||
var l, r match.Matcher
|
||||
var err error
|
||||
if len(left) > 0 {
|
||||
l, err = compileMatchers(left)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if len(right) > 0 {
|
||||
r, err = compileMatchers(right)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return match.NewBTree(val, l, r), nil
|
||||
}
|
||||
|
||||
//func complexity(m match.Matcher) int {
|
||||
// var matchers []match.Matcher
|
||||
// var k int
|
||||
//
|
||||
// switch matcher := m.(type) {
|
||||
//
|
||||
// case match.Nothing:
|
||||
// return 0
|
||||
//
|
||||
// case match.Max, match.Range, match.Suffix, match.Text:
|
||||
// return 1
|
||||
//
|
||||
// case match.PrefixSuffix, match.Single, match.Row:
|
||||
// return 2
|
||||
//
|
||||
// case match.Any, match.Contains, match.List, match.Min, match.Prefix, match.Super:
|
||||
// return 4
|
||||
//
|
||||
// case match.BTree:
|
||||
// matchers = append(matchers, matcher.Value)
|
||||
// if matcher.Left != nil {
|
||||
// matchers = append(matchers, matcher.Left)
|
||||
// }
|
||||
// if matcher.Right != nil {
|
||||
// matchers = append(matchers, matcher.Right)
|
||||
// }
|
||||
// k = 1
|
||||
//
|
||||
// case match.AnyOf:
|
||||
// matchers = matcher.Matchers
|
||||
// k = 1
|
||||
// case match.EveryOf:
|
||||
// matchers = matcher.Matchers
|
||||
// k = 1
|
||||
//
|
||||
// default:
|
||||
// return 0
|
||||
// }
|
||||
//
|
||||
// var sum int
|
||||
// for _, m := range matchers {
|
||||
// sum += complexity(m)
|
||||
// }
|
||||
//
|
||||
// return sum * k
|
||||
//}
|
||||
|
||||
func doAnyOf(n *nodeAnyOf, s []rune) (match.Matcher, error) {
|
||||
var matchers []match.Matcher
|
||||
for _, desc := range n.children() {
|
||||
if desc == nil {
|
||||
matchers = append(matchers, match.NewNothing())
|
||||
continue
|
||||
}
|
||||
|
||||
m, err := do(desc, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matchers = append(matchers, optimize(m))
|
||||
}
|
||||
|
||||
return match.NewAnyOf(matchers...), nil
|
||||
}
|
||||
|
||||
func do(leaf node, s []rune) (m match.Matcher, err error) {
|
||||
switch n := leaf.(type) {
|
||||
|
||||
case *nodeAnyOf:
|
||||
// todo this could be faster on pattern_alternatives_combine_lite
|
||||
if n := minimizeAnyOf(n.children()); n != nil {
|
||||
return do(n, s)
|
||||
}
|
||||
|
||||
var matchers []match.Matcher
|
||||
for _, desc := range n.children() {
|
||||
if desc == nil {
|
||||
matchers = append(matchers, match.NewNothing())
|
||||
continue
|
||||
}
|
||||
|
||||
m, err := do(desc, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matchers = append(matchers, optimize(m))
|
||||
}
|
||||
|
||||
return match.NewAnyOf(matchers...), nil
|
||||
|
||||
case *nodePattern:
|
||||
nodes := leaf.children()
|
||||
if len(nodes) == 0 {
|
||||
return match.NewNothing(), nil
|
||||
}
|
||||
|
||||
var matchers []match.Matcher
|
||||
for _, desc := range nodes {
|
||||
m, err := do(desc, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matchers = append(matchers, optimize(m))
|
||||
}
|
||||
|
||||
m, err = compileMatchers(minimizeMatchers(matchers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
case *nodeList:
|
||||
m = match.NewList([]rune(n.chars), n.not)
|
||||
|
||||
case *nodeRange:
|
||||
m = match.NewRange(n.lo, n.hi, n.not)
|
||||
|
||||
case *nodeAny:
|
||||
m = match.NewAny(s)
|
||||
|
||||
case *nodeSuper:
|
||||
m = match.NewSuper()
|
||||
|
||||
case *nodeSingle:
|
||||
m = match.NewSingle(s)
|
||||
|
||||
case *nodeText:
|
||||
m = match.NewText(n.text)
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("could not compile tree: unknown node type")
|
||||
}
|
||||
|
||||
return optimize(m), nil
|
||||
}
|
||||
|
||||
func do2(node node, s []rune) ([]match.Matcher, error) {
|
||||
var result []match.Matcher
|
||||
|
||||
switch n := node.(type) {
|
||||
|
||||
case *nodePattern:
|
||||
ways := [][]match.Matcher{[]match.Matcher{}}
|
||||
|
||||
for _, desc := range node.children() {
|
||||
variants, err := do2(desc, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fmt.Println("variants pat", variants)
|
||||
|
||||
for i, l := 0, len(ways); i < l; i++ {
|
||||
for i := 0; i < len(variants); i++ {
|
||||
o := optimize(variants[i])
|
||||
if i == len(variants)-1 {
|
||||
ways[i] = append(ways[i], o)
|
||||
} else {
|
||||
var w []match.Matcher
|
||||
copy(w, ways[i])
|
||||
ways = append(ways, append(w, o))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("ways pat", ways)
|
||||
}
|
||||
|
||||
for _, matchers := range ways {
|
||||
c, err := compileMatchers(minimizeMatchers(matchers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = append(result, c)
|
||||
}
|
||||
|
||||
case *nodeAnyOf:
|
||||
ways := make([][]match.Matcher, len(node.children()))
|
||||
for _, desc := range node.children() {
|
||||
variants, err := do2(desc, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fmt.Println("variants any", variants)
|
||||
|
||||
for x, l := 0, len(ways); x < l; x++ {
|
||||
for i := 0; i < len(variants); i++ {
|
||||
o := optimize(variants[i])
|
||||
if i == len(variants)-1 {
|
||||
ways[x] = append(ways[x], o)
|
||||
} else {
|
||||
var w []match.Matcher
|
||||
copy(w, ways[x])
|
||||
ways = append(ways, append(w, o))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println("ways any", ways)
|
||||
}
|
||||
|
||||
for _, matchers := range ways {
|
||||
c, err := compileMatchers(minimizeMatchers(matchers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result = append(result, c)
|
||||
}
|
||||
|
||||
case *nodeList:
|
||||
result = append(result, match.NewList([]rune(n.chars), n.not))
|
||||
|
||||
case *nodeRange:
|
||||
result = append(result, match.NewRange(n.lo, n.hi, n.not))
|
||||
|
||||
case *nodeAny:
|
||||
result = append(result, match.NewAny(s))
|
||||
|
||||
case *nodeSuper:
|
||||
result = append(result, match.NewSuper())
|
||||
|
||||
case *nodeSingle:
|
||||
result = append(result, match.NewSingle(s))
|
||||
|
||||
case *nodeText:
|
||||
result = append(result, match.NewText(n.text))
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("could not compile tree: unknown node type")
|
||||
}
|
||||
|
||||
for i, m := range result {
|
||||
result[i] = optimize(m)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func compile(ast *nodePattern, s []rune) (Glob, error) {
|
||||
// ms, err := do2(ast, s)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
// if len(ms) == 1 {
|
||||
// return ms[0], nil
|
||||
// } else {
|
||||
// return match.NewAnyOf(ms), nil
|
||||
// }
|
||||
|
||||
g, err := do(ast, s)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return g, nil
|
||||
}
|
vendor/github.com/gobwas/glob/compiler_test.go (new file, generated, vendored, 546 lines)
@@ -0,0 +1,546 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"github.com/gobwas/glob/match"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var separators = []rune{'.'}
|
||||
|
||||
func TestGlueMatchers(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
in []match.Matcher
|
||||
exp match.Matcher
|
||||
}{
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewSuper(),
|
||||
match.NewSingle(nil),
|
||||
},
|
||||
match.NewMin(1),
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewAny(separators),
|
||||
match.NewSingle(separators),
|
||||
},
|
||||
match.EveryOf{match.Matchers{
|
||||
match.NewMin(1),
|
||||
match.NewContains(string(separators), true),
|
||||
}},
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewSingle(nil),
|
||||
match.NewSingle(nil),
|
||||
match.NewSingle(nil),
|
||||
},
|
||||
match.EveryOf{match.Matchers{
|
||||
match.NewMin(3),
|
||||
match.NewMax(3),
|
||||
}},
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewList([]rune{'a'}, true),
|
||||
match.NewAny([]rune{'a'}),
|
||||
},
|
||||
match.EveryOf{match.Matchers{
|
||||
match.NewMin(1),
|
||||
match.NewContains("a", true),
|
||||
}},
|
||||
},
|
||||
} {
|
||||
act, err := compileMatchers(test.in)
|
||||
if err != nil {
|
||||
t.Errorf("#%d convert matchers error: %s", id, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(act, test.exp) {
|
||||
t.Errorf("#%d unexpected convert matchers result:\nact: %#v;\nexp: %#v", id, act, test.exp)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileMatchers(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
in []match.Matcher
|
||||
exp match.Matcher
|
||||
}{
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewSuper(),
|
||||
match.NewSingle(separators),
|
||||
match.NewText("c"),
|
||||
},
|
||||
match.NewBTree(
|
||||
match.NewText("c"),
|
||||
match.NewBTree(
|
||||
match.NewSingle(separators),
|
||||
match.NewSuper(),
|
||||
nil,
|
||||
),
|
||||
nil,
|
||||
),
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewAny(nil),
|
||||
match.NewText("c"),
|
||||
match.NewAny(nil),
|
||||
},
|
||||
match.NewBTree(
|
||||
match.NewText("c"),
|
||||
match.NewAny(nil),
|
||||
match.NewAny(nil),
|
||||
),
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
match.NewSingle(nil),
|
||||
},
|
||||
match.NewRow(
|
||||
4,
|
||||
match.Matchers{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
match.NewSingle(nil),
|
||||
}...,
|
||||
),
|
||||
},
|
||||
} {
|
||||
act, err := compileMatchers(test.in)
|
||||
if err != nil {
|
||||
t.Errorf("#%d convert matchers error: %s", id, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(act, test.exp) {
|
||||
t.Errorf("#%d unexpected convert matchers result:\nact: %#v\nexp: %#v", id, act, test.exp)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertMatchers(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
in, exp []match.Matcher
|
||||
}{
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
match.NewSingle(nil),
|
||||
match.NewAny(nil),
|
||||
},
|
||||
[]match.Matcher{
|
||||
match.NewRow(
|
||||
4,
|
||||
[]match.Matcher{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
match.NewSingle(nil),
|
||||
}...,
|
||||
),
|
||||
match.NewAny(nil),
|
||||
},
|
||||
},
|
||||
{
|
||||
[]match.Matcher{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
match.NewSingle(nil),
|
||||
match.NewAny(nil),
|
||||
match.NewSingle(nil),
|
||||
match.NewSingle(nil),
|
||||
match.NewAny(nil),
|
||||
},
|
||||
[]match.Matcher{
|
||||
match.NewRow(
|
||||
3,
|
||||
match.Matchers{
|
||||
match.NewRange('a', 'c', true),
|
||||
match.NewList([]rune{'z', 't', 'e'}, false),
|
||||
match.NewText("c"),
|
||||
}...,
|
||||
),
|
||||
match.NewMin(3),
|
||||
},
|
||||
},
|
||||
} {
|
||||
act := minimizeMatchers(test.in)
|
||||
if !reflect.DeepEqual(act, test.exp) {
|
||||
t.Errorf("#%d unexpected convert matchers 2 result:\nact: %#v\nexp: %#v", id, act, test.exp)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func pattern(nodes ...node) *nodePattern {
|
||||
return &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: nodes,
|
||||
},
|
||||
}
|
||||
}
|
||||
func anyOf(nodes ...node) *nodeAnyOf {
|
||||
return &nodeAnyOf{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: nodes,
|
||||
},
|
||||
}
|
||||
}
|
||||
func TestCompiler(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
ast *nodePattern
|
||||
result Glob
|
||||
sep []rune
|
||||
}{
|
||||
{
|
||||
ast: pattern(&nodeText{text: "abc"}),
|
||||
result: match.NewText("abc"),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}),
|
||||
sep: separators,
|
||||
result: match.NewAny(separators),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}),
|
||||
result: match.NewSuper(),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeSuper{}),
|
||||
result: match.NewSuper(),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeSingle{}),
|
||||
sep: separators,
|
||||
result: match.NewSingle(separators),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeRange{
|
||||
lo: 'a',
|
||||
hi: 'z',
|
||||
not: true,
|
||||
}),
|
||||
result: match.NewRange('a', 'z', true),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeList{
|
||||
chars: "abc",
|
||||
not: true,
|
||||
}),
|
||||
result: match.NewList([]rune{'a', 'b', 'c'}, true),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
|
||||
sep: separators,
|
||||
result: match.EveryOf{Matchers: match.Matchers{
|
||||
match.NewMin(3),
|
||||
match.NewContains(string(separators), true),
|
||||
}},
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeSingle{}, &nodeSingle{}, &nodeSingle{}),
|
||||
result: match.NewMin(3),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}, &nodeSingle{}),
|
||||
sep: separators,
|
||||
result: match.NewBTree(
|
||||
match.NewRow(
|
||||
4,
|
||||
match.Matchers{
|
||||
match.NewText("abc"),
|
||||
match.NewSingle(separators),
|
||||
}...,
|
||||
),
|
||||
match.NewAny(separators),
|
||||
nil,
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSingle{}),
|
||||
sep: separators,
|
||||
result: match.NewBTree(
|
||||
match.NewRow(
|
||||
5,
|
||||
match.Matchers{
|
||||
match.NewSingle(separators),
|
||||
match.NewText("abc"),
|
||||
match.NewSingle(separators),
|
||||
}...,
|
||||
),
|
||||
match.NewSuper(),
|
||||
nil,
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeText{text: "abc"}),
|
||||
result: match.NewSuffix("abc"),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}),
|
||||
result: match.NewPrefix("abc"),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeText{text: "abc"}, &nodeAny{}, &nodeText{text: "def"}),
|
||||
result: match.NewPrefixSuffix("abc", "def"),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
|
||||
result: match.NewContains("abc", false),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeAny{}, &nodeAny{}, &nodeAny{}, &nodeText{text: "abc"}, &nodeAny{}, &nodeAny{}),
|
||||
sep: separators,
|
||||
result: match.NewBTree(
|
||||
match.NewText("abc"),
|
||||
match.NewAny(separators),
|
||||
match.NewAny(separators),
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(&nodeSuper{}, &nodeSingle{}, &nodeText{text: "abc"}, &nodeSuper{}, &nodeSingle{}),
|
||||
result: match.NewBTree(
|
||||
match.NewText("abc"),
|
||||
match.NewMin(1),
|
||||
match.NewMin(1),
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(anyOf(&nodeText{text: "abc"})),
|
||||
result: match.NewText("abc"),
|
||||
},
|
||||
{
|
||||
ast: pattern(anyOf(pattern(anyOf(pattern(&nodeText{text: "abc"}))))),
|
||||
result: match.NewText("abc"),
|
||||
},
|
||||
{
|
||||
ast: pattern(anyOf(
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
&nodeSingle{},
|
||||
),
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
&nodeList{chars: "def"},
|
||||
),
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
),
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
),
|
||||
)),
|
||||
result: match.NewBTree(
|
||||
match.NewText("abc"),
|
||||
nil,
|
||||
match.AnyOf{Matchers: match.Matchers{
|
||||
match.NewSingle(nil),
|
||||
match.NewList([]rune{'d', 'e', 'f'}, false),
|
||||
match.NewNothing(),
|
||||
}},
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(
|
||||
&nodeRange{lo: 'a', hi: 'z'},
|
||||
&nodeRange{lo: 'a', hi: 'x', not: true},
|
||||
&nodeAny{},
|
||||
),
|
||||
result: match.NewBTree(
|
||||
match.NewRow(
|
||||
2,
|
||||
match.Matchers{
|
||||
match.NewRange('a', 'z', false),
|
||||
match.NewRange('a', 'x', true),
|
||||
}...,
|
||||
),
|
||||
nil,
|
||||
match.NewSuper(),
|
||||
),
|
||||
},
|
||||
{
|
||||
ast: pattern(anyOf(
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
&nodeList{chars: "abc"},
|
||||
&nodeText{text: "ghi"},
|
||||
),
|
||||
pattern(
|
||||
&nodeText{text: "abc"},
|
||||
&nodeList{chars: "def"},
|
||||
&nodeText{text: "ghi"},
|
||||
),
|
||||
)),
|
||||
result: match.NewRow(
|
||||
7,
|
||||
match.Matchers{
|
||||
match.NewText("abc"),
|
||||
match.AnyOf{Matchers: match.Matchers{
|
||||
match.NewList([]rune{'a', 'b', 'c'}, false),
|
||||
match.NewList([]rune{'d', 'e', 'f'}, false),
|
||||
}},
|
||||
match.NewText("ghi"),
|
||||
}...,
|
||||
),
|
||||
},
|
||||
// {
|
||||
// ast: pattern(
|
||||
// anyOf(&nodeText{text: "a"}, &nodeText{text: "b"}),
|
||||
// anyOf(&nodeText{text: "c"}, &nodeText{text: "d"}),
|
||||
// ),
|
||||
// result: match.AnyOf{Matchers: match.Matchers{
|
||||
// match.NewRow(Matchers: match.Matchers{match.Raw{"a"}, match.Raw{"c", 1}}),
|
||||
// match.NewRow(Matchers: match.Matchers{match.Raw{"a"}, match.Raw{"d"}}),
|
||||
// match.NewRow(Matchers: match.Matchers{match.Raw{"b"}, match.Raw{"c", 1}}),
|
||||
// match.NewRow(Matchers: match.Matchers{match.Raw{"b"}, match.Raw{"d"}}),
|
||||
// }},
|
||||
// },
|
||||
} {
|
||||
m, err := compile(test.ast, test.sep)
|
||||
if err != nil {
|
||||
t.Errorf("compilation error: %s", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(m, test.result) {
|
||||
t.Errorf("#%d results are not equal:\nexp: %#v\nact: %#v", id, test.result, m)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const complexityString = "abcd"
|
||||
|
||||
//func BenchmarkComplexityAny(b *testing.B) {
|
||||
// m := match.NewAny(nil)
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityContains(b *testing.B) {
|
||||
// m := match.NewContains()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityList(b *testing.B) {
|
||||
// m := match.NewList()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityMax(b *testing.B) {
|
||||
// m := match.NewMax()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityMin(b *testing.B) {
|
||||
// m := match.NewMin()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityNothing(b *testing.B) {
|
||||
// m := match.NewNothing()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityPrefix(b *testing.B) {
|
||||
// m := match.NewPrefix()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityPrefixSuffix(b *testing.B) {
|
||||
// m := match.NewPrefixSuffix()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityRange(b *testing.B) {
|
||||
// m := match.NewRange()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityRow(b *testing.B) {
|
||||
// m := match.NewRow()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexitySingle(b *testing.B) {
|
||||
// m := match.NewSingle(nil)
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexitySuffix(b *testing.B) {
|
||||
// m := match.NewSuffix()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexitySuper(b *testing.B) {
|
||||
// m := match.NewSuper()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityText(b *testing.B) {
|
||||
// m := match.NewText()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityAnyOf(b *testing.B) {
|
||||
// m := match.NewAnyOf()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityBTree(b *testing.B) {
|
||||
// m := match.NewBTree(match.NewText("abc"), match.NewText("d"), match.NewText("e"))
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
||||
//func BenchmarkComplexityEveryOf(b *testing.B) {
|
||||
// m := match.NewEveryOf()
|
||||
// for i := 0; i < b.N; i++ {
|
||||
// _ = m.Match(complexityString)
|
||||
// _, _ = m.Index(complexityString)
|
||||
// }
|
||||
//}
|
vendor/github.com/gobwas/glob/glob.go (new file, generated, vendored, 75 lines)
@@ -0,0 +1,75 @@
|
||||
package glob
|
||||
|
||||
// Glob represents compiled glob pattern.
|
||||
type Glob interface {
|
||||
Match(string) bool
|
||||
}
|
||||
|
||||
// Compile creates Glob for given pattern and strings (if any present after pattern) as separators.
|
||||
// The pattern syntax is:
|
||||
//
|
||||
// pattern:
|
||||
// { term }
|
||||
//
|
||||
// term:
|
||||
// `*` matches any sequence of non-separator characters
|
||||
// `**` matches any sequence of characters
|
||||
// `?` matches any single non-separator character
|
||||
// `[` [ `!` ] { character-range } `]`
|
||||
// character class (must be non-empty)
|
||||
// `{` pattern-list `}`
|
||||
// pattern alternatives
|
||||
// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
|
||||
// `\` c matches character c
|
||||
//
|
||||
// character-range:
|
||||
// c matches character c (c != `\\`, `-`, `]`)
|
||||
// `\` c matches character c
|
||||
// lo `-` hi matches character c for lo <= c <= hi
|
||||
//
|
||||
// pattern-list:
|
||||
// pattern { `,` pattern }
|
||||
// comma-separated (without spaces) patterns
|
||||
//
|
||||
func Compile(pattern string, separators ...rune) (Glob, error) {
|
||||
ast, err := parse(newLexer(pattern))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
matcher, err := compile(ast, separators)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return matcher, nil
|
||||
}
|
||||
|
||||
// MustCompile is the same as Compile, except that if Compile returns error, this will panic
|
||||
func MustCompile(pattern string, separators ...rune) Glob {
|
||||
g, err := Compile(pattern, separators...)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return g
|
||||
}
|
||||
|
||||
// QuoteMeta returns a string that quotes all glob pattern meta characters
|
||||
// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\{foo\*\}`.
|
||||
func QuoteMeta(s string) string {
|
||||
b := make([]byte, 2*len(s))
|
||||
|
||||
// a byte loop is correct because all meta characters are ASCII
|
||||
j := 0
|
||||
for i := 0; i < len(s); i++ {
|
||||
if special(s[i]) {
|
||||
b[j] = '\\'
|
||||
j++
|
||||
}
|
||||
b[j] = s[i]
|
||||
j++
|
||||
}
|
||||
|
||||
return string(b[0:j])
|
||||
}
|
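A brief usage sketch for Compile and the separator argument described in the documentation above; the patterns and fixtures are illustrative, not taken from the library's own tests.

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// Without separators, `*` may cross path boundaries.
	g1 := glob.MustCompile("*.jpg")
	fmt.Println(g1.Match("holiday/photo.jpg")) // true

	// With '/' as a separator, `*` stops at path boundaries but `**` does not.
	g2 := glob.MustCompile("*.jpg", '/')
	g3 := glob.MustCompile("**.jpg", '/')
	fmt.Println(g2.Match("holiday/photo.jpg")) // false
	fmt.Println(g3.Match("holiday/photo.jpg")) // true

	// Alternatives and character classes, per the syntax above.
	g4 := glob.MustCompile("{photo,img}[0-9].png")
	fmt.Println(g4.Match("img7.png")) // true
}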
vendor/github.com/gobwas/glob/glob_test.go (new file, generated, vendored, 507 lines)
@@ -0,0 +1,507 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"testing"
|
||||
)
|
||||
|
||||
const (
|
||||
pattern_all = "[a-z][!a-x]*cat*[h][!b]*eyes*"
|
||||
regexp_all = `^[a-z][^a-x].*cat.*[h][^b].*eyes.*$`
|
||||
fixture_all_match = "my cat has very bright eyes"
|
||||
fixture_all_mismatch = "my dog has very bright eyes"
|
||||
|
||||
pattern_plain = "google.com"
|
||||
regexp_plain = `^google\.com$`
|
||||
fixture_plain_match = "google.com"
|
||||
fixture_plain_mismatch = "gobwas.com"
|
||||
|
||||
pattern_multiple = "https://*.google.*"
|
||||
regexp_multiple = `^https:\/\/.*\.google\..*$`
|
||||
fixture_multiple_match = "https://account.google.com"
|
||||
fixture_multiple_mismatch = "https://google.com"
|
||||
|
||||
pattern_alternatives = "{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}"
|
||||
regexp_alternatives = `^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$`
|
||||
fixture_alternatives_match = "http://yahoo.com"
|
||||
fixture_alternatives_mismatch = "http://google.com"
|
||||
|
||||
pattern_alternatives_suffix = "{https://*gobwas.com,http://exclude.gobwas.com}"
|
||||
regexp_alternatives_suffix = `^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$`
|
||||
fixture_alternatives_suffix_first_match = "https://safe.gobwas.com"
|
||||
fixture_alternatives_suffix_first_mismatch = "http://safe.gobwas.com"
|
||||
fixture_alternatives_suffix_second = "http://exclude.gobwas.com"
|
||||
|
||||
pattern_prefix = "abc*"
|
||||
regexp_prefix = `^abc.*$`
|
||||
pattern_suffix = "*def"
|
||||
regexp_suffix = `^.*def$`
|
||||
pattern_prefix_suffix = "ab*ef"
|
||||
regexp_prefix_suffix = `^ab.*ef$`
|
||||
fixture_prefix_suffix_match = "abcdef"
|
||||
fixture_prefix_suffix_mismatch = "af"
|
||||
|
||||
pattern_alternatives_combine_lite = "{abc*def,abc?def,abc[zte]def}"
|
||||
regexp_alternatives_combine_lite = `^(abc.*def|abc.def|abc[zte]def)$`
|
||||
fixture_alternatives_combine_lite = "abczdef"
|
||||
|
||||
pattern_alternatives_combine_hard = "{abc*[a-c]def,abc?[d-g]def,abc[zte]?def}"
|
||||
regexp_alternatives_combine_hard = `^(abc.*[a-c]def|abc.[d-g]def|abc[zte].def)$`
|
||||
fixture_alternatives_combine_hard = "abczqdef"
|
||||
)
|
||||
|
||||
type test struct {
|
||||
pattern, match string
|
||||
should bool
|
||||
delimiters []rune
|
||||
}
|
||||
|
||||
func glob(s bool, p, m string, d ...rune) test {
|
||||
return test{p, m, s, d}
|
||||
}
|
||||
|
||||
func TestGlob(t *testing.T) {
|
||||
for _, test := range []test{
|
||||
glob(true, "* ?at * eyes", "my cat has very bright eyes"),
|
||||
|
||||
glob(true, "abc", "abc"),
|
||||
glob(true, "a*c", "abc"),
|
||||
glob(true, "a*c", "a12345c"),
|
||||
glob(true, "a?c", "a1c"),
|
||||
glob(true, "a.b", "a.b", '.'),
|
||||
glob(true, "a.*", "a.b", '.'),
|
||||
glob(true, "a.**", "a.b.c", '.'),
|
||||
glob(true, "a.?.c", "a.b.c", '.'),
|
||||
glob(true, "a.?.?", "a.b.c", '.'),
|
||||
glob(true, "?at", "cat"),
|
||||
glob(true, "?at", "fat"),
|
||||
glob(true, "*", "abc"),
|
||||
glob(true, `\*`, "*"),
|
||||
glob(true, "**", "a.b.c", '.'),
|
||||
|
||||
glob(false, "?at", "at"),
|
||||
glob(false, "?at", "fat", 'f'),
|
||||
glob(false, "a.*", "a.b.c", '.'),
|
||||
glob(false, "a.?.c", "a.bb.c", '.'),
|
||||
glob(false, "*", "a.b.c", '.'),
|
||||
|
||||
glob(true, "*test", "this is a test"),
|
||||
glob(true, "this*", "this is a test"),
|
||||
glob(true, "*is *", "this is a test"),
|
||||
glob(true, "*is*a*", "this is a test"),
|
||||
glob(true, "**test**", "this is a test"),
|
||||
glob(true, "**is**a***test*", "this is a test"),
|
||||
|
||||
glob(false, "*is", "this is a test"),
|
||||
glob(false, "*no*", "this is a test"),
|
||||
glob(true, "[!a]*", "this is a test3"),
|
||||
|
||||
glob(true, "*abc", "abcabc"),
|
||||
glob(true, "**abc", "abcabc"),
|
||||
glob(true, "???", "abc"),
|
||||
glob(true, "?*?", "abc"),
|
||||
glob(true, "?*?", "ac"),
|
||||
|
||||
glob(true, "{abc,def}ghi", "defghi"),
|
||||
glob(true, "{abc,abcd}a", "abcda"),
|
||||
glob(true, "{a,ab}{bc,f}", "abc"),
|
||||
glob(true, "{*,**}{a,b}", "ab"),
|
||||
glob(false, "{*,**}{a,b}", "ac"),
|
||||
|
||||
glob(true, pattern_all, fixture_all_match),
|
||||
glob(false, pattern_all, fixture_all_mismatch),
|
||||
|
||||
glob(true, pattern_plain, fixture_plain_match),
|
||||
glob(false, pattern_plain, fixture_plain_mismatch),
|
||||
|
||||
glob(true, pattern_multiple, fixture_multiple_match),
|
||||
glob(false, pattern_multiple, fixture_multiple_mismatch),
|
||||
|
||||
glob(true, pattern_alternatives, fixture_alternatives_match),
|
||||
glob(false, pattern_alternatives, fixture_alternatives_mismatch),
|
||||
|
||||
glob(true, pattern_alternatives_suffix, fixture_alternatives_suffix_first_match),
|
||||
glob(false, pattern_alternatives_suffix, fixture_alternatives_suffix_first_mismatch),
|
||||
glob(true, pattern_alternatives_suffix, fixture_alternatives_suffix_second),
|
||||
|
||||
glob(true, pattern_alternatives_combine_hard, fixture_alternatives_combine_hard),
|
||||
|
||||
glob(true, pattern_alternatives_combine_lite, fixture_alternatives_combine_lite),
|
||||
|
||||
glob(true, pattern_prefix, fixture_prefix_suffix_match),
|
||||
glob(false, pattern_prefix, fixture_prefix_suffix_mismatch),
|
||||
|
||||
glob(true, pattern_suffix, fixture_prefix_suffix_match),
|
||||
glob(false, pattern_suffix, fixture_prefix_suffix_mismatch),
|
||||
|
||||
glob(true, pattern_prefix_suffix, fixture_prefix_suffix_match),
|
||||
glob(false, pattern_prefix_suffix, fixture_prefix_suffix_mismatch),
|
||||
} {
|
||||
g, err := Compile(test.pattern, test.delimiters...)
|
||||
if err != nil {
|
||||
t.Errorf("parsing pattern %q error: %s", test.pattern, err)
|
||||
continue
|
||||
}
|
||||
|
||||
result := g.Match(test.match)
|
||||
if result != test.should {
|
||||
t.Errorf("pattern %q matching %q should be %v but got %v\n%s", test.pattern, test.match, test.should, result, g)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestQuoteMeta(t *testing.T) {
|
||||
specialsQuoted := make([]byte, len(specials)*2)
|
||||
for i, j := 0, 0; i < len(specials); i, j = i+1, j+2 {
|
||||
specialsQuoted[j] = '\\'
|
||||
specialsQuoted[j+1] = specials[i]
|
||||
}
|
||||
|
||||
for id, test := range []struct {
|
||||
in, out string
|
||||
}{
|
||||
{
|
||||
in: `[foo*]`,
|
||||
out: `\[foo\*\]`,
|
||||
},
|
||||
{
|
||||
in: string(specials),
|
||||
out: string(specialsQuoted),
|
||||
},
|
||||
{
|
||||
in: string(append([]byte("some text and"), specials...)),
|
||||
out: string(append([]byte("some text and"), specialsQuoted...)),
|
||||
},
|
||||
} {
|
||||
act := QuoteMeta(test.in)
|
||||
if act != test.out {
|
||||
t.Errorf("#%d QuoteMeta(%q) = %q; want %q", id, test.in, act, test.out)
|
||||
}
|
||||
if _, err := Compile(act); err != nil {
|
||||
t.Errorf("#%d _, err := Compile(QuoteMeta(%q) = %q); err = %q", id, test.in, act, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkParseGlob(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
Compile(pattern_all)
|
||||
}
|
||||
}
|
||||
func BenchmarkParseRegexp(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
regexp.MustCompile(regexp_all)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAllGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_all)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_all_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkAllGlobMatchParallel(b *testing.B) {
|
||||
m, _ := Compile(pattern_all)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_ = m.Match(fixture_all_match)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkAllRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_all)
|
||||
f := []byte(fixture_all_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAllGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_all)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_all_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkAllGlobMismatchParallel(b *testing.B) {
|
||||
m, _ := Compile(pattern_all)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_ = m.Match(fixture_all_mismatch)
|
||||
}
|
||||
})
|
||||
}
|
||||
func BenchmarkAllRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_all)
|
||||
f := []byte(fixture_all_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkMultipleGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_multiple)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_multiple_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkMultipleRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_multiple)
|
||||
f := []byte(fixture_multiple_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkMultipleGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_multiple)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_multiple_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkMultipleRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_multiple)
|
||||
f := []byte(fixture_multiple_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAlternativesGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives)
|
||||
f := []byte(fixture_alternatives_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives)
|
||||
f := []byte(fixture_alternatives_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAlternativesSuffixFirstGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_suffix_first_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesSuffixFirstGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_suffix_first_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesSuffixSecondGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_suffix_second)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesCombineLiteGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives_combine_lite)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_combine_lite)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesCombineHardGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_alternatives_combine_hard)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_alternatives_combine_hard)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesSuffixFirstRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives_suffix)
|
||||
f := []byte(fixture_alternatives_suffix_first_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesSuffixFirstRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives_suffix)
|
||||
f := []byte(fixture_alternatives_suffix_first_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesSuffixSecondRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives_suffix)
|
||||
f := []byte(fixture_alternatives_suffix_second)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesCombineLiteRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives_combine_lite)
|
||||
f := []byte(fixture_alternatives_combine_lite)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkAlternativesCombineHardRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_alternatives_combine_hard)
|
||||
f := []byte(fixture_alternatives_combine_hard)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPlainGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_plain)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_plain_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkPlainRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_plain)
|
||||
f := []byte(fixture_plain_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkPlainGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_plain)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_plain_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkPlainRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_plain)
|
||||
f := []byte(fixture_plain_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPrefixGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_prefix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_prefix)
|
||||
f := []byte(fixture_prefix_suffix_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_prefix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_prefix)
|
||||
f := []byte(fixture_prefix_suffix_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSuffixGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkSuffixRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_suffix)
|
||||
f := []byte(fixture_prefix_suffix_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkSuffixGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkSuffixRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_suffix)
|
||||
f := []byte(fixture_prefix_suffix_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPrefixSuffixGlobMatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_prefix_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_match)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixSuffixRegexpMatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_prefix_suffix)
|
||||
f := []byte(fixture_prefix_suffix_match)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixSuffixGlobMismatch(b *testing.B) {
|
||||
m, _ := Compile(pattern_prefix_suffix)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(fixture_prefix_suffix_mismatch)
|
||||
}
|
||||
}
|
||||
func BenchmarkPrefixSuffixRegexpMismatch(b *testing.B) {
|
||||
m := regexp.MustCompile(regexp_prefix_suffix)
|
||||
f := []byte(fixture_prefix_suffix_mismatch)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = m.Match(f)
|
||||
}
|
||||
}
|
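The tests and benchmarks above exercise the public API of the new dependency: Compile (which optionally takes separator runes), the resulting matcher's Match method, and QuoteMeta for escaping metacharacters. A minimal usage sketch from outside the package, assuming the library is imported directly rather than through the vendor tree:

package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

func main() {
	// With '.' declared as a separator, '*' stops at the separator while
	// '**' crosses it, mirroring the delimiter cases in TestGlob above.
	g, err := glob.Compile("a.*", '.')
	if err != nil {
		panic(err)
	}
	fmt.Println(g.Match("a.b"))   // true
	fmt.Println(g.Match("a.b.c")) // false: '*' does not cross '.'

	// QuoteMeta escapes glob metacharacters so the result matches literally.
	fmt.Println(glob.QuoteMeta("[foo*]")) // \[foo\*\]
}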
478
vendor/github.com/gobwas/glob/lexer.go
generated
vendored
Normal file
@ -0,0 +1,478 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
const (
|
||||
char_any = '*'
|
||||
char_separator = ','
|
||||
char_single = '?'
|
||||
char_escape = '\\'
|
||||
char_range_open = '['
|
||||
char_range_close = ']'
|
||||
char_terms_open = '{'
|
||||
char_terms_close = '}'
|
||||
char_range_not = '!'
|
||||
char_range_between = '-'
|
||||
)
|
||||
|
||||
var specials = []byte{
|
||||
char_any,
|
||||
char_single,
|
||||
char_escape,
|
||||
char_range_open,
|
||||
char_range_close,
|
||||
char_terms_open,
|
||||
char_terms_close,
|
||||
}
|
||||
|
||||
func special(c byte) bool {
|
||||
return bytes.IndexByte(specials, c) != -1
|
||||
}
|
||||
|
||||
var eof rune = 0
|
||||
|
||||
type stateFn func(*lexer) stateFn
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
item_eof itemType = iota
|
||||
item_error
|
||||
item_text
|
||||
item_char
|
||||
item_any
|
||||
item_super
|
||||
item_single
|
||||
item_not
|
||||
item_separator
|
||||
item_range_open
|
||||
item_range_close
|
||||
item_range_lo
|
||||
item_range_hi
|
||||
item_range_between
|
||||
item_terms_open
|
||||
item_terms_close
|
||||
)
|
||||
|
||||
func (i itemType) String() string {
|
||||
switch i {
|
||||
case item_eof:
|
||||
return "eof"
|
||||
|
||||
case item_error:
|
||||
return "error"
|
||||
|
||||
case item_text:
|
||||
return "text"
|
||||
|
||||
case item_char:
|
||||
return "char"
|
||||
|
||||
case item_any:
|
||||
return "any"
|
||||
|
||||
case item_super:
|
||||
return "super"
|
||||
|
||||
case item_single:
|
||||
return "single"
|
||||
|
||||
case item_not:
|
||||
return "not"
|
||||
|
||||
case item_separator:
|
||||
return "separator"
|
||||
|
||||
case item_range_open:
|
||||
return "range_open"
|
||||
|
||||
case item_range_close:
|
||||
return "range_close"
|
||||
|
||||
case item_range_lo:
|
||||
return "range_lo"
|
||||
|
||||
case item_range_hi:
|
||||
return "range_hi"
|
||||
|
||||
case item_range_between:
|
||||
return "range_between"
|
||||
|
||||
case item_terms_open:
|
||||
return "terms_open"
|
||||
|
||||
case item_terms_close:
|
||||
return "terms_close"
|
||||
|
||||
default:
|
||||
return "undef"
|
||||
}
|
||||
}
|
||||
|
||||
type item struct {
|
||||
t itemType
|
||||
s string
|
||||
}
|
||||
|
||||
func (i item) String() string {
|
||||
return fmt.Sprintf("%v<%s>", i.t, i.s)
|
||||
}
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
width int
|
||||
runes int
|
||||
termScopes []int
|
||||
termPhrases map[int]int
|
||||
state stateFn
|
||||
items chan item
|
||||
}
|
||||
|
||||
func newLexer(source string) *lexer {
|
||||
l := &lexer{
|
||||
input: source,
|
||||
state: lexText,
|
||||
items: make(chan item, len(source)),
|
||||
termPhrases: make(map[int]int),
|
||||
}
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *lexer) run() {
|
||||
for state := lexText; state != nil; {
|
||||
state = state(l)
|
||||
}
|
||||
close(l.items)
|
||||
}
|
||||
|
||||
func (l *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-l.items:
|
||||
return item
|
||||
default:
|
||||
if l.state == nil {
|
||||
return item{t: item_eof}
|
||||
}
|
||||
|
||||
l.state = l.state(l)
|
||||
}
|
||||
}
|
||||
|
||||
panic("something went wrong")
|
||||
}
|
||||
|
||||
func (l *lexer) read() (r rune) {
|
||||
if l.pos >= len(l.input) {
|
||||
return eof
|
||||
}
|
||||
|
||||
r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
|
||||
l.pos += l.width
|
||||
l.runes++
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (l *lexer) unread() {
|
||||
l.pos -= l.width
|
||||
l.runes--
|
||||
}
|
||||
|
||||
func (l *lexer) reset() {
|
||||
l.pos = l.start
|
||||
l.runes = 0
|
||||
}
|
||||
|
||||
func (l *lexer) ignore() {
|
||||
l.start = l.pos
|
||||
l.runes = 0
|
||||
}
|
||||
|
||||
func (l *lexer) lookahead() rune {
|
||||
r := l.read()
|
||||
if r != eof {
|
||||
l.unread()
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
func (l *lexer) accept(valid string) bool {
|
||||
if strings.IndexRune(valid, l.read()) != -1 {
|
||||
return true
|
||||
}
|
||||
l.unread()
|
||||
return false
|
||||
}
|
||||
|
||||
func (l *lexer) acceptAll(valid string) {
|
||||
for strings.IndexRune(valid, l.read()) != -1 {
|
||||
}
|
||||
l.unread()
|
||||
}
|
||||
|
||||
func (l *lexer) emit(t itemType) {
|
||||
if l.pos == len(l.input) {
|
||||
l.items <- item{t, l.input[l.start:]}
|
||||
} else {
|
||||
l.items <- item{t, l.input[l.start:l.pos]}
|
||||
}
|
||||
|
||||
l.start = l.pos
|
||||
l.runes = 0
|
||||
l.width = 0
|
||||
}
|
||||
|
||||
func (l *lexer) emitMaybe(t itemType) {
|
||||
if l.pos > l.start {
|
||||
l.emit(t)
|
||||
}
|
||||
}
|
||||
|
||||
func (l *lexer) errorf(format string, args ...interface{}) {
|
||||
l.items <- item{item_error, fmt.Sprintf(format, args...)}
|
||||
}
|
||||
|
||||
func lexText(l *lexer) stateFn {
|
||||
for {
|
||||
c := l.read()
|
||||
if c == eof {
|
||||
break
|
||||
}
|
||||
|
||||
switch c {
|
||||
case char_escape:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
|
||||
l.read()
|
||||
l.ignore()
|
||||
|
||||
if l.read() == eof {
|
||||
l.errorf("unclosed '%s' character", string(char_escape))
|
||||
return nil
|
||||
}
|
||||
|
||||
case char_single:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexSingle
|
||||
|
||||
case char_any:
|
||||
var n stateFn
|
||||
if l.lookahead() == char_any {
|
||||
n = lexSuper
|
||||
} else {
|
||||
n = lexAny
|
||||
}
|
||||
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return n
|
||||
|
||||
case char_range_open:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexRangeOpen
|
||||
|
||||
case char_terms_open:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexTermsOpen
|
||||
|
||||
case char_terms_close:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexTermsClose
|
||||
|
||||
case char_separator:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexSeparator
|
||||
}
|
||||
}
|
||||
|
||||
if l.pos > l.start {
|
||||
l.emit(item_text)
|
||||
}
|
||||
|
||||
if len(l.termScopes) != 0 {
|
||||
l.errorf("invalid pattern syntax: unclosed terms")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.emit(item_eof)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func lexInsideRange(l *lexer) stateFn {
|
||||
for {
|
||||
c := l.read()
|
||||
if c == eof {
|
||||
l.errorf("unclosed range construction")
|
||||
return nil
|
||||
}
|
||||
|
||||
switch c {
|
||||
case char_range_not:
|
||||
// only first char makes sense
|
||||
if l.pos-l.width == l.start {
|
||||
l.emit(item_not)
|
||||
}
|
||||
|
||||
case char_range_between:
|
||||
if l.runes != 2 {
|
||||
l.errorf("unexpected length of lo char inside range")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.reset()
|
||||
return lexRangeHiLo
|
||||
|
||||
case char_range_close:
|
||||
l.unread()
|
||||
l.emitMaybe(item_text)
|
||||
return lexRangeClose
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lexRangeHiLo(l *lexer) stateFn {
|
||||
start := l.start
|
||||
|
||||
for {
|
||||
c := l.read()
|
||||
if c == eof {
|
||||
l.errorf("unexpected end of input")
|
||||
return nil
|
||||
}
|
||||
|
||||
switch c {
|
||||
case char_range_between:
|
||||
if l.runes != 1 {
|
||||
l.errorf("unexpected length of range: single character expected before minus")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.emit(item_range_between)
|
||||
|
||||
case char_range_close:
|
||||
l.unread()
|
||||
|
||||
if l.runes != 1 {
|
||||
l.errorf("unexpected length of range: single character expected before close")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.emit(item_range_hi)
|
||||
return lexRangeClose
|
||||
|
||||
default:
|
||||
if start != l.start {
|
||||
continue
|
||||
}
|
||||
|
||||
if l.runes != 1 {
|
||||
l.errorf("unexpected length of range: single character expected at the begining")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.emit(item_range_lo)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lexAny(l *lexer) stateFn {
|
||||
l.pos += 1
|
||||
l.emit(item_any)
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexSuper(l *lexer) stateFn {
|
||||
l.pos += 2
|
||||
l.emit(item_super)
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexSingle(l *lexer) stateFn {
|
||||
l.pos += 1
|
||||
l.emit(item_single)
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexSeparator(l *lexer) stateFn {
|
||||
if len(l.termScopes) == 0 {
|
||||
l.errorf("syntax error: separator not inside terms list")
|
||||
return nil
|
||||
}
|
||||
|
||||
posOpen := l.termScopes[len(l.termScopes)-1]
|
||||
|
||||
if l.pos-posOpen == 1 {
|
||||
l.errorf("syntax error: empty term before separator")
|
||||
return nil
|
||||
}
|
||||
|
||||
l.termPhrases[posOpen] += 1
|
||||
l.pos += 1
|
||||
l.emit(item_separator)
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexTermsOpen(l *lexer) stateFn {
|
||||
l.termScopes = append(l.termScopes, l.pos)
|
||||
l.pos += 1
|
||||
l.emit(item_terms_open)
|
||||
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexTermsClose(l *lexer) stateFn {
|
||||
if len(l.termScopes) == 0 {
|
||||
l.errorf("unexpected closing of terms: there is no opened terms")
|
||||
return nil
|
||||
}
|
||||
|
||||
lastOpen := len(l.termScopes) - 1
|
||||
posOpen := l.termScopes[lastOpen]
|
||||
|
||||
// if it is empty term
|
||||
if posOpen == l.pos-1 {
|
||||
l.errorf("term could not be empty")
|
||||
return nil
|
||||
}
|
||||
|
||||
if l.termPhrases[posOpen] == 0 {
|
||||
l.errorf("term must contain >1 phrases")
|
||||
return nil
|
||||
}
|
||||
|
||||
// cleanup
|
||||
l.termScopes = l.termScopes[:lastOpen]
|
||||
delete(l.termPhrases, posOpen)
|
||||
|
||||
l.pos += 1
|
||||
l.emit(item_terms_close)
|
||||
|
||||
return lexText
|
||||
}
|
||||
|
||||
func lexRangeOpen(l *lexer) stateFn {
|
||||
l.pos += 1
|
||||
l.emit(item_range_open)
|
||||
return lexInsideRange
|
||||
}
|
||||
|
||||
func lexRangeClose(l *lexer) stateFn {
|
||||
l.pos += 1
|
||||
l.emit(item_range_close)
|
||||
return lexText
|
||||
}
|
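For reference, the lexer above turns a pattern into a flat stream of items consumed by the parser; nextItem lazily advances the state machine until item_eof or item_error. A rough sketch of driving it (a fragment that assumes it sits inside this glob package, with "fmt" imported; the output strings come from the String methods above):

l := newLexer("ab*[c-d]")
for {
	it := l.nextItem()
	// prints: text<ab>, any<*>, range_open<[>, range_lo<c>,
	// range_between<->, range_hi<d>, range_close<]>, eof<>
	fmt.Println(it)
	if it.t == item_eof || it.t == item_error {
		break
	}
}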
136
vendor/github.com/gobwas/glob/lexer_test.go
generated
vendored
Normal file
@ -0,0 +1,136 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestLexGood(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
pattern string
|
||||
items []item
|
||||
}{
|
||||
{
|
||||
pattern: "hello",
|
||||
items: []item{
|
||||
item{item_text, "hello"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "hello?",
|
||||
items: []item{
|
||||
item{item_text, "hello"},
|
||||
item{item_single, "?"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "hellof*",
|
||||
items: []item{
|
||||
item{item_text, "hellof"},
|
||||
item{item_any, "*"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "hello**",
|
||||
items: []item{
|
||||
item{item_text, "hello"},
|
||||
item{item_super, "**"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[日-語]",
|
||||
items: []item{
|
||||
item{item_range_open, "["},
|
||||
item{item_range_lo, "日"},
|
||||
item{item_range_between, "-"},
|
||||
item{item_range_hi, "語"},
|
||||
item{item_range_close, "]"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[!日-語]",
|
||||
items: []item{
|
||||
item{item_range_open, "["},
|
||||
item{item_not, "!"},
|
||||
item{item_range_lo, "日"},
|
||||
item{item_range_between, "-"},
|
||||
item{item_range_hi, "語"},
|
||||
item{item_range_close, "]"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[日本語]",
|
||||
items: []item{
|
||||
item{item_range_open, "["},
|
||||
item{item_text, "日本語"},
|
||||
item{item_range_close, "]"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[!日本語]",
|
||||
items: []item{
|
||||
item{item_range_open, "["},
|
||||
item{item_not, "!"},
|
||||
item{item_text, "日本語"},
|
||||
item{item_range_close, "]"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "{a,b}",
|
||||
items: []item{
|
||||
item{item_terms_open, "{"},
|
||||
item{item_text, "a"},
|
||||
item{item_separator, ","},
|
||||
item{item_text, "b"},
|
||||
item{item_terms_close, "}"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "{[!日-語],*,?,{a,b,\\c}}",
|
||||
items: []item{
|
||||
item{item_terms_open, "{"},
|
||||
item{item_range_open, "["},
|
||||
item{item_not, "!"},
|
||||
item{item_range_lo, "日"},
|
||||
item{item_range_between, "-"},
|
||||
item{item_range_hi, "語"},
|
||||
item{item_range_close, "]"},
|
||||
item{item_separator, ","},
|
||||
item{item_any, "*"},
|
||||
item{item_separator, ","},
|
||||
item{item_single, "?"},
|
||||
item{item_separator, ","},
|
||||
item{item_terms_open, "{"},
|
||||
item{item_text, "a"},
|
||||
item{item_separator, ","},
|
||||
item{item_text, "b"},
|
||||
item{item_separator, ","},
|
||||
item{item_text, "c"},
|
||||
item{item_terms_close, "}"},
|
||||
item{item_terms_close, "}"},
|
||||
item{item_eof, ""},
|
||||
},
|
||||
},
|
||||
} {
|
||||
lexer := newLexer(test.pattern)
|
||||
for i, exp := range test.items {
|
||||
act := lexer.nextItem()
|
||||
if act.t != exp.t {
|
||||
t.Errorf("#%d wrong %d-th item type: exp: %v; act: %v (%s vs %s)", id, i, exp.t, act.t, exp, act)
|
||||
break
|
||||
}
|
||||
if act.s != exp.s {
|
||||
t.Errorf("#%d wrong %d-th item contents: exp: %q; act: %q (%s vs %s)", id, i, exp.s, act.s, exp, act)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
45
vendor/github.com/gobwas/glob/match/any.go
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/strings"
|
||||
)
|
||||
|
||||
type Any struct {
|
||||
Separators []rune
|
||||
}
|
||||
|
||||
func NewAny(s []rune) Any {
|
||||
return Any{s}
|
||||
}
|
||||
|
||||
func (self Any) Match(s string) bool {
|
||||
return strings.IndexAnyRunes(s, self.Separators) == -1
|
||||
}
|
||||
|
||||
func (self Any) Index(s string) (int, []int) {
|
||||
found := strings.IndexAnyRunes(s, self.Separators)
|
||||
switch found {
|
||||
case -1:
|
||||
case 0:
|
||||
return 0, segments0
|
||||
default:
|
||||
s = s[:found]
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s))
|
||||
for i := range s {
|
||||
segments = append(segments, i)
|
||||
}
|
||||
segments = append(segments, len(s))
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Any) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Any) String() string {
|
||||
return fmt.Sprintf("<any:![%s]>", string(self.Separators))
|
||||
}
|
85
vendor/github.com/gobwas/glob/match/any_of.go
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type AnyOf struct {
|
||||
Matchers Matchers
|
||||
}
|
||||
|
||||
func NewAnyOf(m ...Matcher) AnyOf {
|
||||
return AnyOf{Matchers(m)}
|
||||
}
|
||||
|
||||
func (self *AnyOf) Add(m Matcher) error {
|
||||
self.Matchers = append(self.Matchers, m)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self AnyOf) Match(s string) bool {
|
||||
for _, m := range self.Matchers {
|
||||
if m.Match(s) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self AnyOf) Index(s string) (int, []int) {
|
||||
index := -1
|
||||
|
||||
segments := acquireSegments(len(s))
|
||||
for _, m := range self.Matchers {
|
||||
idx, seg := m.Index(s)
|
||||
if idx == -1 {
|
||||
continue
|
||||
}
|
||||
|
||||
if index == -1 || idx < index {
|
||||
index = idx
|
||||
segments = append(segments[:0], seg...)
|
||||
continue
|
||||
}
|
||||
|
||||
if idx > index {
|
||||
continue
|
||||
}
|
||||
|
||||
// here idx == index
|
||||
segments = appendMerge(segments, seg)
|
||||
}
|
||||
|
||||
if index == -1 {
|
||||
releaseSegments(segments)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
return index, segments
|
||||
}
|
||||
|
||||
func (self AnyOf) Len() (l int) {
|
||||
l = -1
|
||||
for _, m := range self.Matchers {
|
||||
ml := m.Len()
|
||||
if ml == -1 {
|
||||
return -1
|
||||
}
|
||||
|
||||
if l == -1 {
|
||||
l = ml
|
||||
continue
|
||||
}
|
||||
|
||||
if l != ml {
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (self AnyOf) String() string {
|
||||
return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
|
||||
}
|
53
vendor/github.com/gobwas/glob/match/any_of_test.go
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAnyOfIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
matchers Matchers
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
Matchers{
|
||||
NewAny(nil),
|
||||
NewText("b"),
|
||||
NewText("c"),
|
||||
},
|
||||
"abc",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
{
|
||||
Matchers{
|
||||
NewPrefix("b"),
|
||||
NewSuffix("c"),
|
||||
},
|
||||
"abc",
|
||||
0,
|
||||
[]int{3},
|
||||
},
|
||||
{
|
||||
Matchers{
|
||||
NewList([]rune("[def]"), false),
|
||||
NewList([]rune("[abc]"), false),
|
||||
},
|
||||
"abcdef",
|
||||
0,
|
||||
[]int{1},
|
||||
},
|
||||
} {
|
||||
everyOf := NewAnyOf(test.matchers...)
|
||||
index, segments := everyOf.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
57
vendor/github.com/gobwas/glob/match/any_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAnyIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
sep []rune
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
[]rune{'.'},
|
||||
"abc",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
{
|
||||
[]rune{'.'},
|
||||
"abc.def",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
} {
|
||||
p := NewAny(test.sep)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexAny(b *testing.B) {
|
||||
m := NewAny(bench_separators)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexAnyParallel(b *testing.B) {
|
||||
m := NewAny(bench_separators)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
146
vendor/github.com/gobwas/glob/match/btree.go
generated
vendored
Normal file
@ -0,0 +1,146 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type BTree struct {
|
||||
Value Matcher
|
||||
Left Matcher
|
||||
Right Matcher
|
||||
ValueLengthRunes int
|
||||
LeftLengthRunes int
|
||||
RightLengthRunes int
|
||||
LengthRunes int
|
||||
}
|
||||
|
||||
func NewBTree(Value, Left, Right Matcher) (tree BTree) {
|
||||
tree.Value = Value
|
||||
tree.Left = Left
|
||||
tree.Right = Right
|
||||
|
||||
lenOk := true
|
||||
if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
|
||||
if Left != nil {
|
||||
if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
}
|
||||
|
||||
if Right != nil {
|
||||
if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
|
||||
lenOk = false
|
||||
}
|
||||
}
|
||||
|
||||
if lenOk {
|
||||
tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
|
||||
} else {
|
||||
tree.LengthRunes = -1
|
||||
}
|
||||
|
||||
return tree
|
||||
}
|
||||
|
||||
func (self BTree) Len() int {
|
||||
return self.LengthRunes
|
||||
}
|
||||
|
||||
// todo?
|
||||
func (self BTree) Index(s string) (int, []int) {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self BTree) Match(s string) bool {
|
||||
inputLen := len(s)
|
||||
|
||||
// self.LengthRunes, self.LeftLengthRunes and self.RightLengthRunes hold the length in runes of each part;
|
||||
// here we compare them against the byte length of the input for speed,
|
||||
// but the checks still work, since a 1-rune string is at least 1 byte long.
|
||||
if self.LengthRunes != -1 && self.LengthRunes > inputLen {
|
||||
return false
|
||||
}
|
||||
|
||||
// try to cut unnecessary parts
|
||||
// by knowledge of length of right and left part
|
||||
var offset, limit int
|
||||
if self.LeftLengthRunes >= 0 {
|
||||
offset = self.LeftLengthRunes
|
||||
}
|
||||
if self.RightLengthRunes >= 0 {
|
||||
limit = inputLen - self.RightLengthRunes
|
||||
} else {
|
||||
limit = inputLen
|
||||
}
|
||||
|
||||
for offset < limit {
|
||||
// search for matching part in substring
|
||||
index, segments := self.Value.Index(s[offset:limit])
|
||||
if index == -1 {
|
||||
releaseSegments(segments)
|
||||
return false
|
||||
}
|
||||
|
||||
l := s[:offset+index]
|
||||
var left bool
|
||||
if self.Left != nil {
|
||||
left = self.Left.Match(l)
|
||||
} else {
|
||||
left = l == ""
|
||||
}
|
||||
|
||||
if left {
|
||||
for i := len(segments) - 1; i >= 0; i-- {
|
||||
length := segments[i]
|
||||
|
||||
var right bool
|
||||
var r string
|
||||
// if there is no string for the right branch
|
||||
if inputLen <= offset+index+length {
|
||||
r = ""
|
||||
} else {
|
||||
r = s[offset+index+length:]
|
||||
}
|
||||
|
||||
if self.Right != nil {
|
||||
right = self.Right.Match(r)
|
||||
} else {
|
||||
right = r == ""
|
||||
}
|
||||
|
||||
if right {
|
||||
releaseSegments(segments)
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_, step := utf8.DecodeRuneInString(s[offset+index:])
|
||||
offset += index + step
|
||||
|
||||
releaseSegments(segments)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self BTree) String() string {
|
||||
const n string = "<nil>"
|
||||
var l, r string
|
||||
if self.Left == nil {
|
||||
l = n
|
||||
} else {
|
||||
l = self.Left.String()
|
||||
}
|
||||
if self.Right == nil {
|
||||
r = n
|
||||
} else {
|
||||
r = self.Right.String()
|
||||
}
|
||||
|
||||
return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
|
||||
}
|
90
vendor/github.com/gobwas/glob/match/btree_test.go
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestBTree(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
tree BTree
|
||||
str string
|
||||
exp bool
|
||||
}{
|
||||
{
|
||||
NewBTree(NewText("abc"), NewSuper(), NewSuper()),
|
||||
"abc",
|
||||
true,
|
||||
},
|
||||
{
|
||||
NewBTree(NewText("a"), NewSingle(nil), NewSingle(nil)),
|
||||
"aaa",
|
||||
true,
|
||||
},
|
||||
{
|
||||
NewBTree(NewText("b"), NewSingle(nil), nil),
|
||||
"bbb",
|
||||
false,
|
||||
},
|
||||
{
|
||||
NewBTree(
|
||||
NewText("c"),
|
||||
NewBTree(
|
||||
NewSingle(nil),
|
||||
NewSuper(),
|
||||
nil,
|
||||
),
|
||||
nil,
|
||||
),
|
||||
"abc",
|
||||
true,
|
||||
},
|
||||
} {
|
||||
act := test.tree.Match(test.str)
|
||||
if act != test.exp {
|
||||
t.Errorf("#%d match %q error: act: %t; exp: %t", id, test.str, act, test.exp)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type fakeMatcher struct {
|
||||
len int
|
||||
name string
|
||||
}
|
||||
|
||||
func (f *fakeMatcher) Match(string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
var i = 3
|
||||
|
||||
func (f *fakeMatcher) Index(s string) (int, []int) {
|
||||
seg := make([]int, 0, i)
|
||||
for x := 0; x < i; x++ {
|
||||
seg = append(seg, x)
|
||||
}
|
||||
return 0, seg
|
||||
}
|
||||
func (f *fakeMatcher) Len() int {
|
||||
return f.len
|
||||
}
|
||||
func (f *fakeMatcher) String() string {
|
||||
return f.name
|
||||
}
|
||||
|
||||
func BenchmarkMatchBTree(b *testing.B) {
|
||||
l := &fakeMatcher{4, "left_fake"}
|
||||
r := &fakeMatcher{4, "right_fake"}
|
||||
v := &fakeMatcher{2, "value_fake"}
|
||||
|
||||
// must be <= len(l + r + v)
|
||||
fixture := "abcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghijabcdefghij"
|
||||
|
||||
bt := NewBTree(v, l, r)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
bt.Match(fixture)
|
||||
}
|
||||
})
|
||||
}
|
58
vendor/github.com/gobwas/glob/match/contains.go
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Contains struct {
|
||||
Needle string
|
||||
Not bool
|
||||
}
|
||||
|
||||
func NewContains(needle string, not bool) Contains {
|
||||
return Contains{needle, not}
|
||||
}
|
||||
|
||||
func (self Contains) Match(s string) bool {
|
||||
return strings.Contains(s, self.Needle) != self.Not
|
||||
}
|
||||
|
||||
func (self Contains) Index(s string) (int, []int) {
|
||||
var offset int
|
||||
|
||||
idx := strings.Index(s, self.Needle)
|
||||
|
||||
if !self.Not {
|
||||
if idx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
offset = idx + len(self.Needle)
|
||||
if len(s) <= offset {
|
||||
return 0, []int{offset}
|
||||
}
|
||||
s = s[offset:]
|
||||
} else if idx != -1 {
|
||||
s = s[:idx]
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s) + 1)
|
||||
for i := range s {
|
||||
segments = append(segments, offset+i)
|
||||
}
|
||||
|
||||
return 0, append(segments, offset+len(s))
|
||||
}
|
||||
|
||||
func (self Contains) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Contains) String() string {
|
||||
var not string
|
||||
if self.Not {
|
||||
not = "!"
|
||||
}
|
||||
return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
|
||||
}
|
74
vendor/github.com/gobwas/glob/match/contains_test.go
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestContainsIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
prefix string
|
||||
not bool
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"ab",
|
||||
false,
|
||||
"abc",
|
||||
0,
|
||||
[]int{2, 3},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
false,
|
||||
"fffabfff",
|
||||
0,
|
||||
[]int{5, 6, 7, 8},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
true,
|
||||
"abc",
|
||||
0,
|
||||
[]int{0},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
true,
|
||||
"fffabfff",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
} {
|
||||
p := NewContains(test.prefix, test.not)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexContains(b *testing.B) {
|
||||
m := NewContains(string(bench_separators), true)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexContainsParallel(b *testing.B) {
|
||||
m := NewContains(string(bench_separators), true)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
55
vendor/github.com/gobwas/glob/match/debug/debug.go
generated
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
package debug
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/match"
|
||||
"math/rand"
|
||||
)
|
||||
|
||||
func Graphviz(pattern string, m match.Matcher) string {
|
||||
return fmt.Sprintf(`digraph G {graph[label="%s"];%s}`, pattern, graphviz_internal(m, fmt.Sprintf("%x", rand.Int63())))
|
||||
}
|
||||
|
||||
func graphviz_internal(m match.Matcher, id string) string {
|
||||
buf := &bytes.Buffer{}
|
||||
|
||||
switch matcher := m.(type) {
|
||||
case match.BTree:
|
||||
fmt.Fprintf(buf, `"%s"[label="%s"];`, id, matcher.Value.String())
|
||||
for _, m := range []match.Matcher{matcher.Left, matcher.Right} {
|
||||
switch n := m.(type) {
|
||||
case nil:
|
||||
rnd := rand.Int63()
|
||||
fmt.Fprintf(buf, `"%x"[label="<nil>"];`, rnd)
|
||||
fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd)
|
||||
|
||||
default:
|
||||
sub := fmt.Sprintf("%x", rand.Int63())
|
||||
fmt.Fprintf(buf, `"%s"->"%s";`, id, sub)
|
||||
fmt.Fprintf(buf, graphviz_internal(n, sub))
|
||||
}
|
||||
}
|
||||
|
||||
case match.AnyOf:
|
||||
fmt.Fprintf(buf, `"%s"[label="AnyOf"];`, id)
|
||||
for _, m := range matcher.Matchers {
|
||||
rnd := rand.Int63()
|
||||
fmt.Fprintf(buf, graphviz_internal(m, fmt.Sprintf("%x", rnd)))
|
||||
fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd)
|
||||
}
|
||||
|
||||
case match.EveryOf:
|
||||
fmt.Fprintf(buf, `"%s"[label="EveryOf"];`, id)
|
||||
for _, m := range matcher.Matchers {
|
||||
rnd := rand.Int63()
|
||||
fmt.Fprintf(buf, graphviz_internal(m, fmt.Sprintf("%x", rnd)))
|
||||
fmt.Fprintf(buf, `"%s"->"%x";`, id, rnd)
|
||||
}
|
||||
|
||||
default:
|
||||
fmt.Fprintf(buf, `"%s"[label="%s"];`, id, m.String())
|
||||
}
|
||||
|
||||
return buf.String()
|
||||
}
|
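Graphviz above renders a matcher tree as a DOT graph, which is handy when inspecting how a pattern compiles. A minimal sketch, assuming the gobwas/glob packages are imported directly (outside the vendor tree) and using a hand-built tree roughly corresponding to the pattern "*b*":

package main

import (
	"fmt"

	"github.com/gobwas/glob/match"
	"github.com/gobwas/glob/match/debug"
)

func main() {
	// A BTree with a literal value and super-wildcards on both sides.
	m := match.NewBTree(match.NewText("b"), match.NewSuper(), match.NewSuper())

	// Prints a digraph that can be piped to `dot -Tpng` for rendering.
	fmt.Println(debug.Graphviz("*b*", m))
}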
99
vendor/github.com/gobwas/glob/match/every_of.go
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type EveryOf struct {
|
||||
Matchers Matchers
|
||||
}
|
||||
|
||||
func NewEveryOf(m ...Matcher) EveryOf {
|
||||
return EveryOf{Matchers(m)}
|
||||
}
|
||||
|
||||
func (self *EveryOf) Add(m Matcher) error {
|
||||
self.Matchers = append(self.Matchers, m)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self EveryOf) Len() (l int) {
|
||||
for _, m := range self.Matchers {
|
||||
if ml := m.Len(); l > 0 {
|
||||
l += ml
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (self EveryOf) Index(s string) (int, []int) {
|
||||
var index int
|
||||
var offset int
|
||||
|
||||
// make `in` with cap as len(s),
|
||||
// because that is the maximum possible number of output segment values
|
||||
next := acquireSegments(len(s))
|
||||
current := acquireSegments(len(s))
|
||||
|
||||
sub := s
|
||||
for i, m := range self.Matchers {
|
||||
idx, seg := m.Index(sub)
|
||||
if idx == -1 {
|
||||
releaseSegments(next)
|
||||
releaseSegments(current)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
// we use copy here instead of `current = seg`
|
||||
// because seg is a slice from the reusable buffer `in`
|
||||
// and it could be overwritten in the next iteration
|
||||
current = append(current, seg...)
|
||||
} else {
|
||||
// clear the next
|
||||
next = next[:0]
|
||||
|
||||
delta := index - (idx + offset)
|
||||
for _, ex := range current {
|
||||
for _, n := range seg {
|
||||
if ex+delta == n {
|
||||
next = append(next, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(next) == 0 {
|
||||
releaseSegments(next)
|
||||
releaseSegments(current)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
current = append(current[:0], next...)
|
||||
}
|
||||
|
||||
index = idx + offset
|
||||
sub = s[index:]
|
||||
offset += idx
|
||||
}
|
||||
|
||||
releaseSegments(next)
|
||||
|
||||
return index, current
|
||||
}
|
||||
|
||||
func (self EveryOf) Match(s string) bool {
|
||||
for _, m := range self.Matchers {
|
||||
if !m.Match(s) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self EveryOf) String() string {
|
||||
return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
|
||||
}
|
45
vendor/github.com/gobwas/glob/match/every_of_test.go
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestEveryOfIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
matchers Matchers
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
Matchers{
|
||||
NewAny(nil),
|
||||
NewText("b"),
|
||||
NewText("c"),
|
||||
},
|
||||
"dbc",
|
||||
-1,
|
||||
nil,
|
||||
},
|
||||
{
|
||||
Matchers{
|
||||
NewAny(nil),
|
||||
NewPrefix("b"),
|
||||
NewSuffix("c"),
|
||||
},
|
||||
"abc",
|
||||
1,
|
||||
[]int{2},
|
||||
},
|
||||
} {
|
||||
everyOf := NewEveryOf(test.matchers...)
|
||||
index, segments := everyOf.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
49
vendor/github.com/gobwas/glob/match/list.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/runes"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type List struct {
|
||||
List []rune
|
||||
Not bool
|
||||
}
|
||||
|
||||
func NewList(list []rune, not bool) List {
|
||||
return List{list, not}
|
||||
}
|
||||
|
||||
func (self List) Match(s string) bool {
|
||||
r, w := utf8.DecodeRuneInString(s)
|
||||
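// a List matches exactly one rune: any bytes beyond the first rune mean a mismatch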
if len(s) > w {
|
||||
return false
|
||||
}
|
||||
|
||||
inList := runes.IndexRune(self.List, r) != -1
|
||||
return inList == !self.Not
|
||||
}
|
||||
|
||||
func (self List) Len() int {
|
||||
return lenOne
|
||||
}
|
||||
|
||||
func (self List) Index(s string) (int, []int) {
|
||||
for i, r := range s {
|
||||
if self.Not == (runes.IndexRune(self.List, r) == -1) {
|
||||
return i, segmentsByRuneLength[utf8.RuneLen(r)]
|
||||
}
|
||||
}
|
||||
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self List) String() string {
|
||||
var not string
|
||||
if self.Not {
|
||||
not = "!"
|
||||
}
|
||||
|
||||
return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
|
||||
}
|
58
vendor/github.com/gobwas/glob/match/list_test.go
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestListIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
list []rune
|
||||
not bool
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
[]rune("ab"),
|
||||
false,
|
||||
"abc",
|
||||
0,
|
||||
[]int{1},
|
||||
},
|
||||
{
|
||||
[]rune("ab"),
|
||||
true,
|
||||
"fffabfff",
|
||||
0,
|
||||
[]int{1},
|
||||
},
|
||||
} {
|
||||
p := NewList(test.list, test.not)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexList(b *testing.B) {
|
||||
m := NewList([]rune("def"), false)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
m.Index(bench_pattern)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexListParallel(b *testing.B) {
|
||||
m := NewList([]rune("def"), false)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
m.Index(bench_pattern)
|
||||
}
|
||||
})
|
||||
}
|
81
vendor/github.com/gobwas/glob/match/match.go
generated
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
package match
|
||||
|
||||
// todo common table of rune's length
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
const lenOne = 1
|
||||
const lenZero = 0
|
||||
const lenNo = -1
|
||||
|
||||
type Matcher interface {
|
||||
Match(string) bool
|
||||
Index(string) (int, []int)
|
||||
Len() int
|
||||
String() string
|
||||
}
|
||||
|
||||
type Matchers []Matcher
|
||||
|
||||
func (m Matchers) String() string {
|
||||
var s []string
|
||||
for _, matcher := range m {
|
||||
s = append(s, fmt.Sprint(matcher))
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s", strings.Join(s, ","))
|
||||
}
|
||||
|
||||
// appendMerge merges and sorts given already SORTED and UNIQUE segments.
|
||||
func appendMerge(target, sub []int) []int {
|
||||
lt, ls := len(target), len(sub)
|
||||
out := make([]int, 0, lt+ls)
|
||||
|
||||
for x, y := 0, 0; x < lt || y < ls; {
|
||||
if x >= lt {
|
||||
out = append(out, sub[y:]...)
|
||||
break
|
||||
}
|
||||
|
||||
if y >= ls {
|
||||
out = append(out, target[x:]...)
|
||||
break
|
||||
}
|
||||
|
||||
xValue := target[x]
|
||||
yValue := sub[y]
|
||||
|
||||
switch {
|
||||
|
||||
case xValue == yValue:
|
||||
out = append(out, xValue)
|
||||
x++
|
||||
y++
|
||||
|
||||
case xValue < yValue:
|
||||
out = append(out, xValue)
|
||||
x++
|
||||
|
||||
case yValue < xValue:
|
||||
out = append(out, yValue)
|
||||
y++
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
target = append(target[:0], out...)
|
||||
|
||||
return target
|
||||
}
|
||||
|
||||
func reverseSegments(input []int) {
|
||||
l := len(input)
|
||||
m := l / 2
|
||||
|
||||
for i := 0; i < m; i++ {
|
||||
input[i], input[l-i-1] = input[l-i-1], input[i]
|
||||
}
|
||||
}
|
90
vendor/github.com/gobwas/glob/match/match_test.go
generated
vendored
Normal file
@ -0,0 +1,90 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var bench_separators = []rune{'.'}
|
||||
|
||||
const bench_pattern = "abcdefghijklmnopqrstuvwxyz0123456789"
|
||||
|
||||
func TestAppendMerge(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
segments [2][]int
|
||||
exp []int
|
||||
}{
|
||||
{
|
||||
[2][]int{
|
||||
[]int{0, 6, 7},
|
||||
[]int{0, 1, 3},
|
||||
},
|
||||
[]int{0, 1, 3, 6, 7},
|
||||
},
|
||||
{
|
||||
[2][]int{
|
||||
[]int{0, 1, 3, 6, 7},
|
||||
[]int{0, 1, 10},
|
||||
},
|
||||
[]int{0, 1, 3, 6, 7, 10},
|
||||
},
|
||||
} {
|
||||
act := appendMerge(test.segments[0], test.segments[1])
|
||||
if !reflect.DeepEqual(act, test.exp) {
|
||||
t.Errorf("#%d merge sort segments unexpected:\nact: %v\nexp:%v", id, act, test.exp)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAppendMerge(b *testing.B) {
|
||||
s1 := []int{0, 1, 3, 6, 7}
|
||||
s2 := []int{0, 1, 3}
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
appendMerge(s1, s2)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkAppendMergeParallel(b *testing.B) {
|
||||
s1 := []int{0, 1, 3, 6, 7}
|
||||
s2 := []int{0, 1, 3}
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
appendMerge(s1, s2)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkReverse(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
reverseSegments([]int{1, 2, 3, 4})
|
||||
}
|
||||
}
|
||||
|
||||
func getTable() []int {
|
||||
table := make([]int, utf8.MaxRune+1)
|
||||
for i := 0; i <= utf8.MaxRune; i++ {
|
||||
table[i] = utf8.RuneLen(rune(i))
|
||||
}
|
||||
|
||||
return table
|
||||
}
|
||||
|
||||
var table = getTable()
|
||||
|
||||
const runeToLen = 'q'
|
||||
|
||||
func BenchmarkRuneLenFromTable(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = table[runeToLen]
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkRuneLenFromUTF8(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
_ = utf8.RuneLen(runeToLen)
|
||||
}
|
||||
}
|
49
vendor/github.com/gobwas/glob/match/max.go
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Max struct {
|
||||
Limit int
|
||||
}
|
||||
|
||||
func NewMax(l int) Max {
|
||||
return Max{l}
|
||||
}
|
||||
|
||||
func (self Max) Match(s string) bool {
|
||||
var l int
|
||||
for range s {
|
||||
l += 1
|
||||
if l > self.Limit {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self Max) Index(s string) (int, []int) {
|
||||
segments := acquireSegments(self.Limit + 1)
|
||||
segments = append(segments, 0)
|
||||
var count int
|
||||
for i, r := range s {
|
||||
count++
|
||||
if count > self.Limit {
|
||||
break
|
||||
}
|
||||
segments = append(segments, i+utf8.RuneLen(r))
|
||||
}
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Max) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Max) String() string {
|
||||
return fmt.Sprintf("<max:%d>", self.Limit)
|
||||
}
|
57
vendor/github.com/gobwas/glob/match/max_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMaxIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
limit int
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
3,
|
||||
"abc",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
{
|
||||
3,
|
||||
"abcdef",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
} {
|
||||
p := NewMax(test.limit)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexMax(b *testing.B) {
|
||||
m := NewMax(10)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexMaxParallel(b *testing.B) {
|
||||
m := NewMax(10)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
57
vendor/github.com/gobwas/glob/match/min.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
package match

import (
    "fmt"
    "unicode/utf8"
)

type Min struct {
    Limit int
}

func NewMin(l int) Min {
    return Min{l}
}

func (self Min) Match(s string) bool {
    var l int
    for range s {
        l += 1
        if l >= self.Limit {
            return true
        }
    }

    return false
}

func (self Min) Index(s string) (int, []int) {
    var count int

    c := len(s) - self.Limit + 1
    if c <= 0 {
        return -1, nil
    }

    segments := acquireSegments(c)
    for i, r := range s {
        count++
        if count >= self.Limit {
            segments = append(segments, i+utf8.RuneLen(r))
        }
    }

    if len(segments) == 0 {
        return -1, nil
    }

    return 0, segments
}

func (self Min) Len() int {
    return lenNo
}

func (self Min) String() string {
    return fmt.Sprintf("<min:%d>", self.Limit)
}
57
vendor/github.com/gobwas/glob/match/min_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestMinIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
limit int
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
1,
|
||||
"abc",
|
||||
0,
|
||||
[]int{1, 2, 3},
|
||||
},
|
||||
{
|
||||
3,
|
||||
"abcd",
|
||||
0,
|
||||
[]int{3, 4},
|
||||
},
|
||||
} {
|
||||
p := NewMin(test.limit)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexMin(b *testing.B) {
|
||||
m := NewMin(10)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexMinParallel(b *testing.B) {
|
||||
m := NewMin(10)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
27
vendor/github.com/gobwas/glob/match/nothing.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
package match

import (
    "fmt"
)

type Nothing struct{}

func NewNothing() Nothing {
    return Nothing{}
}

func (self Nothing) Match(s string) bool {
    return len(s) == 0
}

func (self Nothing) Index(s string) (int, []int) {
    return 0, segments0
}

func (self Nothing) Len() int {
    return lenZero
}

func (self Nothing) String() string {
    return fmt.Sprintf("<nothing>")
}
54
vendor/github.com/gobwas/glob/match/nothing_test.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestNothingIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"abc",
|
||||
0,
|
||||
[]int{0},
|
||||
},
|
||||
{
|
||||
"",
|
||||
0,
|
||||
[]int{0},
|
||||
},
|
||||
} {
|
||||
p := NewNothing()
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexNothing(b *testing.B) {
|
||||
m := NewNothing()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexNothingParallel(b *testing.B) {
|
||||
m := NewNothing()
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
50
vendor/github.com/gobwas/glob/match/prefix.go
generated
vendored
Normal file
@ -0,0 +1,50 @@
package match

import (
    "fmt"
    "strings"
    "unicode/utf8"
)

type Prefix struct {
    Prefix string
}

func NewPrefix(p string) Prefix {
    return Prefix{p}
}

func (self Prefix) Index(s string) (int, []int) {
    idx := strings.Index(s, self.Prefix)
    if idx == -1 {
        return -1, nil
    }

    length := len(self.Prefix)
    var sub string
    if len(s) > idx+length {
        sub = s[idx+length:]
    } else {
        sub = ""
    }

    segments := acquireSegments(len(sub) + 1)
    segments = append(segments, length)
    for i, r := range sub {
        segments = append(segments, length+i+utf8.RuneLen(r))
    }

    return idx, segments
}

func (self Prefix) Len() int {
    return lenNo
}

func (self Prefix) Match(s string) bool {
    return strings.HasPrefix(s, self.Prefix)
}

func (self Prefix) String() string {
    return fmt.Sprintf("<prefix:%s>", self.Prefix)
}
62
vendor/github.com/gobwas/glob/match/prefix_suffix.go
generated
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type PrefixSuffix struct {
|
||||
Prefix, Suffix string
|
||||
}
|
||||
|
||||
func NewPrefixSuffix(p, s string) PrefixSuffix {
|
||||
return PrefixSuffix{p, s}
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Index(s string) (int, []int) {
|
||||
prefixIdx := strings.Index(s, self.Prefix)
|
||||
if prefixIdx == -1 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
suffixLen := len(self.Suffix)
|
||||
if suffixLen <= 0 {
|
||||
return prefixIdx, []int{len(s) - prefixIdx}
|
||||
}
|
||||
|
||||
if (len(s) - prefixIdx) <= 0 {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
segments := acquireSegments(len(s) - prefixIdx)
|
||||
for sub := s[prefixIdx:]; ; {
|
||||
suffixIdx := strings.LastIndex(sub, self.Suffix)
|
||||
if suffixIdx == -1 {
|
||||
break
|
||||
}
|
||||
|
||||
segments = append(segments, suffixIdx+suffixLen)
|
||||
sub = sub[:suffixIdx]
|
||||
}
|
||||
|
||||
if len(segments) == 0 {
|
||||
releaseSegments(segments)
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
reverseSegments(segments)
|
||||
|
||||
return prefixIdx, segments
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) Match(s string) bool {
|
||||
return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix)
|
||||
}
|
||||
|
||||
func (self PrefixSuffix) String() string {
|
||||
return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix)
|
||||
}
|
67
vendor/github.com/gobwas/glob/match/prefix_suffix_test.go
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPrefixSuffixIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
prefix string
|
||||
suffix string
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"a",
|
||||
"c",
|
||||
"abc",
|
||||
0,
|
||||
[]int{3},
|
||||
},
|
||||
{
|
||||
"f",
|
||||
"f",
|
||||
"fffabfff",
|
||||
0,
|
||||
[]int{1, 2, 3, 6, 7, 8},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
"bc",
|
||||
"abc",
|
||||
0,
|
||||
[]int{3},
|
||||
},
|
||||
} {
|
||||
p := NewPrefixSuffix(test.prefix, test.suffix)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexPrefixSuffix(b *testing.B) {
|
||||
m := NewPrefixSuffix("qew", "sqw")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexPrefixSuffixParallel(b *testing.B) {
|
||||
m := NewPrefixSuffix("qew", "sqw")
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
57
vendor/github.com/gobwas/glob/match/prefix_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPrefixIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
prefix string
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"ab",
|
||||
"abc",
|
||||
0,
|
||||
[]int{2, 3},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
"fffabfff",
|
||||
3,
|
||||
[]int{2, 3, 4, 5},
|
||||
},
|
||||
} {
|
||||
p := NewPrefix(test.prefix)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexPrefix(b *testing.B) {
|
||||
m := NewPrefix("qew")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexPrefixParallel(b *testing.B) {
|
||||
m := NewPrefix("qew")
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
48
vendor/github.com/gobwas/glob/match/range.go
generated
vendored
Normal file
@ -0,0 +1,48 @@
package match

import (
    "fmt"
    "unicode/utf8"
)

type Range struct {
    Lo, Hi rune
    Not    bool
}

func NewRange(lo, hi rune, not bool) Range {
    return Range{lo, hi, not}
}

func (self Range) Len() int {
    return lenOne
}

func (self Range) Match(s string) bool {
    r, w := utf8.DecodeRuneInString(s)
    if len(s) > w {
        return false
    }

    inRange := r >= self.Lo && r <= self.Hi

    return inRange == !self.Not
}

func (self Range) Index(s string) (int, []int) {
    for i, r := range s {
        if self.Not != (r >= self.Lo && r <= self.Hi) {
            return i, segmentsByRuneLength[utf8.RuneLen(r)]
        }
    }

    return -1, nil
}

func (self Range) String() string {
    var not string
    if self.Not {
        not = "!"
    }
    return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi))
}
67
vendor/github.com/gobwas/glob/match/range_test.go
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRangeIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
lo, hi rune
|
||||
not bool
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
'a', 'z',
|
||||
false,
|
||||
"abc",
|
||||
0,
|
||||
[]int{1},
|
||||
},
|
||||
{
|
||||
'a', 'c',
|
||||
false,
|
||||
"abcd",
|
||||
0,
|
||||
[]int{1},
|
||||
},
|
||||
{
|
||||
'a', 'c',
|
||||
true,
|
||||
"abcd",
|
||||
3,
|
||||
[]int{1},
|
||||
},
|
||||
} {
|
||||
m := NewRange(test.lo, test.hi, test.not)
|
||||
index, segments := m.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexRange(b *testing.B) {
|
||||
m := NewRange('0', '9', false)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexRangeParallel(b *testing.B) {
|
||||
m := NewRange('0', '9', false)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
85
vendor/github.com/gobwas/glob/match/row.go
generated
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Row struct {
|
||||
Matchers Matchers
|
||||
RunesLength int
|
||||
Segments []int
|
||||
}
|
||||
|
||||
func NewRow(len int, m ...Matcher) Row {
|
||||
return Row{
|
||||
Matchers: Matchers(m),
|
||||
RunesLength: len,
|
||||
Segments: []int{len},
|
||||
}
|
||||
}
|
||||
|
||||
func (self Row) matchAll(s string) bool {
|
||||
var idx int
|
||||
for _, m := range self.Matchers {
|
||||
length := m.Len()
|
||||
|
||||
var next, i int
|
||||
for next = range s[idx:] {
|
||||
i++
|
||||
if i == length {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if i < length || !m.Match(s[idx:idx+next+1]) {
|
||||
return false
|
||||
}
|
||||
|
||||
idx += next + 1
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self Row) lenOk(s string) bool {
|
||||
var i int
|
||||
for range s {
|
||||
i++
|
||||
if i >= self.RunesLength {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self Row) Match(s string) bool {
|
||||
return self.lenOk(s) && self.matchAll(s)
|
||||
}
|
||||
|
||||
func (self Row) Len() (l int) {
|
||||
return self.RunesLength
|
||||
}
|
||||
|
||||
func (self Row) Index(s string) (int, []int) {
|
||||
if !self.lenOk(s) {
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
for i := range s {
|
||||
// this is not strict check but useful
|
||||
if len(s[i:]) < self.RunesLength {
|
||||
break
|
||||
}
|
||||
|
||||
if self.matchAll(s[i:]) {
|
||||
return i, self.Segments
|
||||
}
|
||||
}
|
||||
|
||||
return -1, nil
|
||||
}
|
||||
|
||||
func (self Row) String() string {
|
||||
return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers)
|
||||
}
|
82
vendor/github.com/gobwas/glob/match/row_test.go
generated
vendored
Normal file
@ -0,0 +1,82 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestRowIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
matchers Matchers
|
||||
length int
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
Matchers{
|
||||
NewText("abc"),
|
||||
NewText("def"),
|
||||
NewSingle(nil),
|
||||
},
|
||||
7,
|
||||
"qweabcdefghij",
|
||||
3,
|
||||
[]int{7},
|
||||
},
|
||||
{
|
||||
Matchers{
|
||||
NewText("abc"),
|
||||
NewText("def"),
|
||||
NewSingle(nil),
|
||||
},
|
||||
7,
|
||||
"abcd",
|
||||
-1,
|
||||
nil,
|
||||
},
|
||||
} {
|
||||
p := NewRow(test.length, test.matchers...)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkRowIndex(b *testing.B) {
|
||||
m := NewRow(
|
||||
7,
|
||||
Matchers{
|
||||
NewText("abc"),
|
||||
NewText("def"),
|
||||
NewSingle(nil),
|
||||
}...,
|
||||
)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexRowParallel(b *testing.B) {
|
||||
m := NewRow(
|
||||
7,
|
||||
Matchers{
|
||||
NewText("abc"),
|
||||
NewText("def"),
|
||||
NewSingle(nil),
|
||||
}...,
|
||||
)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
91
vendor/github.com/gobwas/glob/match/segments.go
generated
vendored
Normal file
@ -0,0 +1,91 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
type SomePool interface {
|
||||
Get() []int
|
||||
Put([]int)
|
||||
}
|
||||
|
||||
var segmentsPools [1024]sync.Pool
|
||||
|
||||
func toPowerOfTwo(v int) int {
|
||||
v--
|
||||
v |= v >> 1
|
||||
v |= v >> 2
|
||||
v |= v >> 4
|
||||
v |= v >> 8
|
||||
v |= v >> 16
|
||||
v++
|
||||
|
||||
return v
|
||||
}
|
||||
|
||||
const (
|
||||
cacheFrom = 16
|
||||
cacheToAndHigher = 1024
|
||||
cacheFromIndex = 15
|
||||
cacheToAndHigherIndex = 1023
|
||||
)
|
||||
|
||||
var (
|
||||
segments0 = []int{0}
|
||||
segments1 = []int{1}
|
||||
segments2 = []int{2}
|
||||
segments3 = []int{3}
|
||||
segments4 = []int{4}
|
||||
)
|
||||
|
||||
var segmentsByRuneLength [5][]int = [5][]int{
|
||||
0: segments0,
|
||||
1: segments1,
|
||||
2: segments2,
|
||||
3: segments3,
|
||||
4: segments4,
|
||||
}
|
||||
|
||||
func init() {
|
||||
for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 {
|
||||
func(i int) {
|
||||
segmentsPools[i-1] = sync.Pool{New: func() interface{} {
|
||||
return make([]int, 0, i)
|
||||
}}
|
||||
}(i)
|
||||
}
|
||||
}
|
||||
|
||||
func getTableIndex(c int) int {
|
||||
p := toPowerOfTwo(c)
|
||||
switch {
|
||||
case p >= cacheToAndHigher:
|
||||
return cacheToAndHigherIndex
|
||||
case p <= cacheFrom:
|
||||
return cacheFromIndex
|
||||
default:
|
||||
return p - 1
|
||||
}
|
||||
}
|
||||
|
||||
func acquireSegments(c int) []int {
|
||||
// make []int with less capacity than cacheFrom
|
||||
// is faster than acquiring it from pool
|
||||
if c < cacheFrom {
|
||||
return make([]int, 0, c)
|
||||
}
|
||||
|
||||
return segmentsPools[getTableIndex(c)].Get().([]int)[:0]
|
||||
}
|
||||
|
||||
func releaseSegments(s []int) {
|
||||
c := cap(s)
|
||||
|
||||
// make []int with less capacity than cacheFrom
|
||||
// is faster than acquiring it from pool
|
||||
if c < cacheFrom {
|
||||
return
|
||||
}
|
||||
|
||||
segmentsPools[getTableIndex(c)].Put(s)
|
||||
}
|
83
vendor/github.com/gobwas/glob/match/segments_test.go
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func benchPool(i int, b *testing.B) {
|
||||
pool := sync.Pool{New: func() interface{} {
|
||||
return make([]int, 0, i)
|
||||
}}
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
s := pool.Get().([]int)[:0]
|
||||
pool.Put(s)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func benchMake(i int, b *testing.B) {
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_ = make([]int, 0, i)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkSegmentsPool_1(b *testing.B) {
|
||||
benchPool(1, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_2(b *testing.B) {
|
||||
benchPool(2, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_4(b *testing.B) {
|
||||
benchPool(4, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_8(b *testing.B) {
|
||||
benchPool(8, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_16(b *testing.B) {
|
||||
benchPool(16, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_32(b *testing.B) {
|
||||
benchPool(32, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_64(b *testing.B) {
|
||||
benchPool(64, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_128(b *testing.B) {
|
||||
benchPool(128, b)
|
||||
}
|
||||
func BenchmarkSegmentsPool_256(b *testing.B) {
|
||||
benchPool(256, b)
|
||||
}
|
||||
|
||||
func BenchmarkSegmentsMake_1(b *testing.B) {
|
||||
benchMake(1, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_2(b *testing.B) {
|
||||
benchMake(2, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_4(b *testing.B) {
|
||||
benchMake(4, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_8(b *testing.B) {
|
||||
benchMake(8, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_16(b *testing.B) {
|
||||
benchMake(16, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_32(b *testing.B) {
|
||||
benchMake(32, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_64(b *testing.B) {
|
||||
benchMake(64, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_128(b *testing.B) {
|
||||
benchMake(128, b)
|
||||
}
|
||||
func BenchmarkSegmentsMake_256(b *testing.B) {
|
||||
benchMake(256, b)
|
||||
}
|
43
vendor/github.com/gobwas/glob/match/single.go
generated
vendored
Normal file
@ -0,0 +1,43 @@
package match

import (
    "fmt"
    "github.com/gobwas/glob/runes"
    "unicode/utf8"
)

// single represents ?
type Single struct {
    Separators []rune
}

func NewSingle(s []rune) Single {
    return Single{s}
}

func (self Single) Match(s string) bool {
    r, w := utf8.DecodeRuneInString(s)
    if len(s) > w {
        return false
    }

    return runes.IndexRune(self.Separators, r) == -1
}

func (self Single) Len() int {
    return lenOne
}

func (self Single) Index(s string) (int, []int) {
    for i, r := range s {
        if runes.IndexRune(self.Separators, r) == -1 {
            return i, segmentsByRuneLength[utf8.RuneLen(r)]
        }
    }

    return -1, nil
}

func (self Single) String() string {
    return fmt.Sprintf("<single:![%s]>", string(self.Separators))
}
57
vendor/github.com/gobwas/glob/match/single_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSingleIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
separators []rune
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
[]rune{'.'},
|
||||
".abc",
|
||||
1,
|
||||
[]int{1},
|
||||
},
|
||||
{
|
||||
[]rune{'.'},
|
||||
".",
|
||||
-1,
|
||||
nil,
|
||||
},
|
||||
} {
|
||||
p := NewSingle(test.separators)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSingle(b *testing.B) {
|
||||
m := NewSingle(bench_separators)
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSingleParallel(b *testing.B) {
|
||||
m := NewSingle(bench_separators)
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
35
vendor/github.com/gobwas/glob/match/suffix.go
generated
vendored
Normal file
@ -0,0 +1,35 @@
package match

import (
    "fmt"
    "strings"
)

type Suffix struct {
    Suffix string
}

func NewSuffix(s string) Suffix {
    return Suffix{s}
}

func (self Suffix) Len() int {
    return lenNo
}

func (self Suffix) Match(s string) bool {
    return strings.HasSuffix(s, self.Suffix)
}

func (self Suffix) Index(s string) (int, []int) {
    idx := strings.Index(s, self.Suffix)
    if idx == -1 {
        return -1, nil
    }

    return 0, []int{idx + len(self.Suffix)}
}

func (self Suffix) String() string {
    return fmt.Sprintf("<suffix:%s>", self.Suffix)
}
57
vendor/github.com/gobwas/glob/match/suffix_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSuffixIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
prefix string
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"ab",
|
||||
"abc",
|
||||
0,
|
||||
[]int{2},
|
||||
},
|
||||
{
|
||||
"ab",
|
||||
"fffabfff",
|
||||
0,
|
||||
[]int{5},
|
||||
},
|
||||
} {
|
||||
p := NewSuffix(test.prefix)
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSuffix(b *testing.B) {
|
||||
m := NewSuffix("qwe")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSuffixParallel(b *testing.B) {
|
||||
m := NewSuffix("qwe")
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
33
vendor/github.com/gobwas/glob/match/super.go
generated
vendored
Normal file
@ -0,0 +1,33 @@
package match

import (
    "fmt"
)

type Super struct{}

func NewSuper() Super {
    return Super{}
}

func (self Super) Match(s string) bool {
    return true
}

func (self Super) Len() int {
    return lenNo
}

func (self Super) Index(s string) (int, []int) {
    segments := acquireSegments(len(s) + 1)
    for i := range s {
        segments = append(segments, i)
    }
    segments = append(segments, len(s))

    return 0, segments
}

func (self Super) String() string {
    return fmt.Sprintf("<super>")
}
54
vendor/github.com/gobwas/glob/match/super_test.go
generated
vendored
Normal file
@ -0,0 +1,54 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSuperIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"abc",
|
||||
0,
|
||||
[]int{0, 1, 2, 3},
|
||||
},
|
||||
{
|
||||
"",
|
||||
0,
|
||||
[]int{0},
|
||||
},
|
||||
} {
|
||||
p := NewSuper()
|
||||
index, segments := p.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSuper(b *testing.B) {
|
||||
m := NewSuper()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexSuperParallel(b *testing.B) {
|
||||
m := NewSuper()
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
45
vendor/github.com/gobwas/glob/match/text.go
generated
vendored
Normal file
@ -0,0 +1,45 @@
package match

import (
    "fmt"
    "strings"
    "unicode/utf8"
)

// raw represents raw string to match
type Text struct {
    Str         string
    RunesLength int
    BytesLength int
    Segments    []int
}

func NewText(s string) Text {
    return Text{
        Str:         s,
        RunesLength: utf8.RuneCountInString(s),
        BytesLength: len(s),
        Segments:    []int{len(s)},
    }
}

func (self Text) Match(s string) bool {
    return self.Str == s
}

func (self Text) Len() int {
    return self.RunesLength
}

func (self Text) Index(s string) (int, []int) {
    index := strings.Index(s, self.Str)
    if index == -1 {
        return -1, nil
    }

    return index, self.Segments
}

func (self Text) String() string {
    return fmt.Sprintf("<text:`%v`>", self.Str)
}
57
vendor/github.com/gobwas/glob/match/text_test.go
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTextIndex(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
text string
|
||||
fixture string
|
||||
index int
|
||||
segments []int
|
||||
}{
|
||||
{
|
||||
"b",
|
||||
"abc",
|
||||
1,
|
||||
[]int{1},
|
||||
},
|
||||
{
|
||||
"f",
|
||||
"abcd",
|
||||
-1,
|
||||
nil,
|
||||
},
|
||||
} {
|
||||
m := NewText(test.text)
|
||||
index, segments := m.Index(test.fixture)
|
||||
if index != test.index {
|
||||
t.Errorf("#%d unexpected index: exp: %d, act: %d", id, test.index, index)
|
||||
}
|
||||
if !reflect.DeepEqual(segments, test.segments) {
|
||||
t.Errorf("#%d unexpected segments: exp: %v, act: %v", id, test.segments, segments)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexText(b *testing.B) {
|
||||
m := NewText("foo")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexTextParallel(b *testing.B) {
|
||||
m := NewText("foo")
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, s := m.Index(bench_pattern)
|
||||
releaseSegments(s)
|
||||
}
|
||||
})
|
||||
}
|
225
vendor/github.com/gobwas/glob/parser.go
generated
vendored
Normal file
@ -0,0 +1,225 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type node interface {
|
||||
children() []node
|
||||
append(node)
|
||||
}
|
||||
|
||||
type nodeImpl struct {
|
||||
desc []node
|
||||
}
|
||||
|
||||
func (n *nodeImpl) append(c node) {
|
||||
n.desc = append(n.desc, c)
|
||||
}
|
||||
func (n *nodeImpl) children() []node {
|
||||
return n.desc
|
||||
}
|
||||
|
||||
type nodeList struct {
|
||||
nodeImpl
|
||||
not bool
|
||||
chars string
|
||||
}
|
||||
type nodeRange struct {
|
||||
nodeImpl
|
||||
not bool
|
||||
lo, hi rune
|
||||
}
|
||||
type nodeText struct {
|
||||
nodeImpl
|
||||
text string
|
||||
}
|
||||
|
||||
type nodePattern struct{ nodeImpl }
|
||||
type nodeAny struct{ nodeImpl }
|
||||
type nodeSuper struct{ nodeImpl }
|
||||
type nodeSingle struct{ nodeImpl }
|
||||
type nodeAnyOf struct{ nodeImpl }
|
||||
|
||||
type tree struct {
|
||||
root node
|
||||
current node
|
||||
path []node
|
||||
}
|
||||
|
||||
func (t *tree) enter(c node) {
|
||||
if t.root == nil {
|
||||
t.root = c
|
||||
t.current = c
|
||||
return
|
||||
}
|
||||
|
||||
t.current.append(c)
|
||||
t.path = append(t.path, c)
|
||||
t.current = c
|
||||
}
|
||||
|
||||
func (t *tree) leave() {
|
||||
if len(t.path)-1 <= 0 {
|
||||
t.current = t.root
|
||||
t.path = nil
|
||||
return
|
||||
}
|
||||
|
||||
t.path = t.path[:len(t.path)-1]
|
||||
t.current = t.path[len(t.path)-1]
|
||||
}
|
||||
|
||||
type parseFn func(*tree, *lexer) (parseFn, error)
|
||||
|
||||
func parse(lexer *lexer) (*nodePattern, error) {
|
||||
var parser parseFn
|
||||
|
||||
root := &nodePattern{}
|
||||
tree := &tree{}
|
||||
tree.enter(root)
|
||||
|
||||
for parser = parserMain; ; {
|
||||
next, err := parser(tree, lexer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if next == nil {
|
||||
break
|
||||
}
|
||||
|
||||
parser = next
|
||||
}
|
||||
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func parserMain(tree *tree, lexer *lexer) (parseFn, error) {
|
||||
for stop := false; !stop; {
|
||||
item := lexer.nextItem()
|
||||
|
||||
switch item.t {
|
||||
case item_eof:
|
||||
stop = true
|
||||
continue
|
||||
|
||||
case item_error:
|
||||
return nil, errors.New(item.s)
|
||||
|
||||
case item_text:
|
||||
tree.current.append(&nodeText{text: item.s})
|
||||
return parserMain, nil
|
||||
|
||||
case item_any:
|
||||
tree.current.append(&nodeAny{})
|
||||
return parserMain, nil
|
||||
|
||||
case item_super:
|
||||
tree.current.append(&nodeSuper{})
|
||||
return parserMain, nil
|
||||
|
||||
case item_single:
|
||||
tree.current.append(&nodeSingle{})
|
||||
return parserMain, nil
|
||||
|
||||
case item_range_open:
|
||||
return parserRange, nil
|
||||
|
||||
case item_terms_open:
|
||||
tree.enter(&nodeAnyOf{})
|
||||
tree.enter(&nodePattern{})
|
||||
return parserMain, nil
|
||||
|
||||
case item_separator:
|
||||
tree.leave()
|
||||
tree.enter(&nodePattern{})
|
||||
return parserMain, nil
|
||||
|
||||
case item_terms_close:
|
||||
tree.leave()
|
||||
tree.leave()
|
||||
return parserMain, nil
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected token: %s", item)
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func parserRange(tree *tree, lexer *lexer) (parseFn, error) {
|
||||
var (
|
||||
not bool
|
||||
lo rune
|
||||
hi rune
|
||||
chars string
|
||||
)
|
||||
|
||||
for {
|
||||
item := lexer.nextItem()
|
||||
|
||||
switch item.t {
|
||||
case item_eof:
|
||||
return nil, errors.New("unexpected end")
|
||||
|
||||
case item_error:
|
||||
return nil, errors.New(item.s)
|
||||
|
||||
case item_not:
|
||||
not = true
|
||||
|
||||
case item_range_lo:
|
||||
r, w := utf8.DecodeRuneInString(item.s)
|
||||
if len(item.s) > w {
|
||||
return nil, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
|
||||
lo = r
|
||||
|
||||
case item_range_between:
|
||||
//
|
||||
|
||||
case item_range_hi:
|
||||
r, w := utf8.DecodeRuneInString(item.s)
|
||||
if len(item.s) > w {
|
||||
return nil, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
|
||||
hi = r
|
||||
|
||||
if hi < lo {
|
||||
return nil, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
|
||||
}
|
||||
|
||||
case item_text:
|
||||
chars = item.s
|
||||
|
||||
case item_range_close:
|
||||
isRange := lo != 0 && hi != 0
|
||||
isChars := chars != ""
|
||||
|
||||
if isChars == isRange {
|
||||
return nil, fmt.Errorf("could not parse range")
|
||||
}
|
||||
|
||||
if isRange {
|
||||
tree.current.append(&nodeRange{
|
||||
lo: lo,
|
||||
hi: hi,
|
||||
not: not,
|
||||
})
|
||||
} else {
|
||||
tree.current.append(&nodeList{
|
||||
chars: chars,
|
||||
not: not,
|
||||
})
|
||||
}
|
||||
|
||||
return parserMain, nil
|
||||
}
|
||||
}
|
||||
}
|
219
vendor/github.com/gobwas/glob/parser_test.go
generated
vendored
Normal file
@ -0,0 +1,219 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestParseString(t *testing.T) {
|
||||
for id, test := range []struct {
|
||||
pattern string
|
||||
tree node
|
||||
}{
|
||||
{
|
||||
pattern: "abc",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeText{text: "abc"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "a*c",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeText{text: "a"},
|
||||
&nodeAny{},
|
||||
&nodeText{text: "c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "a**c",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeText{text: "a"},
|
||||
&nodeSuper{},
|
||||
&nodeText{text: "c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "a?c",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeText{text: "a"},
|
||||
&nodeSingle{},
|
||||
&nodeText{text: "c"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[!a-z]",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeRange{lo: 'a', hi: 'z', not: true},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "[az]",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeList{chars: "az"},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "{a,z}",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeAnyOf{nodeImpl: nodeImpl{desc: []node{
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeText{text: "a"},
|
||||
}},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeText{text: "z"},
|
||||
}},
|
||||
},
|
||||
}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
pattern: "{a,{x,y},?,[a-z],[!qwe]}",
|
||||
tree: &nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeAnyOf{nodeImpl: nodeImpl{desc: []node{
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeText{text: "a"},
|
||||
}},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeAnyOf{nodeImpl: nodeImpl{desc: []node{
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeText{text: "x"},
|
||||
}},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeText{text: "y"},
|
||||
}},
|
||||
},
|
||||
}}},
|
||||
}},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{desc: []node{
|
||||
&nodeSingle{},
|
||||
}},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeRange{lo: 'a', hi: 'z', not: false},
|
||||
},
|
||||
},
|
||||
},
|
||||
&nodePattern{
|
||||
nodeImpl: nodeImpl{
|
||||
desc: []node{
|
||||
&nodeList{chars: "qwe", not: true},
|
||||
},
|
||||
},
|
||||
},
|
||||
}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
} {
|
||||
pattern, err := parse(newLexer(test.pattern))
|
||||
if err != nil {
|
||||
t.Errorf("#%d %s", id, err)
|
||||
continue
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(test.tree, pattern) {
|
||||
t.Errorf("#%d tries are not equal", id)
|
||||
if err = nodeEqual(test.tree, pattern); err != nil {
|
||||
t.Errorf("#%d %s", id, err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const abstractNodeImpl = "nodeImpl"
|
||||
|
||||
func nodeEqual(a, b node) error {
|
||||
if (a == nil || b == nil) && a != b {
|
||||
return fmt.Errorf("nodes are not equal: exp %s, act %s", a, b)
|
||||
}
|
||||
|
||||
aValue, bValue := reflect.Indirect(reflect.ValueOf(a)), reflect.Indirect(reflect.ValueOf(b))
|
||||
aType, bType := aValue.Type(), bValue.Type()
|
||||
if aType != bType {
|
||||
return fmt.Errorf("nodes are not equal: exp %s, act %s", aValue.Type(), bValue.Type())
|
||||
}
|
||||
|
||||
for i := 0; i < aType.NumField(); i++ {
|
||||
var eq bool
|
||||
|
||||
f := aType.Field(i).Name
|
||||
if f == abstractNodeImpl {
|
||||
continue
|
||||
}
|
||||
|
||||
af, bf := aValue.FieldByName(f), bValue.FieldByName(f)
|
||||
|
||||
switch af.Kind() {
|
||||
case reflect.String:
|
||||
eq = af.String() == bf.String()
|
||||
case reflect.Bool:
|
||||
eq = af.Bool() == bf.Bool()
|
||||
default:
|
||||
eq = fmt.Sprint(af) == fmt.Sprint(bf)
|
||||
}
|
||||
|
||||
if !eq {
|
||||
return fmt.Errorf("nodes<%s> %q fields are not equal: exp %q, act %q", aType, f, af, bf)
|
||||
}
|
||||
}
|
||||
|
||||
for i, aDesc := range a.children() {
|
||||
if len(b.children())-1 < i {
|
||||
return fmt.Errorf("node does not have enough children (got %d children, wanted %d-th token)", len(b.children()), i)
|
||||
}
|
||||
|
||||
bDesc := b.children()[i]
|
||||
|
||||
if err := nodeEqual(aDesc, bDesc); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
148
vendor/github.com/gobwas/glob/readme.md
generated
vendored
Normal file
@ -0,0 +1,148 @@
# glob.[go](https://golang.org)

[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url]

> Go Globbing Library.

## Install

```shell
go get github.com/gobwas/glob
```

## Example

```go
package main

import "github.com/gobwas/glob"

func main() {
    var g glob.Glob

    // create simple glob
    g = glob.MustCompile("*.github.com")
    g.Match("api.github.com") // true

    // quote meta characters and then create simple glob
    g = glob.MustCompile(glob.QuoteMeta("*.github.com"))
    g.Match("*.github.com") // true

    // create new glob with set of delimiters as ["."]
    g = glob.MustCompile("api.*.com", '.')
    g.Match("api.github.com") // true
    g.Match("api.gi.hub.com") // false

    // create new glob with set of delimiters as ["."]
    // but now with super wildcard
    g = glob.MustCompile("api.**.com", '.')
    g.Match("api.github.com") // true
    g.Match("api.gi.hub.com") // true

    // create glob with single symbol wildcard
    g = glob.MustCompile("?at")
    g.Match("cat") // true
    g.Match("fat") // true
    g.Match("at") // false

    // create glob with single symbol wildcard and delimiters ['f']
    g = glob.MustCompile("?at", 'f')
    g.Match("cat") // true
    g.Match("fat") // false
    g.Match("at") // false

    // create glob with character-list matchers
    g = glob.MustCompile("[abc]at")
    g.Match("cat") // true
    g.Match("bat") // true
    g.Match("fat") // false
    g.Match("at") // false

    // create glob with character-list matchers
    g = glob.MustCompile("[!abc]at")
    g.Match("cat") // false
    g.Match("bat") // false
    g.Match("fat") // true
    g.Match("at") // false

    // create glob with character-range matchers
    g = glob.MustCompile("[a-c]at")
    g.Match("cat") // true
    g.Match("bat") // true
    g.Match("fat") // false
    g.Match("at") // false

    // create glob with character-range matchers
    g = glob.MustCompile("[!a-c]at")
    g.Match("cat") // false
    g.Match("bat") // false
    g.Match("fat") // true
    g.Match("at") // false

    // create glob with pattern-alternatives list
    g = glob.MustCompile("{cat,bat,[fr]at}")
    g.Match("cat") // true
    g.Match("bat") // true
    g.Match("fat") // true
    g.Match("rat") // true
    g.Match("at") // false
    g.Match("zat") // false
}
```

## Performance

This library is designed for compile-once patterns: compilation may take some time, but matching strings against the compiled `glob.Glob` is then much faster than re-parsing the pattern on every call.

If you do not keep the compiled `glob.Glob` object around and instead run `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower.
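To make the compile-once advice concrete, here is a minimal sketch; the pattern and fixture strings are illustrative assumptions, and only the `Compile`/`MustCompile`/`Match` API shown above is used:

```go
package main

import (
    "fmt"

    "github.com/gobwas/glob"
)

func main() {
    fixtures := []string{"https://account.google.com", "https://google.com"}

    // Fast path: compile the pattern once and reuse the Glob for every match.
    g := glob.MustCompile("https://*.google.*")
    for _, s := range fixtures {
        fmt.Println(s, g.Match(s))
    }

    // Slow path: re-compiling inside the loop repeats the parsing work on
    // every iteration, which is exactly what the note above warns against.
    for _, s := range fixtures {
        fmt.Println(s, glob.MustCompile("https://*.google.*").Match(s))
    }
}
```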
Run `go test -bench=.` from the source root to see the benchmarks:

Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199
`https://*.google.*` | `https://account.google.com` | `true` | 96
`https://*.google.*` | `https://google.com` | `false` | 66
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197
`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22
`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24
`abc*` | `abcdef` | `true` | 8.15
`abc*` | `af` | `false` | 5.68
`*def` | `abcdef` | `true` | 8.84
`*def` | `af` | `false` | 5.74
`ab*ef` | `abcdef` | `true` | 15.2
`ab*ef` | `af` | `false` | 10.4

The same things with `regexp` package:

Pattern | Fixture | Match | Speed (ns/op)
--------|---------|-------|--------------
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383
`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205
`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272
`^abc.*$` | `abcdef` | `true` | 237
`^abc.*$` | `af` | `false` | 100
`^.*def$` | `abcdef` | `true` | 464
`^.*def$` | `af` | `false` | 265
`^ab.*ef$` | `abcdef` | `true` | 375
`^ab.*ef$` | `af` | `false` | 145

[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg
[godoc-url]: https://godoc.org/github.com/gobwas/glob
[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master
[travis-url]: https://travis-ci.org/gobwas/glob

## Syntax

Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm),
except that `**` is a super-asterisk that is not sensitive to separators.
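A short sketch of that separator behaviour, assuming the same patterns and `'.'` separator used in the example section above:

```go
package main

import (
    "fmt"

    "github.com/gobwas/glob"
)

func main() {
    // With '.' registered as a separator, a single * will not cross a dot...
    single := glob.MustCompile("api.*.com", '.')
    fmt.Println(single.Match("api.github.com")) // true
    fmt.Println(single.Match("api.gi.hub.com")) // false

    // ...while the super-asterisk ** matches across separators as well.
    super := glob.MustCompile("api.**.com", '.')
    fmt.Println(super.Match("api.gi.hub.com")) // true
}
```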
154
vendor/github.com/gobwas/glob/runes/runes.go
generated
vendored
Normal file
@ -0,0 +1,154 @@
|
||||
package runes
|
||||
|
||||
func Index(s, needle []rune) int {
|
||||
ls, ln := len(s), len(needle)
|
||||
|
||||
switch {
|
||||
case ln == 0:
|
||||
return 0
|
||||
case ln == 1:
|
||||
return IndexRune(s, needle[0])
|
||||
case ln == ls:
|
||||
if Equal(s, needle) {
|
||||
return 0
|
||||
}
|
||||
return -1
|
||||
case ln > ls:
|
||||
return -1
|
||||
}
|
||||
|
||||
head:
|
||||
for i := 0; i < ls && ls-i >= ln; i++ {
|
||||
for y := 0; y < ln; y++ {
|
||||
if s[i+y] != needle[y] {
|
||||
continue head
|
||||
}
|
||||
}
|
||||
|
||||
return i
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
func LastIndex(s, needle []rune) int {
|
||||
ls, ln := len(s), len(needle)
|
||||
|
||||
switch {
|
||||
case ln == 0:
|
||||
if ls == 0 {
|
||||
return 0
|
||||
}
|
||||
return ls
|
||||
case ln == 1:
|
||||
return IndexLastRune(s, needle[0])
|
||||
case ln == ls:
|
||||
if Equal(s, needle) {
|
||||
return 0
|
||||
}
|
||||
return -1
|
||||
case ln > ls:
|
||||
return -1
|
||||
}
|
||||
|
||||
head:
|
||||
for i := ls - 1; i >= 0 && i >= ln; i-- {
|
||||
for y := ln - 1; y >= 0; y-- {
|
||||
if s[i-(ln-y-1)] != needle[y] {
|
||||
continue head
|
||||
}
|
||||
}
|
||||
|
||||
return i - ln + 1
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
// IndexAny returns the index of the first instance of any Unicode code point
|
||||
// from chars in s, or -1 if no Unicode code point from chars is present in s.
|
||||
func IndexAny(s, chars []rune) int {
|
||||
if len(chars) > 0 {
|
||||
for i, c := range s {
|
||||
for _, m := range chars {
|
||||
if c == m {
|
||||
return i
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func Contains(s, needle []rune) bool {
|
||||
return Index(s, needle) >= 0
|
||||
}
|
||||
|
||||
func Max(s []rune) (max rune) {
|
||||
for _, r := range s {
|
||||
if r > max {
|
||||
max = r
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func Min(s []rune) rune {
|
||||
min := rune(-1)
|
||||
for _, r := range s {
|
||||
if min == -1 {
|
||||
min = r
|
||||
continue
|
||||
}
|
||||
|
||||
if r < min {
|
||||
min = r
|
||||
}
|
||||
}
|
||||
|
||||
return min
|
||||
}
|
||||
|
||||
func IndexRune(s []rune, r rune) int {
|
||||
for i, c := range s {
|
||||
if c == r {
|
||||
return i
|
||||
}
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
func IndexLastRune(s []rune, r rune) int {
|
||||
for i := len(s) - 1; i >= 0; i-- {
|
||||
if s[i] == r {
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
||||
|
||||
func Equal(a, b []rune) bool {
|
||||
if len(a) == len(b) {
|
||||
for i := 0; i < len(a); i++ {
|
||||
if a[i] != b[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// HasPrefix tests whether the string s begins with prefix.
|
||||
func HasPrefix(s, prefix []rune) bool {
|
||||
return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix)
|
||||
}
|
||||
|
||||
// HasSuffix tests whether the string s ends with suffix.
|
||||
func HasSuffix(s, suffix []rune) bool {
|
||||
return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix)
|
||||
}
|
222
vendor/github.com/gobwas/glob/runes/runes_test.go
generated
vendored
Normal file
@ -0,0 +1,222 @@
|
||||
package runes
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type indexTest struct {
|
||||
s []rune
|
||||
sep []rune
|
||||
out int
|
||||
}
|
||||
|
||||
type equalTest struct {
|
||||
a []rune
|
||||
b []rune
|
||||
out bool
|
||||
}
|
||||
|
||||
func newIndexTest(s, sep string, out int) indexTest {
|
||||
return indexTest{[]rune(s), []rune(sep), out}
|
||||
}
|
||||
func newEqualTest(s, sep string, out bool) equalTest {
|
||||
return equalTest{[]rune(s), []rune(sep), out}
|
||||
}
|
||||
|
||||
var dots = "1....2....3....4"
|
||||
|
||||
var indexTests = []indexTest{
|
||||
newIndexTest("", "", 0),
|
||||
newIndexTest("", "a", -1),
|
||||
newIndexTest("", "foo", -1),
|
||||
newIndexTest("fo", "foo", -1),
|
||||
newIndexTest("foo", "foo", 0),
|
||||
newIndexTest("oofofoofooo", "f", 2),
|
||||
newIndexTest("oofofoofooo", "foo", 4),
|
||||
newIndexTest("barfoobarfoo", "foo", 3),
|
||||
newIndexTest("foo", "", 0),
|
||||
newIndexTest("foo", "o", 1),
|
||||
newIndexTest("abcABCabc", "A", 3),
|
||||
// cases with one byte strings - test special case in Index()
|
||||
newIndexTest("", "a", -1),
|
||||
newIndexTest("x", "a", -1),
|
||||
newIndexTest("x", "x", 0),
|
||||
newIndexTest("abc", "a", 0),
|
||||
newIndexTest("abc", "b", 1),
|
||||
newIndexTest("abc", "c", 2),
|
||||
newIndexTest("abc", "x", -1),
|
||||
}
|
||||
|
||||
var lastIndexTests = []indexTest{
|
||||
newIndexTest("", "", 0),
|
||||
newIndexTest("", "a", -1),
|
||||
newIndexTest("", "foo", -1),
|
||||
newIndexTest("fo", "foo", -1),
|
||||
newIndexTest("foo", "foo", 0),
|
||||
newIndexTest("foo", "f", 0),
|
||||
newIndexTest("oofofoofooo", "f", 7),
|
||||
newIndexTest("oofofoofooo", "foo", 7),
|
||||
newIndexTest("barfoobarfoo", "foo", 9),
|
||||
newIndexTest("foo", "", 3),
|
||||
newIndexTest("foo", "o", 2),
|
||||
newIndexTest("abcABCabc", "A", 3),
|
||||
newIndexTest("abcABCabc", "a", 6),
|
||||
}
|
||||
|
||||
var indexAnyTests = []indexTest{
|
||||
newIndexTest("", "", -1),
|
||||
newIndexTest("", "a", -1),
|
||||
newIndexTest("", "abc", -1),
|
||||
newIndexTest("a", "", -1),
|
||||
newIndexTest("a", "a", 0),
|
||||
newIndexTest("aaa", "a", 0),
|
||||
newIndexTest("abc", "xyz", -1),
|
||||
newIndexTest("abc", "xcz", 2),
|
||||
newIndexTest("a☺b☻c☹d", "uvw☻xyz", 3),
|
||||
newIndexTest("aRegExp*", ".(|)*+?^$[]", 7),
|
||||
newIndexTest(dots+dots+dots, " ", -1),
|
||||
}
|
||||
|
||||
// Execute f on each test case. funcName should be the name of f; it's used
|
||||
// in failure reports.
|
||||
func runIndexTests(t *testing.T, f func(s, sep []rune) int, funcName string, testCases []indexTest) {
|
||||
for _, test := range testCases {
|
||||
actual := f(test.s, test.sep)
|
||||
if actual != test.out {
|
||||
t.Errorf("%s(%q,%q) = %v; want %v", funcName, test.s, test.sep, actual, test.out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIndex(t *testing.T) { runIndexTests(t, Index, "Index", indexTests) }
|
||||
func TestLastIndex(t *testing.T) { runIndexTests(t, LastIndex, "LastIndex", lastIndexTests) }
|
||||
func TestIndexAny(t *testing.T) { runIndexTests(t, IndexAny, "IndexAny", indexAnyTests) }
|
||||
|
||||
var equalTests = []equalTest{
|
||||
newEqualTest("a", "a", true),
|
||||
newEqualTest("a", "b", false),
|
||||
newEqualTest("a☺b☻c☹d", "uvw☻xyz", false),
|
||||
newEqualTest("a☺b☻c☹d", "a☺b☻c☹d", true),
|
||||
}
|
||||
|
||||
func TestEqual(t *testing.T) {
|
||||
for _, test := range equalTests {
|
||||
actual := Equal(test.a, test.b)
|
||||
if actual != test.out {
|
||||
t.Errorf("Equal(%q,%q) = %v; want %v", test.a, test.b, actual, test.out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkLastIndexRunes(b *testing.B) {
|
||||
r := []rune("abcdef")
|
||||
n := []rune("cd")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
LastIndex(r, n)
|
||||
}
|
||||
}
|
||||
func BenchmarkLastIndexStrings(b *testing.B) {
|
||||
r := "abcdef"
|
||||
n := "cd"
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
strings.LastIndex(r, n)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexAnyRunes(b *testing.B) {
|
||||
s := []rune("...b...")
|
||||
c := []rune("abc")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
IndexAny(s, c)
|
||||
}
|
||||
}
|
||||
func BenchmarkIndexAnyStrings(b *testing.B) {
|
||||
s := "...b..."
|
||||
c := "abc"
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
strings.IndexAny(s, c)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexRuneRunes(b *testing.B) {
|
||||
s := []rune("...b...")
|
||||
r := 'b'
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
IndexRune(s, r)
|
||||
}
|
||||
}
|
||||
func BenchmarkIndexRuneStrings(b *testing.B) {
|
||||
s := "...b..."
|
||||
r := 'b'
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
strings.IndexRune(s, r)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIndexRunes(b *testing.B) {
|
||||
r := []rune("abcdef")
|
||||
n := []rune("cd")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
Index(r, n)
|
||||
}
|
||||
}
|
||||
func BenchmarkIndexStrings(b *testing.B) {
|
||||
r := "abcdef"
|
||||
n := "cd"
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
strings.Index(r, n)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEqualRunes(b *testing.B) {
|
||||
x := []rune("abc")
|
||||
y := []rune("abc")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if Equal(x, y) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkEqualStrings(b *testing.B) {
|
||||
x := "abc"
|
||||
y := "abc"
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if x == y {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNotEqualRunes(b *testing.B) {
|
||||
x := []rune("abc")
|
||||
y := []rune("abcd")
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if Equal(x, y) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkNotEqualStrings(b *testing.B) {
|
||||
x := "abc"
|
||||
y := "abcd"
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
if x == y {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
13
vendor/github.com/gobwas/glob/strings/strings.go
generated
vendored
Normal file
@ -0,0 +1,13 @@
package strings

import "strings"

func IndexAnyRunes(s string, rs []rune) int {
    for _, r := range rs {
        if i := strings.IndexRune(s, r); i != -1 {
            return i
        }
    }

    return -1
}
6
vendor/manifest
vendored
@ -31,6 +31,12 @@
        "revision": "bf29d7cd9038386a5b4a22e2d73c8fb20ae14602",
        "branch": "master"
    },
    {
        "importpath": "github.com/gobwas/glob",
        "repository": "https://github.com/gobwas/glob",
        "revision": "a3f5513f64fe4307f2c71ea06b25f6154eb9dc5f",
        "branch": "master"
    },
    {
        "importpath": "github.com/golang/snappy",
        "repository": "https://github.com/golang/snappy",