vendor: Update github.com/gobwas/glob

Jakob Borg 2016-09-13 21:42:15 +02:00
parent 06dc91fadf
commit c3c7798446
15 changed files with 899 additions and 882 deletions


@@ -1,508 +0,0 @@
package glob
// TODO use constructor with all matchers, and to their structs private
// TODO glue multiple Text nodes (like after QuoteMeta)
import (
"fmt"
"github.com/gobwas/glob/match"
"github.com/gobwas/glob/runes"
"reflect"
)
func optimize(matcher match.Matcher) match.Matcher {
switch m := matcher.(type) {
case match.Any:
if len(m.Separators) == 0 {
return match.NewSuper()
}
case match.AnyOf:
if len(m.Matchers) == 1 {
return m.Matchers[0]
}
return m
case match.List:
if m.Not == false && len(m.List) == 1 {
return match.NewText(string(m.List))
}
return m
case match.BTree:
m.Left = optimize(m.Left)
m.Right = optimize(m.Right)
r, ok := m.Value.(match.Text)
if !ok {
return m
}
leftNil := m.Left == nil
rightNil := m.Right == nil
if leftNil && rightNil {
return match.NewText(r.Str)
}
_, leftSuper := m.Left.(match.Super)
lp, leftPrefix := m.Left.(match.Prefix)
_, rightSuper := m.Right.(match.Super)
rs, rightSuffix := m.Right.(match.Suffix)
if leftSuper && rightSuper {
return match.NewContains(r.Str, false)
}
if leftSuper && rightNil {
return match.NewSuffix(r.Str)
}
if rightSuper && leftNil {
return match.NewPrefix(r.Str)
}
if leftNil && rightSuffix {
return match.NewPrefixSuffix(r.Str, rs.Suffix)
}
if rightNil && leftPrefix {
return match.NewPrefixSuffix(lp.Prefix, r.Str)
}
return m
}
return matcher
}
func glueMatchers(matchers []match.Matcher) match.Matcher {
var (
glued []match.Matcher
winner match.Matcher
)
maxLen := -1
if m := glueAsEvery(matchers); m != nil {
glued = append(glued, m)
return m
}
if m := glueAsRow(matchers); m != nil {
glued = append(glued, m)
return m
}
for _, g := range glued {
if l := g.Len(); l > maxLen {
maxLen = l
winner = g
}
}
return winner
}
func glueAsRow(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
c []match.Matcher
l int
)
for _, matcher := range matchers {
if ml := matcher.Len(); ml == -1 {
return nil
} else {
c = append(c, matcher)
l += ml
}
}
return match.NewRow(l, c...)
}
func glueAsEvery(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
hasAny bool
hasSuper bool
hasSingle bool
min int
separator []rune
)
for i, matcher := range matchers {
var sep []rune
switch m := matcher.(type) {
case match.Super:
sep = []rune{}
hasSuper = true
case match.Any:
sep = m.Separators
hasAny = true
case match.Single:
sep = m.Separators
hasSingle = true
min++
case match.List:
if !m.Not {
return nil
}
sep = m.List
hasSingle = true
min++
default:
return nil
}
// initialize
if i == 0 {
separator = sep
}
if runes.Equal(sep, separator) {
continue
}
return nil
}
if hasSuper && !hasAny && !hasSingle {
return match.NewSuper()
}
if hasAny && !hasSuper && !hasSingle {
return match.NewAny(separator)
}
if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
return match.NewMin(min)
}
every := match.NewEveryOf()
if min > 0 {
every.Add(match.NewMin(min))
if !hasAny && !hasSuper {
every.Add(match.NewMax(min))
}
}
if len(separator) > 0 {
every.Add(match.NewContains(string(separator), true))
}
return every
}
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
var done match.Matcher
var left, right, count int
for l := 0; l < len(matchers); l++ {
for r := len(matchers); r > l; r-- {
if glued := glueMatchers(matchers[l:r]); glued != nil {
var swap bool
if done == nil {
swap = true
} else {
cl, gl := done.Len(), glued.Len()
swap = cl > -1 && gl > -1 && gl > cl
swap = swap || count < r-l
}
if swap {
done = glued
left = l
right = r
count = r - l
}
}
}
}
if done == nil {
return matchers
}
next := append(append([]match.Matcher{}, matchers[:left]...), done)
if right < len(matchers) {
next = append(next, matchers[right:]...)
}
if len(next) == len(matchers) {
return next
}
return minimizeMatchers(next)
}
func minimizeAnyOf(children []node) node {
var nodes [][]node
var min int
var idx int
for i, desc := range children {
pat, ok := desc.(*nodePattern)
if !ok {
return nil
}
n := pat.children()
ln := len(n)
if len(nodes) == 0 || (ln < min) {
min = ln
idx = i
}
nodes = append(nodes, pat.children())
}
minNodes := nodes[idx]
if idx+1 < len(nodes) {
nodes = append(nodes[:idx], nodes[idx+1:]...)
} else {
nodes = nodes[:idx]
}
var commonLeft []node
var commonLeftCount int
for i, n := range minNodes {
has := true
for _, t := range nodes {
if !reflect.DeepEqual(n, t[i]) {
has = false
break
}
}
if has {
commonLeft = append(commonLeft, n)
commonLeftCount++
} else {
break
}
}
var commonRight []node
var commonRightCount int
for i := min - 1; i > commonLeftCount-1; i-- {
n := minNodes[i]
has := true
for _, t := range nodes {
if !reflect.DeepEqual(n, t[len(t)-(min-i)]) {
has = false
break
}
}
if has {
commonRight = append(commonRight, n)
commonRightCount++
} else {
break
}
}
if commonLeftCount == 0 && commonRightCount == 0 {
return nil
}
nodes = append(nodes, minNodes)
nodes[len(nodes)-1], nodes[idx] = nodes[idx], nodes[len(nodes)-1]
var result []node
if commonLeftCount > 0 {
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: commonLeft}})
}
var anyOf []node
for _, n := range nodes {
if commonLeftCount+commonRightCount == len(n) {
anyOf = append(anyOf, nil)
} else {
anyOf = append(anyOf, &nodePattern{nodeImpl: nodeImpl{desc: n[commonLeftCount : len(n)-commonRightCount]}})
}
}
anyOf = uniqueNodes(anyOf)
if len(anyOf) == 1 {
if anyOf[0] != nil {
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: anyOf}})
}
} else {
result = append(result, &nodeAnyOf{nodeImpl: nodeImpl{desc: anyOf}})
}
if commonRightCount > 0 {
result = append(result, &nodePattern{nodeImpl: nodeImpl{desc: commonRight}})
}
return &nodePattern{nodeImpl: nodeImpl{desc: result}}
}
func uniqueNodes(nodes []node) (result []node) {
head:
for _, n := range nodes {
for _, e := range result {
if reflect.DeepEqual(e, n) {
continue head
}
}
result = append(result, n)
}
return
}
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
if len(matchers) == 0 {
return nil, fmt.Errorf("compile error: need at least one matcher")
}
if len(matchers) == 1 {
return matchers[0], nil
}
if m := glueMatchers(matchers); m != nil {
return m, nil
}
idx := -1
maxLen := -1
var val match.Matcher
for i, matcher := range matchers {
if l := matcher.Len(); l != -1 && l >= maxLen {
maxLen = l
idx = i
val = matcher
}
}
if val == nil { // not found matcher with static length
r, err := compileMatchers(matchers[1:])
if err != nil {
return nil, err
}
return match.NewBTree(matchers[0], nil, r), nil
}
left := matchers[:idx]
var right []match.Matcher
if len(matchers) > idx+1 {
right = matchers[idx+1:]
}
var l, r match.Matcher
var err error
if len(left) > 0 {
l, err = compileMatchers(left)
if err != nil {
return nil, err
}
}
if len(right) > 0 {
r, err = compileMatchers(right)
if err != nil {
return nil, err
}
}
return match.NewBTree(val, l, r), nil
}
func do(leaf node, s []rune) (m match.Matcher, err error) {
switch n := leaf.(type) {
case *nodeAnyOf:
// todo this could be faster on pattern_alternatives_combine_lite
if n := minimizeAnyOf(n.children()); n != nil {
return do(n, s)
}
var matchers []match.Matcher
for _, desc := range n.children() {
if desc == nil {
matchers = append(matchers, match.NewNothing())
continue
}
m, err := do(desc, s)
if err != nil {
return nil, err
}
matchers = append(matchers, optimize(m))
}
return match.NewAnyOf(matchers...), nil
case *nodePattern:
nodes := leaf.children()
if len(nodes) == 0 {
return match.NewNothing(), nil
}
var matchers []match.Matcher
for _, desc := range nodes {
m, err := do(desc, s)
if err != nil {
return nil, err
}
matchers = append(matchers, optimize(m))
}
m, err = compileMatchers(minimizeMatchers(matchers))
if err != nil {
return nil, err
}
case *nodeList:
m = match.NewList([]rune(n.chars), n.not)
case *nodeRange:
m = match.NewRange(n.lo, n.hi, n.not)
case *nodeAny:
m = match.NewAny(s)
case *nodeSuper:
m = match.NewSuper()
case *nodeSingle:
m = match.NewSingle(s)
case *nodeText:
m = match.NewText(n.text)
default:
return nil, fmt.Errorf("could not compile tree: unknown node type")
}
return optimize(m), nil
}
func compile(ast *nodePattern, s []rune) (Glob, error) {
g, err := do(ast, s)
if err != nil {
return nil, err
}
return g, nil
}

vendor/github.com/gobwas/glob/compiler/compiler.go (new file, 518 lines)

@@ -0,0 +1,518 @@
package compiler
// TODO use constructor with all matchers, and make their structs private
// TODO glue multiple Text nodes (like after QuoteMeta)
import (
"fmt"
"github.com/gobwas/glob/match"
"github.com/gobwas/glob/syntax/ast"
"github.com/gobwas/glob/util/runes"
"reflect"
)
func optimizeMatcher(matcher match.Matcher) match.Matcher {
switch m := matcher.(type) {
case match.Any:
if len(m.Separators) == 0 {
return match.NewSuper()
}
case match.AnyOf:
if len(m.Matchers) == 1 {
return m.Matchers[0]
}
return m
case match.List:
if m.Not == false && len(m.List) == 1 {
return match.NewText(string(m.List))
}
return m
case match.BTree:
m.Left = optimizeMatcher(m.Left)
m.Right = optimizeMatcher(m.Right)
r, ok := m.Value.(match.Text)
if !ok {
return m
}
leftNil := m.Left == nil
rightNil := m.Right == nil
if leftNil && rightNil {
return match.NewText(r.Str)
}
_, leftSuper := m.Left.(match.Super)
lp, leftPrefix := m.Left.(match.Prefix)
_, rightSuper := m.Right.(match.Super)
rs, rightSuffix := m.Right.(match.Suffix)
if leftSuper && rightSuper {
return match.NewContains(r.Str, false)
}
if leftSuper && rightNil {
return match.NewSuffix(r.Str)
}
if rightSuper && leftNil {
return match.NewPrefix(r.Str)
}
if leftNil && rightSuffix {
return match.NewPrefixSuffix(r.Str, rs.Suffix)
}
if rightNil && leftPrefix {
return match.NewPrefixSuffix(lp.Prefix, r.Str)
}
return m
}
return matcher
}
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
if len(matchers) == 0 {
return nil, fmt.Errorf("compile error: need at least one matcher")
}
if len(matchers) == 1 {
return matchers[0], nil
}
if m := glueMatchers(matchers); m != nil {
return m, nil
}
idx := -1
maxLen := -1
var val match.Matcher
for i, matcher := range matchers {
if l := matcher.Len(); l != -1 && l >= maxLen {
maxLen = l
idx = i
val = matcher
}
}
if val == nil { // no matcher with a static length was found
r, err := compileMatchers(matchers[1:])
if err != nil {
return nil, err
}
return match.NewBTree(matchers[0], nil, r), nil
}
left := matchers[:idx]
var right []match.Matcher
if len(matchers) > idx+1 {
right = matchers[idx+1:]
}
var l, r match.Matcher
var err error
if len(left) > 0 {
l, err = compileMatchers(left)
if err != nil {
return nil, err
}
}
if len(right) > 0 {
r, err = compileMatchers(right)
if err != nil {
return nil, err
}
}
return match.NewBTree(val, l, r), nil
}
func glueMatchers(matchers []match.Matcher) match.Matcher {
if m := glueMatchersAsEvery(matchers); m != nil {
return m
}
if m := glueMatchersAsRow(matchers); m != nil {
return m
}
return nil
}
func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
c []match.Matcher
l int
)
for _, matcher := range matchers {
if ml := matcher.Len(); ml == -1 {
return nil
} else {
c = append(c, matcher)
l += ml
}
}
return match.NewRow(l, c...)
}
func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
if len(matchers) <= 1 {
return nil
}
var (
hasAny bool
hasSuper bool
hasSingle bool
min int
separator []rune
)
for i, matcher := range matchers {
var sep []rune
switch m := matcher.(type) {
case match.Super:
sep = []rune{}
hasSuper = true
case match.Any:
sep = m.Separators
hasAny = true
case match.Single:
sep = m.Separators
hasSingle = true
min++
case match.List:
if !m.Not {
return nil
}
sep = m.List
hasSingle = true
min++
default:
return nil
}
// initialize
if i == 0 {
separator = sep
}
if runes.Equal(sep, separator) {
continue
}
return nil
}
if hasSuper && !hasAny && !hasSingle {
return match.NewSuper()
}
if hasAny && !hasSuper && !hasSingle {
return match.NewAny(separator)
}
if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
return match.NewMin(min)
}
every := match.NewEveryOf()
if min > 0 {
every.Add(match.NewMin(min))
if !hasAny && !hasSuper {
every.Add(match.NewMax(min))
}
}
if len(separator) > 0 {
every.Add(match.NewContains(string(separator), true))
}
return every
}
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
var done match.Matcher
var left, right, count int
for l := 0; l < len(matchers); l++ {
for r := len(matchers); r > l; r-- {
if glued := glueMatchers(matchers[l:r]); glued != nil {
var swap bool
if done == nil {
swap = true
} else {
cl, gl := done.Len(), glued.Len()
swap = cl > -1 && gl > -1 && gl > cl
swap = swap || count < r-l
}
if swap {
done = glued
left = l
right = r
count = r - l
}
}
}
}
if done == nil {
return matchers
}
next := append(append([]match.Matcher{}, matchers[:left]...), done)
if right < len(matchers) {
next = append(next, matchers[right:]...)
}
if len(next) == len(matchers) {
return next
}
return minimizeMatchers(next)
}
// minimizeTree applies heuristics to minimize the number of nodes in the given tree
func minimizeTree(tree *ast.Node) *ast.Node {
switch tree.Kind {
case ast.KindAnyOf:
return minimizeTreeAnyOf(tree)
default:
return nil
}
}
// minimizeTreeAnyOf tries to find common children of the given AnyOf node.
// It searches for common children from the left and from the right.
// If any common children are found, it returns a new, optimized AST tree;
// otherwise it returns nil.
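// For example (illustrative case): in the pattern {*.go,*.txt} both alternatives
// start with the same Any node, so the tree can be rewritten as a Pattern holding
// [Any] followed by AnyOf{Pattern[Text(".go")], Pattern[Text(".txt")]}, letting
// the common leading wildcard be compiled once.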
func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
if !areOfSameKind(tree.Children, ast.KindPattern) {
return nil
}
commonLeft, commonRight := commonChildren(tree.Children)
commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
return nil
}
var result []*ast.Node
if commonLeftCount > 0 {
result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
}
var anyOf []*ast.Node
for _, child := range tree.Children {
reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
var node *ast.Node
if len(reuse) == 0 {
// this pattern is completely reduced by the commonLeft and commonRight patterns,
// so it becomes nothing
node = ast.NewNode(ast.KindNothing, nil)
} else {
node = ast.NewNode(ast.KindPattern, nil, reuse...)
}
anyOf = appendIfUnique(anyOf, node)
}
switch {
case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
result = append(result, anyOf[0])
case len(anyOf) > 1:
result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
}
if commonRightCount > 0 {
result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
}
return ast.NewNode(ast.KindPattern, nil, result...)
}
func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
if len(nodes) <= 1 {
return
}
// find the node with the fewest children
idx := leastChildren(nodes)
if idx == -1 {
return
}
tree := nodes[idx]
treeLength := len(tree.Children)
// allocate the maximum possible size for the commonRight slice
// so that elements can be inserted in reverse order (from end to start)
// without sorting
commonRight = make([]*ast.Node, treeLength)
lastRight := treeLength // will use this to get results as commonRight[lastRight:]
var (
breakLeft bool
breakRight bool
commonTotal int
)
for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
treeLeft := tree.Children[i]
treeRight := tree.Children[j]
for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
// skip least children node
if k == idx {
continue
}
restLeft := nodes[k].Children[i]
restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]
breakLeft = breakLeft || !treeLeft.Equal(restLeft)
// disable searching for right common parts, if left part is already overlapping
breakRight = breakRight || (!breakLeft && j <= i)
breakRight = breakRight || !treeRight.Equal(restRight)
}
if !breakLeft {
commonTotal++
commonLeft = append(commonLeft, treeLeft)
}
if !breakRight {
commonTotal++
lastRight = j
commonRight[j] = treeRight
}
}
commonRight = commonRight[lastRight:]
return
}
func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
for _, n := range target {
if reflect.DeepEqual(n, val) {
return target
}
}
return append(target, val)
}
func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
for _, n := range nodes {
if n.Kind != kind {
return false
}
}
return true
}
func leastChildren(nodes []*ast.Node) int {
min := -1
idx := -1
for i, n := range nodes {
if idx == -1 || (len(n.Children) < min) {
min = len(n.Children)
idx = i
}
}
return idx
}
func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
var matchers []match.Matcher
for _, desc := range tree.Children {
m, err := compile(desc, sep)
if err != nil {
return nil, err
}
matchers = append(matchers, optimizeMatcher(m))
}
return matchers, nil
}
func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
switch tree.Kind {
case ast.KindAnyOf:
// todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
if n := minimizeTree(tree); n != nil {
return compile(n, sep)
}
matchers, err := compileTreeChildren(tree, sep)
if err != nil {
return nil, err
}
return match.NewAnyOf(matchers...), nil
case ast.KindPattern:
if len(tree.Children) == 0 {
return match.NewNothing(), nil
}
matchers, err := compileTreeChildren(tree, sep)
if err != nil {
return nil, err
}
m, err = compileMatchers(minimizeMatchers(matchers))
if err != nil {
return nil, err
}
case ast.KindAny:
m = match.NewAny(sep)
case ast.KindSuper:
m = match.NewSuper()
case ast.KindSingle:
m = match.NewSingle(sep)
case ast.KindNothing:
m = match.NewNothing()
case ast.KindList:
l := tree.Value.(ast.List)
m = match.NewList([]rune(l.Chars), l.Not)
case ast.KindRange:
r := tree.Value.(ast.Range)
m = match.NewRange(r.Lo, r.Hi, r.Not)
case ast.KindText:
t := tree.Value.(ast.Text)
m = match.NewText(t.Text)
default:
return nil, fmt.Errorf("could not compile tree: unknown node type")
}
return optimizeMatcher(m), nil
}
func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
m, err := compile(tree, sep)
if err != nil {
return nil, err
}
return m, nil
}
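
The compiler package now expects a ready-made AST instead of parsing the pattern itself. Below is a minimal sketch of driving it directly, mirroring what glob.Compile does in the next hunk; it assumes match.Matcher exposes the Match(string) bool method that the Glob interface requires.

package main

import (
    "fmt"

    "github.com/gobwas/glob/compiler"
    "github.com/gobwas/glob/syntax"
)

func main() {
    // Parse the pattern into an AST first; the compiler no longer owns this step.
    tree, err := syntax.Parse("*.go")
    if err != nil {
        panic(err)
    }
    // Compile the AST into a matcher, with '/' as a separator that '*' may not cross.
    m, err := compiler.Compile(tree, []rune{'/'})
    if err != nil {
        panic(err)
    }
    fmt.Println(m.Match("main.go"), m.Match("cmd/main.go")) // true false
}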


@@ -1,5 +1,10 @@
package glob
import (
"github.com/gobwas/glob/compiler"
"github.com/gobwas/glob/syntax"
)
// Glob represents compiled glob pattern.
type Glob interface {
Match(string) bool
@@ -32,12 +37,12 @@ type Glob interface {
// comma-separated (without spaces) patterns
//
func Compile(pattern string, separators ...rune) (Glob, error) {
ast, err := parse(newLexer(pattern))
ast, err := syntax.Parse(pattern)
if err != nil {
return nil, err
}
matcher, err := compile(ast, separators)
matcher, err := compiler.Compile(ast, separators)
if err != nil {
return nil, err
}
@@ -63,7 +68,7 @@ func QuoteMeta(s string) string {
// a byte loop is correct because all meta characters are ASCII
j := 0
for i := 0; i < len(s); i++ {
if special(s[i]) {
if syntax.Special(s[i]) {
b[j] = '\\'
j++
}
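
For library users nothing changes: glob.Compile and glob.QuoteMeta keep their signatures; only the parsing and compiling moved into the syntax and compiler packages. A small usage sketch (patterns and expected outputs are illustrative):

package main

import (
    "fmt"

    "github.com/gobwas/glob"
)

func main() {
    // '/' is passed as a separator, so '*' does not match across path elements.
    g, err := glob.Compile("vendor/*/glob", '/')
    if err != nil {
        panic(err)
    }
    fmt.Println(g.Match("vendor/gobwas/glob"))   // true
    fmt.Println(g.Match("vendor/gobwas/x/glob")) // false

    // QuoteMeta escapes the metacharacters recognised by syntax.Special.
    fmt.Println(glob.QuoteMeta("{a,b}")) // \{a\,b\}
}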


@@ -2,7 +2,7 @@ package match
import (
"fmt"
"github.com/gobwas/glob/strings"
"github.com/gobwas/glob/util/strings"
)
type Any struct {


@@ -2,7 +2,7 @@ package match
import (
"fmt"
"github.com/gobwas/glob/runes"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)


@@ -2,7 +2,7 @@ package match
import (
"fmt"
"github.com/gobwas/glob/runes"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)


@@ -1,230 +0,0 @@
package glob
import (
"errors"
"fmt"
"unicode/utf8"
)
type node interface {
children() []node
append(node)
}
// todo may be split it into another package
type lexerIface interface {
nextItem() item
}
type nodeImpl struct {
desc []node
}
func (n *nodeImpl) append(c node) {
n.desc = append(n.desc, c)
}
func (n *nodeImpl) children() []node {
return n.desc
}
type nodeList struct {
nodeImpl
not bool
chars string
}
type nodeRange struct {
nodeImpl
not bool
lo, hi rune
}
type nodeText struct {
nodeImpl
text string
}
type nodePattern struct{ nodeImpl }
type nodeAny struct{ nodeImpl }
type nodeSuper struct{ nodeImpl }
type nodeSingle struct{ nodeImpl }
type nodeAnyOf struct{ nodeImpl }
type tree struct {
root node
current node
path []node
}
func (t *tree) enter(c node) {
if t.root == nil {
t.root = c
t.current = c
return
}
t.current.append(c)
t.path = append(t.path, c)
t.current = c
}
func (t *tree) leave() {
if len(t.path)-1 <= 0 {
t.current = t.root
t.path = nil
return
}
t.path = t.path[:len(t.path)-1]
t.current = t.path[len(t.path)-1]
}
type parseFn func(*tree, lexerIface) (parseFn, error)
func parse(lexer lexerIface) (*nodePattern, error) {
var parser parseFn
root := &nodePattern{}
tree := &tree{}
tree.enter(root)
for parser = parserMain; ; {
next, err := parser(tree, lexer)
if err != nil {
return nil, err
}
if next == nil {
break
}
parser = next
}
return root, nil
}
func parserMain(tree *tree, lexer lexerIface) (parseFn, error) {
for stop := false; !stop; {
item := lexer.nextItem()
switch item.t {
case item_eof:
stop = true
continue
case item_error:
return nil, errors.New(item.s)
case item_text:
tree.current.append(&nodeText{text: item.s})
return parserMain, nil
case item_any:
tree.current.append(&nodeAny{})
return parserMain, nil
case item_super:
tree.current.append(&nodeSuper{})
return parserMain, nil
case item_single:
tree.current.append(&nodeSingle{})
return parserMain, nil
case item_range_open:
return parserRange, nil
case item_terms_open:
tree.enter(&nodeAnyOf{})
tree.enter(&nodePattern{})
return parserMain, nil
case item_separator:
tree.leave()
tree.enter(&nodePattern{})
return parserMain, nil
case item_terms_close:
tree.leave()
tree.leave()
return parserMain, nil
default:
return nil, fmt.Errorf("unexpected token: %s", item)
}
}
return nil, nil
}
func parserRange(tree *tree, lexer lexerIface) (parseFn, error) {
var (
not bool
lo rune
hi rune
chars string
)
for {
item := lexer.nextItem()
switch item.t {
case item_eof:
return nil, errors.New("unexpected end")
case item_error:
return nil, errors.New(item.s)
case item_not:
not = true
case item_range_lo:
r, w := utf8.DecodeRuneInString(item.s)
if len(item.s) > w {
return nil, fmt.Errorf("unexpected length of lo character")
}
lo = r
case item_range_between:
//
case item_range_hi:
r, w := utf8.DecodeRuneInString(item.s)
if len(item.s) > w {
return nil, fmt.Errorf("unexpected length of lo character")
}
hi = r
if hi < lo {
return nil, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
}
case item_text:
chars = item.s
case item_range_close:
isRange := lo != 0 && hi != 0
isChars := chars != ""
if isChars == isRange {
return nil, fmt.Errorf("could not parse range")
}
if isRange {
tree.current.append(&nodeRange{
lo: lo,
hi: hi,
not: not,
})
} else {
tree.current.append(&nodeList{
chars: chars,
not: not,
})
}
return parserMain, nil
}
}
}

vendor/github.com/gobwas/glob/syntax/ast/ast.go (new file, 72 lines)

@@ -0,0 +1,72 @@
package ast
type Node struct {
Parent *Node
Children []*Node
Value interface{}
Kind Kind
}
func NewNode(k Kind, v interface{}, ch ...*Node) *Node {
n := &Node{
Kind: k,
Value: v,
}
for _, c := range ch {
Insert(n, c)
}
return n
}
func (a *Node) Equal(b *Node) bool {
if a.Kind != b.Kind {
return false
}
if a.Value != b.Value {
return false
}
if len(a.Children) != len(b.Children) {
return false
}
for i, c := range a.Children {
if !c.Equal(b.Children[i]) {
return false
}
}
return true
}
func Insert(parent *Node, children ...*Node) {
parent.Children = append(parent.Children, children...)
for _, ch := range children {
ch.Parent = parent
}
}
type List struct {
Not bool
Chars string
}
type Range struct {
Not bool
Lo, Hi rune
}
type Text struct {
Text string
}
type Kind int
const (
KindNothing Kind = iota
KindPattern
KindList
KindRange
KindText
KindAny
KindSuper
KindSingle
KindAnyOf
)
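
The AST is now plain exported data, so trees can be built and compared outside the parser. A short sketch using only the NewNode and Equal helpers above; the pattern "a*" is an illustrative example.

package main

import (
    "fmt"

    "github.com/gobwas/glob/syntax/ast"
)

func main() {
    // Hand-build roughly the tree the parser produces for "a*":
    // a Pattern node holding a Text("a") child and an Any child.
    lhs := ast.NewNode(ast.KindPattern, nil,
        ast.NewNode(ast.KindText, ast.Text{Text: "a"}),
        ast.NewNode(ast.KindAny, nil),
    )
    rhs := ast.NewNode(ast.KindPattern, nil,
        ast.NewNode(ast.KindText, ast.Text{Text: "a"}),
        ast.NewNode(ast.KindAny, nil),
    )
    fmt.Println(lhs.Equal(rhs)) // true: same kinds, values and children
}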

vendor/github.com/gobwas/glob/syntax/ast/parser.go (new file, 157 lines)

@@ -0,0 +1,157 @@
package ast
import (
"errors"
"fmt"
"github.com/gobwas/glob/syntax/lexer"
"unicode/utf8"
)
type Lexer interface {
Next() lexer.Token
}
type parseFn func(*Node, Lexer) (parseFn, *Node, error)
func Parse(lexer Lexer) (*Node, error) {
var parser parseFn
root := NewNode(KindPattern, nil)
var (
tree *Node
err error
)
for parser, tree = parserMain, root; parser != nil; {
parser, tree, err = parser(tree, lexer)
if err != nil {
return nil, err
}
}
return root, nil
}
func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) {
for {
token := lex.Next()
switch token.Type {
case lexer.EOF:
return nil, tree, nil
case lexer.Error:
return nil, tree, errors.New(token.Raw)
case lexer.Text:
Insert(tree, NewNode(KindText, Text{token.Raw}))
return parserMain, tree, nil
case lexer.Any:
Insert(tree, NewNode(KindAny, nil))
return parserMain, tree, nil
case lexer.Super:
Insert(tree, NewNode(KindSuper, nil))
return parserMain, tree, nil
case lexer.Single:
Insert(tree, NewNode(KindSingle, nil))
return parserMain, tree, nil
case lexer.RangeOpen:
return parserRange, tree, nil
case lexer.TermsOpen:
a := NewNode(KindAnyOf, nil)
Insert(tree, a)
p := NewNode(KindPattern, nil)
Insert(a, p)
return parserMain, p, nil
case lexer.Separator:
p := NewNode(KindPattern, nil)
Insert(tree.Parent, p)
return parserMain, p, nil
case lexer.TermsClose:
return parserMain, tree.Parent.Parent, nil
default:
return nil, tree, fmt.Errorf("unexpected token: %s", token)
}
}
return nil, tree, fmt.Errorf("unknown error")
}
func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) {
var (
not bool
lo rune
hi rune
chars string
)
for {
token := lex.Next()
switch token.Type {
case lexer.EOF:
return nil, tree, errors.New("unexpected end")
case lexer.Error:
return nil, tree, errors.New(token.Raw)
case lexer.Not:
not = true
case lexer.RangeLo:
r, w := utf8.DecodeRuneInString(token.Raw)
if len(token.Raw) > w {
return nil, tree, fmt.Errorf("unexpected length of lo character")
}
lo = r
case lexer.RangeBetween:
//
case lexer.RangeHi:
r, w := utf8.DecodeRuneInString(token.Raw)
if len(token.Raw) > w {
return nil, tree, fmt.Errorf("unexpected length of lo character")
}
hi = r
if hi < lo {
return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
}
case lexer.Text:
chars = token.Raw
case lexer.RangeClose:
isRange := lo != 0 && hi != 0
isChars := chars != ""
if isChars == isRange {
return nil, tree, fmt.Errorf("could not parse range")
}
if isRange {
Insert(tree, NewNode(KindRange, Range{
Lo: lo,
Hi: hi,
Not: not,
}))
} else {
Insert(tree, NewNode(KindList, List{
Chars: chars,
Not: not,
}))
}
return parserMain, tree, nil
}
}
}
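
A rough sketch of driving the new parser directly; this is the same call chain that the syntax.Parse helper further down wraps.

package main

import (
    "fmt"

    "github.com/gobwas/glob/syntax/ast"
    "github.com/gobwas/glob/syntax/lexer"
)

func main() {
    // "{foo,bar}.go" parses into a Pattern root holding an AnyOf node
    // (the two alternatives) followed by a Text node for ".go".
    tree, err := ast.Parse(lexer.NewLexer("{foo,bar}.go"))
    if err != nil {
        panic(err)
    }
    fmt.Println(len(tree.Children))                     // 2
    fmt.Println(tree.Children[0].Kind == ast.KindAnyOf) // true
    fmt.Println(tree.Children[1].Value)                 // {.go}
}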


@@ -1,9 +1,9 @@
package glob
package lexer
import (
"bytes"
"fmt"
"github.com/gobwas/glob/runes"
"github.com/gobwas/glob/util/runes"
"unicode/utf8"
)
@@ -30,123 +30,24 @@ var specials = []byte{
char_terms_close,
}
func special(c byte) bool {
func Special(c byte) bool {
return bytes.IndexByte(specials, c) != -1
}
type itemType int
type tokens []Token
const (
item_eof itemType = iota
item_error
item_text
item_char
item_any
item_super
item_single
item_not
item_separator
item_range_open
item_range_close
item_range_lo
item_range_hi
item_range_between
item_terms_open
item_terms_close
)
func (i itemType) String() string {
switch i {
case item_eof:
return "eof"
case item_error:
return "error"
case item_text:
return "text"
case item_char:
return "char"
case item_any:
return "any"
case item_super:
return "super"
case item_single:
return "single"
case item_not:
return "not"
case item_separator:
return "separator"
case item_range_open:
return "range_open"
case item_range_close:
return "range_close"
case item_range_lo:
return "range_lo"
case item_range_hi:
return "range_hi"
case item_range_between:
return "range_between"
case item_terms_open:
return "terms_open"
case item_terms_close:
return "terms_close"
default:
return "undef"
}
}
type item struct {
t itemType
s string
}
func (i item) String() string {
return fmt.Sprintf("%v<%q>", i.t, i.s)
}
type stubLexer struct {
Items []item
pos int
}
func (s *stubLexer) nextItem() (ret item) {
if s.pos == len(s.Items) {
return item{item_eof, ""}
}
ret = s.Items[s.pos]
s.pos++
return
}
type items []item
func (i *items) shift() (ret item) {
func (i *tokens) shift() (ret Token) {
ret = (*i)[0]
copy(*i, (*i)[1:])
*i = (*i)[:len(*i)-1]
return
}
func (i *items) push(v item) {
func (i *tokens) push(v Token) {
*i = append(*i, v)
}
func (i *items) empty() bool {
func (i *tokens) empty() bool {
return len(*i) == 0
}
@@ -157,7 +58,7 @@ type lexer struct {
pos int
err error
items items
tokens tokens
termsLevel int
lastRune rune
@@ -165,14 +66,26 @@
hasRune bool
}
func newLexer(source string) *lexer {
func NewLexer(source string) *lexer {
l := &lexer{
data: source,
items: items(make([]item, 0, 4)),
data: source,
tokens: tokens(make([]Token, 0, 4)),
}
return l
}
func (l *lexer) Next() Token {
if l.err != nil {
return Token{Error, l.err.Error()}
}
if !l.tokens.empty() {
return l.tokens.shift()
}
l.fetchItem()
return l.Next()
}
func (l *lexer) peek() (r rune, w int) {
if l.pos == len(l.data) {
return eof, 0
@@ -233,18 +146,6 @@ func (l *lexer) termsLeave() {
l.termsLevel--
}
func (l *lexer) nextItem() item {
if l.err != nil {
return item{item_error, l.err.Error()}
}
if !l.items.empty() {
return l.items.shift()
}
l.fetchItem()
return l.nextItem()
}
var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open}
var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma)
@@ -252,32 +153,32 @@ func (l *lexer) fetchItem() {
r := l.read()
switch {
case r == eof:
l.items.push(item{item_eof, ""})
l.tokens.push(Token{EOF, ""})
case r == char_terms_open:
l.termsEnter()
l.items.push(item{item_terms_open, string(r)})
l.tokens.push(Token{TermsOpen, string(r)})
case r == char_comma && l.inTerms():
l.items.push(item{item_separator, string(r)})
l.tokens.push(Token{Separator, string(r)})
case r == char_terms_close && l.inTerms():
l.items.push(item{item_terms_close, string(r)})
l.tokens.push(Token{TermsClose, string(r)})
l.termsLeave()
case r == char_range_open:
l.items.push(item{item_range_open, string(r)})
l.tokens.push(Token{RangeOpen, string(r)})
l.fetchRange()
case r == char_single:
l.items.push(item{item_single, string(r)})
l.tokens.push(Token{Single, string(r)})
case r == char_any:
if l.read() == char_any {
l.items.push(item{item_super, string(r) + string(r)})
l.tokens.push(Token{Super, string(r) + string(r)})
} else {
l.unread()
l.items.push(item{item_any, string(r)})
l.tokens.push(Token{Any, string(r)})
}
default:
@@ -308,27 +209,27 @@ func (l *lexer) fetchRange() {
if r != char_range_close {
l.errorf("expected close range character")
} else {
l.items.push(item{item_range_close, string(r)})
l.tokens.push(Token{RangeClose, string(r)})
}
return
}
if wantHi {
l.items.push(item{item_range_hi, string(r)})
l.tokens.push(Token{RangeHi, string(r)})
wantClose = true
continue
}
if !seenNot && r == char_range_not {
l.items.push(item{item_not, string(r)})
l.tokens.push(Token{Not, string(r)})
seenNot = true
continue
}
if n, w := l.peek(); n == char_range_between {
l.seek(w)
l.items.push(item{item_range_lo, string(r)})
l.items.push(item{item_range_between, string(n)})
l.tokens.push(Token{RangeLo, string(r)})
l.tokens.push(Token{RangeBetween, string(n)})
wantHi = true
continue
}
@@ -367,6 +268,6 @@ reading:
}
if len(data) > 0 {
l.items.push(item{item_text, string(data)})
l.tokens.push(Token{Text, string(data)})
}
}

vendor/github.com/gobwas/glob/syntax/lexer/token.go (new file, 88 lines)

@@ -0,0 +1,88 @@
package lexer
import "fmt"
type TokenType int
const (
EOF TokenType = iota
Error
Text
Char
Any
Super
Single
Not
Separator
RangeOpen
RangeClose
RangeLo
RangeHi
RangeBetween
TermsOpen
TermsClose
)
func (tt TokenType) String() string {
switch tt {
case EOF:
return "eof"
case Error:
return "error"
case Text:
return "text"
case Char:
return "char"
case Any:
return "any"
case Super:
return "super"
case Single:
return "single"
case Not:
return "not"
case Separator:
return "separator"
case RangeOpen:
return "range_open"
case RangeClose:
return "range_close"
case RangeLo:
return "range_lo"
case RangeHi:
return "range_hi"
case RangeBetween:
return "range_between"
case TermsOpen:
return "terms_open"
case TermsClose:
return "terms_close"
default:
return "undef"
}
}
type Token struct {
Type TokenType
Raw string
}
func (t Token) String() string {
return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
}
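
The lexer now lives in its own package and hands out tokens through Next rather than the old nextItem. A short sketch that tokenizes a pattern and prints each token in the format defined by Token.String above:

package main

import (
    "fmt"

    "github.com/gobwas/glob/syntax/lexer"
)

func main() {
    l := lexer.NewLexer("a*b")
    for {
        tok := l.Next()
        fmt.Println(tok) // text<"a">, then any<"*">, text<"b">, eof<"">
        if tok.Type == lexer.EOF {
            break
        }
    }
}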

vendor/github.com/gobwas/glob/syntax/syntax.go (new file, 14 lines)

@@ -0,0 +1,14 @@
package syntax
import (
"github.com/gobwas/glob/syntax/ast"
"github.com/gobwas/glob/syntax/lexer"
)
func Parse(s string) (*ast.Node, error) {
return ast.Parse(lexer.NewLexer(s))
}
func Special(b byte) bool {
return lexer.Special(b)
}
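
The syntax package is a thin facade over the ast and lexer packages. Special is what glob.QuoteMeta (earlier in this commit) consults when deciding which bytes to escape; a trivial sketch:

package main

import (
    "fmt"

    "github.com/gobwas/glob/syntax"
)

func main() {
    // '*' is a glob metacharacter, 'a' is not.
    fmt.Println(syntax.Special('*'), syntax.Special('a')) // true false
}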

vendor/manifest (4 lines changed)

@@ -134,8 +134,8 @@
{
"importpath": "github.com/gobwas/glob",
"repository": "https://github.com/gobwas/glob",
"revision": "70f1304bc31c066b52b681dfbaecf0e913527632",
"branch": "newlexer",
"revision": "ce6abff51712df5da11095fb41dd4b0353559797",
"branch": "master",
"notests": true
},
{