mirror of
https://github.com/golang/go
synced 2025-05-05 03:21:36 +00:00
Inst is a 40-byte struct, so avoiding the copy gives a decent speedup: name old time/op new time/op delta Find-8 160ns ± 4% 109ns ± 4% -32.22% (p=0.008 n=5+5) FindAllNoMatches-8 70.4ns ± 4% 53.8ns ± 0% -23.58% (p=0.016 n=5+4) FindString-8 154ns ± 6% 107ns ± 0% -30.37% (p=0.016 n=5+4) FindSubmatch-8 194ns ± 1% 135ns ± 1% -30.56% (p=0.008 n=5+5) FindStringSubmatch-8 193ns ± 8% 131ns ± 0% -31.82% (p=0.008 n=5+5) Literal-8 42.8ns ± 2% 34.8ns ± 0% -18.67% (p=0.008 n=5+5) NotLiteral-8 917ns ± 2% 636ns ± 0% -30.68% (p=0.008 n=5+5) MatchClass-8 1.18µs ± 3% 0.91µs ± 1% -22.27% (p=0.016 n=5+4) MatchClass_InRange-8 1.11µs ± 1% 0.87µs ± 2% -21.38% (p=0.008 n=5+5) ReplaceAll-8 659ns ± 6% 596ns ± 3% -9.60% (p=0.008 n=5+5) AnchoredLiteralShortNonMatch-8 34.2ns ± 0% 30.4ns ± 1% -11.20% (p=0.016 n=4+5) AnchoredLiteralLongNonMatch-8 38.7ns ± 0% 38.7ns ± 0% ~ (p=0.579 n=5+5) AnchoredShortMatch-8 67.0ns ± 1% 52.7ns ± 0% -21.31% (p=0.016 n=5+4) AnchoredLongMatch-8 121ns ± 0% 124ns ±10% ~ (p=0.730 n=5+5) OnePassShortA-8 392ns ± 0% 231ns ± 3% -41.10% (p=0.008 n=5+5) NotOnePassShortA-8 370ns ± 0% 282ns ± 1% -23.81% (p=0.008 n=5+5) OnePassShortB-8 280ns ± 0% 179ns ± 1% -36.05% (p=0.008 n=5+5) NotOnePassShortB-8 226ns ± 0% 185ns ± 3% -18.26% (p=0.008 n=5+5) OnePassLongPrefix-8 51.7ns ± 0% 39.1ns ± 1% -24.28% (p=0.016 n=4+5) OnePassLongNotPrefix-8 213ns ± 2% 132ns ± 1% -37.86% (p=0.008 n=5+5) MatchParallelShared-8 25.3ns ± 3% 23.4ns ± 7% -7.50% (p=0.016 n=5+5) MatchParallelCopied-8 26.5ns ± 7% 22.3ns ± 7% -16.06% (p=0.008 n=5+5) QuoteMetaAll-8 45.8ns ± 1% 45.8ns ± 1% ~ (p=1.000 n=5+5) QuoteMetaNone-8 24.3ns ± 0% 24.3ns ± 0% ~ (p=0.325 n=5+5) Compile/Onepass-8 1.98µs ± 0% 1.97µs ± 0% -0.22% (p=0.016 n=5+4) Compile/Medium-8 4.56µs ± 0% 4.55µs ± 1% ~ (p=0.595 n=5+5) Compile/Hard-8 35.7µs ± 0% 35.3µs ± 3% ~ (p=0.151 n=5+5) Match/Easy0/16-8 2.18ns ± 2% 2.19ns ± 5% ~ (p=0.690 n=5+5) Match/Easy0/32-8 27.4ns ± 2% 27.6ns ± 4% ~ (p=1.000 n=5+5) Match/Easy0/1K-8 246ns ± 0% 252ns ± 7% ~ (p=0.238 n=5+5) 
Match/Easy0/32K-8 4.58µs ± 7% 4.64µs ± 5% ~ (p=1.000 n=5+5) Match/Easy0/1M-8 235µs ± 0% 235µs ± 0% ~ (p=0.886 n=4+4) Match/Easy0/32M-8 7.86ms ± 0% 7.86ms ± 1% ~ (p=0.730 n=4+5) Match/Easy0i/16-8 2.15ns ± 0% 2.15ns ± 0% ~ (p=0.246 n=5+5) Match/Easy0i/32-8 507ns ± 2% 466ns ± 4% -8.03% (p=0.008 n=5+5) Match/Easy0i/1K-8 14.7µs ± 0% 13.6µs ± 2% -7.63% (p=0.008 n=5+5) Match/Easy0i/32K-8 571µs ± 1% 570µs ± 1% ~ (p=0.556 n=4+5) Match/Easy0i/1M-8 18.2ms ± 0% 18.8ms ±11% ~ (p=0.548 n=5+5) Match/Easy0i/32M-8 581ms ± 0% 590ms ± 1% +1.52% (p=0.016 n=4+5) Match/Easy1/16-8 2.17ns ± 0% 2.15ns ± 0% -0.90% (p=0.000 n=5+4) Match/Easy1/32-8 25.1ns ± 0% 25.4ns ± 4% ~ (p=0.651 n=5+5) Match/Easy1/1K-8 462ns ± 1% 431ns ± 4% -6.56% (p=0.008 n=5+5) Match/Easy1/32K-8 18.8µs ± 0% 18.8µs ± 1% ~ (p=1.000 n=5+5) Match/Easy1/1M-8 658µs ± 0% 658µs ± 1% ~ (p=0.841 n=5+5) Match/Easy1/32M-8 21.0ms ± 1% 21.0ms ± 2% ~ (p=0.841 n=5+5) Match/Medium/16-8 2.15ns ± 0% 2.16ns ± 0% ~ (p=0.714 n=4+5) Match/Medium/32-8 561ns ± 1% 512ns ± 5% -8.69% (p=0.008 n=5+5) Match/Medium/1K-8 16.9µs ± 0% 15.2µs ± 1% -10.40% (p=0.008 n=5+5) Match/Medium/32K-8 632µs ± 0% 631µs ± 1% ~ (p=0.421 n=5+5) Match/Medium/1M-8 20.3ms ± 1% 20.1ms ± 0% ~ (p=0.190 n=5+4) Match/Medium/32M-8 650ms ± 1% 646ms ± 0% -0.58% (p=0.032 n=5+4) Match/Hard/16-8 2.15ns ± 0% 2.15ns ± 1% ~ (p=0.111 n=5+5) Match/Hard/32-8 870ns ± 2% 667ns ± 1% -23.28% (p=0.008 n=5+5) Match/Hard/1K-8 26.9µs ± 0% 21.0µs ± 2% -21.83% (p=0.008 n=5+5) Match/Hard/32K-8 833µs ± 0% 833µs ± 1% ~ (p=0.548 n=5+5) Match/Hard/1M-8 26.6ms ± 0% 26.8ms ± 1% ~ (p=0.905 n=4+5) Match/Hard/32M-8 856ms ± 0% 851ms ± 0% -0.65% (p=0.016 n=5+4) Match/Hard1/16-8 2.96µs ±12% 1.81µs ± 3% -38.68% (p=0.008 n=5+5) Match/Hard1/32-8 5.62µs ± 3% 3.48µs ± 0% -38.07% (p=0.016 n=5+4) Match/Hard1/1K-8 175µs ± 5% 108µs ± 0% -37.85% (p=0.016 n=5+4) Match/Hard1/32K-8 4.09ms ± 2% 4.05ms ± 0% -0.85% (p=0.016 n=5+4) Match/Hard1/1M-8 131ms ± 0% 131ms ± 3% ~ (p=0.151 n=5+5) Match/Hard1/32M-8 4.19s ± 0% 4.20s ± 1% ~ 
(p=1.000 n=5+5) Match_onepass_regex/16-8 262ns ± 2% 170ns ± 2% -35.13% (p=0.008 n=5+5) Match_onepass_regex/32-8 463ns ± 0% 306ns ± 0% -33.90% (p=0.008 n=5+5) Match_onepass_regex/1K-8 13.3µs ± 2% 8.8µs ± 0% -33.84% (p=0.008 n=5+5) Match_onepass_regex/32K-8 424µs ± 3% 280µs ± 1% -33.93% (p=0.008 n=5+5) Match_onepass_regex/1M-8 13.4ms ± 0% 9.0ms ± 1% -32.80% (p=0.016 n=4+5) Match_onepass_regex/32M-8 427ms ± 0% 288ms ± 1% -32.60% (p=0.008 n=5+5) Change-Id: I02c54176ed5c9f5b5fc99524a2d5eb1c490f0ebf Reviewed-on: https://go-review.googlesource.com/c/go/+/355789 Reviewed-by: Peter Weinberger <pjw@google.com> Reviewed-by: Russ Cox <rsc@golang.org> TryBot-Result: Gopher Robot <gobot@golang.org> Run-TryBot: Russ Cox <rsc@golang.org> Auto-Submit: Russ Cox <rsc@golang.org>
368 lines
8.8 KiB
Go
368 lines
8.8 KiB
Go
// Copyright 2015 The Go Authors. All rights reserved.
|
|
// Use of this source code is governed by a BSD-style
|
|
// license that can be found in the LICENSE file.
|
|
|
|
// backtrack is a regular expression search with submatch
|
|
// tracking for small regular expressions and texts. It allocates
|
|
// a bit vector with (length of input) * (length of prog) bits,
|
|
// to make sure it never explores the same (character position, instruction)
|
|
// state multiple times. This limits the search to run in time linear in
|
|
// the length of the text.
|
|
//
|
|
// backtrack is a fast replacement for the NFA code on small
|
|
// regexps when onepass cannot be used.
|
|
|
|
package regexp
|
|
|
|
import (
|
|
"regexp/syntax"
|
|
"sync"
|
|
)
|
|
|
|
// A job is an entry on the backtracker's job stack. It holds
// the instruction pc and the position in the input.
type job struct {
	pc  uint32 // index of the instruction in prog.Inst
	arg bool   // true when resuming a previously pushed state (e.g. second leg of an InstAlt)
	pos int    // byte position in the input
}
|
const (
	visitedBits        = 32         // bits per word of the visited bit vector
	maxBacktrackProg   = 500        // len(prog.Inst) <= max
	maxBacktrackVector = 256 * 1024 // bit vector size <= max (bits)
)
|
// bitState holds state for the backtracker.
type bitState struct {
	end      int      // end position of the input
	cap      []int    // capture registers for the search in progress
	matchcap []int    // best match recorded so far
	jobs     []job    // backtracking job stack
	visited  []uint32 // bit vector: one bit per (instruction, position) pair

	inputs inputs
}
|
// bitStatePool caches bitState values for reuse, avoiding a fresh
// allocation (and bit-vector allocation) on every search.
var bitStatePool sync.Pool
|
func newBitState() *bitState {
|
|
b, ok := bitStatePool.Get().(*bitState)
|
|
if !ok {
|
|
b = new(bitState)
|
|
}
|
|
return b
|
|
}
|
|
|
|
// freeBitState returns b to the pool for reuse by a later search.
func freeBitState(b *bitState) {
	b.inputs.clear() // drop references to the searched input so it can be collected
	bitStatePool.Put(b)
}
|
// maxBitStateLen returns the maximum length of a string to search with
|
|
// the backtracker using prog.
|
|
func maxBitStateLen(prog *syntax.Prog) int {
|
|
if !shouldBacktrack(prog) {
|
|
return 0
|
|
}
|
|
return maxBacktrackVector / len(prog.Inst)
|
|
}
|
|
|
|
// shouldBacktrack reports whether the program is short enough
// for the backtracker to run. (Note: the function returns true
// for small programs, not for long ones.)
func shouldBacktrack(prog *syntax.Prog) bool {
	return len(prog.Inst) <= maxBacktrackProg
}
|
// reset resets the state of the backtracker.
|
|
// end is the end position in the input.
|
|
// ncap is the number of captures.
|
|
func (b *bitState) reset(prog *syntax.Prog, end int, ncap int) {
|
|
b.end = end
|
|
|
|
if cap(b.jobs) == 0 {
|
|
b.jobs = make([]job, 0, 256)
|
|
} else {
|
|
b.jobs = b.jobs[:0]
|
|
}
|
|
|
|
visitedSize := (len(prog.Inst)*(end+1) + visitedBits - 1) / visitedBits
|
|
if cap(b.visited) < visitedSize {
|
|
b.visited = make([]uint32, visitedSize, maxBacktrackVector/visitedBits)
|
|
} else {
|
|
b.visited = b.visited[:visitedSize]
|
|
for i := range b.visited {
|
|
b.visited[i] = 0
|
|
}
|
|
}
|
|
|
|
if cap(b.cap) < ncap {
|
|
b.cap = make([]int, ncap)
|
|
} else {
|
|
b.cap = b.cap[:ncap]
|
|
}
|
|
for i := range b.cap {
|
|
b.cap[i] = -1
|
|
}
|
|
|
|
if cap(b.matchcap) < ncap {
|
|
b.matchcap = make([]int, ncap)
|
|
} else {
|
|
b.matchcap = b.matchcap[:ncap]
|
|
}
|
|
for i := range b.matchcap {
|
|
b.matchcap[i] = -1
|
|
}
|
|
}
|
|
|
|
// shouldVisit reports whether the combination of (pc, pos) has not
|
|
// been visited yet.
|
|
func (b *bitState) shouldVisit(pc uint32, pos int) bool {
|
|
n := uint(int(pc)*(b.end+1) + pos)
|
|
if b.visited[n/visitedBits]&(1<<(n&(visitedBits-1))) != 0 {
|
|
return false
|
|
}
|
|
b.visited[n/visitedBits] |= 1 << (n & (visitedBits - 1))
|
|
return true
|
|
}
|
|
|
|
// push pushes (pc, pos, arg) onto the job stack if it should be
|
|
// visited.
|
|
func (b *bitState) push(re *Regexp, pc uint32, pos int, arg bool) {
|
|
// Only check shouldVisit when arg is false.
|
|
// When arg is true, we are continuing a previous visit.
|
|
if re.prog.Inst[pc].Op != syntax.InstFail && (arg || b.shouldVisit(pc, pos)) {
|
|
b.jobs = append(b.jobs, job{pc: pc, arg: arg, pos: pos})
|
|
}
|
|
}
|
|
|
|
// tryBacktrack runs a backtracking search starting at pos.
// It reports whether a match was found; capture positions are
// accumulated in b.cap / b.matchcap. i is the input abstraction
// (byte slice, string, or reader) and pc is the starting instruction.
func (re *Regexp) tryBacktrack(b *bitState, i input, pc uint32, pos int) bool {
	longest := re.longest // leftmost-longest (POSIX) vs. leftmost-first semantics

	b.push(re, pc, pos, false)
	for len(b.jobs) > 0 {
		l := len(b.jobs) - 1
		// Pop job off the stack.
		pc := b.jobs[l].pc
		pos := b.jobs[l].pos
		arg := b.jobs[l].arg
		b.jobs = b.jobs[:l]

		// Optimization: rather than push and pop,
		// code that is going to Push and continue
		// the loop simply updates ip, p, and arg
		// and jumps to CheckAndLoop. We have to
		// do the ShouldVisit check that Push
		// would have, but we avoid the stack
		// manipulation.
		goto Skip
	CheckAndLoop:
		if !b.shouldVisit(pc, pos) {
			continue
		}
	Skip:

		inst := &re.prog.Inst[pc]

		switch inst.Op {
		default:
			panic("bad inst")
		case syntax.InstFail:
			// push never enqueues InstFail, so it cannot be popped.
			panic("unexpected InstFail")
		case syntax.InstAlt:
			// Cannot just
			// b.push(inst.Out, pos, false)
			// b.push(inst.Arg, pos, false)
			// If during the processing of inst.Out, we encounter
			// inst.Arg via another path, we want to process it then.
			// Pushing it here will inhibit that. Instead, re-push
			// inst with arg==true as a reminder to push inst.Arg out
			// later.
			if arg {
				// Finished inst.Out; try inst.Arg.
				arg = false
				pc = inst.Arg
				goto CheckAndLoop
			} else {
				b.push(re, pc, pos, true)
				pc = inst.Out
				goto CheckAndLoop
			}

		case syntax.InstAltMatch:
			// One opcode consumes runes; the other leads to match.
			switch re.prog.Inst[inst.Out].Op {
			case syntax.InstRune, syntax.InstRune1, syntax.InstRuneAny, syntax.InstRuneAnyNotNL:
				// inst.Arg is the match.
				b.push(re, inst.Arg, pos, false)
				pc = inst.Arg
				pos = b.end
				goto CheckAndLoop
			}
			// inst.Out is the match - non-greedy
			b.push(re, inst.Out, b.end, false)
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstRune:
			// Consume one rune if it matches the instruction's rune class.
			r, width := i.step(pos)
			if !inst.MatchRune(r) {
				continue
			}
			pos += width
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstRune1:
			// Single-rune special case: compare directly, no class scan.
			r, width := i.step(pos)
			if r != inst.Rune[0] {
				continue
			}
			pos += width
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstRuneAnyNotNL:
			// Match any rune except newline (and not end-of-text).
			r, width := i.step(pos)
			if r == '\n' || r == endOfText {
				continue
			}
			pos += width
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstRuneAny:
			// Match any rune (but not end-of-text).
			r, width := i.step(pos)
			if r == endOfText {
				continue
			}
			pos += width
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstCapture:
			if arg {
				// Finished inst.Out; restore the old value.
				b.cap[inst.Arg] = pos
				continue
			} else {
				if inst.Arg < uint32(len(b.cap)) {
					// Capture pos to register, but save old value.
					b.push(re, pc, b.cap[inst.Arg], true) // come back when we're done.
					b.cap[inst.Arg] = pos
				}
				pc = inst.Out
				goto CheckAndLoop
			}

		case syntax.InstEmptyWidth:
			// Zero-width assertion (^, $, \b, ...); check it against
			// the runes surrounding pos without consuming input.
			flag := i.context(pos)
			if !flag.match(syntax.EmptyOp(inst.Arg)) {
				continue
			}
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstNop:
			pc = inst.Out
			goto CheckAndLoop

		case syntax.InstMatch:
			// We found a match. If the caller doesn't care
			// where the match is, no point going further.
			if len(b.cap) == 0 {
				return true
			}

			// Record best match so far.
			// Only need to check end point, because this entire
			// call is only considering one start position.
			if len(b.cap) > 1 {
				b.cap[1] = pos
			}
			if old := b.matchcap[1]; old == -1 || (longest && pos > 0 && pos > old) {
				copy(b.matchcap, b.cap)
			}

			// If going for first match, we're done.
			if !longest {
				return true
			}

			// If we used the entire text, no longer match is possible.
			if pos == b.end {
				return true
			}

			// Otherwise, continue on in hope of a longer match.
			continue
		}
	}

	// In longest mode a match may have been recorded in matchcap
	// even though the stack drained; report it if so.
	return longest && len(b.matchcap) > 1 && b.matchcap[1] >= 0
}
|
// backtrack runs a backtracking search of prog on the input starting at pos.
// Exactly one of ib (bytes) or is (string) carries the input — presumably the
// other is zero; inputs.init handles the choice. ncap is the number of capture
// slots; on success the captures are appended to dstCap and returned, and nil
// is returned when there is no match.
func (re *Regexp) backtrack(ib []byte, is string, pos int, ncap int, dstCap []int) []int {
	startCond := re.cond
	if startCond == ^syntax.EmptyOp(0) { // impossible
		return nil
	}
	if startCond&syntax.EmptyBeginText != 0 && pos != 0 {
		// Anchored match, past beginning of text.
		return nil
	}

	b := newBitState()
	i, end := b.inputs.init(nil, ib, is)
	b.reset(re.prog, end, ncap)

	// Anchored search must start at the beginning of the input
	if startCond&syntax.EmptyBeginText != 0 {
		if len(b.cap) > 0 {
			b.cap[0] = pos
		}
		if !re.tryBacktrack(b, i, uint32(re.prog.Start), pos) {
			freeBitState(b)
			return nil
		}
	} else {

		// Unanchored search, starting from each possible text position.
		// Notice that we have to try the empty string at the end of
		// the text, so the loop condition is pos <= end, not pos < end.
		// This looks like it's quadratic in the size of the text,
		// but we are not clearing visited between calls to TrySearch,
		// so no work is duplicated and it ends up still being linear.
		width := -1
		for ; pos <= end && width != 0; pos += width {
			if len(re.prefix) > 0 {
				// Match requires literal prefix; fast search for it.
				advance := i.index(re, pos)
				if advance < 0 {
					freeBitState(b)
					return nil
				}
				pos += advance
			}

			if len(b.cap) > 0 {
				b.cap[0] = pos
			}
			if re.tryBacktrack(b, i, uint32(re.prog.Start), pos) {
				// Match must be leftmost; done.
				goto Match
			}
			// Advance by one rune (width 0 means end of input).
			_, width = i.step(pos)
		}
		freeBitState(b)
		return nil
	}

Match:
	// Copy the captures out before releasing b back to the pool.
	dstCap = append(dstCap, b.matchcap...)
	freeBitState(b)
	return dstCap
}