text/template: remove concurrency from scanner #53405

Closed
48 changes: 21 additions & 27 deletions src/text/template/parse/lex.go
@@ -111,20 +111,20 @@ type stateFn func(*lexer) stateFn

// lexer holds the state of the scanner.
type lexer struct {
-	name        string    // the name of the input; used only for error reports
-	input       string    // the string being scanned
-	leftDelim   string    // start of action
-	rightDelim  string    // end of action
-	emitComment bool      // emit itemComment tokens.
-	pos         Pos       // current position in the input
-	start       Pos       // start position of this item
-	atEOF       bool      // we have hit the end of input and returned eof
-	items       chan item // channel of scanned items
-	parenDepth  int       // nesting depth of ( ) exprs
-	line        int       // 1+number of newlines seen
-	startLine   int       // start line of this item
-	breakOK     bool      // break keyword allowed
-	continueOK  bool      // continue keyword allowed
+	name        string // the name of the input; used only for error reports
+	input       string // the string being scanned
+	leftDelim   string // start of action
+	rightDelim  string // end of action
+	emitComment bool   // emit itemComment tokens.
+	pos         Pos    // current position in the input
+	start       Pos    // start position of this item
+	atEOF       bool   // we have hit the end of input and returned eof
+	items       []item // queue of scanned items
+	parenDepth  int    // nesting depth of ( ) exprs
+	line        int    // 1+number of newlines seen
+	startLine   int    // start line of this item
+	breakOK     bool   // break keyword allowed
+	continueOK  bool   // continue keyword allowed
}

// next returns the next rune in the input.
@@ -162,7 +162,7 @@ func (l *lexer) backup() {

// emit passes an item back to the client.
func (l *lexer) emit(t itemType) {
-	l.items <- item{t, l.start, l.input[l.start:l.pos], l.startLine}
+	l.items = append(l.items, item{t, l.start, l.input[l.start:l.pos], l.startLine})
	l.start = l.pos
	l.startLine = l.line
}
@@ -193,21 +193,16 @@ func (l *lexer) acceptRun(valid string) {
// errorf returns an error token and terminates the scan by passing
// back a nil pointer that will be the next state, terminating l.nextItem.
func (l *lexer) errorf(format string, args ...any) stateFn {
-	l.items <- item{itemError, l.start, fmt.Sprintf(format, args...), l.startLine}
+	l.items = append(l.items, item{itemError, l.start, fmt.Sprintf(format, args...), l.startLine})
	return nil
}

// nextItem returns the next item from the input.
// Called by the parser, not in the lexing goroutine.
func (l *lexer) nextItem() item {
-	return <-l.items
-}
-
-// drain drains the output so the lexing goroutine will exit.
-// Called by the parser, not in the lexing goroutine.
-func (l *lexer) drain() {
-	for range l.items {
-	}
+	it := l.items[0]
+	l.items = append(l.items[:0], l.items[1:]...)
+	return it
}

// lex creates a new scanner for the input string.
@@ -226,11 +221,11 @@ func lex(name, input, left, right string, emitComment, breakOK, continueOK bool)
		emitComment: emitComment,
		breakOK:     breakOK,
		continueOK:  continueOK,
-		items:       make(chan item),
+		items:       make([]item, 0, len(input)),
		line:        1,
		startLine:   1,
	}
-	go l.run()
+	l.run()
	return l
}

@@ -239,7 +234,6 @@ func (l *lexer) run() {
	for state := lexText; state != nil; {
		state = state(l)
	}
-	close(l.items)
}

// state functions
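With the goroutine and channel gone, lex runs the state machine to completion before returning, emit and errorf append to a plain slice, and nextItem pops from the front of that slice. The standalone sketch below is not part of the change; it only illustrates the same slice-backed FIFO idiom in isolation, including the append(q[:0], q[1:]...) front pop, which shifts the remaining elements left and therefore costs O(n) per call. All names in it (tok, queue, emit, next) are illustrative stand-ins for the package-internal item machinery.

package main

import "fmt"

// tok stands in for the parse package's item type in this illustration.
type tok struct {
	kind string
	val  string
}

// queue mimics the lexer's new items field: a slice used as a FIFO.
type queue struct {
	toks []tok
}

// emit appends to the back of the queue, like lexer.emit.
func (q *queue) emit(kind, val string) {
	q.toks = append(q.toks, tok{kind, val})
}

// next pops the front element, like the new lexer.nextItem:
// copy toks[0], then shift the rest down into the same backing array.
func (q *queue) next() tok {
	t := q.toks[0]
	q.toks = append(q.toks[:0], q.toks[1:]...)
	return t
}

func main() {
	var q queue
	q.emit("text", "hello ")
	q.emit("leftDelim", "{{")
	q.emit("identifier", "name")
	q.emit("rightDelim", "}}")
	for len(q.toks) > 0 {
		t := q.next()
		fmt.Printf("%-10s %q\n", t.kind, t.val)
	}
}

Note also that lex preallocates the queue with capacity len(input); since almost every emitted item covers at least one byte of input, the appends in run should rarely, if ever, need to grow the backing array.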
16 changes: 0 additions & 16 deletions src/text/template/parse/lex_test.go
@@ -546,22 +546,6 @@ func TestPos(t *testing.T) {
}
}

-// Test that an error shuts down the lexing goroutine.
-func TestShutdown(t *testing.T) {
-	// We need to duplicate template.Parse here to hold on to the lexer.
-	const text = "erroneous{{define}}{{else}}1234"
-	lexer := lex("foo", text, "{{", "}}", false, true, true)
-	_, err := New("root").parseLexer(lexer)
-	if err == nil {
-		t.Fatalf("expected error")
-	}
-	// The error should have drained the input. Therefore, the lexer should be shut down.
-	token, ok := <-lexer.items
-	if ok {
-		t.Fatalf("input was not drained; got %v", token)
-	}
-}

// parseLexer is a local version of parse that lets us pass in the lexer instead of building it.
// We expect an error, so the tree set and funcs list are explicitly nil.
func (t *Tree) parseLexer(lex *lexer) (tree *Tree, err error) {
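TestShutdown existed only to verify that a parse error drained the items channel so the lexing goroutine could exit; with no goroutine and no channel there is nothing left to shut down, so the test is deleted outright rather than rewritten. If an analogous regression test for the new design were wanted, it could only be a package-internal check along these lines; this is a hypothetical sketch, not part of the change, and it assumes the internal lex, item (with its typ field), and itemError identifiers plus an input the lexer itself rejects (an unclosed action):

// Hypothetical sketch, not part of this change: lex runs eagerly, so an
// input with an unclosed action leaves the queue already terminated by an
// itemError token.
func TestLexErrorEndsQueue(t *testing.T) {
	l := lex("foo", "unclosed{{action", "{{", "}}", false, true, true)
	if n := len(l.items); n == 0 || l.items[n-1].typ != itemError {
		t.Fatalf("queue should end in itemError, got %v", l.items)
	}
}

Because lex now fills the queue before returning, inspecting l.items directly is enough; there is no goroutine whose exit would need to be observed.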
1 change: 0 additions & 1 deletion src/text/template/parse/parse.go
@@ -210,7 +210,6 @@ func (t *Tree) recover(errp *error) {
			panic(e)
		}
		if t != nil {
-			t.lex.drain()
			t.stopParse()
		}
		*errp = e.(error)