From 5f1e7ef9c883b76a9c1b3636936d91ec0821d922 Mon Sep 17 00:00:00 2001
From: Muir Manders
Date: Fri, 7 Jun 2019 12:18:52 -0700
Subject: [PATCH] internal/lsp: improve error handling while parsing

If the context is canceled (or times out) during parsing, we were
previously caching the package with no *ast.Files. Any further LSP
queries against that package would fail because the package is already
loaded, but none of the files are mapped to the package.

Fix by propagating non-parse errors as "fatal" errors in parseFiles.
typeCheck will propagate these errors and not cache the package.

I also fixed the package cache not to cache errors from loading
packages. If a load fails with an error like "context canceled", none
of the package's files are mapped to the package, which prevents the
package from ever getting unmapped when its files are updated. I also
added a retry mechanism: if the current request is not canceled but the
package failed to load because a previous request was canceled, the
current request can try loading the package again.
---
 internal/lsp/cache/check.go | 26 ++++++++++++++--
 internal/lsp/cache/load.go  |  5 +++-
 internal/lsp/cache/parse.go | 59 ++++++++++++++++++++++++-------------
 3 files changed, 67 insertions(+), 23 deletions(-)

diff --git a/internal/lsp/cache/check.go b/internal/lsp/cache/check.go
index 457bdde98e6..c295c298a0d 100644
--- a/internal/lsp/cache/check.go
+++ b/internal/lsp/cache/check.go
@@ -45,6 +45,7 @@ func (imp *importer) getPkg(pkgPath string) (*pkg, error) {
 	}
 	imp.view.pcache.mu.Lock()
 	e, ok := imp.view.pcache.packages[pkgPath]
+
 	if ok {
 		// cache hit
 		imp.view.pcache.mu.Unlock()
@@ -61,9 +62,21 @@ func (imp *importer) getPkg(pkgPath string) (*pkg, error) {
 		e.pkg, e.err = imp.typeCheck(pkgPath)
 		close(e.ready)
 	}
+
 	if e.err != nil {
+		// If the import had been previously canceled, and that error cached, try again.
+		if e.err == context.Canceled && imp.ctx.Err() == nil {
+			imp.view.pcache.mu.Lock()
+			// Clear out canceled cache entry if it is still there.
+			if imp.view.pcache.packages[pkgPath] == e {
+				delete(imp.view.pcache.packages, pkgPath)
+			}
+			imp.view.pcache.mu.Unlock()
+			return imp.getPkg(pkgPath)
+		}
 		return nil, e.err
 	}
+
 	return e.pkg, nil
 }
 
@@ -104,10 +117,19 @@ func (imp *importer) typeCheck(pkgPath string) (*pkg, error) {
 	ignoreFuncBodies := imp.topLevelPkgID != pkg.id
 
 	// Don't type-check function bodies if we are not in the top-level package.
-	files, errs := imp.parseFiles(meta.files, ignoreFuncBodies)
-	for _, err := range errs {
+	files, parseErrs, err := imp.parseFiles(meta.files, ignoreFuncBodies)
+	if err != nil {
+		return nil, err
+	}
+	for _, err := range parseErrs {
 		appendError(err)
 	}
+
+	// If something unexpected happens, don't cache a package with 0 parsed files.
+	if len(files) == 0 {
+		return nil, fmt.Errorf("no parsed files for package %s", pkg.pkgPath)
+	}
+
 	pkg.syntax = files
 
 	// Handle circular imports by copying previously seen imports.
diff --git a/internal/lsp/cache/load.go b/internal/lsp/cache/load.go
index f6522970892..e8b6890f27c 100644
--- a/internal/lsp/cache/load.go
+++ b/internal/lsp/cache/load.go
@@ -52,9 +52,12 @@ func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Er
 	}
 	// Type-check package.
 	pkg, err := imp.getPkg(f.meta.pkgPath)
-	if pkg == nil || pkg.IsIllTyped() {
+	if err != nil {
 		return nil, err
 	}
+	if pkg == nil || pkg.IsIllTyped() {
+		return nil, fmt.Errorf("loadParseTypecheck: %s is ill typed", f.meta.pkgPath)
+	}
 	// If we still have not found the package for the file, something is wrong.
 	if f.pkg == nil {
 		return nil, fmt.Errorf("loadParseTypeCheck: no package found for %v", f.filename())
diff --git a/internal/lsp/cache/parse.go b/internal/lsp/cache/parse.go
index 25e4720ac63..113743a4b50 100644
--- a/internal/lsp/cache/parse.go
+++ b/internal/lsp/cache/parse.go
@@ -28,32 +28,48 @@ func parseFile(fset *token.FileSet, filename string, src []byte) (*ast.File, err
 var ioLimit = make(chan bool, 20)
 
 // parseFiles reads and parses the Go source files and returns the ASTs
-// of the ones that could be at least partially parsed, along with a
-// list of I/O and parse errors encountered.
+// of the ones that could be at least partially parsed, along with a list
+// of parse errors encountered, and a fatal error that prevented parsing.
 //
 // Because files are scanned in parallel, the token.Pos
 // positions of the resulting ast.Files are not ordered.
 //
-func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*astFile, []error) {
-	var wg sync.WaitGroup
-	n := len(filenames)
-	parsed := make([]*astFile, n)
-	errors := make([]error, n)
+func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*astFile, []error, error) {
+	var (
+		wg       sync.WaitGroup
+		mu       sync.Mutex
+		n        = len(filenames)
+		parsed   = make([]*astFile, n)
+		errors   = make([]error, n)
+		fatalErr error
+	)
+
+	setFatalErr := func(err error) {
+		mu.Lock()
+		fatalErr = err
+		mu.Unlock()
+	}
+
 	for i, filename := range filenames {
-		if imp.ctx.Err() != nil {
-			parsed[i], errors[i] = nil, imp.ctx.Err()
-			continue
+		if err := imp.ctx.Err(); err != nil {
+			setFatalErr(err)
+			break
 		}
 		// First, check if we have already cached an AST for this file.
 		f, err := imp.view.findFile(span.FileURI(filename))
-		if err != nil || f == nil {
-			parsed[i], errors[i] = nil, err
-			continue
+		if err != nil {
+			setFatalErr(err)
+			break
 		}
+		if f == nil {
+			setFatalErr(fmt.Errorf("could not find file %s", filename))
+			break
+		}
+
 		gof, ok := f.(*goFile)
 		if !ok {
-			parsed[i], errors[i] = nil, fmt.Errorf("non-Go file in parse call: %v", filename)
-			continue
+			setFatalErr(fmt.Errorf("non-Go file in parse call: %s", filename))
+			break
 		}
 		wg.Add(1)
 		go func(i int, filename string) {
@@ -71,14 +87,13 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
 			}
 
 			// We don't have a cached AST for this file, so we read its content and parse it.
-			data, _, err := gof.Handle(imp.ctx).Read(imp.ctx)
+			src, _, err := gof.Handle(imp.ctx).Read(imp.ctx)
 			if err != nil {
-				parsed[i], errors[i] = nil, err
+				setFatalErr(err)
 				return
 			}
-			src := data
 			if src == nil {
-				parsed[i], errors[i] = nil, fmt.Errorf("no source for %v", filename)
+				setFatalErr(fmt.Errorf("no source for %v", filename))
 				return
 			}
 
@@ -102,6 +117,10 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
 	}
 	wg.Wait()
 
+	if fatalErr != nil {
+		return nil, nil, fatalErr
+	}
+
 	// Eliminate nils, preserving order.
 	var o int
 	for _, f := range parsed {
@@ -121,7 +140,7 @@ func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) ([]*a
 	}
 	errors = errors[:o]
 
-	return parsed, errors
+	return parsed, errors, nil
 }
 
 // sameFile returns true if x and y have the same basename and denote
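
The retry logic added to getPkg above can be shown in isolation. The following is a minimal sketch of the same pattern under assumed names (cache, entry, load are invented for the example and are not gopls APIs): when a cached load failed with context.Canceled but the current request's context is still live, the stale entry is dropped and the load is retried.

package main

import (
	"context"
	"fmt"
	"sync"
)

// entry holds the result of a single load; ready is closed once val and err are set.
type entry struct {
	ready chan struct{}
	val   string
	err   error
}

// cache maps keys to load results, mirroring the shape of the package cache in the patch.
type cache struct {
	mu      sync.Mutex
	entries map[string]*entry
}

// get returns the cached value for key, loading it if necessary. If a previous
// load was canceled but the current context is not, the stale entry is cleared
// and the load is retried (hypothetical reduction of the getPkg change).
func (c *cache) get(ctx context.Context, key string, load func(context.Context) (string, error)) (string, error) {
	c.mu.Lock()
	e, ok := c.entries[key]
	if ok {
		// Cache hit: wait for the in-flight or completed load.
		c.mu.Unlock()
		<-e.ready
	} else {
		// Cache miss: this caller performs the load and publishes the result.
		e = &entry{ready: make(chan struct{})}
		c.entries[key] = e
		c.mu.Unlock()
		e.val, e.err = load(ctx)
		close(e.ready)
	}
	if e.err == context.Canceled && ctx.Err() == nil {
		// A previous request was canceled, but this one was not:
		// clear the canceled entry if it is still there, then retry.
		c.mu.Lock()
		if c.entries[key] == e {
			delete(c.entries, key)
		}
		c.mu.Unlock()
		return c.get(ctx, key, load)
	}
	if e.err != nil {
		return "", e.err
	}
	return e.val, nil
}

func main() {
	c := &cache{entries: map[string]*entry{}}
	calls := 0
	// The first load simulates a canceled request; the retry succeeds.
	v, err := c.get(context.Background(), "k", func(ctx context.Context) (string, error) {
		calls++
		if calls == 1 {
			return "", context.Canceled
		}
		return "value", nil
	})
	fmt.Println(v, err) // value <nil>
}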