diff --git a/go.mod b/go.mod index 26c7cdc9aec..24cca8bec0f 100644 --- a/go.mod +++ b/go.mod @@ -5,8 +5,8 @@ go 1.18 // tagx:compat 1.16 require ( github.com/yuin/goldmark v1.4.13 golang.org/x/mod v0.12.0 - golang.org/x/net v0.14.0 - golang.org/x/sys v0.11.0 + golang.org/x/net v0.15.0 + golang.org/x/sys v0.12.0 ) require golang.org/x/sync v0.3.0 diff --git a/go.sum b/go.sum index d2f1de7aba3..2c884cc6e39 100644 --- a/go.sum +++ b/go.sum @@ -2,7 +2,7 @@ github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= @@ -12,8 +12,8 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= -golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/sync 
v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -26,19 +26,19 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= -golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text 
v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= diff --git a/go/analysis/analysistest/analysistest.go b/go/analysis/analysistest/analysistest.go index 6a27edb1064..63ca6e9eb2e 100644 --- a/go/analysis/analysistest/analysistest.go +++ b/go/analysis/analysistest/analysistest.go @@ -98,6 +98,34 @@ type Testing interface { // println() // } // } +// +// # Conflicts +// +// A single analysis pass may offer two or more suggested fixes that +// (1) conflict but are nonetheless logically composable, (e.g. +// because both update the import declaration), or (2) are +// fundamentally incompatible (e.g. alternative fixes to the same +// statement). +// +// It is up to the driver to decide how to apply such fixes. A +// sophisticated driver could attempt to resolve conflicts of the +// first kind, but this test driver simply reports the fact of the +// conflict with the expectation that the user will split their tests +// into nonconflicting parts. +// +// Conflicts of the second kind can be avoided by giving the +// alternative fixes different names (SuggestedFix.Message) and using +// a multi-section .txtar file with a named section for each +// alternative fix. +// +// Analyzers that compute fixes from a textual diff of the +// before/after file contents (instead of directly from syntax tree +// positions) may produce fixes that, although logically +// non-conflicting, nonetheless conflict due to the particulars of the +// diff algorithm. In such cases it may suffice to introduce +// sufficient separation of the statements in the test input so that +// the computed diffs do not overlap. If that fails, break the test +// into smaller parts. 
func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result { r := Run(t, dir, a, patterns...) @@ -135,7 +163,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns continue } if _, ok := fileContents[file]; !ok { - contents, err := ioutil.ReadFile(file.Name()) + contents, err := os.ReadFile(file.Name()) if err != nil { t.Errorf("error reading %s: %v", file.Name(), err) } @@ -186,7 +214,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns found = true out, err := diff.ApplyBytes(orig, edits) if err != nil { - t.Errorf("%s: error applying fixes: %v", file.Name(), err) + t.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", file.Name(), err) continue } // the file may contain multiple trailing @@ -220,7 +248,7 @@ func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns out, err := diff.ApplyBytes(orig, catchallEdits) if err != nil { - t.Errorf("%s: error applying fixes: %v", file.Name(), err) + t.Errorf("%s: error applying fixes: %v (see possible explanations at RunWithSuggestedFixes)", file.Name(), err) continue } want := string(ar.Comment) diff --git a/go/analysis/internal/versiontest/version_test.go b/go/analysis/internal/versiontest/version_test.go index 45eef8b89d2..43c52f565f7 100644 --- a/go/analysis/internal/versiontest/version_test.go +++ b/go/analysis/internal/versiontest/version_test.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/analysis/analysistest" "golang.org/x/tools/go/analysis/multichecker" "golang.org/x/tools/go/analysis/singlechecker" + "golang.org/x/tools/internal/testenv" ) var analyzer = &analysis.Analyzer{ @@ -60,6 +61,8 @@ func TestAnalysistest(t *testing.T) { } func TestMultichecker(t *testing.T) { + testenv.NeedsGoPackages(t) + exe, err := os.Executable() if err != nil { t.Fatal(err) @@ -74,6 +77,8 @@ func TestMultichecker(t *testing.T) { } func TestSinglechecker(t 
*testing.T) { + testenv.NeedsGoPackages(t) + exe, err := os.Executable() if err != nil { t.Fatal(err) @@ -88,6 +93,8 @@ func TestSinglechecker(t *testing.T) { } func TestVettool(t *testing.T) { + testenv.NeedsGoPackages(t) + exe, err := os.Executable() if err != nil { t.Fatal(err) diff --git a/go/analysis/passes/copylock/copylock.go b/go/analysis/passes/copylock/copylock.go index ff2b41ac4aa..ec7727de769 100644 --- a/go/analysis/passes/copylock/copylock.go +++ b/go/analysis/passes/copylock/copylock.go @@ -223,6 +223,8 @@ func (path typePath) String() string { } func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { + x = analysisutil.Unparen(x) // ignore parens on rhs + if _, ok := x.(*ast.CompositeLit); ok { return nil } @@ -231,7 +233,7 @@ func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { return nil } if star, ok := x.(*ast.StarExpr); ok { - if _, ok := star.X.(*ast.CallExpr); ok { + if _, ok := analysisutil.Unparen(star.X).(*ast.CallExpr); ok { // A call may return a pointer to a zero value. return nil } diff --git a/go/analysis/passes/copylock/testdata/src/a/copylock.go b/go/analysis/passes/copylock/testdata/src/a/copylock.go index 4ab66dca1f6..2f0f8136628 100644 --- a/go/analysis/passes/copylock/testdata/src/a/copylock.go +++ b/go/analysis/passes/copylock/testdata/src/a/copylock.go @@ -34,6 +34,9 @@ func OkFunc() { xx := struct{ L *sync.Mutex }{ L: new(sync.Mutex), } + + var pz = (sync.Mutex{}) + pw := (sync.Mutex{}) } type Tlock struct { @@ -214,3 +217,11 @@ func AtomicTypesCheck() { vP := &vX vZ := &atomic.Value{} } + +// PointerRhsCheck checks that exceptions are made for pointer return values of +// function calls. These may be zero initialized so they are considered OK. 
+func PointerRhsCheck() { + newMutex := func() *sync.Mutex { return new(sync.Mutex) } + d := *newMutex() + pd := *(newMutex()) +} diff --git a/go/analysis/unitchecker/export_test.go b/go/analysis/unitchecker/export_test.go new file mode 100644 index 00000000000..04eacc47576 --- /dev/null +++ b/go/analysis/unitchecker/export_test.go @@ -0,0 +1,26 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unitchecker + +import ( + "go/token" + "go/types" +) + +// This file exposes various internal hooks to the separate_test. +// +// TODO(adonovan): expose a public API to unitchecker that doesn't +// rely on details of JSON .cfg files or enshrine I/O decisions or +// assumptions about how "go vet" locates things. Ideally the new Run +// function would accept an interface, and a Config file would be just +// one way--the go vet way--to implement it. + +func SetTypeImportExport( + MakeTypesImporter func(*Config, *token.FileSet) types.Importer, + ExportTypes func(*Config, *token.FileSet, *types.Package) error, +) { + makeTypesImporter = MakeTypesImporter + exportTypes = ExportTypes +} diff --git a/go/analysis/unitchecker/separate_test.go b/go/analysis/unitchecker/separate_test.go new file mode 100644 index 00000000000..37e74e481ec --- /dev/null +++ b/go/analysis/unitchecker/separate_test.go @@ -0,0 +1,309 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unitchecker_test + +// This file illustrates separate analysis with an example. 
+ +import ( + "bytes" + "encoding/json" + "fmt" + "go/token" + "go/types" + "io" + "os" + "path/filepath" + "strings" + "sync/atomic" + "testing" + + "golang.org/x/tools/go/analysis/passes/printf" + "golang.org/x/tools/go/analysis/unitchecker" + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/txtar" +) + +// TestExampleSeparateAnalysis demonstrates the principle of separate +// analysis, the distribution of units of type-checking and analysis +// work across several processes, using serialized summaries to +// communicate between them. +// +// It uses two different kinds of task, "manager" and "worker": +// +// - The manager computes the graph of package dependencies, and makes +// a request to the worker for each package. It does not parse, +// type-check, or analyze Go code. It is analogous "go vet". +// +// - The worker, which contains the Analyzers, reads each request, +// loads, parses, and type-checks the files of one package, +// applies all necessary analyzers to the package, then writes +// its results to a file. It is a unitchecker-based driver, +// analogous to the program specified by go vet -vettool= flag. +// +// In practice these would be separate executables, but for simplicity +// of this example they are provided by one executable in two +// different modes: the Example function is the manager, and the same +// executable invoked with ENTRYPOINT=worker is the worker. +// (See TestIntegration for how this happens.) +// +// Unfortunately this can't be a true Example because of the skip, +// which requires a testing.T. +func TestExampleSeparateAnalysis(t *testing.T) { + testenv.NeedsGoPackages(t) + + // src is an archive containing a module with a printf mistake. 
+ const src = ` +-- go.mod -- +module separate +go 1.18 + +-- main/main.go -- +package main + +import "separate/lib" + +func main() { + lib.MyPrintf("%s", 123) +} + +-- lib/lib.go -- +package lib + +import "fmt" + +func MyPrintf(format string, args ...any) { + fmt.Printf(format, args...) +} +` + + // Expand archive into tmp tree. + tmpdir := t.TempDir() + if err := extractTxtar(txtar.Parse([]byte(src)), tmpdir); err != nil { + t.Fatal(err) + } + + // Load metadata for the main package and all its dependencies. + cfg := &packages.Config{ + Mode: packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedImports | packages.NeedModule, + Dir: tmpdir, + Env: append(os.Environ(), + "GOPROXY=off", // disable network + "GOWORK=off", // an ambient GOWORK value would break package loading + ), + Logf: t.Logf, + } + pkgs, err := packages.Load(cfg, "separate/main") + if err != nil { + t.Fatal(err) + } + // Stop if any package had a metadata error. + if packages.PrintErrors(pkgs) > 0 { + t.Fatal("there were errors among loaded packages") + } + + // Now we have loaded the import graph, + // let's begin the proper work of the manager. + + // Gather root packages. They will get all analyzers, + // whereas dependencies get only the subset that + // produce facts or are required by them. + roots := make(map[*packages.Package]bool) + for _, pkg := range pkgs { + roots[pkg] = true + } + + // nextID generates sequence numbers for each unit of work. + // We use it to create names of temporary files. + var nextID atomic.Int32 + + var allDiagnostics []string + + // Visit all packages in postorder: dependencies first. + // TODO(adonovan): opt: use parallel postorder. + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + if pkg.PkgPath == "unsafe" { + return + } + + // Choose a unique prefix for temporary files + // (.cfg .types .facts) produced by this package. 
+ // We stow it in an otherwise unused field of + // Package so it can be accessed by our importers. + prefix := fmt.Sprintf("%s/%d", tmpdir, nextID.Add(1)) + pkg.ExportFile = prefix + + // Construct the request to the worker. + var ( + importMap = make(map[string]string) + packageFile = make(map[string]string) + packageVetx = make(map[string]string) + ) + for importPath, dep := range pkg.Imports { + importMap[importPath] = dep.PkgPath + if depPrefix := dep.ExportFile; depPrefix != "" { // skip "unsafe" + packageFile[dep.PkgPath] = depPrefix + ".types" + packageVetx[dep.PkgPath] = depPrefix + ".facts" + } + } + cfg := unitchecker.Config{ + ID: pkg.ID, + ImportPath: pkg.PkgPath, + GoFiles: pkg.CompiledGoFiles, + NonGoFiles: pkg.OtherFiles, + IgnoredFiles: pkg.IgnoredFiles, + ImportMap: importMap, + PackageFile: packageFile, + PackageVetx: packageVetx, + VetxOnly: !roots[pkg], + VetxOutput: prefix + ".facts", + } + if pkg.Module != nil { + if v := pkg.Module.GoVersion; v != "" { + cfg.GoVersion = "go" + v + } + } + + // Write the JSON configuration message to a file. + cfgData, err := json.Marshal(cfg) + if err != nil { + t.Fatalf("internal error in json.Marshal: %v", err) + } + cfgFile := prefix + ".cfg" + if err := os.WriteFile(cfgFile, cfgData, 0666); err != nil { + t.Fatal(err) + } + + // Send the request to the worker. + cmd := testenv.Command(t, os.Args[0], "-json", cfgFile) + cmd.Stderr = os.Stderr + cmd.Stdout = new(bytes.Buffer) + cmd.Env = append(os.Environ(), "ENTRYPOINT=worker") + if err := cmd.Run(); err != nil { + t.Fatal(err) + } + + // Parse JSON output and gather in allDiagnostics. 
+ dec := json.NewDecoder(cmd.Stdout.(io.Reader)) + for { + type jsonDiagnostic struct { + Posn string `json:"posn"` + Message string `json:"message"` + } + // 'results' maps Package.Path -> Analyzer.Name -> diagnostics + var results map[string]map[string][]jsonDiagnostic + if err := dec.Decode(&results); err != nil { + if err == io.EOF { + break + } + t.Fatalf("internal error decoding JSON: %v", err) + } + for _, result := range results { + for analyzer, diags := range result { + for _, diag := range diags { + rel := strings.ReplaceAll(diag.Posn, tmpdir, "") + rel = filepath.ToSlash(rel) + msg := fmt.Sprintf("%s: [%s] %s", rel, analyzer, diag.Message) + allDiagnostics = append(allDiagnostics, msg) + } + } + } + } + }) + + // Observe that the example produces a fact-based diagnostic + // from separate analysis of "main", "lib", and "fmt": + + const want = `/main/main.go:6:2: [printf] separate/lib.MyPrintf format %s has arg 123 of wrong type int` + if got := strings.Join(allDiagnostics, "\n"); got != want { + t.Errorf("Got: %s\nWant: %s", got, want) + } +} + +// -- worker process -- + +// worker is the main entry point for a unitchecker-based driver +// with only a single analyzer, for illustration. +func worker() { + // Currently the unitchecker API doesn't allow clients to + // control exactly how and where fact and type information + // is produced and consumed. + // + // So, for example, it assumes that type information has + // already been produced by the compiler, which is true when + // running under "go vet", but isn't necessary. It may be more + // convenient and efficient for a distributed analysis system + // if the worker generates both of them, which is the approach + // taken in this example; they could even be saved as two + // sections of a single file. + // + // Consequently, this test currently needs special access to + // private hooks in unitchecker to control how and where facts + // and types are produced and consumed. 
In due course this + // will become a respectable public API. In the meantime, it + // should at least serve as a demonstration of how one could + // fork unitchecker to achieve separate analysis without go vet. + unitchecker.SetTypeImportExport(makeTypesImporter, exportTypes) + + unitchecker.Main(printf.Analyzer) +} + +func makeTypesImporter(cfg *unitchecker.Config, fset *token.FileSet) types.Importer { + imports := make(map[string]*types.Package) + return importerFunc(func(importPath string) (*types.Package, error) { + // Resolve import path to package path (vendoring, etc) + path, ok := cfg.ImportMap[importPath] + if !ok { + return nil, fmt.Errorf("can't resolve import %q", path) + } + if path == "unsafe" { + return types.Unsafe, nil + } + + // Find, read, and decode file containing type information. + file, ok := cfg.PackageFile[path] + if !ok { + return nil, fmt.Errorf("no package file for %q", path) + } + f, err := os.Open(file) + if err != nil { + return nil, err + } + defer f.Close() // ignore error + return gcexportdata.Read(f, fset, imports, path) + }) +} + +func exportTypes(cfg *unitchecker.Config, fset *token.FileSet, pkg *types.Package) error { + var out bytes.Buffer + if err := gcexportdata.Write(&out, fset, pkg); err != nil { + return err + } + typesFile := strings.TrimSuffix(cfg.VetxOutput, ".facts") + ".types" + return os.WriteFile(typesFile, out.Bytes(), 0666) +} + +// -- helpers -- + +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +// extractTxtar writes each archive file to the corresponding location beneath dir. +// +// TODO(adonovan): move this to txtar package, we need it all the time (#61386). 
+func extractTxtar(ar *txtar.Archive, dir string) error { + for _, file := range ar.Files { + name := filepath.Join(dir, file.Name) + if err := os.MkdirAll(filepath.Dir(name), 0777); err != nil { + return err + } + if err := os.WriteFile(name, file.Data, 0666); err != nil { + return err + } + } + return nil +} diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go index 88527d7a8e2..4ff45feb4ce 100644 --- a/go/analysis/unitchecker/unitchecker.go +++ b/go/analysis/unitchecker/unitchecker.go @@ -38,7 +38,6 @@ import ( "go/token" "go/types" "io" - "io/ioutil" "log" "os" "path/filepath" @@ -59,19 +58,19 @@ import ( // whose name ends with ".cfg". type Config struct { ID string // e.g. "fmt [fmt.test]" - Compiler string - Dir string - ImportPath string + Compiler string // gc or gccgo, provided to MakeImporter + Dir string // (unused) + ImportPath string // package path GoVersion string // minimum required Go version, such as "go1.21.0" GoFiles []string NonGoFiles []string IgnoredFiles []string - ImportMap map[string]string - PackageFile map[string]string - Standard map[string]bool - PackageVetx map[string]string - VetxOnly bool - VetxOutput string + ImportMap map[string]string // maps import path to package path + PackageFile map[string]string // maps package path to file of type information + Standard map[string]bool // package belongs to standard library + PackageVetx map[string]string // maps package path to file of fact information + VetxOnly bool // run analysis only for facts, not diagnostics + VetxOutput string // where to write file of fact information SucceedOnTypecheckFailure bool } @@ -167,7 +166,7 @@ func Run(configFile string, analyzers []*analysis.Analyzer) { } func readConfig(filename string) (*Config, error) { - data, err := ioutil.ReadFile(filename) + data, err := os.ReadFile(filename) if err != nil { return nil, err } @@ -184,6 +183,55 @@ func readConfig(filename string) (*Config, error) { return cfg, nil } +type 
factImporter = func(pkgPath string) ([]byte, error) + +// These four hook variables are a proof of concept of a future +// parameterization of a unitchecker API that allows the client to +// determine how and where facts and types are produced and consumed. +// (Note that the eventual API will likely be quite different.) +// +// The defaults honor a Config in a manner compatible with 'go vet'. +var ( + makeTypesImporter = func(cfg *Config, fset *token.FileSet) types.Importer { + return importer.ForCompiler(fset, cfg.Compiler, func(importPath string) (io.ReadCloser, error) { + // Resolve import path to package path (vendoring, etc) + path, ok := cfg.ImportMap[importPath] + if !ok { + return nil, fmt.Errorf("can't resolve import %q", path) + } + + // path is a resolved package path, not an import path. + file, ok := cfg.PackageFile[path] + if !ok { + if cfg.Compiler == "gccgo" && cfg.Standard[path] { + return nil, nil // fall back to default gccgo lookup + } + return nil, fmt.Errorf("no package file for %q", path) + } + return os.Open(file) + }) + } + + exportTypes = func(*Config, *token.FileSet, *types.Package) error { + // By default this is a no-op, because "go vet" + // makes the compiler produce type information. + return nil + } + + makeFactImporter = func(cfg *Config) factImporter { + return func(pkgPath string) ([]byte, error) { + if vetx, ok := cfg.PackageVetx[pkgPath]; ok { + return os.ReadFile(vetx) + } + return nil, nil // no .vetx file, no facts + } + } + + exportFacts = func(cfg *Config, data []byte) error { + return os.WriteFile(cfg.VetxOutput, data, 0666) + } +) + func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]result, error) { // Load, parse, typecheck. 
var files []*ast.File @@ -199,27 +247,9 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re } files = append(files, f) } - compilerImporter := importer.ForCompiler(fset, cfg.Compiler, func(path string) (io.ReadCloser, error) { - // path is a resolved package path, not an import path. - file, ok := cfg.PackageFile[path] - if !ok { - if cfg.Compiler == "gccgo" && cfg.Standard[path] { - return nil, nil // fall back to default gccgo lookup - } - return nil, fmt.Errorf("no package file for %q", path) - } - return os.Open(file) - }) - importer := importerFunc(func(importPath string) (*types.Package, error) { - path, ok := cfg.ImportMap[importPath] // resolve vendoring, etc - if !ok { - return nil, fmt.Errorf("can't resolve import %q", path) - } - return compilerImporter.Import(path) - }) tc := &types.Config{ - Importer: importer, - Sizes: types.SizesFor("gc", build.Default.GOARCH), // assume gccgo ≡ gc? + Importer: makeTypesImporter(cfg, fset), + Sizes: types.SizesFor("gc", build.Default.GOARCH), // TODO(adonovan): use cfg.Compiler GoVersion: cfg.GoVersion, } info := &types.Info{ @@ -288,13 +318,7 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re analyzers = filtered // Read facts from imported packages. 
- read := func(pkgPath string) ([]byte, error) { - if vetx, ok := cfg.PackageVetx[pkgPath]; ok { - return ioutil.ReadFile(vetx) - } - return nil, nil // no .vetx file, no facts - } - facts, err := facts.NewDecoder(pkg).Decode(false, read) + facts, err := facts.NewDecoder(pkg).Decode(false, makeFactImporter(cfg)) if err != nil { return nil, err } @@ -394,8 +418,11 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re } data := facts.Encode(false) - if err := ioutil.WriteFile(cfg.VetxOutput, data, 0666); err != nil { - return nil, fmt.Errorf("failed to write analysis facts: %v", err) + if err := exportFacts(cfg, data); err != nil { + return nil, fmt.Errorf("failed to export analysis facts: %v", err) + } + if err := exportTypes(cfg, fset, pkg); err != nil { + return nil, fmt.Errorf("failed to export type information: %v", err) } return results, nil @@ -406,7 +433,3 @@ type result struct { diagnostics []analysis.Diagnostic err error } - -type importerFunc func(path string) (*types.Package, error) - -func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go index 1ed001247c6..270a3582ccf 100644 --- a/go/analysis/unitchecker/unitchecker_test.go +++ b/go/analysis/unitchecker/unitchecker_test.go @@ -29,6 +29,9 @@ func TestMain(m *testing.M) { case "minivet": minivet() panic("unreachable") + case "worker": + worker() // see ExampleSeparateAnalysis + panic("unreachable") } // test process diff --git a/go/internal/packagesdriver/sizes.go b/go/internal/packagesdriver/sizes.go index 18a002f82a1..0454cdd78e5 100644 --- a/go/internal/packagesdriver/sizes.go +++ b/go/internal/packagesdriver/sizes.go @@ -8,7 +8,6 @@ package packagesdriver import ( "context" "fmt" - "go/types" "strings" "golang.org/x/tools/internal/gocommand" @@ -16,7 +15,7 @@ import ( var debug = false -func GetSizesGolist(ctx context.Context, inv 
gocommand.Invocation, gocmdRunner *gocommand.Runner) (types.Sizes, error) { +func GetSizesForArgsGolist(ctx context.Context, inv gocommand.Invocation, gocmdRunner *gocommand.Runner) (string, string, error) { inv.Verb = "list" inv.Args = []string{"-f", "{{context.GOARCH}} {{context.Compiler}}", "--", "unsafe"} stdout, stderr, friendlyErr, rawErr := gocmdRunner.RunRaw(ctx, inv) @@ -29,21 +28,21 @@ func GetSizesGolist(ctx context.Context, inv gocommand.Invocation, gocmdRunner * inv.Args = []string{"GOARCH"} envout, enverr := gocmdRunner.Run(ctx, inv) if enverr != nil { - return nil, enverr + return "", "", enverr } goarch = strings.TrimSpace(envout.String()) compiler = "gc" } else { - return nil, friendlyErr + return "", "", friendlyErr } } else { fields := strings.Fields(stdout.String()) if len(fields) < 2 { - return nil, fmt.Errorf("could not parse GOARCH and Go compiler in format \" \":\nstdout: <<%s>>\nstderr: <<%s>>", + return "", "", fmt.Errorf("could not parse GOARCH and Go compiler in format \" \":\nstdout: <<%s>>\nstderr: <<%s>>", stdout.String(), stderr.String()) } goarch = fields[0] compiler = fields[1] } - return types.SizesFor(compiler, goarch), nil + return compiler, goarch, nil } diff --git a/go/packages/golist.go b/go/packages/golist.go index 58230038a7c..b5de9cf9f21 100644 --- a/go/packages/golist.go +++ b/go/packages/golist.go @@ -9,7 +9,6 @@ import ( "context" "encoding/json" "fmt" - "go/types" "io/ioutil" "log" "os" @@ -153,10 +152,10 @@ func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { sizeswg.Add(1) go func() { - var sizes types.Sizes - sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, state.cfgInvocation(), cfg.gocmdRunner) - // types.SizesFor always returns nil or a *types.StdSizes. 
- response.dr.Sizes, _ = sizes.(*types.StdSizes) + compiler, arch, err := packagesdriver.GetSizesForArgsGolist(ctx, state.cfgInvocation(), cfg.gocmdRunner) + sizeserr = err + response.dr.Compiler = compiler + response.dr.Arch = arch sizeswg.Done() }() } diff --git a/go/packages/packages.go b/go/packages/packages.go index da1a27eea62..124a6fe143b 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -220,8 +220,10 @@ type driverResponse struct { // lists of multiple drivers, go/packages will fall back to the next driver. NotHandled bool - // Sizes, if not nil, is the types.Sizes to use when type checking. - Sizes *types.StdSizes + // Compiler and Arch are the arguments pass of types.SizesFor + // to get a types.Sizes to use when type checking. + Compiler string + Arch string // Roots is the set of package IDs that make up the root packages. // We have to encode this separately because when we encode a single package @@ -262,7 +264,7 @@ func Load(cfg *Config, patterns ...string) ([]*Package, error) { if err != nil { return nil, err } - l.sizes = response.Sizes + l.sizes = types.SizesFor(response.Compiler, response.Arch) return l.refine(response) } diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go index c725d839ba1..fa5834baf72 100644 --- a/go/types/objectpath/objectpath.go +++ b/go/types/objectpath/objectpath.go @@ -32,6 +32,7 @@ import ( _ "unsafe" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" ) // A Path is an opaque name that identifies a types.Object @@ -127,12 +128,15 @@ type Encoder struct { skipMethodSorting bool } -// Exposed to gopls via golang.org/x/tools/internal/typesinternal -// TODO(golang/go#61443): eliminate this parameter one way or the other. +// Expose back doors so that gopls can avoid method sorting, which can dominate +// analysis on certain repositories. 
// -//go:linkname skipMethodSorting -func skipMethodSorting(enc *Encoder) { - enc.skipMethodSorting = true +// TODO(golang/go#61443): remove this. +func init() { + typesinternal.SkipEncoderMethodSorting = func(enc interface{}) { + enc.(*Encoder).skipMethodSorting = true + } + typesinternal.ObjectpathObject = object } // For returns the path to an object relative to its package, @@ -572,17 +576,16 @@ func findTypeParam(obj types.Object, list *typeparams.TypeParamList, path []byte // Object returns the object denoted by path p within the package pkg. func Object(pkg *types.Package, p Path) (types.Object, error) { - return object(pkg, p, false) + return object(pkg, string(p), false) } // Note: the skipMethodSorting parameter must match the value of // Encoder.skipMethodSorting used during encoding. -func object(pkg *types.Package, p Path, skipMethodSorting bool) (types.Object, error) { - if p == "" { +func object(pkg *types.Package, pathstr string, skipMethodSorting bool) (types.Object, error) { + if pathstr == "" { return nil, fmt.Errorf("empty path") } - pathstr := string(p) var pkgobj, suffix string if dot := strings.IndexByte(pathstr, opType); dot < 0 { pkgobj = pathstr diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index 48c98e0cb39..2ff9434d0b6 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -494,6 +494,24 @@ This is one of the simplifications that "gofmt -s" applies. **Enabled by default.** +## **slog** + +check for invalid structured logging calls + +The slog checker looks for calls to functions from the log/slog +package that take alternating key-value pairs. It reports calls +where an argument in a key position is neither a string nor a +slog.Attr, and where a final key is missing its value. 
+For example,it would report + + slog.Warn("message", 11, "k") // slog.Warn arg "11" should be a string or a slog.Attr + +and + + slog.Info("message", "k1", v1, "k2") // call to slog.Info missing a final value + +**Enabled by default.** + ## **sortslice** check the argument type of sort.Slice @@ -632,7 +650,7 @@ any parameters that are not being used. To reduce false positives it ignores: - methods -- parameters that do not have a name or are underscored +- parameters that do not have a name or have the name '_' (the blank identifier) - functions in test files - functions with empty bodies or those with just a return stmt diff --git a/gopls/doc/generate.go b/gopls/doc/generate.go index f7e69972897..51987f6a7b0 100644 --- a/gopls/doc/generate.go +++ b/gopls/doc/generate.go @@ -85,9 +85,13 @@ func doMain(write bool) (bool, error) { // pkgDir returns the directory corresponding to the import path pkgPath. func pkgDir(pkgPath string) (string, error) { - out, err := exec.Command("go", "list", "-f", "{{.Dir}}", pkgPath).Output() + cmd := exec.Command("go", "list", "-f", "{{.Dir}}", pkgPath) + out, err := cmd.Output() if err != nil { - return "", err + if ee, _ := err.(*exec.ExitError); ee != nil && len(ee.Stderr) > 0 { + return "", fmt.Errorf("%v: %w\n%s", cmd, err, ee.Stderr) + } + return "", fmt.Errorf("%v: %w", cmd, err) } return strings.TrimSpace(string(out)), nil } diff --git a/gopls/doc/generate_test.go b/gopls/doc/generate_test.go index 44e6041721d..6e1c23b94db 100644 --- a/gopls/doc/generate_test.go +++ b/gopls/doc/generate_test.go @@ -12,9 +12,9 @@ import ( func TestGenerated(t *testing.T) { testenv.NeedsGoPackages(t) - // This test fails on 1.18 Kokoro for unknown reasons; in any case, it - // suffices to run this test on any builder. - testenv.NeedsGo1Point(t, 19) + // This test fails on Kokoro, for unknown reasons, so must be run only on TryBots. + // In any case, it suffices to run this test on any builder. 
+ testenv.NeedsGo1Point(t, 21) testenv.NeedsLocalXTools(t) diff --git a/gopls/go.mod b/gopls/go.mod index f28b5bdc19b..3198e0f81bb 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -9,9 +9,9 @@ require ( github.com/sergi/go-diff v1.1.0 golang.org/x/mod v0.12.0 golang.org/x/sync v0.3.0 - golang.org/x/sys v0.11.0 - golang.org/x/telemetry v0.0.0-20230728182230-e84a26264b60 - golang.org/x/text v0.12.0 + golang.org/x/sys v0.12.0 + golang.org/x/telemetry v0.0.0-20230822160736-17171dbf1d76 + golang.org/x/text v0.13.0 golang.org/x/tools v0.6.0 golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815 gopkg.in/yaml.v3 v3.0.1 diff --git a/gopls/go.sum b/gopls/go.sum index 3745b42d613..78b66349fe3 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -44,7 +44,7 @@ github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJy github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= +golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA= golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA= golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= @@ -59,7 +59,7 @@ golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= -golang.org/x/net 
v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= +golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -73,17 +73,17 @@ golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= -golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/telemetry v0.0.0-20230728182230-e84a26264b60 h1:OCiXqf7/gdoaS7dKppAtPxi783Ke/JIb+r20ZYGiEFg= -golang.org/x/telemetry v0.0.0-20230728182230-e84a26264b60/go.mod h1:kO7uNSGGmqCHII6C0TYfaLwSBIfcyhj53//nu0+Fy4A= +golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o= +golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/telemetry v0.0.0-20230822160736-17171dbf1d76 h1:Lv25uIMpljmSMN0+GCC+xgiC/4ikIdKMkQfw/EVq2Nk= +golang.org/x/telemetry v0.0.0-20230822160736-17171dbf1d76/go.mod h1:kO7uNSGGmqCHII6C0TYfaLwSBIfcyhj53//nu0+Fy4A= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= -golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= +golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod 
h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= -golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815 h1:A9kONVi4+AnuOr1dopsibH6hLi1Huy54cbeJxnq4vmU= golang.org/x/vuln v0.0.0-20230110180137-6ad3e3d07815/go.mod h1:XJiVExZgoZfrrxoTeVsFYrSSk1snhfpOEC95JL+A4T0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/gopls/internal/bug/bug.go b/gopls/internal/bug/bug.go index 7331ba8c85c..7c290b0cd27 100644 --- a/gopls/internal/bug/bug.go +++ b/gopls/internal/bug/bug.go @@ -65,7 +65,8 @@ func Report(description string) { report(description) } -var bugReport = counter.NewStack("gopls/bug", 16) +// BugReportCount is a telemetry counter that tracks # of bug reports. +var BugReportCount = counter.NewStack("gopls/bug", 16) func report(description string) { _, file, line, ok := runtime.Caller(2) // all exported reporting functions call report directly @@ -102,7 +103,7 @@ func report(description string) { mu.Unlock() if newBug { - bugReport.Inc() + BugReportCount.Inc() } // Call the handlers outside the critical section since a // handler may itself fail and call bug.Report. Since handlers diff --git a/gopls/internal/hooks/licenses_test.go b/gopls/internal/hooks/licenses_test.go index a7853cd5f66..609f05a606c 100644 --- a/gopls/internal/hooks/licenses_test.go +++ b/gopls/internal/hooks/licenses_test.go @@ -18,7 +18,7 @@ func TestLicenses(t *testing.T) { // License text differs for older Go versions because staticcheck or gofumpt // isn't supported for those versions, and this fails for unknown, unrelated // reasons on Kokoro legacy CI. 
- testenv.NeedsGo1Point(t, 19) + testenv.NeedsGo1Point(t, 21) if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { t.Skip("generating licenses only works on Unixes") diff --git a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go index 4c933c8fb86..e0ef5ef8dfb 100644 --- a/gopls/internal/lsp/analysis/unusedparams/unusedparams.go +++ b/gopls/internal/lsp/analysis/unusedparams/unusedparams.go @@ -24,7 +24,7 @@ any parameters that are not being used. To reduce false positives it ignores: - methods -- parameters that do not have a name or are underscored +- parameters that do not have a name or have the name '_' (the blank identifier) - functions in test files - functions with empty bodies or those with just a return stmt` diff --git a/gopls/internal/lsp/cache/analysis.go b/gopls/internal/lsp/cache/analysis.go index dd15843bc19..5676a7814a2 100644 --- a/gopls/internal/lsp/cache/analysis.go +++ b/gopls/internal/lsp/cache/analysis.go @@ -707,10 +707,9 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { // uses those fields, we account for them by hashing vdeps. // type sizes - // This assertion is safe, but if a black-box implementation - // is ever needed, record Sizeof(*int) and Alignof(int64). 
- sz := m.TypesSizes.(*types.StdSizes) - fmt.Fprintf(hasher, "sizes: %d %d\n", sz.WordSize, sz.MaxAlign) + wordSize := an.m.TypesSizes.Sizeof(types.Typ[types.Int]) + maxAlign := an.m.TypesSizes.Alignof(types.NewPointer(types.Typ[types.Int64])) + fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) // metadata errors: used for 'compiles' field fmt.Fprintf(hasher, "errors: %d", len(m.Errors)) @@ -1396,22 +1395,12 @@ func requiredAnalyzers(analyzers []*analysis.Analyzer) []*analysis.Analyzer { return result } -func mustEncode(x interface{}) []byte { - var buf bytes.Buffer - if err := gob.NewEncoder(&buf).Encode(x); err != nil { - log.Fatalf("internal error encoding %T: %v", x, err) - } - return buf.Bytes() -} - -// var analyzeSummaryCodec = frob.For[*analyzeSummary]() -var analyzeSummaryCodec = frob.CodecFor117(new(*analyzeSummary)) +var analyzeSummaryCodec = frob.CodecFor[*analyzeSummary]() // -- data types for serialization of analysis.Diagnostic and source.Diagnostic -- // (The name says gob but we use frob.) -// var diagnosticsCodec = frob.For[[]gobDiagnostic]() -var diagnosticsCodec = frob.CodecFor117(new([]gobDiagnostic)) +var diagnosticsCodec = frob.CodecFor[[]gobDiagnostic]() type gobDiagnostic struct { Location protocol.Location diff --git a/gopls/internal/lsp/cache/check.go b/gopls/internal/lsp/cache/check.go index 50ce8955014..b7267e983ec 100644 --- a/gopls/internal/lsp/cache/check.go +++ b/gopls/internal/lsp/cache/check.go @@ -323,6 +323,9 @@ type ( // // Both pre and post may be called concurrently. 
func (s *snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preTypeCheck, post postTypeCheck) error { + s.typeCheckMu.Lock() + defer s.typeCheckMu.Unlock() + ctx, done := event.Start(ctx, "cache.forEachPackage", tag.PackageCount.Of(len(ids))) defer done() @@ -846,7 +849,7 @@ func (s *snapshot) getPackageHandles(ctx context.Context, ids []PackageID) (map[ unfinishedSuccs: int32(len(m.DepsByPkgPath)), } if entry, hit := b.s.packages.Get(m.ID); hit { - n.ph = entry.(*packageHandle) + n.ph = entry } if n.unfinishedSuccs == 0 { leaves = append(leaves, n) @@ -1115,12 +1118,11 @@ func (b *packageHandleBuilder) buildPackageHandle(ctx context.Context, n *handle } // Check the packages map again in case another goroutine got there first. - if alt, ok := b.s.packages.Get(n.m.ID); ok && alt.(*packageHandle).validated { - altPH := alt.(*packageHandle) - if altPH.m != n.m { + if alt, ok := b.s.packages.Get(n.m.ID); ok && alt.validated { + if alt.m != n.m { bug.Reportf("existing package handle does not match for %s", n.m.ID) } - n.ph = altPH + n.ph = alt } else { b.s.packages.Set(n.m.ID, n.ph, nil) } @@ -1398,8 +1400,9 @@ func localPackageKey(inputs typeCheckInputs) source.Hash { } // types sizes - sz := inputs.sizes.(*types.StdSizes) - fmt.Fprintf(hasher, "sizes: %d %d\n", sz.WordSize, sz.MaxAlign) + wordSize := inputs.sizes.Sizeof(types.Typ[types.Int]) + maxAlign := inputs.sizes.Alignof(types.NewPointer(types.Typ[types.Int64])) + fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) fmt.Fprintf(hasher, "relatedInformation: %t\n", inputs.relatedInformation) fmt.Fprintf(hasher, "linkTarget: %s\n", inputs.linkTarget) diff --git a/gopls/internal/lsp/cache/cycle_test.go b/gopls/internal/lsp/cache/cycle_test.go index d08e8e0b73f..25edbbfe338 100644 --- a/gopls/internal/lsp/cache/cycle_test.go +++ b/gopls/internal/lsp/cache/cycle_test.go @@ -76,7 +76,7 @@ func TestBreakImportCycles(t *testing.T) { } if s != "" { for _, item := range strings.Split(s, ";") { - 
nodeID, succIDs, ok := cut(item, "->") + nodeID, succIDs, ok := strings.Cut(item, "->") node := makeNode(nodeID) if ok { for _, succID := range strings.Split(succIDs, ",") { diff --git a/gopls/internal/lsp/cache/load.go b/gopls/internal/lsp/cache/load.go index eb302b7b332..03db2a35d0d 100644 --- a/gopls/internal/lsp/cache/load.go +++ b/gopls/internal/lsp/cache/load.go @@ -67,7 +67,7 @@ func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc panic(fmt.Sprintf("internal error: load called with multiple scopes when a file scope is present (file: %s)", uri)) } fh := s.FindFile(uri) - if fh == nil || s.View().FileKind(fh) != source.Go { + if fh == nil || s.FileKind(fh) != source.Go { // Don't try to load a file that doesn't exist, or isn't a go file. continue } @@ -217,8 +217,7 @@ func (s *snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc s.mu.Lock() // Assert the invariant s.packages.Get(id).m == s.meta.metadata[id]. - s.packages.Range(func(k, v interface{}) { - id, ph := k.(PackageID), v.(*packageHandle) + s.packages.Range(func(id PackageID, ph *packageHandle) { if s.meta.metadata[id] != ph.m { panic("inconsistent metadata") } diff --git a/gopls/internal/lsp/cache/maps.go b/gopls/internal/lsp/cache/maps.go index de6187da255..edb72d5c123 100644 --- a/gopls/internal/lsp/cache/maps.go +++ b/gopls/internal/lsp/cache/maps.go @@ -10,21 +10,14 @@ import ( "golang.org/x/tools/internal/persistent" ) -// TODO(euroelessar): Use generics once support for go1.17 is dropped. - type filesMap struct { - impl *persistent.Map + impl *persistent.Map[span.URI, source.FileHandle] overlayMap map[span.URI]*Overlay // the subset that are overlays } -// uriLessInterface is the < relation for "any" values containing span.URIs. 
-func uriLessInterface(a, b interface{}) bool { - return a.(span.URI) < b.(span.URI) -} - func newFilesMap() filesMap { return filesMap{ - impl: persistent.NewMap(uriLessInterface), + impl: new(persistent.Map[span.URI, source.FileHandle]), overlayMap: make(map[span.URI]*Overlay), } } @@ -53,9 +46,7 @@ func (m filesMap) Get(key span.URI) (source.FileHandle, bool) { } func (m filesMap) Range(do func(key span.URI, value source.FileHandle)) { - m.impl.Range(func(key, value interface{}) { - do(key.(span.URI), value.(source.FileHandle)) - }) + m.impl.Range(do) } func (m filesMap) Set(key span.URI, value source.FileHandle) { @@ -85,52 +76,3 @@ func (m filesMap) overlays() []*Overlay { } return overlays } - -func packageIDLessInterface(x, y interface{}) bool { - return x.(PackageID) < y.(PackageID) -} - -type knownDirsSet struct { - impl *persistent.Map -} - -func newKnownDirsSet() knownDirsSet { - return knownDirsSet{ - impl: persistent.NewMap(func(a, b interface{}) bool { - return a.(span.URI) < b.(span.URI) - }), - } -} - -func (s knownDirsSet) Clone() knownDirsSet { - return knownDirsSet{ - impl: s.impl.Clone(), - } -} - -func (s knownDirsSet) Destroy() { - s.impl.Destroy() -} - -func (s knownDirsSet) Contains(key span.URI) bool { - _, ok := s.impl.Get(key) - return ok -} - -func (s knownDirsSet) Range(do func(key span.URI)) { - s.impl.Range(func(key, value interface{}) { - do(key.(span.URI)) - }) -} - -func (s knownDirsSet) SetAll(other knownDirsSet) { - s.impl.SetAll(other.impl) -} - -func (s knownDirsSet) Insert(key span.URI) { - s.impl.Set(key, nil, nil) -} - -func (s knownDirsSet) Remove(key span.URI) { - s.impl.Delete(key) -} diff --git a/gopls/internal/lsp/cache/mod.go b/gopls/internal/lsp/cache/mod.go index 8b6331d056d..8a452ab086d 100644 --- a/gopls/internal/lsp/cache/mod.go +++ b/gopls/internal/lsp/cache/mod.go @@ -52,7 +52,7 @@ func (s *snapshot) ParseMod(ctx context.Context, fh source.FileHandle) (*source. } // Await result. 
- v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } @@ -130,7 +130,7 @@ func (s *snapshot) ParseWork(ctx context.Context, fh source.FileHandle) (*source } // Await result. - v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } @@ -213,7 +213,7 @@ func sumFilename(modURI span.URI) string { func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string]string, error) { uri := fh.URI() - if s.View().FileKind(fh) != source.Mod { + if s.FileKind(fh) != source.Mod { return nil, fmt.Errorf("%s is not a go.mod file", uri) } @@ -240,7 +240,7 @@ func (s *snapshot) ModWhy(ctx context.Context, fh source.FileHandle) (map[string } // Await result. - v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } diff --git a/gopls/internal/lsp/cache/mod_tidy.go b/gopls/internal/lsp/cache/mod_tidy.go index a96793bdbc2..b806edb7499 100644 --- a/gopls/internal/lsp/cache/mod_tidy.go +++ b/gopls/internal/lsp/cache/mod_tidy.go @@ -9,7 +9,6 @@ import ( "fmt" "go/ast" "go/token" - "io/ioutil" "os" "path/filepath" "strconv" @@ -86,7 +85,7 @@ func (s *snapshot) ModTidy(ctx context.Context, pm *source.ParsedModule) (*sourc } // Await result. - v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } @@ -118,7 +117,7 @@ func modTidyImpl(ctx context.Context, snapshot *snapshot, filename string, pm *s // Go directly to disk to get the temporary mod file, // since it is always on disk. 
- tempContents, err := ioutil.ReadFile(tmpURI.Filename()) + tempContents, err := os.ReadFile(tmpURI.Filename()) if err != nil { return nil, err } @@ -169,7 +168,7 @@ func modTidyDiagnostics(ctx context.Context, snapshot *snapshot, pm *source.Pars for _, req := range wrongDirectness { // Handle dependencies that are incorrectly labeled indirect and // vice versa. - srcDiag, err := directnessDiagnostic(pm.Mapper, req, snapshot.View().Options().ComputeEdits) + srcDiag, err := directnessDiagnostic(pm.Mapper, req, snapshot.Options().ComputeEdits) if err != nil { // We're probably in a bad state if we can't compute a // directnessDiagnostic, but try to keep going so as to not suppress diff --git a/gopls/internal/lsp/cache/mod_vuln.go b/gopls/internal/lsp/cache/mod_vuln.go index 942ca52525c..dcd58bfa94a 100644 --- a/gopls/internal/lsp/cache/mod_vuln.go +++ b/gopls/internal/lsp/cache/mod_vuln.go @@ -55,7 +55,7 @@ func (s *snapshot) ModVuln(ctx context.Context, modURI span.URI) (*govulncheck.R } // Await result. 
- v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } diff --git a/gopls/internal/lsp/cache/session.go b/gopls/internal/lsp/cache/session.go index 6b75f10b36f..1e463fa3f4f 100644 --- a/gopls/internal/lsp/cache/session.go +++ b/gopls/internal/lsp/cache/session.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/memoize" "golang.org/x/tools/internal/persistent" "golang.org/x/tools/internal/xcontext" ) @@ -169,19 +170,19 @@ func (s *Session) createView(ctx context.Context, name string, folder span.URI, backgroundCtx: backgroundCtx, cancel: cancel, store: s.cache.store, - packages: persistent.NewMap(packageIDLessInterface), + packages: new(persistent.Map[PackageID, *packageHandle]), meta: new(metadataGraph), files: newFilesMap(), - activePackages: persistent.NewMap(packageIDLessInterface), - symbolizeHandles: persistent.NewMap(uriLessInterface), + activePackages: new(persistent.Map[PackageID, *Package]), + symbolizeHandles: new(persistent.Map[span.URI, *memoize.Promise]), workspacePackages: make(map[PackageID]PackagePath), - unloadableFiles: make(map[span.URI]struct{}), - parseModHandles: persistent.NewMap(uriLessInterface), - parseWorkHandles: persistent.NewMap(uriLessInterface), - modTidyHandles: persistent.NewMap(uriLessInterface), - modVulnHandles: persistent.NewMap(uriLessInterface), - modWhyHandles: persistent.NewMap(uriLessInterface), - knownSubdirs: newKnownDirsSet(), + unloadableFiles: new(persistent.Set[span.URI]), + parseModHandles: new(persistent.Map[span.URI, *memoize.Promise]), + parseWorkHandles: new(persistent.Map[span.URI, *memoize.Promise]), + modTidyHandles: new(persistent.Map[span.URI, *memoize.Promise]), + modVulnHandles: new(persistent.Map[span.URI, *memoize.Promise]), + modWhyHandles: new(persistent.Map[span.URI, *memoize.Promise]), + knownSubdirs: 
new(persistent.Set[span.URI]), workspaceModFiles: wsModFiles, workspaceModFilesErr: wsModFilesErr, pkgIndex: typerefs.NewPackageIndex(), @@ -618,15 +619,15 @@ func (s *Session) ExpandModificationsToDirectories(ctx context.Context, changes // knownDirectories returns all of the directories known to the given // snapshots, including workspace directories and their subdirectories. // It is responsibility of the caller to destroy the returned set. -func knownDirectories(ctx context.Context, snapshots []*snapshot) knownDirsSet { - result := newKnownDirsSet() +func knownDirectories(ctx context.Context, snapshots []*snapshot) *persistent.Set[span.URI] { + result := new(persistent.Set[span.URI]) for _, snapshot := range snapshots { dirs := snapshot.dirs(ctx) for _, dir := range dirs { - result.Insert(dir) + result.Add(dir) } knownSubdirs := snapshot.getKnownSubdirs(dirs) - result.SetAll(knownSubdirs) + result.AddAll(knownSubdirs) knownSubdirs.Destroy() } return result diff --git a/gopls/internal/lsp/cache/snapshot.go b/gopls/internal/lsp/cache/snapshot.go index 863e488c4e3..94eceed869b 100644 --- a/gopls/internal/lsp/cache/snapshot.go +++ b/gopls/internal/lsp/cache/snapshot.go @@ -14,7 +14,6 @@ import ( "go/token" "go/types" "io" - "io/ioutil" "log" "os" "path/filepath" @@ -102,7 +101,7 @@ type snapshot struct { // symbolizeHandles maps each file URI to a handle for the future // result of computing the symbols declared in that file. - symbolizeHandles *persistent.Map // from span.URI to *memoize.Promise[symbolizeResult] + symbolizeHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[symbolizeResult] // packages maps a packageKey to a *packageHandle. // It may be invalidated when a file's content changes. 
@@ -111,13 +110,13 @@ type snapshot struct { // - packages.Get(id).meta == meta.metadata[id] for all ids // - if a package is in packages, then all of its dependencies should also // be in packages, unless there is a missing import - packages *persistent.Map // from packageID to *packageHandle + packages *persistent.Map[PackageID, *packageHandle] // activePackages maps a package ID to a memoized active package, or nil if // the package is known not to be open. // // IDs not contained in the map are not known to be open or not open. - activePackages *persistent.Map // from packageID to *Package + activePackages *persistent.Map[PackageID, *Package] // workspacePackages contains the workspace's packages, which are loaded // when the view is created. It contains no intermediate test variants. @@ -131,29 +130,29 @@ type snapshot struct { shouldLoad map[PackageID][]PackagePath // unloadableFiles keeps track of files that we've failed to load. - unloadableFiles map[span.URI]struct{} + unloadableFiles *persistent.Set[span.URI] // TODO(rfindley): rename the handles below to "promises". A promise is // different from a handle (we mutate the package handle.) // parseModHandles keeps track of any parseModHandles for the snapshot. // The handles need not refer to only the view's go.mod file. - parseModHandles *persistent.Map // from span.URI to *memoize.Promise[parseModResult] + parseModHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[parseModResult] // parseWorkHandles keeps track of any parseWorkHandles for the snapshot. // The handles need not refer to only the view's go.work file. - parseWorkHandles *persistent.Map // from span.URI to *memoize.Promise[parseWorkResult] + parseWorkHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[parseWorkResult] // Preserve go.mod-related handles to avoid garbage-collecting the results // of various calls to the go command. The handles need not refer to only // the view's go.mod file. 
- modTidyHandles *persistent.Map // from span.URI to *memoize.Promise[modTidyResult] - modWhyHandles *persistent.Map // from span.URI to *memoize.Promise[modWhyResult] - modVulnHandles *persistent.Map // from span.URI to *memoize.Promise[modVulnResult] + modTidyHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modTidyResult] + modWhyHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modWhyResult] + modVulnHandles *persistent.Map[span.URI, *memoize.Promise] // *memoize.Promise[modVulnResult] // knownSubdirs is the set of subdirectory URIs in the workspace, // used to create glob patterns for file watching. - knownSubdirs knownDirsSet + knownSubdirs *persistent.Set[span.URI] knownSubdirsCache map[string]struct{} // memo of knownSubdirs as a set of filenames // unprocessedSubdirChanges are any changes that might affect the set of // subdirectories in the workspace. They are not reflected to knownSubdirs @@ -184,6 +183,18 @@ type snapshot struct { // detect ignored files. ignoreFilterOnce sync.Once ignoreFilter *ignoreFilter + + // typeCheckMu guards type checking. + // + // Only one type checking pass should be running at a given time, for two reasons: + // 1. type checking batches are optimized to use all available processors. + // Generally speaking, running two type checking batches serially is about + // as fast as running them in parallel. + // 2. type checking produces cached artifacts that may be re-used by the + // next type-checking batch: the shared import graph and the set of + // active packages. Running type checking batches in parallel after an + // invalidation can cause redundant calculation of this shared state. 
+ typeCheckMu sync.Mutex } var globalSnapshotID uint64 @@ -258,6 +269,7 @@ func (s *snapshot) destroy(destroyedBy string) { s.modTidyHandles.Destroy() s.modVulnHandles.Destroy() s.modWhyHandles.Destroy() + s.unloadableFiles.Destroy() } func (s *snapshot) SequenceID() uint64 { @@ -272,6 +284,14 @@ func (s *snapshot) View() source.View { return s.view } +func (s *snapshot) FileKind(h source.FileHandle) source.FileKind { + return s.view.FileKind(h) +} + +func (s *snapshot) Options() *source.Options { + return s.view.Options() // temporarily return view options. +} + func (s *snapshot) BackgroundContext() context.Context { return s.backgroundCtx } @@ -444,11 +464,11 @@ func (s *snapshot) RunGoCommands(ctx context.Context, allowNetwork bool, wd stri return false, nil, nil, nil } var modBytes, sumBytes []byte - modBytes, err = ioutil.ReadFile(tmpURI.Filename()) + modBytes, err = os.ReadFile(tmpURI.Filename()) if err != nil && !os.IsNotExist(err) { return false, nil, nil, err } - sumBytes, err = ioutil.ReadFile(strings.TrimSuffix(tmpURI.Filename(), ".mod") + ".sum") + sumBytes, err = os.ReadFile(strings.TrimSuffix(tmpURI.Filename(), ".mod") + ".sum") if err != nil && !os.IsNotExist(err) { return false, nil, nil, err } @@ -731,7 +751,7 @@ func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source } // Check if uri is known to be unloadable. - _, unloadable := s.unloadableFiles[uri] + unloadable := s.unloadableFiles.Contains(uri) s.mu.Unlock() @@ -743,12 +763,22 @@ func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source scope := fileLoadScope(uri) err := s.load(ctx, false, scope) - // Guard against failed loads due to context cancellation. // // Return the context error here as the current operation is no longer // valid. - if ctxErr := ctx.Err(); ctxErr != nil { - return nil, ctxErr + if err != nil { + // Guard against failed loads due to context cancellation. 
We don't want + // to mark loads as completed if they failed due to context cancellation. + if ctx.Err() != nil { + return nil, ctx.Err() + } + + // Don't return an error here, as we may still return stale IDs. + // Furthermore, the result of MetadataForFile should be consistent upon + // subsequent calls, even if the file is marked as unloadable. + if !errors.Is(err, errNoPackages) { + event.Error(ctx, "MetadataForFile", err) + } } // We must clear scopes after loading. @@ -757,13 +787,6 @@ func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source // packages as loaded. We could do this from snapshot.load and avoid // raciness. s.clearShouldLoad(scope) - - // Don't return an error here, as we may still return stale IDs. - // Furthermore, the result of MetadataForFile should be consistent upon - // subsequent calls, even if the file is marked as unloadable. - if err != nil && !errors.Is(err, errNoPackages) { - event.Error(ctx, "MetadataForFile", err) - } } // Retrieve the metadata. @@ -781,7 +804,7 @@ func (s *snapshot) MetadataForFile(ctx context.Context, uri span.URI) ([]*source // so if we get here and still have // no IDs, uri is unloadable. if !unloadable && len(ids) == 0 { - s.unloadableFiles[uri] = struct{}{} + s.unloadableFiles.Add(uri) } // Sort packages "narrowest" to "widest" (in practice: @@ -849,7 +872,7 @@ func (s *snapshot) getActivePackage(id PackageID) *Package { defer s.mu.Unlock() if value, ok := s.activePackages.Get(id); ok { - return value.(*Package) // possibly nil, if we have already checked this id. 
+ return value } return nil } @@ -873,14 +896,14 @@ func (s *snapshot) setActivePackage(id PackageID, pkg *Package) { func (s *snapshot) resetActivePackagesLocked() { s.activePackages.Destroy() - s.activePackages = persistent.NewMap(packageIDLessInterface) + s.activePackages = new(persistent.Map[PackageID, *Package]) } const fileExtensions = "go,mod,sum,work" func (s *snapshot) fileWatchingGlobPatterns(ctx context.Context) map[string]struct{} { extensions := fileExtensions - for _, ext := range s.View().Options().TemplateExtensions { + for _, ext := range s.Options().TemplateExtensions { extensions += "," + ext } // Work-around microsoft/vscode#100870 by making sure that we are, @@ -995,14 +1018,14 @@ func (s *snapshot) collectAllKnownSubdirs(ctx context.Context) { defer s.mu.Unlock() s.knownSubdirs.Destroy() - s.knownSubdirs = newKnownDirsSet() + s.knownSubdirs = new(persistent.Set[span.URI]) s.knownSubdirsCache = nil s.files.Range(func(uri span.URI, fh source.FileHandle) { s.addKnownSubdirLocked(uri, dirs) }) } -func (s *snapshot) getKnownSubdirs(wsDirs []span.URI) knownDirsSet { +func (s *snapshot) getKnownSubdirs(wsDirs []span.URI) *persistent.Set[span.URI] { s.mu.Lock() defer s.mu.Unlock() @@ -1053,7 +1076,7 @@ func (s *snapshot) addKnownSubdirLocked(uri span.URI, dirs []span.URI) { if s.knownSubdirs.Contains(uri) { break } - s.knownSubdirs.Insert(uri) + s.knownSubdirs.Add(uri) dir = filepath.Dir(dir) s.knownSubdirsCache = nil } @@ -1570,7 +1593,7 @@ func (s *snapshot) reloadOrphanedOpenFiles(ctx context.Context) error { s.mu.Lock() loadable := files[:0] for _, file := range files { - if _, unloadable := s.unloadableFiles[file.URI()]; !unloadable { + if !s.unloadableFiles.Contains(file.URI()) { loadable = append(loadable, file) } } @@ -1633,7 +1656,7 @@ func (s *snapshot) reloadOrphanedOpenFiles(ctx context.Context) error { // metadata graph that resulted from loading. 
uri := file.URI() if len(s.meta.ids[uri]) == 0 { - s.unloadableFiles[uri] = struct{}{} + s.unloadableFiles.Add(uri) } } @@ -1852,20 +1875,12 @@ https://github.com/golang/tools/blob/master/gopls/doc/settings.md#buildflags-str // Most likely, each call site of inVendor needs to be reconsidered to // understand and correctly implement the desired behavior. func inVendor(uri span.URI) bool { - _, after, found := cut(string(uri), "/vendor/") + _, after, found := strings.Cut(string(uri), "/vendor/") // Only subdirectories of /vendor/ are considered vendored // (/vendor/a/foo.go is vendored, /vendor/foo.go is not). return found && strings.Contains(after, "/") } -// TODO(adonovan): replace with strings.Cut when we can assume go1.18. -func cut(s, sep string) (before, after string, found bool) { - if i := strings.Index(s, sep); i >= 0 { - return s[:i], s[i+len(sep):], true - } - return s, "", false -} - // unappliedChanges is a file source that handles an uncloned snapshot. type unappliedChanges struct { originalSnapshot *snapshot @@ -1955,7 +1970,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC files: s.files.Clone(), symbolizeHandles: s.symbolizeHandles.Clone(), workspacePackages: make(map[PackageID]PackagePath, len(s.workspacePackages)), - unloadableFiles: make(map[span.URI]struct{}, len(s.unloadableFiles)), + unloadableFiles: s.unloadableFiles.Clone(), // see the TODO for unloadableFiles below parseModHandles: s.parseModHandles.Clone(), parseWorkHandles: s.parseWorkHandles.Clone(), modTidyHandles: s.modTidyHandles.Clone(), @@ -1979,16 +1994,11 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC // incref/decref operation that might destroy it prematurely.) release := result.Acquire() - // Copy the set of unloadable files. - // - // TODO(rfindley): this looks wrong. Shouldn't we clear unloadableFiles on + // TODO(rfindley): this looks wrong. 
Should we clear unloadableFiles on // changes to environment or workspace layout, or more generally on any // metadata change? // // Maybe not, as major configuration changes cause a new view. - for k, v := range s.unloadableFiles { - result.unloadableFiles[k] = v - } // Add all of the known subdirectories, but don't update them for the // changed files. We need to rebuild the workspace module to know the @@ -2105,7 +2115,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC // TODO(rfindley): this also looks wrong, as typing in an unloadable file // will result in repeated reloads. We should only delete if metadata // changed. - delete(result.unloadableFiles, uri) + result.unloadableFiles.Remove(uri) } // Deleting an import can cause list errors due to import cycles to be @@ -2175,7 +2185,7 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC result.packages.Delete(id) } else { if entry, hit := result.packages.Get(id); hit { - ph := entry.(*packageHandle).clone(false) + ph := entry.clone(false) result.packages.Set(id, ph, nil) } } @@ -2277,12 +2287,11 @@ func (s *snapshot) clone(ctx, bgCtx context.Context, changes map[span.URI]*fileC // changed that happens not to be present in the map, but that's OK: the goal // of this function is to guarantee that IF the nearest mod file is present in // the map, it is invalidated. 
-func deleteMostRelevantModFile(m *persistent.Map, changed span.URI) { +func deleteMostRelevantModFile(m *persistent.Map[span.URI, *memoize.Promise], changed span.URI) { var mostRelevant span.URI changedFile := changed.Filename() - m.Range(func(key, value interface{}) { - modURI := key.(span.URI) + m.Range(func(modURI span.URI, _ *memoize.Promise) { if len(modURI) > len(mostRelevant) { if source.InDir(filepath.Dir(modURI.Filename()), changedFile) { mostRelevant = modURI diff --git a/gopls/internal/lsp/cache/symbols.go b/gopls/internal/lsp/cache/symbols.go index 466d9dc71a6..3ecd794303b 100644 --- a/gopls/internal/lsp/cache/symbols.go +++ b/gopls/internal/lsp/cache/symbols.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/gopls/internal/lsp/protocol" "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/memoize" ) // symbolize returns the result of symbolizing the file identified by uri, using a cache. @@ -51,7 +50,7 @@ func (s *snapshot) symbolize(ctx context.Context, uri span.URI) ([]source.Symbol } // Await result. - v, err := s.awaitPromise(ctx, entry.(*memoize.Promise)) + v, err := s.awaitPromise(ctx, entry) if err != nil { return nil, err } diff --git a/gopls/internal/lsp/cache/view.go b/gopls/internal/lsp/cache/view.go index 70395d1a259..fbdb6047a78 100644 --- a/gopls/internal/lsp/cache/view.go +++ b/gopls/internal/lsp/cache/view.go @@ -11,7 +11,6 @@ import ( "encoding/json" "errors" "fmt" - "io/ioutil" "os" "path" "path/filepath" @@ -351,7 +350,7 @@ func (v *View) ID() string { return v.id } // longer needed. 
func tempModFile(modFh source.FileHandle, gosum []byte) (tmpURI span.URI, cleanup func(), err error) { filenameHash := source.Hashf("%s", modFh.URI().Filename()) - tmpMod, err := ioutil.TempFile("", fmt.Sprintf("go.%s.*.mod", filenameHash)) + tmpMod, err := os.CreateTemp("", fmt.Sprintf("go.%s.*.mod", filenameHash)) if err != nil { return "", nil, err } @@ -386,7 +385,7 @@ func tempModFile(modFh source.FileHandle, gosum []byte) (tmpURI span.URI, cleanu // Create an analogous go.sum, if one exists. if gosum != nil { - if err := ioutil.WriteFile(tmpSumName, gosum, 0655); err != nil { + if err := os.WriteFile(tmpSumName, gosum, 0655); err != nil { return "", nil, err } } diff --git a/gopls/internal/lsp/cache/view_test.go b/gopls/internal/lsp/cache/view_test.go index 90471ed4401..21b10b6a982 100644 --- a/gopls/internal/lsp/cache/view_test.go +++ b/gopls/internal/lsp/cache/view_test.go @@ -6,7 +6,6 @@ package cache import ( "context" "encoding/json" - "io/ioutil" "os" "path/filepath" "testing" @@ -20,17 +19,14 @@ import ( ) func TestCaseInsensitiveFilesystem(t *testing.T) { - base, err := ioutil.TempDir("", t.Name()) - if err != nil { - t.Fatal(err) - } + base := t.TempDir() inner := filepath.Join(base, "a/B/c/DEFgh") if err := os.MkdirAll(inner, 0777); err != nil { t.Fatal(err) } file := filepath.Join(inner, "f.go") - if err := ioutil.WriteFile(file, []byte("hi"), 0777); err != nil { + if err := os.WriteFile(file, []byte("hi"), 0777); err != nil { t.Fatal(err) } if _, err := os.Stat(filepath.Join(inner, "F.go")); err != nil { diff --git a/gopls/internal/lsp/cmd/cmd.go b/gopls/internal/lsp/cmd/cmd.go index 3474ed73352..340073d8a5a 100644 --- a/gopls/internal/lsp/cmd/cmd.go +++ b/gopls/internal/lsp/cmd/cmd.go @@ -30,6 +30,7 @@ import ( "golang.org/x/tools/gopls/internal/lsp/protocol" "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/jsonrpc2" 
"golang.org/x/tools/internal/tool" "golang.org/x/tools/internal/xcontext" @@ -75,6 +76,26 @@ type Application struct { // PrepareOptions is called to update the options when a new view is built. // It is primarily to allow the behavior of gopls to be modified by hooks. PrepareOptions func(*source.Options) + + // editFlags holds flags that control how file edit operations + // are applied, in particular when the server makes an ApplyEdits + // downcall to the client. Present only for commands that apply edits. + editFlags *EditFlags +} + +// EditFlags defines flags common to {fix,format,imports,rename} +// that control how edits are applied to the client's files. +// +// The type is exported for flag reflection. +// +// The -write, -diff, and -list flags are orthogonal but any +// of them suppresses the default behavior, which is to print +// the edited file contents. +type EditFlags struct { + Write bool `flag:"w,write" help:"write edited content to source files"` + Preserve bool `flag:"preserve" help:"with -write, make copies of original files"` + Diff bool `flag:"d,diff" help:"display diffs instead of edited file content"` + List bool `flag:"l,list" help:"display names of edited files"` } func (app *Application) verbose() bool { @@ -326,15 +347,6 @@ func (app *Application) connect(ctx context.Context, onProgress func(*protocol.P } } -// CloseTestConnections terminates shared connections used in command tests. It -// should only be called from tests. 
-func CloseTestConnections(ctx context.Context) { - for _, c := range internalConnections { - c.Shutdown(ctx) - c.Exit(ctx) - } -} - func (app *Application) connectRemote(ctx context.Context, remote string) (*connection, error) { conn, err := lsprpc.ConnectToRemote(ctx, remote) if err != nil { @@ -524,7 +536,87 @@ func (c *cmdClient) Configuration(ctx context.Context, p *protocol.ParamConfigur } func (c *cmdClient) ApplyEdit(ctx context.Context, p *protocol.ApplyWorkspaceEditParams) (*protocol.ApplyWorkspaceEditResult, error) { - return &protocol.ApplyWorkspaceEditResult{Applied: false, FailureReason: "not implemented"}, nil + if err := c.applyWorkspaceEdit(&p.Edit); err != nil { + return &protocol.ApplyWorkspaceEditResult{FailureReason: err.Error()}, nil + } + return &protocol.ApplyWorkspaceEditResult{Applied: true}, nil +} + +// applyWorkspaceEdit applies a complete WorkspaceEdit to the client's +// files, honoring the preferred edit mode specified by cli.app.editMode. +// (Used by rename and by ApplyEdit downcalls.) +func (cli *cmdClient) applyWorkspaceEdit(edit *protocol.WorkspaceEdit) error { + var orderedURIs []string + edits := map[span.URI][]protocol.TextEdit{} + for _, c := range edit.DocumentChanges { + if c.TextDocumentEdit != nil { + uri := fileURI(c.TextDocumentEdit.TextDocument.URI) + edits[uri] = append(edits[uri], c.TextDocumentEdit.Edits...) + orderedURIs = append(orderedURIs, string(uri)) + } + if c.RenameFile != nil { + return fmt.Errorf("client does not support file renaming (%s -> %s)", + c.RenameFile.OldURI, + c.RenameFile.NewURI) + } + } + sort.Strings(orderedURIs) + for _, u := range orderedURIs { + uri := span.URIFromURI(u) + f := cli.openFile(uri) + if f.err != nil { + return f.err + } + if err := applyTextEdits(f.mapper, edits[uri], cli.app.editFlags); err != nil { + return err + } + } + return nil +} + +// applyTextEdits applies a list of edits to the mapper file content, +// using the preferred edit mode. 
It is a no-op if there are no edits. +func applyTextEdits(mapper *protocol.Mapper, edits []protocol.TextEdit, flags *EditFlags) error { + if len(edits) == 0 { + return nil + } + newContent, renameEdits, err := source.ApplyProtocolEdits(mapper, edits) + if err != nil { + return err + } + + filename := mapper.URI.Filename() + + if flags.List { + fmt.Println(filename) + } + + if flags.Write { + if flags.Preserve { + if err := os.Rename(filename, filename+".orig"); err != nil { + return err + } + } + if err := os.WriteFile(filename, newContent, 0644); err != nil { + return err + } + } + + if flags.Diff { + unified, err := diff.ToUnified(filename+".orig", filename, string(mapper.Content), renameEdits) + if err != nil { + return err + } + fmt.Print(unified) + } + + // No flags: just print edited file content. + // TODO(adonovan): how is this ever useful with multiple files? + if !(flags.List || flags.Write || flags.Diff) { + os.Stdout.Write(newContent) + } + + return nil } func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishDiagnosticsParams) error { @@ -539,7 +631,7 @@ func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishD c.filesMu.Lock() defer c.filesMu.Unlock() - file := c.getFile(ctx, fileURI(p.URI)) + file := c.getFile(fileURI(p.URI)) file.diagnostics = append(file.diagnostics, p.Diagnostics...) // Perform a crude in-place deduplication. 
@@ -607,7 +699,7 @@ func (c *cmdClient) InlineValueRefresh(context.Context) error { return nil } -func (c *cmdClient) getFile(ctx context.Context, uri span.URI) *cmdFile { +func (c *cmdClient) getFile(uri span.URI) *cmdFile { file, found := c.files[uri] if !found || file.err != nil { file = &cmdFile{ @@ -626,17 +718,17 @@ func (c *cmdClient) getFile(ctx context.Context, uri span.URI) *cmdFile { return file } -func (c *cmdClient) openFile(ctx context.Context, uri span.URI) *cmdFile { +func (c *cmdClient) openFile(uri span.URI) *cmdFile { c.filesMu.Lock() defer c.filesMu.Unlock() - return c.getFile(ctx, uri) + return c.getFile(uri) } // TODO(adonovan): provide convenience helpers to: // - map a (URI, protocol.Range) to a MappedRange; // - parse a command-line argument to a MappedRange. func (c *connection) openFile(ctx context.Context, uri span.URI) (*cmdFile, error) { - file := c.client.openFile(ctx, uri) + file := c.client.openFile(uri) if file.err != nil { return nil, file.err } diff --git a/gopls/internal/lsp/cmd/format.go b/gopls/internal/lsp/cmd/format.go index 517a4d33d9f..73a8d7f582b 100644 --- a/gopls/internal/lsp/cmd/format.go +++ b/gopls/internal/lsp/cmd/format.go @@ -8,21 +8,14 @@ import ( "context" "flag" "fmt" - "io/ioutil" - "os" "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" ) // format implements the format verb for gopls. type format struct { - Diff bool `flag:"d,diff" help:"display diffs instead of rewriting files"` - Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"` - List bool `flag:"l,list" help:"list files whose formatting differs from gofmt's"` - + EditFlags app *Application } @@ -47,10 +40,9 @@ format-flags: // results to stdout. 
func (c *format) Run(ctx context.Context, args ...string) error { if len(args) == 0 { - // no files, so no results return nil } - // now we ready to kick things off + c.app.editFlags = &c.EditFlags conn, err := c.app.connect(ctx, nil) if err != nil { return err @@ -62,7 +54,6 @@ func (c *format) Run(ctx context.Context, args ...string) error { if err != nil { return err } - filename := spn.URI().Filename() loc, err := file.mapper.SpanLocation(spn) if err != nil { return err @@ -77,33 +68,8 @@ func (c *format) Run(ctx context.Context, args ...string) error { if err != nil { return fmt.Errorf("%v: %v", spn, err) } - formatted, sedits, err := source.ApplyProtocolEdits(file.mapper, edits) - if err != nil { - return fmt.Errorf("%v: %v", spn, err) - } - printIt := true - if c.List { - printIt = false - if len(edits) > 0 { - fmt.Println(filename) - } - } - if c.Write { - printIt = false - if len(edits) > 0 { - ioutil.WriteFile(filename, formatted, 0644) - } - } - if c.Diff { - printIt = false - unified, err := diff.ToUnified(filename+".orig", filename, string(file.mapper.Content), sedits) - if err != nil { - return err - } - fmt.Print(unified) - } - if printIt { - os.Stdout.Write(formatted) + if err := applyTextEdits(file.mapper, edits, c.app.editFlags); err != nil { + return err } } return nil diff --git a/gopls/internal/lsp/cmd/imports.go b/gopls/internal/lsp/cmd/imports.go index 537c8f164f1..d014d03881e 100644 --- a/gopls/internal/lsp/cmd/imports.go +++ b/gopls/internal/lsp/cmd/imports.go @@ -8,21 +8,15 @@ import ( "context" "flag" "fmt" - "io/ioutil" - "os" "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/tool" ) // imports implements the import verb for gopls. 
type imports struct { - Diff bool `flag:"d,diff" help:"display diffs instead of rewriting files"` - Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"` - + EditFlags app *Application } @@ -49,6 +43,7 @@ func (t *imports) Run(ctx context.Context, args ...string) error { if len(args) != 1 { return tool.CommandLineErrorf("imports expects 1 argument") } + t.app.editFlags = &t.EditFlags conn, err := t.app.connect(ctx, nil) if err != nil { return err @@ -82,24 +77,5 @@ func (t *imports) Run(ctx context.Context, args ...string) error { } } } - newContent, sedits, err := source.ApplyProtocolEdits(file.mapper, edits) - if err != nil { - return fmt.Errorf("%v: %v", edits, err) - } - filename := file.uri.Filename() - switch { - case t.Write: - if len(edits) > 0 { - ioutil.WriteFile(filename, newContent, 0644) - } - case t.Diff: - unified, err := diff.ToUnified(filename+".orig", filename, string(file.mapper.Content), sedits) - if err != nil { - return err - } - fmt.Print(unified) - default: - os.Stdout.Write(newContent) - } - return nil + return applyTextEdits(file.mapper, edits, t.app.editFlags) } diff --git a/gopls/internal/lsp/cmd/rename.go b/gopls/internal/lsp/cmd/rename.go index 8a1ae36d7e7..5ad7aa44494 100644 --- a/gopls/internal/lsp/cmd/rename.go +++ b/gopls/internal/lsp/cmd/rename.go @@ -8,24 +8,15 @@ import ( "context" "flag" "fmt" - "io/ioutil" - "os" - "path/filepath" - "sort" "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/tool" ) // rename implements the rename verb for gopls. 
type rename struct { - Diff bool `flag:"d,diff" help:"display diffs instead of rewriting files"` - Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"` - Preserve bool `flag:"preserve" help:"preserve original files"` - + EditFlags app *Application } @@ -54,6 +45,7 @@ func (r *rename) Run(ctx context.Context, args ...string) error { if len(args) != 2 { return tool.CommandLineErrorf("definition expects 2 arguments (position, new name)") } + r.app.editFlags = &r.EditFlags conn, err := r.app.connect(ctx, nil) if err != nil { return err @@ -78,56 +70,5 @@ func (r *rename) Run(ctx context.Context, args ...string) error { if err != nil { return err } - var orderedURIs []string - edits := map[span.URI][]protocol.TextEdit{} - for _, c := range edit.DocumentChanges { - if c.TextDocumentEdit != nil { - uri := fileURI(c.TextDocumentEdit.TextDocument.URI) - edits[uri] = append(edits[uri], c.TextDocumentEdit.Edits...) - orderedURIs = append(orderedURIs, string(uri)) - } - } - sort.Strings(orderedURIs) - changeCount := len(orderedURIs) - - for _, u := range orderedURIs { - uri := span.URIFromURI(u) - cmdFile, err := conn.openFile(ctx, uri) - if err != nil { - return err - } - filename := cmdFile.uri.Filename() - - newContent, renameEdits, err := source.ApplyProtocolEdits(cmdFile.mapper, edits[uri]) - if err != nil { - return fmt.Errorf("%v: %v", edits, err) - } - - switch { - case r.Write: - fmt.Fprintln(os.Stderr, filename) - if r.Preserve { - if err := os.Rename(filename, filename+".orig"); err != nil { - return fmt.Errorf("%v: %v", edits, err) - } - } - ioutil.WriteFile(filename, newContent, 0644) - case r.Diff: - unified, err := diff.ToUnified(filename+".orig", filename, string(cmdFile.mapper.Content), renameEdits) - if err != nil { - return err - } - fmt.Print(unified) - default: - if len(orderedURIs) > 1 { - fmt.Printf("%s:\n", filepath.Base(filename)) - } - os.Stdout.Write(newContent) - if changeCount > 1 { // if this wasn't last change, print 
newline - fmt.Println() - } - changeCount -= 1 - } - } - return nil + return conn.client.applyWorkspaceEdit(edit) } diff --git a/gopls/internal/lsp/cmd/stats.go b/gopls/internal/lsp/cmd/stats.go index 4986107134e..4e339f1c543 100644 --- a/gopls/internal/lsp/cmd/stats.go +++ b/gopls/internal/lsp/cmd/stats.go @@ -74,7 +74,7 @@ func (s *stats) Run(ctx context.Context, args ...string) error { GOARCH: runtime.GOARCH, GOPLSCACHE: os.Getenv("GOPLSCACHE"), GoVersion: runtime.Version(), - GoplsVersion: debug.Version, + GoplsVersion: debug.Version(), } opts := s.app.options diff --git a/gopls/internal/lsp/cmd/suggested_fix.go b/gopls/internal/lsp/cmd/suggested_fix.go index c0770bc489e..a3e6093912a 100644 --- a/gopls/internal/lsp/cmd/suggested_fix.go +++ b/gopls/internal/lsp/cmd/suggested_fix.go @@ -8,21 +8,21 @@ import ( "context" "flag" "fmt" - "io/ioutil" - "os" "golang.org/x/tools/gopls/internal/lsp/protocol" - "golang.org/x/tools/gopls/internal/lsp/source" "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/tool" ) +// TODO(adonovan): this command has a very poor user interface. It +// should have a way to query the available fixes for a file (without +// a span), enumerate the valid fix kinds, enable all fixes, and not +// require the pointless -all flag. See issue #60290. + // suggestedFix implements the fix verb for gopls. 
type suggestedFix struct { - Diff bool `flag:"d,diff" help:"display diffs instead of rewriting files"` - Write bool `flag:"w,write" help:"write result to (source) file instead of stdout"` - All bool `flag:"a,all" help:"apply all fixes, not just preferred fixes"` + EditFlags + All bool `flag:"a,all" help:"apply all fixes, not just preferred fixes"` app *Application } @@ -33,8 +33,33 @@ func (s *suggestedFix) Usage() string { return "[fix-flags] " } func (s *suggestedFix) ShortHelp() string { return "apply suggested fixes" } func (s *suggestedFix) DetailedHelp(f *flag.FlagSet) { fmt.Fprintf(f.Output(), ` -Example: apply suggested fixes for this file - $ gopls fix -w internal/lsp/cmd/check.go +Example: apply fixes to this file, rewriting it: + + $ gopls fix -a -w internal/lsp/cmd/check.go + +The -a (-all) flag causes all fixes, not just preferred ones, to be +applied, but since no fixes are currently preferred, this flag is +essentially mandatory. + +Arguments after the filename are interpreted as LSP CodeAction kinds +to be applied; the default set is {"quickfix"}, but valid kinds include: + + quickfix + refactor + refactor.extract + refactor.inline + refactor.rewrite + source.organizeImports + source.fixAll + +CodeAction kinds are hierarchical, so "refactor" includes +"refactor.inline". There is currently no way to enable or even +enumerate all kinds. + +Example: apply any "refactor.rewrite" fixes at the specific byte +offset within this file: + + $ gopls fix -a internal/lsp/cmd/check.go:#43 refactor.rewrite fix-flags: `) @@ -49,6 +74,7 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { if len(args) < 1 { return tool.CommandLineErrorf("fix expects at least 1 argument") } + s.app.editFlags = &s.EditFlags conn, err := s.app.connect(ctx, nil) if err != nil { return err @@ -101,12 +127,30 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { // Gather edits from matching code actions. 
var edits []protocol.TextEdit for _, a := range actions { - if a.Command != nil { - return fmt.Errorf("ExecuteCommand is not yet supported on the command line (action: %v)", a.Title) - } + // Without -all, apply only "preferred" fixes. if !a.IsPreferred && !s.All { continue } + + // Execute any command. + // This may cause the server to make + // an ApplyEdit downcall to the client. + if a.Command != nil { + if _, err := conn.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + Command: a.Command.Command, + Arguments: a.Command.Arguments, + }); err != nil { + return err + } + // The specification says that commands should + // be executed _after_ edits are applied, not + // instead of them, but we don't want to + // duplicate edits. + continue + } + + // Partially apply CodeAction.Edit, a WorkspaceEdit. + // (See also conn.Client.applyWorkspaceEdit(a.Edit)). if !from.HasPosition() { for _, c := range a.Edit.DocumentChanges { if c.TextDocumentEdit != nil { @@ -145,25 +189,5 @@ func (s *suggestedFix) Run(ctx context.Context, args ...string) error { } } - newContent, sedits, err := source.ApplyProtocolEdits(file.mapper, edits) - if err != nil { - return fmt.Errorf("%v: %v", edits, err) - } - - filename := file.uri.Filename() - switch { - case s.Write: - if len(edits) > 0 { - ioutil.WriteFile(filename, newContent, 0644) - } - case s.Diff: - diffs, err := diff.ToUnified(filename+".orig", filename, string(file.mapper.Content), sedits) - if err != nil { - return err - } - fmt.Print(diffs) - default: - os.Stdout.Write(newContent) - } - return nil + return applyTextEdits(file.mapper, edits, s.app.editFlags) } diff --git a/gopls/internal/lsp/cmd/test/integration_test.go b/gopls/internal/lsp/cmd/test/integration_test.go index 5c694d070b8..4ee9e3eb7c5 100644 --- a/gopls/internal/lsp/cmd/test/integration_test.go +++ b/gopls/internal/lsp/cmd/test/integration_test.go @@ -20,8 +20,8 @@ package cmdtest // TODO(adonovan): // - Use markers to represent positions in the input and in 
assertions. // - Coverage of cross-cutting things like cwd, environ, span parsing, etc. -// - Subcommands that accept -write and -diff flags should implement -// them consistently wrt the default behavior; factor their tests. +// - Subcommands that accept -write and -diff flags implement them +// consistently; factor their tests. // - Add missing test for 'vulncheck' subcommand. // - Add tests for client-only commands: serve, bug, help, api-json, licenses. @@ -55,7 +55,7 @@ func TestVersion(t *testing.T) { tree := writeTree(t, "") // There's not much we can robustly assert about the actual version. - const want = debug.Version // e.g. "master" + want := debug.Version() // e.g. "master" // basic { @@ -396,7 +396,7 @@ func _() { res := gopls(t, tree, "imports", "a.go") res.checkExit(true) if res.stdout != want { - t.Errorf("format: got <<%s>>, want <<%s>>", res.stdout, want) + t.Errorf("imports: got <<%s>>, want <<%s>>", res.stdout, want) } } // -diff: show a unified diff @@ -783,12 +783,13 @@ go 1.18 package a type T int func f() (int, string) { return } -`) - want := ` + +-- b.go -- package a -type T int -func f() (int, string) { return 0, "" } -`[1:] +import "io" +var _ io.Reader = C{} +type C struct{} +`) // no arguments { @@ -796,20 +797,45 @@ func f() (int, string) { return 0, "" } res.checkExit(false) res.checkStderr("expects at least 1 argument") } - // success (-a enables fillreturns) + // success with default kinds, {quickfix}. + // -a is always required because no fix is currently "preferred" (!) { res := gopls(t, tree, "fix", "-a", "a.go") res.checkExit(true) got := res.stdout + want := ` +package a +type T int +func f() (int, string) { return 0, "" } + +`[1:] + if got != want { + t.Errorf("fix: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) + } + } + // success, with explicit CodeAction kind and diagnostics span. 
+ { + res := gopls(t, tree, "fix", "-a", "b.go:#40", "quickfix") + res.checkExit(true) + got := res.stdout + want := ` +package a + +import "io" + +var _ io.Reader = C{} + +type C struct{} + +// Read implements io.Reader. +func (C) Read(p []byte) (n int, err error) { + panic("unimplemented") +} +`[1:] if got != want { t.Errorf("fix: got <<%s>>, want <<%s>>\nstderr:\n%s", got, want, res.stderr) } } - // TODO(adonovan): more tests: - // - -write, -diff: factor with imports, format, rename. - // - without -all flag - // - args[2:] is an optional list of protocol.CodeActionKind enum values. - // - a span argument with a range causes filtering. } // TestWorkspaceSymbol tests the 'workspace_symbol' subcommand (../workspace_symbol.go). diff --git a/gopls/internal/lsp/cmd/usage/fix.hlp b/gopls/internal/lsp/cmd/usage/fix.hlp index 4789a6c5b37..39e464da59d 100644 --- a/gopls/internal/lsp/cmd/usage/fix.hlp +++ b/gopls/internal/lsp/cmd/usage/fix.hlp @@ -3,13 +3,42 @@ apply suggested fixes Usage: gopls [flags] fix [fix-flags] -Example: apply suggested fixes for this file - $ gopls fix -w internal/lsp/cmd/check.go +Example: apply fixes to this file, rewriting it: + + $ gopls fix -a -w internal/lsp/cmd/check.go + +The -a (-all) flag causes all fixes, not just preferred ones, to be +applied, but since no fixes are currently preferred, this flag is +essentially mandatory. + +Arguments after the filename are interpreted as LSP CodeAction kinds +to be applied; the default set is {"quickfix"}, but valid kinds include: + + quickfix + refactor + refactor.extract + refactor.inline + refactor.rewrite + source.organizeImports + source.fixAll + +CodeAction kinds are hierarchical, so "refactor" includes +"refactor.inline". There is currently no way to enable or even +enumerate all kinds. 
+ +Example: apply any "refactor.rewrite" fixes at the specific byte +offset within this file: + + $ gopls fix -a internal/lsp/cmd/check.go:#43 refactor.rewrite fix-flags: -a,-all apply all fixes, not just preferred fixes -d,-diff - display diffs instead of rewriting files + display diffs instead of edited file content + -l,-list + display names of edited files + -preserve + with -write, make copies of original files -w,-write - write result to (source) file instead of stdout + write edited content to source files diff --git a/gopls/internal/lsp/cmd/usage/format.hlp b/gopls/internal/lsp/cmd/usage/format.hlp index 7ef0bbe4314..fedb5895282 100644 --- a/gopls/internal/lsp/cmd/usage/format.hlp +++ b/gopls/internal/lsp/cmd/usage/format.hlp @@ -11,8 +11,10 @@ Example: reformat this file: format-flags: -d,-diff - display diffs instead of rewriting files + display diffs instead of edited file content -l,-list - list files whose formatting differs from gofmt's + display names of edited files + -preserve + with -write, make copies of original files -w,-write - write result to (source) file instead of stdout + write edited content to source files diff --git a/gopls/internal/lsp/cmd/usage/imports.hlp b/gopls/internal/lsp/cmd/usage/imports.hlp index 295f4daa2d4..6e0517296ec 100644 --- a/gopls/internal/lsp/cmd/usage/imports.hlp +++ b/gopls/internal/lsp/cmd/usage/imports.hlp @@ -9,6 +9,10 @@ Example: update imports statements in a file: imports-flags: -d,-diff - display diffs instead of rewriting files + display diffs instead of edited file content + -l,-list + display names of edited files + -preserve + with -write, make copies of original files -w,-write - write result to (source) file instead of stdout + write edited content to source files diff --git a/gopls/internal/lsp/cmd/usage/rename.hlp b/gopls/internal/lsp/cmd/usage/rename.hlp index ae58cbf60a7..7b6d7f96b55 100644 --- a/gopls/internal/lsp/cmd/usage/rename.hlp +++ b/gopls/internal/lsp/cmd/usage/rename.hlp @@ -11,8 +11,10 
@@ Example: rename-flags: -d,-diff - display diffs instead of rewriting files + display diffs instead of edited file content + -l,-list + display names of edited files -preserve - preserve original files + with -write, make copies of original files -w,-write - write result to (source) file instead of stdout + write edited content to source files diff --git a/gopls/internal/lsp/code_action.go b/gopls/internal/lsp/code_action.go index 69df978f0fc..bef4e34d68f 100644 --- a/gopls/internal/lsp/code_action.go +++ b/gopls/internal/lsp/code_action.go @@ -38,8 +38,8 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara uri := fh.URI() // Determine the supported actions for this file kind. - kind := snapshot.View().FileKind(fh) - supportedCodeActions, ok := snapshot.View().Options().SupportedCodeActions[kind] + kind := snapshot.FileKind(fh) + supportedCodeActions, ok := snapshot.Options().SupportedCodeActions[kind] if !ok { return nil, fmt.Errorf("no supported code actions for %v file kind", kind) } @@ -185,7 +185,7 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara } var stubMethodsDiagnostics []protocol.Diagnostic - if wantQuickFixes && snapshot.View().Options().IsAnalyzerEnabled(stubmethods.Analyzer.Name) { + if wantQuickFixes && snapshot.Options().IsAnalyzerEnabled(stubmethods.Analyzer.Name) { for _, pd := range diagnostics { if stubmethods.MatchesMessage(pd.Message) { stubMethodsDiagnostics = append(stubMethodsDiagnostics, pd) @@ -194,7 +194,10 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara } // Code actions requiring type information. 
- if len(stubMethodsDiagnostics) > 0 || want[protocol.RefactorRewrite] || want[protocol.GoTest] { + if len(stubMethodsDiagnostics) > 0 || + want[protocol.RefactorRewrite] || + want[protocol.RefactorInline] || + want[protocol.GoTest] { pkg, pgf, err := source.NarrowestPackageForFile(ctx, snapshot, fh.URI()) if err != nil { return nil, err @@ -250,6 +253,14 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara actions = append(actions, rewrites...) } + if want[protocol.RefactorInline] { + rewrites, err := refactorInline(ctx, snapshot, pkg, pgf, fh, params.Range) + if err != nil { + return nil, err + } + actions = append(actions, rewrites...) + } + if want[protocol.GoTest] { fixes, err := goTest(ctx, snapshot, pkg, pgf, params.Range) if err != nil { @@ -442,7 +453,7 @@ func refactorRewrite(ctx context.Context, snapshot source.Snapshot, pkg source.P // // TODO: Consider removing the inspection after convenienceAnalyzers are removed. inspect := inspector.New([]*ast.File{pgf.File}) - if snapshot.View().Options().IsAnalyzerEnabled(fillstruct.Analyzer.Name) { + if snapshot.Options().IsAnalyzerEnabled(fillstruct.Analyzer.Name) { for _, d := range fillstruct.DiagnoseFillableStructs(inspect, start, end, pkg.GetTypes(), pkg.GetTypesInfo()) { rng, err := pgf.Mapper.PosRange(pgf.Tok, d.Pos, d.End) if err != nil { @@ -469,7 +480,7 @@ func refactorRewrite(ctx context.Context, snapshot source.Snapshot, pkg source.P }) } - if snapshot.View().Options().IsAnalyzerEnabled(infertypeargs.Analyzer.Name) { + if snapshot.Options().IsAnalyzerEnabled(infertypeargs.Analyzer.Name) { for _, d := range infertypeargs.DiagnoseInferableTypeArgs(pkg.FileSet(), inspect, start, end, pkg.GetTypes(), pkg.GetTypesInfo()) { if len(d.SuggestedFixes) != 1 { panic(fmt.Sprintf("unexpected number of suggested fixes from infertypeargs: %v", len(d.SuggestedFixes))) @@ -499,6 +510,35 @@ func refactorRewrite(ctx context.Context, snapshot source.Snapshot, pkg source.P return actions, 
nil } +// refactorInline returns inline actions available at the specified range. +func refactorInline(ctx context.Context, snapshot source.Snapshot, pkg source.Package, pgf *source.ParsedGoFile, fh source.FileHandle, rng protocol.Range) ([]protocol.CodeAction, error) { + var commands []protocol.Command + + // If range is within call expression, offer inline action. + if _, fn, err := source.EnclosingStaticCall(pkg, pgf, rng); err == nil { + cmd, err := command.NewApplyFixCommand(fmt.Sprintf("Inline call to %s", fn.Name()), command.ApplyFixArgs{ + URI: protocol.URIFromSpanURI(pgf.URI), + Fix: source.InlineCall, + Range: rng, + }) + if err != nil { + return nil, err + } + commands = append(commands, cmd) + } + + // Convert commands to actions. + var actions []protocol.CodeAction + for i := range commands { + actions = append(actions, protocol.CodeAction{ + Title: commands[i].Title, + Kind: protocol.RefactorInline, + Command: &commands[i], + }) + } + return actions, nil +} + func documentChanges(fh source.FileHandle, edits []protocol.TextEdit) []protocol.DocumentChanges { return []protocol.DocumentChanges{ { diff --git a/gopls/internal/lsp/code_lens.go b/gopls/internal/lsp/code_lens.go index 0167a78dc30..da7598604b0 100644 --- a/gopls/internal/lsp/code_lens.go +++ b/gopls/internal/lsp/code_lens.go @@ -27,7 +27,7 @@ func (s *Server) codeLens(ctx context.Context, params *protocol.CodeLensParams) return nil, err } var lenses map[command.Command]source.LensFunc - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Mod: lenses = mod.LensFuncs() case source.Go: @@ -38,7 +38,7 @@ func (s *Server) codeLens(ctx context.Context, params *protocol.CodeLensParams) } var result []protocol.CodeLens for cmd, lf := range lenses { - if !snapshot.View().Options().Codelenses[string(cmd)] { + if !snapshot.Options().Codelenses[string(cmd)] { continue } added, err := lf(ctx, snapshot, fh) diff --git a/gopls/internal/lsp/command.go 
b/gopls/internal/lsp/command.go index ff646709d63..388030e4bcc 100644 --- a/gopls/internal/lsp/command.go +++ b/gopls/internal/lsp/command.go @@ -11,7 +11,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "os" "os/exec" "path/filepath" @@ -427,7 +426,7 @@ func dropDependency(snapshot source.Snapshot, pm *source.ParsedModule, modulePat return nil, err } // Calculate the edits to be made due to the change. - diff := snapshot.View().Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) + diff := snapshot.Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) return source.ToProtocolEdits(pm.Mapper, diff) } @@ -629,12 +628,12 @@ func collectFileEdits(ctx context.Context, snapshot source.Snapshot, uri span.UR // file and leave it unsaved. We would rather apply the changes directly, // especially to go.sum, which should be mostly invisible to the user. if !snapshot.IsOpen(uri) { - err := ioutil.WriteFile(uri.Filename(), newContent, 0666) + err := os.WriteFile(uri.Filename(), newContent, 0666) return nil, err } m := protocol.NewMapper(fh.URI(), oldContent) - diff := snapshot.View().Options().ComputeEdits(string(oldContent), string(newContent)) + diff := snapshot.Options().ComputeEdits(string(oldContent), string(newContent)) edits, err := source.ToProtocolEdits(m, diff) if err != nil { return nil, err @@ -900,7 +899,7 @@ type pkgLoadConfig struct { func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.URIArg) (map[protocol.DocumentURI]*govulncheck.Result, error) { ret := map[protocol.DocumentURI]*govulncheck.Result{} err := c.run(ctx, commandConfig{forURI: arg.URI}, func(ctx context.Context, deps commandDeps) error { - if deps.snapshot.View().Options().Vulncheck == source.ModeVulncheckImports { + if deps.snapshot.Options().Vulncheck == source.ModeVulncheckImports { for _, modfile := range deps.snapshot.ModFiles() { res, err := deps.snapshot.ModVuln(ctx, modfile) if err != nil { @@ -937,8 +936,7 @@ func (c 
*commandHandler) RunGovulncheck(ctx context.Context, args command.Vulnch }, func(ctx context.Context, deps commandDeps) error { tokenChan <- deps.work.Token() - view := deps.snapshot.View() - opts := view.Options() + opts := deps.snapshot.Options() // quickly test if gopls is compiled to support govulncheck // by checking vulncheck.Main. Alternatively, we can continue and // let the `gopls vulncheck` command fail. This is lighter-weight. diff --git a/gopls/internal/lsp/command/command_gen.go b/gopls/internal/lsp/command/command_gen.go index 25a101cb36e..00b76579601 100644 --- a/gopls/internal/lsp/command/command_gen.go +++ b/gopls/internal/lsp/command/command_gen.go @@ -7,10 +7,10 @@ //go:build !generate // +build !generate -package command - // Code generated by generate.go. DO NOT EDIT. +package command + import ( "context" "fmt" diff --git a/gopls/internal/lsp/command/gen/gen.go b/gopls/internal/lsp/command/gen/gen.go index 29428699ee6..9f0453c62cc 100644 --- a/gopls/internal/lsp/command/gen/gen.go +++ b/gopls/internal/lsp/command/gen/gen.go @@ -12,8 +12,8 @@ import ( "go/types" "text/template" - "golang.org/x/tools/internal/imports" "golang.org/x/tools/gopls/internal/lsp/command/commandmeta" + "golang.org/x/tools/internal/imports" ) const src = `// Copyright 2021 The Go Authors. All rights reserved. @@ -25,10 +25,10 @@ const src = `// Copyright 2021 The Go Authors. All rights reserved. //go:build !generate // +build !generate -package command - // Code generated by generate.go. DO NOT EDIT. 
+package command + import ( {{range $k, $v := .Imports -}} "{{$k}}" diff --git a/gopls/internal/lsp/completion.go b/gopls/internal/lsp/completion.go index f464bb2d6bf..209f26be3cb 100644 --- a/gopls/internal/lsp/completion.go +++ b/gopls/internal/lsp/completion.go @@ -29,7 +29,7 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara } var candidates []completion.CompletionItem var surrounding *completion.Selection - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Go: candidates, surrounding, err = completion.Completion(ctx, snapshot, fh, params.Position, params.Context) case source.Mod: @@ -65,7 +65,7 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara // When using deep completions/fuzzy matching, report results as incomplete so // client fetches updated completions after every key stroke. - options := snapshot.View().Options() + options := snapshot.Options() incompleteResults := options.DeepCompletion || options.Matcher == source.Fuzzy items := toProtocolCompletionItems(candidates, rng, options) diff --git a/gopls/internal/lsp/debug/buildinfo_go1.12.go b/gopls/internal/lsp/debug/buildinfo_go1.12.go deleted file mode 100644 index 2f360dbfc70..00000000000 --- a/gopls/internal/lsp/debug/buildinfo_go1.12.go +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !go1.18 -// +build !go1.18 - -package debug - -import ( - "runtime" - "runtime/debug" -) - -type BuildInfo struct { - debug.BuildInfo - GoVersion string // Version of Go that produced this binary -} - -func readBuildInfo() (*BuildInfo, bool) { - rinfo, ok := debug.ReadBuildInfo() - if !ok { - return nil, false - } - return &BuildInfo{ - GoVersion: runtime.Version(), - BuildInfo: *rinfo, - }, true -} diff --git a/gopls/internal/lsp/debug/buildinfo_go1.18.go b/gopls/internal/lsp/debug/buildinfo_go1.18.go deleted file mode 100644 index 4121c4bc9cd..00000000000 --- a/gopls/internal/lsp/debug/buildinfo_go1.18.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.18 -// +build go1.18 - -package debug - -import ( - "runtime/debug" -) - -type BuildInfo debug.BuildInfo - -func readBuildInfo() (*BuildInfo, bool) { - info, ok := debug.ReadBuildInfo() - return (*BuildInfo)(info), ok -} diff --git a/gopls/internal/lsp/debug/info.go b/gopls/internal/lsp/debug/info.go index 5ce23fc2f59..34e6dd4e2b1 100644 --- a/gopls/internal/lsp/debug/info.go +++ b/gopls/internal/lsp/debug/info.go @@ -30,12 +30,19 @@ const ( ) // Version is a manually-updated mechanism for tracking versions. -const Version = "master" +func Version() string { + if info, ok := debug.ReadBuildInfo(); ok { + if info.Main.Version != "" { + return info.Main.Version + } + } + return "(unknown)" +} // ServerVersion is the format used by gopls to report its version to the // client. This format is structured so that the client can parse it easily. type ServerVersion struct { - *BuildInfo + *debug.BuildInfo Version string } @@ -43,23 +50,18 @@ type ServerVersion struct { // built in module mode, we return a GOPATH-specific message with the // hardcoded version. 
func VersionInfo() *ServerVersion { - if info, ok := readBuildInfo(); ok { - return getVersion(info) - } - buildInfo := &BuildInfo{} - // go1.17 or earlier, part of s.BuildInfo are embedded fields. - buildInfo.Path = "gopls, built in GOPATH mode" - buildInfo.GoVersion = runtime.Version() - return &ServerVersion{ - Version: Version, - BuildInfo: buildInfo, + if info, ok := debug.ReadBuildInfo(); ok { + return &ServerVersion{ + Version: Version(), + BuildInfo: info, + } } -} - -func getVersion(info *BuildInfo) *ServerVersion { return &ServerVersion{ - Version: Version, - BuildInfo: info, + Version: Version(), + BuildInfo: &debug.BuildInfo{ + Path: "gopls, built in GOPATH mode", + GoVersion: runtime.Version(), + }, } } @@ -125,7 +127,7 @@ func section(w io.Writer, mode PrintMode, title string, body func()) { } func printBuildInfo(w io.Writer, info *ServerVersion, verbose bool, mode PrintMode) { - fmt.Fprintf(w, "%v %v\n", info.Path, Version) + fmt.Fprintf(w, "%v %v\n", info.Path, Version()) printModuleInfo(w, info.Main, mode) if !verbose { return diff --git a/gopls/internal/lsp/debug/info_test.go b/gopls/internal/lsp/debug/info_test.go index 5a536284193..3bc9290c157 100644 --- a/gopls/internal/lsp/debug/info_test.go +++ b/gopls/internal/lsp/debug/info_test.go @@ -27,7 +27,7 @@ func TestPrintVersionInfoJSON(t *testing.T) { if g, w := got.GoVersion, runtime.Version(); g != w { t.Errorf("go version = %v, want %v", g, w) } - if g, w := got.Version, Version; g != w { + if g, w := got.Version, Version(); g != w { t.Errorf("gopls version = %v, want %v", g, w) } // Other fields of BuildInfo may not be available during test. @@ -41,7 +41,8 @@ func TestPrintVersionInfoPlainText(t *testing.T) { res := buf.Bytes() // Other fields of BuildInfo may not be available during test. 
- if !bytes.Contains(res, []byte(Version)) || !bytes.Contains(res, []byte(runtime.Version())) { - t.Errorf("plaintext output = %q,\nwant (version: %v, go: %v)", res, Version, runtime.Version()) + wantGoplsVersion, wantGoVersion := Version(), runtime.Version() + if !bytes.Contains(res, []byte(wantGoplsVersion)) || !bytes.Contains(res, []byte(wantGoVersion)) { + t.Errorf("plaintext output = %q,\nwant (version: %v, go: %v)", res, wantGoplsVersion, wantGoVersion) } } diff --git a/gopls/internal/lsp/definition.go b/gopls/internal/lsp/definition.go index 89cf86efc05..fb691ef9d16 100644 --- a/gopls/internal/lsp/definition.go +++ b/gopls/internal/lsp/definition.go @@ -26,7 +26,7 @@ func (s *Server) definition(ctx context.Context, params *protocol.DefinitionPara if !ok { return nil, err } - switch kind := snapshot.View().FileKind(fh); kind { + switch kind := snapshot.FileKind(fh); kind { case source.Tmpl: return template.Definition(snapshot, fh, params.Position) case source.Go: @@ -51,7 +51,7 @@ func (s *Server) typeDefinition(ctx context.Context, params *protocol.TypeDefini if !ok { return nil, err } - switch kind := snapshot.View().FileKind(fh); kind { + switch kind := snapshot.FileKind(fh); kind { case source.Go: return source.TypeDefinition(ctx, snapshot, fh, params.Position) default: diff --git a/gopls/internal/lsp/diagnostics.go b/gopls/internal/lsp/diagnostics.go index 69c9aeb3da7..2ae50586416 100644 --- a/gopls/internal/lsp/diagnostics.go +++ b/gopls/internal/lsp/diagnostics.go @@ -161,7 +161,7 @@ func (s *Server) diagnoseSnapshots(snapshots map[source.Snapshot][]span.URI, onD diagnosticWG.Add(1) go func(snapshot source.Snapshot, uris []span.URI) { defer diagnosticWG.Done() - s.diagnoseSnapshot(snapshot, uris, onDisk, snapshot.View().Options().DiagnosticsDelay) + s.diagnoseSnapshot(snapshot, uris, onDisk, snapshot.Options().DiagnosticsDelay) }(snapshot, uris) } diagnosticWG.Wait() @@ -188,9 +188,27 @@ func (s *Server) diagnoseSnapshot(snapshot source.Snapshot, 
changedURIs []span.U // file modifications. // // The second phase runs after the delay, and does everything. + // + // We wait a brief delay before the first phase, to allow higher priority + // work such as autocompletion to acquire the type checking mutex (though + // typically both diagnosing changed files and performing autocompletion + // will be doing the same work: recomputing active packages). + const minDelay = 20 * time.Millisecond + select { + case <-time.After(minDelay): + case <-ctx.Done(): + return + } + s.diagnoseChangedFiles(ctx, snapshot, changedURIs, onDisk) s.publishDiagnostics(ctx, false, snapshot) + if delay < minDelay { + delay = 0 + } else { + delay -= minDelay + } + select { case <-time.After(delay): case <-ctx.Done(): diff --git a/gopls/internal/lsp/filecache/filecache.go b/gopls/internal/lsp/filecache/filecache.go index 7d4b8b1b424..6877780c29c 100644 --- a/gopls/internal/lsp/filecache/filecache.go +++ b/gopls/internal/lsp/filecache/filecache.go @@ -60,10 +60,11 @@ type memKey struct { key [32]byte } -// Get retrieves from the cache and returns a newly allocated -// copy of the value most recently supplied to Set(kind, key), -// possibly by another process. +// Get retrieves from the cache and returns the value most recently +// supplied to Set(kind, key), possibly by another process. // Get returns ErrNotFound if the value was not found. +// +// Callers should not modify the returned array. func Get(kind string, key [32]byte) ([]byte, error) { // First consult the read-through memory cache. 
// Note that memory cache hits do not update the times diff --git a/gopls/internal/lsp/folding_range.go b/gopls/internal/lsp/folding_range.go index ecbe93f1d8d..e3b4987d391 100644 --- a/gopls/internal/lsp/folding_range.go +++ b/gopls/internal/lsp/folding_range.go @@ -23,7 +23,7 @@ func (s *Server) foldingRange(ctx context.Context, params *protocol.FoldingRange return nil, err } - ranges, err := source.FoldingRange(ctx, snapshot, fh, snapshot.View().Options().LineFoldingOnly) + ranges, err := source.FoldingRange(ctx, snapshot, fh, snapshot.Options().LineFoldingOnly) if err != nil { return nil, err } diff --git a/gopls/internal/lsp/format.go b/gopls/internal/lsp/format.go index 47659ba94a5..a6197a68e59 100644 --- a/gopls/internal/lsp/format.go +++ b/gopls/internal/lsp/format.go @@ -24,7 +24,7 @@ func (s *Server) formatting(ctx context.Context, params *protocol.DocumentFormat if !ok { return nil, err } - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Mod: return mod.Format(ctx, snapshot, fh) case source.Go: diff --git a/gopls/internal/lsp/frob/frob.go b/gopls/internal/lsp/frob/frob.go index 5582ebee0df..7d037328424 100644 --- a/gopls/internal/lsp/frob/frob.go +++ b/gopls/internal/lsp/frob/frob.go @@ -3,11 +3,18 @@ // license that can be found in the LICENSE file. // Package frob is a fast restricted object encoder/decoder in the -// spirit of gob. Restrictions include: +// spirit of encoding/gob. // -// - Interface values are not supported. This avoids the need for +// As with gob, types that recursively contain functions, channels, +// and unsafe.Pointers cannot be encoded, but frob has these +// additional restrictions: +// +// - Interface values are not supported; this avoids the need for // the encoding to describe types. // +// - Types that recursively contain private struct fields are not +// permitted. 
+// // - The encoding is unspecified and subject to change, so the encoder // and decoder must exactly agree on their implementation and on the // definitions of the target types. @@ -17,8 +24,6 @@ // // - There is no error handling. All errors are reported by panicking. // -// - Types that (recursively) contain private struct fields are not permitted. -// // - Values are serialized as trees, not graphs, so shared subgraphs // are encoded repeatedly. // @@ -33,38 +38,20 @@ import ( "sync" ) -// Use CodecFor117(new(T)) to create a codec for values of type T. -// Then call Encode(T) and Decode(data, *T). -// This is a placeholder for the forthcoming generic API -- see below. -// CodecFor117 panics if type T is unsuitable. -func CodecFor117(x any) Codec { +// A Codec[T] is an immutable encoder and decoder for values of type T. +type Codec[T any] struct{ frob *frob } + +// CodecFor[T] returns a codec for values of type T. +// It panics if type T is unsuitable. +func CodecFor[T any]() Codec[T] { frobsMu.Lock() defer frobsMu.Unlock() - return Codec{frobFor(reflect.TypeOf(x).Elem())} + return Codec[T]{frobFor(reflect.TypeOf((*T)(nil)).Elem())} } -type any = interface{} - -// A Codec is an immutable encoder and decoder for values of a particular type. -type Codec struct{ *frob } - -// TODO(adonovan): after go1.18, enable this generic interface. -/* - -// CodecFor[T] returns a codec for values of type T. -// -// For panics if the type recursively contains members of unsupported -// types: functions, channels, interfaces, unsafe.Pointer. -func CodecFor[T any]() Codec[T] { return For117((*T)(nil)) } - -// A Codec[T] is an immutable encoder and decoder for values of type T. 
-type Codec[T any] struct{ frob *frob } - func (codec Codec[T]) Encode(v T) []byte { return codec.frob.Encode(v) } func (codec Codec[T]) Decode(data []byte, ptr *T) { codec.frob.Decode(data, ptr) } -*/ - var ( frobsMu sync.Mutex frobs = make(map[reflect.Type]*frob) @@ -134,12 +121,15 @@ func (fr *frob) addElem(t reflect.Type) { fr.elems = append(fr.elems, frobFor(t)) } +const magic = "frob" + func (fr *frob) Encode(v any) []byte { rv := reflect.ValueOf(v) if rv.Type() != fr.t { panic(fmt.Sprintf("got %v, want %v", rv.Type(), fr.t)) } w := &writer{} + w.bytes([]byte(magic)) fr.encode(w, rv) if uint64(len(w.data))>>32 != 0 { panic("too large") // includes all cases where len doesn't fit in 32 bits @@ -255,6 +245,9 @@ func (fr *frob) Decode(data []byte, ptr any) { panic(fmt.Sprintf("got %v, want %v", rv.Type(), fr.t)) } rd := &reader{data} + if string(rd.bytes(4)) != magic { + panic("not a frob-encoded message") + } fr.decode(rd, rv) if len(rd.data) > 0 { panic("surplus bytes") @@ -337,7 +330,7 @@ func (fr *frob) decode(in *reader, addr reflect.Value) { kzero := reflect.Zero(kfrob.t) vzero := reflect.Zero(vfrob.t) for i := 0; i < len; i++ { - // TODO(adonovan): after go1.18, use SetZero. + // TODO(adonovan): use SetZero from go1.20. // k.SetZero() // v.SetZero() k.Set(kzero) @@ -414,7 +407,7 @@ func (w *writer) uint32(v uint32) { w.data = appendUint32(w.data, v) } func (w *writer) uint64(v uint64) { w.data = appendUint64(w.data, v) } func (w *writer) bytes(v []byte) { w.data = append(w.data, v...) 
} -// TODO(adonovan): delete these as in go1.18 they are methods on LittleEndian: +// TODO(adonovan): delete these as in go1.19 they are methods on LittleEndian: func appendUint16(b []byte, v uint16) []byte { return append(b, diff --git a/gopls/internal/lsp/frob/frob_test.go b/gopls/internal/lsp/frob/frob_test.go index d2a9f2a5bc7..6a0f6e729db 100644 --- a/gopls/internal/lsp/frob/frob_test.go +++ b/gopls/internal/lsp/frob/frob_test.go @@ -18,8 +18,10 @@ func TestBasics(t *testing.T) { B [2]int C *Basics D map[string]int + E []byte + F []string } - codec := frob.CodecFor117(new(Basics)) + codec := frob.CodecFor[Basics]() s1, s2 := "hello", "world" x := Basics{ @@ -29,6 +31,8 @@ func TestBasics(t *testing.T) { B: [...]int{3, 4}, D: map[string]int{"one": 1}, }, + E: []byte("hello"), + F: []string{s1, s2}, } var y Basics codec.Decode(codec.Encode(x), &y) @@ -55,7 +59,7 @@ func TestInts(t *testing.T) { C64 complex64 C128 complex128 } - codec := frob.CodecFor117(new(Ints)) + codec := frob.CodecFor[Ints]() // maxima max1 := Ints{ diff --git a/gopls/internal/lsp/general.go b/gopls/internal/lsp/general.go index b57992fed5e..9c10d9377a1 100644 --- a/gopls/internal/lsp/general.go +++ b/gopls/internal/lsp/general.go @@ -317,6 +317,7 @@ func (s *Server) checkViewGoVersions() { if oldestVersion == -1 || viewVersion < oldestVersion { oldestVersion, fromBuild = viewVersion, false } + telemetry.RecordViewGoVersion(viewVersion) } if msg, mType := versionMessage(oldestVersion, fromBuild); msg != "" { diff --git a/gopls/internal/lsp/highlight.go b/gopls/internal/lsp/highlight.go index a3c898a0a77..c0c2502e5f1 100644 --- a/gopls/internal/lsp/highlight.go +++ b/gopls/internal/lsp/highlight.go @@ -24,7 +24,7 @@ func (s *Server) documentHighlight(ctx context.Context, params *protocol.Documen return nil, err } - if snapshot.View().FileKind(fh) == source.Tmpl { + if snapshot.FileKind(fh) == source.Tmpl { return template.Highlight(ctx, snapshot, fh, params.Position) } diff --git 
a/gopls/internal/lsp/hover.go b/gopls/internal/lsp/hover.go index 9c5bc0d308d..eef59920ae4 100644 --- a/gopls/internal/lsp/hover.go +++ b/gopls/internal/lsp/hover.go @@ -25,7 +25,7 @@ func (s *Server) hover(ctx context.Context, params *protocol.HoverParams) (*prot if !ok { return nil, err } - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Mod: return mod.Hover(ctx, snapshot, fh, params.Position) case source.Go: diff --git a/gopls/internal/lsp/inlay_hint.go b/gopls/internal/lsp/inlay_hint.go index 67a6625c0e1..39b51abcbc6 100644 --- a/gopls/internal/lsp/inlay_hint.go +++ b/gopls/internal/lsp/inlay_hint.go @@ -23,7 +23,7 @@ func (s *Server) inlayHint(ctx context.Context, params *protocol.InlayHintParams if !ok { return nil, err } - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Mod: return mod.InlayHint(ctx, snapshot, fh, params.Range) case source.Go: diff --git a/gopls/internal/lsp/link.go b/gopls/internal/lsp/link.go index 4ad745fc1f2..f04e265a08b 100644 --- a/gopls/internal/lsp/link.go +++ b/gopls/internal/lsp/link.go @@ -32,7 +32,7 @@ func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLink if !ok { return nil, err } - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Mod: links, err = modLinks(ctx, snapshot, fh) case source.Go: @@ -69,7 +69,7 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl } // Shift the start position to the location of the // dependency within the require statement. 
- target := source.BuildLink(snapshot.View().Options().LinkTarget, "mod/"+req.Mod.String(), "") + target := source.BuildLink(snapshot.Options().LinkTarget, "mod/"+req.Mod.String(), "") l, err := toProtocolLink(pm.Mapper, target, start+i, start+i+len(dep)) if err != nil { return nil, err @@ -82,7 +82,7 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl } // Get all the links that are contained in the comments of the file. - urlRegexp := snapshot.View().Options().URLRegexp + urlRegexp := snapshot.Options().URLRegexp for _, expr := range pm.File.Syntax.Stmt { comments := expr.Comment() if comments == nil { @@ -103,7 +103,6 @@ func modLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandl // goLinks returns the set of hyperlink annotations for the specified Go file. func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) ([]protocol.DocumentLink, error) { - view := snapshot.View() pgf, err := snapshot.ParseGo(ctx, fh, source.ParseFull) if err != nil { @@ -113,12 +112,12 @@ func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle var links []protocol.DocumentLink // Create links for import specs. - if view.Options().ImportShortcut.ShowLinks() { + if snapshot.Options().ImportShortcut.ShowLinks() { // If links are to pkg.go.dev, append module version suffixes. // This requires the import map from the package metadata. Ignore errors. var depsByImpPath map[source.ImportPath]source.PackageID - if strings.ToLower(view.Options().LinkTarget) == "pkg.go.dev" { + if strings.ToLower(snapshot.Options().LinkTarget) == "pkg.go.dev" { if meta, err := source.NarrowestMetadataForFile(ctx, snapshot, fh.URI()); err == nil { depsByImpPath = meta.DepsByImpPath } @@ -130,7 +129,7 @@ func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle continue // bad import } // See golang/go#36998: don't link to modules matching GOPRIVATE. 
- if view.IsGoPrivatePath(string(importPath)) { + if snapshot.View().IsGoPrivatePath(string(importPath)) { continue } @@ -145,7 +144,7 @@ func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle if err != nil { return nil, err } - targetURL := source.BuildLink(view.Options().LinkTarget, urlPath, "") + targetURL := source.BuildLink(snapshot.Options().LinkTarget, urlPath, "") // Account for the quotation marks in the positions. l, err := toProtocolLink(pgf.Mapper, targetURL, start+len(`"`), end-len(`"`)) if err != nil { @@ -155,7 +154,7 @@ func goLinks(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle } } - urlRegexp := snapshot.View().Options().URLRegexp + urlRegexp := snapshot.Options().URLRegexp // Gather links found in string literals. var str []*ast.BasicLit diff --git a/gopls/internal/lsp/lru/lru.go b/gopls/internal/lsp/lru/lru.go index 5750f412bb0..b75fc852d2d 100644 --- a/gopls/internal/lsp/lru/lru.go +++ b/gopls/internal/lsp/lru/lru.go @@ -11,8 +11,6 @@ import ( "sync" ) -type any = interface{} // TODO: remove once gopls only builds at go1.18+ - // A Cache is a fixed-size in-memory LRU cache. 
type Cache struct { capacity int diff --git a/gopls/internal/lsp/lru/lru_test.go b/gopls/internal/lsp/lru/lru_test.go index 165a64780cb..a9e6407a7c6 100644 --- a/gopls/internal/lsp/lru/lru_test.go +++ b/gopls/internal/lsp/lru/lru_test.go @@ -17,8 +17,6 @@ import ( "golang.org/x/tools/gopls/internal/lsp/lru" ) -type any = interface{} // TODO: remove once gopls only builds at go1.18+ - func TestCache(t *testing.T) { type get struct { key string diff --git a/gopls/internal/lsp/lsprpc/goenv.go b/gopls/internal/lsp/lsprpc/goenv.go index c316ea07c70..b7717844f17 100644 --- a/gopls/internal/lsp/lsprpc/goenv.go +++ b/gopls/internal/lsp/lsprpc/goenv.go @@ -10,10 +10,10 @@ import ( "fmt" "os" + "golang.org/x/tools/gopls/internal/lsp/protocol" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" jsonrpc2_v2 "golang.org/x/tools/internal/jsonrpc2_v2" - "golang.org/x/tools/gopls/internal/lsp/protocol" ) func GoEnvMiddleware() (Middleware, error) { diff --git a/gopls/internal/lsp/mod/diagnostics.go b/gopls/internal/lsp/mod/diagnostics.go index bb0346e6034..43fc0a24481 100644 --- a/gopls/internal/lsp/mod/diagnostics.go +++ b/gopls/internal/lsp/mod/diagnostics.go @@ -192,7 +192,7 @@ func ModVulnerabilityDiagnostics(ctx context.Context, snapshot source.Snapshot, diagSource := source.Govulncheck vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - if vs == nil && snapshot.View().Options().Vulncheck == source.ModeVulncheckImports { + if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { vs, err = snapshot.ModVuln(ctx, fh.URI()) if err != nil { return nil, err diff --git a/gopls/internal/lsp/mod/format.go b/gopls/internal/lsp/mod/format.go index 9c3942ee06d..daa12dac9a4 100644 --- a/gopls/internal/lsp/mod/format.go +++ b/gopls/internal/lsp/mod/format.go @@ -25,6 +25,6 @@ func Format(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) return nil, err } // Calculate the edits to be made due to the change. 
- diffs := snapshot.View().Options().ComputeEdits(string(pm.Mapper.Content), string(formatted)) + diffs := snapshot.Options().ComputeEdits(string(pm.Mapper.Content), string(formatted)) return source.ToProtocolEdits(pm.Mapper, diffs) } diff --git a/gopls/internal/lsp/mod/hover.go b/gopls/internal/lsp/mod/hover.go index fbd3c000013..bc754dcb911 100644 --- a/gopls/internal/lsp/mod/hover.go +++ b/gopls/internal/lsp/mod/hover.go @@ -82,7 +82,7 @@ func hoverOnRequireStatement(ctx context.Context, pm *source.ParsedModule, offse // Get the vulnerability info. fromGovulncheck := true vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - if vs == nil && snapshot.View().Options().Vulncheck == source.ModeVulncheckImports { + if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { var err error vs, err = snapshot.ModVuln(ctx, fh.URI()) if err != nil { @@ -109,7 +109,7 @@ func hoverOnRequireStatement(ctx context.Context, pm *source.ParsedModule, offse if err != nil { return nil, err } - options := snapshot.View().Options() + options := snapshot.Options() isPrivate := snapshot.View().IsGoPrivatePath(req.Mod.Path) header := formatHeader(req.Mod.Path, options) explanation = formatExplanation(explanation, req, options, isPrivate) @@ -140,7 +140,7 @@ func hoverOnModuleStatement(ctx context.Context, pm *source.ParsedModule, offset fromGovulncheck := true vs := snapshot.View().Vulnerabilities(fh.URI())[fh.URI()] - if vs == nil && snapshot.View().Options().Vulncheck == source.ModeVulncheckImports { + if vs == nil && snapshot.Options().Vulncheck == source.ModeVulncheckImports { vs, err = snapshot.ModVuln(ctx, fh.URI()) if err != nil { return nil, false @@ -150,7 +150,7 @@ func hoverOnModuleStatement(ctx context.Context, pm *source.ParsedModule, offset modpath := "stdlib" goVersion := snapshot.View().GoVersionString() affecting, nonaffecting := lookupVulns(vs, modpath, goVersion) - options := snapshot.View().Options() + options := snapshot.Options() vulns := 
formatVulnerabilities(modpath, affecting, nonaffecting, options, fromGovulncheck) return &protocol.Hover{ diff --git a/gopls/internal/lsp/mod/mod_test.go b/gopls/internal/lsp/mod/mod_test.go deleted file mode 100644 index 4ec3067e4ac..00000000000 --- a/gopls/internal/lsp/mod/mod_test.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package mod - -import ( - "io/ioutil" - "os" - "path/filepath" - "testing" - - "golang.org/x/tools/gopls/internal/lsp/cache" - "golang.org/x/tools/gopls/internal/lsp/source" - "golang.org/x/tools/gopls/internal/lsp/tests" - "golang.org/x/tools/gopls/internal/span" - "golang.org/x/tools/internal/testenv" -) - -func TestMain(m *testing.M) { - testenv.ExitIfSmallMachine() - os.Exit(m.Run()) -} - -func TestModfileRemainsUnchanged(t *testing.T) { - testenv.NeedsExec(t) - - ctx := tests.Context(t) - session := cache.NewSession(ctx, cache.New(nil), nil) - options := source.DefaultOptions().Clone() - tests.DefaultOptions(options) - options.TempModfile = true - options.Env = map[string]string{"GOPACKAGESDRIVER": "off", "GOROOT": ""} - - // Make sure to copy the test directory to a temporary directory so we do not - // modify the test code or add go.sum files when we run the tests. 
- folder, err := tests.CopyFolderToTempDir(filepath.Join("testdata", "unchanged")) - if err != nil { - t.Fatal(err) - } - defer os.RemoveAll(folder) - - before, err := ioutil.ReadFile(filepath.Join(folder, "go.mod")) - if err != nil { - t.Fatal(err) - } - _, _, release, err := session.NewView(ctx, "diagnostics_test", span.URIFromPath(folder), options) - if err != nil { - t.Fatal(err) - } - release() - after, err := ioutil.ReadFile(filepath.Join(folder, "go.mod")) - if err != nil { - t.Fatal(err) - } - if string(before) != string(after) { - t.Errorf("the real go.mod file was changed even when tempModfile=true") - } -} diff --git a/gopls/internal/lsp/mod/testdata/unchanged/go.mod b/gopls/internal/lsp/mod/testdata/unchanged/go.mod deleted file mode 100644 index e3d13cebe54..00000000000 --- a/gopls/internal/lsp/mod/testdata/unchanged/go.mod +++ /dev/null @@ -1 +0,0 @@ -module unchanged diff --git a/gopls/internal/lsp/mod/testdata/unchanged/main.go b/gopls/internal/lsp/mod/testdata/unchanged/main.go deleted file mode 100644 index b258445f438..00000000000 --- a/gopls/internal/lsp/mod/testdata/unchanged/main.go +++ /dev/null @@ -1,6 +0,0 @@ -// Package unchanged does something -package unchanged - -func Yo() { - println("yo") -} diff --git a/gopls/internal/lsp/protocol/tsprotocol.go b/gopls/internal/lsp/protocol/tsprotocol.go index 19cdd817773..f571be379a8 100644 --- a/gopls/internal/lsp/protocol/tsprotocol.go +++ b/gopls/internal/lsp/protocol/tsprotocol.go @@ -15,14 +15,14 @@ import "encoding/json" // A special text edit with an additional change annotation. // // @since 3.16.0. -type AnnotatedTextEdit struct { // line 9702 +type AnnotatedTextEdit struct { // The actual identifier of the change annotation AnnotationID ChangeAnnotationIdentifier `json:"annotationId"` TextEdit } // The parameters passed via an apply workspace edit request. 
-type ApplyWorkspaceEditParams struct { // line 6220 +type ApplyWorkspaceEditParams struct { // An optional label of the workspace edit. This label is // presented in the user interface for example on an undo // stack to undo the workspace edit. @@ -34,7 +34,7 @@ type ApplyWorkspaceEditParams struct { // line 6220 // The result returned from the apply workspace edit request. // // @since 3.17 renamed from ApplyWorkspaceEditResponse -type ApplyWorkspaceEditResult struct { // line 6243 +type ApplyWorkspaceEditResult struct { // Indicates whether the edit was applied or not. Applied bool `json:"applied"` // An optional textual description for why the edit was not applied. @@ -48,7 +48,7 @@ type ApplyWorkspaceEditResult struct { // line 6243 } // A base for all symbol information. -type BaseSymbolInformation struct { // line 9284 +type BaseSymbolInformation struct { // The name of this symbol. Name string `json:"name"` // The kind of this symbol. @@ -65,7 +65,7 @@ type BaseSymbolInformation struct { // line 9284 } // @since 3.16.0 -type CallHierarchyClientCapabilities struct { // line 12517 +type CallHierarchyClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` // return value for the corresponding server capability as well. @@ -75,7 +75,7 @@ type CallHierarchyClientCapabilities struct { // line 12517 // Represents an incoming call, e.g. a caller of a method or constructor. // // @since 3.16.0 -type CallHierarchyIncomingCall struct { // line 2852 +type CallHierarchyIncomingCall struct { // The item that makes the call. From CallHierarchyItem `json:"from"` // The ranges at which the calls appear. This is relative to the caller @@ -86,7 +86,7 @@ type CallHierarchyIncomingCall struct { // line 2852 // The parameter of a `callHierarchy/incomingCalls` request. 
// // @since 3.16.0 -type CallHierarchyIncomingCallsParams struct { // line 2828 +type CallHierarchyIncomingCallsParams struct { Item CallHierarchyItem `json:"item"` WorkDoneProgressParams PartialResultParams @@ -96,7 +96,7 @@ type CallHierarchyIncomingCallsParams struct { // line 2828 // of call hierarchy. // // @since 3.16.0 -type CallHierarchyItem struct { // line 2729 +type CallHierarchyItem struct { // The name of this item. Name string `json:"name"` // The kind of this item. @@ -120,14 +120,14 @@ type CallHierarchyItem struct { // line 2729 // Call hierarchy options used during static registration. // // @since 3.16.0 -type CallHierarchyOptions struct { // line 6770 +type CallHierarchyOptions struct { WorkDoneProgressOptions } // Represents an outgoing call, e.g. calling a getter from a method or a method from a constructor etc. // // @since 3.16.0 -type CallHierarchyOutgoingCall struct { // line 2902 +type CallHierarchyOutgoingCall struct { // The item that is called. To CallHierarchyItem `json:"to"` // The range at which this item is called. This is the range relative to the caller, e.g the item @@ -139,7 +139,7 @@ type CallHierarchyOutgoingCall struct { // line 2902 // The parameter of a `callHierarchy/outgoingCalls` request. // // @since 3.16.0 -type CallHierarchyOutgoingCallsParams struct { // line 2878 +type CallHierarchyOutgoingCallsParams struct { Item CallHierarchyItem `json:"item"` WorkDoneProgressParams PartialResultParams @@ -148,7 +148,7 @@ type CallHierarchyOutgoingCallsParams struct { // line 2878 // The parameter of a `textDocument/prepareCallHierarchy` request. // // @since 3.16.0 -type CallHierarchyPrepareParams struct { // line 2711 +type CallHierarchyPrepareParams struct { TextDocumentPositionParams WorkDoneProgressParams } @@ -156,12 +156,12 @@ type CallHierarchyPrepareParams struct { // line 2711 // Call hierarchy options used during static or dynamic registration. 
// // @since 3.16.0 -type CallHierarchyRegistrationOptions struct { // line 2806 +type CallHierarchyRegistrationOptions struct { TextDocumentRegistrationOptions CallHierarchyOptions StaticRegistrationOptions } -type CancelParams struct { // line 6415 +type CancelParams struct { // The request id to cancel. ID interface{} `json:"id"` } @@ -169,7 +169,7 @@ type CancelParams struct { // line 6415 // Additional information that describes document changes. // // @since 3.16.0 -type ChangeAnnotation struct { // line 7067 +type ChangeAnnotation struct { // A human-readable string describing the actual change. The string // is rendered prominent in the user interface. Label string `json:"label"` @@ -184,7 +184,7 @@ type ChangeAnnotation struct { // line 7067 // An identifier to refer to a change annotation stored with a workspace edit. type ChangeAnnotationIdentifier = string // (alias) line 14391 // Defines the capabilities provided by the client. -type ClientCapabilities struct { // line 10028 +type ClientCapabilities struct { // Workspace specific client capabilities. Workspace WorkspaceClientCapabilities `json:"workspace,omitempty"` // Text document specific client capabilities. @@ -207,7 +207,7 @@ type ClientCapabilities struct { // line 10028 // to refactor code. // // A CodeAction must set either `edit` and/or a `command`. If both are supplied, the `edit` is applied first, then the `command` is executed. -type CodeAction struct { // line 5577 +type CodeAction struct { // A short, human-readable, title for this code action. Title string `json:"title"` // The kind of the code action. @@ -254,7 +254,7 @@ type CodeAction struct { // line 5577 } // The Client Capabilities of a {@link CodeActionRequest}. -type CodeActionClientCapabilities struct { // line 12086 +type CodeActionClientCapabilities struct { // Whether code action supports dynamic registration. 
DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client support code action literals of type `CodeAction` as a valid @@ -294,7 +294,7 @@ type CodeActionClientCapabilities struct { // line 12086 // Contains additional diagnostic information about the context in which // a {@link CodeActionProvider.provideCodeActions code action} is run. -type CodeActionContext struct { // line 9350 +type CodeActionContext struct { // An array of diagnostics known on the client side overlapping the range provided to the // `textDocument/codeAction` request. They are provided so that the server knows which // errors are currently presented to the user for the given range. There is no guarantee @@ -313,9 +313,10 @@ type CodeActionContext struct { // line 9350 } // A set of predefined code action kinds -type CodeActionKind string // line 13719 +type CodeActionKind string + // Provider options for a {@link CodeActionRequest}. -type CodeActionOptions struct { // line 9389 +type CodeActionOptions struct { // CodeActionKinds that this server may return. // // The list of kinds may be generic, such as `CodeActionKind.Refactor`, or the server @@ -330,7 +331,7 @@ type CodeActionOptions struct { // line 9389 } // The parameters of a {@link CodeActionRequest}. -type CodeActionParams struct { // line 5503 +type CodeActionParams struct { // The document in which the command was invoked. TextDocument TextDocumentIdentifier `json:"textDocument"` // The range for which the command was invoked. @@ -342,7 +343,7 @@ type CodeActionParams struct { // line 5503 } // Registration options for a {@link CodeActionRequest}. -type CodeActionRegistrationOptions struct { // line 5671 +type CodeActionRegistrationOptions struct { TextDocumentRegistrationOptions CodeActionOptions } @@ -350,11 +351,12 @@ type CodeActionRegistrationOptions struct { // line 5671 // The reason why code actions were requested. 
// // @since 3.17.0 -type CodeActionTriggerKind uint32 // line 14021 +type CodeActionTriggerKind uint32 + // Structure to capture a description for an error code. // // @since 3.16.0 -type CodeDescription struct { // line 10380 +type CodeDescription struct { // An URI to open with more information about the diagnostic error. Href URI `json:"href"` } @@ -364,7 +366,7 @@ type CodeDescription struct { // line 10380 // // A code lens is _unresolved_ when no command is associated to it. For performance // reasons the creation of a code lens and resolving should be done in two stages. -type CodeLens struct { // line 5794 +type CodeLens struct { // The range in which this code lens is valid. Should only span a single line. Range Range `json:"range"` // The command this code lens represents. @@ -376,20 +378,20 @@ type CodeLens struct { // line 5794 } // The client capabilities of a {@link CodeLensRequest}. -type CodeLensClientCapabilities struct { // line 12200 +type CodeLensClientCapabilities struct { // Whether code lens supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // Code Lens provider options of a {@link CodeLensRequest}. -type CodeLensOptions struct { // line 9445 +type CodeLensOptions struct { // Code lens has a resolve provider as well. ResolveProvider bool `json:"resolveProvider,omitempty"` WorkDoneProgressOptions } // The parameters of a {@link CodeLensRequest}. -type CodeLensParams struct { // line 5770 +type CodeLensParams struct { // The document to request code lens for. TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams @@ -397,13 +399,13 @@ type CodeLensParams struct { // line 5770 } // Registration options for a {@link CodeLensRequest}. 
-type CodeLensRegistrationOptions struct { // line 5826 +type CodeLensRegistrationOptions struct { TextDocumentRegistrationOptions CodeLensOptions } // @since 3.16.0 -type CodeLensWorkspaceClientCapabilities struct { // line 11358 +type CodeLensWorkspaceClientCapabilities struct { // Whether the client implementation supports a refresh request sent from the // server to the client. // @@ -415,7 +417,7 @@ type CodeLensWorkspaceClientCapabilities struct { // line 11358 } // Represents a color in RGBA space. -type Color struct { // line 6669 +type Color struct { // The red component of this color in the range [0-1]. Red float64 `json:"red"` // The green component of this color in the range [0-1]. @@ -427,13 +429,13 @@ type Color struct { // line 6669 } // Represents a color range from a document. -type ColorInformation struct { // line 2312 +type ColorInformation struct { // The range in the document where this color appears. Range Range `json:"range"` // The actual color value for this color range. Color Color `json:"color"` } -type ColorPresentation struct { // line 2394 +type ColorPresentation struct { // The label of this color presentation. It will be shown on the color // picker header. By default this is also the text that is inserted when selecting // this color presentation. @@ -448,7 +450,7 @@ type ColorPresentation struct { // line 2394 } // Parameters for a {@link ColorPresentationRequest}. -type ColorPresentationParams struct { // line 2354 +type ColorPresentationParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The color to request presentations for. @@ -463,7 +465,7 @@ type ColorPresentationParams struct { // line 2354 // will be used to represent a command in the UI and, optionally, // an array of arguments which will be passed to the command handler // function when invoked. -type Command struct { // line 5543 +type Command struct { // Title of the command, like `save`. 
Title string `json:"title"` // The identifier of the actual command handler. @@ -474,7 +476,7 @@ type Command struct { // line 5543 } // Completion client capabilities -type CompletionClientCapabilities struct { // line 11533 +type CompletionClientCapabilities struct { // Whether completion supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports the following `CompletionItem` specific @@ -498,7 +500,7 @@ type CompletionClientCapabilities struct { // line 11533 } // Contains additional information about the context in which a completion request is triggered. -type CompletionContext struct { // line 8946 +type CompletionContext struct { // How the completion was triggered. TriggerKind CompletionTriggerKind `json:"triggerKind"` // The trigger character (a single character) that has trigger code complete. @@ -508,7 +510,7 @@ type CompletionContext struct { // line 8946 // A completion item represents a text snippet that is // proposed to complete text that is being typed. -type CompletionItem struct { // line 4723 +type CompletionItem struct { // The label of this completion item. // // The label property is also by default the text that @@ -629,11 +631,12 @@ type CompletionItem struct { // line 4723 } // The kind of a completion entry. -type CompletionItemKind uint32 // line 13527 +type CompletionItemKind uint32 + // Additional details for a completion item label. // // @since 3.17.0 -type CompletionItemLabelDetails struct { // line 8969 +type CompletionItemLabelDetails struct { // An optional string which is rendered less prominently directly after {@link CompletionItem.label label}, // without any spacing. Should be used for function signatures and type annotations. Detail string `json:"detail,omitempty"` @@ -646,10 +649,11 @@ type CompletionItemLabelDetails struct { // line 8969 // item. 
// // @since 3.15.0 -type CompletionItemTag uint32 // line 13637 +type CompletionItemTag uint32 + // Represents a collection of {@link CompletionItem completion items} to be presented // in the editor. -type CompletionList struct { // line 4932 +type CompletionList struct { // This list it not complete. Further typing results in recomputing this list. // // Recomputed lists have all their items replaced (not appended) in the @@ -674,7 +678,7 @@ type CompletionList struct { // line 4932 } // Completion options. -type CompletionOptions struct { // line 9025 +type CompletionOptions struct { // Most tools trigger completion request automatically without explicitly requesting // it using a keyboard shortcut (e.g. Ctrl+Space). Typically they do so when the user // starts to type an identifier. For example if the user types `c` in a JavaScript file @@ -705,7 +709,7 @@ type CompletionOptions struct { // line 9025 } // Completion parameters -type CompletionParams struct { // line 4692 +type CompletionParams struct { // The completion context. This is only available it the client specifies // to send this using the client capability `textDocument.completion.contextSupport === true` Context CompletionContext `json:"context,omitempty"` @@ -715,14 +719,14 @@ type CompletionParams struct { // line 4692 } // Registration options for a {@link CompletionRequest}. -type CompletionRegistrationOptions struct { // line 5049 +type CompletionRegistrationOptions struct { TextDocumentRegistrationOptions CompletionOptions } // How a completion was triggered -type CompletionTriggerKind uint32 // line 13970 -type ConfigurationItem struct { // line 6632 +type CompletionTriggerKind uint32 +type ConfigurationItem struct { // The scope to get the configuration section for. ScopeURI string `json:"scopeUri,omitempty"` // The configuration section asked for. @@ -730,12 +734,12 @@ type ConfigurationItem struct { // line 6632 } // The parameters of a configuration request. 
-type ConfigurationParams struct { // line 2272 +type ConfigurationParams struct { Items []ConfigurationItem `json:"items"` } // Create file operation. -type CreateFile struct { // line 6948 +type CreateFile struct { // A create Kind string `json:"kind"` // The resource to create. @@ -746,7 +750,7 @@ type CreateFile struct { // line 6948 } // Options to create a file. -type CreateFileOptions struct { // line 9747 +type CreateFileOptions struct { // Overwrite existing file. Overwrite wins over `ignoreIfExists` Overwrite bool `json:"overwrite,omitempty"` // Ignore if exists. @@ -757,7 +761,7 @@ type CreateFileOptions struct { // line 9747 // files. // // @since 3.16.0 -type CreateFilesParams struct { // line 3248 +type CreateFilesParams struct { // An array of all files/folders created in this operation. Files []FileCreate `json:"files"` } @@ -765,7 +769,7 @@ type CreateFilesParams struct { // line 3248 // The declaration of a symbol representation as one or many {@link Location locations}. type Declaration = []Location // (alias) line 14248 // @since 3.14.0 -type DeclarationClientCapabilities struct { // line 11874 +type DeclarationClientCapabilities struct { // Whether declaration supports dynamic registration. If this is set to `true` // the client supports the new `DeclarationRegistrationOptions` return value // for the corresponding server capability as well. @@ -782,15 +786,15 @@ type DeclarationClientCapabilities struct { // line 11874 // Servers should prefer returning `DeclarationLink` over `Declaration` if supported // by the client. 
type DeclarationLink = LocationLink // (alias) line 14268 -type DeclarationOptions struct { // line 6727 +type DeclarationOptions struct { WorkDoneProgressOptions } -type DeclarationParams struct { // line 2567 +type DeclarationParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } -type DeclarationRegistrationOptions struct { // line 2587 +type DeclarationRegistrationOptions struct { DeclarationOptions TextDocumentRegistrationOptions StaticRegistrationOptions @@ -804,7 +808,7 @@ type DeclarationRegistrationOptions struct { // line 2587 // by the client. type Definition = Or_Definition // (alias) line 14166 // Client Capabilities for a {@link DefinitionRequest}. -type DefinitionClientCapabilities struct { // line 11899 +type DefinitionClientCapabilities struct { // Whether definition supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports additional metadata in the form of definition links. @@ -819,25 +823,25 @@ type DefinitionClientCapabilities struct { // line 11899 // the defining symbol type DefinitionLink = LocationLink // (alias) line 14186 // Server Capabilities for a {@link DefinitionRequest}. -type DefinitionOptions struct { // line 9237 +type DefinitionOptions struct { WorkDoneProgressOptions } // Parameters for a {@link DefinitionRequest}. -type DefinitionParams struct { // line 5213 +type DefinitionParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } // Registration options for a {@link DefinitionRequest}. -type DefinitionRegistrationOptions struct { // line 5234 +type DefinitionRegistrationOptions struct { TextDocumentRegistrationOptions DefinitionOptions } // Delete file operation -type DeleteFile struct { // line 7030 +type DeleteFile struct { // A delete Kind string `json:"kind"` // The file to delete. 
@@ -848,7 +852,7 @@ type DeleteFile struct { // line 7030 } // Delete file options -type DeleteFileOptions struct { // line 9795 +type DeleteFileOptions struct { // Delete the content recursively if a folder is denoted. Recursive bool `json:"recursive,omitempty"` // Ignore the operation if the file doesn't exist. @@ -859,14 +863,14 @@ type DeleteFileOptions struct { // line 9795 // files. // // @since 3.16.0 -type DeleteFilesParams struct { // line 3373 +type DeleteFilesParams struct { // An array of all files/folders deleted in this operation. Files []FileDelete `json:"files"` } // Represents a diagnostic, such as a compiler error or warning. Diagnostic objects // are only valid in the scope of a resource. -type Diagnostic struct { // line 8843 +type Diagnostic struct { // The range at which the message applies Range Range `json:"range"` // The diagnostic's severity. Can be omitted. If omitted it is up to the @@ -902,7 +906,7 @@ type Diagnostic struct { // line 8843 // Client capabilities specific to diagnostic pull requests. // // @since 3.17.0 -type DiagnosticClientCapabilities struct { // line 12784 +type DiagnosticClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` // return value for the corresponding server capability as well. @@ -914,7 +918,7 @@ type DiagnosticClientCapabilities struct { // line 12784 // Diagnostic options. // // @since 3.17.0 -type DiagnosticOptions struct { // line 7529 +type DiagnosticOptions struct { // An optional identifier under which the diagnostics are // managed by the client. Identifier string `json:"identifier,omitempty"` @@ -931,7 +935,7 @@ type DiagnosticOptions struct { // line 7529 // Diagnostic registration options. 
// // @since 3.17.0 -type DiagnosticRegistrationOptions struct { // line 3928 +type DiagnosticRegistrationOptions struct { TextDocumentRegistrationOptions DiagnosticOptions StaticRegistrationOptions @@ -940,7 +944,7 @@ type DiagnosticRegistrationOptions struct { // line 3928 // Represents a related message and source code location for a diagnostic. This should be // used to point to code locations that cause or related to a diagnostics, e.g when duplicating // a symbol in a scope. -type DiagnosticRelatedInformation struct { // line 10395 +type DiagnosticRelatedInformation struct { // The location of this related diagnostic information. Location Location `json:"location"` // The message of this related diagnostic information. @@ -950,20 +954,22 @@ type DiagnosticRelatedInformation struct { // line 10395 // Cancellation data returned from a diagnostic request. // // @since 3.17.0 -type DiagnosticServerCancellationData struct { // line 3914 +type DiagnosticServerCancellationData struct { RetriggerRequest bool `json:"retriggerRequest"` } // The diagnostic's severity. -type DiagnosticSeverity uint32 // line 13919 +type DiagnosticSeverity uint32 + // The diagnostic tags. // // @since 3.15.0 -type DiagnosticTag uint32 // line 13949 +type DiagnosticTag uint32 + // Workspace client capabilities specific to diagnostic pull requests. // // @since 3.17.0 -type DiagnosticWorkspaceClientCapabilities struct { // line 11476 +type DiagnosticWorkspaceClientCapabilities struct { // Whether the client implementation supports a refresh request sent from // the server to the client. // @@ -973,24 +979,24 @@ type DiagnosticWorkspaceClientCapabilities struct { // line 11476 // change that requires such a calculation. RefreshSupport bool `json:"refreshSupport,omitempty"` } -type DidChangeConfigurationClientCapabilities struct { // line 11202 +type DidChangeConfigurationClientCapabilities struct { // Did change configuration notification supports dynamic registration. 
DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // The parameters of a change configuration notification. -type DidChangeConfigurationParams struct { // line 4339 +type DidChangeConfigurationParams struct { // The actual changed settings Settings interface{} `json:"settings"` } -type DidChangeConfigurationRegistrationOptions struct { // line 4353 +type DidChangeConfigurationRegistrationOptions struct { Section *OrPSection_workspace_didChangeConfiguration `json:"section,omitempty"` } // The params sent in a change notebook document notification. // // @since 3.17.0 -type DidChangeNotebookDocumentParams struct { // line 4047 +type DidChangeNotebookDocumentParams struct { // The notebook document that did change. The version number points // to the version after all provided changes have been applied. If // only the text document content of a cell changes the notebook version @@ -1014,7 +1020,7 @@ type DidChangeNotebookDocumentParams struct { // line 4047 } // The change text document notification's parameters. -type DidChangeTextDocumentParams struct { // line 4482 +type DidChangeTextDocumentParams struct { // The document that did change. The version number points // to the version after all provided content changes have // been applied. @@ -1033,7 +1039,7 @@ type DidChangeTextDocumentParams struct { // line 4482 // you receive them. ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"` } -type DidChangeWatchedFilesClientCapabilities struct { // line 11216 +type DidChangeWatchedFilesClientCapabilities struct { // Did change watched files notification supports dynamic registration. Please note // that the current protocol doesn't support static configuration for file changes // from the server side. @@ -1046,19 +1052,19 @@ type DidChangeWatchedFilesClientCapabilities struct { // line 11216 } // The watched files change notification's parameters. 
-type DidChangeWatchedFilesParams struct { // line 4623 +type DidChangeWatchedFilesParams struct { // The actual file events. Changes []FileEvent `json:"changes"` } // Describe options to be used when registered for text document change events. -type DidChangeWatchedFilesRegistrationOptions struct { // line 4640 +type DidChangeWatchedFilesRegistrationOptions struct { // The watchers to register. Watchers []FileSystemWatcher `json:"watchers"` } // The parameters of a `workspace/didChangeWorkspaceFolders` notification. -type DidChangeWorkspaceFoldersParams struct { // line 2258 +type DidChangeWorkspaceFoldersParams struct { // The actual workspace folder change event. Event WorkspaceFoldersChangeEvent `json:"event"` } @@ -1066,7 +1072,7 @@ type DidChangeWorkspaceFoldersParams struct { // line 2258 // The params sent in a close notebook document notification. // // @since 3.17.0 -type DidCloseNotebookDocumentParams struct { // line 4085 +type DidCloseNotebookDocumentParams struct { // The notebook document that got closed. NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"` // The text documents that represent the content @@ -1075,7 +1081,7 @@ type DidCloseNotebookDocumentParams struct { // line 4085 } // The parameters sent in a close text document notification -type DidCloseTextDocumentParams struct { // line 4527 +type DidCloseTextDocumentParams struct { // The document that was closed. TextDocument TextDocumentIdentifier `json:"textDocument"` } @@ -1083,7 +1089,7 @@ type DidCloseTextDocumentParams struct { // line 4527 // The params sent in an open notebook document notification. // // @since 3.17.0 -type DidOpenNotebookDocumentParams struct { // line 4021 +type DidOpenNotebookDocumentParams struct { // The notebook document that got opened. 
NotebookDocument NotebookDocument `json:"notebookDocument"` // The text documents that represent the content @@ -1092,7 +1098,7 @@ type DidOpenNotebookDocumentParams struct { // line 4021 } // The parameters sent in an open text document notification -type DidOpenTextDocumentParams struct { // line 4468 +type DidOpenTextDocumentParams struct { // The document that was opened. TextDocument TextDocumentItem `json:"textDocument"` } @@ -1100,37 +1106,37 @@ type DidOpenTextDocumentParams struct { // line 4468 // The params sent in a save notebook document notification. // // @since 3.17.0 -type DidSaveNotebookDocumentParams struct { // line 4070 +type DidSaveNotebookDocumentParams struct { // The notebook document that got saved. NotebookDocument NotebookDocumentIdentifier `json:"notebookDocument"` } // The parameters sent in a save text document notification -type DidSaveTextDocumentParams struct { // line 4541 +type DidSaveTextDocumentParams struct { // The document that was saved. TextDocument TextDocumentIdentifier `json:"textDocument"` // Optional the content when saved. Depends on the includeText value // when the save notification was requested. Text *string `json:"text,omitempty"` } -type DocumentColorClientCapabilities struct { // line 12240 +type DocumentColorClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `DocumentColorRegistrationOptions` return value // for the corresponding server capability as well. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } -type DocumentColorOptions struct { // line 6707 +type DocumentColorOptions struct { WorkDoneProgressOptions } // Parameters for a {@link DocumentColorRequest}. -type DocumentColorParams struct { // line 2288 +type DocumentColorParams struct { // The text document. 
TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams PartialResultParams } -type DocumentColorRegistrationOptions struct { // line 2334 +type DocumentColorRegistrationOptions struct { TextDocumentRegistrationOptions DocumentColorOptions StaticRegistrationOptions @@ -1139,7 +1145,7 @@ type DocumentColorRegistrationOptions struct { // line 2334 // Parameters of the document diagnostic request. // // @since 3.17.0 -type DocumentDiagnosticParams struct { // line 3841 +type DocumentDiagnosticParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The additional identifier provided during registration. @@ -1153,11 +1159,12 @@ type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) line 1390 // The document diagnostic report kinds. // // @since 3.17.0 -type DocumentDiagnosticReportKind string // line 13115 +type DocumentDiagnosticReportKind string + // A partial result for a document diagnostic report. // // @since 3.17.0 -type DocumentDiagnosticReportPartialResult struct { // line 3884 +type DocumentDiagnosticReportPartialResult struct { RelatedDocuments map[DocumentURI]interface{} `json:"relatedDocuments"` } @@ -1167,18 +1174,18 @@ type DocumentDiagnosticReportPartialResult struct { // line 3884 // @since 3.17.0 - proposed support for NotebookCellTextDocumentFilter. type DocumentFilter = Or_DocumentFilter // (alias) line 14508 // Client capabilities of a {@link DocumentFormattingRequest}. -type DocumentFormattingClientCapabilities struct { // line 12254 +type DocumentFormattingClientCapabilities struct { // Whether formatting supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // Provider options for a {@link DocumentFormattingRequest}. -type DocumentFormattingOptions struct { // line 9539 +type DocumentFormattingOptions struct { WorkDoneProgressOptions } // The parameters of a {@link DocumentFormattingRequest}. 
-type DocumentFormattingParams struct { // line 5922 +type DocumentFormattingParams struct { // The document to format. TextDocument TextDocumentIdentifier `json:"textDocument"` // The format options. @@ -1187,7 +1194,7 @@ type DocumentFormattingParams struct { // line 5922 } // Registration options for a {@link DocumentFormattingRequest}. -type DocumentFormattingRegistrationOptions struct { // line 5950 +type DocumentFormattingRegistrationOptions struct { TextDocumentRegistrationOptions DocumentFormattingOptions } @@ -1195,7 +1202,7 @@ type DocumentFormattingRegistrationOptions struct { // line 5950 // A document highlight is a range inside a text document which deserves // special attention. Usually a document highlight is visualized by changing // the background color of its range. -type DocumentHighlight struct { // line 5314 +type DocumentHighlight struct { // The range this highlight applies to. Range Range `json:"range"` // The highlight kind, default is {@link DocumentHighlightKind.Text text}. @@ -1203,34 +1210,35 @@ type DocumentHighlight struct { // line 5314 } // Client Capabilities for a {@link DocumentHighlightRequest}. -type DocumentHighlightClientCapabilities struct { // line 11989 +type DocumentHighlightClientCapabilities struct { // Whether document highlight supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // A document highlight kind. -type DocumentHighlightKind uint32 // line 13694 +type DocumentHighlightKind uint32 + // Provider options for a {@link DocumentHighlightRequest}. -type DocumentHighlightOptions struct { // line 9273 +type DocumentHighlightOptions struct { WorkDoneProgressOptions } // Parameters for a {@link DocumentHighlightRequest}. -type DocumentHighlightParams struct { // line 5293 +type DocumentHighlightParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } // Registration options for a {@link DocumentHighlightRequest}. 
-type DocumentHighlightRegistrationOptions struct { // line 5337 +type DocumentHighlightRegistrationOptions struct { TextDocumentRegistrationOptions DocumentHighlightOptions } // A document link is a range in a text document that links to an internal or external resource, like another // text document or a web site. -type DocumentLink struct { // line 5865 +type DocumentLink struct { // The range this link applies to. Range Range `json:"range"` // The uri this link points to. If missing a resolve request is sent later. @@ -1249,7 +1257,7 @@ type DocumentLink struct { // line 5865 } // The client capabilities of a {@link DocumentLinkRequest}. -type DocumentLinkClientCapabilities struct { // line 12215 +type DocumentLinkClientCapabilities struct { // Whether document link supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Whether the client supports the `tooltip` property on `DocumentLink`. @@ -1259,14 +1267,14 @@ type DocumentLinkClientCapabilities struct { // line 12215 } // Provider options for a {@link DocumentLinkRequest}. -type DocumentLinkOptions struct { // line 9466 +type DocumentLinkOptions struct { // Document links have a resolve provider as well. ResolveProvider bool `json:"resolveProvider,omitempty"` WorkDoneProgressOptions } // The parameters of a {@link DocumentLinkRequest}. -type DocumentLinkParams struct { // line 5841 +type DocumentLinkParams struct { // The document to provide document links for. TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams @@ -1274,19 +1282,19 @@ type DocumentLinkParams struct { // line 5841 } // Registration options for a {@link DocumentLinkRequest}. -type DocumentLinkRegistrationOptions struct { // line 5907 +type DocumentLinkRegistrationOptions struct { TextDocumentRegistrationOptions DocumentLinkOptions } // Client capabilities of a {@link DocumentOnTypeFormattingRequest}. 
-type DocumentOnTypeFormattingClientCapabilities struct { // line 12295 +type DocumentOnTypeFormattingClientCapabilities struct { // Whether on type formatting supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // Provider options for a {@link DocumentOnTypeFormattingRequest}. -type DocumentOnTypeFormattingOptions struct { // line 9573 +type DocumentOnTypeFormattingOptions struct { // A character on which formatting should be triggered, like `{`. FirstTriggerCharacter string `json:"firstTriggerCharacter"` // More trigger characters. @@ -1294,7 +1302,7 @@ type DocumentOnTypeFormattingOptions struct { // line 9573 } // The parameters of a {@link DocumentOnTypeFormattingRequest}. -type DocumentOnTypeFormattingParams struct { // line 6057 +type DocumentOnTypeFormattingParams struct { // The document to format. TextDocument TextDocumentIdentifier `json:"textDocument"` // The position around which the on type formatting should happen. @@ -1311,13 +1319,13 @@ type DocumentOnTypeFormattingParams struct { // line 6057 } // Registration options for a {@link DocumentOnTypeFormattingRequest}. -type DocumentOnTypeFormattingRegistrationOptions struct { // line 6095 +type DocumentOnTypeFormattingRegistrationOptions struct { TextDocumentRegistrationOptions DocumentOnTypeFormattingOptions } // Client capabilities of a {@link DocumentRangeFormattingRequest}. -type DocumentRangeFormattingClientCapabilities struct { // line 12269 +type DocumentRangeFormattingClientCapabilities struct { // Whether range formatting supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Whether the client supports formatting multiple ranges at once. @@ -1328,7 +1336,7 @@ type DocumentRangeFormattingClientCapabilities struct { // line 12269 } // Provider options for a {@link DocumentRangeFormattingRequest}. 
-type DocumentRangeFormattingOptions struct { // line 9550 +type DocumentRangeFormattingOptions struct { // Whether the server supports formatting multiple ranges at once. // // @since 3.18.0 @@ -1338,7 +1346,7 @@ type DocumentRangeFormattingOptions struct { // line 9550 } // The parameters of a {@link DocumentRangeFormattingRequest}. -type DocumentRangeFormattingParams struct { // line 5965 +type DocumentRangeFormattingParams struct { // The document to format. TextDocument TextDocumentIdentifier `json:"textDocument"` // The range to format @@ -1349,7 +1357,7 @@ type DocumentRangeFormattingParams struct { // line 5965 } // Registration options for a {@link DocumentRangeFormattingRequest}. -type DocumentRangeFormattingRegistrationOptions struct { // line 6001 +type DocumentRangeFormattingRegistrationOptions struct { TextDocumentRegistrationOptions DocumentRangeFormattingOptions } @@ -1358,7 +1366,7 @@ type DocumentRangeFormattingRegistrationOptions struct { // line 6001 // // @since 3.18.0 // @proposed -type DocumentRangesFormattingParams struct { // line 6016 +type DocumentRangesFormattingParams struct { // The document to format. TextDocument TextDocumentIdentifier `json:"textDocument"` // The ranges to format @@ -1378,7 +1386,7 @@ type DocumentSelector = []DocumentFilter // (alias) line 14363 // that appear in a document. Document symbols can be hierarchical and they // have two ranges: one that encloses its definition and one that points to // its most interesting range, e.g. the range of an identifier. -type DocumentSymbol struct { // line 5406 +type DocumentSymbol struct { // The name of this symbol. Will be displayed in the user interface and therefore must not be // an empty string or a string only consisting of white spaces. Name string `json:"name"` @@ -1406,7 +1414,7 @@ type DocumentSymbol struct { // line 5406 } // Client Capabilities for a {@link DocumentSymbolRequest}. 
-type DocumentSymbolClientCapabilities struct { // line 12004 +type DocumentSymbolClientCapabilities struct { // Whether document symbol supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Specific capabilities for the `SymbolKind` in the @@ -1428,7 +1436,7 @@ type DocumentSymbolClientCapabilities struct { // line 12004 } // Provider options for a {@link DocumentSymbolRequest}. -type DocumentSymbolOptions struct { // line 9328 +type DocumentSymbolOptions struct { // A human-readable string that is shown when multiple outlines trees // are shown for the same document. // @@ -1438,7 +1446,7 @@ type DocumentSymbolOptions struct { // line 9328 } // Parameters for a {@link DocumentSymbolRequest}. -type DocumentSymbolParams struct { // line 5352 +type DocumentSymbolParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams @@ -1446,29 +1454,30 @@ type DocumentSymbolParams struct { // line 5352 } // Registration options for a {@link DocumentSymbolRequest}. -type DocumentSymbolRegistrationOptions struct { // line 5488 +type DocumentSymbolRegistrationOptions struct { TextDocumentRegistrationOptions DocumentSymbolOptions } type DocumentURI string // Predefined error codes. -type ErrorCodes int32 // line 13136 +type ErrorCodes int32 + // The client capabilities of a {@link ExecuteCommandRequest}. -type ExecuteCommandClientCapabilities struct { // line 11327 +type ExecuteCommandClientCapabilities struct { // Execute command supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // The server capabilities of a {@link ExecuteCommandRequest}. -type ExecuteCommandOptions struct { // line 9621 +type ExecuteCommandOptions struct { // The commands to be executed on the server Commands []string `json:"commands"` WorkDoneProgressOptions } // The parameters of a {@link ExecuteCommandRequest}. 
-type ExecuteCommandParams struct { // line 6177 +type ExecuteCommandParams struct { // The identifier of the actual command handler. Command string `json:"command"` // Arguments that the command should be invoked with. @@ -1477,10 +1486,10 @@ type ExecuteCommandParams struct { // line 6177 } // Registration options for a {@link ExecuteCommandRequest}. -type ExecuteCommandRegistrationOptions struct { // line 6209 +type ExecuteCommandRegistrationOptions struct { ExecuteCommandOptions } -type ExecutionSummary struct { // line 10516 +type ExecutionSummary struct { // A strict monotonically increasing value // indicating the execution order of a cell // inside a notebook. @@ -1491,7 +1500,7 @@ type ExecutionSummary struct { // line 10516 } // created for Literal (Lit_CodeActionClientCapabilities_codeActionLiteralSupport_codeActionKind) -type FCodeActionKindPCodeActionLiteralSupport struct { // line 12107 +type FCodeActionKindPCodeActionLiteralSupport struct { // The code action kind values the client supports. When this // property exists the client also guarantees that it will // handle values outside its set gracefully and falls back @@ -1500,25 +1509,25 @@ type FCodeActionKindPCodeActionLiteralSupport struct { // line 12107 } // created for Literal (Lit_CompletionList_itemDefaults_editRange_Item1) -type FEditRangePItemDefaults struct { // line 4972 +type FEditRangePItemDefaults struct { Insert Range `json:"insert"` Replace Range `json:"replace"` } // created for Literal (Lit_SemanticTokensClientCapabilities_requests_full_Item1) -type FFullPRequests struct { // line 12581 +type FFullPRequests struct { // The client will send the `textDocument/semanticTokens/full/delta` request if // the server provides a corresponding handler. 
Delta bool `json:"delta"` } // created for Literal (Lit_CompletionClientCapabilities_completionItem_insertTextModeSupport) -type FInsertTextModeSupportPCompletionItem struct { // line 11660 +type FInsertTextModeSupportPCompletionItem struct { ValueSet []InsertTextMode `json:"valueSet"` } // created for Literal (Lit_SignatureHelpClientCapabilities_signatureInformation_parameterInformation) -type FParameterInformationPSignatureInformation struct { // line 11826 +type FParameterInformationPSignatureInformation struct { // The client supports processing label offsets instead of a // simple label string. // @@ -1527,17 +1536,17 @@ type FParameterInformationPSignatureInformation struct { // line 11826 } // created for Literal (Lit_SemanticTokensClientCapabilities_requests_range_Item1) -type FRangePRequests struct { // line 12561 +type FRangePRequests struct { } // created for Literal (Lit_CompletionClientCapabilities_completionItem_resolveSupport) -type FResolveSupportPCompletionItem struct { // line 11636 +type FResolveSupportPCompletionItem struct { // The properties that a client can resolve lazily. Properties []string `json:"properties"` } // created for Literal (Lit_NotebookDocumentChangeEvent_cells_structure) -type FStructurePCells struct { // line 7723 +type FStructurePCells struct { // The change to the cell array. Array NotebookCellArrayChange `json:"array"` // Additional opened cell text documents. @@ -1547,17 +1556,19 @@ type FStructurePCells struct { // line 7723 } // created for Literal (Lit_CompletionClientCapabilities_completionItem_tagSupport) -type FTagSupportPCompletionItem struct { // line 11602 +type FTagSupportPCompletionItem struct { // The tags supported by the client. ValueSet []CompletionItemTag `json:"valueSet"` } -type FailureHandlingKind string // line 14108 +type FailureHandlingKind string + // The file event type -type FileChangeType uint32 // line 13869 +type FileChangeType uint32 + // Represents information on a file/folder create. 
// // @since 3.16.0 -type FileCreate struct { // line 6898 +type FileCreate struct { // A file:// URI for the location of the file/folder being created. URI string `json:"uri"` } @@ -1565,13 +1576,13 @@ type FileCreate struct { // line 6898 // Represents information on a file/folder delete. // // @since 3.16.0 -type FileDelete struct { // line 7147 +type FileDelete struct { // A file:// URI for the location of the file/folder being deleted. URI string `json:"uri"` } // An event describing a file change. -type FileEvent struct { // line 8798 +type FileEvent struct { // The file's uri. URI DocumentURI `json:"uri"` // The change type. @@ -1584,7 +1595,7 @@ type FileEvent struct { // line 8798 // like renaming a file in the UI. // // @since 3.16.0 -type FileOperationClientCapabilities struct { // line 11374 +type FileOperationClientCapabilities struct { // Whether the client supports dynamic registration for file requests/notifications. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client has support for sending didCreateFiles notifications. @@ -1605,7 +1616,7 @@ type FileOperationClientCapabilities struct { // line 11374 // the server is interested in receiving. // // @since 3.16.0 -type FileOperationFilter struct { // line 7100 +type FileOperationFilter struct { // A Uri scheme like `file` or `untitled`. Scheme string `json:"scheme,omitempty"` // The actual file operation pattern. @@ -1615,7 +1626,7 @@ type FileOperationFilter struct { // line 7100 // Options for notifications/requests for user operations on files. // // @since 3.16.0 -type FileOperationOptions struct { // line 10319 +type FileOperationOptions struct { // The server is interested in receiving didCreateFiles notifications. DidCreate *FileOperationRegistrationOptions `json:"didCreate,omitempty"` // The server is interested in receiving willCreateFiles requests. @@ -1634,7 +1645,7 @@ type FileOperationOptions struct { // line 10319 // the server is interested in receiving. 
// // @since 3.16.0 -type FileOperationPattern struct { // line 9819 +type FileOperationPattern struct { // The glob pattern to match. Glob patterns can have the following syntax: // // - `*` to match one or more characters in a path segment @@ -1656,11 +1667,12 @@ type FileOperationPattern struct { // line 9819 // both. // // @since 3.16.0 -type FileOperationPatternKind string // line 14042 +type FileOperationPatternKind string + // Matching options for the file operation pattern. // // @since 3.16.0 -type FileOperationPatternOptions struct { // line 10500 +type FileOperationPatternOptions struct { // The pattern should be matched ignoring casing. IgnoreCase bool `json:"ignoreCase,omitempty"` } @@ -1668,7 +1680,7 @@ type FileOperationPatternOptions struct { // line 10500 // The options to register for file operations. // // @since 3.16.0 -type FileOperationRegistrationOptions struct { // line 3337 +type FileOperationRegistrationOptions struct { // The actual filters. Filters []FileOperationFilter `json:"filters"` } @@ -1676,13 +1688,13 @@ type FileOperationRegistrationOptions struct { // line 3337 // Represents information on a file/folder rename. // // @since 3.16.0 -type FileRename struct { // line 7124 +type FileRename struct { // A file:// URI for the original location of the file/folder being renamed. OldURI string `json:"oldUri"` // A file:// URI for the new location of the file/folder being renamed. NewURI string `json:"newUri"` } -type FileSystemWatcher struct { // line 8820 +type FileSystemWatcher struct { // The glob pattern to watch. See {@link GlobPattern glob pattern} for more detail. // // @since 3.17.0 support for relative patterns. @@ -1695,7 +1707,7 @@ type FileSystemWatcher struct { // line 8820 // Represents a folding range. To be valid, start and end line must be bigger than zero and smaller // than the number of lines in the document. Clients are free to ignore invalid ranges. 
-type FoldingRange struct { // line 2488 +type FoldingRange struct { // The zero-based start line of the range to fold. The folded area starts after the line's last character. // To be valid, the end must be zero or larger and smaller than the number of lines in the document. StartLine uint32 `json:"startLine"` @@ -1717,7 +1729,7 @@ type FoldingRange struct { // line 2488 // @since 3.17.0 CollapsedText string `json:"collapsedText,omitempty"` } -type FoldingRangeClientCapabilities struct { // line 12354 +type FoldingRangeClientCapabilities struct { // Whether implementation supports dynamic registration for folding range // providers. If this is set to `true` the client supports the new // `FoldingRangeRegistrationOptions` return value for the corresponding @@ -1742,26 +1754,26 @@ type FoldingRangeClientCapabilities struct { // line 12354 } // A set of predefined range kinds. -type FoldingRangeKind string // line 13208 -type FoldingRangeOptions struct { // line 6717 +type FoldingRangeKind string +type FoldingRangeOptions struct { WorkDoneProgressOptions } // Parameters for a {@link FoldingRangeRequest}. -type FoldingRangeParams struct { // line 2464 +type FoldingRangeParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams PartialResultParams } -type FoldingRangeRegistrationOptions struct { // line 2547 +type FoldingRangeRegistrationOptions struct { TextDocumentRegistrationOptions FoldingRangeOptions StaticRegistrationOptions } // Value-object describing what options formatting should use. -type FormattingOptions struct { // line 9487 +type FormattingOptions struct { // Size of a tab in spaces. TabSize uint32 `json:"tabSize"` // Prefer spaces over tabs. @@ -1783,7 +1795,7 @@ type FormattingOptions struct { // line 9487 // A diagnostic report with a full set of problems. 
// // @since 3.17.0 -type FullDocumentDiagnosticReport struct { // line 7471 +type FullDocumentDiagnosticReport struct { // A full document diagnostic report. Kind string `json:"kind"` // An optional result id. If provided it will @@ -1797,7 +1809,7 @@ type FullDocumentDiagnosticReport struct { // line 7471 // General client capabilities. // // @since 3.16.0 -type GeneralClientCapabilities struct { // line 11029 +type GeneralClientCapabilities struct { // Client capability that signals how the client // handles stale requests (e.g. a request // for which the client will not process the response @@ -1839,14 +1851,14 @@ type GeneralClientCapabilities struct { // line 11029 // @since 3.17.0 type GlobPattern = string // (alias) line 14542 // The result of a hover request. -type Hover struct { // line 5081 +type Hover struct { // The hover's content Contents MarkupContent `json:"contents"` // An optional range inside the text document that is used to // visualize the hover, e.g. by changing the background color. Range Range `json:"range,omitempty"` } -type HoverClientCapabilities struct { // line 11767 +type HoverClientCapabilities struct { // Whether hover supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Client supports the following content formats for the content @@ -1855,24 +1867,24 @@ type HoverClientCapabilities struct { // line 11767 } // Hover options. -type HoverOptions struct { // line 9094 +type HoverOptions struct { WorkDoneProgressOptions } // Parameters for a {@link HoverRequest}. -type HoverParams struct { // line 5064 +type HoverParams struct { TextDocumentPositionParams WorkDoneProgressParams } // Registration options for a {@link HoverRequest}. 
-type HoverRegistrationOptions struct { // line 5120 +type HoverRegistrationOptions struct { TextDocumentRegistrationOptions HoverOptions } // @since 3.6.0 -type ImplementationClientCapabilities struct { // line 11948 +type ImplementationClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `ImplementationRegistrationOptions` return value // for the corresponding server capability as well. @@ -1882,15 +1894,15 @@ type ImplementationClientCapabilities struct { // line 11948 // @since 3.14.0 LinkSupport bool `json:"linkSupport,omitempty"` } -type ImplementationOptions struct { // line 6569 +type ImplementationOptions struct { WorkDoneProgressOptions } -type ImplementationParams struct { // line 2136 +type ImplementationParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } -type ImplementationRegistrationOptions struct { // line 2176 +type ImplementationRegistrationOptions struct { TextDocumentRegistrationOptions ImplementationOptions StaticRegistrationOptions @@ -1898,20 +1910,20 @@ type ImplementationRegistrationOptions struct { // line 2176 // The data type of the ResponseError if the // initialize request fails. -type InitializeError struct { // line 4321 +type InitializeError struct { // Indicates whether the client execute the following retry logic: // (1) show the message provided by the ResponseError to the user // (2) user selects retry or cancel // (3) if user selected retry the initialize method is sent again. Retry bool `json:"retry"` } -type InitializeParams struct { // line 4263 +type InitializeParams struct { XInitializeParams WorkspaceFoldersInitializeParams } // The result returned from an initialize request. -type InitializeResult struct { // line 4277 +type InitializeResult struct { // The capabilities the language server provides. Capabilities ServerCapabilities `json:"capabilities"` // Information about the server. 
@@ -1919,13 +1931,13 @@ type InitializeResult struct { // line 4277 // @since 3.15.0 ServerInfo *PServerInfoMsg_initialize `json:"serverInfo,omitempty"` } -type InitializedParams struct { // line 4335 +type InitializedParams struct { } // Inlay hint information. // // @since 3.17.0 -type InlayHint struct { // line 3718 +type InlayHint struct { // The position of this hint. Position Position `json:"position"` // The label of this hint. A human readable string or an array of @@ -1964,7 +1976,7 @@ type InlayHint struct { // line 3718 // Inlay hint client capabilities. // // @since 3.17.0 -type InlayHintClientCapabilities struct { // line 12745 +type InlayHintClientCapabilities struct { // Whether inlay hints support dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Indicates which properties a client can resolve lazily on an inlay @@ -1975,12 +1987,13 @@ type InlayHintClientCapabilities struct { // line 12745 // Inlay hint kinds. // // @since 3.17.0 -type InlayHintKind uint32 // line 13426 +type InlayHintKind uint32 + // An inlay hint label part allows for interactive and composite labels // of inlay hints. // // @since 3.17.0 -type InlayHintLabelPart struct { // line 7298 +type InlayHintLabelPart struct { // The value of this label part. Value string `json:"value"` // The tooltip text when you hover over this label part. Depending on @@ -2009,7 +2022,7 @@ type InlayHintLabelPart struct { // line 7298 // Inlay hint options used during static registration. // // @since 3.17.0 -type InlayHintOptions struct { // line 7371 +type InlayHintOptions struct { // The server provides support to resolve additional // information for an inlay hint item. ResolveProvider bool `json:"resolveProvider,omitempty"` @@ -2019,7 +2032,7 @@ type InlayHintOptions struct { // line 7371 // A parameter literal used in inlay hint requests. // // @since 3.17.0 -type InlayHintParams struct { // line 3689 +type InlayHintParams struct { // The text document. 
TextDocument TextDocumentIdentifier `json:"textDocument"` // The document range for which inlay hints should be computed. @@ -2030,7 +2043,7 @@ type InlayHintParams struct { // line 3689 // Inlay hint options used during static or dynamic registration. // // @since 3.17.0 -type InlayHintRegistrationOptions struct { // line 3819 +type InlayHintRegistrationOptions struct { InlayHintOptions TextDocumentRegistrationOptions StaticRegistrationOptions @@ -2039,7 +2052,7 @@ type InlayHintRegistrationOptions struct { // line 3819 // Client workspace capabilities specific to inlay hints. // // @since 3.17.0 -type InlayHintWorkspaceClientCapabilities struct { // line 11460 +type InlayHintWorkspaceClientCapabilities struct { // Whether the client implementation supports a refresh request sent from // the server to the client. // @@ -2054,7 +2067,7 @@ type InlayHintWorkspaceClientCapabilities struct { // line 11460 // // @since 3.18.0 // @proposed -type InlineCompletionClientCapabilities struct { // line 12809 +type InlineCompletionClientCapabilities struct { // Whether implementation supports dynamic registration for inline completion providers. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } @@ -2063,7 +2076,7 @@ type InlineCompletionClientCapabilities struct { // line 12809 // // @since 3.18.0 // @proposed -type InlineCompletionContext struct { // line 7833 +type InlineCompletionContext struct { // Describes how the inline completion was triggered. TriggerKind InlineCompletionTriggerKind `json:"triggerKind"` // Provides information about the currently selected item in the autocomplete widget if it is visible. @@ -2074,7 +2087,7 @@ type InlineCompletionContext struct { // line 7833 // // @since 3.18.0 // @proposed -type InlineCompletionItem struct { // line 4158 +type InlineCompletionItem struct { // The text to replace the range with. Must be set. 
InsertText Or_InlineCompletionItem_insertText `json:"insertText"` // A text that is used to decide if this inline completion should be shown. When `falsy` the {@link InlineCompletionItem.insertText} is used. @@ -2089,7 +2102,7 @@ type InlineCompletionItem struct { // line 4158 // // @since 3.18.0 // @proposed -type InlineCompletionList struct { // line 4139 +type InlineCompletionList struct { // The inline completion items Items []InlineCompletionItem `json:"items"` } @@ -2098,7 +2111,7 @@ type InlineCompletionList struct { // line 4139 // // @since 3.18.0 // @proposed -type InlineCompletionOptions struct { // line 7882 +type InlineCompletionOptions struct { WorkDoneProgressOptions } @@ -2106,7 +2119,7 @@ type InlineCompletionOptions struct { // line 7882 // // @since 3.18.0 // @proposed -type InlineCompletionParams struct { // line 4111 +type InlineCompletionParams struct { // Additional information about the context in which inline completions were // requested. Context InlineCompletionContext `json:"context"` @@ -2118,7 +2131,7 @@ type InlineCompletionParams struct { // line 4111 // // @since 3.18.0 // @proposed -type InlineCompletionRegistrationOptions struct { // line 4210 +type InlineCompletionRegistrationOptions struct { InlineCompletionOptions TextDocumentRegistrationOptions StaticRegistrationOptions @@ -2128,7 +2141,8 @@ type InlineCompletionRegistrationOptions struct { // line 4210 // // @since 3.18.0 // @proposed -type InlineCompletionTriggerKind uint32 // line 13820 +type InlineCompletionTriggerKind uint32 + // Inline value information can be provided by different means: // // - directly as a text value (class InlineValueText). @@ -2142,13 +2156,13 @@ type InlineValue = Or_InlineValue // (alias) line 14276 // Client capabilities specific to inline values. 
// // @since 3.17.0 -type InlineValueClientCapabilities struct { // line 12729 +type InlineValueClientCapabilities struct { // Whether implementation supports dynamic registration for inline value providers. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // @since 3.17.0 -type InlineValueContext struct { // line 7184 +type InlineValueContext struct { // The stack frame (as a DAP Id) where the execution has stopped. FrameID int32 `json:"frameId"` // The document range where execution has stopped. @@ -2161,7 +2175,7 @@ type InlineValueContext struct { // line 7184 // An optional expression can be used to override the extracted expression. // // @since 3.17.0 -type InlineValueEvaluatableExpression struct { // line 7262 +type InlineValueEvaluatableExpression struct { // The document range for which the inline value applies. // The range is used to extract the evaluatable expression from the underlying document. Range Range `json:"range"` @@ -2172,14 +2186,14 @@ type InlineValueEvaluatableExpression struct { // line 7262 // Inline value options used during static registration. // // @since 3.17.0 -type InlineValueOptions struct { // line 7286 +type InlineValueOptions struct { WorkDoneProgressOptions } // A parameter literal used in inline value requests. // // @since 3.17.0 -type InlineValueParams struct { // line 3630 +type InlineValueParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The document range for which inline values should be computed. @@ -2193,7 +2207,7 @@ type InlineValueParams struct { // line 3630 // Inline value options used during static or dynamic registration. // // @since 3.17.0 -type InlineValueRegistrationOptions struct { // line 3667 +type InlineValueRegistrationOptions struct { InlineValueOptions TextDocumentRegistrationOptions StaticRegistrationOptions @@ -2202,7 +2216,7 @@ type InlineValueRegistrationOptions struct { // line 3667 // Provide inline value as text. 
// // @since 3.17.0 -type InlineValueText struct { // line 7207 +type InlineValueText struct { // The document range for which the inline value applies. Range Range `json:"range"` // The text of the inline value. @@ -2214,7 +2228,7 @@ type InlineValueText struct { // line 7207 // An optional variable name can be used to override the extracted name. // // @since 3.17.0 -type InlineValueVariableLookup struct { // line 7230 +type InlineValueVariableLookup struct { // The document range for which the inline value applies. // The range is used to extract the variable name from the underlying document. Range Range `json:"range"` @@ -2227,7 +2241,7 @@ type InlineValueVariableLookup struct { // line 7230 // Client workspace capabilities specific to inline values. // // @since 3.17.0 -type InlineValueWorkspaceClientCapabilities struct { // line 11444 +type InlineValueWorkspaceClientCapabilities struct { // Whether the client implementation supports a refresh request sent from the // server to the client. // @@ -2241,7 +2255,7 @@ type InlineValueWorkspaceClientCapabilities struct { // line 11444 // A special text edit to provide an insert and a replace operation. // // @since 3.16.0 -type InsertReplaceEdit struct { // line 8994 +type InsertReplaceEdit struct { // The string to be inserted. NewText string `json:"newText"` // The range if the insert is requested @@ -2252,38 +2266,40 @@ type InsertReplaceEdit struct { // line 8994 // Defines whether the insert text in a completion item should be interpreted as // plain text or a snippet. -type InsertTextFormat uint32 // line 13653 +type InsertTextFormat uint32 + // How whitespace and indentation is handled during completion // item insertion. // // @since 3.16.0 -type InsertTextMode uint32 // line 13673 +type InsertTextMode uint32 type LSPAny = interface{} // LSP arrays. 
// @since 3.17.0 type LSPArray = []interface{} // (alias) line 14194 -type LSPErrorCodes int32 // line 13176 +type LSPErrorCodes int32 + // LSP object definition. // @since 3.17.0 type LSPObject = map[string]LSPAny // (alias) line 14526 // Client capabilities for the linked editing range request. // // @since 3.16.0 -type LinkedEditingRangeClientCapabilities struct { // line 12681 +type LinkedEditingRangeClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` // return value for the corresponding server capability as well. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } -type LinkedEditingRangeOptions struct { // line 6888 +type LinkedEditingRangeOptions struct { WorkDoneProgressOptions } -type LinkedEditingRangeParams struct { // line 3185 +type LinkedEditingRangeParams struct { TextDocumentPositionParams WorkDoneProgressParams } -type LinkedEditingRangeRegistrationOptions struct { // line 3228 +type LinkedEditingRangeRegistrationOptions struct { TextDocumentRegistrationOptions LinkedEditingRangeOptions StaticRegistrationOptions @@ -2292,7 +2308,7 @@ type LinkedEditingRangeRegistrationOptions struct { // line 3228 // The result of a linked editing range request. // // @since 3.16.0 -type LinkedEditingRanges struct { // line 3201 +type LinkedEditingRanges struct { // A list of ranges that can be edited together. The ranges must have // identical length and contain identical text content. The ranges cannot overlap. 
Ranges []Range `json:"ranges"` @@ -2303,13 +2319,13 @@ type LinkedEditingRanges struct { // line 3201 } // created for Literal (Lit_NotebookDocumentChangeEvent_cells_textContent_Elem) -type Lit_NotebookDocumentChangeEvent_cells_textContent_Elem struct { // line 7781 +type Lit_NotebookDocumentChangeEvent_cells_textContent_Elem struct { Document VersionedTextDocumentIdentifier `json:"document"` Changes []TextDocumentContentChangeEvent `json:"changes"` } // created for Literal (Lit_NotebookDocumentFilter_Item1) -type Lit_NotebookDocumentFilter_Item1 struct { // line 14708 +type Lit_NotebookDocumentFilter_Item1 struct { // The type of the enclosing notebook. NotebookType string `json:"notebookType,omitempty"` // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. @@ -2319,7 +2335,7 @@ type Lit_NotebookDocumentFilter_Item1 struct { // line 14708 } // created for Literal (Lit_NotebookDocumentFilter_Item2) -type Lit_NotebookDocumentFilter_Item2 struct { // line 14741 +type Lit_NotebookDocumentFilter_Item2 struct { // The type of the enclosing notebook. NotebookType string `json:"notebookType,omitempty"` // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. @@ -2329,12 +2345,12 @@ type Lit_NotebookDocumentFilter_Item2 struct { // line 14741 } // created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem struct { // line 10185 +type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0_cells_Elem struct { Language string `json:"language"` } // created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1 struct { // line 10206 +type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1 struct { // The notebook to be synced If a string // value is provided it matches against the // notebook type. '*' matches every notebook. 
@@ -2344,23 +2360,23 @@ type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1 struct { // lin } // created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem) -type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem struct { // line 10232 +type Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_cells_Elem struct { Language string `json:"language"` } // created for Literal (Lit_PrepareRenameResult_Item2) -type Lit_PrepareRenameResult_Item2 struct { // line 14347 +type Lit_PrepareRenameResult_Item2 struct { DefaultBehavior bool `json:"defaultBehavior"` } // created for Literal (Lit_TextDocumentContentChangeEvent_Item1) -type Lit_TextDocumentContentChangeEvent_Item1 struct { // line 14455 +type Lit_TextDocumentContentChangeEvent_Item1 struct { // The new text of the whole document. Text string `json:"text"` } // created for Literal (Lit_TextDocumentFilter_Item2) -type Lit_TextDocumentFilter_Item2 struct { // line 14632 +type Lit_TextDocumentFilter_Item2 struct { // A language id, like `typescript`. Language string `json:"language,omitempty"` // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. @@ -2371,14 +2387,14 @@ type Lit_TextDocumentFilter_Item2 struct { // line 14632 // Represents a location inside a resource, such as a line // inside a text file. -type Location struct { // line 2156 +type Location struct { URI DocumentURI `json:"uri"` Range Range `json:"range"` } // Represents the connection of two locations. Provides additional metadata over normal {@link Location locations}, // including an origin range. -type LocationLink struct { // line 6508 +type LocationLink struct { // Span of the origin of this link. // // Used as the underlined span for mouse interaction. Defaults to the word range at @@ -2396,13 +2412,13 @@ type LocationLink struct { // line 6508 } // The log message parameters. 
-type LogMessageParams struct { // line 4446 +type LogMessageParams struct { // The message type. See {@link MessageType} Type MessageType `json:"type"` // The actual message. Message string `json:"message"` } -type LogTraceParams struct { // line 6395 +type LogTraceParams struct { Message string `json:"message"` Verbose string `json:"verbose,omitempty"` } @@ -2410,7 +2426,7 @@ type LogTraceParams struct { // line 6395 // Client capabilities specific to the used markdown parser. // // @since 3.16.0 -type MarkdownClientCapabilities struct { // line 12917 +type MarkdownClientCapabilities struct { // The name of the parser. Parser string `json:"parser"` // The version of the parser. @@ -2459,7 +2475,7 @@ type MarkedString = Or_MarkedString // (alias) line 14473 // // *Please Note* that clients might sanitize the return markdown. A client could decide to // remove HTML from the markdown to avoid script execution. -type MarkupContent struct { // line 7349 +type MarkupContent struct { // The type of the Markup Kind MarkupKind `json:"kind"` // The content itself @@ -2471,18 +2487,19 @@ type MarkupContent struct { // line 7349 // // Please note that `MarkupKinds` must not start with a `$`. This kinds // are reserved for internal usage. -type MarkupKind string // line 13800 -type MessageActionItem struct { // line 4433 +type MarkupKind string +type MessageActionItem struct { // A short title like 'Retry', 'Open Log' etc. Title string `json:"title"` } // The message type -type MessageType uint32 // line 13447 +type MessageType uint32 + // Moniker definition to match LSIF 0.5 moniker definition. // // @since 3.16.0 -type Moniker struct { // line 3411 +type Moniker struct { // The scheme of the moniker. For example tsc or .Net Scheme string `json:"scheme"` // The identifier of the moniker. The value is opaque in LSIF however @@ -2497,7 +2514,7 @@ type Moniker struct { // line 3411 // Client capabilities specific to the moniker request. 
// // @since 3.16.0 -type MonikerClientCapabilities struct { // line 12697 +type MonikerClientCapabilities struct { // Whether moniker supports dynamic registration. If this is set to `true` // the client supports the new `MonikerRegistrationOptions` return value // for the corresponding server capability as well. @@ -2507,28 +2524,28 @@ type MonikerClientCapabilities struct { // line 12697 // The moniker kind. // // @since 3.16.0 -type MonikerKind string // line 13400 -type MonikerOptions struct { // line 7162 +type MonikerKind string +type MonikerOptions struct { WorkDoneProgressOptions } -type MonikerParams struct { // line 3391 +type MonikerParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } -type MonikerRegistrationOptions struct { // line 3451 +type MonikerRegistrationOptions struct { TextDocumentRegistrationOptions MonikerOptions } // created for Literal (Lit_MarkedString_Item1) -type Msg_MarkedString struct { // line 14483 +type Msg_MarkedString struct { Language string `json:"language"` Value string `json:"value"` } // created for Literal (Lit_NotebookDocumentFilter_Item0) -type Msg_NotebookDocumentFilter struct { // line 14675 +type Msg_NotebookDocumentFilter struct { // The type of the enclosing notebook. NotebookType string `json:"notebookType"` // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. @@ -2538,13 +2555,13 @@ type Msg_NotebookDocumentFilter struct { // line 14675 } // created for Literal (Lit_PrepareRenameResult_Item1) -type Msg_PrepareRename2Gn struct { // line 14326 +type Msg_PrepareRename2Gn struct { Range Range `json:"range"` Placeholder string `json:"placeholder"` } // created for Literal (Lit_TextDocumentContentChangeEvent_Item0) -type Msg_TextDocumentContentChangeEvent struct { // line 14423 +type Msg_TextDocumentContentChangeEvent struct { // The range of the document that changed. Range *Range `json:"range"` // The optional length of the range that got replaced. 
@@ -2556,7 +2573,7 @@ type Msg_TextDocumentContentChangeEvent struct { // line 14423 } // created for Literal (Lit_TextDocumentFilter_Item1) -type Msg_TextDocumentFilter struct { // line 14599 +type Msg_TextDocumentFilter struct { // A language id, like `typescript`. Language string `json:"language,omitempty"` // A Uri {@link Uri.scheme scheme}, like `file` or `untitled`. @@ -2566,7 +2583,7 @@ type Msg_TextDocumentFilter struct { // line 14599 } // created for Literal (Lit__InitializeParams_clientInfo) -type Msg_XInitializeParams_clientInfo struct { // line 7971 +type Msg_XInitializeParams_clientInfo struct { // The name of the client as defined by the client. Name string `json:"name"` // The client's version as defined by the client. @@ -2580,7 +2597,7 @@ type Msg_XInitializeParams_clientInfo struct { // line 7971 // notebook cell or the cell's text document. // // @since 3.17.0 -type NotebookCell struct { // line 9928 +type NotebookCell struct { // The cell's kind Kind NotebookCellKind `json:"kind"` // The URI of the cell's text document @@ -2599,7 +2616,7 @@ type NotebookCell struct { // line 9928 // array from state S to S'. // // @since 3.17.0 -type NotebookCellArrayChange struct { // line 9969 +type NotebookCellArrayChange struct { // The start oftest of the cell that changed. Start uint32 `json:"start"` // The deleted cells @@ -2611,12 +2628,13 @@ type NotebookCellArrayChange struct { // line 9969 // A notebook cell kind. // // @since 3.17.0 -type NotebookCellKind uint32 // line 14063 +type NotebookCellKind uint32 + // A notebook cell text document filter denotes a cell text // document by different properties. // // @since 3.17.0 -type NotebookCellTextDocumentFilter struct { // line 10467 +type NotebookCellTextDocumentFilter struct { // A filter that matches against the notebook // containing the notebook cell. 
If a string // value is provided it matches against the @@ -2632,7 +2650,7 @@ type NotebookCellTextDocumentFilter struct { // line 10467 // A notebook document. // // @since 3.17.0 -type NotebookDocument struct { // line 7590 +type NotebookDocument struct { // The notebook document's uri. URI URI `json:"uri"` // The type of the notebook. @@ -2652,7 +2670,7 @@ type NotebookDocument struct { // line 7590 // A change event for a notebook document. // // @since 3.17.0 -type NotebookDocumentChangeEvent struct { // line 7702 +type NotebookDocumentChangeEvent struct { // The changed meta data if any. // // Note: should always be an object literal (e.g. LSPObject) @@ -2664,7 +2682,7 @@ type NotebookDocumentChangeEvent struct { // line 7702 // Capabilities specific to the notebook document support. // // @since 3.17.0 -type NotebookDocumentClientCapabilities struct { // line 10978 +type NotebookDocumentClientCapabilities struct { // Capabilities specific to notebook document synchronization // // @since 3.17.0 @@ -2680,7 +2698,7 @@ type NotebookDocumentFilter = Msg_NotebookDocumentFilter // (alias) line 14669 // A literal to identify a notebook document in the client. // // @since 3.17.0 -type NotebookDocumentIdentifier struct { // line 7818 +type NotebookDocumentIdentifier struct { // The notebook document's uri. URI URI `json:"uri"` } @@ -2688,7 +2706,7 @@ type NotebookDocumentIdentifier struct { // line 7818 // Notebook specific client capabilities. // // @since 3.17.0 -type NotebookDocumentSyncClientCapabilities struct { // line 12826 +type NotebookDocumentSyncClientCapabilities struct { // Whether implementation supports dynamic registration. If this is // set to `true` the client supports the new // `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` @@ -2711,7 +2729,7 @@ type NotebookDocumentSyncClientCapabilities struct { // line 12826 // cell will be synced. 
// // @since 3.17.0 -type NotebookDocumentSyncOptions struct { // line 10149 +type NotebookDocumentSyncOptions struct { // The notebooks to be synced NotebookSelector []PNotebookSelectorPNotebookDocumentSync `json:"notebookSelector"` // Whether save notification should be forwarded to @@ -2722,13 +2740,13 @@ type NotebookDocumentSyncOptions struct { // line 10149 // Registration options specific to a notebook. // // @since 3.17.0 -type NotebookDocumentSyncRegistrationOptions struct { // line 10269 +type NotebookDocumentSyncRegistrationOptions struct { NotebookDocumentSyncOptions StaticRegistrationOptions } // A text document identifier to optionally denote a specific version of a text document. -type OptionalVersionedTextDocumentIdentifier struct { // line 9673 +type OptionalVersionedTextDocumentIdentifier struct { // The version number of this document. If a versioned text document identifier // is sent from the server to the client and the file is not open in the editor // (the server has not received an open notification before) the server can send @@ -2739,322 +2757,322 @@ type OptionalVersionedTextDocumentIdentifier struct { // line 9673 } // created for Or [FEditRangePItemDefaults Range] -type OrFEditRangePItemDefaults struct { // line 4965 +type OrFEditRangePItemDefaults struct { Value interface{} `json:"value"` } // created for Or [NotebookDocumentFilter string] -type OrFNotebookPNotebookSelector struct { // line 10166 +type OrFNotebookPNotebookSelector struct { Value interface{} `json:"value"` } // created for Or [Location PLocationMsg_workspace_symbol] -type OrPLocation_workspace_symbol struct { // line 5716 +type OrPLocation_workspace_symbol struct { Value interface{} `json:"value"` } // created for Or [[]string string] -type OrPSection_workspace_didChangeConfiguration struct { // line 4359 +type OrPSection_workspace_didChangeConfiguration struct { Value interface{} `json:"value"` } // created for Or [MarkupContent string] -type OrPTooltipPLabel struct { 
// line 7312 +type OrPTooltipPLabel struct { Value interface{} `json:"value"` } // created for Or [MarkupContent string] -type OrPTooltip_textDocument_inlayHint struct { // line 3773 +type OrPTooltip_textDocument_inlayHint struct { Value interface{} `json:"value"` } // created for Or [int32 string] -type Or_CancelParams_id struct { // line 6421 +type Or_CancelParams_id struct { Value interface{} `json:"value"` } // created for Or [MarkupContent string] -type Or_CompletionItem_documentation struct { // line 4778 +type Or_CompletionItem_documentation struct { Value interface{} `json:"value"` } // created for Or [InsertReplaceEdit TextEdit] -type Or_CompletionItem_textEdit struct { // line 4861 +type Or_CompletionItem_textEdit struct { Value interface{} `json:"value"` } // created for Or [Location []Location] -type Or_Definition struct { // line 14169 +type Or_Definition struct { Value interface{} `json:"value"` } // created for Or [int32 string] -type Or_Diagnostic_code struct { // line 8866 +type Or_Diagnostic_code struct { Value interface{} `json:"value"` } // created for Or [RelatedFullDocumentDiagnosticReport RelatedUnchangedDocumentDiagnosticReport] -type Or_DocumentDiagnosticReport struct { // line 14301 +type Or_DocumentDiagnosticReport struct { Value interface{} `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] -type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct { // line 3896 +type Or_DocumentDiagnosticReportPartialResult_relatedDocuments_Value struct { Value interface{} `json:"value"` } // created for Or [NotebookCellTextDocumentFilter TextDocumentFilter] -type Or_DocumentFilter struct { // line 14511 +type Or_DocumentFilter struct { Value interface{} `json:"value"` } // created for Or [MarkedString MarkupContent []MarkedString] -type Or_Hover_contents struct { // line 5087 +type Or_Hover_contents struct { Value interface{} `json:"value"` } // created for Or [[]InlayHintLabelPart 
string] -type Or_InlayHint_label struct { // line 3732 +type Or_InlayHint_label struct { Value interface{} `json:"value"` } // created for Or [StringValue string] -type Or_InlineCompletionItem_insertText struct { // line 4164 +type Or_InlineCompletionItem_insertText struct { Value interface{} `json:"value"` } // created for Or [InlineValueEvaluatableExpression InlineValueText InlineValueVariableLookup] -type Or_InlineValue struct { // line 14279 +type Or_InlineValue struct { Value interface{} `json:"value"` } // created for Or [Msg_MarkedString string] -type Or_MarkedString struct { // line 14476 +type Or_MarkedString struct { Value interface{} `json:"value"` } // created for Or [NotebookDocumentFilter string] -type Or_NotebookCellTextDocumentFilter_notebook struct { // line 10473 +type Or_NotebookCellTextDocumentFilter_notebook struct { Value interface{} `json:"value"` } // created for Or [NotebookDocumentFilter string] -type Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook struct { // line 10212 +type Or_NotebookDocumentSyncOptions_notebookSelector_Elem_Item1_notebook struct { Value interface{} `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] -type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { // line 7405 +type Or_RelatedFullDocumentDiagnosticReport_relatedDocuments_Value struct { Value interface{} `json:"value"` } // created for Or [FullDocumentDiagnosticReport UnchangedDocumentDiagnosticReport] -type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { // line 7444 +type Or_RelatedUnchangedDocumentDiagnosticReport_relatedDocuments_Value struct { Value interface{} `json:"value"` } // created for Or [URI WorkspaceFolder] -type Or_RelativePattern_baseUri struct { // line 11107 +type Or_RelativePattern_baseUri struct { Value interface{} `json:"value"` } // created for Or [CodeAction Command] -type Or_Result_textDocument_codeAction_Item0_Elem struct 
{ // line 1414 +type Or_Result_textDocument_codeAction_Item0_Elem struct { Value interface{} `json:"value"` } // created for Or [InlineCompletionList []InlineCompletionItem] -type Or_Result_textDocument_inlineCompletion struct { // line 981 +type Or_Result_textDocument_inlineCompletion struct { Value interface{} `json:"value"` } // created for Or [FFullPRequests bool] -type Or_SemanticTokensClientCapabilities_requests_full struct { // line 12574 +type Or_SemanticTokensClientCapabilities_requests_full struct { Value interface{} `json:"value"` } // created for Or [FRangePRequests bool] -type Or_SemanticTokensClientCapabilities_requests_range struct { // line 12554 +type Or_SemanticTokensClientCapabilities_requests_range struct { Value interface{} `json:"value"` } // created for Or [PFullESemanticTokensOptions bool] -type Or_SemanticTokensOptions_full struct { // line 6816 +type Or_SemanticTokensOptions_full struct { Value interface{} `json:"value"` } // created for Or [PRangeESemanticTokensOptions bool] -type Or_SemanticTokensOptions_range struct { // line 6796 +type Or_SemanticTokensOptions_range struct { Value interface{} `json:"value"` } // created for Or [CallHierarchyOptions CallHierarchyRegistrationOptions bool] -type Or_ServerCapabilities_callHierarchyProvider struct { // line 8526 +type Or_ServerCapabilities_callHierarchyProvider struct { Value interface{} `json:"value"` } // created for Or [CodeActionOptions bool] -type Or_ServerCapabilities_codeActionProvider struct { // line 8334 +type Or_ServerCapabilities_codeActionProvider struct { Value interface{} `json:"value"` } // created for Or [DocumentColorOptions DocumentColorRegistrationOptions bool] -type Or_ServerCapabilities_colorProvider struct { // line 8370 +type Or_ServerCapabilities_colorProvider struct { Value interface{} `json:"value"` } // created for Or [DeclarationOptions DeclarationRegistrationOptions bool] -type Or_ServerCapabilities_declarationProvider struct { // line 8196 +type 
Or_ServerCapabilities_declarationProvider struct { Value interface{} `json:"value"` } // created for Or [DefinitionOptions bool] -type Or_ServerCapabilities_definitionProvider struct { // line 8218 +type Or_ServerCapabilities_definitionProvider struct { Value interface{} `json:"value"` } // created for Or [DiagnosticOptions DiagnosticRegistrationOptions] -type Or_ServerCapabilities_diagnosticProvider struct { // line 8683 +type Or_ServerCapabilities_diagnosticProvider struct { Value interface{} `json:"value"` } // created for Or [DocumentFormattingOptions bool] -type Or_ServerCapabilities_documentFormattingProvider struct { // line 8410 +type Or_ServerCapabilities_documentFormattingProvider struct { Value interface{} `json:"value"` } // created for Or [DocumentHighlightOptions bool] -type Or_ServerCapabilities_documentHighlightProvider struct { // line 8298 +type Or_ServerCapabilities_documentHighlightProvider struct { Value interface{} `json:"value"` } // created for Or [DocumentRangeFormattingOptions bool] -type Or_ServerCapabilities_documentRangeFormattingProvider struct { // line 8428 +type Or_ServerCapabilities_documentRangeFormattingProvider struct { Value interface{} `json:"value"` } // created for Or [DocumentSymbolOptions bool] -type Or_ServerCapabilities_documentSymbolProvider struct { // line 8316 +type Or_ServerCapabilities_documentSymbolProvider struct { Value interface{} `json:"value"` } // created for Or [FoldingRangeOptions FoldingRangeRegistrationOptions bool] -type Or_ServerCapabilities_foldingRangeProvider struct { // line 8473 +type Or_ServerCapabilities_foldingRangeProvider struct { Value interface{} `json:"value"` } // created for Or [HoverOptions bool] -type Or_ServerCapabilities_hoverProvider struct { // line 8169 +type Or_ServerCapabilities_hoverProvider struct { Value interface{} `json:"value"` } // created for Or [ImplementationOptions ImplementationRegistrationOptions bool] -type Or_ServerCapabilities_implementationProvider struct { // 
line 8258 +type Or_ServerCapabilities_implementationProvider struct { Value interface{} `json:"value"` } // created for Or [InlayHintOptions InlayHintRegistrationOptions bool] -type Or_ServerCapabilities_inlayHintProvider struct { // line 8660 +type Or_ServerCapabilities_inlayHintProvider struct { Value interface{} `json:"value"` } // created for Or [InlineCompletionOptions bool] -type Or_ServerCapabilities_inlineCompletionProvider struct { // line 8702 +type Or_ServerCapabilities_inlineCompletionProvider struct { Value interface{} `json:"value"` } // created for Or [InlineValueOptions InlineValueRegistrationOptions bool] -type Or_ServerCapabilities_inlineValueProvider struct { // line 8637 +type Or_ServerCapabilities_inlineValueProvider struct { Value interface{} `json:"value"` } // created for Or [LinkedEditingRangeOptions LinkedEditingRangeRegistrationOptions bool] -type Or_ServerCapabilities_linkedEditingRangeProvider struct { // line 8549 +type Or_ServerCapabilities_linkedEditingRangeProvider struct { Value interface{} `json:"value"` } // created for Or [MonikerOptions MonikerRegistrationOptions bool] -type Or_ServerCapabilities_monikerProvider struct { // line 8591 +type Or_ServerCapabilities_monikerProvider struct { Value interface{} `json:"value"` } // created for Or [NotebookDocumentSyncOptions NotebookDocumentSyncRegistrationOptions] -type Or_ServerCapabilities_notebookDocumentSync struct { // line 8141 +type Or_ServerCapabilities_notebookDocumentSync struct { Value interface{} `json:"value"` } // created for Or [ReferenceOptions bool] -type Or_ServerCapabilities_referencesProvider struct { // line 8280 +type Or_ServerCapabilities_referencesProvider struct { Value interface{} `json:"value"` } // created for Or [RenameOptions bool] -type Or_ServerCapabilities_renameProvider struct { // line 8455 +type Or_ServerCapabilities_renameProvider struct { Value interface{} `json:"value"` } // created for Or [SelectionRangeOptions SelectionRangeRegistrationOptions 
bool] -type Or_ServerCapabilities_selectionRangeProvider struct { // line 8495 +type Or_ServerCapabilities_selectionRangeProvider struct { Value interface{} `json:"value"` } // created for Or [SemanticTokensOptions SemanticTokensRegistrationOptions] -type Or_ServerCapabilities_semanticTokensProvider struct { // line 8572 +type Or_ServerCapabilities_semanticTokensProvider struct { Value interface{} `json:"value"` } // created for Or [TextDocumentSyncKind TextDocumentSyncOptions] -type Or_ServerCapabilities_textDocumentSync struct { // line 8123 +type Or_ServerCapabilities_textDocumentSync struct { Value interface{} `json:"value"` } // created for Or [TypeDefinitionOptions TypeDefinitionRegistrationOptions bool] -type Or_ServerCapabilities_typeDefinitionProvider struct { // line 8236 +type Or_ServerCapabilities_typeDefinitionProvider struct { Value interface{} `json:"value"` } // created for Or [TypeHierarchyOptions TypeHierarchyRegistrationOptions bool] -type Or_ServerCapabilities_typeHierarchyProvider struct { // line 8614 +type Or_ServerCapabilities_typeHierarchyProvider struct { Value interface{} `json:"value"` } // created for Or [WorkspaceSymbolOptions bool] -type Or_ServerCapabilities_workspaceSymbolProvider struct { // line 8392 +type Or_ServerCapabilities_workspaceSymbolProvider struct { Value interface{} `json:"value"` } // created for Or [MarkupContent string] -type Or_SignatureInformation_documentation struct { // line 9160 +type Or_SignatureInformation_documentation struct { Value interface{} `json:"value"` } // created for Or [AnnotatedTextEdit TextEdit] -type Or_TextDocumentEdit_edits_Elem struct { // line 6929 +type Or_TextDocumentEdit_edits_Elem struct { Value interface{} `json:"value"` } // created for Or [SaveOptions bool] -type Or_TextDocumentSyncOptions_save struct { // line 10132 +type Or_TextDocumentSyncOptions_save struct { Value interface{} `json:"value"` } // created for Or [WorkspaceFullDocumentDiagnosticReport 
WorkspaceUnchangedDocumentDiagnosticReport] -type Or_WorkspaceDocumentDiagnosticReport struct { // line 14402 +type Or_WorkspaceDocumentDiagnosticReport struct { Value interface{} `json:"value"` } // created for Or [CreateFile DeleteFile RenameFile TextDocumentEdit] -type Or_WorkspaceEdit_documentChanges_Elem struct { // line 3293 +type Or_WorkspaceEdit_documentChanges_Elem struct { Value interface{} `json:"value"` } // created for Or [Declaration []DeclarationLink] -type Or_textDocument_declaration struct { // line 249 +type Or_textDocument_declaration struct { Value interface{} `json:"value"` } // created for Literal (Lit_NotebookDocumentChangeEvent_cells) -type PCellsPChange struct { // line 7717 +type PCellsPChange struct { // Changes to the cell structure to add or // remove cells. Structure *FStructurePCells `json:"structure,omitempty"` @@ -3066,7 +3084,7 @@ type PCellsPChange struct { // line 7717 } // created for Literal (Lit_WorkspaceEditClientCapabilities_changeAnnotationSupport) -type PChangeAnnotationSupportPWorkspaceEdit struct { // line 11181 +type PChangeAnnotationSupportPWorkspaceEdit struct { // Whether the client groups edits with equal labels into tree nodes, // for instance all edits labelled with "Changes in Strings" would // be a tree node. @@ -3074,14 +3092,14 @@ type PChangeAnnotationSupportPWorkspaceEdit struct { // line 11181 } // created for Literal (Lit_CodeActionClientCapabilities_codeActionLiteralSupport) -type PCodeActionLiteralSupportPCodeAction struct { // line 12101 +type PCodeActionLiteralSupportPCodeAction struct { // The code action kind is support with the following value // set. CodeActionKind FCodeActionKindPCodeActionLiteralSupport `json:"codeActionKind"` } // created for Literal (Lit_CompletionClientCapabilities_completionItemKind) -type PCompletionItemKindPCompletion struct { // line 11699 +type PCompletionItemKindPCompletion struct { // The completion item kind values the client supports. 
When this // property exists the client also guarantees that it will // handle values outside its set gracefully and falls back @@ -3094,7 +3112,7 @@ type PCompletionItemKindPCompletion struct { // line 11699 } // created for Literal (Lit_CompletionClientCapabilities_completionItem) -type PCompletionItemPCompletion struct { // line 11548 +type PCompletionItemPCompletion struct { // Client supports snippets as insert text. // // A snippet can define tab stops and placeholders with `$1`, `$2` @@ -3143,7 +3161,7 @@ type PCompletionItemPCompletion struct { // line 11548 } // created for Literal (Lit_CompletionOptions_completionItem) -type PCompletionItemPCompletionProvider struct { // line 9065 +type PCompletionItemPCompletionProvider struct { // The server has support for completion item label // details (see also `CompletionItemLabelDetails`) when // receiving a completion item in a resolve call. @@ -3153,7 +3171,7 @@ type PCompletionItemPCompletionProvider struct { // line 9065 } // created for Literal (Lit_CompletionClientCapabilities_completionList) -type PCompletionListPCompletion struct { // line 11741 +type PCompletionListPCompletion struct { // The client supports the following itemDefaults on // a completion list. // @@ -3166,7 +3184,7 @@ type PCompletionListPCompletion struct { // line 11741 } // created for Literal (Lit_CodeAction_disabled) -type PDisabledMsg_textDocument_codeAction struct { // line 5622 +type PDisabledMsg_textDocument_codeAction struct { // Human readable description of why the code action is currently disabled. // // This is displayed in the code actions UI. @@ -3174,7 +3192,7 @@ type PDisabledMsg_textDocument_codeAction struct { // line 5622 } // created for Literal (Lit_FoldingRangeClientCapabilities_foldingRangeKind) -type PFoldingRangeKindPFoldingRange struct { // line 12387 +type PFoldingRangeKindPFoldingRange struct { // The folding range kind values the client supports. 
When this // property exists the client also guarantees that it will // handle values outside its set gracefully and falls back @@ -3183,7 +3201,7 @@ type PFoldingRangeKindPFoldingRange struct { // line 12387 } // created for Literal (Lit_FoldingRangeClientCapabilities_foldingRange) -type PFoldingRangePFoldingRange struct { // line 12412 +type PFoldingRangePFoldingRange struct { // If set, the client signals that it supports setting collapsedText on // folding ranges to display custom labels instead of the default text. // @@ -3192,13 +3210,13 @@ type PFoldingRangePFoldingRange struct { // line 12412 } // created for Literal (Lit_SemanticTokensOptions_full_Item1) -type PFullESemanticTokensOptions struct { // line 6823 +type PFullESemanticTokensOptions struct { // The server supports deltas for full documents. Delta bool `json:"delta"` } // created for Literal (Lit_CompletionList_itemDefaults) -type PItemDefaultsMsg_textDocument_completion struct { // line 4946 +type PItemDefaultsMsg_textDocument_completion struct { // A default commit character set. // // @since 3.17.0 @@ -3222,12 +3240,12 @@ type PItemDefaultsMsg_textDocument_completion struct { // line 4946 } // created for Literal (Lit_WorkspaceSymbol_location_Item1) -type PLocationMsg_workspace_symbol struct { // line 5723 +type PLocationMsg_workspace_symbol struct { URI DocumentURI `json:"uri"` } // created for Literal (Lit_ShowMessageRequestClientCapabilities_messageActionItem) -type PMessageActionItemPShowMessage struct { // line 12857 +type PMessageActionItemPShowMessage struct { // Whether the client supports additional attributes which // are preserved and send back to the server in the // request's response. 
@@ -3235,7 +3253,7 @@ type PMessageActionItemPShowMessage struct { // line 12857 } // created for Literal (Lit_NotebookDocumentSyncOptions_notebookSelector_Elem_Item0) -type PNotebookSelectorPNotebookDocumentSync struct { // line 10160 +type PNotebookSelectorPNotebookDocumentSync struct { // The notebook to be synced If a string // value is provided it matches against the // notebook type. '*' matches every notebook. @@ -3245,11 +3263,11 @@ type PNotebookSelectorPNotebookDocumentSync struct { // line 10160 } // created for Literal (Lit_SemanticTokensOptions_range_Item1) -type PRangeESemanticTokensOptions struct { // line 6803 +type PRangeESemanticTokensOptions struct { } // created for Literal (Lit_SemanticTokensClientCapabilities_requests) -type PRequestsPSemanticTokens struct { // line 12548 +type PRequestsPSemanticTokens struct { // The client will send the `textDocument/semanticTokens/range` request if // the server provides a corresponding handler. Range Or_SemanticTokensClientCapabilities_requests_range `json:"range"` @@ -3259,26 +3277,26 @@ type PRequestsPSemanticTokens struct { // line 12548 } // created for Literal (Lit_CodeActionClientCapabilities_resolveSupport) -type PResolveSupportPCodeAction struct { // line 12166 +type PResolveSupportPCodeAction struct { // The properties that a client can resolve lazily. Properties []string `json:"properties"` } // created for Literal (Lit_InlayHintClientCapabilities_resolveSupport) -type PResolveSupportPInlayHint struct { // line 12760 +type PResolveSupportPInlayHint struct { // The properties that a client can resolve lazily. Properties []string `json:"properties"` } // created for Literal (Lit_WorkspaceSymbolClientCapabilities_resolveSupport) -type PResolveSupportPSymbol struct { // line 11303 +type PResolveSupportPSymbol struct { // The properties that a client can resolve lazily. 
Usually // `location.range` Properties []string `json:"properties"` } // created for Literal (Lit_InitializeResult_serverInfo) -type PServerInfoMsg_initialize struct { // line 4291 +type PServerInfoMsg_initialize struct { // The name of the server as defined by the server. Name string `json:"name"` // The server's version as defined by the server. @@ -3286,7 +3304,7 @@ type PServerInfoMsg_initialize struct { // line 4291 } // created for Literal (Lit_SignatureHelpClientCapabilities_signatureInformation) -type PSignatureInformationPSignatureHelp struct { // line 11808 +type PSignatureInformationPSignatureHelp struct { // Client supports the following content formats for the documentation // property. The order describes the preferred format of the client. DocumentationFormat []MarkupKind `json:"documentationFormat,omitempty"` @@ -3300,7 +3318,7 @@ type PSignatureInformationPSignatureHelp struct { // line 11808 } // created for Literal (Lit_GeneralClientCapabilities_staleRequestSupport) -type PStaleRequestSupportPGeneral struct { // line 11035 +type PStaleRequestSupportPGeneral struct { // The client will actively cancel the request. Cancel bool `json:"cancel"` // The list of requests for which the client @@ -3310,7 +3328,7 @@ type PStaleRequestSupportPGeneral struct { // line 11035 } // created for Literal (Lit_DocumentSymbolClientCapabilities_symbolKind) -type PSymbolKindPDocumentSymbol struct { // line 12019 +type PSymbolKindPDocumentSymbol struct { // The symbol kind values the client supports. When this // property exists the client also guarantees that it will // handle values outside its set gracefully and falls back @@ -3323,7 +3341,7 @@ type PSymbolKindPDocumentSymbol struct { // line 12019 } // created for Literal (Lit_WorkspaceSymbolClientCapabilities_symbolKind) -type PSymbolKindPSymbol struct { // line 11255 +type PSymbolKindPSymbol struct { // The symbol kind values the client supports. 
When this // property exists the client also guarantees that it will // handle values outside its set gracefully and falls back @@ -3336,35 +3354,35 @@ type PSymbolKindPSymbol struct { // line 11255 } // created for Literal (Lit_DocumentSymbolClientCapabilities_tagSupport) -type PTagSupportPDocumentSymbol struct { // line 12052 +type PTagSupportPDocumentSymbol struct { // The tags supported by the client. ValueSet []SymbolTag `json:"valueSet"` } // created for Literal (Lit_PublishDiagnosticsClientCapabilities_tagSupport) -type PTagSupportPPublishDiagnostics struct { // line 12463 +type PTagSupportPPublishDiagnostics struct { // The tags supported by the client. ValueSet []DiagnosticTag `json:"valueSet"` } // created for Literal (Lit_WorkspaceSymbolClientCapabilities_tagSupport) -type PTagSupportPSymbol struct { // line 11279 +type PTagSupportPSymbol struct { // The tags supported by the client. ValueSet []SymbolTag `json:"valueSet"` } // The parameters of a configuration request. -type ParamConfiguration struct { // line 2272 +type ParamConfiguration struct { Items []ConfigurationItem `json:"items"` } -type ParamInitialize struct { // line 4263 +type ParamInitialize struct { XInitializeParams WorkspaceFoldersInitializeParams } // Represents a parameter of a callable-signature. A parameter can // have a label and a doc-comment. -type ParameterInformation struct { // line 10417 +type ParameterInformation struct { // The label of this parameter information. // // Either a string or an inclusive start and exclusive end offsets within its containing @@ -3378,7 +3396,7 @@ type ParameterInformation struct { // line 10417 // in the UI but can be omitted. Documentation string `json:"documentation,omitempty"` } -type PartialResultParams struct { // line 6494 +type PartialResultParams struct { // An optional token that a server can use to report partial results (e.g. streaming) to // the client. 
PartialResultToken *ProgressToken `json:"partialResultToken,omitempty"` @@ -3422,7 +3440,7 @@ type Pattern = string // (alias) line 14778 // that denotes `\r|\n` or `\n|` where `|` represents the character offset. // // @since 3.17.0 - support for negotiated position encoding. -type Position struct { // line 6737 +type Position struct { // Line position in a document (zero-based). // // If a line number is greater than the number of lines in a document, it defaults back to the number of lines in the document. @@ -3441,18 +3459,19 @@ type Position struct { // line 6737 // A set of predefined position encoding kinds. // // @since 3.17.0 -type PositionEncodingKind string // line 13842 +type PositionEncodingKind string type PrepareRename2Gn = Msg_PrepareRename2Gn // (alias) line 13927 -type PrepareRenameParams struct { // line 6161 +type PrepareRenameParams struct { TextDocumentPositionParams WorkDoneProgressParams } type PrepareRenameResult = Msg_PrepareRename2Gn // (alias) line 13927 -type PrepareSupportDefaultBehavior uint32 // line 14137 +type PrepareSupportDefaultBehavior uint32 + // A previous result id in a workspace pull request. // // @since 3.17.0 -type PreviousResultID struct { // line 7567 +type PreviousResultID struct { // The URI for which the client knowns a // result id. URI DocumentURI `json:"uri"` @@ -3463,14 +3482,14 @@ type PreviousResultID struct { // line 7567 // A previous result id in a workspace pull request. // // @since 3.17.0 -type PreviousResultId struct { // line 7567 +type PreviousResultId struct { // The URI for which the client knowns a // result id. URI DocumentURI `json:"uri"` // The value of the previous result id. Value string `json:"value"` } -type ProgressParams struct { // line 6437 +type ProgressParams struct { // The progress token provided by the client or server. Token ProgressToken `json:"token"` // The progress data. 
@@ -3478,7 +3497,7 @@ type ProgressParams struct { // line 6437 } type ProgressToken = interface{} // (alias) line 14375 // The publish diagnostic client capabilities. -type PublishDiagnosticsClientCapabilities struct { // line 12448 +type PublishDiagnosticsClientCapabilities struct { // Whether the clients accepts diagnostics with related information. RelatedInformation bool `json:"relatedInformation,omitempty"` // Client supports the tag property to provide meta data about a diagnostic. @@ -3504,7 +3523,7 @@ type PublishDiagnosticsClientCapabilities struct { // line 12448 } // The publish diagnostic notification's parameters. -type PublishDiagnosticsParams struct { // line 4657 +type PublishDiagnosticsParams struct { // The URI for which diagnostic information is reported. URI DocumentURI `json:"uri"` // Optional the version number of the document the diagnostics are published for. @@ -3528,7 +3547,7 @@ type PublishDiagnosticsParams struct { // line 4657 // } // // ``` -type Range struct { // line 6547 +type Range struct { // The range's start position. Start Position `json:"start"` // The range's end position. @@ -3536,25 +3555,25 @@ type Range struct { // line 6547 } // Client Capabilities for a {@link ReferencesRequest}. -type ReferenceClientCapabilities struct { // line 11974 +type ReferenceClientCapabilities struct { // Whether references supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } // Value-object that contains additional information when // requesting references. -type ReferenceContext struct { // line 9248 +type ReferenceContext struct { // Include the declaration of the current symbol. IncludeDeclaration bool `json:"includeDeclaration"` } // Reference options. -type ReferenceOptions struct { // line 9262 +type ReferenceOptions struct { WorkDoneProgressOptions } // Parameters for a {@link ReferencesRequest}. 
-type ReferenceParams struct { // line 5249 +type ReferenceParams struct { Context ReferenceContext `json:"context"` TextDocumentPositionParams WorkDoneProgressParams @@ -3562,13 +3581,13 @@ type ReferenceParams struct { // line 5249 } // Registration options for a {@link ReferencesRequest}. -type ReferenceRegistrationOptions struct { // line 5278 +type ReferenceRegistrationOptions struct { TextDocumentRegistrationOptions ReferenceOptions } // General parameters to register for a notification or to register a provider. -type Registration struct { // line 7895 +type Registration struct { // The id used to register the request. The id can be used to deregister // the request again. ID string `json:"id"` @@ -3577,14 +3596,14 @@ type Registration struct { // line 7895 // Options necessary for the registration. RegisterOptions interface{} `json:"registerOptions,omitempty"` } -type RegistrationParams struct { // line 4233 +type RegistrationParams struct { Registrations []Registration `json:"registrations"` } // Client capabilities specific to regular expressions. // // @since 3.16.0 -type RegularExpressionsClientCapabilities struct { // line 12893 +type RegularExpressionsClientCapabilities struct { // The engine's name. Engine string `json:"engine"` // The engine's version. @@ -3594,7 +3613,7 @@ type RegularExpressionsClientCapabilities struct { // line 12893 // A full diagnostic report with a set of related documents. // // @since 3.17.0 -type RelatedFullDocumentDiagnosticReport struct { // line 7393 +type RelatedFullDocumentDiagnosticReport struct { // Diagnostics of related documents. This information is useful // in programming languages where code in a file A can generate // diagnostics in a file B which A depends on. An example of @@ -3609,7 +3628,7 @@ type RelatedFullDocumentDiagnosticReport struct { // line 7393 // An unchanged diagnostic report with a set of related documents. 
// // @since 3.17.0 -type RelatedUnchangedDocumentDiagnosticReport struct { // line 7432 +type RelatedUnchangedDocumentDiagnosticReport struct { // Diagnostics of related documents. This information is useful // in programming languages where code in a file A can generate // diagnostics in a file B which A depends on. An example of @@ -3626,14 +3645,14 @@ type RelatedUnchangedDocumentDiagnosticReport struct { // line 7432 // folder root, but it can be another absolute URI as well. // // @since 3.17.0 -type RelativePattern struct { // line 11101 +type RelativePattern struct { // A workspace folder or a base URI to which this pattern will be matched // against relatively. BaseURI Or_RelativePattern_baseUri `json:"baseUri"` // The actual glob pattern; Pattern Pattern `json:"pattern"` } -type RenameClientCapabilities struct { // line 12310 +type RenameClientCapabilities struct { // Whether rename supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Client supports testing for validity of rename operations @@ -3659,7 +3678,7 @@ type RenameClientCapabilities struct { // line 12310 } // Rename file operation -type RenameFile struct { // line 6985 +type RenameFile struct { // A rename Kind string `json:"kind"` // The old (existing) location. @@ -3672,7 +3691,7 @@ type RenameFile struct { // line 6985 } // Rename file options -type RenameFileOptions struct { // line 9771 +type RenameFileOptions struct { // Overwrite target if existing. Overwrite wins over `ignoreIfExists` Overwrite bool `json:"overwrite,omitempty"` // Ignores if target exists. @@ -3683,14 +3702,14 @@ type RenameFileOptions struct { // line 9771 // files. // // @since 3.16.0 -type RenameFilesParams struct { // line 3355 +type RenameFilesParams struct { // An array of all files/folders renamed in this operation. When a folder is renamed, only // the folder will be included, and not its children. 
Files []FileRename `json:"files"` } // Provider options for a {@link RenameRequest}. -type RenameOptions struct { // line 9599 +type RenameOptions struct { // Renames should be checked and tested before being executed. // // @since version 3.12.0 @@ -3699,7 +3718,7 @@ type RenameOptions struct { // line 9599 } // The parameters of a {@link RenameRequest}. -type RenameParams struct { // line 6110 +type RenameParams struct { // The document to rename. TextDocument TextDocumentIdentifier `json:"textDocument"` // The position at which this request was sent. @@ -3712,13 +3731,13 @@ type RenameParams struct { // line 6110 } // Registration options for a {@link RenameRequest}. -type RenameRegistrationOptions struct { // line 6146 +type RenameRegistrationOptions struct { TextDocumentRegistrationOptions RenameOptions } // A generic resource operation. -type ResourceOperation struct { // line 9723 +type ResourceOperation struct { // The resource operation kind. Kind string `json:"kind"` // An optional annotation identifier describing the operation. @@ -3726,9 +3745,10 @@ type ResourceOperation struct { // line 9723 // @since 3.16.0 AnnotationID *ChangeAnnotationIdentifier `json:"annotationId,omitempty"` } -type ResourceOperationKind string // line 14084 +type ResourceOperationKind string + // Save options. -type SaveOptions struct { // line 8783 +type SaveOptions struct { // The client is supposed to include the content on save. IncludeText bool `json:"includeText,omitempty"` } @@ -3737,7 +3757,7 @@ type SaveOptions struct { // line 8783 // // @since 3.18.0 // @proposed -type SelectedCompletionInfo struct { // line 10004 +type SelectedCompletionInfo struct { // The range that will be replaced if this completion item is accepted. Range Range `json:"range"` // The text the range will be replaced with if this completion is accepted. @@ -3746,24 +3766,24 @@ type SelectedCompletionInfo struct { // line 10004 // A selection range represents a part of a selection hierarchy. 
A selection range // may have a parent selection range that contains it. -type SelectionRange struct { // line 2642 +type SelectionRange struct { // The {@link Range range} of this selection range. Range Range `json:"range"` // The parent selection range containing this range. Therefore `parent.range` must contain `this.range`. Parent *SelectionRange `json:"parent,omitempty"` } -type SelectionRangeClientCapabilities struct { // line 12434 +type SelectionRangeClientCapabilities struct { // Whether implementation supports dynamic registration for selection range providers. If this is set to `true` // the client supports the new `SelectionRangeRegistrationOptions` return value for the corresponding server // capability as well. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` } -type SelectionRangeOptions struct { // line 6760 +type SelectionRangeOptions struct { WorkDoneProgressOptions } // A parameter literal used in selection range requests. -type SelectionRangeParams struct { // line 2607 +type SelectionRangeParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The positions inside the text document. @@ -3771,7 +3791,7 @@ type SelectionRangeParams struct { // line 2607 WorkDoneProgressParams PartialResultParams } -type SelectionRangeRegistrationOptions struct { // line 2665 +type SelectionRangeRegistrationOptions struct { SelectionRangeOptions TextDocumentRegistrationOptions StaticRegistrationOptions @@ -3782,15 +3802,17 @@ type SelectionRangeRegistrationOptions struct { // line 2665 // corresponding client capabilities. // // @since 3.16.0 -type SemanticTokenModifiers string // line 13063 +type SemanticTokenModifiers string + // A set of predefined token types. This set is not fixed // an clients can specify additional token types via the // corresponding client capabilities. 
// // @since 3.16.0 -type SemanticTokenTypes string // line 12956 +type SemanticTokenTypes string + // @since 3.16.0 -type SemanticTokens struct { // line 2953 +type SemanticTokens struct { // An optional result id. If provided and clients support delta updating // the client will include the result id in the next semantic token request. // A server can then instead of computing all semantic tokens again simply @@ -3801,7 +3823,7 @@ type SemanticTokens struct { // line 2953 } // @since 3.16.0 -type SemanticTokensClientCapabilities struct { // line 12533 +type SemanticTokensClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` // return value for the corresponding server capability as well. @@ -3846,14 +3868,14 @@ type SemanticTokensClientCapabilities struct { // line 12533 } // @since 3.16.0 -type SemanticTokensDelta struct { // line 3052 +type SemanticTokensDelta struct { ResultID string `json:"resultId,omitempty"` // The semantic token edits to transform a previous result into a new result. Edits []SemanticTokensEdit `json:"edits"` } // @since 3.16.0 -type SemanticTokensDeltaParams struct { // line 3019 +type SemanticTokensDeltaParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The result id of a previous response. The result Id can either point to a full response @@ -3864,12 +3886,12 @@ type SemanticTokensDeltaParams struct { // line 3019 } // @since 3.16.0 -type SemanticTokensDeltaPartialResult struct { // line 3078 +type SemanticTokensDeltaPartialResult struct { Edits []SemanticTokensEdit `json:"edits"` } // @since 3.16.0 -type SemanticTokensEdit struct { // line 6853 +type SemanticTokensEdit struct { // The start offset of the edit. Start uint32 `json:"start"` // The count of elements to remove. 
@@ -3879,7 +3901,7 @@ type SemanticTokensEdit struct { // line 6853 } // @since 3.16.0 -type SemanticTokensLegend struct { // line 9644 +type SemanticTokensLegend struct { // The token types a server uses. TokenTypes []string `json:"tokenTypes"` // The token modifiers a server uses. @@ -3887,7 +3909,7 @@ type SemanticTokensLegend struct { // line 9644 } // @since 3.16.0 -type SemanticTokensOptions struct { // line 6782 +type SemanticTokensOptions struct { // The legend used by the server Legend SemanticTokensLegend `json:"legend"` // Server supports providing semantic tokens for a specific range @@ -3899,7 +3921,7 @@ type SemanticTokensOptions struct { // line 6782 } // @since 3.16.0 -type SemanticTokensParams struct { // line 2928 +type SemanticTokensParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` WorkDoneProgressParams @@ -3907,12 +3929,12 @@ type SemanticTokensParams struct { // line 2928 } // @since 3.16.0 -type SemanticTokensPartialResult struct { // line 2980 +type SemanticTokensPartialResult struct { Data []uint32 `json:"data"` } // @since 3.16.0 -type SemanticTokensRangeParams struct { // line 3095 +type SemanticTokensRangeParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The range the semantic tokens are requested for. @@ -3922,14 +3944,14 @@ type SemanticTokensRangeParams struct { // line 3095 } // @since 3.16.0 -type SemanticTokensRegistrationOptions struct { // line 2997 +type SemanticTokensRegistrationOptions struct { TextDocumentRegistrationOptions SemanticTokensOptions StaticRegistrationOptions } // @since 3.16.0 -type SemanticTokensWorkspaceClientCapabilities struct { // line 11342 +type SemanticTokensWorkspaceClientCapabilities struct { // Whether the client implementation supports a refresh request sent from // the server to the client. 
// @@ -3942,7 +3964,7 @@ type SemanticTokensWorkspaceClientCapabilities struct { // line 11342 // Defines the capabilities provided by a language // server. -type ServerCapabilities struct { // line 8107 +type ServerCapabilities struct { // The position encoding the server picked from the encodings offered // by the client via the client capability `general.positionEncodings`. // @@ -4051,14 +4073,14 @@ type ServerCapabilities struct { // line 8107 // Experimental server capabilities. Experimental interface{} `json:"experimental,omitempty"` } -type SetTraceParams struct { // line 6383 +type SetTraceParams struct { Value TraceValues `json:"value"` } // Client capabilities for the showDocument request. // // @since 3.16.0 -type ShowDocumentClientCapabilities struct { // line 12878 +type ShowDocumentClientCapabilities struct { // The client has support for the showDocument // request. Support bool `json:"support"` @@ -4067,7 +4089,7 @@ type ShowDocumentClientCapabilities struct { // line 12878 // Params to show a resource in the UI. // // @since 3.16.0 -type ShowDocumentParams struct { // line 3128 +type ShowDocumentParams struct { // The uri to show. URI URI `json:"uri"` // Indicates to show the resource in an external program. @@ -4089,13 +4111,13 @@ type ShowDocumentParams struct { // line 3128 // The result of a showDocument request. // // @since 3.16.0 -type ShowDocumentResult struct { // line 3170 +type ShowDocumentResult struct { // A boolean indicating if the show was successful. Success bool `json:"success"` } // The parameters of a notification message. -type ShowMessageParams struct { // line 4378 +type ShowMessageParams struct { // The message type. See {@link MessageType} Type MessageType `json:"type"` // The actual message. 
@@ -4103,11 +4125,11 @@ type ShowMessageParams struct { // line 4378 } // Show message request client capabilities -type ShowMessageRequestClientCapabilities struct { // line 12851 +type ShowMessageRequestClientCapabilities struct { // Capabilities specific to the `MessageActionItem` type. MessageActionItem *PMessageActionItemPShowMessage `json:"messageActionItem,omitempty"` } -type ShowMessageRequestParams struct { // line 4400 +type ShowMessageRequestParams struct { // The message type. See {@link MessageType} Type MessageType `json:"type"` // The actual message. @@ -4119,7 +4141,7 @@ type ShowMessageRequestParams struct { // line 4400 // Signature help represents the signature of something // callable. There can be multiple signature but only one // active and only one active parameter. -type SignatureHelp struct { // line 5163 +type SignatureHelp struct { // One or more signatures. Signatures []SignatureInformation `json:"signatures"` // The active signature. If omitted or the value lies outside the @@ -4143,7 +4165,7 @@ type SignatureHelp struct { // line 5163 } // Client Capabilities for a {@link SignatureHelpRequest}. -type SignatureHelpClientCapabilities struct { // line 11793 +type SignatureHelpClientCapabilities struct { // Whether signature help supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports the following `SignatureInformation` @@ -4161,7 +4183,7 @@ type SignatureHelpClientCapabilities struct { // line 11793 // Additional information about the context in which a signature help request was triggered. // // @since 3.15.0 -type SignatureHelpContext struct { // line 9105 +type SignatureHelpContext struct { // Action that caused signature help to be triggered. TriggerKind SignatureHelpTriggerKind `json:"triggerKind"` // Character that caused signature help to be triggered. 
@@ -4181,7 +4203,7 @@ type SignatureHelpContext struct { // line 9105 } // Server Capabilities for a {@link SignatureHelpRequest}. -type SignatureHelpOptions struct { // line 9200 +type SignatureHelpOptions struct { // List of characters that trigger signature help automatically. TriggerCharacters []string `json:"triggerCharacters,omitempty"` // List of characters that re-trigger signature help. @@ -4195,7 +4217,7 @@ type SignatureHelpOptions struct { // line 9200 } // Parameters for a {@link SignatureHelpRequest}. -type SignatureHelpParams struct { // line 5135 +type SignatureHelpParams struct { // The signature help context. This is only available if the client specifies // to send this using the client capability `textDocument.signatureHelp.contextSupport === true` // @@ -4206,7 +4228,7 @@ type SignatureHelpParams struct { // line 5135 } // Registration options for a {@link SignatureHelpRequest}. -type SignatureHelpRegistrationOptions struct { // line 5198 +type SignatureHelpRegistrationOptions struct { TextDocumentRegistrationOptions SignatureHelpOptions } @@ -4214,11 +4236,12 @@ type SignatureHelpRegistrationOptions struct { // line 5198 // How a signature help was triggered. // // @since 3.15.0 -type SignatureHelpTriggerKind uint32 // line 13995 +type SignatureHelpTriggerKind uint32 + // Represents the signature of something callable. A signature // can have a label, like a function-name, a doc-comment, and // a set of parameters. -type SignatureInformation struct { // line 9146 +type SignatureInformation struct { // The label of this signature. Will be shown in // the UI. Label string `json:"label"` @@ -4237,7 +4260,7 @@ type SignatureInformation struct { // line 9146 // Static registration options to be returned in the initialize // request. -type StaticRegistrationOptions struct { // line 6579 +type StaticRegistrationOptions struct { // The id used to register the request. The id can be used to deregister // the request again. See also Registration#id. 
ID string `json:"id,omitempty"` @@ -4253,7 +4276,7 @@ type StaticRegistrationOptions struct { // line 6579 // // @since 3.18.0 // @proposed -type StringValue struct { // line 7858 +type StringValue struct { // The kind of string value. Kind string `json:"kind"` // The snippet string. @@ -4262,7 +4285,7 @@ type StringValue struct { // line 7858 // Represents information about programming constructs like variables, classes, // interfaces etc. -type SymbolInformation struct { // line 5376 +type SymbolInformation struct { // extends BaseSymbolInformation // Indicates if this symbol is deprecated. // @@ -4294,20 +4317,22 @@ type SymbolInformation struct { // line 5376 } // A symbol kind. -type SymbolKind uint32 // line 13234 +type SymbolKind uint32 + // Symbol tags are extra annotations that tweak the rendering of a symbol. // // @since 3.16 -type SymbolTag uint32 // line 13348 +type SymbolTag uint32 + // Describe options to be used when registered for text document change events. -type TextDocumentChangeRegistrationOptions struct { // line 4507 +type TextDocumentChangeRegistrationOptions struct { // How documents are synced to the server. SyncKind TextDocumentSyncKind `json:"syncKind"` TextDocumentRegistrationOptions } // Text document specific client capabilities. -type TextDocumentClientCapabilities struct { // line 10677 +type TextDocumentClientCapabilities struct { // Defines which synchronization capabilities the client supports. Synchronization *TextDocumentSyncClientCapabilities `json:"synchronization,omitempty"` // Capabilities specific to the `textDocument/completion` request. @@ -4411,7 +4436,7 @@ type TextDocumentContentChangeEvent = Msg_TextDocumentContentChangeEvent // (ali // on a document version Si and after they are applied move the document to version Si+1. // So the creator of a TextDocumentEdit doesn't need to sort the array of edits or do any // kind of ordering. However the edits must be non overlapping. 
-type TextDocumentEdit struct { // line 6913 +type TextDocumentEdit struct { // The text document to change. TextDocument OptionalVersionedTextDocumentIdentifier `json:"textDocument"` // The edits to be applied. @@ -4440,14 +4465,14 @@ type TextDocumentEdit struct { // line 6913 // @since 3.17.0 type TextDocumentFilter = Msg_TextDocumentFilter // (alias) line 14560 // A literal to identify a text document in the client. -type TextDocumentIdentifier struct { // line 6655 +type TextDocumentIdentifier struct { // The text document's uri. URI DocumentURI `json:"uri"` } // An item to transfer a text document from the client to the // server. -type TextDocumentItem struct { // line 7641 +type TextDocumentItem struct { // The text document's uri. URI DocumentURI `json:"uri"` // The text document's language identifier. @@ -4461,7 +4486,7 @@ type TextDocumentItem struct { // line 7641 // A parameter literal used in requests to pass a text document and a position inside that // document. -type TextDocumentPositionParams struct { // line 6458 +type TextDocumentPositionParams struct { // The text document. TextDocument TextDocumentIdentifier `json:"textDocument"` // The position inside the text document. @@ -4469,20 +4494,21 @@ type TextDocumentPositionParams struct { // line 6458 } // General text document registration options. -type TextDocumentRegistrationOptions struct { // line 2441 +type TextDocumentRegistrationOptions struct { // A document selector to identify the scope of the registration. If set to null // the document selector provided on the client side will be used. DocumentSelector DocumentSelector `json:"documentSelector"` } // Represents reasons why a text document is saved. -type TextDocumentSaveReason uint32 // line 13502 +type TextDocumentSaveReason uint32 + // Save registration options. 
-type TextDocumentSaveRegistrationOptions struct { // line 4564 +type TextDocumentSaveRegistrationOptions struct { TextDocumentRegistrationOptions SaveOptions } -type TextDocumentSyncClientCapabilities struct { // line 11492 +type TextDocumentSyncClientCapabilities struct { // Whether text document synchronization supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // The client supports sending will save notifications. @@ -4497,8 +4523,8 @@ type TextDocumentSyncClientCapabilities struct { // line 11492 // Defines how the host (editor) should sync // document changes to the language server. -type TextDocumentSyncKind uint32 // line 13477 -type TextDocumentSyncOptions struct { // line 10090 +type TextDocumentSyncKind uint32 +type TextDocumentSyncOptions struct { // Open and close notifications are sent to the server. If omitted open close notification should not // be sent. OpenClose bool `json:"openClose,omitempty"` @@ -4517,7 +4543,7 @@ type TextDocumentSyncOptions struct { // line 10090 } // A text edit applicable to a text document. -type TextEdit struct { // line 4601 +type TextEdit struct { // The range of the text document to be manipulated. To insert // text into a document create a range where start === end. Range Range `json:"range"` @@ -4525,10 +4551,11 @@ type TextEdit struct { // line 4601 // empty string. NewText string `json:"newText"` } -type TokenFormat string // line 14151 -type TraceValues string // line 13776 +type TokenFormat string +type TraceValues string + // Since 3.6.0 -type TypeDefinitionClientCapabilities struct { // line 11924 +type TypeDefinitionClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `TypeDefinitionRegistrationOptions` return value // for the corresponding server capability as well. 
@@ -4538,22 +4565,22 @@ type TypeDefinitionClientCapabilities struct { // line 11924 // Since 3.14.0 LinkSupport bool `json:"linkSupport,omitempty"` } -type TypeDefinitionOptions struct { // line 6594 +type TypeDefinitionOptions struct { WorkDoneProgressOptions } -type TypeDefinitionParams struct { // line 2196 +type TypeDefinitionParams struct { TextDocumentPositionParams WorkDoneProgressParams PartialResultParams } -type TypeDefinitionRegistrationOptions struct { // line 2216 +type TypeDefinitionRegistrationOptions struct { TextDocumentRegistrationOptions TypeDefinitionOptions StaticRegistrationOptions } // @since 3.17.0 -type TypeHierarchyClientCapabilities struct { // line 12713 +type TypeHierarchyClientCapabilities struct { // Whether implementation supports dynamic registration. If this is set to `true` // the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)` // return value for the corresponding server capability as well. @@ -4561,7 +4588,7 @@ type TypeHierarchyClientCapabilities struct { // line 12713 } // @since 3.17.0 -type TypeHierarchyItem struct { // line 3483 +type TypeHierarchyItem struct { // The name of this item. Name string `json:"name"` // The kind of this item. @@ -4589,14 +4616,14 @@ type TypeHierarchyItem struct { // line 3483 // Type hierarchy options used during static registration. // // @since 3.17.0 -type TypeHierarchyOptions struct { // line 7172 +type TypeHierarchyOptions struct { WorkDoneProgressOptions } // The parameter of a `textDocument/prepareTypeHierarchy` request. // // @since 3.17.0 -type TypeHierarchyPrepareParams struct { // line 3465 +type TypeHierarchyPrepareParams struct { TextDocumentPositionParams WorkDoneProgressParams } @@ -4604,7 +4631,7 @@ type TypeHierarchyPrepareParams struct { // line 3465 // Type hierarchy options used during static or dynamic registration. 
// // @since 3.17.0 -type TypeHierarchyRegistrationOptions struct { // line 3560 +type TypeHierarchyRegistrationOptions struct { TextDocumentRegistrationOptions TypeHierarchyOptions StaticRegistrationOptions @@ -4613,7 +4640,7 @@ type TypeHierarchyRegistrationOptions struct { // line 3560 // The parameter of a `typeHierarchy/subtypes` request. // // @since 3.17.0 -type TypeHierarchySubtypesParams struct { // line 3606 +type TypeHierarchySubtypesParams struct { Item TypeHierarchyItem `json:"item"` WorkDoneProgressParams PartialResultParams @@ -4622,14 +4649,14 @@ type TypeHierarchySubtypesParams struct { // line 3606 // The parameter of a `typeHierarchy/supertypes` request. // // @since 3.17.0 -type TypeHierarchySupertypesParams struct { // line 3582 +type TypeHierarchySupertypesParams struct { Item TypeHierarchyItem `json:"item"` WorkDoneProgressParams PartialResultParams } // created for Tuple -type UIntCommaUInt struct { // line 10430 +type UIntCommaUInt struct { Fld0 uint32 `json:"fld0"` Fld1 uint32 `json:"fld1"` } @@ -4639,7 +4666,7 @@ type URI = string // report is still accurate. // // @since 3.17.0 -type UnchangedDocumentDiagnosticReport struct { // line 7506 +type UnchangedDocumentDiagnosticReport struct { // A document diagnostic report indicating // no changes to the last result. A server can // only return `unchanged` if result ids are @@ -4653,23 +4680,24 @@ type UnchangedDocumentDiagnosticReport struct { // line 7506 // Moniker uniqueness level to define scope of the moniker. // // @since 3.16.0 -type UniquenessLevel string // line 13364 +type UniquenessLevel string + // General parameters to unregister a request or notification. -type Unregistration struct { // line 7926 +type Unregistration struct { // The id used to unregister the request or notification. Usually an id // provided during the register request. ID string `json:"id"` // The method to unregister for. 
 Method string `json:"method"` } -type UnregistrationParams struct { // line 4248 +type UnregistrationParams struct { Unregisterations []Unregistration `json:"unregisterations"` } // A versioned notebook document identifier. // // @since 3.17.0 -type VersionedNotebookDocumentIdentifier struct { // line 7679 +type VersionedNotebookDocumentIdentifier struct { // The version number of this notebook document. Version int32 `json:"version"` // The notebook document's uri. @@ -4677,19 +4705,19 @@ type VersionedNotebookDocumentIdentifier struct { // line 7679 } // A text document identifier to denote a specific version of a text document. -type VersionedTextDocumentIdentifier struct { // line 8763 +type VersionedTextDocumentIdentifier struct { // The version number of this document. Version int32 `json:"version"` TextDocumentIdentifier } -type WatchKind = uint32 // line 13505// The parameters sent in a will save text document notification. -type WillSaveTextDocumentParams struct { // line 4579 +type WatchKind = uint32 // The parameters sent in a will save text document notification. +type WillSaveTextDocumentParams struct { // The document that will be saved. TextDocument TextDocumentIdentifier `json:"textDocument"` // The 'TextDocumentSaveReason'. Reason TextDocumentSaveReason `json:"reason"` } -type WindowClientCapabilities struct { // line 10994 +type WindowClientCapabilities struct { // It indicates whether the client supports server initiated // progress using the `window/workDoneProgress/create` request. // @@ -4709,7 +4737,7 @@ type WindowClientCapabilities struct { // line 10994 // @since 3.16.0 ShowDocument *ShowDocumentClientCapabilities `json:"showDocument,omitempty"` } -type WorkDoneProgressBegin struct { // line 6276 +type WorkDoneProgressBegin struct { Kind string `json:"kind"` // Mandatory title of the progress operation. Used to briefly inform about // the kind of operation being performed. 
@@ -4734,34 +4762,34 @@ type WorkDoneProgressBegin struct { // line 6276 // that are not following this rule. The value range is [0, 100]. Percentage uint32 `json:"percentage,omitempty"` } -type WorkDoneProgressCancelParams struct { // line 2698 +type WorkDoneProgressCancelParams struct { // The token to be used to report progress. Token ProgressToken `json:"token"` } -type WorkDoneProgressCreateParams struct { // line 2685 +type WorkDoneProgressCreateParams struct { // The token to be used to report progress. Token ProgressToken `json:"token"` } -type WorkDoneProgressEnd struct { // line 6362 +type WorkDoneProgressEnd struct { Kind string `json:"kind"` // Optional, a final message indicating to for example indicate the outcome // of the operation. Message string `json:"message,omitempty"` } -type WorkDoneProgressOptions struct { // line 2428 +type WorkDoneProgressOptions struct { WorkDoneProgress bool `json:"workDoneProgress,omitempty"` } // created for And -type WorkDoneProgressOptionsAndTextDocumentRegistrationOptions struct { // line 196 +type WorkDoneProgressOptionsAndTextDocumentRegistrationOptions struct { WorkDoneProgressOptions TextDocumentRegistrationOptions } -type WorkDoneProgressParams struct { // line 6480 +type WorkDoneProgressParams struct { // An optional token that a server can use to report work done progress. WorkDoneToken ProgressToken `json:"workDoneToken,omitempty"` } -type WorkDoneProgressReport struct { // line 6323 +type WorkDoneProgressReport struct { Kind string `json:"kind"` // Controls enablement state of a cancel button. // @@ -4784,7 +4812,7 @@ type WorkDoneProgressReport struct { // line 6323 } // created for Literal (Lit_ServerCapabilities_workspace) -type Workspace6Gn struct { // line 8722 +type Workspace6Gn struct { // The server supports workspace folder. // // @since 3.6.0 @@ -4796,7 +4824,7 @@ type Workspace6Gn struct { // line 8722 } // Workspace specific client capabilities. 
-type WorkspaceClientCapabilities struct { // line 10538 +type WorkspaceClientCapabilities struct { // The client supports applying batch edits // to the workspace by supporting the request // 'workspace/applyEdit' @@ -4853,7 +4881,7 @@ type WorkspaceClientCapabilities struct { // line 10538 // Parameters of the workspace diagnostic request. // // @since 3.17.0 -type WorkspaceDiagnosticParams struct { // line 3950 +type WorkspaceDiagnosticParams struct { // The additional identifier provided during registration. Identifier string `json:"identifier,omitempty"` // The currently known diagnostic reports with their @@ -4866,14 +4894,14 @@ type WorkspaceDiagnosticParams struct { // line 3950 // A workspace diagnostic report. // // @since 3.17.0 -type WorkspaceDiagnosticReport struct { // line 3987 +type WorkspaceDiagnosticReport struct { Items []WorkspaceDocumentDiagnosticReport `json:"items"` } // A partial result for a workspace diagnostic report. // // @since 3.17.0 -type WorkspaceDiagnosticReportPartialResult struct { // line 4004 +type WorkspaceDiagnosticReportPartialResult struct { Items []WorkspaceDocumentDiagnosticReport `json:"items"` } @@ -4893,7 +4921,7 @@ type WorkspaceDocumentDiagnosticReport = Or_WorkspaceDocumentDiagnosticReport // // An invalid sequence (e.g. (1) delete file a.txt and (2) insert text into file a.txt) will // cause failure of the operation. How the client recovers from the failure is described by // the client capability: `workspace.workspaceEdit.failureHandling` -type WorkspaceEdit struct { // line 3266 +type WorkspaceEdit struct { // Holds changes to existing resources. 
Changes map[DocumentURI][]TextEdit `json:"changes,omitempty"` // Depending on the client capability `workspace.workspaceEdit.resourceOperations` document changes @@ -4915,7 +4943,7 @@ type WorkspaceEdit struct { // line 3266 // @since 3.16.0 ChangeAnnotations map[ChangeAnnotationIdentifier]ChangeAnnotation `json:"changeAnnotations,omitempty"` } -type WorkspaceEditClientCapabilities struct { // line 11133 +type WorkspaceEditClientCapabilities struct { // The client supports versioned document changes in `WorkspaceEdit`s DocumentChanges bool `json:"documentChanges,omitempty"` // The resource operations the client supports. Clients should at least @@ -4944,14 +4972,14 @@ type WorkspaceEditClientCapabilities struct { // line 11133 } // A workspace folder inside a client. -type WorkspaceFolder struct { // line 2236 +type WorkspaceFolder struct { // The associated URI for this workspace folder. URI URI `json:"uri"` // The name of the workspace folder. Used to refer to this // workspace folder in the user interface. Name string `json:"name"` } -type WorkspaceFolders5Gn struct { // line 10287 +type WorkspaceFolders5Gn struct { // The server has support for workspace folders Supported bool `json:"supported,omitempty"` // Whether the server wants to receive workspace folder @@ -4965,13 +4993,13 @@ type WorkspaceFolders5Gn struct { // line 10287 } // The workspace folder change event. -type WorkspaceFoldersChangeEvent struct { // line 6604 +type WorkspaceFoldersChangeEvent struct { // The array of added workspace folders Added []WorkspaceFolder `json:"added"` // The array of the removed workspace folders Removed []WorkspaceFolder `json:"removed"` } -type WorkspaceFoldersInitializeParams struct { // line 8080 +type WorkspaceFoldersInitializeParams struct { // The workspace folders configured in the client when the server starts. // // This property is only available if the client supports workspace folders. 
@@ -4981,7 +5009,7 @@ type WorkspaceFoldersInitializeParams struct { // line 8080 // @since 3.6.0 WorkspaceFolders []WorkspaceFolder `json:"workspaceFolders,omitempty"` } -type WorkspaceFoldersServerCapabilities struct { // line 10287 +type WorkspaceFoldersServerCapabilities struct { // The server has support for workspace folders Supported bool `json:"supported,omitempty"` // Whether the server wants to receive workspace folder @@ -4997,7 +5025,7 @@ type WorkspaceFoldersServerCapabilities struct { // line 10287 // A full document diagnostic report for a workspace diagnostic result. // // @since 3.17.0 -type WorkspaceFullDocumentDiagnosticReport struct { // line 9852 +type WorkspaceFullDocumentDiagnosticReport struct { // The URI for which diagnostic information is reported. URI DocumentURI `json:"uri"` // The version number for which the diagnostics are reported. @@ -5011,7 +5039,7 @@ type WorkspaceFullDocumentDiagnosticReport struct { // line 9852 // See also SymbolInformation. // // @since 3.17.0 -type WorkspaceSymbol struct { // line 5710 +type WorkspaceSymbol struct { // The location of the symbol. Whether a server is allowed to // return a location without a range depends on the client // capability `workspace.symbol.resolveSupport`. @@ -5025,7 +5053,7 @@ type WorkspaceSymbol struct { // line 5710 } // Client capabilities for a {@link WorkspaceSymbolRequest}. -type WorkspaceSymbolClientCapabilities struct { // line 11240 +type WorkspaceSymbolClientCapabilities struct { // Symbol request supports dynamic registration. DynamicRegistration bool `json:"dynamicRegistration,omitempty"` // Specific capabilities for the `SymbolKind` in the `workspace/symbol` request. @@ -5044,7 +5072,7 @@ type WorkspaceSymbolClientCapabilities struct { // line 11240 } // Server capabilities for a {@link WorkspaceSymbolRequest}. 
-type WorkspaceSymbolOptions struct { // line 9423 +type WorkspaceSymbolOptions struct { // The server provides support to resolve additional // information for a workspace symbol. // @@ -5054,7 +5082,7 @@ type WorkspaceSymbolOptions struct { // line 9423 } // The parameters of a {@link WorkspaceSymbolRequest}. -type WorkspaceSymbolParams struct { // line 5686 +type WorkspaceSymbolParams struct { // A query string to filter symbols by. Clients may send an empty // string here to request all symbols. Query string `json:"query"` @@ -5063,14 +5091,14 @@ type WorkspaceSymbolParams struct { // line 5686 } // Registration options for a {@link WorkspaceSymbolRequest}. -type WorkspaceSymbolRegistrationOptions struct { // line 5759 +type WorkspaceSymbolRegistrationOptions struct { WorkspaceSymbolOptions } // An unchanged document diagnostic report for a workspace diagnostic result. // // @since 3.17.0 -type WorkspaceUnchangedDocumentDiagnosticReport struct { // line 9890 +type WorkspaceUnchangedDocumentDiagnosticReport struct { // The URI for which diagnostic information is reported. URI DocumentURI `json:"uri"` // The version number for which the diagnostics are reported. @@ -5080,7 +5108,7 @@ type WorkspaceUnchangedDocumentDiagnosticReport struct { // line 9890 } // The initialize parameters -type XInitializeParams struct { // line 7948 +type XInitializeParams struct { // The process Id of the parent process that started // the server. // @@ -5121,7 +5149,7 @@ type XInitializeParams struct { // line 7948 } // The initialize parameters -type _InitializeParams struct { // line 7948 +type _InitializeParams struct { // The process Id of the parent process that started // the server. // @@ -5164,11 +5192,11 @@ type _InitializeParams struct { // line 7948 const ( // A set of predefined code action kinds // Empty kind. 
- Empty CodeActionKind = "" // line 13726 + Empty CodeActionKind = "" // Base kind for quickfix actions: 'quickfix' - QuickFix CodeActionKind = "quickfix" // line 13731 + QuickFix CodeActionKind = "quickfix" // Base kind for refactoring actions: 'refactor' - Refactor CodeActionKind = "refactor" // line 13736 + Refactor CodeActionKind = "refactor" // Base kind for refactoring extraction actions: 'refactor.extract' // // Example extract actions: @@ -5179,7 +5207,7 @@ const ( // - Extract variable // - Extract interface from class // - ... - RefactorExtract CodeActionKind = "refactor.extract" // line 13741 + RefactorExtract CodeActionKind = "refactor.extract" // Base kind for refactoring inline actions: 'refactor.inline' // // Example inline actions: @@ -5189,7 +5217,7 @@ const ( // - Inline variable // - Inline constant // - ... - RefactorInline CodeActionKind = "refactor.inline" // line 13746 + RefactorInline CodeActionKind = "refactor.inline" // Base kind for refactoring rewrite actions: 'refactor.rewrite' // // Example rewrite actions: @@ -5201,80 +5229,80 @@ const ( // - Make method static // - Move method to base class // - ... - RefactorRewrite CodeActionKind = "refactor.rewrite" // line 13751 + RefactorRewrite CodeActionKind = "refactor.rewrite" // Base kind for source actions: `source` // // Source code actions apply to the entire file. - Source CodeActionKind = "source" // line 13756 + Source CodeActionKind = "source" // Base kind for an organize imports source action: `source.organizeImports` - SourceOrganizeImports CodeActionKind = "source.organizeImports" // line 13761 + SourceOrganizeImports CodeActionKind = "source.organizeImports" // Base kind for auto-fix source actions: `source.fixAll`. // // Fix all actions automatically fix errors that have a clear fix that do not require user input. // They should not suppress errors or perform unsafe fixes such as generating new types or classes. 
// // @since 3.15.0 - SourceFixAll CodeActionKind = "source.fixAll" // line 13766 + SourceFixAll CodeActionKind = "source.fixAll" // The reason why code actions were requested. // // @since 3.17.0 // Code actions were explicitly requested by the user or by an extension. - CodeActionInvoked CodeActionTriggerKind = 1 // line 14028 + CodeActionInvoked CodeActionTriggerKind = 1 // Code actions were requested automatically. // // This typically happens when current selection in a file changes, but can // also be triggered when file content changes. - CodeActionAutomatic CodeActionTriggerKind = 2 // line 14033 + CodeActionAutomatic CodeActionTriggerKind = 2 // The kind of a completion entry. - TextCompletion CompletionItemKind = 1 // line 13534 - MethodCompletion CompletionItemKind = 2 // line 13538 - FunctionCompletion CompletionItemKind = 3 // line 13542 - ConstructorCompletion CompletionItemKind = 4 // line 13546 - FieldCompletion CompletionItemKind = 5 // line 13550 - VariableCompletion CompletionItemKind = 6 // line 13554 - ClassCompletion CompletionItemKind = 7 // line 13558 - InterfaceCompletion CompletionItemKind = 8 // line 13562 - ModuleCompletion CompletionItemKind = 9 // line 13566 - PropertyCompletion CompletionItemKind = 10 // line 13570 - UnitCompletion CompletionItemKind = 11 // line 13574 - ValueCompletion CompletionItemKind = 12 // line 13578 - EnumCompletion CompletionItemKind = 13 // line 13582 - KeywordCompletion CompletionItemKind = 14 // line 13586 - SnippetCompletion CompletionItemKind = 15 // line 13590 - ColorCompletion CompletionItemKind = 16 // line 13594 - FileCompletion CompletionItemKind = 17 // line 13598 - ReferenceCompletion CompletionItemKind = 18 // line 13602 - FolderCompletion CompletionItemKind = 19 // line 13606 - EnumMemberCompletion CompletionItemKind = 20 // line 13610 - ConstantCompletion CompletionItemKind = 21 // line 13614 - StructCompletion CompletionItemKind = 22 // line 13618 - EventCompletion CompletionItemKind = 23 // 
line 13622 - OperatorCompletion CompletionItemKind = 24 // line 13626 - TypeParameterCompletion CompletionItemKind = 25 // line 13630 + TextCompletion CompletionItemKind = 1 + MethodCompletion CompletionItemKind = 2 + FunctionCompletion CompletionItemKind = 3 + ConstructorCompletion CompletionItemKind = 4 + FieldCompletion CompletionItemKind = 5 + VariableCompletion CompletionItemKind = 6 + ClassCompletion CompletionItemKind = 7 + InterfaceCompletion CompletionItemKind = 8 + ModuleCompletion CompletionItemKind = 9 + PropertyCompletion CompletionItemKind = 10 + UnitCompletion CompletionItemKind = 11 + ValueCompletion CompletionItemKind = 12 + EnumCompletion CompletionItemKind = 13 + KeywordCompletion CompletionItemKind = 14 + SnippetCompletion CompletionItemKind = 15 + ColorCompletion CompletionItemKind = 16 + FileCompletion CompletionItemKind = 17 + ReferenceCompletion CompletionItemKind = 18 + FolderCompletion CompletionItemKind = 19 + EnumMemberCompletion CompletionItemKind = 20 + ConstantCompletion CompletionItemKind = 21 + StructCompletion CompletionItemKind = 22 + EventCompletion CompletionItemKind = 23 + OperatorCompletion CompletionItemKind = 24 + TypeParameterCompletion CompletionItemKind = 25 // Completion item tags are extra annotations that tweak the rendering of a completion // item. // // @since 3.15.0 // Render a completion as obsolete, usually using a strike-out. - ComplDeprecated CompletionItemTag = 1 // line 13644 + ComplDeprecated CompletionItemTag = 1 // How a completion was triggered // Completion was triggered by typing an identifier (24x7 code // complete), manual invocation (e.g Ctrl+Space) or via API. - Invoked CompletionTriggerKind = 1 // line 13977 + Invoked CompletionTriggerKind = 1 // Completion was triggered by a trigger character specified by // the `triggerCharacters` properties of the `CompletionRegistrationOptions`. 
- TriggerCharacter CompletionTriggerKind = 2 // line 13982 + TriggerCharacter CompletionTriggerKind = 2 // Completion was re-triggered as current completion list is incomplete - TriggerForIncompleteCompletions CompletionTriggerKind = 3 // line 13987 + TriggerForIncompleteCompletions CompletionTriggerKind = 3 // The diagnostic's severity. // Reports an error. - SeverityError DiagnosticSeverity = 1 // line 13926 + SeverityError DiagnosticSeverity = 1 // Reports a warning. - SeverityWarning DiagnosticSeverity = 2 // line 13931 + SeverityWarning DiagnosticSeverity = 2 // Reports an information. - SeverityInformation DiagnosticSeverity = 3 // line 13936 + SeverityInformation DiagnosticSeverity = 3 // Reports a hint. - SeverityHint DiagnosticSeverity = 4 // line 13941 + SeverityHint DiagnosticSeverity = 4 // The diagnostic tags. // // @since 3.15.0 @@ -5282,91 +5310,91 @@ const ( // // Clients are allowed to render diagnostics with this tag faded out instead of having // an error squiggle. - Unnecessary DiagnosticTag = 1 // line 13956 + Unnecessary DiagnosticTag = 1 // Deprecated or obsolete code. // // Clients are allowed to rendered diagnostics with this tag strike through. - Deprecated DiagnosticTag = 2 // line 13961 + Deprecated DiagnosticTag = 2 // The document diagnostic report kinds. // // @since 3.17.0 // A diagnostic report with a full // set of problems. - DiagnosticFull DocumentDiagnosticReportKind = "full" // line 13122 + DiagnosticFull DocumentDiagnosticReportKind = "full" // A report indicating that the last // returned report is still accurate. - DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged" // line 13127 + DiagnosticUnchanged DocumentDiagnosticReportKind = "unchanged" // A document highlight kind. // A textual occurrence. - Text DocumentHighlightKind = 1 // line 13701 + Text DocumentHighlightKind = 1 // Read-access of a symbol, like reading a variable. 
- Read DocumentHighlightKind = 2 // line 13706 + Read DocumentHighlightKind = 2 // Write-access of a symbol, like writing to a variable. - Write DocumentHighlightKind = 3 // line 13711 + Write DocumentHighlightKind = 3 // Predefined error codes. - ParseError ErrorCodes = -32700 // line 13143 - InvalidRequest ErrorCodes = -32600 // line 13147 - MethodNotFound ErrorCodes = -32601 // line 13151 - InvalidParams ErrorCodes = -32602 // line 13155 - InternalError ErrorCodes = -32603 // line 13159 + ParseError ErrorCodes = -32700 + InvalidRequest ErrorCodes = -32600 + MethodNotFound ErrorCodes = -32601 + InvalidParams ErrorCodes = -32602 + InternalError ErrorCodes = -32603 // Error code indicating that a server received a notification or // request before the server has received the `initialize` request. - ServerNotInitialized ErrorCodes = -32002 // line 13163 - UnknownErrorCode ErrorCodes = -32001 // line 13168 + ServerNotInitialized ErrorCodes = -32002 + UnknownErrorCode ErrorCodes = -32001 // Applying the workspace change is simply aborted if one of the changes provided // fails. All operations executed before the failing operation stay executed. - Abort FailureHandlingKind = "abort" // line 14115 + Abort FailureHandlingKind = "abort" // All operations are executed transactional. That means they either all // succeed or no changes at all are applied to the workspace. - Transactional FailureHandlingKind = "transactional" // line 14120 + Transactional FailureHandlingKind = "transactional" // If the workspace edit contains only textual file changes they are executed transactional. // If resource changes (create, rename or delete file) are part of the change the failure // handling strategy is abort. - TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional" // line 14125 + TextOnlyTransactional FailureHandlingKind = "textOnlyTransactional" // The client tries to undo the operations already executed. But there is no // guarantee that this is succeeding. 
- Undo FailureHandlingKind = "undo" // line 14130 + Undo FailureHandlingKind = "undo" // The file event type // The file got created. - Created FileChangeType = 1 // line 13876 + Created FileChangeType = 1 // The file got changed. - Changed FileChangeType = 2 // line 13881 + Changed FileChangeType = 2 // The file got deleted. - Deleted FileChangeType = 3 // line 13886 + Deleted FileChangeType = 3 // A pattern kind describing if a glob pattern matches a file a folder or // both. // // @since 3.16.0 // The pattern matches a file only. - FilePattern FileOperationPatternKind = "file" // line 14049 + FilePattern FileOperationPatternKind = "file" // The pattern matches a folder only. - FolderPattern FileOperationPatternKind = "folder" // line 14054 + FolderPattern FileOperationPatternKind = "folder" // A set of predefined range kinds. // Folding range for a comment - Comment FoldingRangeKind = "comment" // line 13215 + Comment FoldingRangeKind = "comment" // Folding range for an import or include - Imports FoldingRangeKind = "imports" // line 13220 + Imports FoldingRangeKind = "imports" // Folding range for a region (e.g. `#region`) - Region FoldingRangeKind = "region" // line 13225 + Region FoldingRangeKind = "region" // Inlay hint kinds. // // @since 3.17.0 // An inlay hint that for a type annotation. - Type InlayHintKind = 1 // line 13433 + Type InlayHintKind = 1 // An inlay hint that is for a parameter. - Parameter InlayHintKind = 2 // line 13438 + Parameter InlayHintKind = 2 // Describes how an {@link InlineCompletionItemProvider inline completion provider} was triggered. // // @since 3.18.0 // @proposed // Completion was triggered explicitly by a user gesture. - InlineInvoked InlineCompletionTriggerKind = 0 // line 13827 + InlineInvoked InlineCompletionTriggerKind = 0 // Completion was triggered automatically while editing. 
- InlineAutomatic InlineCompletionTriggerKind = 1 // line 13832 + InlineAutomatic InlineCompletionTriggerKind = 1 // Defines whether the insert text in a completion item should be interpreted as // plain text or a snippet. // The primary text to be inserted is treated as a plain string. - PlainTextTextFormat InsertTextFormat = 1 // line 13660 + PlainTextTextFormat InsertTextFormat = 1 // The primary text to be inserted is treated as a snippet. // // A snippet can define tab stops and placeholders with `$1`, `$2` @@ -5375,7 +5403,7 @@ const ( // that is typing in one will update others too. // // See also: https://microsoft.github.io/language-server-protocol/specifications/specification-current/#snippet_syntax - SnippetTextFormat InsertTextFormat = 2 // line 13665 + SnippetTextFormat InsertTextFormat = 2 // How whitespace and indentation is handled during completion // item insertion. // @@ -5385,7 +5413,7 @@ const ( // inserted using the indentation defined in the string value. // The client will not apply any kind of adjustments to the // string. - AsIs InsertTextMode = 1 // line 13680 + AsIs InsertTextMode = 1 // The editor adjusts leading whitespace of new lines so that // they match the indentation up to the cursor of the line for // which the item is accepted. @@ -5393,20 +5421,20 @@ const ( // Consider a line like this: <2tabs><3tabs>foo. Accepting a // multi line completion item is indented using 2 tabs and all // following lines inserted will be indented using 2 tabs as well. - AdjustIndentation InsertTextMode = 2 // line 13685 + AdjustIndentation InsertTextMode = 2 // A request failed but it was syntactically correct, e.g the // method name was known and the parameters were valid. The error // message should contain human readable information about why // the request failed. // // @since 3.17.0 - RequestFailed LSPErrorCodes = -32803 // line 13183 + RequestFailed LSPErrorCodes = -32803 // The server cancelled the request. 
This error code should // only be used for requests that explicitly support being // server cancellable. // // @since 3.17.0 - ServerCancelled LSPErrorCodes = -32802 // line 13189 + ServerCancelled LSPErrorCodes = -32802 // The server detected that the content of a document got // modified outside normal conditions. A server should // NOT send this error code if it detects a content change @@ -5415,200 +5443,200 @@ const ( // // If a client decides that a result is not of any use anymore // the client should cancel the request. - ContentModified LSPErrorCodes = -32801 // line 13195 + ContentModified LSPErrorCodes = -32801 // The client has canceled a request and a server as detected // the cancel. - RequestCancelled LSPErrorCodes = -32800 // line 13200 + RequestCancelled LSPErrorCodes = -32800 // Describes the content type that a client supports in various // result literals like `Hover`, `ParameterInfo` or `CompletionItem`. // // Please note that `MarkupKinds` must not start with a `$`. This kinds // are reserved for internal usage. // Plain text is supported as a content format - PlainText MarkupKind = "plaintext" // line 13807 + PlainText MarkupKind = "plaintext" // Markdown is supported as a content format - Markdown MarkupKind = "markdown" // line 13812 + Markdown MarkupKind = "markdown" // The message type // An error message. - Error MessageType = 1 // line 13454 + Error MessageType = 1 // A warning message. - Warning MessageType = 2 // line 13459 + Warning MessageType = 2 // An information message. - Info MessageType = 3 // line 13464 + Info MessageType = 3 // A log message. - Log MessageType = 4 // line 13469 + Log MessageType = 4 // The moniker kind. 
// // @since 3.16.0 // The moniker represent a symbol that is imported into a project - Import MonikerKind = "import" // line 13407 + Import MonikerKind = "import" // The moniker represents a symbol that is exported from a project - Export MonikerKind = "export" // line 13412 + Export MonikerKind = "export" // The moniker represents a symbol that is local to a project (e.g. a local // variable of a function, a class not visible outside the project, ...) - Local MonikerKind = "local" // line 13417 + Local MonikerKind = "local" // A notebook cell kind. // // @since 3.17.0 // A markup-cell is formatted source that is used for display. - Markup NotebookCellKind = 1 // line 14070 + Markup NotebookCellKind = 1 // A code-cell is source code. - Code NotebookCellKind = 2 // line 14075 + Code NotebookCellKind = 2 // A set of predefined position encoding kinds. // // @since 3.17.0 // Character offsets count UTF-8 code units (e.g. bytes). - UTF8 PositionEncodingKind = "utf-8" // line 13849 + UTF8 PositionEncodingKind = "utf-8" // Character offsets count UTF-16 code units. // // This is the default and must always be supported // by servers - UTF16 PositionEncodingKind = "utf-16" // line 13854 + UTF16 PositionEncodingKind = "utf-16" // Character offsets count UTF-32 code units. // // Implementation note: these are the same as Unicode codepoints, // so this `PositionEncodingKind` may also be used for an // encoding-agnostic representation of character offsets. - UTF32 PositionEncodingKind = "utf-32" // line 13859 + UTF32 PositionEncodingKind = "utf-32" // The client's default behavior is to select the identifier // according the to language's syntax rule. - Identifier PrepareSupportDefaultBehavior = 1 // line 14144 + Identifier PrepareSupportDefaultBehavior = 1 // Supports creating new files and folders. - Create ResourceOperationKind = "create" // line 14091 + Create ResourceOperationKind = "create" // Supports renaming existing files and folders. 
- Rename ResourceOperationKind = "rename" // line 14096 + Rename ResourceOperationKind = "rename" // Supports deleting existing files and folders. - Delete ResourceOperationKind = "delete" // line 14101 + Delete ResourceOperationKind = "delete" // A set of predefined token modifiers. This set is not fixed // an clients can specify additional token types via the // corresponding client capabilities. // // @since 3.16.0 - ModDeclaration SemanticTokenModifiers = "declaration" // line 13070 - ModDefinition SemanticTokenModifiers = "definition" // line 13074 - ModReadonly SemanticTokenModifiers = "readonly" // line 13078 - ModStatic SemanticTokenModifiers = "static" // line 13082 - ModDeprecated SemanticTokenModifiers = "deprecated" // line 13086 - ModAbstract SemanticTokenModifiers = "abstract" // line 13090 - ModAsync SemanticTokenModifiers = "async" // line 13094 - ModModification SemanticTokenModifiers = "modification" // line 13098 - ModDocumentation SemanticTokenModifiers = "documentation" // line 13102 - ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary" // line 13106 + ModDeclaration SemanticTokenModifiers = "declaration" + ModDefinition SemanticTokenModifiers = "definition" + ModReadonly SemanticTokenModifiers = "readonly" + ModStatic SemanticTokenModifiers = "static" + ModDeprecated SemanticTokenModifiers = "deprecated" + ModAbstract SemanticTokenModifiers = "abstract" + ModAsync SemanticTokenModifiers = "async" + ModModification SemanticTokenModifiers = "modification" + ModDocumentation SemanticTokenModifiers = "documentation" + ModDefaultLibrary SemanticTokenModifiers = "defaultLibrary" // A set of predefined token types. This set is not fixed // an clients can specify additional token types via the // corresponding client capabilities. // // @since 3.16.0 - NamespaceType SemanticTokenTypes = "namespace" // line 12963 + NamespaceType SemanticTokenTypes = "namespace" // Represents a generic type. 
Acts as a fallback for types which can't be mapped to // a specific type like class or enum. - TypeType SemanticTokenTypes = "type" // line 12967 - ClassType SemanticTokenTypes = "class" // line 12972 - EnumType SemanticTokenTypes = "enum" // line 12976 - InterfaceType SemanticTokenTypes = "interface" // line 12980 - StructType SemanticTokenTypes = "struct" // line 12984 - TypeParameterType SemanticTokenTypes = "typeParameter" // line 12988 - ParameterType SemanticTokenTypes = "parameter" // line 12992 - VariableType SemanticTokenTypes = "variable" // line 12996 - PropertyType SemanticTokenTypes = "property" // line 13000 - EnumMemberType SemanticTokenTypes = "enumMember" // line 13004 - EventType SemanticTokenTypes = "event" // line 13008 - FunctionType SemanticTokenTypes = "function" // line 13012 - MethodType SemanticTokenTypes = "method" // line 13016 - MacroType SemanticTokenTypes = "macro" // line 13020 - KeywordType SemanticTokenTypes = "keyword" // line 13024 - ModifierType SemanticTokenTypes = "modifier" // line 13028 - CommentType SemanticTokenTypes = "comment" // line 13032 - StringType SemanticTokenTypes = "string" // line 13036 - NumberType SemanticTokenTypes = "number" // line 13040 - RegexpType SemanticTokenTypes = "regexp" // line 13044 - OperatorType SemanticTokenTypes = "operator" // line 13048 + TypeType SemanticTokenTypes = "type" + ClassType SemanticTokenTypes = "class" + EnumType SemanticTokenTypes = "enum" + InterfaceType SemanticTokenTypes = "interface" + StructType SemanticTokenTypes = "struct" + TypeParameterType SemanticTokenTypes = "typeParameter" + ParameterType SemanticTokenTypes = "parameter" + VariableType SemanticTokenTypes = "variable" + PropertyType SemanticTokenTypes = "property" + EnumMemberType SemanticTokenTypes = "enumMember" + EventType SemanticTokenTypes = "event" + FunctionType SemanticTokenTypes = "function" + MethodType SemanticTokenTypes = "method" + MacroType SemanticTokenTypes = "macro" + KeywordType 
SemanticTokenTypes = "keyword" + ModifierType SemanticTokenTypes = "modifier" + CommentType SemanticTokenTypes = "comment" + StringType SemanticTokenTypes = "string" + NumberType SemanticTokenTypes = "number" + RegexpType SemanticTokenTypes = "regexp" + OperatorType SemanticTokenTypes = "operator" // @since 3.17.0 - DecoratorType SemanticTokenTypes = "decorator" // line 13052 + DecoratorType SemanticTokenTypes = "decorator" // How a signature help was triggered. // // @since 3.15.0 // Signature help was invoked manually by the user or by a command. - SigInvoked SignatureHelpTriggerKind = 1 // line 14002 + SigInvoked SignatureHelpTriggerKind = 1 // Signature help was triggered by a trigger character. - SigTriggerCharacter SignatureHelpTriggerKind = 2 // line 14007 + SigTriggerCharacter SignatureHelpTriggerKind = 2 // Signature help was triggered by the cursor moving or by the document content changing. - SigContentChange SignatureHelpTriggerKind = 3 // line 14012 + SigContentChange SignatureHelpTriggerKind = 3 // A symbol kind. 
- File SymbolKind = 1 // line 13241 - Module SymbolKind = 2 // line 13245 - Namespace SymbolKind = 3 // line 13249 - Package SymbolKind = 4 // line 13253 - Class SymbolKind = 5 // line 13257 - Method SymbolKind = 6 // line 13261 - Property SymbolKind = 7 // line 13265 - Field SymbolKind = 8 // line 13269 - Constructor SymbolKind = 9 // line 13273 - Enum SymbolKind = 10 // line 13277 - Interface SymbolKind = 11 // line 13281 - Function SymbolKind = 12 // line 13285 - Variable SymbolKind = 13 // line 13289 - Constant SymbolKind = 14 // line 13293 - String SymbolKind = 15 // line 13297 - Number SymbolKind = 16 // line 13301 - Boolean SymbolKind = 17 // line 13305 - Array SymbolKind = 18 // line 13309 - Object SymbolKind = 19 // line 13313 - Key SymbolKind = 20 // line 13317 - Null SymbolKind = 21 // line 13321 - EnumMember SymbolKind = 22 // line 13325 - Struct SymbolKind = 23 // line 13329 - Event SymbolKind = 24 // line 13333 - Operator SymbolKind = 25 // line 13337 - TypeParameter SymbolKind = 26 // line 13341 + File SymbolKind = 1 + Module SymbolKind = 2 + Namespace SymbolKind = 3 + Package SymbolKind = 4 + Class SymbolKind = 5 + Method SymbolKind = 6 + Property SymbolKind = 7 + Field SymbolKind = 8 + Constructor SymbolKind = 9 + Enum SymbolKind = 10 + Interface SymbolKind = 11 + Function SymbolKind = 12 + Variable SymbolKind = 13 + Constant SymbolKind = 14 + String SymbolKind = 15 + Number SymbolKind = 16 + Boolean SymbolKind = 17 + Array SymbolKind = 18 + Object SymbolKind = 19 + Key SymbolKind = 20 + Null SymbolKind = 21 + EnumMember SymbolKind = 22 + Struct SymbolKind = 23 + Event SymbolKind = 24 + Operator SymbolKind = 25 + TypeParameter SymbolKind = 26 // Symbol tags are extra annotations that tweak the rendering of a symbol. // // @since 3.16 // Render a symbol as obsolete, usually using a strike-out. - DeprecatedSymbol SymbolTag = 1 // line 13355 + DeprecatedSymbol SymbolTag = 1 // Represents reasons why a text document is saved. 
// Manually triggered, e.g. by the user pressing save, by starting debugging, // or by an API call. - Manual TextDocumentSaveReason = 1 // line 13509 + Manual TextDocumentSaveReason = 1 // Automatic after a delay. - AfterDelay TextDocumentSaveReason = 2 // line 13514 + AfterDelay TextDocumentSaveReason = 2 // When the editor lost focus. - FocusOut TextDocumentSaveReason = 3 // line 13519 + FocusOut TextDocumentSaveReason = 3 // Defines how the host (editor) should sync // document changes to the language server. // Documents should not be synced at all. - None TextDocumentSyncKind = 0 // line 13484 + None TextDocumentSyncKind = 0 // Documents are synced by always sending the full content // of the document. - Full TextDocumentSyncKind = 1 // line 13489 + Full TextDocumentSyncKind = 1 // Documents are synced by sending the full content on open. // After that only incremental updates to the document are // send. - Incremental TextDocumentSyncKind = 2 // line 13494 - Relative TokenFormat = "relative" // line 14158 + Incremental TextDocumentSyncKind = 2 + Relative TokenFormat = "relative" // Turn tracing off. - Off TraceValues = "off" // line 13783 + Off TraceValues = "off" // Trace messages only. - Messages TraceValues = "messages" // line 13788 + Messages TraceValues = "messages" // Verbose message tracing. - Verbose TraceValues = "verbose" // line 13793 + Verbose TraceValues = "verbose" // Moniker uniqueness level to define scope of the moniker. // // @since 3.16.0 // The moniker is only unique inside a document - Document UniquenessLevel = "document" // line 13371 + Document UniquenessLevel = "document" // The moniker is unique inside a project for which a dump got created - Project UniquenessLevel = "project" // line 13376 + Project UniquenessLevel = "project" // The moniker is unique inside the group to which a project belongs - Group UniquenessLevel = "group" // line 13381 + Group UniquenessLevel = "group" // The moniker is unique inside the moniker scheme. 
- Scheme UniquenessLevel = "scheme" // line 13386 + Scheme UniquenessLevel = "scheme" // The moniker is globally unique - Global UniquenessLevel = "global" // line 13391 + Global UniquenessLevel = "global" // Interested in create events. - WatchCreate WatchKind = 1 // line 13901 + WatchCreate WatchKind = 1 // Interested in change events - WatchChange WatchKind = 2 // line 13906 + WatchChange WatchKind = 2 // Interested in delete events - WatchDelete WatchKind = 4 // line 13911 + WatchDelete WatchKind = 4 ) diff --git a/gopls/internal/lsp/references.go b/gopls/internal/lsp/references.go index 09e1e6349a1..cc89b381088 100644 --- a/gopls/internal/lsp/references.go +++ b/gopls/internal/lsp/references.go @@ -23,7 +23,7 @@ func (s *Server) references(ctx context.Context, params *protocol.ReferenceParam if !ok { return nil, err } - if snapshot.View().FileKind(fh) == source.Tmpl { + if snapshot.FileKind(fh) == source.Tmpl { return template.References(ctx, snapshot, fh, params) } return source.References(ctx, snapshot, fh, params.Position, params.Context.IncludeDeclaration) diff --git a/gopls/internal/lsp/regtest/marker.go b/gopls/internal/lsp/regtest/marker.go index 6ae306b3d4e..36dcda39647 100644 --- a/gopls/internal/lsp/regtest/marker.go +++ b/gopls/internal/lsp/regtest/marker.go @@ -132,6 +132,10 @@ var update = flag.Bool("update", false, "if set, update test data during marker // // The following markers are supported within marker tests: // +// - acceptcompletion(location, label, golden): specifies that accepting the +// completion candidate produced at the given location with provided label +// results in the given golden state. +// // - codeaction(kind, start, end, golden): specifies a codeaction to request // for the given range. To support multi-line ranges, the range is defined // to be between start.Start and end.End. The golden directory contains @@ -546,21 +550,22 @@ arity: // Marker funcs should not mutate the test environment (e.g. 
via opening files // or applying edits in the editor). var markerFuncs = map[string]markerFunc{ - "codeaction": makeMarkerFunc(codeActionMarker), - "codeactionerr": makeMarkerFunc(codeActionErrMarker), - "complete": makeMarkerFunc(completeMarker), - "def": makeMarkerFunc(defMarker), - "diag": makeMarkerFunc(diagMarker), - "hover": makeMarkerFunc(hoverMarker), - "format": makeMarkerFunc(formatMarker), - "implementation": makeMarkerFunc(implementationMarker), - "loc": makeMarkerFunc(locMarker), - "rename": makeMarkerFunc(renameMarker), - "renameerr": makeMarkerFunc(renameErrMarker), - "suggestedfix": makeMarkerFunc(suggestedfixMarker), - "symbol": makeMarkerFunc(symbolMarker), - "refs": makeMarkerFunc(refsMarker), - "workspacesymbol": makeMarkerFunc(workspaceSymbolMarker), + "acceptcompletion": makeMarkerFunc(acceptCompletionMarker), + "codeaction": makeMarkerFunc(codeActionMarker), + "codeactionerr": makeMarkerFunc(codeActionErrMarker), + "complete": makeMarkerFunc(completeMarker), + "def": makeMarkerFunc(defMarker), + "diag": makeMarkerFunc(diagMarker), + "hover": makeMarkerFunc(hoverMarker), + "format": makeMarkerFunc(formatMarker), + "implementation": makeMarkerFunc(implementationMarker), + "loc": makeMarkerFunc(locMarker), + "rename": makeMarkerFunc(renameMarker), + "renameerr": makeMarkerFunc(renameErrMarker), + "suggestedfix": makeMarkerFunc(suggestedfixMarker), + "symbol": makeMarkerFunc(symbolMarker), + "refs": makeMarkerFunc(refsMarker), + "workspacesymbol": makeMarkerFunc(workspaceSymbolMarker), } // markerTest holds all the test data extracted from a test txtar archive. @@ -741,8 +746,7 @@ func loadMarkerTest(name string, content []byte) (*markerTest, error) { test.env = make(map[string]string) fields := strings.Fields(string(file.Data)) for _, field := range fields { - // TODO: use strings.Cut once we are on 1.18+. 
- key, value, ok := cut(field, "=") + key, value, ok := strings.Cut(field, "=") if !ok { return nil, fmt.Errorf("env vars must be formatted as var=value, got %q", field) } @@ -750,7 +754,7 @@ func loadMarkerTest(name string, content []byte) (*markerTest, error) { } case strings.HasPrefix(file.Name, "@"): // golden content - id, name, _ := cut(file.Name[len("@"):], "/") + id, name, _ := strings.Cut(file.Name[len("@"):], "/") // Note that a file.Name of just "@id" gives (id, name) = ("id", ""). if _, ok := test.golden[id]; !ok { test.golden[id] = &Golden{ @@ -795,16 +799,6 @@ func loadMarkerTest(name string, content []byte) (*markerTest, error) { return test, nil } -// cut is a copy of strings.Cut. -// -// TODO: once we only support Go 1.18+, just use strings.Cut. -func cut(s, sep string) (before, after string, found bool) { - if i := strings.Index(s, sep); i >= 0 { - return s[:i], s[i+len(sep):], true - } - return s, "", false -} - // formatTest formats the test as a txtar archive. func formatTest(test *markerTest) ([]byte, error) { arch := &txtar.Archive{ @@ -1294,6 +1288,43 @@ func completeMarker(mark marker, src protocol.Location, want ...string) { } } +// acceptCompletionMarker implements the @acceptCompletion marker, running +// textDocument/completion at the given src location and accepting the +// candidate with the given label. The resulting source must match the provided +// golden content. +func acceptCompletionMarker(mark marker, src protocol.Location, label string, golden *Golden) { + list := mark.run.env.Completion(src) + var selected *protocol.CompletionItem + for _, item := range list.Items { + if item.Label == label { + selected = &item + break + } + } + if selected == nil { + mark.errorf("Completion(...) 
did not return an item labeled %q", label) + return + } + filename := mark.run.env.Sandbox.Workdir.URIToPath(mark.uri()) + mapper, err := mark.run.env.Editor.Mapper(filename) + if err != nil { + mark.errorf("Editor.Mapper(%s) failed: %v", filename, err) + return + } + + patched, _, err := source.ApplyProtocolEdits(mapper, append([]protocol.TextEdit{ + *selected.TextEdit, + }, selected.AdditionalTextEdits...)) + + if err != nil { + mark.errorf("ApplyProtocolEdits failed: %v", err) + return + } + changes := map[string][]byte{filename: patched} + // Check the file state. + checkChangedFiles(mark, changes, golden) +} + // defMarker implements the @def marker, running textDocument/definition at // the given src location and asserting that there is exactly one resulting // location, matching dst. diff --git a/gopls/internal/lsp/regtest/regtest.go b/gopls/internal/lsp/regtest/regtest.go index 02c0ad06bb9..7def1d77da7 100644 --- a/gopls/internal/lsp/regtest/regtest.go +++ b/gopls/internal/lsp/regtest/regtest.go @@ -8,7 +8,6 @@ import ( "context" "flag" "fmt" - "io/ioutil" "os" "runtime" "testing" @@ -133,7 +132,7 @@ func Main(m *testing.M, hook func(*source.Options)) { } } - dir, err := ioutil.TempDir("", "gopls-regtest-") + dir, err := os.MkdirTemp("", "gopls-regtest-") if err != nil { panic(fmt.Errorf("creating regtest temp directory: %v", err)) } diff --git a/gopls/internal/lsp/regtest/runner.go b/gopls/internal/lsp/regtest/runner.go index 4f085e720fc..e4aa2f312fa 100644 --- a/gopls/internal/lsp/regtest/runner.go +++ b/gopls/internal/lsp/regtest/runner.go @@ -9,7 +9,6 @@ import ( "context" "fmt" "io" - "io/ioutil" "net" "os" "path/filepath" @@ -370,7 +369,7 @@ func (r *Runner) separateProcessServer(optsHook func(*source.Options)) jsonrpc2. 
} r.startRemoteOnce.Do(func() { - socketDir, err := ioutil.TempDir(r.tempDir, "gopls-regtest-socket") + socketDir, err := os.MkdirTemp(r.tempDir, "gopls-regtest-socket") if err != nil { r.remoteErr = err return diff --git a/gopls/internal/lsp/semantic.go b/gopls/internal/lsp/semantic.go index 021fa5efabb..12ee8dae903 100644 --- a/gopls/internal/lsp/semantic.go +++ b/gopls/internal/lsp/semantic.go @@ -63,13 +63,12 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu if !ok { return nil, err } - vv := snapshot.View() - if !vv.Options().SemanticTokens { + if !snapshot.Options().SemanticTokens { // return an error, so if the option changes // the client won't remember the wrong answer return nil, fmt.Errorf("semantictokens are disabled") } - kind := snapshot.View().FileKind(fh) + kind := snapshot.FileKind(fh) if kind == source.Tmpl { // this is a little cumbersome to avoid both exporting 'encoded' and its methods // and to avoid import cycles @@ -111,8 +110,8 @@ func (s *Server) computeSemanticTokens(ctx context.Context, td protocol.TextDocu fset: pkg.FileSet(), tokTypes: s.session.Options().SemanticTypes, tokMods: s.session.Options().SemanticMods, - noStrings: vv.Options().NoSemanticString, - noNumbers: vv.Options().NoSemanticNumber, + noStrings: snapshot.Options().NoSemanticString, + noNumbers: snapshot.Options().NoSemanticNumber, } if err := e.init(); err != nil { // e.init should never return an error, unless there's some diff --git a/gopls/internal/lsp/source/api_json.go b/gopls/internal/lsp/source/api_json.go index 97f6384ab82..60425db2c5c 100644 --- a/gopls/internal/lsp/source/api_json.go +++ b/gopls/internal/lsp/source/api_json.go @@ -358,6 +358,11 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", Default: "true", }, + { + Name: "\"slog\"", + Doc: "check for 
invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", + Default: "true", + }, { Name: "\"sortslice\"", Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.", @@ -410,7 +415,7 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "\"unusedparams\"", - Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt", + Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or have the name '_' (the blank identifier)\n- functions in test files\n- functions with empty bodies or those with just a return stmt", Default: "false", }, { @@ -1070,6 +1075,12 @@ var GeneratedAPIJSON = &APIJSON{ Doc: "check for slice simplifications\n\nA slice expression of the form:\n\ts[a:len(s)]\nwill be simplified to:\n\ts[a:]\n\nThis is one of the simplifications that \"gofmt -s\" applies.", Default: true, }, + { + Name: "slog", + Doc: "check for invalid structured logging calls\n\nThe slog checker looks for calls to functions from the log/slog\npackage that take 
alternating key-value pairs. It reports calls\nwhere an argument in a key position is neither a string nor a\nslog.Attr, and where a final key is missing its value.\nFor example,it would report\n\n\tslog.Warn(\"message\", 11, \"k\") // slog.Warn arg \"11\" should be a string or a slog.Attr\n\nand\n\n\tslog.Info(\"message\", \"k1\", v1, \"k2\") // call to slog.Info missing a final value", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/slog", + Default: true, + }, { Name: "sortslice", Doc: "check the argument type of sort.Slice\n\nsort.Slice requires an argument of a slice type. Check that\nthe interface{} value passed to sort.Slice is actually a slice.", @@ -1132,7 +1143,7 @@ var GeneratedAPIJSON = &APIJSON{ }, { Name: "unusedparams", - Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or are underscored\n- functions in test files\n- functions with empty bodies or those with just a return stmt", + Doc: "check for unused parameters of functions\n\nThe unusedparams analyzer checks functions to see if there are\nany parameters that are not being used.\n\nTo reduce false positives it ignores:\n- methods\n- parameters that do not have a name or have the name '_' (the blank identifier)\n- functions in test files\n- functions with empty bodies or those with just a return stmt", }, { Name: "unusedresult", diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go index a4095f37832..e0a221f6017 100644 --- a/gopls/internal/lsp/source/completion/completion.go +++ b/gopls/internal/lsp/source/completion/completion.go @@ -508,7 +508,7 @@ func Completion(ctx context.Context, snapshot source.Snapshot, fh source.FileHan scopes := source.CollectScopes(pkg.GetTypesInfo(), path, pos) scopes = append(scopes, 
pkg.GetTypes().Scope(), types.Universe) - opts := snapshot.View().Options() + opts := snapshot.Options() c := &completer{ pkg: pkg, snapshot: snapshot, diff --git a/gopls/internal/lsp/source/completion/format.go b/gopls/internal/lsp/source/completion/format.go index c2d2c0bc035..89c5cb4ae97 100644 --- a/gopls/internal/lsp/source/completion/format.go +++ b/gopls/internal/lsp/source/completion/format.go @@ -183,11 +183,18 @@ Suffixes: if cand.convertTo != nil { typeName := types.TypeString(cand.convertTo, c.qf) - switch cand.convertTo.(type) { + switch t := cand.convertTo.(type) { // We need extra parens when casting to these types. For example, // we need "(*int)(foo)", not "*int(foo)". case *types.Pointer, *types.Signature: typeName = "(" + typeName + ")" + case *types.Basic: + // If the types are incompatible (as determined by typeMatches), then we + // must need a conversion here. However, if the target type is untyped, + // don't suggest converting to e.g. "untyped float" (golang/go#62141). + if t.Info()&types.IsUntyped != 0 { + typeName = types.TypeString(types.Default(cand.convertTo), c.qf) + } } prefix = typeName + "(" + prefix @@ -256,9 +263,9 @@ Suffixes: // TODO(rfindley): It doesn't look like this does the right thing for // multi-line comments. 
if strings.HasPrefix(comment.Text(), "Deprecated") { - if c.snapshot.View().Options().CompletionTags { + if c.snapshot.Options().CompletionTags { item.Tags = []protocol.CompletionItemTag{protocol.ComplDeprecated} - } else if c.snapshot.View().Options().CompletionDeprecated { + } else if c.snapshot.Options().CompletionDeprecated { item.Deprecated = true } } diff --git a/gopls/internal/lsp/source/completion/postfix_snippets.go b/gopls/internal/lsp/source/completion/postfix_snippets.go index c1582e6b379..a10004993b2 100644 --- a/gopls/internal/lsp/source/completion/postfix_snippets.go +++ b/gopls/internal/lsp/source/completion/postfix_snippets.go @@ -194,6 +194,14 @@ for {{.VarName .ElemType "e"}} := range {{.X}} { body: `{{if and (eq .Kind "slice") (eq (.TypeName .ElemType) "string") -}} {{.Import "strings"}}.Join({{.X}}, "{{.Cursor}}") {{- end}}`, +}, { + label: "ifnotnil", + details: "if expr != nil", + body: `{{if and (or (eq .Kind "pointer") (eq .Kind "chan") (eq .Kind "signature") (eq .Kind "interface") (eq .Kind "map") (eq .Kind "slice")) .StmtOK -}} +if {{.X}} != nil {{"{"}} + {{.Cursor}} +{{"}"}} +{{- end}}`, }} // Cursor indicates where the client's cursor should end up after the @@ -211,6 +219,7 @@ func (a *postfixTmplArgs) Import(path string) (string, error) { return "", fmt.Errorf("couldn't import %q: %w", path, err) } a.edits = append(a.edits, edits...) 
+ return name, nil } diff --git a/gopls/internal/lsp/source/diagnostics.go b/gopls/internal/lsp/source/diagnostics.go index ad56253a5a9..ff41c570ddd 100644 --- a/gopls/internal/lsp/source/diagnostics.go +++ b/gopls/internal/lsp/source/diagnostics.go @@ -32,7 +32,7 @@ func Analyze(ctx context.Context, snapshot Snapshot, pkgIDs map[PackageID]unit, return nil, ctx.Err() } - options := snapshot.View().Options() + options := snapshot.Options() categories := []map[string]*Analyzer{ options.DefaultAnalyzers, options.StaticcheckAnalyzers, diff --git a/gopls/internal/lsp/source/fix.go b/gopls/internal/lsp/source/fix.go index f9d901c196c..7a715a8ff5a 100644 --- a/gopls/internal/lsp/source/fix.go +++ b/gopls/internal/lsp/source/fix.go @@ -35,6 +35,8 @@ type ( singleFileFixFunc func(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*analysis.SuggestedFix, error) ) +// These strings identify kinds of suggested fix, both in Analyzer.Fix +// and in the ApplyFix subcommand (see ExecuteCommand and ApplyFixArgs.Fix). 
const ( FillStruct = "fill_struct" StubMethods = "stub_methods" @@ -42,6 +44,7 @@ const ( ExtractVariable = "extract_variable" ExtractFunction = "extract_function" ExtractMethod = "extract_method" + InlineCall = "inline_call" InvertIfCondition = "invert_if_condition" AddEmbedImport = "add_embed_import" ) @@ -51,6 +54,7 @@ var suggestedFixes = map[string]SuggestedFixFunc{ FillStruct: singleFile(fillstruct.SuggestedFix), UndeclaredName: singleFile(undeclaredname.SuggestedFix), ExtractVariable: singleFile(extractVariable), + InlineCall: inlineCall, ExtractFunction: singleFile(extractFunction), ExtractMethod: singleFile(extractMethod), InvertIfCondition: singleFile(invertIfCondition), diff --git a/gopls/internal/lsp/source/format.go b/gopls/internal/lsp/source/format.go index 047edfc4839..6eed4cb9d0b 100644 --- a/gopls/internal/lsp/source/format.go +++ b/gopls/internal/lsp/source/format.go @@ -62,7 +62,7 @@ func Format(ctx context.Context, snapshot Snapshot, fh FileHandle) ([]protocol.T // Apply additional formatting, if any is supported. Currently, the only // supported additional formatter is gofumpt. - if format := snapshot.View().Options().GofumptFormat; snapshot.View().Options().Gofumpt && format != nil { + if format := snapshot.Options().GofumptFormat; snapshot.Options().Gofumpt && format != nil { // gofumpt can customize formatting based on language version and module // path, if available. // @@ -155,7 +155,7 @@ func computeImportEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFil // ComputeOneImportFixEdits returns text edits for a single import fix. func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports.ImportFix) ([]protocol.TextEdit, error) { options := &imports.Options{ - LocalPrefix: snapshot.View().Options().Local, + LocalPrefix: snapshot.Options().Local, // Defaults. 
AllErrors: true, Comments: true, @@ -194,7 +194,7 @@ func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Opti if fixedData == nil || fixedData[len(fixedData)-1] != '\n' { fixedData = append(fixedData, '\n') // ApplyFixes may miss the newline, go figure. } - edits := snapshot.View().Options().ComputeEdits(left, string(fixedData)) + edits := snapshot.Options().ComputeEdits(left, string(fixedData)) return protocolEditsFromSource([]byte(left), edits) } @@ -304,7 +304,7 @@ func computeTextEdits(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile, _, done := event.Start(ctx, "source.computeTextEdits") defer done() - edits := snapshot.View().Options().ComputeEdits(string(pgf.Src), formatted) + edits := snapshot.Options().ComputeEdits(string(pgf.Src), formatted) return ToProtocolEdits(pgf.Mapper, edits) } diff --git a/gopls/internal/lsp/source/gc_annotations.go b/gopls/internal/lsp/source/gc_annotations.go index b1299b8d891..2a21473aaf2 100644 --- a/gopls/internal/lsp/source/gc_annotations.go +++ b/gopls/internal/lsp/source/gc_annotations.go @@ -9,7 +9,6 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" "os" "path/filepath" "strings" @@ -45,7 +44,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) if err := os.MkdirAll(outDir, 0700); err != nil { return nil, err } - tmpFile, err := ioutil.TempFile(os.TempDir(), "gopls-x") + tmpFile, err := os.CreateTemp(os.TempDir(), "gopls-x") if err != nil { return nil, err } @@ -75,7 +74,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) return nil, err } reports := make(map[span.URI][]*Diagnostic) - opts := snapshot.View().Options() + opts := snapshot.Options() var parseError error for _, fn := range files { uri, diagnostics, err := parseDetailsFile(fn, opts) @@ -99,7 +98,7 @@ func GCOptimizationDetails(ctx context.Context, snapshot Snapshot, m *Metadata) } func parseDetailsFile(filename string, options *Options) (span.URI, 
[]*Diagnostic, error) { - buf, err := ioutil.ReadFile(filename) + buf, err := os.ReadFile(filename) if err != nil { return "", nil, err } diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go index 6fc4d792875..a6830751a91 100644 --- a/gopls/internal/lsp/source/hover.go +++ b/gopls/internal/lsp/source/hover.go @@ -71,13 +71,13 @@ func Hover(ctx context.Context, snapshot Snapshot, fh FileHandle, position proto if h == nil { return nil, nil } - hover, err := formatHover(h, snapshot.View().Options()) + hover, err := formatHover(h, snapshot.Options()) if err != nil { return nil, err } return &protocol.Hover{ Contents: protocol.MarkupContent{ - Kind: snapshot.View().Options().PreferredContentFormat, + Kind: snapshot.Options().PreferredContentFormat, Value: hover, }, Range: rng, diff --git a/gopls/internal/lsp/source/implementation.go b/gopls/internal/lsp/source/implementation.go index 25beccf6e1d..d9eb814099b 100644 --- a/gopls/internal/lsp/source/implementation.go +++ b/gopls/internal/lsp/source/implementation.go @@ -17,6 +17,7 @@ import ( "sync" "golang.org/x/sync/errgroup" + "golang.org/x/tools/gopls/internal/bug" "golang.org/x/tools/gopls/internal/lsp/protocol" "golang.org/x/tools/gopls/internal/lsp/safetoken" "golang.org/x/tools/gopls/internal/lsp/source/methodsets" @@ -70,60 +71,36 @@ func Implementation(ctx context.Context, snapshot Snapshot, f FileHandle, pp pro } func implementations(ctx context.Context, snapshot Snapshot, fh FileHandle, pp protocol.Position) ([]protocol.Location, error) { - - // Type-check the query package, find the query identifier, - // and locate the type or method declaration it refers to. - declPosn, err := typeDeclPosition(ctx, snapshot, fh.URI(), pp) - if err != nil { - return nil, err - } - - // Type-check the declaring package (incl. 
variants) for use - // by the "local" search, which uses type information to - // enumerate all types within the package that satisfy the - // query type, even those defined local to a function. - declURI := span.URIFromPath(declPosn.Filename) - declMetas, err := snapshot.MetadataForFile(ctx, declURI) - if err != nil { - return nil, err - } - RemoveIntermediateTestVariants(&declMetas) - if len(declMetas) == 0 { - return nil, fmt.Errorf("no packages for file %s", declURI) - } - ids := make([]PackageID, len(declMetas)) - for i, m := range declMetas { - ids[i] = m.ID - } - localPkgs, err := snapshot.TypeCheck(ctx, ids...) + obj, pkg, err := implementsObj(ctx, snapshot, fh.URI(), pp) if err != nil { return nil, err } - // The narrowest package will do, since the local search is based - // on position and the global search is based on fingerprint. - // (Neither is based on object identity.) - declPkg := localPkgs[0] - declFile, err := declPkg.File(declURI) - if err != nil { - return nil, err // "can't happen" - } - // Find declaration of corresponding object - // in this package based on (URI, offset). - pos, err := safetoken.Pos(declFile.Tok, declPosn.Offset) - if err != nil { - return nil, err - } - // TODO(adonovan): simplify: use objectsAt? - path := pathEnclosingObjNode(declFile.File, pos) - if path == nil { - return nil, ErrNoIdentFound // checked earlier - } - id, ok := path[0].(*ast.Ident) - if !ok { - return nil, ErrNoIdentFound // checked earlier + var localPkgs []Package + if obj.Pos().IsValid() { // no local package for error or error.Error + declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) + // Type-check the declaring package (incl. variants) for use + // by the "local" search, which uses type information to + // enumerate all types within the package that satisfy the + // query type, even those defined local to a function. 
+ declURI := span.URIFromPath(declPosn.Filename) + declMetas, err := snapshot.MetadataForFile(ctx, declURI) + if err != nil { + return nil, err + } + RemoveIntermediateTestVariants(&declMetas) + if len(declMetas) == 0 { + return nil, fmt.Errorf("no packages for file %s", declURI) + } + ids := make([]PackageID, len(declMetas)) + for i, m := range declMetas { + ids[i] = m.ID + } + localPkgs, err = snapshot.TypeCheck(ctx, ids...) + if err != nil { + return nil, err + } } - obj := declPkg.GetTypesInfo().ObjectOf(id) // may be nil // Is the selected identifier a type name or method? // (For methods, report the corresponding method names.) @@ -140,7 +117,7 @@ func implementations(ctx context.Context, snapshot Snapshot, fh FileHandle, pp p } } if queryType == nil { - return nil, fmt.Errorf("%s is not a type or method", id.Name) + return nil, bug.Errorf("%s is not a type or method", obj.Name()) // should have been handled by implementsObj } // Compute the method-set fingerprint used as a key to the global search. @@ -166,8 +143,13 @@ func implementations(ctx context.Context, snapshot Snapshot, fh FileHandle, pp p } RemoveIntermediateTestVariants(&globalMetas) globalIDs := make([]PackageID, 0, len(globalMetas)) + + var pkgPath PackagePath + if obj.Pkg() != nil { // nil for error + pkgPath = PackagePath(obj.Pkg().Path()) + } for _, m := range globalMetas { - if m.PkgPath == declPkg.Metadata().PkgPath { + if m.PkgPath == pkgPath { continue // declaring package is handled by local implementation } globalIDs = append(globalIDs, m.ID) @@ -241,18 +223,19 @@ func offsetToLocation(ctx context.Context, snapshot Snapshot, filename string, s return m.OffsetLocation(start, end) } -// typeDeclPosition returns the position of the declaration of the -// type (or one of its methods) referred to at (uri, ppos). 
-func typeDeclPosition(ctx context.Context, snapshot Snapshot, uri span.URI, ppos protocol.Position) (token.Position, error) { - var noPosn token.Position - +// implementsObj returns the object to query for implementations, which is a +// type name or method. +// +// The returned Package is the narrowest package containing ppos, which is the +// package using the resulting obj but not necessarily the declaring package. +func implementsObj(ctx context.Context, snapshot Snapshot, uri span.URI, ppos protocol.Position) (types.Object, Package, error) { pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, uri) if err != nil { - return noPosn, err + return nil, nil, err } pos, err := pgf.PositionPos(ppos) if err != nil { - return noPosn, err + return nil, nil, err } // This function inherits the limitation of its predecessor in @@ -267,11 +250,11 @@ func typeDeclPosition(ctx context.Context, snapshot Snapshot, uri span.URI, ppos // TODO(adonovan): simplify: use objectsAt? path := pathEnclosingObjNode(pgf.File, pos) if path == nil { - return noPosn, ErrNoIdentFound + return nil, nil, ErrNoIdentFound } id, ok := path[0].(*ast.Ident) if !ok { - return noPosn, ErrNoIdentFound + return nil, nil, ErrNoIdentFound } // Is the object a type or method? Reject other kinds. @@ -287,18 +270,17 @@ func typeDeclPosition(ctx context.Context, snapshot Snapshot, uri span.URI, ppos // ok case *types.Func: if obj.Type().(*types.Signature).Recv() == nil { - return noPosn, fmt.Errorf("%s is a function, not a method", id.Name) + return nil, nil, fmt.Errorf("%s is a function, not a method", id.Name) } case nil: - return noPosn, fmt.Errorf("%s denotes unknown object", id.Name) + return nil, nil, fmt.Errorf("%s denotes unknown object", id.Name) default: // e.g. *types.Var -> "var". 
kind := strings.ToLower(strings.TrimPrefix(reflect.TypeOf(obj).String(), "*types.")) - return noPosn, fmt.Errorf("%s is a %s, not a type", id.Name, kind) + return nil, nil, fmt.Errorf("%s is a %s, not a type", id.Name, kind) } - declPosn := safetoken.StartPosition(pkg.FileSet(), obj.Pos()) - return declPosn, nil + return obj, pkg, nil } // localImplementations searches within pkg for declarations of all diff --git a/gopls/internal/lsp/source/inlay_hint.go b/gopls/internal/lsp/source/inlay_hint.go index f323d56cb2c..f75cd3621e3 100644 --- a/gopls/internal/lsp/source/inlay_hint.go +++ b/gopls/internal/lsp/source/inlay_hint.go @@ -88,7 +88,7 @@ func InlayHint(ctx context.Context, snapshot Snapshot, fh FileHandle, pRng proto } // Collect a list of the inlay hints that are enabled. - inlayHintOptions := snapshot.View().Options().InlayHintOptions + inlayHintOptions := snapshot.Options().InlayHintOptions var enabledHints []InlayHintFunc for hint, enabled := range inlayHintOptions.Hints { if !enabled { diff --git a/gopls/internal/lsp/source/inline.go b/gopls/internal/lsp/source/inline.go new file mode 100644 index 00000000000..6a8d57d412a --- /dev/null +++ b/gopls/internal/lsp/source/inline.go @@ -0,0 +1,113 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package source + +// This file defines the refactor.inline code action. + +import ( + "context" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/gopls/internal/lsp/protocol" + "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/gopls/internal/span" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/refactor/inline" +) + +// EnclosingStaticCall returns the innermost function call enclosing +// the selected range, along with the callee. 
+func EnclosingStaticCall(pkg Package, pgf *ParsedGoFile, rng protocol.Range) (*ast.CallExpr, *types.Func, error) { + start, end, err := pgf.RangePos(rng) + if err != nil { + return nil, nil, err + } + path, _ := astutil.PathEnclosingInterval(pgf.File, start, end) + + var call *ast.CallExpr +loop: + for _, n := range path { + switch n := n.(type) { + case *ast.FuncLit: + break loop + case *ast.CallExpr: + call = n + break loop + } + } + if call == nil { + return nil, nil, fmt.Errorf("no enclosing call") + } + if safetoken.Line(pgf.Tok, call.Lparen) != safetoken.Line(pgf.Tok, start) { + return nil, nil, fmt.Errorf("enclosing call is not on this line") + } + fn := typeutil.StaticCallee(pkg.GetTypesInfo(), call) + if fn == nil { + return nil, nil, fmt.Errorf("not a static call to a Go function") + } + return call, fn, nil +} + +func inlineCall(ctx context.Context, snapshot Snapshot, fh FileHandle, rng protocol.Range) (*token.FileSet, *analysis.SuggestedFix, error) { + // Find enclosing static call. + callerPkg, callerPGF, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) + if err != nil { + return nil, nil, err + } + call, fn, err := EnclosingStaticCall(callerPkg, callerPGF, rng) + if err != nil { + return nil, nil, err + } + + // Locate callee by file/line and analyze it. 
+ calleePosn := safetoken.StartPosition(callerPkg.FileSet(), fn.Pos()) + calleePkg, calleePGF, err := NarrowestPackageForFile(ctx, snapshot, span.URIFromPath(calleePosn.Filename)) + if err != nil { + return nil, nil, err + } + var calleeDecl *ast.FuncDecl + for _, decl := range calleePGF.File.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + posn := safetoken.StartPosition(calleePkg.FileSet(), decl.Name.Pos()) + if posn.Line == calleePosn.Line && posn.Column == calleePosn.Column { + calleeDecl = decl + break + } + } + } + if calleeDecl == nil { + return nil, nil, fmt.Errorf("can't find callee") + } + callee, err := inline.AnalyzeCallee(calleePkg.FileSet(), calleePkg.GetTypes(), calleePkg.GetTypesInfo(), calleeDecl, calleePGF.Src) + if err != nil { + return nil, nil, err + } + + // Inline the call. + caller := &inline.Caller{ + Fset: callerPkg.FileSet(), + Types: callerPkg.GetTypes(), + Info: callerPkg.GetTypesInfo(), + File: callerPGF.File, + Call: call, + Content: callerPGF.Src, + } + got, err := inline.Inline(caller, callee) + if err != nil { + return nil, nil, err + } + + // Suggest the fix. + return callerPkg.FileSet(), &analysis.SuggestedFix{ + Message: fmt.Sprintf("inline call of %v", callee), + TextEdits: diffToTextEdits(callerPGF.Tok, diff.Bytes(callerPGF.Src, got)), + }, nil +} diff --git a/gopls/internal/lsp/source/methodsets/methodsets.go b/gopls/internal/lsp/source/methodsets/methodsets.go index 1ade7402421..d934c3c6907 100644 --- a/gopls/internal/lsp/source/methodsets/methodsets.go +++ b/gopls/internal/lsp/source/methodsets/methodsets.go @@ -455,8 +455,7 @@ func fingerprint(method *types.Func) (string, bool) { // -- serial format of index -- // (The name says gob but in fact we use frob.) -// var packageCodec = frob.For[gobPackage]() -var packageCodec = frob.CodecFor117(new(gobPackage)) +var packageCodec = frob.CodecFor[gobPackage]() // A gobPackage records the method set of each package-level type for a single package. 
type gobPackage struct { diff --git a/gopls/internal/lsp/source/options.go b/gopls/internal/lsp/source/options.go index c2e3223e6c1..2b91f834d6a 100644 --- a/gopls/internal/lsp/source/options.go +++ b/gopls/internal/lsp/source/options.go @@ -39,6 +39,7 @@ import ( "golang.org/x/tools/go/analysis/passes/printf" "golang.org/x/tools/go/analysis/passes/shadow" "golang.org/x/tools/go/analysis/passes/shift" + "golang.org/x/tools/go/analysis/passes/slog" "golang.org/x/tools/go/analysis/passes/sortslice" "golang.org/x/tools/go/analysis/passes/stdmethods" "golang.org/x/tools/go/analysis/passes/stringintconv" @@ -104,6 +105,7 @@ func DefaultOptions() *Options { protocol.SourceOrganizeImports: true, protocol.QuickFix: true, protocol.RefactorRewrite: true, + protocol.RefactorInline: true, protocol.RefactorExtract: true, }, Mod: { @@ -171,7 +173,7 @@ func DefaultOptions() *Options { CompletionDocumentation: true, DeepCompletion: true, ChattyDiagnostics: true, - NewDiff: "both", + NewDiff: "new", SubdirWatchPatterns: SubdirWatchPatternsAuto, ReportAnalysisProgressAfter: 5 * time.Second, }, @@ -1310,14 +1312,6 @@ func (e *SoftError) Error() string { return e.msg } -// softErrorf reports an error that does not affect the functionality of gopls -// (a warning in the UI). -// The formatted message will be shown to the user unmodified. -func (r *OptionResult) softErrorf(format string, values ...interface{}) { - msg := fmt.Sprintf(format, values...) - r.Error = &SoftError{msg} -} - // deprecated reports the current setting as deprecated. If 'replacement' is // non-nil, it is suggested to the user. 
func (r *OptionResult) deprecated(replacement string) { @@ -1556,6 +1550,7 @@ func defaultAnalyzers() map[string]*Analyzer { nilfunc.Analyzer.Name: {Analyzer: nilfunc.Analyzer, Enabled: true}, printf.Analyzer.Name: {Analyzer: printf.Analyzer, Enabled: true}, shift.Analyzer.Name: {Analyzer: shift.Analyzer, Enabled: true}, + slog.Analyzer.Name: {Analyzer: slog.Analyzer, Enabled: true}, stdmethods.Analyzer.Name: {Analyzer: stdmethods.Analyzer, Enabled: true}, stringintconv.Analyzer.Name: {Analyzer: stringintconv.Analyzer, Enabled: true}, structtag.Analyzer.Name: {Analyzer: structtag.Analyzer, Enabled: true}, diff --git a/gopls/internal/lsp/source/origin.go b/gopls/internal/lsp/source/origin.go new file mode 100644 index 00000000000..8ee467e844e --- /dev/null +++ b/gopls/internal/lsp/source/origin.go @@ -0,0 +1,26 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.19 +// +build !go1.19 + +package source + +import "go/types" + +// containsOrigin reports whether the provided object set contains an object +// with the same origin as the provided obj (which may be a synthetic object +// created during instantiation). +func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { + if obj == nil { + return objSet[obj] + } + // In Go 1.18, we can't use the types.Var.Origin and types.Func.Origin methods. + for target := range objSet { + if target.Pkg() == obj.Pkg() && target.Pos() == obj.Pos() && target.Name() == obj.Name() { + return true + } + } + return false +} diff --git a/gopls/internal/lsp/source/origin_119.go b/gopls/internal/lsp/source/origin_119.go new file mode 100644 index 00000000000..a249ce4b1c5 --- /dev/null +++ b/gopls/internal/lsp/source/origin_119.go @@ -0,0 +1,33 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.19 +// +build go1.19 + +package source + +import "go/types" + +// containsOrigin reports whether the provided object set contains an object +// with the same origin as the provided obj (which may be a synthetic object +// created during instantiation). +func containsOrigin(objSet map[types.Object]bool, obj types.Object) bool { + objOrigin := origin(obj) + for target := range objSet { + if origin(target) == objOrigin { + return true + } + } + return false +} + +func origin(obj types.Object) types.Object { + switch obj := obj.(type) { + case *types.Var: + return obj.Origin() + case *types.Func: + return obj.Origin() + } + return obj +} diff --git a/gopls/internal/lsp/source/references.go b/gopls/internal/lsp/source/references.go index 3d923e44702..46459dcbec4 100644 --- a/gopls/internal/lsp/source/references.go +++ b/gopls/internal/lsp/source/references.go @@ -580,10 +580,8 @@ func localReferences(pkg Package, targets map[types.Object]bool, correspond bool // matches reports whether obj either is or corresponds to a target. // (Correspondence is defined as usual for interface methods.) matches := func(obj types.Object) bool { - for target := range targets { - if equalOrigin(obj, target) { - return true - } + if containsOrigin(targets, obj) { + return true } if methodRecvs != nil && obj.Name() == methodName { if orecv := effectiveReceiver(obj); orecv != nil { @@ -611,13 +609,6 @@ func localReferences(pkg Package, targets map[types.Object]bool, correspond bool return nil } -// equalOrigin reports whether obj1 and obj2 have equivalent origin object. -// This may be the case even if obj1 != obj2, if one or both of them is -// instantiated. 
-func equalOrigin(obj1, obj2 types.Object) bool { - return obj1.Pkg() == obj2.Pkg() && obj1.Pos() == obj2.Pos() && obj1.Name() == obj2.Name() -} - // effectiveReceiver returns the effective receiver type for method-set // comparisons for obj, if it is a method, or nil otherwise. func effectiveReceiver(obj types.Object) types.Type { diff --git a/gopls/internal/lsp/source/rename.go b/gopls/internal/lsp/source/rename.go index c1db0e5fd5d..ad6184966f4 100644 --- a/gopls/internal/lsp/source/rename.go +++ b/gopls/internal/lsp/source/rename.go @@ -152,7 +152,7 @@ func PrepareRename(ctx context.Context, snapshot Snapshot, f FileHandle, pp prot func prepareRenamePackageName(ctx context.Context, snapshot Snapshot, pgf *ParsedGoFile) (*PrepareItem, error) { // Does the client support file renaming? fileRenameSupported := false - for _, op := range snapshot.View().Options().SupportedResourceOperations { + for _, op := range snapshot.Options().SupportedResourceOperations { if op == protocol.Rename { fileRenameSupported = true break @@ -727,7 +727,7 @@ func renamePackageName(ctx context.Context, s Snapshot, f FileHandle, newName Pa } // Calculate the edits to be made due to the change. - edits := s.View().Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) + edits := s.Options().ComputeEdits(string(pm.Mapper.Content), string(newContent)) renamingEdits[pm.URI] = append(renamingEdits[pm.URI], edits...) } @@ -1054,13 +1054,7 @@ func (r *renamer) update() (map[span.URI][]diff.Edit, error) { // shouldUpdate reports whether obj is one of (or an // instantiation of one of) the target objects. 
shouldUpdate := func(obj types.Object) bool { - if r.objsToUpdate[obj] { - return true - } - if fn, ok := obj.(*types.Func); ok && r.objsToUpdate[funcOrigin(fn)] { - return true - } - return false + return containsOrigin(r.objsToUpdate, obj) } // Find all identifiers in the package that define or use a diff --git a/gopls/internal/lsp/source/signature_help.go b/gopls/internal/lsp/source/signature_help.go index ce9b2678e46..1420fc3ee15 100644 --- a/gopls/internal/lsp/source/signature_help.go +++ b/gopls/internal/lsp/source/signature_help.go @@ -117,7 +117,7 @@ FindCall: } return &protocol.SignatureInformation{ Label: name + s.Format(), - Documentation: stringToSigInfoDocumentation(s.doc, snapshot.View().Options()), + Documentation: stringToSigInfoDocumentation(s.doc, snapshot.Options()), Parameters: paramInfo, }, activeParam, nil } @@ -134,7 +134,7 @@ func builtinSignature(ctx context.Context, snapshot Snapshot, callExpr *ast.Call activeParam := activeParameter(callExpr, len(sig.params), sig.variadic, pos) return &protocol.SignatureInformation{ Label: sig.name + sig.Format(), - Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.View().Options()), + Documentation: stringToSigInfoDocumentation(sig.doc, snapshot.Options()), Parameters: paramInfo, }, activeParam, nil diff --git a/gopls/internal/lsp/source/stub.go b/gopls/internal/lsp/source/stub.go index b7b2292e4e3..fd2b357032c 100644 --- a/gopls/internal/lsp/source/stub.go +++ b/gopls/internal/lsp/source/stub.go @@ -22,6 +22,7 @@ import ( "golang.org/x/tools/gopls/internal/lsp/analysis/stubmethods" "golang.org/x/tools/gopls/internal/lsp/protocol" "golang.org/x/tools/gopls/internal/lsp/safetoken" + "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/tokeninternal" "golang.org/x/tools/internal/typeparams" ) @@ -125,6 +126,12 @@ func stub(ctx context.Context, snapshot Snapshot, si *stubmethods.StubInfo) (*to var newImports []newImport // for AddNamedImport qual := func(pkg *types.Package) string { 
// TODO(adonovan): don't ignore vendor prefix. + // + // Ignore the current package import. + if pkg.Path() == conc.Pkg().Path() { + return "" + } + importPath := ImportPath(pkg.Path()) name, ok := importEnv[importPath] if !ok { @@ -224,16 +231,20 @@ func (%s%s%s) %s%s { } // Report the diff. - diffs := snapshot.View().Options().ComputeEdits(string(input), output.String()) - var edits []analysis.TextEdit + diffs := snapshot.Options().ComputeEdits(string(input), output.String()) + return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok + &analysis.SuggestedFix{TextEdits: diffToTextEdits(declPGF.Tok, diffs)}, + nil +} + +func diffToTextEdits(tok *token.File, diffs []diff.Edit) []analysis.TextEdit { + edits := make([]analysis.TextEdit, 0, len(diffs)) for _, edit := range diffs { edits = append(edits, analysis.TextEdit{ - Pos: declPGF.Tok.Pos(edit.Start), - End: declPGF.Tok.Pos(edit.End), + Pos: tok.Pos(edit.Start), + End: tok.Pos(edit.End), NewText: []byte(edit.New), }) } - return tokeninternal.FileSetFor(declPGF.Tok), // edits use declPGF.Tok - &analysis.SuggestedFix{TextEdits: edits}, - nil + return edits } diff --git a/gopls/internal/lsp/source/typerefs/refs.go b/gopls/internal/lsp/source/typerefs/refs.go index 2f6b1d92ee4..9adbb88fe4c 100644 --- a/gopls/internal/lsp/source/typerefs/refs.go +++ b/gopls/internal/lsp/source/typerefs/refs.go @@ -738,8 +738,7 @@ func assert(cond bool, msg string) { // -- serialization -- // (The name says gob but in fact we use frob.) 
-// var classesCodec = frob.For[gobClasses]() -var classesCodec = frob.CodecFor117(new(gobClasses)) +var classesCodec = frob.CodecFor[gobClasses]() type gobClasses struct { Strings []string // table of strings (PackageIDs and names) diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/lsp/source/types_format.go index 3c371711967..1fcad26bb11 100644 --- a/gopls/internal/lsp/source/types_format.go +++ b/gopls/internal/lsp/source/types_format.go @@ -115,7 +115,7 @@ func NewBuiltinSignature(ctx context.Context, s Snapshot, name string) (*signatu params, _ := formatFieldList(ctx, fset, decl.Type.Params, variadic) results, needResultParens := formatFieldList(ctx, fset, decl.Type.Results, false) d := decl.Doc.Text() - switch s.View().Options().HoverKind { + switch s.Options().HoverKind { case SynopsisDocumentation: d = doc.Synopsis(d) case NoDocumentation: @@ -245,7 +245,7 @@ func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signa if comment != nil { d = comment.Text() } - switch s.View().Options().HoverKind { + switch s.Options().HoverKind { case SynopsisDocumentation: d = doc.Synopsis(d) case NoDocumentation: diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go index b47e5b800ce..fe51cf0e5b6 100644 --- a/gopls/internal/lsp/source/view.go +++ b/gopls/internal/lsp/source/view.go @@ -58,6 +58,18 @@ type Snapshot interface { // subsequent snapshots in a view may not have adjacent global IDs. GlobalID() GlobalSnapshotID + // FileKind returns the type of a file. + // + // We can't reliably deduce the kind from the file name alone, + // as some editors can be told to interpret a buffer as + // language different from the file name heuristic, e.g. that + // an .html file actually contains Go "html/template" syntax, + // or even that a .go file contains Python. + FileKind(FileHandle) FileKind + + // Options returns the options associated with this snapshot. 
+ Options() *Options + // View returns the View associated with this snapshot. View() View @@ -353,9 +365,6 @@ type View interface { // Folder returns the folder with which this view was created. Folder() span.URI - // Options returns a copy of the Options for this view. - Options() *Options - // Snapshot returns the current snapshot for the view, and a // release function that must be called when the Snapshot is // no longer needed. @@ -388,15 +397,6 @@ type View interface { // required by modfile. SetVulnerabilities(modfile span.URI, vulncheckResult *govulncheck.Result) - // FileKind returns the type of a file. - // - // We can't reliably deduce the kind from the file name alone, - // as some editors can be told to interpret a buffer as - // language different from the file name heuristic, e.g. that - // an .html file actually contains Go "html/template" syntax, - // or even that a .go file contains Python. - FileKind(FileHandle) FileKind - // GoVersion returns the configured Go version for this view. GoVersion() int diff --git a/gopls/internal/lsp/source/workspace_symbol.go b/gopls/internal/lsp/source/workspace_symbol.go index eb774a5df53..c656889fdb6 100644 --- a/gopls/internal/lsp/source/workspace_symbol.go +++ b/gopls/internal/lsp/source/workspace_symbol.go @@ -313,12 +313,12 @@ func collectSymbols(ctx context.Context, views []View, matcherType SymbolMatcher // whether a URI is in any open workspace. 
roots = append(roots, strings.TrimRight(string(v.Folder()), "/")) - filters := v.Options().DirectoryFilters + filters := snapshot.Options().DirectoryFilters filterer := NewFilterer(filters) folder := filepath.ToSlash(v.Folder().Filename()) workspaceOnly := true - if v.Options().SymbolScope == AllSymbolScope { + if snapshot.Options().SymbolScope == AllSymbolScope { workspaceOnly = false } symbols, err := snapshot.Symbols(ctx, workspaceOnly) diff --git a/gopls/internal/lsp/source/xrefs/xrefs.go b/gopls/internal/lsp/source/xrefs/xrefs.go index 0a8d5741157..88f76b1eb64 100644 --- a/gopls/internal/lsp/source/xrefs/xrefs.go +++ b/gopls/internal/lsp/source/xrefs/xrefs.go @@ -172,8 +172,7 @@ func Lookup(m *source.Metadata, data []byte, targets map[source.PackagePath]map[ // The gobRef.Range field is the obvious place to begin. // (The name says gob but in fact we use frob.) -// var packageCodec = frob.For[[]*gobPackage]() -var packageCodec = frob.CodecFor117(new([]*gobPackage)) +var packageCodec = frob.CodecFor[[]*gobPackage]() // A gobPackage records the set of outgoing references from the index // package to symbols defined in a dependency package. diff --git a/gopls/internal/lsp/symbols.go b/gopls/internal/lsp/symbols.go index b31d980484c..18bae059e79 100644 --- a/gopls/internal/lsp/symbols.go +++ b/gopls/internal/lsp/symbols.go @@ -24,7 +24,7 @@ func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSy return []interface{}{}, err } var docSymbols []protocol.DocumentSymbol - switch snapshot.View().FileKind(fh) { + switch snapshot.FileKind(fh) { case source.Tmpl: docSymbols, err = template.DocumentSymbols(snapshot, fh) case source.Go: @@ -40,7 +40,7 @@ func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSy // TODO: Remove this once the lsp deprecates SymbolInformation. 
symbols := make([]interface{}, len(docSymbols)) for i, s := range docSymbols { - if snapshot.View().Options().HierarchicalDocumentSymbolSupport { + if snapshot.Options().HierarchicalDocumentSymbolSupport { symbols[i] = s continue } diff --git a/gopls/internal/lsp/testdata/builtins/builtin_go121.go b/gopls/internal/lsp/testdata/builtins/builtin_go121.go index a52d168636e..14f59def9ac 100644 --- a/gopls/internal/lsp/testdata/builtins/builtin_go121.go +++ b/gopls/internal/lsp/testdata/builtins/builtin_go121.go @@ -1,5 +1,5 @@ -//go:build go1.21 -// +build go1.21 +//go:build go1.21 && !go1.22 && ignore +// +build go1.21,!go1.22,ignore package builtins diff --git a/gopls/internal/lsp/testdata/builtins/builtin_go122.go b/gopls/internal/lsp/testdata/builtins/builtin_go122.go new file mode 100644 index 00000000000..f799c1225a1 --- /dev/null +++ b/gopls/internal/lsp/testdata/builtins/builtin_go122.go @@ -0,0 +1,8 @@ +//go:build go1.22 && ignore +// +build go1.22,ignore + +package builtins + +func _() { + //@complete("", any, append, bool, byte, cap, clear, close, comparable, complex, complex128, complex64, copy, delete, error, _false, float32, float64, imag, int, int16, int32, int64, int8, len, make, max, min, new, panic, print, println, real, recover, rune, string, _true, uint, uint16, uint32, uint64, uint8, uintptr, zero, _nil) +} diff --git a/gopls/internal/lsp/testdata/builtins/builtins.go b/gopls/internal/lsp/testdata/builtins/builtins.go index 2e3361c7e6d..a6450362a78 100644 --- a/gopls/internal/lsp/testdata/builtins/builtins.go +++ b/gopls/internal/lsp/testdata/builtins/builtins.go @@ -47,3 +47,4 @@ package builtins /* uint64 */ //@item(uint64, "uint64", "", "type") /* uint8 */ //@item(uint8, "uint8", "", "type") /* uintptr */ //@item(uintptr, "uintptr", "", "type") +/* zero */ //@item(zero, "zero", "", "var") diff --git a/gopls/internal/lsp/testdata/snippets/postfix.go b/gopls/internal/lsp/testdata/snippets/postfix.go index d29694e835f..78a091ada5c 100644 --- 
a/gopls/internal/lsp/testdata/snippets/postfix.go +++ b/gopls/internal/lsp/testdata/snippets/postfix.go @@ -34,9 +34,10 @@ func _() { /* reverse! */ //@item(postfixReverse, "reverse!", "reverse slice", "snippet") /* sort! */ //@item(postfixSort, "sort!", "sort.Slice()", "snippet") /* var! */ //@item(postfixVar, "var!", "assign to variable", "snippet") + /* ifnotnil! */ //@item(postfixIfNotNil, "ifnotnil!", "if expr != nil", "snippet") var foo []int - foo. //@complete(" //", postfixAppend, postfixCopy, postfixLast, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixVar) + foo. //@complete(" //", postfixAppend, postfixCopy, postfixIfNotNil, postfixLast, postfixPrint, postfixRange, postfixReverse, postfixSort, postfixVar) foo = nil } diff --git a/gopls/internal/lsp/testdata/summary_go1.21.txt.golden b/gopls/internal/lsp/testdata/summary_go1.21.txt.golden index 8d6a32bb986..619c25ba757 100644 --- a/gopls/internal/lsp/testdata/summary_go1.21.txt.golden +++ b/gopls/internal/lsp/testdata/summary_go1.21.txt.golden @@ -1,7 +1,7 @@ -- summary -- CallHierarchyCount = 2 CodeLensCount = 5 -CompletionsCount = 264 +CompletionsCount = 263 CompletionSnippetCount = 115 UnimportedCompletionsCount = 5 DeepCompletionsCount = 5 diff --git a/gopls/internal/lsp/tests/tests.go b/gopls/internal/lsp/tests/tests.go index 9ab114e4834..4f5fc3c5080 100644 --- a/gopls/internal/lsp/tests/tests.go +++ b/gopls/internal/lsp/tests/tests.go @@ -230,6 +230,7 @@ func DefaultOptions(o *source.Options) { protocol.SourceOrganizeImports: true, protocol.QuickFix: true, protocol.RefactorRewrite: true, + protocol.RefactorInline: true, protocol.RefactorExtract: true, protocol.SourceFixAll: true, }, @@ -246,7 +247,7 @@ func DefaultOptions(o *source.Options) { o.CompletionBudget = time.Minute o.HierarchicalDocumentSymbolSupport = true o.SemanticTokens = true - o.InternalOptions.NewDiff = "both" + o.InternalOptions.NewDiff = "new" } func RunTests(t *testing.T, dataDir string, includeMultiModule bool, f 
func(*testing.T, *Data)) { @@ -1158,38 +1159,6 @@ func SpanName(spn span.Span) string { return fmt.Sprintf("%v_%v_%v", uriName(spn.URI()), spn.Start().Line(), spn.Start().Column()) } -func CopyFolderToTempDir(folder string) (string, error) { - if _, err := os.Stat(folder); err != nil { - return "", err - } - dst, err := ioutil.TempDir("", "modfile_test") - if err != nil { - return "", err - } - fds, err := ioutil.ReadDir(folder) - if err != nil { - return "", err - } - for _, fd := range fds { - srcfp := filepath.Join(folder, fd.Name()) - stat, err := os.Stat(srcfp) - if err != nil { - return "", err - } - if !stat.Mode().IsRegular() { - return "", fmt.Errorf("cannot copy non regular file %s", srcfp) - } - contents, err := ioutil.ReadFile(srcfp) - if err != nil { - return "", err - } - if err := ioutil.WriteFile(filepath.Join(dst, fd.Name()), contents, stat.Mode()); err != nil { - return "", err - } - } - return dst, nil -} - func shouldSkip(data *Data, uri span.URI) bool { if data.ModfileFlagAvailable { return false diff --git a/gopls/internal/lsp/tests/util_go122.go b/gopls/internal/lsp/tests/util_go122.go new file mode 100644 index 00000000000..90ae029766a --- /dev/null +++ b/gopls/internal/lsp/tests/util_go122.go @@ -0,0 +1,12 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.22 +// +build go1.22 + +package tests + +func init() { + builtins["zero"] = true +} diff --git a/gopls/internal/lsp/text_synchronization.go b/gopls/internal/lsp/text_synchronization.go index 5c6ebcc086b..5584287a78c 100644 --- a/gopls/internal/lsp/text_synchronization.go +++ b/gopls/internal/lsp/text_synchronization.go @@ -280,7 +280,7 @@ func (s *Server) didModifyFiles(ctx context.Context, modifications []source.File for snapshot, uris := range snapshots { for _, uri := range uris { mod := modMap[uri] - if snapshot.View().Options().ChattyDiagnostics || mod.Action == source.Open || mod.Action == source.Close { + if snapshot.Options().ChattyDiagnostics || mod.Action == source.Open || mod.Action == source.Close { s.mustPublishDiagnostics(uri) } } diff --git a/gopls/internal/lsp/work/format.go b/gopls/internal/lsp/work/format.go index e852eb4d27e..70cbe59d174 100644 --- a/gopls/internal/lsp/work/format.go +++ b/gopls/internal/lsp/work/format.go @@ -23,6 +23,6 @@ func Format(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle) } formatted := modfile.Format(pw.File.Syntax) // Calculate the edits to be made due to the change. 
- diffs := snapshot.View().Options().ComputeEdits(string(pw.Mapper.Content), string(formatted)) + diffs := snapshot.Options().ComputeEdits(string(pw.Mapper.Content), string(formatted)) return source.ToProtocolEdits(pw.Mapper, diffs) } diff --git a/gopls/internal/lsp/work/hover.go b/gopls/internal/lsp/work/hover.go index 558eebc824b..d777acdf3b4 100644 --- a/gopls/internal/lsp/work/hover.go +++ b/gopls/internal/lsp/work/hover.go @@ -62,7 +62,7 @@ func Hover(ctx context.Context, snapshot source.Snapshot, fh source.FileHandle, if err != nil { return nil, err } - options := snapshot.View().Options() + options := snapshot.Options() return &protocol.Hover{ Contents: protocol.MarkupContent{ Kind: options.PreferredContentFormat, diff --git a/gopls/internal/regtest/bench/completion_test.go b/gopls/internal/regtest/bench/completion_test.go index a0cf5a043f9..0400e70b0bd 100644 --- a/gopls/internal/regtest/bench/completion_test.go +++ b/gopls/internal/regtest/bench/completion_test.go @@ -180,6 +180,18 @@ func (kl *Kubelet) _() { `, `kl\.()`, }, + { + "oracle", + "dataintegration/pivot2.go", + ` +package dataintegration + +func (p *Pivot) _() { + p. 
+} +`, + `p\.()`, + }, } for _, test := range tests { diff --git a/gopls/internal/regtest/bench/didchange_test.go b/gopls/internal/regtest/bench/didchange_test.go index 27856f3031e..56da0ae7a68 100644 --- a/gopls/internal/regtest/bench/didchange_test.go +++ b/gopls/internal/regtest/bench/didchange_test.go @@ -20,19 +20,20 @@ import ( var editID int64 = time.Now().UnixNano() type changeTest struct { - repo string - file string + repo string + file string + canSave bool } var didChangeTests = []changeTest{ - {"google-cloud-go", "internal/annotate.go"}, - {"istio", "pkg/fuzz/util.go"}, - {"kubernetes", "pkg/controller/lookup_cache.go"}, - {"kuma", "api/generic/insights.go"}, - {"oracle", "dataintegration/data_type.go"}, // diagnoseSave fails because this package is generated - {"pkgsite", "internal/frontend/server.go"}, - {"starlark", "starlark/eval.go"}, - {"tools", "internal/lsp/cache/snapshot.go"}, + {"google-cloud-go", "internal/annotate.go", true}, + {"istio", "pkg/fuzz/util.go", true}, + {"kubernetes", "pkg/controller/lookup_cache.go", true}, + {"kuma", "api/generic/insights.go", true}, + {"oracle", "dataintegration/data_type.go", false}, // diagnoseSave fails because this package is generated + {"pkgsite", "internal/frontend/server.go", true}, + {"starlark", "starlark/eval.go", true}, + {"tools", "internal/lsp/cache/snapshot.go", true}, } // BenchmarkDidChange benchmarks modifications of a single file by making @@ -89,6 +90,9 @@ func BenchmarkDiagnoseSave(b *testing.B) { // await the resulting diagnostics pass. If save is set, the file is also saved. 
func runChangeDiagnosticsBenchmark(b *testing.B, test changeTest, save bool, operation string) { b.Run(test.repo, func(b *testing.B) { + if !test.canSave { + b.Skipf("skipping as %s cannot be saved", test.file) + } sharedEnv := getRepo(b, test.repo).sharedEnv(b) config := fake.EditorConfig{ Env: map[string]string{ diff --git a/gopls/internal/regtest/bench/repo_test.go b/gopls/internal/regtest/bench/repo_test.go index c3b8b3bace9..3a4575e65c4 100644 --- a/gopls/internal/regtest/bench/repo_test.go +++ b/gopls/internal/regtest/bench/repo_test.go @@ -65,6 +65,7 @@ var repos = map[string]*repo{ name: "oracle", url: "https://github.com/oracle/oci-go-sdk.git", commit: "v65.43.0", + short: true, inDir: flag.String("oracle_dir", "", "if set, reuse this directory as oracle/oci-go-sdk@v65.43.0"), }, diff --git a/gopls/internal/regtest/codelens/codelens_test.go b/gopls/internal/regtest/codelens/codelens_test.go index 8f718855f66..b72e598c913 100644 --- a/gopls/internal/regtest/codelens/codelens_test.go +++ b/gopls/internal/regtest/codelens/codelens_test.go @@ -199,13 +199,25 @@ require golang.org/x/hello v1.2.3 } for _, vendoring := range []bool{false, true} { t.Run(fmt.Sprintf("Upgrade individual dependency vendoring=%v", vendoring), func(t *testing.T) { - WithOptions(ProxyFiles(proxyWithLatest)).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { + WithOptions( + ProxyFiles(proxyWithLatest), + ).Run(t, shouldUpdateDep, func(t *testing.T, env *Env) { if vendoring { env.RunGoCommandInDirWithEnv("a", []string{"GOWORK=off"}, "mod", "vendor") } env.AfterChange() env.OpenFile("a/go.mod") env.OpenFile("b/go.mod") + + // Await the diagnostics resulting from opening the modfiles, because + // otherwise they may cause races when running asynchronously to the + // explicit re-diagnosing below. + // + // TODO(golang/go#58750): there is still a race here, inherent to + // accessing state on the View; we should create a new snapshot when + // the view diagnostics change. 
+ env.AfterChange() + env.ExecuteCodeLensCommand("a/go.mod", command.CheckUpgrades, nil) d := &protocol.PublishDiagnosticsParams{} env.OnceMet( diff --git a/gopls/internal/regtest/completion/postfix_snippet_test.go b/gopls/internal/regtest/completion/postfix_snippet_test.go index df69703ee26..bfaa8f664f4 100644 --- a/gopls/internal/regtest/completion/postfix_snippet_test.go +++ b/gopls/internal/regtest/completion/postfix_snippet_test.go @@ -430,6 +430,132 @@ func foo() string { return strings.Join(x, "$0") }`, }, + { + name: "if not nil interface", + before: ` +package foo + +func _() { + var foo error + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo error + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil pointer", + before: ` +package foo + +func _() { + var foo *int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo *int + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil slice", + before: ` +package foo + +func _() { + var foo []int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo []int + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil map", + before: ` +package foo + +func _() { + var foo map[string]any + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo map[string]any + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil channel", + before: ` +package foo + +func _() { + var foo chan int + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo chan int + if foo != nil { + $0 +} +} +`, + }, + { + name: "if not nil function", + before: ` +package foo + +func _() { + var foo func() + foo.ifnotnil +} +`, + after: ` +package foo + +func _() { + var foo func() + if foo != nil { + $0 +} +} +`, + }, } r := WithOptions( diff --git a/gopls/internal/regtest/diagnostics/diagnostics_test.go b/gopls/internal/regtest/diagnostics/diagnostics_test.go index 623cd724cec..8066b7502c2 100644 --- 
a/gopls/internal/regtest/diagnostics/diagnostics_test.go +++ b/gopls/internal/regtest/diagnostics/diagnostics_test.go @@ -1307,7 +1307,14 @@ func _() { env.OpenFile("a/a_exclude.go") loadOnce := LogMatching(protocol.Info, "query=.*file=.*a_exclude.go", 1, false) - env.Await(loadOnce) // can't use OnceMet or AfterChange as logs are async + + // can't use OnceMet or AfterChange as logs are async + env.Await(loadOnce) + // ...but ensure that the change has been fully processed before editing. + // Otherwise, there may be a race where the snapshot is cloned before all + // state changes resulting from the load have been processed + // (golang/go#61521). + env.AfterChange() // Check that orphaned files are not reloaded, by making a change in // a.go file and confirming that the workspace diagnosis did not reload diff --git a/gopls/internal/regtest/marker/testdata/codeaction/inline.txt b/gopls/internal/regtest/marker/testdata/codeaction/inline.txt new file mode 100644 index 00000000000..8f1ea924864 --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/codeaction/inline.txt @@ -0,0 +1,23 @@ +This is a minimal test of the refactor.inline code action. + +-- go.mod -- +module testdata/codeaction +go 1.18 + +-- a/a.go -- +package a + +func _() { + println(add(1, 2)) //@codeaction("refactor.inline", "add", ")", inline) +} + +func add(x, y int) int { return x + y } + +-- @inline/a/a.go -- +package a + +func _() { + println(func(x, y int) int { return x + y }(1, 2)) //@codeaction("refactor.inline", "add", ")", inline) +} + +func add(x, y int) int { return x + y } diff --git a/gopls/internal/regtest/marker/testdata/completion/issue62141.txt b/gopls/internal/regtest/marker/testdata/completion/issue62141.txt new file mode 100644 index 00000000000..877e59d0b7c --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/completion/issue62141.txt @@ -0,0 +1,39 @@ +This test checks that we don't suggest completion to an untyped conversion such +as "untyped float(abcdef)". 
+ +-- main.go -- +package main + +func main() { + abcdef := 32 //@diag("abcdef", re"not used") + x := 1.0 / abcd //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") + + // Verify that we don't suggest converting compatible untyped constants. + const untypedConst = 42 + y := 1.1 / untypedC //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +} + +-- @int/main.go -- +package main + +func main() { + abcdef := 32 //@diag("abcdef", re"not used") + x := 1.0 / float64(abcdef) //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") + + // Verify that we don't suggest converting compatible untyped constants. + const untypedConst = 42 + y := 1.1 / untypedC //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +} + +-- @untyped/main.go -- +package main + +func main() { + abcdef := 32 //@diag("abcdef", re"not used") + x := 1.0 / abcd //@acceptcompletion(re"abcd()", "abcdef", int), diag("x", re"not used"), diag("abcd", re"(undefined|undeclared)") + + // Verify that we don't suggest converting compatible untyped constants. + const untypedConst = 42 + y := 1.1 / untypedConst //@acceptcompletion(re"untypedC()", "untypedConst", untyped), diag("y", re"not used"), diag("untypedC", re"(undefined|undeclared)") +} + diff --git a/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt index 6e7e4650578..e98674b94f4 100644 --- a/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt +++ b/gopls/internal/regtest/marker/testdata/diagnostics/analyzers.txt @@ -1,24 +1,32 @@ Test of warning diagnostics from various analyzers: -tests, copylocks, printf, and timeformat. +copylocks, printf, slog, tests, and timeformat. 
-- go.mod -- module example.com go 1.12 +-- flags -- +-min_go=go1.21 + -- bad_test.go -- package analyzer import ( "fmt" + "log/slog" "sync" "testing" "time" ) -func Testbad(t *testing.T) { //@diag("", re"Testbad has malformed name: first letter after 'Test' must not be lowercase") +// copylocks +func _() { var x sync.Mutex _ = x //@diag("x", re"assignment copies lock value to _: sync.Mutex") +} +// printf +func _() { printfWrapper("%s") //@diag(re`printfWrapper\(.*\)`, re"example.com.printfWrapper format %s reads arg #1, but call has 0 args") } @@ -26,7 +34,18 @@ func printfWrapper(format string, args ...interface{}) { fmt.Printf(format, args...) } +// slog +func _() { + slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) +} + +// tests +func Testbad(t *testing.T) { //@diag("", re"Testbad has malformed name: first letter after 'Test' must not be lowercase") +} + +// timeformat func _() { now := time.Now() fmt.Println(now.Format("2006-02-01")) //@diag("2006-02-01", re"2006-02-01 should be 2006-01-02") } + diff --git a/gopls/internal/regtest/marker/testdata/implementation/issue43655.txt b/gopls/internal/regtest/marker/testdata/implementation/issue43655.txt new file mode 100644 index 00000000000..a7f1d57f80d --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/implementation/issue43655.txt @@ -0,0 +1,22 @@ +This test verifies that we find implementations of the built-in error interface.
+ +-- go.mod -- +module example.com +go 1.12 + +-- p.go -- +package p + +type errA struct{ error } //@loc(errA, "errA") + +type errB struct{} //@loc(errB, "errB") +func (errB) Error() string{ return "" } //@loc(errBError, "Error") + +type notAnError struct{} +func (notAnError) Error() int { return 0 } + +func _() { + var _ error //@implementation("error", errA, errB) + var a errA + _ = a.Error //@implementation("Error", errBError) +} diff --git a/gopls/internal/regtest/marker/testdata/rename/issue61640.txt b/gopls/internal/regtest/marker/testdata/rename/issue61640.txt new file mode 100644 index 00000000000..91c2b76933d --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/rename/issue61640.txt @@ -0,0 +1,47 @@ +This test verifies that gopls can rename instantiated fields. + +-- flags -- +-min_go=go1.18 + +-- a.go -- +package a + +// This file is adapted from the example in the issue. + +type builder[S ~[]int] struct { + elements S //@rename("elements", elements2, OneToTwo) +} + +type BuilderImpl[S ~[]int] struct{ builder[S] } + +func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { + impl := &BuilderImpl[S]{ + builder[S]{ + elements: S{}, + }, + } + + _ = impl.elements + return impl +} +-- @OneToTwo/a.go -- +package a + +// This file is adapted from the example in the issue. + +type builder[S ~[]int] struct { + elements2 S //@rename("elements", elements2, OneToTwo) +} + +type BuilderImpl[S ~[]int] struct{ builder[S] } + +func NewBuilderImpl[S ~[]int](name string) *BuilderImpl[S] { + impl := &BuilderImpl[S]{ + builder[S]{ + elements2: S{}, + }, + } + + _ = impl.elements2 + return impl +} diff --git a/gopls/internal/regtest/marker/testdata/rename/issue61813.txt b/gopls/internal/regtest/marker/testdata/rename/issue61813.txt new file mode 100644 index 00000000000..ae5162b84a4 --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/rename/issue61813.txt @@ -0,0 +1,18 @@ +This test exercises the panic reported in golang/go#61813. 
+ +-- p.go -- +package p + +type P struct{} + +func (P) M() {} //@rename("M", N, MToN) + +var x = []*P{{}} +-- @MToN/p.go -- +package p + +type P struct{} + +func (P) N() {} //@rename("M", N, MToN) + +var x = []*P{{}} diff --git a/gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt b/gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt new file mode 100644 index 00000000000..43633557d89 --- /dev/null +++ b/gopls/internal/regtest/marker/testdata/stubmethods/issue61830.txt @@ -0,0 +1,36 @@ +This test verifies that method stubbing qualifies types relative to the current +package. + +-- p.go -- +package p + +import "io" + +type B struct{} + +type I interface { + M(io.Reader, B) +} + +type A struct{} + +var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", "quickfix", stub) +-- @stub/p.go -- +package p + +import "io" + +type B struct{} + +type I interface { + M(io.Reader, B) +} + +type A struct{} + +// M implements I. +func (*A) M(io.Reader, B) { + panic("unimplemented") +} + +var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", "quickfix", stub) diff --git a/gopls/internal/regtest/modfile/tempmodfile_test.go b/gopls/internal/regtest/modfile/tempmodfile_test.go new file mode 100644 index 00000000000..8b0926ab422 --- /dev/null +++ b/gopls/internal/regtest/modfile/tempmodfile_test.go @@ -0,0 +1,41 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modfile + +import ( + "testing" + + . "golang.org/x/tools/gopls/internal/lsp/regtest" +) + +// This test replaces an older, problematic test (golang/go#57784). But it has +// been a long time since the go command would mutate go.mod files. +// +// TODO(golang/go#61970): the tempModfile setting should be removed entirely. +func TestTempModfileUnchanged(t *testing.T) { + // badMod has a go.mod file that is missing a go directive. 
+ const badMod = ` +-- go.mod -- +module badmod.test/p +-- p.go -- +package p +` + + WithOptions( + Modes(Default), // no reason to test this with a remote gopls + ProxyFiles(workspaceProxy), + Settings{ + "tempModfile": true, + }, + ).Run(t, badMod, func(t *testing.T, env *Env) { + env.OpenFile("p.go") + env.AfterChange() + want := "module badmod.test/p\n" + got := env.ReadWorkspaceFile("go.mod") + if got != want { + t.Errorf("go.mod content:\n%s\nwant:\n%s", got, want) + } + }) +} diff --git a/gopls/internal/telemetry/telemetry.go b/gopls/internal/telemetry/telemetry.go index 67ab45adb41..db75e1a7fbf 100644 --- a/gopls/internal/telemetry/telemetry.go +++ b/gopls/internal/telemetry/telemetry.go @@ -2,21 +2,24 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:build go1.19 +// +build go1.19 + package telemetry import ( - "os" + "fmt" "golang.org/x/telemetry/counter" + "golang.org/x/telemetry/upload" "golang.org/x/tools/gopls/internal/lsp/protocol" ) // Start starts telemetry instrumentation. func Start() { - if os.Getenv("GOPLS_TELEMETRY_EXP") != "" { - counter.Open() - // TODO: add upload logic. - } + counter.Open() + // upload only once at startup, hoping that users restart gopls often. + go upload.Run(nil) } // RecordClientInfo records gopls client info. 
@@ -26,6 +29,8 @@ func RecordClientInfo(params *protocol.ParamInitialize) { switch params.ClientInfo.Name { case "Visual Studio Code": client = "gopls/client:vscode" + case "Visual Studio Code - Insiders": + client = "gopls/client:vscode-insiders" case "VSCodium": client = "gopls/client:vscodium" case "code-server": @@ -46,7 +51,20 @@ func RecordClientInfo(params *protocol.ParamInitialize) { case "Sublime Text LSP": // https://github.com/sublimelsp/LSP/blob/e608f878e7e9dd34aabe4ff0462540fadcd88fcc/plugin/core/sessions.py#L493 client = "gopls/client:sublimetext" + default: + // at least accumulate the client name locally + counter.New(fmt.Sprintf("gopls/client-other:%s", params.ClientInfo.Name)).Inc() + // but also record client:other } } counter.Inc(client) } + +// RecordViewGoVersion records the Go minor version number (1.x) used for a view. +func RecordViewGoVersion(x int) { + if x < 0 { + return + } + name := fmt.Sprintf("gopls/goversion:1.%d", x) + counter.Inc(name) +} diff --git a/gopls/internal/telemetry/telemetry_go118.go b/gopls/internal/telemetry/telemetry_go118.go new file mode 100644 index 00000000000..b0c1197cb77 --- /dev/null +++ b/gopls/internal/telemetry/telemetry_go118.go @@ -0,0 +1,19 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.19 +// +build !go1.19 + +package telemetry + +import "golang.org/x/tools/gopls/internal/lsp/protocol" + +func Start() { +} + +func RecordClientInfo(params *protocol.ParamInitialize) { +} + +func RecordViewGoVersion(x int) { +} diff --git a/gopls/internal/telemetry/telemetry_test.go b/gopls/internal/telemetry/telemetry_test.go new file mode 100644 index 00000000000..93751bff1d8 --- /dev/null +++ b/gopls/internal/telemetry/telemetry_test.go @@ -0,0 +1,85 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.21 && !openbsd && !js && !wasip1 && !solaris && !android && !386 +// +build go1.21,!openbsd,!js,!wasip1,!solaris,!android,!386 + +package telemetry_test + +import ( + "os" + "strconv" + "strings" + "testing" + "time" + + "golang.org/x/telemetry/counter" + "golang.org/x/telemetry/counter/countertest" // requires go1.21+ + "golang.org/x/tools/gopls/internal/bug" + "golang.org/x/tools/gopls/internal/hooks" + . "golang.org/x/tools/gopls/internal/lsp/regtest" +) + +func TestMain(m *testing.M) { + tmp, err := os.MkdirTemp("", "gopls-telemetry-test") + if err != nil { + panic(err) + } + countertest.Open(tmp) + defer os.RemoveAll(tmp) + Main(m, hooks.Options) +} + +func TestTelemetry(t *testing.T) { + var ( + goversion = "" + editor = "vscode" // We set ClientName("Visual Studio Code") below. + ) + + // Verify that a properly configured session gets notified of a bug on the + // server. + WithOptions( + Modes(Default), // must be in-process to receive the bug report below + Settings{"showBugReports": true}, + ClientName("Visual Studio Code"), + ).Run(t, "", func(t *testing.T, env *Env) { + goversion = strconv.Itoa(env.GoVersion()) + const desc = "got a bug" + bug.Report(desc) // want a stack counter with the trace starting from here. + env.Await(ShownMessage(desc)) + }) + + // gopls/editor:client + // gopls/goversion:1.x + for _, c := range []*counter.Counter{ + counter.New("gopls/client:" + editor), + counter.New("gopls/goversion:1." 
+ goversion), + } { + count, err := countertest.ReadCounter(c) + if err != nil || count != 1 { + t.Errorf("ReadCounter(%q) = (%v, %v), want (1, nil)", c.Name(), count, err) + t.Logf("Current timestamp = %v", time.Now().UTC()) + } + } + + // gopls/bug + bugcount := bug.BugReportCount + counts, err := countertest.ReadStackCounter(bugcount) + if err != nil { + t.Fatalf("ReadStackCounter(bugreportcount) failed - %v", err) + } + if len(counts) != 1 || !hasEntry(counts, t.Name(), 1) { + t.Errorf("read stackcounter(%q) = (%#v, %v), want one entry", "gopls/bug", counts, err) + t.Logf("Current timestamp = %v", time.Now().UTC()) + } +} + +func hasEntry(counts map[string]uint64, pattern string, want uint64) bool { + for k, v := range counts { + if strings.Contains(k, pattern) && v == want { + return true + } + } + return false +} diff --git a/gopls/internal/vulncheck/command.go b/gopls/internal/vulncheck/command.go index 82711188583..4a3d3d2dcc0 100644 --- a/gopls/internal/vulncheck/command.go +++ b/gopls/internal/vulncheck/command.go @@ -197,7 +197,7 @@ func vulnerablePackages(ctx context.Context, snapshot source.Snapshot, modfile s return nil, err } cli, err := client.NewClient( - findGOVULNDB(snapshot.View().Options().EnvSlice()), + findGOVULNDB(snapshot.Options().EnvSlice()), client.Options{HTTPCache: govulncheck.NewInMemoryCache(fsCache)}) if err != nil { return nil, err @@ -209,7 +209,7 @@ func vulnerablePackages(ctx context.Context, snapshot source.Snapshot, modfile s mu sync.Mutex ) - goVersion := snapshot.View().Options().Env[GoVersionForVulnTest] + goVersion := snapshot.Options().Env[GoVersionForVulnTest] if goVersion == "" { goVersion = snapshot.View().GoVersionString() } diff --git a/internal/cmd/deadcode/deadcode.go b/internal/cmd/deadcode/deadcode.go index 60e22cb5552..f3388aa6161 100644 --- a/internal/cmd/deadcode/deadcode.go +++ b/internal/cmd/deadcode/deadcode.go @@ -8,6 +8,7 @@ import ( _ "embed" "flag" "fmt" + "go/ast" "go/token" "io" "log" @@ -32,10 +33,11 
@@ var ( testFlag = flag.Bool("test", false, "include implicit test packages and executables") tagsFlag = flag.String("tags", "", "comma-separated list of extra build tags (see: go help buildconstraint)") - filterFlag = flag.String("filter", "", "report only packages matching this regular expression (default: module of first package)") - lineFlag = flag.Bool("line", false, "show output in a line-oriented format") - cpuProfile = flag.String("cpuprofile", "", "write CPU profile to this file") - memProfile = flag.String("memprofile", "", "write memory profile to this file") + filterFlag = flag.String("filter", "", "report only packages matching this regular expression (default: module of first package)") + generatedFlag = flag.Bool("generated", true, "report dead functions in generated Go files") + lineFlag = flag.Bool("line", false, "show output in a line-oriented format") + cpuProfile = flag.String("cpuprofile", "", "write CPU profile to this file") + memProfile = flag.String("memprofile", "", "write memory profile to this file") ) func usage() { @@ -104,6 +106,18 @@ func main() { log.Fatalf("packages contain errors") } + // (Optionally) gather names of generated files. + generated := make(map[string]bool) + if !*generatedFlag { + packages.Visit(initial, nil, func(p *packages.Package) { + for _, file := range p.Syntax { + if isGenerated(file) { + generated[p.Fset.File(file.Pos()).Name()] = true + } + } + }) + } + // If -filter is unset, use first module (if available). if *filterFlag == "" { if mod := initial[0].Module; mod != nil && mod.Path != "" { @@ -176,6 +190,13 @@ func main() { } posn := prog.Fset.Position(fn.Pos()) + + // If -generated=false, skip functions declared in generated Go files. + // (Functions called by them may still be reported as dead.) 
+ if generated[posn.Filename] { + continue + } + if !reachablePosn[posn] { reachablePosn[posn] = true // suppress dups with same pos @@ -220,9 +241,6 @@ func main() { return xposn.Line < yposn.Line }) - // TODO(adonovan): add an option to skip (or indicate) - // dead functions in generated files (see ast.IsGenerated). - if *lineFlag { // line-oriented output for _, fn := range fns { @@ -238,3 +256,42 @@ func main() { } } } + +// TODO(adonovan): use go1.21's ast.IsGenerated. + +// isGenerated reports whether the file was generated by a program, +// not handwritten, by detecting the special comment described +// at https://go.dev/s/generatedcode. +// +// The syntax tree must have been parsed with the ParseComments flag. +// Example: +// +// f, err := parser.ParseFile(fset, filename, src, parser.ParseComments|parser.PackageClauseOnly) +// if err != nil { ... } +// gen := ast.IsGenerated(f) +func isGenerated(file *ast.File) bool { + _, ok := generator(file) + return ok +} + +func generator(file *ast.File) (string, bool) { + for _, group := range file.Comments { + for _, comment := range group.List { + if comment.Pos() > file.Package { + break // after package declaration + } + // opt: check Contains first to avoid unnecessary array allocation in Split. + const prefix = "// Code generated " + if strings.Contains(comment.Text, prefix) { + for _, line := range strings.Split(comment.Text, "\n") { + if rest, ok := strings.CutPrefix(line, prefix); ok { + if gen, ok := strings.CutSuffix(rest, " DO NOT EDIT."); ok { + return gen, true + } + } + } + } + } + } + return "", false +} diff --git a/internal/constraints/constraint.go b/internal/constraints/constraint.go new file mode 100644 index 00000000000..4e6ab61ea34 --- /dev/null +++ b/internal/constraints/constraint.go @@ -0,0 +1,52 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package constraints defines a set of useful constraints to be used +// with type parameters. +package constraints + +// Copied from x/exp/constraints. + +// Signed is a constraint that permits any signed integer type. +// If future releases of Go add new predeclared signed integer types, +// this constraint will be modified to include them. +type Signed interface { + ~int | ~int8 | ~int16 | ~int32 | ~int64 +} + +// Unsigned is a constraint that permits any unsigned integer type. +// If future releases of Go add new predeclared unsigned integer types, +// this constraint will be modified to include them. +type Unsigned interface { + ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr +} + +// Integer is a constraint that permits any integer type. +// If future releases of Go add new predeclared integer types, +// this constraint will be modified to include them. +type Integer interface { + Signed | Unsigned +} + +// Float is a constraint that permits any floating-point type. +// If future releases of Go add new predeclared floating-point types, +// this constraint will be modified to include them. +type Float interface { + ~float32 | ~float64 +} + +// Complex is a constraint that permits any complex numeric type. +// If future releases of Go add new predeclared complex numeric types, +// this constraint will be modified to include them. +type Complex interface { + ~complex64 | ~complex128 +} + +// Ordered is a constraint that permits any ordered type: any type +// that supports the operators < <= >= >. +// If future releases of Go add new ordered types, +// this constraint will be modified to include them. 
+type Ordered interface { + Integer | Float | ~string +} diff --git a/internal/diff/lcs/labels.go b/internal/diff/lcs/labels.go index 0689f1ed700..504913d1da3 100644 --- a/internal/diff/lcs/labels.go +++ b/internal/diff/lcs/labels.go @@ -8,7 +8,7 @@ import ( "fmt" ) -// For each D, vec[D] has length D+1, +// For each D, vec[D] has length D+1, // and the label for (D, k) is stored in vec[D][(D+k)/2]. type label struct { vec [][]int diff --git a/internal/facts/facts.go b/internal/facts/facts.go index 44c0605db27..ec11d5e0af1 100644 --- a/internal/facts/facts.go +++ b/internal/facts/facts.go @@ -247,7 +247,7 @@ func (d *Decoder) Decode(skipMethodSorting bool, read func(pkgPath string) ([]by key := key{pkg: factPkg, t: reflect.TypeOf(f.Fact)} if f.Object != "" { // object fact - obj, err := typesinternal.ObjectpathObject(factPkg, f.Object, skipMethodSorting) + obj, err := typesinternal.ObjectpathObject(factPkg, string(f.Object), skipMethodSorting) if err != nil { // (most likely due to unexported object) // TODO(adonovan): audit for other possibilities. 
diff --git a/internal/imports/zstdlib.go b/internal/imports/zstdlib.go index 31a75949cdc..9f992c2bec8 100644 --- a/internal/imports/zstdlib.go +++ b/internal/imports/zstdlib.go @@ -93,6 +93,7 @@ var stdlib = map[string][]string{ "Compare", "Contains", "ContainsAny", + "ContainsFunc", "ContainsRune", "Count", "Cut", @@ -147,6 +148,11 @@ var stdlib = map[string][]string{ "TrimSpace", "TrimSuffix", }, + "cmp": { + "Compare", + "Less", + "Ordered", + }, "compress/bzip2": { "NewReader", "StructuralError", @@ -228,6 +234,7 @@ var stdlib = map[string][]string{ "Ring", }, "context": { + "AfterFunc", "Background", "CancelCauseFunc", "CancelFunc", @@ -239,8 +246,11 @@ var stdlib = map[string][]string{ "WithCancel", "WithCancelCause", "WithDeadline", + "WithDeadlineCause", "WithTimeout", + "WithTimeoutCause", "WithValue", + "WithoutCancel", }, "crypto": { "BLAKE2b_256", @@ -445,6 +455,7 @@ var stdlib = map[string][]string{ "XORBytes", }, "crypto/tls": { + "AlertError", "Certificate", "CertificateRequestInfo", "CertificateVerificationError", @@ -476,6 +487,7 @@ var stdlib = map[string][]string{ "LoadX509KeyPair", "NewLRUClientSessionCache", "NewListener", + "NewResumptionState", "NoClientCert", "PKCS1WithSHA1", "PKCS1WithSHA256", @@ -484,6 +496,27 @@ var stdlib = map[string][]string{ "PSSWithSHA256", "PSSWithSHA384", "PSSWithSHA512", + "ParseSessionState", + "QUICClient", + "QUICConfig", + "QUICConn", + "QUICEncryptionLevel", + "QUICEncryptionLevelApplication", + "QUICEncryptionLevelEarly", + "QUICEncryptionLevelHandshake", + "QUICEncryptionLevelInitial", + "QUICEvent", + "QUICEventKind", + "QUICHandshakeDone", + "QUICNoEvent", + "QUICRejectedEarlyData", + "QUICServer", + "QUICSessionTicketOptions", + "QUICSetReadSecret", + "QUICSetWriteSecret", + "QUICTransportParameters", + "QUICTransportParametersRequired", + "QUICWriteData", "RecordHeaderError", "RenegotiateFreelyAsClient", "RenegotiateNever", @@ -493,6 +526,7 @@ var stdlib = map[string][]string{ 
"RequireAndVerifyClientCert", "RequireAnyClientCert", "Server", + "SessionState", "SignatureScheme", "TLS_AES_128_GCM_SHA256", "TLS_AES_256_GCM_SHA384", @@ -523,6 +557,7 @@ var stdlib = map[string][]string{ "TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_RC4_128_SHA", "VerifyClientCertIfGiven", + "VersionName", "VersionSSL30", "VersionTLS10", "VersionTLS11", @@ -618,6 +653,7 @@ var stdlib = map[string][]string{ "PureEd25519", "RSA", "RevocationList", + "RevocationListEntry", "SHA1WithRSA", "SHA256WithRSA", "SHA256WithRSAPSS", @@ -1002,10 +1038,42 @@ var stdlib = map[string][]string{ "COMPRESS_LOOS", "COMPRESS_LOPROC", "COMPRESS_ZLIB", + "COMPRESS_ZSTD", "Chdr32", "Chdr64", "Class", "CompressionType", + "DF_1_CONFALT", + "DF_1_DIRECT", + "DF_1_DISPRELDNE", + "DF_1_DISPRELPND", + "DF_1_EDITED", + "DF_1_ENDFILTEE", + "DF_1_GLOBAL", + "DF_1_GLOBAUDIT", + "DF_1_GROUP", + "DF_1_IGNMULDEF", + "DF_1_INITFIRST", + "DF_1_INTERPOSE", + "DF_1_KMOD", + "DF_1_LOADFLTR", + "DF_1_NOCOMMON", + "DF_1_NODEFLIB", + "DF_1_NODELETE", + "DF_1_NODIRECT", + "DF_1_NODUMP", + "DF_1_NOHDR", + "DF_1_NOKSYMS", + "DF_1_NOOPEN", + "DF_1_NORELOC", + "DF_1_NOW", + "DF_1_ORIGIN", + "DF_1_PIE", + "DF_1_SINGLETON", + "DF_1_STUB", + "DF_1_SYMINTPOSE", + "DF_1_TRANS", + "DF_1_WEAKFILTER", "DF_BIND_NOW", "DF_ORIGIN", "DF_STATIC_TLS", @@ -1144,6 +1212,7 @@ var stdlib = map[string][]string{ "Dyn32", "Dyn64", "DynFlag", + "DynFlag1", "DynTag", "EI_ABIVERSION", "EI_CLASS", @@ -2111,6 +2180,7 @@ var stdlib = map[string][]string{ "R_PPC64_REL16_LO", "R_PPC64_REL24", "R_PPC64_REL24_NOTOC", + "R_PPC64_REL24_P9NOTOC", "R_PPC64_REL30", "R_PPC64_REL32", "R_PPC64_REL64", @@ -2848,6 +2918,7 @@ var stdlib = map[string][]string{ "MaxVarintLen16", "MaxVarintLen32", "MaxVarintLen64", + "NativeEndian", "PutUvarint", "PutVarint", "Read", @@ -2963,6 +3034,7 @@ var stdlib = map[string][]string{ }, "errors": { "As", + "ErrUnsupported", "Is", "Join", "New", @@ -2989,6 +3061,7 @@ var stdlib = map[string][]string{ "Arg", 
"Args", "Bool", + "BoolFunc", "BoolVar", "CommandLine", "ContinueOnError", @@ -3119,6 +3192,7 @@ var stdlib = map[string][]string{ "Inspect", "InterfaceType", "IsExported", + "IsGenerated", "KeyValueExpr", "LabeledStmt", "Lbl", @@ -3169,6 +3243,7 @@ var stdlib = map[string][]string{ "ArchChar", "Context", "Default", + "Directive", "FindOnly", "IgnoreVendor", "Import", @@ -3184,6 +3259,7 @@ var stdlib = map[string][]string{ "go/build/constraint": { "AndExpr", "Expr", + "GoVersion", "IsGoBuild", "IsPlusBuild", "NotExpr", @@ -3626,6 +3702,7 @@ var stdlib = map[string][]string{ "ErrBadHTML", "ErrBranchEnd", "ErrEndContext", + "ErrJSTemplate", "ErrNoSuchTemplate", "ErrOutputContext", "ErrPartialCharset", @@ -3870,6 +3947,8 @@ var stdlib = map[string][]string{ "FileInfo", "FileInfoToDirEntry", "FileMode", + "FormatDirEntry", + "FormatFileInfo", "Glob", "GlobFS", "ModeAppend", @@ -3942,6 +4021,78 @@ var stdlib = map[string][]string{ "SetPrefix", "Writer", }, + "log/slog": { + "Any", + "AnyValue", + "Attr", + "Bool", + "BoolValue", + "Debug", + "DebugContext", + "Default", + "Duration", + "DurationValue", + "Error", + "ErrorContext", + "Float64", + "Float64Value", + "Group", + "GroupValue", + "Handler", + "HandlerOptions", + "Info", + "InfoContext", + "Int", + "Int64", + "Int64Value", + "IntValue", + "JSONHandler", + "Kind", + "KindAny", + "KindBool", + "KindDuration", + "KindFloat64", + "KindGroup", + "KindInt64", + "KindLogValuer", + "KindString", + "KindTime", + "KindUint64", + "Level", + "LevelDebug", + "LevelError", + "LevelInfo", + "LevelKey", + "LevelVar", + "LevelWarn", + "Leveler", + "Log", + "LogAttrs", + "LogValuer", + "Logger", + "MessageKey", + "New", + "NewJSONHandler", + "NewLogLogger", + "NewRecord", + "NewTextHandler", + "Record", + "SetDefault", + "Source", + "SourceKey", + "String", + "StringValue", + "TextHandler", + "Time", + "TimeKey", + "TimeValue", + "Uint64", + "Uint64Value", + "Value", + "Warn", + "WarnContext", + "With", + }, "log/syslog": { 
"Dial", "LOG_ALERT", @@ -3977,6 +4128,13 @@ var stdlib = map[string][]string{ "Priority", "Writer", }, + "maps": { + "Clone", + "Copy", + "DeleteFunc", + "Equal", + "EqualFunc", + }, "math": { "Abs", "Acos", @@ -4371,6 +4529,7 @@ var stdlib = map[string][]string{ "ErrNoLocation", "ErrNotMultipart", "ErrNotSupported", + "ErrSchemeMismatch", "ErrServerClosed", "ErrShortBody", "ErrSkipAltProtocol", @@ -5084,6 +5243,8 @@ var stdlib = map[string][]string{ "NumCPU", "NumCgoCall", "NumGoroutine", + "PanicNilError", + "Pinner", "ReadMemStats", "ReadTrace", "SetBlockProfileRate", @@ -5172,6 +5333,37 @@ var stdlib = map[string][]string{ "Task", "WithRegion", }, + "slices": { + "BinarySearch", + "BinarySearchFunc", + "Clip", + "Clone", + "Compact", + "CompactFunc", + "Compare", + "CompareFunc", + "Contains", + "ContainsFunc", + "Delete", + "DeleteFunc", + "Equal", + "EqualFunc", + "Grow", + "Index", + "IndexFunc", + "Insert", + "IsSorted", + "IsSortedFunc", + "Max", + "MaxFunc", + "Min", + "MinFunc", + "Replace", + "Reverse", + "Sort", + "SortFunc", + "SortStableFunc", + }, "sort": { "Find", "Float64Slice", @@ -5242,6 +5434,7 @@ var stdlib = map[string][]string{ "Compare", "Contains", "ContainsAny", + "ContainsFunc", "ContainsRune", "Count", "Cut", @@ -5299,6 +5492,9 @@ var stdlib = map[string][]string{ "Mutex", "NewCond", "Once", + "OnceFunc", + "OnceValue", + "OnceValues", "Pool", "RWMutex", "WaitGroup", @@ -9135,10 +9331,12 @@ var stdlib = map[string][]string{ "SYS_AIO_CANCEL", "SYS_AIO_ERROR", "SYS_AIO_FSYNC", + "SYS_AIO_MLOCK", "SYS_AIO_READ", "SYS_AIO_RETURN", "SYS_AIO_SUSPEND", "SYS_AIO_SUSPEND_NOCANCEL", + "SYS_AIO_WAITCOMPLETE", "SYS_AIO_WRITE", "SYS_ALARM", "SYS_ARCH_PRCTL", @@ -9368,6 +9566,7 @@ var stdlib = map[string][]string{ "SYS_GET_MEMPOLICY", "SYS_GET_ROBUST_LIST", "SYS_GET_THREAD_AREA", + "SYS_GSSD_SYSCALL", "SYS_GTTY", "SYS_IDENTITYSVC", "SYS_IDLE", @@ -9411,8 +9610,24 @@ var stdlib = map[string][]string{ "SYS_KLDSYM", "SYS_KLDUNLOAD", "SYS_KLDUNLOADF", + 
"SYS_KMQ_NOTIFY", + "SYS_KMQ_OPEN", + "SYS_KMQ_SETATTR", + "SYS_KMQ_TIMEDRECEIVE", + "SYS_KMQ_TIMEDSEND", + "SYS_KMQ_UNLINK", "SYS_KQUEUE", "SYS_KQUEUE1", + "SYS_KSEM_CLOSE", + "SYS_KSEM_DESTROY", + "SYS_KSEM_GETVALUE", + "SYS_KSEM_INIT", + "SYS_KSEM_OPEN", + "SYS_KSEM_POST", + "SYS_KSEM_TIMEDWAIT", + "SYS_KSEM_TRYWAIT", + "SYS_KSEM_UNLINK", + "SYS_KSEM_WAIT", "SYS_KTIMER_CREATE", "SYS_KTIMER_DELETE", "SYS_KTIMER_GETOVERRUN", @@ -9504,11 +9719,14 @@ var stdlib = map[string][]string{ "SYS_NFSSVC", "SYS_NFSTAT", "SYS_NICE", + "SYS_NLM_SYSCALL", "SYS_NLSTAT", "SYS_NMOUNT", "SYS_NSTAT", "SYS_NTP_ADJTIME", "SYS_NTP_GETTIME", + "SYS_NUMA_GETAFFINITY", + "SYS_NUMA_SETAFFINITY", "SYS_OABI_SYSCALL_BASE", "SYS_OBREAK", "SYS_OLDFSTAT", @@ -9891,6 +10109,7 @@ var stdlib = map[string][]string{ "SYS___ACL_SET_FD", "SYS___ACL_SET_FILE", "SYS___ACL_SET_LINK", + "SYS___CAP_RIGHTS_GET", "SYS___CLONE", "SYS___DISABLE_THREADSIGNAL", "SYS___GETCWD", @@ -10574,6 +10793,7 @@ var stdlib = map[string][]string{ "Short", "T", "TB", + "Testing", "Verbose", }, "testing/fstest": { @@ -10603,6 +10823,9 @@ var stdlib = map[string][]string{ "SetupError", "Value", }, + "testing/slogtest": { + "TestHandler", + }, "text/scanner": { "Char", "Comment", @@ -10826,6 +11049,7 @@ var stdlib = map[string][]string{ "Cs", "Cuneiform", "Cypriot", + "Cypro_Minoan", "Cyrillic", "Dash", "Deprecated", @@ -10889,6 +11113,7 @@ var stdlib = map[string][]string{ "Kaithi", "Kannada", "Katakana", + "Kawi", "Kayah_Li", "Kharoshthi", "Khitan_Small_Script", @@ -10943,6 +11168,7 @@ var stdlib = map[string][]string{ "Myanmar", "N", "Nabataean", + "Nag_Mundari", "Nandinagari", "Nd", "New_Tai_Lue", @@ -10964,6 +11190,7 @@ var stdlib = map[string][]string{ "Old_Sogdian", "Old_South_Arabian", "Old_Turkic", + "Old_Uyghur", "Oriya", "Osage", "Osmanya", @@ -11038,6 +11265,7 @@ var stdlib = map[string][]string{ "Tai_Viet", "Takri", "Tamil", + "Tangsa", "Tangut", "Telugu", "Terminal_Punctuation", @@ -11052,6 +11280,7 @@ var stdlib = 
map[string][]string{ "ToLower", "ToTitle", "ToUpper", + "Toto", "TurkishCase", "Ugaritic", "Unified_Ideograph", @@ -11061,6 +11290,7 @@ var stdlib = map[string][]string{ "Vai", "Variation_Selector", "Version", + "Vithkuqi", "Wancho", "Warang_Citi", "White_Space", diff --git a/internal/persistent/map.go b/internal/persistent/map.go index a9d878f4146..02389f89dc5 100644 --- a/internal/persistent/map.go +++ b/internal/persistent/map.go @@ -12,6 +12,8 @@ import ( "math/rand" "strings" "sync/atomic" + + "golang.org/x/tools/internal/constraints" ) // Implementation details: @@ -25,9 +27,7 @@ import ( // Each argument is followed by a delta change to its reference counter. // In case if no change is expected, the delta will be `-0`. -// Map is an associative mapping from keys to values, both represented as -// interface{}. Key comparison and iteration order is defined by a -// client-provided function that implements a strict weak order. +// Map is an associative mapping from keys to values. // // Maps can be Cloned in constant time. // Get, Store, and Delete operations are done on average in logarithmic time. @@ -38,16 +38,23 @@ import ( // // Internally the implementation is based on a randomized persistent treap: // https://en.wikipedia.org/wiki/Treap. -type Map struct { - less func(a, b interface{}) bool +// +// The zero value is ready to use. +type Map[K constraints.Ordered, V any] struct { + // Map is a generic wrapper around a non-generic implementation to avoid a + // significant increase in the size of the executable. 
root *mapNode } -func (m *Map) String() string { +func (*Map[K, V]) less(l, r any) bool { + return l.(K) < r.(K) +} + +func (m *Map[K, V]) String() string { var buf strings.Builder buf.WriteByte('{') var sep string - m.Range(func(k, v interface{}) { + m.Range(func(k K, v V) { fmt.Fprintf(&buf, "%s%v: %v", sep, k, v) sep = ", " }) @@ -56,7 +63,7 @@ func (m *Map) String() string { } type mapNode struct { - key interface{} + key any value *refValue weight uint64 refCount int32 @@ -65,11 +72,11 @@ type mapNode struct { type refValue struct { refCount int32 - value interface{} - release func(key, value interface{}) + value any + release func(key, value any) } -func newNodeWithRef(key, value interface{}, release func(key, value interface{})) *mapNode { +func newNodeWithRef[K constraints.Ordered, V any](key K, value V, release func(key, value any)) *mapNode { return &mapNode{ key: key, value: &refValue{ @@ -116,20 +123,10 @@ func (node *mapNode) decref() { } } -// NewMap returns a new map whose keys are ordered by the given comparison -// function (a strict weak order). It is the responsibility of the caller to -// Destroy it at later time. -func NewMap(less func(a, b interface{}) bool) *Map { - return &Map{ - less: less, - } -} - // Clone returns a copy of the given map. It is a responsibility of the caller // to Destroy it at later time. -func (pm *Map) Clone() *Map { - return &Map{ - less: pm.less, +func (pm *Map[K, V]) Clone() *Map[K, V] { + return &Map[K, V]{ root: pm.root.incref(), } } @@ -137,24 +134,26 @@ func (pm *Map) Clone() *Map { // Destroy destroys the map. // // After Destroy, the Map should not be used again. -func (pm *Map) Destroy() { +func (pm *Map[K, V]) Destroy() { // The implementation of these two functions is the same, // but their intent is different. pm.Clear() } // Clear removes all entries from the map. 
-func (pm *Map) Clear() { +func (pm *Map[K, V]) Clear() { pm.root.decref() pm.root = nil } // Range calls f sequentially in ascending key order for all entries in the map. -func (pm *Map) Range(f func(key, value interface{})) { - pm.root.forEach(f) +func (pm *Map[K, V]) Range(f func(key K, value V)) { + pm.root.forEach(func(k, v any) { + f(k.(K), v.(V)) + }) } -func (node *mapNode) forEach(f func(key, value interface{})) { +func (node *mapNode) forEach(f func(key, value any)) { if node == nil { return } @@ -163,26 +162,26 @@ func (node *mapNode) forEach(f func(key, value interface{})) { node.right.forEach(f) } -// Get returns the map value associated with the specified key, or nil if no entry -// is present. The ok result indicates whether an entry was found in the map. -func (pm *Map) Get(key interface{}) (interface{}, bool) { +// Get returns the map value associated with the specified key. +// The ok result indicates whether an entry was found in the map. +func (pm *Map[K, V]) Get(key K) (V, bool) { node := pm.root for node != nil { - if pm.less(key, node.key) { + if key < node.key.(K) { node = node.left - } else if pm.less(node.key, key) { + } else if node.key.(K) < key { node = node.right } else { - return node.value.value, true + return node.value.value.(V), true } } - return nil, false + var zero V + return zero, false } // SetAll updates the map with key/value pairs from the other map, overwriting existing keys. // It is equivalent to calling Set for each entry in the other map but is more efficient. -// Both maps must have the same comparison function, otherwise behavior is undefined. -func (pm *Map) SetAll(other *Map) { +func (pm *Map[K, V]) SetAll(other *Map[K, V]) { root := pm.root pm.root = union(root, other.root, pm.less, true) root.decref() @@ -191,7 +190,7 @@ func (pm *Map) SetAll(other *Map) { // Set updates the value associated with the specified key. 
// If release is non-nil, it will be called with entry's key and value once the // key is no longer contained in the map or any clone. -func (pm *Map) Set(key, value interface{}, release func(key, value interface{})) { +func (pm *Map[K, V]) Set(key K, value V, release func(key, value any)) { first := pm.root second := newNodeWithRef(key, value, release) pm.root = union(first, second, pm.less, true) @@ -205,7 +204,7 @@ func (pm *Map) Set(key, value interface{}, release func(key, value interface{})) // union(first:-0, second:-0) (result:+1) // Union borrows both subtrees without affecting their refcount and returns a // new reference that the caller is expected to call decref. -func union(first, second *mapNode, less func(a, b interface{}) bool, overwrite bool) *mapNode { +func union(first, second *mapNode, less func(any, any) bool, overwrite bool) *mapNode { if first == nil { return second.incref() } @@ -243,7 +242,7 @@ func union(first, second *mapNode, less func(a, b interface{}) bool, overwrite b // split(n:-0) (left:+1, mid:+1, right:+1) // Split borrows n without affecting its refcount, and returns three // new references that the caller is expected to call decref. -func split(n *mapNode, key interface{}, less func(a, b interface{}) bool, requireMid bool) (left, mid, right *mapNode) { +func split(n *mapNode, key any, less func(any, any) bool, requireMid bool) (left, mid, right *mapNode) { if n == nil { return nil, nil, nil } @@ -272,7 +271,7 @@ func split(n *mapNode, key interface{}, less func(a, b interface{}) bool, requir } // Delete deletes the value for a key. 
-func (pm *Map) Delete(key interface{}) { +func (pm *Map[K, V]) Delete(key K) { root := pm.root left, mid, right := split(root, key, pm.less, true) if mid == nil { diff --git a/internal/persistent/map_test.go b/internal/persistent/map_test.go index 9f89a1d300c..c73e5662d90 100644 --- a/internal/persistent/map_test.go +++ b/internal/persistent/map_test.go @@ -18,7 +18,7 @@ type mapEntry struct { } type validatedMap struct { - impl *Map + impl *Map[int, int] expected map[int]int // current key-value mapping. deleted map[mapEntry]int // maps deleted entries to their clock time of last deletion seen map[mapEntry]int // maps seen entries to their clock time of last insertion @@ -30,9 +30,7 @@ func TestSimpleMap(t *testing.T) { seenEntries := make(map[mapEntry]int) m1 := &validatedMap{ - impl: NewMap(func(a, b interface{}) bool { - return a.(int) < b.(int) - }), + impl: new(Map[int, int]), expected: make(map[int]int), deleted: deletedEntries, seen: seenEntries, @@ -123,9 +121,7 @@ func TestRandomMap(t *testing.T) { seenEntries := make(map[mapEntry]int) m := &validatedMap{ - impl: NewMap(func(a, b interface{}) bool { - return a.(int) < b.(int) - }), + impl: new(Map[int, int]), expected: make(map[int]int), deleted: deletedEntries, seen: seenEntries, @@ -165,9 +161,7 @@ func TestUpdate(t *testing.T) { seenEntries := make(map[mapEntry]int) m1 := &validatedMap{ - impl: NewMap(func(a, b interface{}) bool { - return a.(int) < b.(int) - }), + impl: new(Map[int, int]), expected: make(map[int]int), deleted: deletedEntries, seen: seenEntries, @@ -233,7 +227,7 @@ func dumpMap(t *testing.T, prefix string, n *mapNode) { func (vm *validatedMap) validate(t *testing.T) { t.Helper() - validateNode(t, vm.impl.root, vm.impl.less) + validateNode(t, vm.impl.root) // Note: this validation may not make sense if maps were constructed using // SetAll operations. 
If this proves to be problematic, remove the clock, @@ -246,23 +240,23 @@ func (vm *validatedMap) validate(t *testing.T) { } actualMap := make(map[int]int, len(vm.expected)) - vm.impl.Range(func(key, value interface{}) { - if other, ok := actualMap[key.(int)]; ok { + vm.impl.Range(func(key, value int) { + if other, ok := actualMap[key]; ok { t.Fatalf("key is present twice, key: %d, first value: %d, second value: %d", key, value, other) } - actualMap[key.(int)] = value.(int) + actualMap[key] = value }) assertSameMap(t, actualMap, vm.expected) } -func validateNode(t *testing.T, node *mapNode, less func(a, b interface{}) bool) { +func validateNode(t *testing.T, node *mapNode) { if node == nil { return } if node.left != nil { - if less(node.key, node.left.key) { + if node.key.(int) < node.left.key.(int) { t.Fatalf("left child has larger key: %v vs %v", node.left.key, node.key) } if node.left.weight > node.weight { @@ -271,7 +265,7 @@ func validateNode(t *testing.T, node *mapNode, less func(a, b interface{}) bool) } if node.right != nil { - if less(node.right.key, node.key) { + if node.right.key.(int) < node.key.(int) { t.Fatalf("right child has smaller key: %v vs %v", node.right.key, node.key) } if node.right.weight > node.weight { @@ -279,8 +273,8 @@ func validateNode(t *testing.T, node *mapNode, less func(a, b interface{}) bool) } } - validateNode(t, node.left, less) - validateNode(t, node.right, less) + validateNode(t, node.left) + validateNode(t, node.right) } func (vm *validatedMap) setAll(t *testing.T, other *validatedMap) { @@ -300,7 +294,7 @@ func (vm *validatedMap) set(t *testing.T, key, value int) { vm.clock++ vm.seen[entry] = vm.clock - vm.impl.Set(key, value, func(deletedKey, deletedValue interface{}) { + vm.impl.Set(key, value, func(deletedKey, deletedValue any) { if deletedKey != key || deletedValue != value { t.Fatalf("unexpected passed in deleted entry: %v/%v, expected: %v/%v", deletedKey, deletedValue, key, value) } @@ -346,7 +340,7 @@ func (vm 
*validatedMap) destroy() { vm.impl.Destroy() } -func assertSameMap(t *testing.T, map1, map2 interface{}) { +func assertSameMap(t *testing.T, map1, map2 any) { t.Helper() if !reflect.DeepEqual(map1, map2) { diff --git a/internal/persistent/set.go b/internal/persistent/set.go new file mode 100644 index 00000000000..348de5a71d2 --- /dev/null +++ b/internal/persistent/set.go @@ -0,0 +1,78 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package persistent + +import "golang.org/x/tools/internal/constraints" + +// Set is a collection of elements of type K. +// +// It uses immutable data structures internally, so that sets can be cloned in +// constant time. +// +// The zero value is a valid empty set. +type Set[K constraints.Ordered] struct { + impl *Map[K, struct{}] +} + +// Clone creates a copy of the receiver. +func (s *Set[K]) Clone() *Set[K] { + clone := new(Set[K]) + if s.impl != nil { + clone.impl = s.impl.Clone() + } + return clone +} + +// Destroy destroys the set. +// +// After Destroy, the Set should not be used again. +func (s *Set[K]) Destroy() { + if s.impl != nil { + s.impl.Destroy() + } +} + +// Contains reports whether s contains the given key. +func (s *Set[K]) Contains(key K) bool { + if s.impl == nil { + return false + } + _, ok := s.impl.Get(key) + return ok +} + +// Range calls f sequentially in ascending key order for all entries in the set. +func (s *Set[K]) Range(f func(key K)) { + if s.impl != nil { + s.impl.Range(func(key K, _ struct{}) { + f(key) + }) + } +} + +// AddAll adds all elements from other to the receiver set. +func (s *Set[K]) AddAll(other *Set[K]) { + if other.impl != nil { + if s.impl == nil { + s.impl = new(Map[K, struct{}]) + } + s.impl.SetAll(other.impl) + } +} + +// Add adds an element to the set. 
+func (s *Set[K]) Add(key K) { + if s.impl == nil { + s.impl = new(Map[K, struct{}]) + } + s.impl.Set(key, struct{}{}, nil) +} + +// Remove removes an element from the set. +func (s *Set[K]) Remove(key K) { + if s.impl != nil { + s.impl.Delete(key) + } +} diff --git a/internal/persistent/set_test.go b/internal/persistent/set_test.go new file mode 100644 index 00000000000..59025140bce --- /dev/null +++ b/internal/persistent/set_test.go @@ -0,0 +1,132 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package persistent_test + +import ( + "fmt" + "strings" + "testing" + + "golang.org/x/tools/internal/constraints" + "golang.org/x/tools/internal/persistent" +) + +func TestSet(t *testing.T) { + const ( + add = iota + remove + ) + type op struct { + op int + v int + } + + tests := []struct { + label string + ops []op + want []int + }{ + {"empty", nil, nil}, + {"singleton", []op{{add, 1}}, []int{1}}, + {"add and remove", []op{ + {add, 1}, + {remove, 1}, + }, nil}, + {"interleaved and remove", []op{ + {add, 1}, + {add, 2}, + {remove, 1}, + {add, 3}, + }, []int{2, 3}}, + } + + for _, test := range tests { + t.Run(test.label, func(t *testing.T) { + var s persistent.Set[int] + for _, op := range test.ops { + switch op.op { + case add: + s.Add(op.v) + case remove: + s.Remove(op.v) + } + } + + if d := diff(&s, test.want); d != "" { + t.Errorf("unexpected diff:\n%s", d) + } + }) + } +} + +func TestSet_Clone(t *testing.T) { + s1 := new(persistent.Set[int]) + s1.Add(1) + s1.Add(2) + s2 := s1.Clone() + s1.Add(3) + s2.Add(4) + if d := diff(s1, []int{1, 2, 3}); d != "" { + t.Errorf("s1: unexpected diff:\n%s", d) + } + if d := diff(s2, []int{1, 2, 4}); d != "" { + t.Errorf("s2: unexpected diff:\n%s", d) + } +} + +func TestSet_AddAll(t *testing.T) { + s1 := new(persistent.Set[int]) + s1.Add(1) + s1.Add(2) + s2 := new(persistent.Set[int]) + s2.Add(2) + s2.Add(3) + 
s2.Add(4) + s3 := new(persistent.Set[int]) + + s := new(persistent.Set[int]) + s.AddAll(s1) + s.AddAll(s2) + s.AddAll(s3) + + if d := diff(s1, []int{1, 2}); d != "" { + t.Errorf("s1: unexpected diff:\n%s", d) + } + if d := diff(s2, []int{2, 3, 4}); d != "" { + t.Errorf("s2: unexpected diff:\n%s", d) + } + if d := diff(s3, nil); d != "" { + t.Errorf("s3: unexpected diff:\n%s", d) + } + if d := diff(s, []int{1, 2, 3, 4}); d != "" { + t.Errorf("s: unexpected diff:\n%s", d) + } +} + +func diff[K constraints.Ordered](got *persistent.Set[K], want []K) string { + wantSet := make(map[K]struct{}) + for _, w := range want { + wantSet[w] = struct{}{} + } + var diff []string + got.Range(func(key K) { + if _, ok := wantSet[key]; !ok { + diff = append(diff, fmt.Sprintf("+%v", key)) + } + }) + for key := range wantSet { + if !got.Contains(key) { + diff = append(diff, fmt.Sprintf("-%v", key)) + } + } + if len(diff) > 0 { + d := new(strings.Builder) + for _, l := range diff { + fmt.Fprintln(d, l) + } + return d.String() + } + return "" +} diff --git a/internal/refactor/inline/analyzer/analyzer.go b/internal/refactor/inline/analyzer/analyzer.go new file mode 100644 index 00000000000..2356fa484e7 --- /dev/null +++ b/internal/refactor/inline/analyzer/analyzer.go @@ -0,0 +1,161 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package analyzer + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/refactor/inline" +) + +const Doc = `inline calls to functions with "inlineme" doc comment` + +var Analyzer = &analysis.Analyzer{ + Name: "inline", + Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/internal/refactor/inline/analyzer", + Run: run, + FactTypes: []analysis.Fact{new(inlineMeFact)}, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Memoize repeated calls for same file. + // TODO(adonovan): the analysis.Pass should abstract this (#62292) + // as the driver may not be reading directly from the file system. + fileContent := make(map[string][]byte) + readFile := func(node ast.Node) ([]byte, error) { + filename := pass.Fset.File(node.Pos()).Name() + content, ok := fileContent[filename] + if !ok { + var err error + content, err = os.ReadFile(filename) + if err != nil { + return nil, err + } + fileContent[filename] = content + } + return content, nil + } + + // Pass 1: find functions annotated with an "inlineme" + // comment, and export a fact for each one. + inlinable := make(map[*types.Func]*inline.Callee) // memoization of fact import (nil => no fact) + for _, file := range pass.Files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + // TODO(adonovan): this is just a placeholder. + // Use the precise go:fix syntax in the proposal. + // Beware that //go: comments are treated specially + // by (*ast.CommentGroup).Text(). + // TODO(adonovan): alternatively, consider using + // the universal annotation mechanism sketched in + // https://go.dev/cl/489835 (which doesn't yet have + // a proper proposal). 
+ if strings.Contains(decl.Doc.Text(), "inlineme") { + content, err := readFile(file) + if err != nil { + pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: cannot read source file: %v", err) + continue + } + callee, err := inline.AnalyzeCallee(pass.Fset, pass.Pkg, pass.TypesInfo, decl, content) + if err != nil { + pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: %v", err) + continue + } + fn := pass.TypesInfo.Defs[decl.Name].(*types.Func) + pass.ExportObjectFact(fn, &inlineMeFact{callee}) + inlinable[fn] = callee + } + } + } + } + + // Pass 2. Inline each static call to an inlinable function. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.File)(nil), + (*ast.CallExpr)(nil), + } + var currentFile *ast.File + inspect.Preorder(nodeFilter, func(n ast.Node) { + if file, ok := n.(*ast.File); ok { + currentFile = file + return + } + call := n.(*ast.CallExpr) + if fn := typeutil.StaticCallee(pass.TypesInfo, call); fn != nil { + // Inlinable? + callee, ok := inlinable[fn] + if !ok { + var fact inlineMeFact + if pass.ImportObjectFact(fn, &fact) { + callee = fact.callee + inlinable[fn] = callee + } + } + if callee == nil { + return // nope + } + + // Inline the call. + content, err := readFile(call) + if err != nil { + pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err) + return + } + caller := &inline.Caller{ + Fset: pass.Fset, + Types: pass.Pkg, + Info: pass.TypesInfo, + File: currentFile, + Call: call, + Content: content, + } + got, err := inline.Inline(caller, callee) + if err != nil { + pass.Reportf(call.Lparen, "%v", err) + return + } + + // Suggest the "fix". 
+ var textEdits []analysis.TextEdit + for _, edit := range diff.Bytes(content, got) { + textEdits = append(textEdits, analysis.TextEdit{ + Pos: currentFile.FileStart + token.Pos(edit.Start), + End: currentFile.FileStart + token.Pos(edit.End), + NewText: []byte(edit.New), + }) + } + msg := fmt.Sprintf("inline call of %v", callee) + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: msg, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: msg, + TextEdits: textEdits, + }}, + }) + } + }) + + return nil, nil +} + +type inlineMeFact struct{ callee *inline.Callee } + +func (f *inlineMeFact) String() string { return "inlineme " + f.callee.String() } +func (*inlineMeFact) AFact() {} diff --git a/internal/refactor/inline/analyzer/analyzer_test.go b/internal/refactor/inline/analyzer/analyzer_test.go new file mode 100644 index 00000000000..5ad85cfb821 --- /dev/null +++ b/internal/refactor/inline/analyzer/analyzer_test.go @@ -0,0 +1,16 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package analyzer_test + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + inlineanalyzer "golang.org/x/tools/internal/refactor/inline/analyzer" +) + +func TestAnalyzer(t *testing.T) { + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), inlineanalyzer.Analyzer, "a", "b") +} diff --git a/internal/refactor/inline/analyzer/main.go b/internal/refactor/inline/analyzer/main.go new file mode 100644 index 00000000000..4be223a80d6 --- /dev/null +++ b/internal/refactor/inline/analyzer/main.go @@ -0,0 +1,19 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore +// +build ignore + +// The inline command applies the inliner to the specified packages of +// Go source code. 
Run with: +// +// $ go run ./internal/refactor/inline/analyzer/main.go -fix packages... +package main + +import ( + "golang.org/x/tools/go/analysis/singlechecker" + inlineanalyzer "golang.org/x/tools/internal/refactor/inline/analyzer" +) + +func main() { singlechecker.Main(inlineanalyzer.Analyzer) } diff --git a/internal/refactor/inline/analyzer/testdata/src/a/a.go b/internal/refactor/inline/analyzer/testdata/src/a/a.go new file mode 100644 index 00000000000..e661515b7c7 --- /dev/null +++ b/internal/refactor/inline/analyzer/testdata/src/a/a.go @@ -0,0 +1,16 @@ +package a + +func f() { + One() // want `inline call of a.One` + new(T).Two() // want `inline call of \(a.T\).Two` +} + +type T struct{} + +// inlineme +func One() int { return one } // want One:`inlineme a.One` + +const one = 1 + +// inlineme +func (T) Two() int { return 2 } // want Two:`inlineme \(a.T\).Two` diff --git a/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden b/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden new file mode 100644 index 00000000000..fe9877b69c1 --- /dev/null +++ b/internal/refactor/inline/analyzer/testdata/src/a/a.go.golden @@ -0,0 +1,16 @@ +package a + +func f() { + _ = one // want `inline call of a.One` + func(_ T) int { return 2 }(*new(T)) // want `inline call of \(a.T\).Two` +} + +type T struct{} + +// inlineme +func One() int { return one } // want One:`inlineme a.One` + +const one = 1 + +// inlineme +func (T) Two() int { return 2 } // want Two:`inlineme \(a.T\).Two` diff --git a/internal/refactor/inline/analyzer/testdata/src/b/b.go b/internal/refactor/inline/analyzer/testdata/src/b/b.go new file mode 100644 index 00000000000..069e670d51e --- /dev/null +++ b/internal/refactor/inline/analyzer/testdata/src/b/b.go @@ -0,0 +1,9 @@ +package b + +import "a" + +func f() { + a.One() // want `cannot inline call to a.One because body refers to non-exported one` + + new(a.T).Two() // want `inline call of \(a.T\).Two` +} diff --git 
a/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden b/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden new file mode 100644 index 00000000000..61b7bd9b349 --- /dev/null +++ b/internal/refactor/inline/analyzer/testdata/src/b/b.go.golden @@ -0,0 +1,9 @@ +package b + +import "a" + +func f() { + a.One() // want `cannot inline call to a.One because body refers to non-exported one` + + func(_ a.T) int { return 2 }(*new(a.T)) // want `inline call of \(a.T\).Two` +} diff --git a/internal/refactor/inline/callee.go b/internal/refactor/inline/callee.go new file mode 100644 index 00000000000..291971cf6d8 --- /dev/null +++ b/internal/refactor/inline/callee.go @@ -0,0 +1,350 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inline + +// This file defines the analysis of the callee function. + +import ( + "bytes" + "encoding/gob" + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/typeparams" +) + +// A Callee holds information about an inlinable function. Gob-serializable. 
+type Callee struct { + impl gobCallee +} + +func (callee *Callee) String() string { return callee.impl.Name } + +type gobCallee struct { + Content []byte // file content, compacted to a single func decl + + // syntax derived from compacted Content (not serialized) + fset *token.FileSet + decl *ast.FuncDecl + + // results of type analysis (does not reach go/types data structures) + PkgPath string // package path of declaring package + Name string // user-friendly name for error messages + Unexported []string // names of free objects that are unexported + FreeRefs []freeRef // locations of references to free objects + FreeObjs []object // descriptions of free objects + BodyIsReturnExpr bool // function body is "return expr(s)" + ValidForCallStmt bool // => bodyIsReturnExpr and sole expr is f() or <-ch + NumResults int // number of results (according to type, not ast.FieldList) +} + +// A freeRef records a reference to a free object. Gob-serializable. +type freeRef struct { + Start, End int // Callee.content[start:end] is extent of the reference + Object int // index into Callee.freeObjs +} + +// An object abstracts a free types.Object referenced by the callee. Gob-serializable. +type object struct { + Name string // Object.Name() + Kind string // one of {var,func,const,type,pkgname,nil,builtin} + PkgPath string // pkgpath of object (or of imported package if kind="pkgname") + ValidPos bool // Object.Pos().IsValid() +} + +func (callee *gobCallee) offset(pos token.Pos) int { return offsetOf(callee.fset, pos) } + +// AnalyzeCallee analyzes a function that is a candidate for inlining +// and returns a Callee that describes it. The Callee object, which is +// serializable, can be passed to one or more subsequent calls to +// Inline, each with a different Caller. +// +// This design allows separate analysis of callers and callees in the +// golang.org/x/tools/go/analysis framework: the inlining information +// about a callee can be recorded as a "fact". 
+func AnalyzeCallee(fset *token.FileSet, pkg *types.Package, info *types.Info, decl *ast.FuncDecl, content []byte) (*Callee, error) { + + // The client is expected to have determined that the callee + // is a function with a declaration (not a built-in or var). + fn := info.Defs[decl.Name].(*types.Func) + sig := fn.Type().(*types.Signature) + + // Create user-friendly name ("pkg.Func" or "(pkg.T).Method") + var name string + if sig.Recv() == nil { + name = fmt.Sprintf("%s.%s", fn.Pkg().Name(), fn.Name()) + } else { + name = fmt.Sprintf("(%s).%s", types.TypeString(sig.Recv().Type(), (*types.Package).Name), fn.Name()) + } + + if decl.Body == nil { + return nil, fmt.Errorf("cannot inline function %s as it has no body", name) + } + + // TODO(adonovan): support inlining of instantiated generic + // functions by replacing each occurrence of a type parameter + // T by its instantiating type argument (e.g. int). We'll need + // to wrap the instantiating type in parens when it's not an + // ident or qualified ident to prevent "if x == struct{}" + // parsing ambiguity, or "T(x)" where T = "*int" or "func()" + // from misparsing. + if decl.Type.TypeParams != nil { + return nil, fmt.Errorf("cannot inline generic function %s: type parameters are not yet supported", name) + } + + // Record the location of all free references in the callee body. + var ( + freeObjIndex = make(map[types.Object]int) + freeObjs []object + freeRefs []freeRef // free refs that may need renaming + unexported []string // free refs to unexported objects, for later error checks + ) + var visit func(n ast.Node) bool + visit = func(n ast.Node) bool { + switch n := n.(type) { + case *ast.SelectorExpr: + // Check selections of free fields/methods. + if sel, ok := info.Selections[n]; ok && + !within(sel.Obj().Pos(), decl) && + !n.Sel.IsExported() { + sym := fmt.Sprintf("(%s).%s", info.TypeOf(n.X), n.Sel.Name) + unexported = append(unexported, sym) + } + + // Don't recur into SelectorExpr.Sel. 
+ visit(n.X) + return false + + case *ast.CompositeLit: + // Check for struct literals that refer to unexported fields, + // whether keyed or unkeyed. (Logic assumes well-typedness.) + litType := deref(info.TypeOf(n)) + if s, ok := typeparams.CoreType(litType).(*types.Struct); ok { + for i, elt := range n.Elts { + var field *types.Var + var value ast.Expr + if kv, ok := elt.(*ast.KeyValueExpr); ok { + field = info.Uses[kv.Key.(*ast.Ident)].(*types.Var) + value = kv.Value + } else { + field = s.Field(i) + value = elt + } + if !within(field.Pos(), decl) && !field.Exported() { + sym := fmt.Sprintf("(%s).%s", litType, field.Name()) + unexported = append(unexported, sym) + } + + // Don't recur into KeyValueExpr.Key. + visit(value) + } + return false + } + + case *ast.Ident: + if obj, ok := info.Uses[n]; ok { + // Methods and fields are handled by SelectorExpr and CompositeLit. + if isField(obj) || isMethod(obj) { + panic(obj) + } + // Inv: id is a lexical reference. + + // A reference to an unexported package-level declaration + // cannot be inlined into another package. + if !n.IsExported() && + obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() { + unexported = append(unexported, n.Name) + } + + // Record free reference. + if !within(obj.Pos(), decl) { + objidx, ok := freeObjIndex[obj] + if !ok { + objidx = len(freeObjIndex) + var pkgpath string + if pkgname, ok := obj.(*types.PkgName); ok { + pkgpath = pkgname.Imported().Path() + } else if obj.Pkg() != nil { + pkgpath = obj.Pkg().Path() + } + freeObjs = append(freeObjs, object{ + Name: obj.Name(), + Kind: objectKind(obj), + PkgPath: pkgpath, + ValidPos: obj.Pos().IsValid(), + }) + freeObjIndex[obj] = objidx + } + freeRefs = append(freeRefs, freeRef{ + Start: offsetOf(fset, n.Pos()), + End: offsetOf(fset, n.End()), + Object: objidx, + }) + } + } + } + return true + } + ast.Inspect(decl, visit) + + // Analyze callee body for "return results" form, where + // results is one or more expressions or an n-ary call. 
+ validForCallStmt := false + bodyIsReturnExpr := decl.Type.Results != nil && len(decl.Type.Results.List) > 0 && + len(decl.Body.List) == 1 && + is[*ast.ReturnStmt](decl.Body.List[0]) && + len(decl.Body.List[0].(*ast.ReturnStmt).Results) > 0 + if bodyIsReturnExpr { + ret := decl.Body.List[0].(*ast.ReturnStmt) + + // Ascertain whether the results expression(s) + // would be safe to inline as a standalone statement. + // (This is true only for a single call or receive expression.) + validForCallStmt = func() bool { + if len(ret.Results) == 1 { + switch expr := astutil.Unparen(ret.Results[0]).(type) { + case *ast.CallExpr: // f(x) + callee := typeutil.Callee(info, expr) + if callee == nil { + return false // conversion T(x) + } + + // The only non-void built-in functions that may be + // called as a statement are copy and recover + // (though arguably a call to recover should never + // be inlined as that changes its behavior). + if builtin, ok := callee.(*types.Builtin); ok { + return builtin.Name() == "copy" || + builtin.Name() == "recover" + } + + return true // ordinary call f() + + case *ast.UnaryExpr: // <-x + return expr.Op == token.ARROW // channel receive <-ch + } + } + + // No other expressions are valid statements. + return false + }() + } + + // As a space optimization, we don't retain the complete + // callee file content; all we need is "package _; func f() { ... }". + // This reduces the size of analysis facts. + // + // The FileSet file/line info is no longer meaningful + // and should not be used in error messages. + // But the FileSet offsets are valid w.r.t. the content. + // + // (For ease of debugging we could insert a //line directive after + // the package decl but it seems more trouble than it's worth.) + { + start, end := offsetOf(fset, decl.Pos()), offsetOf(fset, decl.End()) + + var compact bytes.Buffer + compact.WriteString("package _\n") + compact.Write(content[start:end]) + content = compact.Bytes() + + // Re-parse the compacted content. 
+ var err error + decl, err = parseCompact(fset, content) + if err != nil { + return nil, err + } + + // (content, decl) are now updated. + + // Adjust the freeRefs offsets. + delta := int(offsetOf(fset, decl.Pos()) - start) + for i := range freeRefs { + freeRefs[i].Start += delta + freeRefs[i].End += delta + } + } + + return &Callee{gobCallee{ + Content: content, + fset: fset, + decl: decl, + PkgPath: pkg.Path(), + Name: name, + Unexported: unexported, + FreeObjs: freeObjs, + FreeRefs: freeRefs, + BodyIsReturnExpr: bodyIsReturnExpr, + ValidForCallStmt: validForCallStmt, + NumResults: sig.Results().Len(), + }}, nil +} + +// parseCompact parses a Go source file of the form "package _\n func f() { ... }" +// and returns the sole function declaration. +func parseCompact(fset *token.FileSet, content []byte) (*ast.FuncDecl, error) { + const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors + f, err := parser.ParseFile(fset, "callee.go", content, mode) + if err != nil { + return nil, fmt.Errorf("internal error: cannot compact file: %v", err) + } + return f.Decls[0].(*ast.FuncDecl), nil +} + +// deref removes a pointer type constructor from the core type of t. 
+func deref(t types.Type) types.Type { + if ptr, ok := typeparams.CoreType(t).(*types.Pointer); ok { + return ptr.Elem() + } + return t +} + +func isField(obj types.Object) bool { + if v, ok := obj.(*types.Var); ok && v.IsField() { + return true + } + return false +} + +func isMethod(obj types.Object) bool { + if f, ok := obj.(*types.Func); ok && f.Type().(*types.Signature).Recv() != nil { + return true + } + return false +} + +// -- serialization -- + +var ( + _ gob.GobEncoder = (*Callee)(nil) + _ gob.GobDecoder = (*Callee)(nil) +) + +func (callee *Callee) GobEncode() ([]byte, error) { + var out bytes.Buffer + if err := gob.NewEncoder(&out).Encode(callee.impl); err != nil { + return nil, err + } + return out.Bytes(), nil +} + +func (callee *Callee) GobDecode(data []byte) error { + if err := gob.NewDecoder(bytes.NewReader(data)).Decode(&callee.impl); err != nil { + return err + } + fset := token.NewFileSet() + decl, err := parseCompact(fset, callee.impl.Content) + if err != nil { + return err + } + callee.impl.fset = fset + callee.impl.decl = decl + return nil +} diff --git a/internal/refactor/inline/inline.go b/internal/refactor/inline/inline.go new file mode 100644 index 00000000000..9167498fc35 --- /dev/null +++ b/internal/refactor/inline/inline.go @@ -0,0 +1,688 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package inline implements inlining of Go function calls. +// +// The client provides information about the caller and callee, +// including the source text, syntax tree, and type information, and +// the inliner returns the modified source file for the caller, or an +// error if the inlining operation is invalid (for example because the +// function body refers to names that are inaccessible to the caller). 
+// +// Although this interface demands more information from the client +// than might seem necessary, it enables smoother integration with +// existing batch and interactive tools that have their own ways of +// managing the processes of reading, parsing, and type-checking +// packages. In particular, this package does not assume that the +// caller and callee belong to the same token.FileSet or +// types.Importer realms. +// +// In general, inlining consists of modifying a function or method +// call expression f(a1, ..., an) so that the name of the function f +// is replaced ("literalized") by a literal copy of the function +// declaration, with free identifiers suitably modified to use the +// locally appropriate identifiers or perhaps constant argument +// values. +// +// Inlining must not change the semantics of the call. Semantics +// preservation is crucial for clients such as codebase maintenance +// tools that automatically inline all calls to designated functions +// on a large scale. Such tools must not introduce subtle behavior +// changes. (Fully inlining a call is dynamically observable using +// reflection over the call stack, but this exception to the rule is +// explicitly allowed.) +// +// In some special cases it is possible to entirely replace ("reduce") +// the call by a copy of the function's body in which parameters have +// been replaced by arguments, but this is surprisingly tricky for a +// number of reasons, some of which are listed here for illustration: +// +// - Any effects of the call argument expressions must be preserved, +// even if the corresponding parameters are never referenced, or are +// referenced multiple times, or are referenced in a different order +// from the arguments. +// +// - Even an argument expression as simple as ptr.x may not be +// referentially transparent, because another argument may have the +// effect of changing the value of ptr. 
+// +// - Although constants are referentially transparent, as a matter of +// style we do not wish to duplicate literals that are referenced +// multiple times in the body because this undoes proper factoring. +// Also, string literals may be arbitrarily large. +// +// - If the function body consists of statements other than just +// "return expr", in some contexts it may be syntactically +// impossible to replace the call expression by the body statements. +// Consider "} else if x := f(); cond { ... }". +// (Go has no equivalent to Lisp's progn or Rust's blocks.) +// +// - Similarly, without the equivalent of Rust-style blocks and +// first-class tuples, there is no general way to reduce a call +// to a function such as +// > func(params)(args)(results) { stmts; return body } +// to an expression such as +// > { var params = args; stmts; body } +// or even a statement such as +// > results = { var params = args; stmts; body } +// Consequently the declaration and scope of the result variables, +// and the assignment and control-flow implications of the return +// statement, must be dealt with by cases. +// +// - A standalone call statement that calls a function whose body is +// "return expr" cannot be simply replaced by the body expression +// if it is not itself a call or channel receive expression; it is +// necessary to explicitly discard the result using "_ = expr". +// +// Similarly, if the body is a call expression, only calls to some +// built-in functions with no result (such as copy or panic) are +// permitted as statements, whereas others (such as append) return +// a result that must be used, even if just by discarding. +// +// - If a parameter or result variable is updated by an assignment +// within the function body, it cannot always be safely replaced +// by a variable in the caller. 
For example, given +// > func f(a int) int { a++; return a } +// The call y = f(x) cannot be replaced by { x++; y = x } because +// this would change the value of the caller's variable x. +// Only if the caller is finished with x is this safe. +// +// A similar argument applies to parameter or result variables +// that escape: by eliminating a variable, inlining would change +// the identity of the variable that escapes. +// +// - If the function body uses 'defer' and the inlined call is not a +// tail-call, inlining may delay the deferred effects. +// +// - Each control label that is used by both caller and callee must +// be α-renamed. +// +// - Given +// > func f() uint8 { return 0 } +// > var x any = f() +// reducing the call to var x any = 0 is unsound because it +// discards the implicit conversion. We may need to make each +// argument->parameter and return->result assignment conversion +// implicit if the types differ. Assignments to variadic +// parameters may need to explicitly construct a slice. +// +// More complex callee functions are inlinable with more elaborate and +// invasive changes to the statements surrounding the call expression. +// +// TODO(adonovan): future work: +// +// - Handle more of the above special cases by careful analysis, +// thoughtful factoring of the large design space, and thorough +// test coverage. +// +// - Write a fuzz-like test that selects function calls at +// random in the corpus, inlines them, and checks that the +// result is either a sensible error or a valid transformation. +// +// - Eliminate parameters that are unreferenced in the callee +// and whose argument expression is side-effect free. +// +// - Afford the client more control such as a limit on the total +// increase in line count, or a refusal to inline using the +// general approach (replacing name by function literal). This +// could be achieved by returning metadata alongside the result +// and having the client conditionally discard the change. 
+// +// - Is it acceptable to skip effects that are limited to runtime +// panics? Can we avoid evaluating an argument x.f +// or a[i] when the corresponding parameter is unused? +// +// - When caller syntax permits a block, replace argument-to-parameter +// assignment by a set of local var decls, e.g. f(1, 2) would +// become { var x, y = 1, 2; body... }. +// +// But even this is complicated: a single var decl initializer +// cannot declare all the parameters and initialize them to their +// arguments in one go if they have varied types. Instead, +// one must use multiple specs such as: +// > { var x int = 1; var y int32 = 2; body ...} +// but this means that the initializer expression for y is +// within the scope of x, so it may require α-renaming. +// +// It is tempting to use a short var decl { x, y := 1, 2; body ...} +// as it permits simultaneous declaration and initialization +// of many variables of varied type. However, one must take care +// to convert each argument expression to the correct parameter +// variable type, perhaps explicitly. (Consider "x := 1 << 64".) +// +// Also, as a matter of style, having all parameter declarations +// and argument expressions in a single statement is potentially +// unwieldy. +// +// - Support inlining of generic functions, replacing type parameters +// by their instantiations. +// +// - Support inlining of calls to function literals such as: +// > f := func(...) { ...} +// > f() +// including recursive ones: +// > var f func(...) +// > f = func(...) { ...f...} +// > f() +// But note that the existing algorithm makes widespread assumptions +// that the callee is a package-level function or method. +// +// - Eliminate parens inserted conservatively when they are redundant. +// +// - Allow non-'go' build systems such as Bazel/Blaze a chance to +// decide whether an import is accessible using logic other than +// "/internal/" path segments. This could be achieved by returning +// the list of added import paths. 
+// +// - Inlining a function from another module may change the +// effective version of the Go language spec that governs it. We +// should probably make the client responsible for rejecting +// attempts to inline from newer callees to older callers, since +// there's no way for this package to access module versions. +// +// - Use an alternative implementation of the import-organizing +// operation that doesn't require operating on a complete file +// (and reformatting). Then return the results in a higher-level +// form as a set of import additions and deletions plus a single +// diff that encloses the call expression. This interface could +// perhaps be implemented atop imports.Process by post-processing +// its result to obtain the abstract import changes and discarding +// its formatted output. +package inline + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "log" + pathpkg "path" + "reflect" + "sort" + "strings" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/imports" + "golang.org/x/tools/internal/typeparams" +) + +// A Caller describes the function call and its enclosing context. +// +// The client is responsible for populating this struct and passing it to Inline. +type Caller struct { + Fset *token.FileSet + Types *types.Package + Info *types.Info + File *ast.File + Call *ast.CallExpr + Content []byte +} + +func (caller *Caller) offset(pos token.Pos) int { return offsetOf(caller.Fset, pos) } + +// Inline inlines the called function (callee) into the function call (caller) +// and returns the updated, formatted content of the caller source file. +func Inline(caller *Caller, callee_ *Callee) ([]byte, error) { + callee := &callee_.impl + + // -- check caller -- + + // Inlining of dynamic calls is not currently supported, + // even for local closure calls. + if typeutil.StaticCallee(caller.Info, caller.Call) == nil { + // e.g. 
interface method + return nil, fmt.Errorf("cannot inline: not a static function call") + } + + // Reject cross-package inlining if callee has + // free references to unexported symbols. + samePkg := caller.Types.Path() == callee.PkgPath + if !samePkg && len(callee.Unexported) > 0 { + return nil, fmt.Errorf("cannot inline call to %s because body refers to non-exported %s", + callee.Name, callee.Unexported[0]) + } + + // -- analyze callee's free references in caller context -- + + // syntax path enclosing Call, innermost first (Path[0]=Call) + callerPath, _ := astutil.PathEnclosingInterval(caller.File, caller.Call.Pos(), caller.Call.End()) + callerLookup := func(name string, pos token.Pos) types.Object { + for _, n := range callerPath { + // The function body scope (containing not just params) + // is associated with FuncDecl.Type, not FuncDecl.Body. + if decl, ok := n.(*ast.FuncDecl); ok { + n = decl.Type + } + if scope := caller.Info.Scopes[n]; scope != nil { + if _, obj := scope.LookupParent(name, pos); obj != nil { + return obj + } + } + } + return nil + } + + // Import map, initially populated with caller imports. + // + // For simplicity we ignore existing dot imports, so that a + // qualified identifier (QI) in the callee is always + // represented by a QI in the caller, allowing us to treat a + // QI like a selection on a package name. + importMap := make(map[string]string) // maps package path to local name + for _, imp := range caller.File.Imports { + if pkgname, ok := importedPkgName(caller.Info, imp); ok && pkgname.Name() != "." { + importMap[pkgname.Imported().Path()] = pkgname.Name() + } + } + + // localImportName returns the local name for a given imported package path. + var newImports []string + localImportName := func(path string) string { + name, ok := importMap[path] + if !ok { + // import added by callee + // + // Choose local PkgName based on last segment of + // package path plus, if needed, a numeric suffix to + // ensure uniqueness. 
+ // + // TODO(adonovan): preserve the PkgName used + // in the original source, or, for a dot import, + // use the package's declared name. + base := pathpkg.Base(path) + name = base + for n := 0; callerLookup(name, caller.Call.Pos()) != nil; n++ { + name = fmt.Sprintf("%s%d", base, n) + } + + // TODO(adonovan): don't use a renaming import + // unless the local name differs from either + // the package name or the last segment of path. + // This requires that we tabulate (path, declared name, local name) + // triples for each package referenced by the callee. + newImports = append(newImports, fmt.Sprintf("%s %q", name, path)) + importMap[path] = name + } + return name + } + + // Compute the renaming of the callee's free identifiers. + objRenames := make([]string, len(callee.FreeObjs)) // "" => no rename + for i, obj := range callee.FreeObjs { + // obj is a free object of the callee. + // + // Possible cases are: + // - nil or a builtin + // => check not shadowed in caller. + // - package-level var/func/const/types + // => same package: check not shadowed in caller. + // => otherwise: import other package form a qualified identifier. + // (Unexported cross-package references were rejected already.) + // - type parameter + // => not yet supported + // - pkgname + // => import other package and use its local name. + // + // There can be no free references to labels, fields, or methods. + + var newName string + if obj.Kind == "pkgname" { + // Use locally appropriate import, creating as needed. + newName = localImportName(obj.PkgPath) // imported package + + } else if !obj.ValidPos { + // Built-in function, type, or nil: check not shadowed at caller. 
+ found := callerLookup(obj.Name, caller.Call.Pos()) // can't fail + if found.Pos().IsValid() { + return nil, fmt.Errorf("cannot inline because built-in %q is shadowed in caller by a %s (line %d)", + obj.Name, objectKind(found), + caller.Fset.Position(found.Pos()).Line) + } + + newName = obj.Name + + } else { + // Must be reference to package-level var/func/const/type, + // since type parameters are not yet supported. + newName = obj.Name + qualify := false + if obj.PkgPath == callee.PkgPath { + // reference within callee package + if samePkg { + // Caller and callee are in same package. + // Check caller has not shadowed the decl. + found := callerLookup(obj.Name, caller.Call.Pos()) // can't fail + if !isPkgLevel(found) { + return nil, fmt.Errorf("cannot inline because %q is shadowed in caller by a %s (line %d)", + obj.Name, objectKind(found), + caller.Fset.Position(found.Pos()).Line) + } + } else { + // Cross-package reference. + qualify = true + } + } else { + // Reference to a package-level declaration + // in another package, without a qualified identifier: + // it must be a dot import. + qualify = true + } + + // Form a qualified identifier, pkg.Name. + if qualify { + pkgName := localImportName(obj.PkgPath) + newName = pkgName + "." + newName + } + } + objRenames[i] = newName + } + + // Compute edits to inlined callee. + type edit struct { + start, end int // byte offsets wrt callee.content + new string + } + var edits []edit + + // Give explicit blank "_" names to all method parameters + // (including receiver) since we will make the receiver a regular + // parameter and one cannot mix named and unnamed parameters. + // e.g. 
func (T) f(int, string) -> (_ T, _ int, _ string) + if callee.decl.Recv != nil { + ensureNamed := func(params *ast.FieldList) { + for _, param := range params.List { + if param.Names == nil { + offset := callee.offset(param.Type.Pos()) + edits = append(edits, edit{ + start: offset, + end: offset, + new: "_ ", + }) + } + } + } + ensureNamed(callee.decl.Recv) + ensureNamed(callee.decl.Type.Params) + } + + // Generate replacements for each free identifier. + for _, ref := range callee.FreeRefs { + if repl := objRenames[ref.Object]; repl != "" { + edits = append(edits, edit{ + start: ref.Start, + end: ref.End, + new: repl, + }) + } + } + + // Edits are non-overlapping but insertions and edits may be coincident. + // Preserve original order. + sort.SliceStable(edits, func(i, j int) bool { + return edits[i].start < edits[j].start + }) + + // Check that all imports (in particular, the new ones) are accessible. + // TODO(adonovan): allow customization of the accessibility relation (e.g. for Bazel). + for path := range importMap { + // TODO(adonovan): better segment hygiene. + if i := strings.Index(path, "/internal/"); i >= 0 { + if !strings.HasPrefix(caller.Types.Path(), path[:i]) { + return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee.Name, path) + } + } + } + + // The transformation is expressed by splicing substrings of + // the two source files, because syntax trees don't preserve + // comments faithfully (see #20744). + var out bytes.Buffer + + // 'replace' emits to out the specified range of the callee, + // applying all edits that fall completely within it. 
+ replace := func(start, end int) { + off := start + for _, edit := range edits { + if start <= edit.start && edit.end <= end { + out.Write(callee.Content[off:edit.start]) + out.WriteString(edit.new) + off = edit.end + } + } + out.Write(callee.Content[off:end]) + } + + // Insert new imports after last existing import, + // to avoid migration of pre-import comments. + // The imports will be organized later. + { + offset := caller.offset(caller.File.Name.End()) // after package decl + if len(caller.File.Imports) > 0 { + // It's tempting to insert the new import after the last ImportSpec, + // but that may not be at the end of the import decl. + // Consider: import ( "a"; "b" ‸ ) + for _, decl := range caller.File.Decls { + if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT { + offset = caller.offset(decl.End()) // after import decl + } + } + } + out.Write(caller.Content[:offset]) + out.WriteString("\n") + for _, imp := range newImports { + fmt.Fprintf(&out, "import %s\n", imp) + } + out.Write(caller.Content[offset:caller.offset(caller.Call.Pos())]) + } + + // Special case: a call to a function whose body consists only + // of "return expr" may be replaced by the expression, so long as: + // + // (a) There are no receiver or parameter argument expressions + // whose side effects must be considered. + // (b) There are no named parameter or named result variables + // that could potentially escape. + // + // TODO(adonovan): expand this special case to cover more scenarios. + // Consider each parameter in turn. If: + // - the parameter does not escape and is never assigned; + // - its argument is pure (no effects or panics--basically just idents and literals) + // and referentially transparent (not new(T) or &T{...}) or referenced at most once; and + // - the argument and parameter have the same type + // then the parameter can be eliminated and each reference + // to it replaced by the argument. 
+ // If: + // - all parameters can be so replaced; + // - and the body is just "return expr"; + // - and the result vars are unnamed or never referenced (and thus cannot escape); + // then the call expression can be replaced by its body expression. + if callee.BodyIsReturnExpr && + callee.decl.Recv == nil && // no receiver arg effects to consider + len(caller.Call.Args) == 0 && // no argument effects to consider + !hasNamedVars(callee.decl.Type.Params) && // no param vars escape + !hasNamedVars(callee.decl.Type.Results) { // no result vars escape + + // A single return operand inlined to an expression + // context may need parens. Otherwise: + // func two() int { return 1+1 } + // print(-two()) => print(-1+1) // oops! + parens := callee.NumResults == 1 + + // If the call is a standalone statement, but the + // callee body is not suitable as a standalone statement + // (f() or <-ch), explicitly discard the results: + // _, _ = expr + if isCallStmt(callerPath) { + parens = false + + if !callee.ValidForCallStmt { + for i := 0; i < callee.NumResults; i++ { + if i > 0 { + out.WriteString(", ") + } + out.WriteString("_") + } + out.WriteString(" = ") + } + } + + // Emit the body expression(s). + for i, res := range callee.decl.Body.List[0].(*ast.ReturnStmt).Results { + if i > 0 { + out.WriteString(", ") + } + if parens { + out.WriteString("(") + } + replace(callee.offset(res.Pos()), callee.offset(res.End())) + if parens { + out.WriteString(")") + } + } + goto rest + } + + // Emit a function literal in place of the callee name, + // with appropriate replacements. + out.WriteString("func (") + if recv := callee.decl.Recv; recv != nil { + // Move method receiver to head of ordinary parameters. + replace(callee.offset(recv.Opening+1), callee.offset(recv.Closing)) + if len(callee.decl.Type.Params.List) > 0 { + out.WriteString(", ") + } + } + replace(callee.offset(callee.decl.Type.Params.Opening+1), + callee.offset(callee.decl.End())) + + // Emit call arguments. 
+ out.WriteString("(") + if callee.decl.Recv != nil { + // Move receiver argument x.f(...) to argument list f(x, ...). + recv := astutil.Unparen(caller.Call.Fun).(*ast.SelectorExpr).X + + // If the receiver argument and parameter have + // different pointerness, make the "&" or "*" explicit. + argPtr := is[*types.Pointer](typeparams.CoreType(caller.Info.TypeOf(recv))) + paramPtr := is[*ast.StarExpr](callee.decl.Recv.List[0].Type) + if !argPtr && paramPtr { + out.WriteString("&") + } else if argPtr && !paramPtr { + out.WriteString("*") + } + + out.Write(caller.Content[caller.offset(recv.Pos()):caller.offset(recv.End())]) + + if len(caller.Call.Args) > 0 { + out.WriteString(", ") + } + } + // Append ordinary args, sans initial "(". + out.Write(caller.Content[caller.offset(caller.Call.Lparen+1):caller.offset(caller.Call.End())]) + + // Append rest of caller file. +rest: + out.Write(caller.Content[caller.offset(caller.Call.End()):]) + + // Reformat, and organize imports. + // + // TODO(adonovan): this looks at the user's cache state. + // Replace with a simpler implementation since + // all the necessary imports are present but merely untidy. + // That will be faster, and also less prone to nondeterminism + // if there are bugs in our logic for import maintenance. + // + // However, golang.org/x/tools/internal/imports.ApplyFixes is + // too simple as it requires the caller to have figured out + // all the logical edits. In our case, we know all the new + // imports that are needed (see newImports), each of which can + // be specified as: + // + // &imports.ImportFix{ + // StmtInfo: imports.ImportInfo{path, name, + // IdentName: name, + // FixType: imports.AddImport, + // } + // + // but we don't know which imports are made redundant by the + // inlining itself. For example, inlining a call to + // fmt.Println may make the "fmt" import redundant. 
+ // + // Also, both imports.Process and internal/imports.ApplyFixes + // reformat the entire file, which is not ideal for clients + // such as gopls. (That said, the point of a canonical format + // is arguably that any tool can reformat as needed without + // this being inconvenient.) + res, err := imports.Process("output", out.Bytes(), nil) + if err != nil { + if false { // debugging + log.Printf("cannot reformat: %v <<%s>>", err, &out) + } + return nil, err // cannot reformat (a bug?) + } + return res, nil +} + +// -- helpers -- + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} + +func within(pos token.Pos, n ast.Node) bool { + return n.Pos() <= pos && pos <= n.End() +} + +func offsetOf(fset *token.FileSet, pos token.Pos) int { + return fset.PositionFor(pos, false).Offset +} + +// importedPkgName returns the PkgName object declared by an ImportSpec. +// TODO(adonovan): make this a method of types.Info (#62037). +func importedPkgName(info *types.Info, imp *ast.ImportSpec) (*types.PkgName, bool) { + var obj types.Object + if imp.Name != nil { + obj = info.Defs[imp.Name] + } else { + obj = info.Implicits[imp] + } + pkgname, ok := obj.(*types.PkgName) + return pkgname, ok +} + +func isPkgLevel(obj types.Object) bool { + return obj.Pkg().Scope().Lookup(obj.Name()) == obj +} + +// objectKind returns an object's kind (e.g. var, func, const, typename). +func objectKind(obj types.Object) string { + return strings.TrimPrefix(strings.ToLower(reflect.TypeOf(obj).String()), "*types.") +} + +// isCallStmt reports whether the function call (specified +// as a PathEnclosingInterval) appears within an ExprStmt. +func isCallStmt(callPath []ast.Node) bool { + _ = callPath[0].(*ast.CallExpr) + for _, n := range callPath[1:] { + switch n.(type) { + case *ast.ParenExpr: + continue + case *ast.ExprStmt: + return true + } + break + } + return false +} + +// hasNamedVars reports whether a function parameter tuple uses named variables. 
+// +// TODO(adonovan): this is a placeholder for a more complex analysis to detect +// whether inlining might cause named param/result variables to escape. +func hasNamedVars(tuple *ast.FieldList) bool { + return tuple != nil && len(tuple.List) > 0 && tuple.List[0].Names != nil +} diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go new file mode 100644 index 00000000000..f77d2851f17 --- /dev/null +++ b/internal/refactor/inline/inline_test.go @@ -0,0 +1,325 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inline_test + +import ( + "bytes" + "encoding/gob" + "fmt" + "go/ast" + "go/token" + "os" + "path/filepath" + "regexp" + "testing" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/expect" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/refactor/inline" + "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/txtar" +) + +// Test executes test scenarios specified by files in testdata/*.txtar. +func Test(t *testing.T) { + testenv.NeedsGoPackages(t) + + files, err := filepath.Glob("testdata/*.txtar") + if err != nil { + t.Fatal(err) + } + for _, file := range files { + file := file + t.Run(filepath.Base(file), func(t *testing.T) { + t.Parallel() + + // Extract archive to temporary tree. + ar, err := txtar.ParseFile(file) + if err != nil { + t.Fatal(err) + } + dir := t.TempDir() + if err := extractTxtar(ar, dir); err != nil { + t.Fatal(err) + } + + // Load packages. + cfg := &packages.Config{ + Dir: dir, + Mode: packages.LoadAllSyntax, + Env: append(os.Environ(), + "GO111MODULES=on", + "GOPATH=", + "GOWORK=off", + "GOPROXY=off"), + } + pkgs, err := packages.Load(cfg, "./...") + if err != nil { + t.Errorf("Load: %v", err) + } + // Report parse/type errors; they may be benign. 
+ packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, err := range pkg.Errors { + t.Log(err) + } + }) + + // Process @inline notes in comments in initial packages. + for _, pkg := range pkgs { + for _, file := range pkg.Syntax { + // Read file content (for @inline regexp, and inliner). + content, err := os.ReadFile(pkg.Fset.File(file.Pos()).Name()) + if err != nil { + t.Error(err) + continue + } + + // Read and process @inline notes. + notes, err := expect.ExtractGo(pkg.Fset, file) + if err != nil { + t.Errorf("parsing notes in %q: %v", pkg.Fset.File(file.Pos()).Name(), err) + continue + } + for _, note := range notes { + posn := pkg.Fset.Position(note.Pos) + if note.Name != "inline" { + t.Errorf("%s: invalid marker @%s", posn, note.Name) + continue + } + if nargs := len(note.Args); nargs != 2 { + t.Errorf("@inline: want 2 args, got %d", nargs) + continue + } + pattern, ok := note.Args[0].(*regexp.Regexp) + if !ok { + t.Errorf("%s: @inline(rx, want): want regular expression rx", posn) + continue + } + + // want is a []byte (success) or *Regexp (failure) + var want any + switch x := note.Args[1].(type) { + case string, expect.Identifier: + for _, file := range ar.Files { + if file.Name == fmt.Sprint(x) { + want = file.Data + break + } + } + if want == nil { + t.Errorf("%s: @inline(rx, want): archive entry %q not found", posn, x) + continue + } + case *regexp.Regexp: + want = x + default: + t.Errorf("%s: @inline(rx, want): want file name (to assert success) or error message regexp (to assert failure)", posn) + continue + } + t.Log("doInlineNote", posn) + if err := doInlineNote(pkg, file, content, pattern, posn, want); err != nil { + t.Errorf("%s: @inline(%v, %v): %v", posn, note.Args[0], note.Args[1], err) + continue + } + } + } + } + }) + } +} + +// doInlineNote executes an assertion specified by a single +// @inline(re"pattern", want) note in a comment. 
It finds the first +// match of regular expression 'pattern' on the same line, finds the +// innermost enclosing CallExpr, and inlines it. +// +// Finally it checks that, on success, the transformed file is equal +// to want (a []byte), or on failure that the error message matches +// want (a *Regexp). +func doInlineNote(pkg *packages.Package, file *ast.File, content []byte, pattern *regexp.Regexp, posn token.Position, want any) error { + // Find extent of pattern match within commented line. + var startPos, endPos token.Pos + { + tokFile := pkg.Fset.File(file.Pos()) + lineStartOffset := int(tokFile.LineStart(posn.Line)) - tokFile.Base() + line := content[lineStartOffset:] + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line = line[:i] + } + matches := pattern.FindSubmatchIndex(line) + var start, end int // offsets + switch len(matches) { + case 2: + // no subgroups: return the range of the regexp expression + start, end = matches[0], matches[1] + case 4: + // one subgroup: return its range + start, end = matches[2], matches[3] + default: + return fmt.Errorf("invalid location regexp %q: expect either 0 or 1 subgroups, got %d", + pattern, len(matches)/2-1) + } + startPos = tokFile.Pos(lineStartOffset + start) + endPos = tokFile.Pos(lineStartOffset + end) + } + + // Find innermost call enclosing the pattern match. + var caller *inline.Caller + { + path, _ := astutil.PathEnclosingInterval(file, startPos, endPos) + for _, n := range path { + if call, ok := n.(*ast.CallExpr); ok { + caller = &inline.Caller{ + Fset: pkg.Fset, + Types: pkg.Types, + Info: pkg.TypesInfo, + File: file, + Call: call, + Content: content, + } + break + } + } + if caller == nil { + return fmt.Errorf("no enclosing call") + } + } + + // Is it a static function call? + fn := typeutil.StaticCallee(caller.Info, caller.Call) + if fn == nil { + return fmt.Errorf("cannot inline: not a static call") + } + + // Find callee function. 
+ var ( + calleePkg *packages.Package + calleeDecl *ast.FuncDecl + ) + { + var same func(*ast.FuncDecl) bool + // Is the call within the package? + if fn.Pkg() == caller.Types { + calleePkg = pkg // same as caller + same = func(decl *ast.FuncDecl) bool { + return decl.Name.Pos() == fn.Pos() + } + } else { + // Different package. Load it now. + // (The primary load loaded all dependencies, + // but we choose to load it again, with + // a distinct token.FileSet and types.Importer, + // to keep the implementation honest.) + cfg := &packages.Config{ + // TODO(adonovan): get the original module root more cleanly + Dir: filepath.Dir(filepath.Dir(pkg.GoFiles[0])), + Fset: token.NewFileSet(), + Mode: packages.LoadSyntax, + } + roots, err := packages.Load(cfg, fn.Pkg().Path()) + if err != nil { + return fmt.Errorf("loading callee package: %v", err) + } + if packages.PrintErrors(roots) > 0 { + return fmt.Errorf("callee package had errors") // (see log) + } + calleePkg = roots[0] + posn := caller.Fset.Position(fn.Pos()) // callee posn wrt caller package + same = func(decl *ast.FuncDecl) bool { + // We can't rely on columns in export data: + // some variants replace it with 1. + // We can't expect file names to have the same prefix. + // export data for go1.20 std packages have $GOROOT written in + // them, so how are we supposed to find the source? Yuck! + // Ugh. need to samefile? Nope $GOROOT just won't work + // This is highly client specific anyway. + posn2 := calleePkg.Fset.Position(decl.Name.Pos()) + return posn.Filename == posn2.Filename && + posn.Line == posn2.Line + } + } + + for _, file := range calleePkg.Syntax { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok && same(decl) { + calleeDecl = decl + goto found + } + } + } + return fmt.Errorf("can't find FuncDecl for callee") // can't happen? + found: + } + + // Do the inlining. For the purposes of the test, + // AnalyzeCallee and Inline are a single operation. 
+ got, err := func() ([]byte, error) { + filename := calleePkg.Fset.File(calleeDecl.Pos()).Name() + content, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + callee, err := inline.AnalyzeCallee( + calleePkg.Fset, + calleePkg.Types, + calleePkg.TypesInfo, + calleeDecl, + content) + if err != nil { + return nil, err + } + + // Perform Gob transcoding so that it is exercised by the test. + var enc bytes.Buffer + if err := gob.NewEncoder(&enc).Encode(callee); err != nil { + return nil, fmt.Errorf("internal error: gob encoding failed: %v", err) + } + *callee = inline.Callee{} + if err := gob.NewDecoder(&enc).Decode(callee); err != nil { + return nil, fmt.Errorf("internal error: gob decoding failed: %v", err) + } + + return inline.Inline(caller, callee) + }() + if err != nil { + if wantRE, ok := want.(*regexp.Regexp); ok { + if !wantRE.MatchString(err.Error()) { + return fmt.Errorf("Inline failed with wrong error: %v (want error matching %q)", err, want) + } + return nil // expected error + } + return fmt.Errorf("Inline failed: %v", err) // success was expected + } + + // Inline succeeded. + if want, ok := want.([]byte); ok { + got = append(bytes.TrimSpace(got), '\n') + want = append(bytes.TrimSpace(want), '\n') + if diff := diff.Unified("want", "got", string(want), string(got)); diff != "" { + return fmt.Errorf("Inline returned wrong output:\n%s\nWant:\n%s\nDiff:\n%s", + got, want, diff) + } + return nil + } + return fmt.Errorf("Inline succeeded unexpectedly: want error matching %q, got <<%s>>", want, got) + +} + +// TODO(adonovan): publish this a helper (#61386). 
+func extractTxtar(ar *txtar.Archive, dir string) error { + for _, file := range ar.Files { + name := filepath.Join(dir, file.Name) + if err := os.MkdirAll(filepath.Dir(name), 0777); err != nil { + return err + } + if err := os.WriteFile(name, file.Data, 0666); err != nil { + return err + } + } + return nil +} diff --git a/internal/refactor/inline/testdata/basic-err.txtar b/internal/refactor/inline/testdata/basic-err.txtar new file mode 100644 index 00000000000..18e0eb7adb3 --- /dev/null +++ b/internal/refactor/inline/testdata/basic-err.txtar @@ -0,0 +1,24 @@ +Test of inlining a function that references err.Error, +which is often a special case because it has no position. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "io" + +var _ = getError(io.EOF) //@ inline(re"getError", getError) + +func getError(err error) string { return err.Error() } + +-- getError -- +package a + +import "io" + +var _ = func(err error) string { return err.Error() }(io.EOF) //@ inline(re"getError", getError) + +func getError(err error) string { return err.Error() } diff --git a/internal/refactor/inline/testdata/basic-literal.txtar b/internal/refactor/inline/testdata/basic-literal.txtar new file mode 100644 index 00000000000..50bac33456a --- /dev/null +++ b/internal/refactor/inline/testdata/basic-literal.txtar @@ -0,0 +1,19 @@ +Most basic test of inlining by literalization. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +var _ = add(1, 2) //@ inline(re"add", add) + +func add(x, y int) int { return x + y } + +-- add -- +package a + +var _ = func(x, y int) int { return x + y }(1, 2) //@ inline(re"add", add) + +func add(x, y int) int { return x + y } diff --git a/internal/refactor/inline/testdata/basic-reduce.txtar b/internal/refactor/inline/testdata/basic-reduce.txtar new file mode 100644 index 00000000000..9eedbc05f1e --- /dev/null +++ b/internal/refactor/inline/testdata/basic-reduce.txtar @@ -0,0 +1,19 @@ +Most basic test of inlining by reduction. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +var _ = zero() //@ inline(re"zero", zero) + +func zero() int { return 0 } + +-- zero -- +package a + +var _ = (0) //@ inline(re"zero", zero) + +func zero() int { return 0 } diff --git a/internal/refactor/inline/testdata/comments.txtar b/internal/refactor/inline/testdata/comments.txtar new file mode 100644 index 00000000000..0482e919a48 --- /dev/null +++ b/internal/refactor/inline/testdata/comments.txtar @@ -0,0 +1,56 @@ +Inlining, whether by literalization or reduction, +preserves comments in the callee. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/f.go -- +package a + +func _() { + f() //@ inline(re"f", f) +} + +func f() { + // a + /* b */ g() /* c */ + // d +} + +-- f -- +package a + +func _() { + func() { + // a + /* b */ + g() /* c */ + // d + }() //@ inline(re"f", f) +} + +func f() { + // a + /* b */ + g() /* c */ + // d +} + +-- a/g.go -- +package a + +func _() { + println(g()) //@ inline(re"g", g) +} + +func g() int { return 1 /*hello*/ + /*there*/ 1 } + +-- g -- +package a + +func _() { + println((1 /*hello*/ + /*there*/ 1)) //@ inline(re"g", g) +} + +func g() int { return 1 /*hello*/ + /*there*/ 1 } diff --git a/internal/refactor/inline/testdata/crosspkg.txtar b/internal/refactor/inline/testdata/crosspkg.txtar new file mode 100644 index 00000000000..43dc63f32ea --- /dev/null +++ b/internal/refactor/inline/testdata/crosspkg.txtar @@ -0,0 +1,77 @@ +Test of cross-package inlining. +The first case creates a new import, +the second reuses an existing one. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +// This comment does not migrate. + +import ( + "fmt" + "testdata/b" +) + +// Nor this one. + +func A() { + fmt.Println() + b.B1() //@ inline(re"B1", b1result) + b.B2() //@ inline(re"B2", b2result) +} + +-- b/b.go -- +package b + +import "testdata/c" +import "fmt" + +func B1() { c.C() } +func B2() { fmt.Println() } + +-- c/c.go -- +package c + +func C() {} + +-- b1result -- +package a + +// This comment does not migrate. + +import ( + "fmt" + "testdata/b" + + c "testdata/c" +) + +// Nor this one. + +func A() { + fmt.Println() + func() { c.C() }() //@ inline(re"B1", b1result) + b.B2() //@ inline(re"B2", b2result) +} + +-- b2result -- +package a + +// This comment does not migrate. + +import ( + "fmt" + "testdata/b" +) + +// Nor this one. 
+ +func A() { + fmt.Println() + b.B1() //@ inline(re"B1", b1result) + func() { fmt.Println() }() //@ inline(re"B2", b2result) +} diff --git a/internal/refactor/inline/testdata/dotimport.txtar b/internal/refactor/inline/testdata/dotimport.txtar new file mode 100644 index 00000000000..7e886afdb94 --- /dev/null +++ b/internal/refactor/inline/testdata/dotimport.txtar @@ -0,0 +1,35 @@ +Test of inlining a function that uses a dot import. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +func A() {} + +-- b/b.go -- +package b + +import . "testdata/a" + +func B() { A() } + +-- c/c.go -- +package c + +import "testdata/b" + +func _() { + b.B() //@ inline(re"B", result) +} + +-- result -- +package c + +import a "testdata/a" + +func _() { + func() { a.A() }() //@ inline(re"B", result) +} diff --git a/internal/refactor/inline/testdata/err-basic.txtar b/internal/refactor/inline/testdata/err-basic.txtar new file mode 100644 index 00000000000..54377c70c4b --- /dev/null +++ b/internal/refactor/inline/testdata/err-basic.txtar @@ -0,0 +1,30 @@ +Basic errors: +- Inlining of generic functions is not yet supported. + +We can't express tests for the error resulting from inlining a +conversion T(x), a call to a literal func(){}(), a call to a +func-typed var, or a call to an interface method, since all of these +cause the test driver to fail to locate the callee, so +it doesn't even reach the Indent function. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/generic.go -- +package a + +func _() { + f[int]() //@ inline(re"f", re"type parameters are not yet supported") +} + +func f[T any]() {} + +-- a/nobody.go -- +package a + +func _() { + g() //@ inline(re"g", re"has no body") +} + +func g() diff --git a/internal/refactor/inline/testdata/err-shadow-builtin.txtar b/internal/refactor/inline/testdata/err-shadow-builtin.txtar new file mode 100644 index 00000000000..543d38fe540 --- /dev/null +++ b/internal/refactor/inline/testdata/err-shadow-builtin.txtar @@ -0,0 +1,36 @@ +Failures to inline because callee references a builtin that +is shadowed by caller. + +-- go.mod -- +module testdata +go 1.12 + +-- a/nil.go -- +package a + +func _() { + const nil = 1 + _ = f() //@ inline(re"f", re"nil.*shadowed.*by.*const .line 4") +} + +func f() *int { return nil } + +-- a/append.go -- +package a + +func _() { + type append int + g(nil) //@ inline(re"g", re"append.*shadowed.*by.*typename .line 4") +} + +func g(x []int) { _ = append(x, x...) } + +-- a/type.go -- +package a + +func _() { + type int uint8 + _ = h(0) //@ inline(re"h", re"int.*shadowed.*by.*typename .line 4") +} + +func h(x int) int { return x + 1 } diff --git a/internal/refactor/inline/testdata/err-shadow-pkg.txtar b/internal/refactor/inline/testdata/err-shadow-pkg.txtar new file mode 100644 index 00000000000..4338b8b31cd --- /dev/null +++ b/internal/refactor/inline/testdata/err-shadow-pkg.txtar @@ -0,0 +1,36 @@ +Test of failure to inline because callee references a +package-level decl that is shadowed by caller. + +Observe that the first call to f can be inlined because +the shadowing has not yet occurred; but the second call +to f is within the scope of the local constant v. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +func _() { + f() //@ inline(re"f", result) + const v = 1 + f() //@ inline(re"f", re"v.*shadowed.*by.*const .line 5") +} + +func f() int { return v } + +var v int + +-- result -- +package a + +func _() { + _ = v //@ inline(re"f", result) + const v = 1 + f() //@ inline(re"f", re"v.*shadowed.*by.*const .line 5") +} + +func f() int { return v } + +var v int diff --git a/internal/refactor/inline/testdata/err-unexported.txtar b/internal/refactor/inline/testdata/err-unexported.txtar new file mode 100644 index 00000000000..9ba91e5195d --- /dev/null +++ b/internal/refactor/inline/testdata/err-unexported.txtar @@ -0,0 +1,31 @@ +Errors from attempting to import a function from another +package whose body refers to unexported declarations. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +func A1() { b() } +func b() {} + +func A2() { var x T; print(x.f) } +type T struct { f int } + +func A3() { _ = &T{f: 0} } + +func A4() { _ = &T{0} } + +-- b/b.go -- +package b + +import "testdata/a" + +func _() { + a.A1() //@ inline(re"A1", re`body refers to non-exported b`) + a.A2() //@ inline(re"A2", re`body refers to non-exported \(testdata/a.T\).f`) + a.A3() //@ inline(re"A3", re`body refers to non-exported \(testdata/a.T\).f`) + a.A4() //@ inline(re"A4", re`body refers to non-exported \(testdata/a.T\).f`) +} diff --git a/internal/refactor/inline/testdata/exprstmt.txtar b/internal/refactor/inline/testdata/exprstmt.txtar new file mode 100644 index 00000000000..449ce35c454 --- /dev/null +++ b/internal/refactor/inline/testdata/exprstmt.txtar @@ -0,0 +1,99 @@ +Inlining an expression into an ExprStmt. +Call and receive expressions can be inlined directly +(though calls to only some builtins can be reduced). +All other expressions are inlined as "_ = expr". 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/call.go -- +package a + +func _() { + call() //@ inline(re"call", call) +} + +func call() int { return recv() } + +-- call -- +package a + +func _() { + recv() //@ inline(re"call", call) +} + +func call() int { return recv() } + +-- a/recv.go -- +package a + +func _() { + recv() //@ inline(re"recv", recv) +} + +func recv() int { return <-(chan int)(nil) } + +-- recv -- +package a + +func _() { + <-(chan int)(nil) //@ inline(re"recv", recv) +} + +func recv() int { return <-(chan int)(nil) } + +-- a/constant.go -- +package a + +func _() { + constant() //@ inline(re"constant", constant) +} + +func constant() int { return 0 } + +-- constant -- +package a + +func _() { + _ = 0 //@ inline(re"constant", constant) +} + +func constant() int { return 0 } + +-- a/builtin.go -- +package a + +func _() { + builtin() //@ inline(re"builtin", builtin) +} + +func builtin() int { return len("") } + +-- builtin -- +package a + +func _() { + _ = len("") //@ inline(re"builtin", builtin) +} + +func builtin() int { return len("") } + +-- a/copy.go -- +package a + +func _() { + _copy() //@ inline(re"copy", copy) +} + +func _copy() int { return copy([]int(nil), []int(nil)) } + +-- copy -- +package a + +func _() { + copy([]int(nil), []int(nil)) //@ inline(re"copy", copy) +} + +func _copy() int { return copy([]int(nil), []int(nil)) } + diff --git a/internal/refactor/inline/testdata/import-shadow.txtar b/internal/refactor/inline/testdata/import-shadow.txtar new file mode 100644 index 00000000000..913c9cbe01a --- /dev/null +++ b/internal/refactor/inline/testdata/import-shadow.txtar @@ -0,0 +1,41 @@ +Test of heuristic for generating a fresh import PkgName. +The names c and c0 are taken, so it uses c1. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +import "testdata/b" + +func A() { + const c = 1 + type c0 int + b.B() //@ inline(re"B", result) +} + +-- b/b.go -- +package b + +import "testdata/c" + +func B() { c.C() } + +-- c/c.go -- +package c + +func C() {} + +-- result -- +package a + +import c1 "testdata/c" + +func A() { + const c = 1 + type c0 int + func() { c1.C() }() //@ inline(re"B", result) +} + diff --git a/internal/refactor/inline/testdata/internal.txtar b/internal/refactor/inline/testdata/internal.txtar new file mode 100644 index 00000000000..92a0fef4c0a --- /dev/null +++ b/internal/refactor/inline/testdata/internal.txtar @@ -0,0 +1,29 @@ +Test of inlining a function that references an +internal package that is not accessible to the caller. + +(c -> b -> b/internal/a) + +-- go.mod -- +module testdata +go 1.12 + +-- b/internal/a/a.go -- +package a + +func A() {} + +-- b/b.go -- +package b + +import "testdata/b/internal/a" + +func B() { a.A() } + +-- c/c.go -- +package c + +import "testdata/b" + +func _() { + b.B() //@ inline(re"B", re`body refers to inaccessible package "testdata/b/internal/a"`) +} diff --git a/internal/refactor/inline/testdata/method.txtar b/internal/refactor/inline/testdata/method.txtar new file mode 100644 index 00000000000..a4e02d575ca --- /dev/null +++ b/internal/refactor/inline/testdata/method.txtar @@ -0,0 +1,104 @@ +Test of inlining a method call. + +The call to (*T).g0 implicitly takes the address &x. + +The f1/g1 methods have parameters, exercising the +splicing of the receiver into the parameter list. +Notice that the unnamed parameters become named. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/f0.go -- +package a + +type T int +func (T) f0() {} + +func _(x T) { + x.f0() //@ inline(re"f0", f0) +} + +-- f0 -- +package a + +type T int + +func (T) f0() {} + +func _(x T) { + func(_ T) {}(x) //@ inline(re"f0", f0) +} + +-- a/g0.go -- +package a + +func (recv *T) g0() {} + +func _(x T) { + x.g0() //@ inline(re"g0", g0) +} + +-- g0 -- +package a + +func (recv *T) g0() {} + +func _(x T) { + func(recv *T) {}(&x) //@ inline(re"g0", g0) +} + +-- a/f1.go -- +package a + +func (T) f1(int, int) {} + +func _(x T) { + x.f1(1, 2) //@ inline(re"f1", f1) +} + +-- f1 -- +package a + +func (T) f1(int, int) {} + +func _(x T) { + func(_ T, _ int, _ int) {}(x, 1, 2) //@ inline(re"f1", f1) +} + +-- a/g1.go -- +package a + +func (recv *T) g1(int, int) {} + +func _(x T) { + x.g1(1, 2) //@ inline(re"g1", g1) +} + +-- g1 -- +package a + +func (recv *T) g1(int, int) {} + +func _(x T) { + func(recv *T, _ int, _ int) {}(&x, 1, 2) //@ inline(re"g1", g1) +} + +-- a/h.go -- +package a + +func (T) h() int { return 1 } + +func _() { + new(T).h() //@ inline(re"h", h) +} + +-- h -- +package a + +func (T) h() int { return 1 } + +func _() { + func(_ T) int { return 1 }(*new(T)) //@ inline(re"h", h) +} diff --git a/internal/refactor/inline/testdata/n-ary.txtar b/internal/refactor/inline/testdata/n-ary.txtar new file mode 100644 index 00000000000..2de97358aed --- /dev/null +++ b/internal/refactor/inline/testdata/n-ary.txtar @@ -0,0 +1,79 @@ +Tests of various n-ary result function cases. 
+ +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +func _() { + println(f1()) //@ inline(re"f1", f1) +} + +func f1() (int, int) { return 1, 1 } + +-- f1 -- +package a + +func _() { + println(1, 1) //@ inline(re"f1", f1) +} + +func f1() (int, int) { return 1, 1 } + +-- b/b.go -- +package b + +func _() { + f2() //@ inline(re"f2", f2) +} + +func f2() (int, int) { return 2, 2 } + +-- f2 -- +package b + +func _() { + _, _ = 2, 2 //@ inline(re"f2", f2) +} + +func f2() (int, int) { return 2, 2 } + +-- c/c.go -- +package c + +func _() { + _, _ = f3() //@ inline(re"f3", f3) +} + +func f3() (int, int) { return f3A() } +func f3A() (x, y int) + +-- f3 -- +package c + +func _() { + _, _ = f3A() //@ inline(re"f3", f3) +} + +func f3() (int, int) { return f3A() } +func f3A() (x, y int) + +-- d/d.go -- +package d + +func _() { + println(-f4()) //@ inline(re"f4", f4) +} + +func f4() int { return 2 + 2 } + +-- f4 -- +package d + +func _() { + println(-(2 + 2)) //@ inline(re"f4", f4) +} + +func f4() int { return 2 + 2 } diff --git a/internal/refactor/inline/testdata/revdotimport.txtar b/internal/refactor/inline/testdata/revdotimport.txtar new file mode 100644 index 00000000000..f8b895e9218 --- /dev/null +++ b/internal/refactor/inline/testdata/revdotimport.txtar @@ -0,0 +1,43 @@ +Test of inlining a function into a context that already +dot-imports the necessary additional import. + +-- go.mod -- +module testdata +go 1.12 + +-- a/a.go -- +package a + +func A() {} + +-- b/b.go -- +package b + +import "testdata/a" + +func B() { a.A() } + +-- c/c.go -- +package c + +import . "testdata/a" +import "testdata/b" + +func _() { + A() + b.B() //@ inline(re"B", result) +} + +-- result -- +package c + +import ( + . 
"testdata/a" + + a "testdata/a" +) + +func _() { + A() + func() { a.A() }() //@ inline(re"B", result) +} diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go index 9b01888adaa..0fe217b3c16 100644 --- a/internal/testenv/testenv.go +++ b/internal/testenv/testenv.go @@ -42,7 +42,7 @@ func packageMainIsDevel() bool { return info.Main.Version == "(devel)" } -var checkGoGoroot struct { +var checkGoBuild struct { once sync.Once err error } @@ -79,40 +79,48 @@ func hasTool(tool string) error { } case "go": - checkGoGoroot.once.Do(func() { - // Ensure that the 'go' command found by exec.LookPath is from the correct - // GOROOT. Otherwise, 'some/path/go test ./...' will test against some - // version of the 'go' binary other than 'some/path/go', which is almost - // certainly not what the user intended. - out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput() - if err != nil { - checkGoGoroot.err = err - return + checkGoBuild.once.Do(func() { + if runtime.GOROOT() != "" { + // Ensure that the 'go' command found by exec.LookPath is from the correct + // GOROOT. Otherwise, 'some/path/go test ./...' will test against some + // version of the 'go' binary other than 'some/path/go', which is almost + // certainly not what the user intended. 
+ out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput() + if err != nil { + checkGoBuild.err = err + return + } + GOROOT := strings.TrimSpace(string(out)) + if GOROOT != runtime.GOROOT() { + checkGoBuild.err = fmt.Errorf("'go env GOROOT' does not match runtime.GOROOT:\n\tgo env: %s\n\tGOROOT: %s", GOROOT, runtime.GOROOT()) + return + } } - GOROOT := strings.TrimSpace(string(out)) - if GOROOT != runtime.GOROOT() { - checkGoGoroot.err = fmt.Errorf("'go env GOROOT' does not match runtime.GOROOT:\n\tgo env: %s\n\tGOROOT: %s", GOROOT, runtime.GOROOT()) + + dir, err := os.MkdirTemp("", "testenv-*") + if err != nil { + checkGoBuild.err = err return } + defer os.RemoveAll(dir) - // Also ensure that that GOROOT includes a compiler: 'go' commands - // don't in general work without it, and some builders - // (such as android-amd64-emu) seem to lack it in the test environment. - cmd := exec.Command(tool, "tool", "-n", "compile") - stderr := new(bytes.Buffer) - stderr.Write([]byte("\n")) - cmd.Stderr = stderr - out, err = cmd.Output() - if err != nil { - checkGoGoroot.err = fmt.Errorf("%v: %v%s", cmd, err, stderr) + mainGo := filepath.Join(dir, "main.go") + if err := os.WriteFile(mainGo, []byte("package main\nfunc main() {}\n"), 0644); err != nil { + checkGoBuild.err = err return } - if _, err := exec.LookPath(string(bytes.TrimSpace(out))); err != nil { - checkGoGoroot.err = err + cmd := exec.Command("go", "build", "-o", os.DevNull, mainGo) + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + if len(out) > 0 { + checkGoBuild.err = fmt.Errorf("%v: %v\n%s", cmd, err, out) + } else { + checkGoBuild.err = fmt.Errorf("%v: %v", cmd, err) + } } }) - if checkGoGoroot.err != nil { - return checkGoGoroot.err + if checkGoBuild.err != nil { + return checkGoBuild.err } case "diff": diff --git a/internal/tool/tool.go b/internal/tool/tool.go index f4dd8d1c562..36ba55bea39 100644 --- a/internal/tool/tool.go +++ b/internal/tool/tool.go @@ -220,6 +220,9 @@ func 
addFlags(f *flag.FlagSet, field reflect.StructField, value reflect.Value) * if value.Kind() != reflect.Struct { return nil } + + // TODO(adonovan): there's no need for this special treatment of Profile: + // The caller can use f.Lookup("profile.cpu") etc instead. p, _ := value.Addr().Interface().(*Profile) // go through all the fields of the struct for i := 0; i < value.Type().NumField(); i++ { diff --git a/internal/typeparams/coretype.go b/internal/typeparams/coretype.go index 993135ec90e..71248209ee5 100644 --- a/internal/typeparams/coretype.go +++ b/internal/typeparams/coretype.go @@ -81,13 +81,13 @@ func CoreType(T types.Type) types.Type { // restrictions may be arbitrarily complex. For example, consider the // following: // -// type A interface{ ~string|~[]byte } +// type A interface{ ~string|~[]byte } // -// type B interface{ int|string } +// type B interface{ int|string } // -// type C interface { ~string|~int } +// type C interface { ~string|~int } // -// type T[P interface{ A|B; C }] int +// type T[P interface{ A|B; C }] int // // In this example, the structural type restriction of P is ~string|int: A|B // expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, diff --git a/internal/typeparams/termlist.go b/internal/typeparams/termlist.go index 933106a23dd..cbd12f80131 100644 --- a/internal/typeparams/termlist.go +++ b/internal/typeparams/termlist.go @@ -30,7 +30,7 @@ func (xl termlist) String() string { var buf bytes.Buffer for i, x := range xl { if i > 0 { - buf.WriteString(" ∪ ") + buf.WriteString(" | ") } buf.WriteString(x.String()) } diff --git a/internal/typeparams/typeterm.go b/internal/typeparams/typeterm.go index 7ddee28d987..7350bb702a1 100644 --- a/internal/typeparams/typeterm.go +++ b/internal/typeparams/typeterm.go @@ -10,11 +10,10 @@ import "go/types" // A term describes elementary type sets: // -// ∅: (*term)(nil) == ∅ // set of no types (empty set) -// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse) -// T: &term{false, 
T} == {T} // set of type T -// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t -// +// ∅: (*term)(nil) == ∅ // set of no types (empty set) +// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse) +// T: &term{false, T} == {T} // set of type T +// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t type term struct { tilde bool // valid if typ != nil typ types.Type diff --git a/internal/typesinternal/objectpath.go b/internal/typesinternal/objectpath.go new file mode 100644 index 00000000000..5e96e895573 --- /dev/null +++ b/internal/typesinternal/objectpath.go @@ -0,0 +1,24 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import "go/types" + +// This file contains back doors that allow gopls to avoid method sorting when +// using the objectpath package. +// +// This is performance-critical in certain repositories, but changing the +// behavior of the objectpath package is still being discussed in +// golang/go#61443. If we decide to remove the sorting in objectpath we can +// simply delete these back doors. Otherwise, we should add a new API to +// objectpath that allows controlling the sorting. + +// SkipEncoderMethodSorting marks enc (which must be an *objectpath.Encoder) as +// not requiring sorted methods. +var SkipEncoderMethodSorting func(enc interface{}) + +// ObjectpathObject is like objectpath.Object, but allows suppressing method +// sorting. 
+var ObjectpathObject func(pkg *types.Package, p string, skipMethodSorting bool) (types.Object, error) diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index 66e8b099bd6..ce7d4351b22 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -11,8 +11,6 @@ import ( "go/types" "reflect" "unsafe" - - "golang.org/x/tools/go/types/objectpath" ) func SetUsesCgo(conf *types.Config) bool { @@ -52,17 +50,3 @@ func ReadGo116ErrorData(err types.Error) (code ErrorCode, start, end token.Pos, } var SetGoVersion = func(conf *types.Config, version string) bool { return false } - -// SkipEncoderMethodSorting marks the encoder as not requiring sorted methods, -// as an optimization for gopls (which guarantees the order of parsed source files). -// -// TODO(golang/go#61443): eliminate this parameter one way or the other. -// -//go:linkname SkipEncoderMethodSorting golang.org/x/tools/go/types/objectpath.skipMethodSorting -func SkipEncoderMethodSorting(enc *objectpath.Encoder) - -// ObjectpathObject is like objectpath.Object, but allows suppressing method -// sorting (which is not necessary for gopls). 
-// -//go:linkname ObjectpathObject golang.org/x/tools/go/types/objectpath.object -func ObjectpathObject(pkg *types.Package, p objectpath.Path, skipMethodSorting bool) (types.Object, error) diff --git a/refactor/satisfy/find.go b/refactor/satisfy/find.go index 47dc97e471c..9e60af3b618 100644 --- a/refactor/satisfy/find.go +++ b/refactor/satisfy/find.go @@ -355,7 +355,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { f.sig = saved case *ast.CompositeLit: - switch T := coreType(tv.Type).(type) { + switch T := coreType(deref(tv.Type)).(type) { case *types.Struct: for i, elem := range e.Elts { if kv, ok := elem.(*ast.KeyValueExpr); ok { @@ -386,7 +386,7 @@ func (f *Finder) expr(e ast.Expr) types.Type { } default: - panic("unexpected composite literal type: " + tv.Type.String()) + panic(fmt.Sprintf("unexpected composite literal type %T: %v", tv.Type, tv.Type.String())) } case *ast.ParenExpr: