diff --git a/README.md b/README.md index 4dbf6d69aed..a10e1f7eacf 100644 --- a/README.md +++ b/README.md @@ -51,9 +51,6 @@ Selected packages: - `go/cfg` provides a simple control-flow graph (CFG) for a Go function. -- `go/expect` reads Go source files used as test inputs and interprets - special comments within them as queries or assertions for testing. - - `go/gcexportdata` and `go/gccgoexportdata` read and write the binary files containing type information used by the standard and `gccgo` compilers. diff --git a/cmd/bundle/gotypesalias.go b/cmd/bundle/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/bundle/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/bundle/main.go b/cmd/bundle/main.go index a4831c78776..fa73eb83a0a 100644 --- a/cmd/bundle/main.go +++ b/cmd/bundle/main.go @@ -68,9 +68,6 @@ // Update all bundles in the standard library: // // go generate -run bundle std - -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/bundle/main_test.go b/cmd/bundle/main_test.go index 4ee8521a074..0dee2afb0b2 100644 --- a/cmd/bundle/main_test.go +++ b/cmd/bundle/main_test.go @@ -11,7 +11,7 @@ import ( "runtime" "testing" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" ) func TestBundle(t *testing.T) { packagestest.TestAll(t, testBundle) } diff --git a/cmd/callgraph/gotypesalias.go b/cmd/callgraph/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/callgraph/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/callgraph/main.go b/cmd/callgraph/main.go index 1b5af1b52e1..9e440bbafb9 100644 --- a/cmd/callgraph/main.go +++ b/cmd/callgraph/main.go @@ -4,9 +4,6 @@ // callgraph: a tool for reporting the call graph of a Go program. // See Usage for details, or run with -help. - -//go:debug gotypesalias=0 - package main // import "golang.org/x/tools/cmd/callgraph" // TODO(adonovan): diff --git a/cmd/deadcode/deadcode.go b/cmd/deadcode/deadcode.go index e6f32bb9979..f129102cc4c 100644 --- a/cmd/deadcode/deadcode.go +++ b/cmd/deadcode/deadcode.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/deadcode/gotypesalias.go b/cmd/deadcode/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/deadcode/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). 
+// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/eg/eg.go b/cmd/eg/eg.go index 07e73d2efe7..108b9e3009f 100644 --- a/cmd/eg/eg.go +++ b/cmd/eg/eg.go @@ -5,9 +5,6 @@ // The eg command performs example-based refactoring. // For documentation, run the command, or see Help in // golang.org/x/tools/refactor/eg. - -//go:debug gotypesalias=0 - package main // import "golang.org/x/tools/cmd/eg" import ( diff --git a/cmd/eg/gotypesalias.go b/cmd/eg/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/eg/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/godex/godex.go b/cmd/godex/godex.go index 4955600f2d6..e91dbfcea5f 100644 --- a/cmd/godex/godex.go +++ b/cmd/godex/godex.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/godex/gotypesalias.go b/cmd/godex/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/godex/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/godoc/godoc_test.go b/cmd/godoc/godoc_test.go index b7b0e1ba8d9..94159445a54 100644 --- a/cmd/godoc/godoc_test.go +++ b/cmd/godoc/godoc_test.go @@ -21,7 +21,7 @@ import ( "testing" "time" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/testenv" ) @@ -197,11 +197,11 @@ func TestWebIndex(t *testing.T) { // Basic integration test for godoc HTTP interface. func testWeb(t *testing.T, x packagestest.Exporter, bin string, withIndex bool) { + testenv.NeedsGOROOTDir(t, "api") + switch runtime.GOOS { case "plan9": t.Skip("skipping on plan9: fails to start up quickly enough") - case "android", "ios": - t.Skip("skipping on mobile: lacks GOROOT/api in test environment") } // Write a fake GOROOT/GOPATH with some third party packages. diff --git a/cmd/godoc/gotypesalias.go b/cmd/godoc/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/godoc/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). 
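The gotypesalias.go files added throughout this change all carry the same pair of directives: a //go:build go1.23 constraint and a //go:debug gotypesalias=1 line, so that each command's package main opts in to materialized type aliases even though go.mod still says go 1.22. As a rough standalone probe of what that setting changes (an illustrative sketch, not part of this patch; it assumes a Go 1.22+ toolchain, where types.Alias exists):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

// With GODEBUG=gotypesalias=1 the type checker records `type A = int` as a
// *types.Alias; with gotypesalias=0 the alias is resolved away and the
// object's type is plain int.
func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", "package p; type A = int", 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	_, materialized := pkg.Scope().Lookup("A").Type().(*types.Alias)
	fmt.Println("aliases materialized:", materialized)
}

Run with GODEBUG=gotypesalias=1 the probe prints true, and with gotypesalias=0 it prints false, which is the behavioral difference these per-command files pin down until go.mod itself moves to 1.23.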
diff --git a/cmd/godoc/main.go b/cmd/godoc/main.go index 1c874cc0e15..a665be0769d 100644 --- a/cmd/godoc/main.go +++ b/cmd/godoc/main.go @@ -14,8 +14,6 @@ // http://godoc/pkg/compress/zlib) // -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/goimports/goimports.go b/cmd/goimports/goimports.go index 7463e641e95..dcb5023a2e7 100644 --- a/cmd/goimports/goimports.go +++ b/cmd/goimports/goimports.go @@ -2,8 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/goimports/gotypesalias.go b/cmd/goimports/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/goimports/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/gomvpkg/gotypesalias.go b/cmd/gomvpkg/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/gomvpkg/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/gomvpkg/main.go b/cmd/gomvpkg/main.go index d54b7070dec..5de1e44062d 100644 --- a/cmd/gomvpkg/main.go +++ b/cmd/gomvpkg/main.go @@ -4,9 +4,6 @@ // The gomvpkg command moves go packages, updating import declarations. // See the -help message or Usage constant for details. - -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/gotype/gotype.go b/cmd/gotype/gotype.go index 09b66207e63..4a731f26233 100644 --- a/cmd/gotype/gotype.go +++ b/cmd/gotype/gotype.go @@ -85,9 +85,6 @@ To verify the output of a pipe: echo "package foo" | gotype */ - -//go:debug gotypesalias=0 - package main import ( diff --git a/cmd/gotype/gotypesalias.go b/cmd/gotype/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/gotype/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/ssadump/gotypesalias.go b/cmd/ssadump/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/ssadump/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). 
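Because these commands now rely on a //go:debug directive rather than on source-level behavior, the effective default can be read back out of a built binary's build info, where the toolchain records //go:debug overrides. A small hypothetical check (not part of this patch; it assumes the DefaultGODEBUG build setting recorded by Go 1.21+ toolchains):

package main

import (
	"fmt"
	"runtime/debug"
)

// Print this binary's default GODEBUG string; for the commands above it is
// expected to include gotypesalias=1 when built with a go1.23 toolchain.
func main() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("no build info available")
		return
	}
	for _, s := range info.Settings {
		if s.Key == "DefaultGODEBUG" {
			fmt.Println(s.Value)
		}
	}
}

The same information is printed by "go version -m" on the built binary, so no code is strictly needed to verify it.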
diff --git a/cmd/ssadump/main.go b/cmd/ssadump/main.go index 2ecf04fba50..275e0a92aef 100644 --- a/cmd/ssadump/main.go +++ b/cmd/ssadump/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // ssadump: a tool for displaying and interpreting the SSA form of Go programs. - -//go:debug gotypesalias=0 - package main // import "golang.org/x/tools/cmd/ssadump" import ( diff --git a/cmd/stringer/endtoend_test.go b/cmd/stringer/endtoend_test.go index 2b7d6a786a5..5a56636be46 100644 --- a/cmd/stringer/endtoend_test.go +++ b/cmd/stringer/endtoend_test.go @@ -5,7 +5,6 @@ // go command is not available on android //go:build !android -// +build !android package main diff --git a/cmd/stringer/gotypesalias.go b/cmd/stringer/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/cmd/stringer/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/cmd/stringer/multifile_test.go b/cmd/stringer/multifile_test.go index 7a7ae669065..32914c5e825 100644 --- a/cmd/stringer/multifile_test.go +++ b/cmd/stringer/multifile_test.go @@ -2,8 +2,9 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// For os.CopyFS -//go:build go1.23 +// go1.23 is required for os.CopyFS. +// !android is required for compatibility with endtoend_test.go. +//go:build go1.23 && !android package main diff --git a/cmd/stringer/stringer.go b/cmd/stringer/stringer.go index 94eaee844a4..09be11ca58e 100644 --- a/cmd/stringer/stringer.go +++ b/cmd/stringer/stringer.go @@ -70,9 +70,6 @@ // PillAspirin // Aspirin // // to suppress it in the output. - -//go:debug gotypesalias=0 - package main // import "golang.org/x/tools/cmd/stringer" import ( diff --git a/copyright/copyright.go b/copyright/copyright.go index f5e2de7a4f1..16bd9d2f329 100644 --- a/copyright/copyright.go +++ b/copyright/copyright.go @@ -52,7 +52,7 @@ license that can be found in the LICENSE file.`) func checkFile(toolsDir, filename string) (bool, error) { // Only check Go files. - if !strings.HasSuffix(filename, "go") { + if !strings.HasSuffix(filename, ".go") { return false, nil } // Don't check testdata files. 
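The copyright.go fix above tightens an over-broad suffix test: a bare "go" suffix also matches any file whose name merely ends in those two letters, so non-Go files could be scanned. A minimal illustration of the difference (a sketch, not part of this patch):

package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, name := range []string{"main.go", "logo", "README.md"} {
		fmt.Printf("%-10s old=%-5v new=%v\n", name,
			strings.HasSuffix(name, "go"),  // old check: also true for "logo"
			strings.HasSuffix(name, ".go")) // new check: true only for Go files
	}
}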
diff --git a/go.mod b/go.mod index 9715167f426..0fbd072dd43 100644 --- a/go.mod +++ b/go.mod @@ -5,10 +5,10 @@ go 1.22.0 // => default GODEBUG has gotypesalias=0 require ( github.com/google/go-cmp v0.6.0 github.com/yuin/goldmark v1.4.13 - golang.org/x/mod v0.21.0 - golang.org/x/net v0.30.0 - golang.org/x/sync v0.8.0 + golang.org/x/mod v0.22.0 + golang.org/x/net v0.31.0 + golang.org/x/sync v0.9.0 golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 ) -require golang.org/x/sys v0.26.0 // indirect +require golang.org/x/sys v0.27.0 // indirect diff --git a/go.sum b/go.sum index 459786d0b91..1d4e510a2c0 100644 --- a/go.sum +++ b/go.sum @@ -2,13 +2,13 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/yuin/goldmark v1.4.13 h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= -golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= diff --git a/go/analysis/passes/asmdecl/asmdecl.go b/go/analysis/passes/asmdecl/asmdecl.go index c9ba1a375d3..b622dfdf3a0 100644 --- a/go/analysis/passes/asmdecl/asmdecl.go +++ b/go/analysis/passes/asmdecl/asmdecl.go @@ -57,6 +57,8 @@ type asmArch struct { // include the first integer register and first floating-point register. Accessing // any of them counts as writing to result. retRegs []string + // writeResult is a list of instructions that will change the result register implicitly.
+ writeResult []string // calculated during initialization sizes types.Sizes intSize int @@ -85,18 +87,18 @@ type asmVar struct { var ( asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false} asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true} - asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}} - asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}} + asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}, writeResult: []string{"SVC"}} + asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}, writeResult: []string{"SYSCALL"}} asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true} asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true} asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true} asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true} - asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}} + asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}} + asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}} + asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}, writeResult: []string{"ECALL"}} asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true} asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false} - asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}} + asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}, writeResult: []string{"SYSCALL"}} arches = []*asmArch{ &asmArch386, @@ -351,6 +353,12 @@ Files: } if abi == "ABIInternal" && !haveRetArg { + for _, ins := range archDef.writeResult { + if strings.Contains(line, ins) { + haveRetArg = true + break + } + } for _, reg := range archDef.retRegs { if strings.Contains(line, reg) { haveRetArg = true diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm.go b/go/analysis/passes/asmdecl/testdata/src/a/asm.go index 1413b74696f..077201ddbe6 100644 --- a/go/analysis/passes/asmdecl/testdata/src/a/asm.go +++ b/go/analysis/passes/asmdecl/testdata/src/a/asm.go @@ -54,5 +54,6 @@ func pickFutureABI(x int) func returnABIInternal() int func returnmissingABIInternal() int +func returnsyscallABIInternal() int func retjmp() int diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm1.s b/go/analysis/passes/asmdecl/testdata/src/a/asm1.s index c3ef9f40fec..ff951c762b0 100644 --- a/go/analysis/passes/asmdecl/testdata/src/a/asm1.s +++ b/go/analysis/passes/asmdecl/testdata/src/a/asm1.s @@ -307,7 +307,6 @@ TEXT ·returnnamed(SB),0,$0-41 TEXT ·returnintmissing(SB),0,$0-8 RET // want `RET without writing to 8-byte ret\+0\(FP\)` - // issue 15271 TEXT ·f15271(SB), NOSPLIT, $0-4 // Stick 123 into the low 
32 bits of X0. @@ -354,6 +353,12 @@ TEXT ·returnmissingABIInternal(SB), NOSPLIT, $32 MOVQ $123, CX RET // want `RET without writing to result register` +// issue 69352 +TEXT ·returnsyscallABIInternal(SB), NOSPLIT, $0 + MOVQ $123, CX + SYSCALL + RET + // return jump TEXT ·retjmp(SB), NOSPLIT, $0-8 RET retjmp1(SB) // It's okay to not write results if there's a tail call. diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm11.s b/go/analysis/passes/asmdecl/testdata/src/a/asm11.s index e81e8ee179f..7086713726e 100644 --- a/go/analysis/passes/asmdecl/testdata/src/a/asm11.s +++ b/go/analysis/passes/asmdecl/testdata/src/a/asm11.s @@ -11,3 +11,9 @@ TEXT ·returnABIInternal(SB), NOSPLIT, $8 TEXT ·returnmissingABIInternal(SB), NOSPLIT, $8 MOV $123, X20 RET // want `RET without writing to result register` + +// issue 69352 +TEXT ·returnsyscallABIInternal(SB), NOSPLIT, $0 + MOV $123, X20 + ECALL + RET diff --git a/go/analysis/passes/asmdecl/testdata/src/a/asm7.s b/go/analysis/passes/asmdecl/testdata/src/a/asm7.s index 51b5a841313..db00bda3755 100644 --- a/go/analysis/passes/asmdecl/testdata/src/a/asm7.s +++ b/go/analysis/passes/asmdecl/testdata/src/a/asm7.s @@ -198,3 +198,9 @@ TEXT ·returnABIInternal(SB), NOSPLIT, $8 TEXT ·returnmissingABIInternal(SB), NOSPLIT, $8 MOVD $123, R10 RET // want `RET without writing to result register` + +// issue 69352 +TEXT ·returnsyscallABIInternal(SB), NOSPLIT, $0 + MOVD $123, R10 + SYSCALL + RET diff --git a/go/analysis/passes/cgocall/cgocall.go b/go/analysis/passes/cgocall/cgocall.go index 26ec0683158..613583a1a64 100644 --- a/go/analysis/passes/cgocall/cgocall.go +++ b/go/analysis/passes/cgocall/cgocall.go @@ -179,7 +179,7 @@ func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*a for _, raw := range files { // If f is a cgo-generated file, Position reports // the original file, honoring //line directives. - filename := fset.Position(raw.Pos()).Filename + filename := fset.Position(raw.Pos()).Filename // sic: Pos, not FileStart f, err := parser.ParseFile(fset, filename, nil, parser.SkipObjectResolution) if err != nil { return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err) diff --git a/go/analysis/passes/defers/cmd/defers/gotypesalias.go b/go/analysis/passes/defers/cmd/defers/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/defers/cmd/defers/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/defers/cmd/defers/main.go b/go/analysis/passes/defers/cmd/defers/main.go index ffa5ae2da9b..b3dc8b94eca 100644 --- a/go/analysis/passes/defers/cmd/defers/main.go +++ b/go/analysis/passes/defers/cmd/defers/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The defers command runs the defers analyzer. 
- -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go b/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/fieldalignment/cmd/fieldalignment/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/fieldalignment/cmd/fieldalignment/main.go b/go/analysis/passes/fieldalignment/cmd/fieldalignment/main.go index 9ec4b9b505e..47d383d2d0e 100644 --- a/go/analysis/passes/fieldalignment/cmd/fieldalignment/main.go +++ b/go/analysis/passes/fieldalignment/cmd/fieldalignment/main.go @@ -1,9 +1,6 @@ // Copyright 2021 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go b/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/findcall/cmd/findcall/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/findcall/cmd/findcall/main.go b/go/analysis/passes/findcall/cmd/findcall/main.go index 1ada9668313..e0ce9137d61 100644 --- a/go/analysis/passes/findcall/cmd/findcall/main.go +++ b/go/analysis/passes/findcall/cmd/findcall/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The findcall command runs the findcall analyzer. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go b/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/httpmux/cmd/httpmux/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/httpmux/cmd/httpmux/main.go b/go/analysis/passes/httpmux/cmd/httpmux/main.go index 5933df923da..e8a631157dc 100644 --- a/go/analysis/passes/httpmux/cmd/httpmux/main.go +++ b/go/analysis/passes/httpmux/cmd/httpmux/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The httpmux command runs the httpmux analyzer. 
- -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go b/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/ifaceassert/cmd/ifaceassert/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/ifaceassert/cmd/ifaceassert/main.go b/go/analysis/passes/ifaceassert/cmd/ifaceassert/main.go index 32390be1643..42250f93df8 100644 --- a/go/analysis/passes/ifaceassert/cmd/ifaceassert/main.go +++ b/go/analysis/passes/ifaceassert/cmd/ifaceassert/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The ifaceassert command runs the ifaceassert analyzer. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go b/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/lostcancel/cmd/lostcancel/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/lostcancel/cmd/lostcancel/main.go b/go/analysis/passes/lostcancel/cmd/lostcancel/main.go index 3f2ac7c38f5..0bba8465242 100644 --- a/go/analysis/passes/lostcancel/cmd/lostcancel/main.go +++ b/go/analysis/passes/lostcancel/cmd/lostcancel/main.go @@ -4,9 +4,6 @@ // The lostcancel command applies the golang.org/x/tools/go/analysis/passes/lostcancel // analysis to the specified packages of Go source code. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/lostcancel/doc.go b/go/analysis/passes/lostcancel/doc.go index 28bf6c7e264..f789bdc8111 100644 --- a/go/analysis/passes/lostcancel/doc.go +++ b/go/analysis/passes/lostcancel/doc.go @@ -10,7 +10,7 @@ // lostcancel: check cancel func returned by context.WithCancel is called // // The cancellation function returned by context.WithCancel, WithTimeout, -// and WithDeadline must be called or the new context will remain live -// until its parent context is cancelled. +// WithDeadline and variants such as WithCancelCause must be called, +// or the new context will remain live until its parent context is cancelled. // (The background context is never cancelled.) 
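The lostcancel documentation and analyzer changes in this file extend the check from WithCancel, WithTimeout, and WithDeadline to their Cause variants. A brief sketch of the two shapes involved (illustrative only; it mirrors the new typeparams testdata added further below rather than being part of the patch):

package main

import (
	"context"
	"errors"
	"time"
)

// The cancel function escapes only one branch, so the derived context can
// outlive the call: this is the pattern the expanded check reports.
func leaky(maybe bool) context.Context {
	ctx, cancel := context.WithCancelCause(context.Background())
	if maybe {
		cancel(errors.New("done"))
	}
	return ctx
}

// Deferring the cancel function on every path keeps the analyzer quiet.
func clean() {
	ctx, cancel := context.WithTimeoutCause(context.Background(), time.Second, errors.New("deadline"))
	defer cancel()
	<-ctx.Done() // wait for the timeout
}

func main() {
	_ = leaky(false)
	clean()
}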
package lostcancel diff --git a/go/analysis/passes/lostcancel/lostcancel.go b/go/analysis/passes/lostcancel/lostcancel.go index bf56a5c06f6..26fdc1206f8 100644 --- a/go/analysis/passes/lostcancel/lostcancel.go +++ b/go/analysis/passes/lostcancel/lostcancel.go @@ -198,7 +198,9 @@ func isContextWithCancel(info *types.Info, n ast.Node) bool { return false } switch sel.Sel.Name { - case "WithCancel", "WithTimeout", "WithDeadline": + case "WithCancel", "WithCancelCause", + "WithTimeout", "WithTimeoutCause", + "WithDeadline", "WithDeadlineCause": default: return false } diff --git a/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go b/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go index 1030ba4c1be..fd2f487f9e0 100644 --- a/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go +++ b/go/analysis/passes/lostcancel/testdata/src/typeparams/typeparams.go @@ -8,9 +8,14 @@ package typeparams import ( "context" + "io" "time" ) +// +// These comment lines are ballast to ensure +// that this is L17. Add/remove as needed. + var bg = context.Background() func _[T any]() { @@ -18,7 +23,7 @@ func _[T any]() { if false { _ = cancel } -} // want "this return statement may be reached without using the cancel var defined on line 17" +} // want "this return statement may be reached without using the cancel var defined on line 22" func _[T any]() { _, cancel := context.WithCancel(bg) @@ -55,3 +60,16 @@ func _() { var x C[int] x.f() } + +func withCancelCause(maybe bool) { + { + _, cancel := context.WithCancelCause(bg) + defer cancel(io.EOF) // ok + } + { + _, cancel := context.WithCancelCause(bg) // want "the cancel function is not used on all paths \\(possible context leak\\)" + if maybe { + cancel(io.EOF) + } + } +} // want "this return statement may be reached without using the cancel var defined on line 70" diff --git a/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go b/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/nilness/cmd/nilness/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/nilness/cmd/nilness/main.go b/go/analysis/passes/nilness/cmd/nilness/main.go index 91b4d5c44b3..136ac254a45 100644 --- a/go/analysis/passes/nilness/cmd/nilness/main.go +++ b/go/analysis/passes/nilness/cmd/nilness/main.go @@ -4,9 +4,6 @@ // The nilness command applies the golang.org/x/tools/go/analysis/passes/nilness // analysis to the specified packages of Go source code. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go b/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/shadow/cmd/shadow/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/shadow/cmd/shadow/main.go b/go/analysis/passes/shadow/cmd/shadow/main.go index 38de46beb3e..f9e36ecee95 100644 --- a/go/analysis/passes/shadow/cmd/shadow/main.go +++ b/go/analysis/passes/shadow/cmd/shadow/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The shadow command runs the shadow analyzer. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/slog/slog.go b/go/analysis/passes/slog/slog.go index 0cade7bad7e..0129102a336 100644 --- a/go/analysis/passes/slog/slog.go +++ b/go/analysis/passes/slog/slog.go @@ -203,7 +203,7 @@ func kvFuncSkipArgs(fn *types.Func) (int, bool) { // order to get to the ones that match the ...any parameter. // The first key is the dereferenced receiver type name, or "" for a function. var kvFuncs = map[string]map[string]int{ - "": map[string]int{ + "": { "Debug": 1, "Info": 1, "Warn": 1, @@ -215,7 +215,7 @@ var kvFuncs = map[string]map[string]int{ "Log": 3, "Group": 1, }, - "Logger": map[string]int{ + "Logger": { "Debug": 1, "Info": 1, "Warn": 1, @@ -227,7 +227,7 @@ var kvFuncs = map[string]map[string]int{ "Log": 3, "With": 0, }, - "Record": map[string]int{ + "Record": { "Add": 0, }, } diff --git a/go/analysis/passes/stdversion/main.go b/go/analysis/passes/stdversion/main.go index a9efd0160eb..2156d41e4a9 100644 --- a/go/analysis/passes/stdversion/main.go +++ b/go/analysis/passes/stdversion/main.go @@ -8,8 +8,8 @@ package main import ( - "golang.org/x/tools/go/analysis/singlechecker" "golang.org/x/tools/go/analysis/passes/stdversion" + "golang.org/x/tools/go/analysis/singlechecker" ) func main() { singlechecker.Main(stdversion.Analyzer) } diff --git a/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go b/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/stringintconv/cmd/stringintconv/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/stringintconv/cmd/stringintconv/main.go b/go/analysis/passes/stringintconv/cmd/stringintconv/main.go index 8dfb9a2056d..118b9579a50 100644 --- a/go/analysis/passes/stringintconv/cmd/stringintconv/main.go +++ b/go/analysis/passes/stringintconv/cmd/stringintconv/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The stringintconv command runs the stringintconv analyzer. 
- -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/tests/tests.go b/go/analysis/passes/tests/tests.go index 5b4598235cf..36f2c43eb64 100644 --- a/go/analysis/passes/tests/tests.go +++ b/go/analysis/passes/tests/tests.go @@ -48,7 +48,7 @@ var acceptedFuzzTypes = []types.Type{ func run(pass *analysis.Pass) (interface{}, error) { for _, f := range pass.Files { - if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") { + if !strings.HasSuffix(pass.Fset.File(f.FileStart).Name(), "_test.go") { continue } for _, decl := range f.Decls { diff --git a/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go b/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/unmarshal/cmd/unmarshal/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/unmarshal/cmd/unmarshal/main.go b/go/analysis/passes/unmarshal/cmd/unmarshal/main.go index fd69013fa59..1a17cd64de3 100644 --- a/go/analysis/passes/unmarshal/cmd/unmarshal/main.go +++ b/go/analysis/passes/unmarshal/cmd/unmarshal/main.go @@ -3,9 +3,6 @@ // license that can be found in the LICENSE file. // The unmarshal command runs the unmarshal analyzer. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go b/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/analysis/passes/unusedresult/cmd/unusedresult/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/analysis/passes/unusedresult/cmd/unusedresult/main.go b/go/analysis/passes/unusedresult/cmd/unusedresult/main.go index 635883c4051..8116c6e06e9 100644 --- a/go/analysis/passes/unusedresult/cmd/unusedresult/main.go +++ b/go/analysis/passes/unusedresult/cmd/unusedresult/main.go @@ -4,9 +4,6 @@ // The unusedresult command applies the golang.org/x/tools/go/analysis/passes/unusedresult // analysis to the specified packages of Go source code. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/analysis/passes/unusedwrite/testdata/src/a/unusedwrite.go b/go/analysis/passes/unusedwrite/testdata/src/a/unusedwrite.go index 7e43ee4369c..e7b33f308c2 100644 --- a/go/analysis/passes/unusedwrite/testdata/src/a/unusedwrite.go +++ b/go/analysis/passes/unusedwrite/testdata/src/a/unusedwrite.go @@ -30,6 +30,15 @@ func BadWrites() { v.x = i // want "unused write to field x" _ = v.y } + + // The analyzer can handle only simple control flow. 
+ type T struct{ x, y int } + t := new(T) + if true { + t = new(T) + } // causes t below to become phi(alloc, alloc), not a simple alloc + t.x = 1 // false negative + print(t.y) } func (t T1) BadValueReceiverWrite(v T2) { diff --git a/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/i.go b/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/i.go new file mode 100644 index 00000000000..079a8a7a836 --- /dev/null +++ b/go/analysis/passes/unusedwrite/testdata/src/importsunsafe/i.go @@ -0,0 +1,20 @@ +package importsunsafe + +import "unsafe" + +type S struct { + F, G int +} + +func _() { + var s S + s.F = 1 + // This write to G is used below, because &s.F allows access to all of s, but + // the analyzer would naively report it as unused. For this reason, we + // silence the analysis if unsafe is imported. + s.G = 2 + + ptr := unsafe.Pointer(&s.F) + t := (*S)(ptr) + println(t.G) +} diff --git a/go/analysis/passes/unusedwrite/unusedwrite.go b/go/analysis/passes/unusedwrite/unusedwrite.go index 3f651fc26d5..2e209c8a6c1 100644 --- a/go/analysis/passes/unusedwrite/unusedwrite.go +++ b/go/analysis/passes/unusedwrite/unusedwrite.go @@ -28,7 +28,15 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { + for _, pkg := range pass.Pkg.Imports() { + if pkg.Path() == "unsafe" { + // See golang/go#67684, or testdata/src/importsunsafe: the unusedwrite + // analyzer may have false positives when used with unsafe. + return nil, nil + } + } + ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) for _, fn := range ssainput.SrcFuncs { reports := checkStores(fn) diff --git a/go/analysis/passes/unusedwrite/unusedwrite_test.go b/go/analysis/passes/unusedwrite/unusedwrite_test.go index 9658849d0e9..d1b2b680fae 100644 --- a/go/analysis/passes/unusedwrite/unusedwrite_test.go +++ b/go/analysis/passes/unusedwrite/unusedwrite_test.go @@ -13,5 +13,5 @@ import ( func Test(t *testing.T) { testdata := analysistest.TestData() - analysistest.Run(t, testdata, unusedwrite.Analyzer, "a") + analysistest.Run(t, testdata, unusedwrite.Analyzer, "a", "importsunsafe") } diff --git a/go/analysis/unitchecker/unitchecker.go b/go/analysis/unitchecker/unitchecker.go index 71ebbfaef15..2301ccfc0e4 100644 --- a/go/analysis/unitchecker/unitchecker.go +++ b/go/analysis/unitchecker/unitchecker.go @@ -51,7 +51,6 @@ import ( "golang.org/x/tools/go/analysis/internal/analysisflags" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/facts" - "golang.org/x/tools/internal/versions" ) // A Config describes a compilation unit to be analyzed. 
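The unitchecker hunk that follows replaces the internal versions.InitFileVersions helper with a directly populated types.Info.FileVersions map, which the standard type checker fills in itself on Go 1.22 and later. A rough standalone sketch of that API (illustrative assumptions only, not part of this patch):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

// Type-check a single file carrying a //go:build go1.21 constraint and read
// back the per-file language version recorded by the checker.
func main() {
	const src = "//go:build go1.21\n\npackage p\n"
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	info := &types.Info{FileVersions: make(map[*ast.File]string)}
	conf := &types.Config{GoVersion: "go1.22"}
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}
	fmt.Println(info.FileVersions[f]) // e.g. "go1.21", taken from the build constraint
}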
@@ -257,15 +256,15 @@ func run(fset *token.FileSet, cfg *Config, analyzers []*analysis.Analyzer) ([]re GoVersion: cfg.GoVersion, } info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) pkg, err := tc.Check(cfg.ImportPath, fset, files, info) if err != nil { diff --git a/go/analysis/unitchecker/unitchecker_test.go b/go/analysis/unitchecker/unitchecker_test.go index 54d8fa81851..1801b49cfe8 100644 --- a/go/analysis/unitchecker/unitchecker_test.go +++ b/go/analysis/unitchecker/unitchecker_test.go @@ -17,7 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/findcall" "golang.org/x/tools/go/analysis/passes/printf" "golang.org/x/tools/go/analysis/unitchecker" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" ) func TestMain(m *testing.M) { diff --git a/go/ast/astutil/imports.go b/go/ast/astutil/imports.go index 18d1adb05dd..a6b5ed0a893 100644 --- a/go/ast/astutil/imports.go +++ b/go/ast/astutil/imports.go @@ -344,7 +344,12 @@ func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (r } // UsesImport reports whether a given import is used. +// The provided File must have been parsed with syntactic object resolution +// (not using go/parser.SkipObjectResolution). func UsesImport(f *ast.File, path string) (used bool) { + if f.Scope == nil { + panic("file f was not parsed with syntactic object resolution") + } spec := importSpec(f, path) if spec == nil { return diff --git a/go/ast/inspector/inspector.go b/go/ast/inspector/inspector.go index 0e0ba4c035c..958cf38deb0 100644 --- a/go/ast/inspector/inspector.go +++ b/go/ast/inspector/inspector.go @@ -180,7 +180,9 @@ func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, s // traverse builds the table of events representing a traversal. func traverse(files []*ast.File) []event { // Preallocate approximate number of events - // based on source file extent. + // based on source file extent of the declarations. + // (We use End-Pos not FileStart-FileEnd to neglect + // the effect of long doc comments.) // This makes traverse faster by 4x (!). 
var extent int for _, f := range files { diff --git a/go/buildutil/allpackages_test.go b/go/buildutil/allpackages_test.go index 1aa194d868e..6af86771104 100644 --- a/go/buildutil/allpackages_test.go +++ b/go/buildutil/allpackages_test.go @@ -17,7 +17,7 @@ import ( "testing" "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" ) func TestAllPackages(t *testing.T) { diff --git a/go/buildutil/util_test.go b/go/buildutil/util_test.go index 6c507579a38..534828d969b 100644 --- a/go/buildutil/util_test.go +++ b/go/buildutil/util_test.go @@ -13,7 +13,7 @@ import ( "testing" "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" ) func TestContainingPackage(t *testing.T) { diff --git a/go/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go index 6e0b2dda7b5..74e77b01291 100644 --- a/go/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -100,7 +100,7 @@ func TestRTA(t *testing.T) { // // Functions are notated as if by ssa.Function.String. func check(t *testing.T, f *ast.File, pkg *ssa.Package, res *rta.Result) { - tokFile := pkg.Prog.Fset.File(f.Pos()) + tokFile := pkg.Prog.Fset.File(f.FileStart) // Find the WANT comment. expectation := func(f *ast.File) (string, int) { diff --git a/go/callgraph/vta/graph.go b/go/callgraph/vta/graph.go index 61e841a39ae..c13b8a5e6cb 100644 --- a/go/callgraph/vta/graph.go +++ b/go/callgraph/vta/graph.go @@ -255,27 +255,64 @@ func (r recoverReturn) String() string { type empty = struct{} +// idx is an index representing a unique node in a vtaGraph. +type idx int + // vtaGraph remembers for each VTA node the set of its successors. // Tailored for VTA, hence does not support singleton (sub)graphs. -type vtaGraph map[node]map[node]empty +type vtaGraph struct { + m []map[idx]empty // m[i] has the successors for the node with index i. + idx map[node]idx // idx[n] is the index for the node n. + node []node // node[i] is the node with index i. +} + +func (g *vtaGraph) numNodes() int { + return len(g.idx) +} + +func (g *vtaGraph) successors(x idx) func(yield func(y idx) bool) { + return func(yield func(y idx) bool) { + for y := range g.m[x] { + if !yield(y) { + return + } + } + } +} // addEdge adds an edge x->y to the graph. -func (g vtaGraph) addEdge(x, y node) { - succs, ok := g[x] - if !ok { - succs = make(map[node]empty) - g[x] = succs +func (g *vtaGraph) addEdge(x, y node) { + if g.idx == nil { + g.idx = make(map[node]idx) + } + lookup := func(n node) idx { + i, ok := g.idx[n] + if !ok { + i = idx(len(g.idx)) + g.m = append(g.m, nil) + g.idx[n] = i + g.node = append(g.node, n) + } + return i + } + a := lookup(x) + b := lookup(y) + succs := g.m[a] + if succs == nil { + succs = make(map[idx]empty) + g.m[a] = succs } - succs[y] = empty{} + succs[b] = empty{} } // typePropGraph builds a VTA graph for a set of `funcs` and initial // `callgraph` needed to establish interprocedural edges. Returns the // graph and a map for unique type representatives. -func typePropGraph(funcs map[*ssa.Function]bool, callees calleesFunc) (vtaGraph, *typeutil.Map) { - b := builder{graph: make(vtaGraph), callees: callees} +func typePropGraph(funcs map[*ssa.Function]bool, callees calleesFunc) (*vtaGraph, *typeutil.Map) { + b := builder{callees: callees} b.visit(funcs) - return b.graph, &b.canon + b.callees = nil // ensure callees is not pinned by pointers to other fields of b. 
+ return &b.graph, &b.canon } // Data structure responsible for linearly traversing the diff --git a/go/callgraph/vta/graph_test.go b/go/callgraph/vta/graph_test.go index ace80859e21..9e780c7e4e2 100644 --- a/go/callgraph/vta/graph_test.go +++ b/go/callgraph/vta/graph_test.go @@ -110,7 +110,7 @@ func TestVtaGraph(t *testing.T) { // n3 / // | / // n4 - g := make(vtaGraph) + var g vtaGraph g.addEdge(n1, n3) g.addEdge(n2, n3) g.addEdge(n3, n4) @@ -119,9 +119,19 @@ func TestVtaGraph(t *testing.T) { g.addEdge(n1, n3) want := vtaGraph{ - n1: map[node]empty{n3: empty{}}, - n2: map[node]empty{n3: empty{}, n4: empty{}}, - n3: map[node]empty{n4: empty{}}, + m: []map[idx]empty{ + map[idx]empty{1: empty{}}, + map[idx]empty{3: empty{}}, + map[idx]empty{1: empty{}, 3: empty{}}, + nil, + }, + idx: map[node]idx{ + n1: 0, + n3: 1, + n2: 2, + n4: 3, + }, + node: []node{n1, n3, n2, n4}, } if !reflect.DeepEqual(want, g) { @@ -137,7 +147,9 @@ func TestVtaGraph(t *testing.T) { {n3, 1}, {n4, 0}, } { - if sl := len(g[test.n]); sl != test.l { + sl := 0 + g.successors(g.idx[test.n])(func(_ idx) bool { sl++; return true }) + if sl != test.l { t.Errorf("want %d successors; got %d", test.l, sl) } } @@ -147,15 +159,16 @@ func TestVtaGraph(t *testing.T) { // where each string represents an edge set of the format // node -> succ_1, ..., succ_n. succ_1, ..., succ_n are // sorted in alphabetical order. -func vtaGraphStr(g vtaGraph) []string { +func vtaGraphStr(g *vtaGraph) []string { var vgs []string - for n, succ := range g { + for n := 0; n < g.numNodes(); n++ { var succStr []string - for s := range succ { - succStr = append(succStr, s.String()) - } + g.successors(idx(n))(func(s idx) bool { + succStr = append(succStr, g.node[s].String()) + return true + }) sort.Strings(succStr) - entry := fmt.Sprintf("%v -> %v", n.String(), strings.Join(succStr, ", ")) + entry := fmt.Sprintf("%v -> %v", g.node[n].String(), strings.Join(succStr, ", ")) vgs = append(vgs, removeModulePrefix(entry)) } return vgs diff --git a/go/callgraph/vta/propagation.go b/go/callgraph/vta/propagation.go index 4274f482d10..f448cde1135 100644 --- a/go/callgraph/vta/propagation.go +++ b/go/callgraph/vta/propagation.go @@ -6,6 +6,7 @@ package vta import ( "go/types" + "slices" "golang.org/x/tools/go/callgraph/vta/internal/trie" "golang.org/x/tools/go/ssa" @@ -14,63 +15,66 @@ import ( ) // scc computes strongly connected components (SCCs) of `g` using the -// classical Tarjan's algorithm for SCCs. The result is a pair -// where m is a map from nodes to unique id of their SCC in the range -// [0, id). The SCCs are sorted in reverse topological order: for SCCs +// classical Tarjan's algorithm for SCCs. The result is two slices: +// - sccs: the SCCs, each represented as a slice of node indices +// - idxToSccID: the inverse map, from node index to SCC number. +// +// The SCCs are sorted in reverse topological order: for SCCs // with ids X and Y s.t. X < Y, Y comes before X in the topological order. -func scc(g vtaGraph) (map[node]int, int) { +func scc(g *vtaGraph) (sccs [][]idx, idxToSccID []int) { // standard data structures used by Tarjan's algorithm. 
type state struct { - index int + pre int // preorder of the node (0 if unvisited) lowLink int onStack bool } - states := make(map[node]*state, len(g)) - var stack []node + states := make([]state, g.numNodes()) + var stack []idx - nodeToSccID := make(map[node]int, len(g)) - sccID := 0 + idxToSccID = make([]int, g.numNodes()) + nextPre := 0 - var doSCC func(node) - doSCC = func(n node) { - index := len(states) - ns := &state{index: index, lowLink: index, onStack: true} - states[n] = ns + var doSCC func(idx) + doSCC = func(n idx) { + nextPre++ + ns := &states[n] + *ns = state{pre: nextPre, lowLink: nextPre, onStack: true} stack = append(stack, n) - for s := range g[n] { - if ss, visited := states[s]; !visited { + g.successors(n)(func(s idx) bool { + if ss := &states[s]; ss.pre == 0 { // Analyze successor s that has not been visited yet. doSCC(s) - ss = states[s] ns.lowLink = min(ns.lowLink, ss.lowLink) } else if ss.onStack { // The successor is on the stack, meaning it has to be // in the current SCC. - ns.lowLink = min(ns.lowLink, ss.index) + ns.lowLink = min(ns.lowLink, ss.pre) } - } + return true + }) // if n is a root node, pop the stack and generate a new SCC. - if ns.lowLink == index { - var w node - for w != n { - w = stack[len(stack)-1] - stack = stack[:len(stack)-1] + if ns.lowLink == ns.pre { + sccStart := slicesLastIndex(stack, n) + scc := slices.Clone(stack[sccStart:]) + stack = stack[:sccStart] + sccID := len(sccs) + sccs = append(sccs, scc) + for _, w := range scc { states[w].onStack = false - nodeToSccID[w] = sccID + idxToSccID[w] = sccID } - sccID++ } } - for n := range g { - if _, visited := states[n]; !visited { - doSCC(n) + for n, nn := 0, g.numNodes(); n < nn; n++ { + if states[n].pre == 0 { + doSCC(idx(n)) } } - return nodeToSccID, sccID + return sccs, idxToSccID } func min(x, y int) int { @@ -80,6 +84,21 @@ func min(x, y int) int { return y } +// LastIndex returns the index of the last occurrence of v in s, or -1 if v is +// not present in s. +// +// LastIndex iterates backwards through the elements of s, stopping when the == +// operator determines an element is equal to v. +func slicesLastIndex[S ~[]E, E comparable](s S, v E) int { + // TODO: move to / dedup with slices.LastIndex + for i := len(s) - 1; i >= 0; i-- { + if s[i] == v { + return i + } + } + return -1 +} + // propType represents type information being propagated // over the vta graph. f != nil only for function nodes // and nodes reachable from function nodes. There, we also @@ -92,10 +111,7 @@ type propType struct { // propTypeMap is an auxiliary structure that serves // the role of a map from nodes to a set of propTypes. -type propTypeMap struct { - nodeToScc map[node]int - sccToTypes map[int]*trie.MutMap -} +type propTypeMap map[node]*trie.MutMap // propTypes returns a go1.23 iterator for the propTypes associated with // node `n` in map `ptm`. @@ -103,8 +119,8 @@ func (ptm propTypeMap) propTypes(n node) func(yield func(propType) bool) { // TODO: when x/tools uses go1.23, change callers to use range-over-func // (https://go.dev/issue/65237). return func(yield func(propType) bool) { - if id, ok := ptm.nodeToScc[n]; ok { - ptm.sccToTypes[id].M.Range(func(_ uint64, elem interface{}) bool { + if types := ptm[n]; types != nil { + types.M.Range(func(_ uint64, elem interface{}) bool { return yield(elem.(propType)) }) } @@ -116,14 +132,8 @@ func (ptm propTypeMap) propTypes(n node) func(yield func(propType) bool) { // graph. 
The result is a map from nodes to a set of types // and functions, stemming from higher-order data flow, // reaching the node. `canon` is used for type uniqueness. -func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap { - nodeToScc, sccID := scc(graph) - - // We also need the reverse map, from ids to SCCs. - sccs := make(map[int][]node, sccID) - for n, id := range nodeToScc { - sccs[id] = append(sccs[id], n) - } +func propagate(graph *vtaGraph, canon *typeutil.Map) propTypeMap { + sccs, idxToSccID := scc(graph) // propTypeIds are used to create unique ids for // propType, to be used for trie-based type sets. @@ -141,37 +151,40 @@ func propagate(graph vtaGraph, canon *typeutil.Map) propTypeMap { builder := trie.NewBuilder() // Initialize sccToTypes to avoid repeated check // for initialization later. - sccToTypes := make(map[int]*trie.MutMap, sccID) - for i := 0; i <= sccID; i++ { - sccToTypes[i] = nodeTypes(sccs[i], builder, propTypeId, canon) + sccToTypes := make([]*trie.MutMap, len(sccs)) + for sccID, scc := range sccs { + typeSet := builder.MutEmpty() + for _, idx := range scc { + if n := graph.node[idx]; hasInitialTypes(n) { + // add the propType for idx to typeSet. + pt := getPropType(n, canon) + typeSet.Update(propTypeId(pt), pt) + } + } + sccToTypes[sccID] = &typeSet } for i := len(sccs) - 1; i >= 0; i-- { nextSccs := make(map[int]empty) - for _, node := range sccs[i] { - for succ := range graph[node] { - nextSccs[nodeToScc[succ]] = empty{} - } + for _, n := range sccs[i] { + graph.successors(n)(func(succ idx) bool { + nextSccs[idxToSccID[succ]] = empty{} + return true + }) } // Propagate types to all successor SCCs. for nextScc := range nextSccs { sccToTypes[nextScc].Merge(sccToTypes[i].M) } } - return propTypeMap{nodeToScc: nodeToScc, sccToTypes: sccToTypes} -} - -// nodeTypes returns a set of propTypes for `nodes`. These are the -// propTypes stemming from the type of each node in `nodes` plus. -func nodeTypes(nodes []node, builder *trie.Builder, propTypeId func(p propType) uint64, canon *typeutil.Map) *trie.MutMap { - typeSet := builder.MutEmpty() - for _, n := range nodes { - if hasInitialTypes(n) { - pt := getPropType(n, canon) - typeSet.Update(propTypeId(pt), pt) + nodeToTypes := make(propTypeMap, graph.numNodes()) + for sccID, scc := range sccs { + types := sccToTypes[sccID] + for _, idx := range scc { + nodeToTypes[graph.node[idx]] = types } } - return &typeSet + return nodeToTypes } // hasInitialTypes check if a node can have initial types. diff --git a/go/callgraph/vta/propagation_test.go b/go/callgraph/vta/propagation_test.go index 87b80a20db7..1a274f38f84 100644 --- a/go/callgraph/vta/propagation_test.go +++ b/go/callgraph/vta/propagation_test.go @@ -7,7 +7,9 @@ package vta import ( "go/token" "go/types" + "math" "reflect" + "slices" "sort" "strings" "testing" @@ -64,17 +66,12 @@ func newNamedType(name string) *types.Named { // sccString is a utility for stringifying `nodeToScc`. Every // scc is represented as a string where string representation // of scc nodes are sorted and concatenated using `;`. 
-func sccString(nodeToScc map[node]int) []string { - sccs := make(map[int][]node) - for n, id := range nodeToScc { - sccs[id] = append(sccs[id], n) - } - +func sccString(sccs [][]idx, g *vtaGraph) []string { var sccsStr []string for _, scc := range sccs { var nodesStr []string - for _, node := range scc { - nodesStr = append(nodesStr, node.String()) + for _, idx := range scc { + nodesStr = append(nodesStr, g.node[idx].String()) } sort.Strings(nodesStr) sccsStr = append(sccsStr, strings.Join(nodesStr, ";")) @@ -99,7 +96,7 @@ func nodeToTypeString(pMap propTypeMap) map[string]string { } nodeToTypeStr := make(map[string]string) - for node := range pMap.nodeToScc { + for node := range pMap { var propStrings []string pMap.propTypes(node)(func(prop propType) bool { propStrings = append(propStrings, propTypeString(prop)) @@ -126,12 +123,31 @@ func sccEqual(sccs1 []string, sccs2 []string) bool { // topological order: // // for every edge x -> y in g, nodeToScc[x] > nodeToScc[y] -func isRevTopSorted(g vtaGraph, nodeToScc map[node]int) bool { - for n, succs := range g { - for s := range succs { - if nodeToScc[n] < nodeToScc[s] { +func isRevTopSorted(g *vtaGraph, idxToScc []int) bool { + result := true + for n := 0; n < len(idxToScc); n++ { + g.successors(idx(n))(func(s idx) bool { + if idxToScc[n] < idxToScc[s] { + result = false return false } + return true + }) + } + return result +} + +func sccMapsConsistent(sccs [][]idx, idxToSccID []int) bool { + for id, scc := range sccs { + for _, idx := range scc { + if idxToSccID[idx] != id { + return false + } + } + } + for i, id := range idxToSccID { + if !slices.Contains(sccs[id], idx(i)) { + return false } } return true @@ -183,7 +199,7 @@ func setName(f *ssa.Function, name string) { // t1 (A) -> t2 (B) -> F1 -> F2 -> F3 -> F4 // | | | | // <------- <------------ -func testSuite() map[string]vtaGraph { +func testSuite() map[string]*vtaGraph { a := newNamedType("A") b := newNamedType("B") c := newNamedType("C") @@ -198,44 +214,51 @@ func testSuite() map[string]vtaGraph { f4 := &ssa.Function{Signature: sig} setName(f4, "F4") - graphs := make(map[string]vtaGraph) - graphs["no-cycles"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t1", b): empty{}}, - newLocal("t1", b): {newLocal("t2", c): empty{}}, - } - - graphs["trivial-cycle"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t0", a): empty{}}, - newLocal("t1", b): {newLocal("t1", b): empty{}}, - } - - graphs["circle-cycle"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t1", a): empty{}}, - newLocal("t1", a): {newLocal("t2", b): empty{}}, - newLocal("t2", b): {newLocal("t0", a): empty{}}, - } - - graphs["fully-connected"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t1", b): empty{}, newLocal("t2", c): empty{}}, - newLocal("t1", b): {newLocal("t0", a): empty{}, newLocal("t2", c): empty{}}, - newLocal("t2", c): {newLocal("t0", a): empty{}, newLocal("t1", b): empty{}}, - } - - graphs["subsumed-scc"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t1", b): empty{}}, - newLocal("t1", b): {newLocal("t2", b): empty{}}, - newLocal("t2", b): {newLocal("t1", b): empty{}, newLocal("t3", a): empty{}}, - newLocal("t3", a): {newLocal("t0", a): empty{}}, - } - - graphs["more-realistic"] = map[node]map[node]empty{ - newLocal("t0", a): {newLocal("t0", a): empty{}}, - newLocal("t1", a): {newLocal("t2", b): empty{}}, - newLocal("t2", b): {newLocal("t1", a): empty{}, function{f1}: empty{}}, - function{f1}: {function{f2}: empty{}, function{f3}: empty{}}, - 
function{f2}: {function{f3}: empty{}}, - function{f3}: {function{f1}: empty{}, function{f4}: empty{}}, - } + graphs := make(map[string]*vtaGraph) + v := &vtaGraph{} + graphs["no-cycles"] = v + v.addEdge(newLocal("t0", a), newLocal("t1", b)) + v.addEdge(newLocal("t1", b), newLocal("t2", c)) + + v = &vtaGraph{} + graphs["trivial-cycle"] = v + v.addEdge(newLocal("t0", a), newLocal("t0", a)) + v.addEdge(newLocal("t1", b), newLocal("t1", b)) + + v = &vtaGraph{} + graphs["circle-cycle"] = v + v.addEdge(newLocal("t0", a), newLocal("t1", a)) + v.addEdge(newLocal("t1", a), newLocal("t2", b)) + v.addEdge(newLocal("t2", b), newLocal("t0", a)) + + v = &vtaGraph{} + graphs["fully-connected"] = v + v.addEdge(newLocal("t0", a), newLocal("t1", b)) + v.addEdge(newLocal("t0", a), newLocal("t2", c)) + v.addEdge(newLocal("t1", b), newLocal("t0", a)) + v.addEdge(newLocal("t1", b), newLocal("t2", c)) + v.addEdge(newLocal("t2", c), newLocal("t0", a)) + v.addEdge(newLocal("t2", c), newLocal("t1", b)) + + v = &vtaGraph{} + graphs["subsumed-scc"] = v + v.addEdge(newLocal("t0", a), newLocal("t1", b)) + v.addEdge(newLocal("t1", b), newLocal("t2", b)) + v.addEdge(newLocal("t2", b), newLocal("t1", b)) + v.addEdge(newLocal("t2", b), newLocal("t3", a)) + v.addEdge(newLocal("t3", a), newLocal("t0", a)) + + v = &vtaGraph{} + graphs["more-realistic"] = v + v.addEdge(newLocal("t0", a), newLocal("t0", a)) + v.addEdge(newLocal("t1", a), newLocal("t2", b)) + v.addEdge(newLocal("t2", b), newLocal("t1", a)) + v.addEdge(newLocal("t2", b), function{f1}) + v.addEdge(function{f1}, function{f2}) + v.addEdge(function{f1}, function{f3}) + v.addEdge(function{f2}, function{f3}) + v.addEdge(function{f3}, function{f1}) + v.addEdge(function{f3}, function{f4}) return graphs } @@ -244,7 +267,7 @@ func TestSCC(t *testing.T) { suite := testSuite() for _, test := range []struct { name string - graph vtaGraph + graph *vtaGraph want []string }{ // No cycles results in three separate SCCs: {t0} {t1} {t2} @@ -260,13 +283,16 @@ func TestSCC(t *testing.T) { // The more realistic example has the following SCCs: {t0} {t1, t2} {F1, F2, F3} {F4} {name: "more-realistic", graph: suite["more-realistic"], want: []string{"Local(t0)", "Local(t1);Local(t2)", "Function(F1);Function(F2);Function(F3)", "Function(F4)"}}, } { - sccs, _ := scc(test.graph) - if got := sccString(sccs); !sccEqual(test.want, got) { + sccs, idxToSccID := scc(test.graph) + if got := sccString(sccs, test.graph); !sccEqual(test.want, got) { t.Errorf("want %v for graph %v; got %v", test.want, test.name, got) } - if !isRevTopSorted(test.graph, sccs) { + if !isRevTopSorted(test.graph, idxToSccID) { t.Errorf("%v not topologically sorted", test.name) } + if !sccMapsConsistent(sccs, idxToSccID) { + t.Errorf("%v: scc maps not consistent", test.name) + } } } @@ -275,7 +302,7 @@ func TestPropagation(t *testing.T) { var canon typeutil.Map for _, test := range []struct { name string - graph vtaGraph + graph *vtaGraph want map[string]string }{ // No cycles graph pushes type information forward.
@@ -336,3 +363,34 @@ func TestPropagation(t *testing.T) { } } } + +func testLastIndex[S ~[]E, E comparable](t *testing.T, s S, e E, want int) { + if got := slicesLastIndex(s, e); got != want { + t.Errorf("LastIndex(%v, %v): got %v want %v", s, e, got, want) + } +} + +func TestLastIndex(t *testing.T) { + testLastIndex(t, []int{10, 20, 30}, 10, 0) + testLastIndex(t, []int{10, 20, 30}, 20, 1) + testLastIndex(t, []int{10, 20, 30}, 30, 2) + testLastIndex(t, []int{10, 20, 30}, 42, -1) + testLastIndex(t, []int{10, 20, 10}, 10, 2) + testLastIndex(t, []int{20, 10, 10}, 10, 2) + testLastIndex(t, []int{10, 10, 20}, 10, 1) + type foo struct { + i int + s string + } + testLastIndex(t, []foo{{1, "abc"}, {2, "abc"}, {1, "xyz"}}, foo{1, "abc"}, 0) + // Test that LastIndex doesn't use bitwise comparisons for floats. + neg0 := 1 / math.Inf(-1) + nan := math.NaN() + testLastIndex(t, []float64{0, neg0}, 0, 1) + testLastIndex(t, []float64{0, neg0}, neg0, 1) + testLastIndex(t, []float64{neg0, 0}, 0, 1) + testLastIndex(t, []float64{neg0, 0}, neg0, 1) + testLastIndex(t, []float64{0, nan}, 0, 0) + testLastIndex(t, []float64{0, nan}, nan, -1) + testLastIndex(t, []float64{0, nan}, 1, -1) +} diff --git a/go/expect/expect.go b/go/expect/expect.go index fdc023c8924..6cdfcf0bef0 100644 --- a/go/expect/expect.go +++ b/go/expect/expect.go @@ -6,6 +6,10 @@ Package expect provides support for interpreting structured comments in Go source code (including go.mod and go.work files) as test expectations. +[Note: there is an open proposal (golang/go#70229) to deprecate, tag, +and delete this package. If accepted, the last version of the package +be available indefinitely but will not receive updates.] + This is primarily intended for writing tests of things that process Go source files, although it does not directly depend on the testing package. diff --git a/go/gcexportdata/gcexportdata.go b/go/gcexportdata/gcexportdata.go index 137cc8df1d8..f3ab0a2e126 100644 --- a/go/gcexportdata/gcexportdata.go +++ b/go/gcexportdata/gcexportdata.go @@ -2,22 +2,64 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package gcexportdata provides functions for locating, reading, and -// writing export data files containing type information produced by the -// gc compiler. This package supports go1.7 export data format and all -// later versions. -// -// Although it might seem convenient for this package to live alongside -// go/types in the standard library, this would cause version skew -// problems for developer tools that use it, since they must be able to -// consume the outputs of the gc compiler both before and after a Go -// update such as from Go 1.7 to Go 1.8. Because this package lives in -// golang.org/x/tools, sites can update their version of this repo some -// time before the Go 1.8 release and rebuild and redeploy their -// developer tools, which will then be able to consume both Go 1.7 and -// Go 1.8 export data files, so they will work before and after the -// Go update. (See discussion at https://golang.org/issue/15651.) -package gcexportdata // import "golang.org/x/tools/go/gcexportdata" +// Package gcexportdata provides functions for reading and writing +// export data, which is a serialized description of the API of a Go +// package including the names, kinds, types, and locations of all +// exported declarations. 
+// +// The standard Go compiler (cmd/compile) writes an export data file +// for each package it compiles, which it later reads when compiling +// packages that import the earlier one. The compiler must thus +// contain logic to both write and read export data. +// (See the "Export" section in the cmd/compile/README file.) +// +// The [Read] function in this package can read files produced by the +// compiler, producing [go/types] data structures. As a matter of +// policy, Read supports export data files produced by only the last +// two Go releases plus tip; see https://go.dev/issue/68898. The +// export data files produced by the compiler contain additional +// details related to generics, inlining, and other optimizations that +// cannot be decoded by the [Read] function. +// +// In files written by the compiler, the export data is not at the +// start of the file. Before calling Read, use [NewReader] to locate +// the desired portion of the file. +// +// The [Write] function in this package encodes the exported API of a +// Go package ([types.Package]) as a file. Such files can be later +// decoded by Read, but cannot be consumed by the compiler. +// +// # Future changes +// +// Although Read supports the formats written by both Write and the +// compiler, the two are quite different, and there is an open +// proposal (https://go.dev/issue/69491) to separate these APIs. +// +// Under that proposal, this package would ultimately provide only the +// Read operation for compiler export data, which must be defined in +// this module (golang.org/x/tools), not in the standard library, to +// avoid version skew for developer tools that need to read compiler +// export data both before and after a Go release, such as from Go +// 1.23 to Go 1.24. Because this package lives in the tools module, +// clients can update their version of the module some time before the +// Go 1.24 release and rebuild and redeploy their tools, which will +// then be able to consume both Go 1.23 and Go 1.24 export data files, +// so they will work before and after the Go update. (See discussion +// at https://go.dev/issue/15651.) +// +// The operations to import and export [go/types] data structures +// would be defined in the go/types package as Import and Export. +// [Write] would (eventually) delegate to Export, +// and [Read], when it detects a file produced by Export, +// would delegate to Import. +// +// # Deprecations +// +// The [NewImporter] and [Find] functions are deprecated and should +// not be used in new code. The [WriteBundle] and [ReadBundle] +// functions are experimental, and there is an open proposal to +// deprecate them (https://go.dev/issue/69573). +package gcexportdata import ( "bufio" @@ -100,6 +142,11 @@ func readAll(r io.Reader) ([]byte, error) { // Read reads export data from in, decodes it, and returns type // information for the package. // +// Read is capable of reading export data produced by [Write] at the +// same source code version, or by the last two Go releases (plus tip) +// of the standard Go compiler. Reading files from older compilers may +// produce an error. +// // The package path (effectively its linker symbol prefix) is // specified by path, since unlike the package name, this information // may not be recorded in the export data. @@ -128,14 +175,26 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, // (from "version"). Select appropriate importer. 
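A minimal usage sketch of the NewReader/Read flow that the rewritten documentation above describes; the file path and package path are placeholders, and error handling is abbreviated:

package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	f, err := os.Open("/path/to/pkg.a") // placeholder: a compiler-produced export data file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Skip the file header and locate the export data section.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}

	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, fset, imports, "example.com/pkg") // placeholder package path
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), pkg.Scope().Names())
}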
if len(data) > 0 { switch data[0] { - case 'v', 'c', 'd': // binary, till go1.10 + case 'v', 'c', 'd': + // binary, produced by cmd/compile till go1.10 return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - case 'i': // indexed, till go1.19 + case 'i': + // indexed, produced by cmd/compile till go1.19, + // and also by [Write]. + // + // If proposal #69491 is accepted, go/types + // serialization will be implemented by + // types.Export, to which Write would eventually + // delegate (explicitly dropping any pretence at + // inter-version Write-Read compatibility). + // This [Read] function would delegate to types.Import + // when it detects that the file was produced by Export. _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) return pkg, err - case 'u': // unified, from go1.20 + case 'u': + // unified, produced by cmd/compile since go1.20 _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path) return pkg, err diff --git a/go/loader/loader.go b/go/loader/loader.go index 013c0f505bb..2d4865f664f 100644 --- a/go/loader/loader.go +++ b/go/loader/loader.go @@ -23,7 +23,6 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/internal/cgo" - "golang.org/x/tools/internal/versions" ) var ignoreVendor build.ImportMode @@ -341,13 +340,12 @@ func (conf *Config) addImport(path string, tests bool) { func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { for _, info := range prog.AllPackages { for _, f := range info.Files { - if f.Pos() == token.NoPos { - // This can happen if the parser saw - // too many errors and bailed out. - // (Use parser.AllErrors to prevent that.) + if f.FileStart == token.NoPos { + // Workaround for #70162 (undefined FileStart). + // TODO(adonovan): delete once go1.24 is assured. continue } - if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { + if !tokenFileContainsPos(prog.Fset.File(f.FileStart), start) { continue } if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { @@ -1029,18 +1027,18 @@ func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { info := &PackageInfo{ Pkg: pkg, Info: types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), }, errorFunc: imp.conf.TypeChecker.Error, dir: dir, } - versions.InitFileVersions(&info.Info) // Copy the types.Config so we can vary it across PackageInfos. tc := imp.conf.TypeChecker diff --git a/go/packages/external.go b/go/packages/external.go index 8f7afcb5dfb..96db9daf314 100644 --- a/go/packages/external.go +++ b/go/packages/external.go @@ -79,7 +79,7 @@ type DriverResponse struct { // driver is the type for functions that query the build system for the // packages named by the patterns. 
-type driver func(cfg *Config, patterns ...string) (*DriverResponse, error) +type driver func(cfg *Config, patterns []string) (*DriverResponse, error) // findExternalDriver returns the file path of a tool that supplies // the build system package structure, or "" if not found. @@ -103,7 +103,7 @@ func findExternalDriver(cfg *Config) driver { return nil } } - return func(cfg *Config, words ...string) (*DriverResponse, error) { + return func(cfg *Config, patterns []string) (*DriverResponse, error) { req, err := json.Marshal(DriverRequest{ Mode: cfg.Mode, Env: cfg.Env, @@ -117,7 +117,7 @@ func findExternalDriver(cfg *Config) driver { buf := new(bytes.Buffer) stderr := new(bytes.Buffer) - cmd := exec.CommandContext(cfg.Context, tool, words...) + cmd := exec.CommandContext(cfg.Context, tool, patterns...) cmd.Dir = cfg.Dir // The cwd gets resolved to the real path. On Darwin, where // /tmp is a symlink, this breaks anything that expects the diff --git a/go/packages/golist.go b/go/packages/golist.go index 1a3a5b44f5c..76f910ecec9 100644 --- a/go/packages/golist.go +++ b/go/packages/golist.go @@ -80,6 +80,12 @@ type golistState struct { cfg *Config ctx context.Context + runner *gocommand.Runner + + // overlay is the JSON file that encodes the Config.Overlay + // mapping, used by 'go list -overlay=...'. + overlay string + envOnce sync.Once goEnvError error goEnv map[string]string @@ -127,7 +133,10 @@ func (state *golistState) mustGetEnv() map[string]string { // goListDriver uses the go list command to interpret the patterns and produce // the build system package structure. // See driver for more details. -func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) { +// +// overlay is the JSON file that encodes the cfg.Overlay +// mapping, used by 'go list -overlay=...' +func goListDriver(cfg *Config, runner *gocommand.Runner, overlay string, patterns []string) (_ *DriverResponse, err error) { // Make sure that any asynchronous go commands are killed when we return. parentCtx := cfg.Context if parentCtx == nil { @@ -142,13 +151,15 @@ func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error cfg: cfg, ctx: ctx, vendorDirs: map[string]bool{}, + overlay: overlay, + runner: runner, } // Fill in response.Sizes asynchronously if necessary. - if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { + if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { errCh := make(chan error) go func() { - compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), cfg.gocmdRunner) + compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), runner) response.dr.Compiler = compiler response.dr.Arch = arch errCh <- err @@ -681,7 +692,7 @@ func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool { // getGoVersion returns the effective minor version of the go command. 
func (state *golistState) getGoVersion() (int, error) { state.goVersionOnce.Do(func() { - state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner) + state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.runner) }) return state.goVersion, state.goVersionError } @@ -751,7 +762,7 @@ func jsonFlag(cfg *Config, goVersion int) string { } } addFields("Name", "ImportPath", "Error") // These fields are always needed - if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 { + if cfg.Mode&NeedFiles != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles", "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles", "SwigFiles", "SwigCXXFiles", "SysoFiles") @@ -759,7 +770,7 @@ func jsonFlag(cfg *Config, goVersion int) string { addFields("TestGoFiles", "XTestGoFiles") } } - if cfg.Mode&NeedTypes != 0 { + if cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax, // even when -compiled isn't passed in. // TODO(#52435): Should we make the test ask for -compiled, or automatically @@ -840,7 +851,7 @@ func (state *golistState) cfgInvocation() gocommand.Invocation { Env: cfg.Env, Logf: cfg.Logf, WorkingDir: cfg.Dir, - Overlay: cfg.goListOverlayFile, + Overlay: state.overlay, } } @@ -851,11 +862,8 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, inv := state.cfgInvocation() inv.Verb = verb inv.Args = args - gocmdRunner := cfg.gocmdRunner - if gocmdRunner == nil { - gocmdRunner = &gocommand.Runner{} - } - stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv) + + stdout, stderr, friendlyErr, err := state.runner.RunRaw(cfg.Context, inv) if err != nil { // Check for 'go' executable not being found. if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound { @@ -879,6 +887,12 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, return nil, friendlyErr } + // Return an error if 'go list' failed due to missing tools in + // $GOROOT/pkg/tool/$GOOS_$GOARCH (#69606). + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), `go: no such tool`) { + return nil, friendlyErr + } + // Is there an error running the C compiler in cgo? This will be reported in the "Error" field // and should be suppressed by go list -e. // diff --git a/go/packages/gopackages/gotypesalias.go b/go/packages/gopackages/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/packages/gopackages/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/packages/gopackages/main.go b/go/packages/gopackages/main.go index 7387e7fd10e..aab3362dbfd 100644 --- a/go/packages/gopackages/main.go +++ b/go/packages/gopackages/main.go @@ -6,9 +6,6 @@ // how to use golang.org/x/tools/go/packages to load, parse, // type-check, and print one or more Go packages. // Its precise output is unspecified and may change. 
- -//go:debug gotypesalias=0 - package main import ( diff --git a/go/packages/internal/nodecount/gotypesalias.go b/go/packages/internal/nodecount/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/packages/internal/nodecount/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/packages/internal/nodecount/nodecount.go b/go/packages/internal/nodecount/nodecount.go index 4b36a579ac0..a9f25bfdc6c 100644 --- a/go/packages/internal/nodecount/nodecount.go +++ b/go/packages/internal/nodecount/nodecount.go @@ -13,9 +13,6 @@ // A typical distribution is 40% identifiers, 10% literals, 8% // selectors, and 6% calls; around 3% each of BinaryExpr, BlockStmt, // AssignStmt, Field, and Comment; and the rest accounting for 20%. - -//go:debug gotypesalias=0 - package main import ( diff --git a/go/packages/overlay_test.go b/go/packages/overlay_test.go index 5760b7774b3..9edd0d646ed 100644 --- a/go/packages/overlay_test.go +++ b/go/packages/overlay_test.go @@ -14,7 +14,7 @@ import ( "testing" "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/testenv" ) diff --git a/go/packages/packages.go b/go/packages/packages.go index f227f1bab10..2ecc64238e8 100644 --- a/go/packages/packages.go +++ b/go/packages/packages.go @@ -16,13 +16,13 @@ import ( "go/scanner" "go/token" "go/types" - "io" "log" "os" "path/filepath" "runtime" "strings" "sync" + "sync/atomic" "time" "golang.org/x/sync/errgroup" @@ -31,7 +31,6 @@ import ( "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" "golang.org/x/tools/internal/typesinternal" - "golang.org/x/tools/internal/versions" ) // A LoadMode controls the amount of detail to return when loading. @@ -56,7 +55,7 @@ const ( // NeedName adds Name and PkgPath. NeedName LoadMode = 1 << iota - // NeedFiles adds GoFiles and OtherFiles. + // NeedFiles adds GoFiles, OtherFiles, and IgnoredFiles NeedFiles // NeedCompiledGoFiles adds CompiledGoFiles. @@ -78,7 +77,7 @@ const ( // NeedSyntax adds Syntax and Fset. NeedSyntax - // NeedTypesInfo adds TypesInfo. + // NeedTypesInfo adds TypesInfo and Fset. NeedTypesInfo // NeedTypesSizes adds TypesSizes. @@ -145,13 +144,7 @@ const ( // A Config specifies details about how packages should be loaded. // The zero value is a valid configuration. // -// Calls to Load do not modify this struct. -// -// TODO(adonovan): #67702: this is currently false: in fact, -// calls to [Load] do not modify the public fields of this struct, but -// may modify hidden fields, so concurrent calls to [Load] must not -// use the same Config. But perhaps we should reestablish the -// documented invariant. +// Calls to [Load] do not modify this struct. type Config struct { // Mode controls the level of information returned for each package. Mode LoadMode @@ -182,19 +175,10 @@ type Config struct { // Env []string - // gocmdRunner guards go command calls from concurrency errors. - gocmdRunner *gocommand.Runner - // BuildFlags is a list of command-line flags to be passed through to // the build system's query tool. 
BuildFlags []string - // modFile will be used for -modfile in go command invocations. - modFile string - - // modFlag will be used for -modfile in go command invocations. - modFlag string - // Fset provides source position information for syntax trees and types. // If Fset is nil, Load will use a new fileset, but preserve Fset's value. Fset *token.FileSet @@ -241,9 +225,13 @@ type Config struct { // drivers may vary in their level of support for overlays. Overlay map[string][]byte - // goListOverlayFile is the JSON file that encodes the Overlay - // mapping, used by 'go list -overlay=...' - goListOverlayFile string + // -- Hidden configuration fields only for use in x/tools -- + + // modFile will be used for -modfile in go command invocations. + modFile string + + // modFlag will be used for -modfile in go command invocations. + modFlag string } // Load loads and returns the Go packages named by the given patterns. @@ -334,21 +322,24 @@ func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, erro } else if !response.NotHandled { return response, true, nil } - // (fall through) + // not handled: fall through } // go list fallback - // + // Write overlays once, as there are many calls // to 'go list' (one per chunk plus others too). - overlay, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay) + overlayFile, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay) if err != nil { return nil, false, err } defer cleanupOverlay() - cfg.goListOverlayFile = overlay - response, err := callDriverOnChunks(goListDriver, cfg, chunks) + var runner gocommand.Runner // (shared across many 'go list' calls) + driver := func(cfg *Config, patterns []string) (*DriverResponse, error) { + return goListDriver(cfg, &runner, overlayFile, patterns) + } + response, err := callDriverOnChunks(driver, cfg, chunks) if err != nil { return nil, false, err } @@ -386,16 +377,14 @@ func splitIntoChunks(patterns []string, argMax int) ([][]string, error) { func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) { if len(chunks) == 0 { - return driver(cfg) + return driver(cfg, nil) } responses := make([]*DriverResponse, len(chunks)) errNotHandled := errors.New("driver returned NotHandled") var g errgroup.Group for i, chunk := range chunks { - i := i - chunk := chunk g.Go(func() (err error) { - responses[i], err = driver(cfg, chunk...) 
+ responses[i], err = driver(cfg, chunk) if responses[i] != nil && responses[i].NotHandled { err = errNotHandled } @@ -692,18 +681,19 @@ func (p *Package) String() string { return p.ID } // loaderPackage augments Package with state used during the loading phase type loaderPackage struct { *Package - importErrors map[string]error // maps each bad import to its error - loadOnce sync.Once - color uint8 // for cycle detection - needsrc bool // load from source (Mode >= LoadTypes) - needtypes bool // type information is either requested or depended on - initial bool // package was matched by a pattern - goVersion int // minor version number of go command on PATH + importErrors map[string]error // maps each bad import to its error + preds []*loaderPackage // packages that import this one + unfinishedSuccs atomic.Int32 // number of direct imports not yet loaded + color uint8 // for cycle detection + needsrc bool // load from source (Mode >= LoadTypes) + needtypes bool // type information is either requested or depended on + initial bool // package was matched by a pattern + goVersion int // minor version number of go command on PATH } // loader holds the working state of a single call to load. type loader struct { - pkgs map[string]*loaderPackage + pkgs map[string]*loaderPackage // keyed by Package.ID Config sizes types.Sizes // non-nil if needed by mode parseCache map[string]*parseValue @@ -749,9 +739,6 @@ func newLoader(cfg *Config) *loader { if ld.Config.Env == nil { ld.Config.Env = os.Environ() } - if ld.Config.gocmdRunner == nil { - ld.Config.gocmdRunner = &gocommand.Runner{} - } if ld.Context == nil { ld.Context = context.Background() } @@ -765,7 +752,7 @@ func newLoader(cfg *Config) *loader { ld.requestedMode = ld.Mode ld.Mode = impliedLoadMode(ld.Mode) - if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { + if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { if ld.Fset == nil { ld.Fset = token.NewFileSet() } @@ -806,7 +793,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe" // This package needs type information if the caller requested types and the package is // either a root, or it's a non-root and the user requested dependencies ... - needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) + needtypes := (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) // This package needs source if the call requested source (or types info, which implies source) // and the package is either a root, or it's a non-root and the user requested dependencies... needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) || @@ -831,9 +818,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { } } - if ld.Mode&NeedImports != 0 { - // Materialize the import graph. - + // Materialize the import graph if it is needed (NeedImports), + // or if we'll be using loadPackages (Need{Syntax|Types|TypesInfo}). + var leaves []*loaderPackage // packages with no unfinished successors + if ld.Mode&(NeedImports|NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { const ( white = 0 // new grey = 1 // in progress @@ -852,63 +840,76 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { // dependency on a package that does. These are the only packages // for which we load source code.
var stack []*loaderPackage - var visit func(lpkg *loaderPackage) bool - visit = func(lpkg *loaderPackage) bool { - switch lpkg.color { - case black: - return lpkg.needsrc - case grey: + var visit func(from, lpkg *loaderPackage) bool + visit = func(from, lpkg *loaderPackage) bool { + if lpkg.color == grey { panic("internal error: grey node") } - lpkg.color = grey - stack = append(stack, lpkg) // push - stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports - lpkg.Imports = make(map[string]*Package, len(stubs)) - for importPath, ipkg := range stubs { - var importErr error - imp := ld.pkgs[ipkg.ID] - if imp == nil { - // (includes package "C" when DisableCgo) - importErr = fmt.Errorf("missing package: %q", ipkg.ID) - } else if imp.color == grey { - importErr = fmt.Errorf("import cycle: %s", stack) + if lpkg.color == white { + lpkg.color = grey + stack = append(stack, lpkg) // push + stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports + lpkg.Imports = make(map[string]*Package, len(stubs)) + for importPath, ipkg := range stubs { + var importErr error + imp := ld.pkgs[ipkg.ID] + if imp == nil { + // (includes package "C" when DisableCgo) + importErr = fmt.Errorf("missing package: %q", ipkg.ID) + } else if imp.color == grey { + importErr = fmt.Errorf("import cycle: %s", stack) + } + if importErr != nil { + if lpkg.importErrors == nil { + lpkg.importErrors = make(map[string]error) + } + lpkg.importErrors[importPath] = importErr + continue + } + + if visit(lpkg, imp) { + lpkg.needsrc = true + } + lpkg.Imports[importPath] = imp.Package } - if importErr != nil { - if lpkg.importErrors == nil { - lpkg.importErrors = make(map[string]error) + + // -- postorder -- + + // Complete type information is required for the + // immediate dependencies of each source package. + if lpkg.needsrc && ld.Mode&NeedTypes != 0 { + for _, ipkg := range lpkg.Imports { + ld.pkgs[ipkg.ID].needtypes = true } - lpkg.importErrors[importPath] = importErr - continue } - if visit(imp) { - lpkg.needsrc = true + // NeedTypeSizes causes TypeSizes to be set even + // on packages for which types aren't needed. + if ld.Mode&NeedTypesSizes != 0 { + lpkg.TypesSizes = ld.sizes } - lpkg.Imports[importPath] = imp.Package - } - // Complete type information is required for the - // immediate dependencies of each source package. - if lpkg.needsrc && ld.Mode&NeedTypes != 0 { - for _, ipkg := range lpkg.Imports { - ld.pkgs[ipkg.ID].needtypes = true + // Add packages with no imports directly to the queue of leaves. + if len(lpkg.Imports) == 0 { + leaves = append(leaves, lpkg) } + + stack = stack[:len(stack)-1] // pop + lpkg.color = black } - // NeedTypeSizes causes TypeSizes to be set even - // on packages for which types aren't needed. - if ld.Mode&NeedTypesSizes != 0 { - lpkg.TypesSizes = ld.sizes + // Add edge from predecessor. + if from != nil { + from.unfinishedSuccs.Add(+1) // incref + lpkg.preds = append(lpkg.preds, from) } - stack = stack[:len(stack)-1] // pop - lpkg.color = black return lpkg.needsrc } // For each initial package, create its import DAG. for _, lpkg := range initial { - visit(lpkg) + visit(nil, lpkg) } } else { @@ -921,16 +922,45 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { // Load type data and syntax if needed, starting at // the initial packages (roots of the import DAG). 
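The hunk below replaces the recursive loader with a bottom-up scheduler driven by dependency counters. A standalone sketch of that pattern, under hypothetical names (task, dependents, unfinishedDeps stand in for loaderPackage, preds, unfinishedSuccs; the real work would be parsing and type-checking):

package main

import (
	"fmt"
	"sync/atomic"

	"golang.org/x/sync/errgroup"
)

// task tracks which tasks depend on it and how many of its own
// dependencies are still unfinished.
type task struct {
	name           string
	dependents     []*task      // tasks waiting on this one
	unfinishedDeps atomic.Int32 // dependencies not yet completed
}

func main() {
	// b depends on a; c depends on b.
	a := &task{name: "a"}
	b := &task{name: "b"}
	c := &task{name: "c"}
	a.dependents = []*task{b}
	b.dependents = []*task{c}
	b.unfinishedDeps.Store(1)
	c.unfinishedDeps.Store(1)

	var g errgroup.Group
	var enqueue func(*task)
	enqueue = func(t *task) {
		g.Go(func() error {
			fmt.Println("processing", t.name) // real work goes here
			// Notify each dependent; enqueue it once its last
			// dependency has finished.
			for _, d := range t.dependents {
				if d.unfinishedDeps.Add(-1) == 0 {
					enqueue(d)
				}
			}
			return nil
		})
	}
	enqueue(a) // a is the only leaf (no dependencies)
	if err := g.Wait(); err != nil {
		fmt.Println("error:", err)
	}
}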
- if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { - var wg sync.WaitGroup - for _, lpkg := range initial { - wg.Add(1) - go func(lpkg *loaderPackage) { - ld.loadRecursive(lpkg) - wg.Done() - }(lpkg) + if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { + + // We avoid using g.SetLimit to limit concurrency as + // it makes g.Go stop accepting work, which prevents + // workers from enqueuing, and thus finishing, and thus + // allowing the group to make progress: deadlock. + // + // Instead we use the ioLimit and cpuLimit semaphores. + g, _ := errgroup.WithContext(ld.Context) + + // enqueue adds a package to the type-checking queue. + // It must have no unfinished successors. + var enqueue func(*loaderPackage) + enqueue = func(lpkg *loaderPackage) { + g.Go(func() error { + // Parse and type-check. + ld.loadPackage(lpkg) + + // Notify each waiting predecessor, + // and enqueue it when it becomes a leaf. + for _, pred := range lpkg.preds { + if pred.unfinishedSuccs.Add(-1) == 0 { // decref + enqueue(pred) + } + } + + return nil + }) + } + + // Load leaves first, adding new packages + // to the queue as they become leaves. + for _, leaf := range leaves { + enqueue(leaf) + } + + if err := g.Wait(); err != nil { + return nil, err // cancelled } - wg.Wait() } // If the context is done, return its error and @@ -977,7 +1007,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { if ld.requestedMode&NeedSyntax == 0 { ld.pkgs[i].Syntax = nil } - if ld.requestedMode&NeedTypes == 0 && ld.requestedMode&NeedSyntax == 0 { + if ld.requestedMode&(NeedSyntax|NeedTypes|NeedTypesInfo) == 0 { ld.pkgs[i].Fset = nil } if ld.requestedMode&NeedTypesInfo == 0 { @@ -994,31 +1024,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { return result, nil } -// loadRecursive loads the specified package and its dependencies, -// recursively, in parallel, in topological order. -// It is atomic and idempotent. -// Precondition: ld.Mode&NeedTypes. -func (ld *loader) loadRecursive(lpkg *loaderPackage) { - lpkg.loadOnce.Do(func() { - // Load the direct dependencies, in parallel. - var wg sync.WaitGroup - for _, ipkg := range lpkg.Imports { - imp := ld.pkgs[ipkg.ID] - wg.Add(1) - go func(imp *loaderPackage) { - ld.loadRecursive(imp) - wg.Done() - }(imp) - } - wg.Wait() - ld.loadPackage(lpkg) - }) -} - -// loadPackage loads the specified package. +// loadPackage loads/parses/typechecks the specified package. // It must be called only once per Package, // after immediate dependencies are loaded. -// Precondition: ld.Mode & NeedTypes. +// Precondition: ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0. func (ld *loader) loadPackage(lpkg *loaderPackage) { if lpkg.PkgPath == "unsafe" { // Fill in the blanks to avoid surprises. @@ -1054,6 +1063,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { if !lpkg.needtypes && !lpkg.needsrc { return } + + // TODO(adonovan): this condition looks wrong: + // I think it should be lpkg.needtypes && !lpkg.needsrc, + // so that NeedSyntax without NeedTypes can be satisfied by export data.
if !lpkg.needsrc { if err := ld.loadFromExportData(lpkg); err != nil { lpkg.Errors = append(lpkg.Errors, Error{ @@ -1159,7 +1172,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { } lpkg.Syntax = files - if ld.Config.Mode&NeedTypes == 0 { + if ld.Config.Mode&(NeedTypes|NeedTypesInfo) == 0 { return } @@ -1170,16 +1183,20 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { return } - lpkg.TypesInfo = &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + // Populate TypesInfo only if needed, as it + // causes the type checker to work much harder. + if ld.Config.Mode&NeedTypesInfo != 0 { + lpkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), + } } - versions.InitFileVersions(lpkg.TypesInfo) lpkg.TypesSizes = ld.sizes importer := importerFunc(func(path string) (*types.Package, error) { @@ -1232,6 +1249,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { } } + // Type-checking is CPU intensive. + cpuLimit <- unit{} // acquire a token + defer func() { <-cpuLimit }() // release a token + typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) lpkg.importErrors = nil // no longer needed @@ -1296,8 +1317,11 @@ type importerFunc func(path string) (*types.Package, error) func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } // We use a counting semaphore to limit -// the number of parallel I/O calls per process. -var ioLimit = make(chan bool, 20) +// the number of parallel I/O calls or CPU threads per process. +var ( + ioLimit = make(chan unit, 20) + cpuLimit = make(chan unit, runtime.GOMAXPROCS(0)) +) func (ld *loader) parseFile(filename string) (*ast.File, error) { ld.parseCacheMu.Lock() @@ -1314,20 +1338,28 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) { var src []byte for f, contents := range ld.Config.Overlay { + // TODO(adonovan): Inefficient for large overlays. + // Do an exact name-based map lookup + // (for nonexistent files) followed by a + // FileID-based map lookup (for existing ones). if sameFile(f, filename) { src = contents + break } } var err error if src == nil { - ioLimit <- true // wait + ioLimit <- unit{} // acquire a token src, err = os.ReadFile(filename) - <-ioLimit // signal + <-ioLimit // release a token } if err != nil { v.err = err } else { + // Parsing is CPU intensive. + cpuLimit <- unit{} // acquire a token v.f, v.err = ld.ParseFile(ld.Fset, filename, src) + <-cpuLimit // release a token } close(v.ready) @@ -1342,18 +1374,21 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) { // Because files are scanned in parallel, the token.Pos // positions of the resulting ast.Files are not ordered. 
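The ioLimit and cpuLimit channels above are counting semaphores: sending acquires a token and receiving releases it. A standalone sketch of the same pattern, with hypothetical names (cpuTokens, limitedWork), not the loader's actual variables:

package main

import (
	"fmt"
	"runtime"
	"sync"
)

type unit struct{}

// cpuTokens caps the number of concurrent CPU-bound calls at GOMAXPROCS.
var cpuTokens = make(chan unit, runtime.GOMAXPROCS(0))

// limitedWork runs fn with at most GOMAXPROCS invocations in flight.
func limitedWork(fn func()) {
	cpuTokens <- unit{}            // acquire a token
	defer func() { <-cpuTokens }() // release a token
	fn()
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			limitedWork(func() { fmt.Println("task", i) })
		}(i)
	}
	wg.Wait()
}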
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { - var wg sync.WaitGroup - n := len(filenames) - parsed := make([]*ast.File, n) - errors := make([]error, n) - for i, file := range filenames { - wg.Add(1) - go func(i int, filename string) { + var ( + n = len(filenames) + parsed = make([]*ast.File, n) + errors = make([]error, n) + ) + var g errgroup.Group + for i, filename := range filenames { + // This creates goroutines unnecessarily in the + // cache-hit case, but that case is uncommon. + g.Go(func() error { parsed[i], errors[i] = ld.parseFile(filename) - wg.Done() - }(i, file) + return nil + }) } - wg.Wait() + g.Wait() // Eliminate nils, preserving order. var o int @@ -1524,4 +1559,4 @@ func usesExportData(cfg *Config) bool { return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0 } -var _ interface{} = io.Discard // assert build toolchain is go1.16 or later +type unit struct{} diff --git a/go/packages/packages_test.go b/go/packages/packages_test.go index e78d3cdb881..939f2df2da4 100644 --- a/go/packages/packages_test.go +++ b/go/packages/packages_test.go @@ -23,12 +23,14 @@ import ( "sort" "strings" "testing" + "testing/fstest" "time" "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/packages/packagestest" "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/testenv" + "golang.org/x/tools/internal/testfiles" ) // testCtx is canceled when the test binary is about to time out. @@ -2682,7 +2684,7 @@ func testIssue48226(t *testing.T, exporter packagestest.Exporter) { t.Fatalf("package has errors: %v", pkg.Errors) } - fname := pkg.Fset.File(pkg.Syntax[0].Pos()).Name() + fname := pkg.Fset.File(pkg.Syntax[0].FileStart).Name() if filepath.Base(fname) != "syntax.go" { t.Errorf("expected the package declaration position "+ "to resolve to \"syntax.go\", got %q instead", fname) @@ -3103,3 +3105,107 @@ func TestLoadOverlayGoMod(t *testing.T) { t.Errorf("Load: got %s, want %v", got, want) } } + +func overlayFS(overlay map[string][]byte) fstest.MapFS { + fs := make(fstest.MapFS) + for name, data := range overlay { + fs[name] = &fstest.MapFile{Data: data} + } + return fs +} + +// TestIssue69606a tests when tools in $GOROOT/pkg/tool/$GOOS_$GOARCH are missing, +// Load should return an error. +func TestIssue69606a(t *testing.T) { + testenv.NeedsTool(t, "go") + overlay := overlayFS(map[string][]byte{ + "io/io.go": []byte("package io"), + "unsafe/unsafe.go": []byte("package unsafe"), + }) + goroot := testfiles.CopyToTmp(t, overlay) + + t.Logf("custom GOROOT: %s", goroot) + + // load the std packages under a custom GOROOT + _, err := packages.Load(&packages.Config{ + Mode: packages.NeedName | + packages.NeedFiles | + packages.NeedImports | + packages.NeedTypes, + Env: append( + os.Environ(), + "GO111MODULES=on", + "GOPATH=", + "GOWORK=off", + "GOPROXY=off", + fmt.Sprintf("GOROOT=%s", goroot)), + }, "std") + + if err == nil { + t.Fatal("Expected to get an error because missing tool 'compile' but got a nil error") + } +} + +// TestIssue69606b tests when loading std from a fake goroot without a unsafe package, +// Load should return an error. 
+func TestIssue69606b(t *testing.T) { + testenv.NeedsTool(t, "go") + overlay := overlayFS(map[string][]byte{ + "io/io.go": []byte("package io"), + }) + goroot := testfiles.CopyToTmp(t, overlay) + + t.Logf("custom GOROOT: %s", goroot) + + // load the std packages under a custom GOROOT + _, err := packages.Load(&packages.Config{ + Mode: packages.NeedName | + packages.NeedFiles | + packages.NeedImports | + packages.NeedTypes, + Env: append( + os.Environ(), + "GO111MODULES=on", + "GOPATH=", + "GOWORK=off", + "GOPROXY=off", + fmt.Sprintf("GOROOT=%s", goroot)), + }, "std") + + if err == nil { + t.Fatal("Expected to get an error because missing unsafe package but got a nil error") + } +} + +// TestLoadTypesInfoWithoutSyntaxOrTypes tests that when NeedTypesInfo is set and NeedSyntax & NeedTypes are not, +// Load still populates the TypesInfo of packages properly. +func TestLoadTypesInfoWithoutSyntaxOrTypes(t *testing.T) { + testAllOrModulesParallel(t, testLoadTypesInfoWithoutSyntaxOrTypes) +} + +func testLoadTypesInfoWithoutSyntaxOrTypes(t *testing.T, exporter packagestest.Exporter) { + exported := packagestest.Export(t, exporter, []packagestest.Module{{ + Name: "golang.org/fake", + Files: map[string]interface{}{ + "a/a.go": `package a; + +func foo() int { + i := 0 + s := "abc" + return i + len(s) +} +`, + }}}) + defer exported.Cleanup() + exported.Config.Mode = packages.NeedTypesInfo + + pkgs, err := packages.Load(exported.Config, "golang.org/fake/a") + if err != nil { + t.Fatal(err) + } + + // check if types info is present + if pkgs[0].TypesInfo == nil { + t.Errorf("expected types info to be present but got nil") + } +} diff --git a/go/packages/packagestest/export.go b/go/packages/packagestest/export.go index 67d48562f4c..47e6d11b94b 100644 --- a/go/packages/packagestest/export.go +++ b/go/packages/packagestest/export.go @@ -5,6 +5,10 @@ /* Package packagestest creates temporary projects on disk for testing go tools on. +[Note: there is an open proposal (golang/go#70229) to deprecate, tag, +and delete this package. If accepted, the last version of the package +will be available indefinitely but will not receive updates.] + By changing the exporter used, you can create projects for multiple build systems from the same description, and run the same tests on them in many cases. diff --git a/go/packages/stdlib_test.go b/go/packages/stdlib_test.go index aac1ea558f5..33e06a96633 100644 --- a/go/packages/stdlib_test.go +++ b/go/packages/stdlib_test.go @@ -50,3 +50,33 @@ func TestStdlibMetadata(t *testing.T) { t.Log("Metadata: ", t1.Sub(t0)) // ~800ms on 12 threads t.Log("#MB: ", int64(memstats.Alloc-alloc)/1000000) // ~1MB } + +// BenchmarkNetHTTP measures the time to load/parse/typecheck the +// net/http package and all dependencies.
+func BenchmarkNetHTTP(b *testing.B) { + testenv.NeedsGoPackages(b) + b.ReportAllocs() + + var bytes int64 + + for i := range b.N { + cfg := &packages.Config{Mode: packages.LoadAllSyntax} + pkgs, err := packages.Load(cfg, "net/http") + if err != nil { + b.Fatalf("failed to load metadata: %v", err) + } + if packages.PrintErrors(pkgs) > 0 { + b.Fatal("there were errors loading net/http") + } + + if i == 0 { + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, f := range pkg.Syntax { + bytes += int64(f.FileEnd - f.FileStart) + } + }) + } + } + + b.SetBytes(bytes) // total source bytes +} diff --git a/go/ssa/builder_generic_test.go b/go/ssa/builder_generic_test.go index 5b8767c33ea..af16036dfa9 100644 --- a/go/ssa/builder_generic_test.go +++ b/go/ssa/builder_generic_test.go @@ -13,9 +13,9 @@ import ( "sort" "testing" - "golang.org/x/tools/go/expect" "golang.org/x/tools/go/loader" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/expect" ) // TestGenericBodies tests that bodies of generic functions and methods containing @@ -29,7 +29,7 @@ import ( // // where a, b and c are the types of the arguments to the print call // serialized using go/types.Type.String(). -// See x/tools/go/expect for details on the syntax. +// See x/tools/internal/expect for details on the syntax. func TestGenericBodies(t *testing.T) { for _, content := range []string{ ` diff --git a/go/ssa/builder_test.go b/go/ssa/builder_test.go index bc1989c58b7..59d8a91ea6a 100644 --- a/go/ssa/builder_test.go +++ b/go/ssa/builder_test.go @@ -6,6 +6,7 @@ package ssa_test import ( "bytes" + "errors" "fmt" "go/ast" "go/importer" @@ -24,10 +25,10 @@ import ( "golang.org/x/sync/errgroup" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/go/expect" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/expect" "golang.org/x/tools/internal/testenv" ) @@ -666,6 +667,8 @@ var indirect = R[int].M // TestTypeparamTest builds SSA over compilable examples in $GOROOT/test/typeparam/*.go. func TestTypeparamTest(t *testing.T) { + testenv.NeedsGOROOTDir(t, "test") + // Tests use a fake goroot to stub out standard libraries with declarations in // testdata/src. Decreases runtime from ~80s to ~1s. @@ -684,8 +687,10 @@ func TestTypeparamTest(t *testing.T) { t.Fatalf("Failed to load errors package from std: %s", err) } goroot := filepath.Dir(filepath.Dir(filepath.Dir(stdPkgs[0].GoFiles[0]))) - dir := filepath.Join(goroot, "test", "typeparam") + if _, err = os.Stat(dir); errors.Is(err, os.ErrNotExist) { + t.Skipf("test/typeparam doesn't exist under GOROOT %s", goroot) + } // Collect all of the .go files in fsys := os.DirFS(dir) @@ -694,24 +699,39 @@ func TestTypeparamTest(t *testing.T) { t.Fatal(err) } + // Each call to buildPackage calls package.Load, which invokes "go list", + // and with over 300 subtests this can be very slow (minutes, or tens + // on some platforms). So, we use an overlay to map each test file to a + // distinct single-file package and load them all at once. + overlay := map[string][]byte{ + "go.mod": goMod("example.com", -1), + } for _, entry := range entries { if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") { continue // Consider standalone go files. } - t.Run(entry.Name(), func(t *testing.T) { - src, err := fs.ReadFile(fsys, entry.Name()) - if err != nil { - t.Fatal(err) - } - - // Only build test files that can be compiled, or compiled and run. 
- if !bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// compile")) { - t.Skipf("not detected as a run test") - } + src, err := fs.ReadFile(fsys, entry.Name()) + if err != nil { + t.Fatal(err) + } + // Only build test files that can be compiled, or compiled and run. + if !bytes.HasPrefix(src, []byte("// run")) && !bytes.HasPrefix(src, []byte("// compile")) { + t.Logf("%s: not detected as a run test", entry.Name()) + continue + } - t.Logf("Input: %s\n", entry.Name()) + filename := fmt.Sprintf("%s/main.go", entry.Name()) + overlay[filename] = src + } - _, _ = buildPackage(t, string(src), ssa.SanityCheckFunctions|ssa.InstantiateGenerics) + // load all packages inside the overlay so 'go list' will be triggered only once. + pkgs := loadPackages(t, overlayFS(overlay), "./...") + for _, p := range pkgs { + originFilename := filepath.Base(filepath.Dir(p.GoFiles[0])) + t.Run(originFilename, func(t *testing.T) { + t.Parallel() + prog, _ := ssautil.Packages([]*packages.Package{p}, ssa.SanityCheckFunctions|ssa.InstantiateGenerics) + prog.Package(p.Types).Build() }) } } diff --git a/go/ssa/interp/interp_test.go b/go/ssa/interp/interp_test.go index 8ce9f368aec..f382c61f223 100644 --- a/go/ssa/interp/interp_test.go +++ b/go/ssa/interp/interp_test.go @@ -20,6 +20,7 @@ import ( "fmt" "go/build" "go/types" + "io" "log" "os" "path/filepath" @@ -152,11 +153,7 @@ func init() { } func run(t *testing.T, input string, goroot string) { - // The recover2 test case is broken on Go 1.14+. See golang/go#34089. - // TODO(matloob): Fix this. - if filepath.Base(input) == "recover2.go" { - t.Skip("The recover2.go test is broken in go1.14+. See golang.org/issue/34089.") - } + testenv.NeedsExec(t) // really we just need os.Pipe, but os/exec uses pipes t.Logf("Input: %s\n", input) @@ -186,10 +183,10 @@ func run(t *testing.T, input string, goroot string) { var hint string defer func() { if hint != "" { - fmt.Println("FAIL") - fmt.Println(hint) + t.Log("FAIL") + t.Log(hint) } else { - fmt.Println("PASS") + t.Log("PASS") } interp.CapturedOutput = nil @@ -219,10 +216,53 @@ func run(t *testing.T, input string, goroot string) { panic("bogus SizesFor") } hint = fmt.Sprintf("To trace execution, run:\n%% go build golang.org/x/tools/cmd/ssadump && ./ssadump -build=C -test -run --interp=T %s\n", input) + + // Capture anything written by the interpreter to os.Std{out,err} + // by temporarily redirecting them to a buffer via a pipe. + // + // While capturing is in effect, we must not write any + // test-related stuff to stderr (including log.Print, t.Log, etc). + // + // Suppress capturing if we are the child process of TestRangeFunc. + // TODO(adonovan): simplify that test using this mechanism. + // Also eliminate the redundant interp.CapturedOutput mechanism. + restore := func() {} // restore files and log the mixed out/err. + if os.Getenv("INTERPTEST_CHILD") == "" { + // Connect std{out,err} to pipe. + r, w, err := os.Pipe() + if err != nil { + t.Fatalf("can't create pipe for stderr: %v", err) + } + savedStdout := os.Stdout + savedStderr := os.Stderr + os.Stdout = w + os.Stderr = w + + // Buffer what is written. + var buf bytes.Buffer + done := make(chan struct{}) + go func() { + if _, err := io.Copy(&buf, r); err != nil { + fmt.Fprintf(savedStderr, "io.Copy: %v", err) + } + close(done) + }() + + // Finally, restore the files and log what was captured. 
+ restore = func() { + os.Stdout = savedStdout + os.Stderr = savedStderr + w.Close() + <-done + t.Logf("Interpreter's stdout+stderr:\n%s", &buf) + } + } + var imode interp.Mode // default mode // imode |= interp.DisableRecover // enable for debugging // imode |= interp.EnableTracing // enable for debugging exitCode := interp.Interpret(mainPkg, imode, sizes, input, []string{}) + restore() if exitCode != 0 { t.Fatalf("interpreting %s: exit code was %d", input, exitCode) } @@ -300,6 +340,8 @@ func TestTestdataFiles(t *testing.T) { // TestGorootTest runs the interpreter on $GOROOT/test/*.go. func TestGorootTest(t *testing.T) { + testenv.NeedsGOROOTDir(t, "test") + goroot := makeGoroot(t) for _, input := range gorootTestTests { t.Run(input, func(t *testing.T) { @@ -312,6 +354,8 @@ func TestGorootTest(t *testing.T) { // in $GOROOT/test/typeparam/*.go. func TestTypeparamTest(t *testing.T) { + testenv.NeedsGOROOTDir(t, "test") + if runtime.GOARCH == "wasm" { // See ssa/TestTypeparamTest. t.Skip("Consistent flakes on wasm (e.g. https://go.dev/issues/64726)") diff --git a/go/ssa/interp/value.go b/go/ssa/interp/value.go index 8fa0180ba05..d1250b119d1 100644 --- a/go/ssa/interp/value.go +++ b/go/ssa/interp/value.go @@ -141,7 +141,7 @@ func (x array) hash(t types.Type) int { h := 0 tElt := t.Underlying().(*types.Array).Elem() for _, xi := range x { - h += hash(tElt, xi) + h += hash(t, tElt, xi) } return h } @@ -164,7 +164,7 @@ func (x structure) hash(t types.Type) int { h := 0 for i, n := 0, tStruct.NumFields(); i < n; i++ { if f := tStruct.Field(i); !f.Anonymous() { - h += hash(f.Type(), x[i]) + h += hash(t, f.Type(), x[i]) } } return h @@ -183,8 +183,8 @@ func (x iface) eq(t types.Type, _y interface{}) bool { return sameType(x.t, y.t) && (x.t == nil || equals(x.t, x.v, y.v)) } -func (x iface) hash(_ types.Type) int { - return hashType(x.t)*8581 + hash(x.t, x.v) +func (x iface) hash(outer types.Type) int { + return hashType(x.t)*8581 + hash(outer, x.t, x.v) } func (x rtype) hash(_ types.Type) int { @@ -256,7 +256,8 @@ func equals(t types.Type, x, y value) bool { } // Returns an integer hash of x such that equals(x, y) => hash(x) == hash(y). -func hash(t types.Type, x value) int { +// The outer type is used only for the "unhashable" panic message. +func hash(outer, t types.Type, x value) int { switch x := x.(type) { case bool: if x { @@ -308,7 +309,7 @@ func hash(t types.Type, x value) int { case rtype: return x.hash(t) } - panic(fmt.Sprintf("%T is unhashable", x)) + panic(fmt.Sprintf("unhashable type %v", outer)) } // reflect.Value struct values don't have a fixed shape, since the diff --git a/go/ssa/source_test.go b/go/ssa/source_test.go index bd156cbc5e8..3d0bfe4cdef 100644 --- a/go/ssa/source_test.go +++ b/go/ssa/source_test.go @@ -18,8 +18,8 @@ import ( "testing" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/expect" "golang.org/x/tools/go/ssa" + "golang.org/x/tools/internal/expect" ) func TestObjValueLookup(t *testing.T) { diff --git a/go/ssa/ssautil/deprecated_test.go b/go/ssa/ssautil/deprecated_test.go index 9bc39e7eebd..1793b06dcdb 100644 --- a/go/ssa/ssautil/deprecated_test.go +++ b/go/ssa/ssautil/deprecated_test.go @@ -15,10 +15,13 @@ import ( "golang.org/x/tools/go/loader" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/testenv" ) // TestCreateProgram tests CreateProgram which has an x/tools/go/loader.Program. 
func TestCreateProgram(t *testing.T) { + testenv.NeedsGoBuild(t) // for importer.Default() + conf := loader.Config{ParserMode: parser.ParseComments} f, err := conf.ParseFile("hello.go", hello) if err != nil { diff --git a/go/ssa/ssautil/load.go b/go/ssa/ssautil/load.go index 51fba054541..c64b03f177f 100644 --- a/go/ssa/ssautil/load.go +++ b/go/ssa/ssautil/load.go @@ -13,7 +13,6 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/versions" ) // Packages creates an SSA program for a set of packages. @@ -134,15 +133,15 @@ func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, fil } info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { return nil, nil, err } diff --git a/go/ssa/ssautil/load_test.go b/go/ssa/ssautil/load_test.go index efa2ba40a8b..10375a3227f 100644 --- a/go/ssa/ssautil/load_test.go +++ b/go/ssa/ssautil/load_test.go @@ -17,9 +17,9 @@ import ( "testing" "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/packages/packagestest" "golang.org/x/tools/go/ssa" "golang.org/x/tools/go/ssa/ssautil" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/testenv" ) diff --git a/go/types/internal/play/gotypesalias.go b/go/types/internal/play/gotypesalias.go new file mode 100644 index 00000000000..288c10c2d0a --- /dev/null +++ b/go/types/internal/play/gotypesalias.go @@ -0,0 +1,12 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +//go:debug gotypesalias=1 + +package main + +// Materialize aliases whenever the go toolchain version is after 1.23 (#69772). +// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1). diff --git a/go/types/internal/play/play.go b/go/types/internal/play/play.go index e8b8cb9bbbe..eb9e5794b94 100644 --- a/go/types/internal/play/play.go +++ b/go/types/internal/play/play.go @@ -9,9 +9,6 @@ // It is intended for convenient exploration and debugging of // go/types. The command and its web interface are not officially // supported and they may be changed arbitrarily in the future. 
- -//go:debug gotypesalias=0 - package main import ( @@ -116,7 +113,7 @@ func handleSelectJSON(w http.ResponseWriter, req *http.Request) { fset := pkg.Fset file := pkg.Syntax[0] - tokFile := fset.File(file.Pos()) + tokFile := fset.File(file.FileStart) startPos := tokFile.Pos(startOffset) endPos := tokFile.Pos(endOffset) @@ -197,8 +194,10 @@ func handleSelectJSON(w http.ResponseWriter, req *http.Request) { if tv.Value != nil { fmt.Fprintf(out, ", and constant value %v", tv.Value) } - fmt.Fprintf(out, "\n\n") + } else { + fmt.Fprintf(out, "%T has no type", innermostExpr) } + fmt.Fprintf(out, "\n\n") } // selection x.f information (if cursor is over .f) diff --git a/go/types/objectpath/objectpath.go b/go/types/objectpath/objectpath.go index a70b727f2c6..16ed3c1780b 100644 --- a/go/types/objectpath/objectpath.go +++ b/go/types/objectpath/objectpath.go @@ -281,25 +281,25 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { T := o.Type() if alias, ok := T.(*types.Alias); ok { - if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam, nil); r != nil { + if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam); r != nil { return Path(r), nil } - if r := find(obj, aliases.Rhs(alias), append(path, opRhs), nil); r != nil { + if r := find(obj, aliases.Rhs(alias), append(path, opRhs)); r != nil { return Path(r), nil } } else if tname.IsAlias() { // legacy alias - if r := find(obj, T, path, nil); r != nil { + if r := find(obj, T, path); r != nil { return Path(r), nil } } else if named, ok := T.(*types.Named); ok { // defined (named) type - if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam, nil); r != nil { + if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam); r != nil { return Path(r), nil } - if r := find(obj, named.Underlying(), append(path, opUnderlying), nil); r != nil { + if r := find(obj, named.Underlying(), append(path, opUnderlying)); r != nil { return Path(r), nil } } @@ -312,7 +312,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { if _, ok := o.(*types.TypeName); !ok { if o.Exported() { // exported non-type (const, var, func) - if r := find(obj, o.Type(), append(path, opType), nil); r != nil { + if r := find(obj, o.Type(), append(path, opType)); r != nil { return Path(r), nil } } @@ -332,7 +332,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { if m == obj { return Path(path2), nil // found declared method } - if r := find(obj, m.Type(), append(path2, opType), nil); r != nil { + if r := find(obj, m.Type(), append(path2, opType)); r != nil { return Path(r), nil } } @@ -447,46 +447,64 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { // // The seen map is used to short circuit cycles through type parameters. If // nil, it will be allocated as necessary. -func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte { +// +// The seenMethods map is used internally to short circuit cycles through +// interface methods, such as occur in the following example: +// +// type I interface { f() interface{I} } +// +// See golang/go#68046 for details. +func find(obj types.Object, T types.Type, path []byte) []byte { + return (&finder{obj: obj}).find(T, path) +} + +// finder closes over search state for a call to find. 
+type finder struct { + obj types.Object // the sought object + seenTParamNames map[*types.TypeName]bool // for cycle breaking through type parameters + seenMethods map[*types.Func]bool // for cycle breaking through recursive interfaces +} + +func (f *finder) find(T types.Type, path []byte) []byte { switch T := T.(type) { case *types.Alias: - return find(obj, types.Unalias(T), path, seen) + return f.find(types.Unalias(T), path) case *types.Basic, *types.Named: // Named types belonging to pkg were handled already, // so T must belong to another package. No path. return nil case *types.Pointer: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Slice: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Array: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Chan: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Map: - if r := find(obj, T.Key(), append(path, opKey), seen); r != nil { + if r := f.find(T.Key(), append(path, opKey)); r != nil { return r } - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Signature: - if r := findTypeParam(obj, T.RecvTypeParams(), path, opRecvTypeParam, nil); r != nil { + if r := f.findTypeParam(T.RecvTypeParams(), path, opRecvTypeParam); r != nil { return r } - if r := findTypeParam(obj, T.TypeParams(), path, opTypeParam, seen); r != nil { + if r := f.findTypeParam(T.TypeParams(), path, opTypeParam); r != nil { return r } - if r := find(obj, T.Params(), append(path, opParams), seen); r != nil { + if r := f.find(T.Params(), append(path, opParams)); r != nil { return r } - return find(obj, T.Results(), append(path, opResults), seen) + return f.find(T.Results(), append(path, opResults)) case *types.Struct: for i := 0; i < T.NumFields(); i++ { fld := T.Field(i) path2 := appendOpArg(path, opField, i) - if fld == obj { + if fld == f.obj { return path2 // found field var } - if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil { + if r := f.find(fld.Type(), append(path2, opType)); r != nil { return r } } @@ -495,10 +513,10 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] for i := 0; i < T.Len(); i++ { v := T.At(i) path2 := appendOpArg(path, opAt, i) - if v == obj { + if v == f.obj { return path2 // found param/result var } - if r := find(obj, v.Type(), append(path2, opType), seen); r != nil { + if r := f.find(v.Type(), append(path2, opType)); r != nil { return r } } @@ -506,28 +524,35 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] case *types.Interface: for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) + if f.seenMethods[m] { + return nil + } path2 := appendOpArg(path, opMethod, i) - if m == obj { + if m == f.obj { return path2 // found interface method } - if r := find(obj, m.Type(), append(path2, opType), seen); r != nil { + if f.seenMethods == nil { + f.seenMethods = make(map[*types.Func]bool) + } + f.seenMethods[m] = true + if r := f.find(m.Type(), append(path2, opType)); r != nil { return r } } return nil case *types.TypeParam: name := T.Obj() - if name == obj { - return append(path, opObj) - } - if seen[name] { + if f.seenTParamNames[name] { return nil } - if seen == nil { - seen = make(map[*types.TypeName]bool) + if name == f.obj { + return 
append(path, opObj) } - seen[name] = true - if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil { + if f.seenTParamNames == nil { + f.seenTParamNames = make(map[*types.TypeName]bool) + } + f.seenTParamNames[name] = true + if r := f.find(T.Constraint(), append(path, opConstraint)); r != nil { return r } return nil @@ -535,11 +560,15 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] panic(T) } -func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte, seen map[*types.TypeName]bool) []byte { +func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte) []byte { + return (&finder{obj: obj}).findTypeParam(list, path, op) +} + +func (f *finder) findTypeParam(list *types.TypeParamList, path []byte, op byte) []byte { for i := 0; i < list.Len(); i++ { tparam := list.At(i) path2 := appendOpArg(path, op, i) - if r := find(obj, tparam, path2, seen); r != nil { + if r := f.find(tparam, path2); r != nil { return r } } diff --git a/go/types/objectpath/objectpath_test.go b/go/types/objectpath/objectpath_test.go index 838f1be44df..0805c9d919a 100644 --- a/go/types/objectpath/objectpath_test.go +++ b/go/types/objectpath/objectpath_test.go @@ -82,6 +82,8 @@ package a type Int int type T struct{x, y int} + +type Issue68046 interface { f(x int) interface{Issue68046} } ` pkgmap := loadPackages(t, src, "./a", "./b") @@ -123,7 +125,8 @@ type T struct{x, y int} {"b", "R.UEF0", "field y int", ""}, {"b", "Q.UEF0", "field z int", ""}, {"a", "T", "type a.T struct{x int; y int}", ""}, - {"a", "T.UF0", "field x int", ""}, + {"a", "Issue68046.UM0", "func (a.Issue68046).f(x int) interface{a.Issue68046}", ""}, + {"a", "Issue68046.UM0.PA0", "var x int", ""}, // Bad paths {"b", "", "", "empty path"}, diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index faee0f88721..1d48bc743a9 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -14,7 +14,6 @@ import ( "testing" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/versions" ) func TestStaticCallee(t *testing.T) { @@ -122,11 +121,11 @@ func testStaticCallee(t *testing.T, contents []string) { packages := make(map[string]*types.Package) cfg := &types.Config{Importer: closure(packages)} info := &types.Info{ - Instances: make(map[*ast.Ident]types.Instance), - Uses: make(map[*ast.Ident]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Instances: make(map[*ast.Ident]types.Instance), + Uses: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) var files []*ast.File for i, content := range contents { diff --git a/godoc/index.go b/godoc/index.go index 4195dd205f6..377837a0b36 100644 --- a/godoc/index.go +++ b/godoc/index.go @@ -649,7 +649,7 @@ func (x *Indexer) addFile(f vfs.ReadSeekCloser, filename string, goFile bool) (f if goFile { // parse the file and in the process add it to the file set if ast, err = parser.ParseFile(x.fset, filename, src, parser.ParseComments); err == nil { - file = x.fset.File(ast.Pos()) // ast.Pos() is inside the file + file = x.fset.File(ast.FileStart) // ast.FileStart is inside the file return } // file has parse errors, and the AST may be incorrect - diff --git a/godoc/versions_test.go b/godoc/versions_test.go index bfc05f626a8..0c5ca50c774 100644 --- a/godoc/versions_test.go +++ b/godoc/versions_test.go @@ -7,6 
+7,8 @@ package godoc import ( "go/build" "testing" + + "golang.org/x/tools/internal/testenv" ) func TestParseVersionRow(t *testing.T) { @@ -88,6 +90,8 @@ func hasTag(t string) bool { } func TestAPIVersion(t *testing.T) { + testenv.NeedsGOROOTDir(t, "api") + av, err := parsePackageAPIInfo() if err != nil { t.Fatal(err) diff --git a/gopls/doc/analyzers.md b/gopls/doc/analyzers.md index ec2b6316374..f7083bb0e89 100644 --- a/gopls/doc/analyzers.md +++ b/gopls/doc/analyzers.md @@ -428,8 +428,8 @@ Package documentation: [loopclosure](https://pkg.go.dev/golang.org/x/tools/go/an The cancellation function returned by context.WithCancel, WithTimeout, -and WithDeadline must be called or the new context will remain live -until its parent context is cancelled. +WithDeadline and variants such as WithCancelCause must be called, +or the new context will remain live until its parent context is cancelled. (The background context is never cancelled.) Default: on. diff --git a/gopls/doc/codelenses.md b/gopls/doc/codelenses.md index 71425ce09c6..0930076bec6 100644 --- a/gopls/doc/codelenses.md +++ b/gopls/doc/codelenses.md @@ -16,7 +16,7 @@ Their features are subject to change. Client support: - **VS Code**: Code Lenses appear as small text links above a line of source code. -- **Emacs + eglot**: Not supported, but prototype exists at https://github.joaotavora/eglot/pull/71. +- **Emacs + eglot**: Not supported, but prototype exists at https://github.com/joaotavora/eglot/pull/71. - **Vim + coc.nvim**: ?? - **CLI**: `gopls codelens`. For example, `gopls codelens -exec file.go:123 "run test"` runs the test at the specified line. diff --git a/gopls/doc/contributing.md b/gopls/doc/contributing.md index 007c5793073..914794aee71 100644 --- a/gopls/doc/contributing.md +++ b/gopls/doc/contributing.md @@ -19,7 +19,7 @@ claiming it. ## Getting started Most of the `gopls` logic is in the `golang.org/x/tools/gopls/internal` -directory. See [design/implementation.md] for an overview of the code organization. +directory. See [design/implementation.md](./design/implementation.md) for an overview of the code organization. ## Build diff --git a/gopls/doc/features/README.md b/gopls/doc/features/README.md index 41449de7f20..92203ed677a 100644 --- a/gopls/doc/features/README.md +++ b/gopls/doc/features/README.md @@ -46,10 +46,10 @@ when making significant changes to existing features or when adding new ones. 
- [Code transformation](transformation.md): fixes and refactorings - [Formatting](transformation.md#formatting): format the source code - [Rename](transformation.md#rename): rename a symbol or package - - [Organize imports](transformation.md#organize-imports): organize the import declaration - - [Extract](transformation.md#extract): extract selection to a new file/function/variable - - [Inline](transformation.md#inline): inline a call to a function or method - - [Miscellaneous rewrites](transformation.md#miscellaneous-rewrites): various Go-specific refactorings + - [Organize imports](transformation.md#source.organizeImports): organize the import declaration + - [Extract](transformation.md#refactor.extract): extract selection to a new file/function/variable + - [Inline](transformation.md#refactor.inline.call): inline a call to a function or method + - [Miscellaneous rewrites](transformation.md#refactor.rewrite): various Go-specific refactorings - [Web-based queries](web.md): commands that open a browser page - [Package documentation](web.md#doc): browse documentation for current Go package - [Free symbols](web.md#freesymbols): show symbols used by a selected block of code diff --git a/gopls/doc/features/diagnostics.md b/gopls/doc/features/diagnostics.md index b667f69a080..5955a55d8b3 100644 --- a/gopls/doc/features/diagnostics.md +++ b/gopls/doc/features/diagnostics.md @@ -11,7 +11,7 @@ common mistakes. Diagnostics come from two main sources: compilation errors and analysis findings. - **Compilation errors** are those that you would obtain from running `go - build`. Gopls doesn't actually run the compiler; that would be too +build`. Gopls doesn't actually run the compiler; that would be too slow. Instead it runs `go list` (when needed) to compute the metadata of the compilation, then processes those packages in a similar manner to the compiler front-end: reading, scanning, and parsing the @@ -51,7 +51,7 @@ Diagnostics come from two main sources: compilation errors and analysis findings ## Recomputation of diagnostics -Diagnostics are automatically recomputed each time the source files +By default, diagnostics are automatically recomputed each time the source files are edited. Compilation errors in open files are updated after a very short delay @@ -68,9 +68,12 @@ Alternatively, diagnostics may be triggered only after an edited file is saved, using the [`diagnosticsTrigger`](../settings.md#diagnosticsTrigger) setting. -Gopls does not currently support "pull-based" diagnostics, which are -computed synchronously when requested by the client; see golang/go#53275. - +When initialized with `"pullDiagnostics": true`, gopls also supports +["pull diagnostics"](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_pullDiagnostics), +an alternative mechanism for recomputing diagnostics in which the client +requests diagnostics from gopls explicitly using the `textDocument/diagnostic` +request. This feature is off by default until the performance of pull +diagnostics is comparable to push diagnostics. ## Quick fixes @@ -91,6 +94,7 @@ Suggested fixes that are indisputably safe are [code actions](transformation.md#code-actions) whose kind is `"source.fixAll"`. Many client editors have a shortcut to apply all such fixes. 
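These fixes originate in analyzers built on the `golang.org/x/tools/go/analysis` API. As a self-contained, illustrative sketch (not code from gopls or from this change; the analyzer name and trigger are invented for the example), this is how an analyzer attaches a machine-applicable fix to a diagnostic, which a client can then surface as a quick fix or apply via `source.fixAll`:

```go
// Package extrasemi is a hypothetical analyzer, shown only to illustrate
// how suggested fixes are attached to diagnostics.
package extrasemi

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
)

var Analyzer = &analysis.Analyzer{
	Name: "extrasemi",
	Doc:  "flag redundant empty statements and offer to delete them",
	Run:  run,
}

func run(pass *analysis.Pass) (any, error) {
	for _, file := range pass.Files {
		ast.Inspect(file, func(n ast.Node) bool {
			if empty, ok := n.(*ast.EmptyStmt); ok && !empty.Implicit {
				pass.Report(analysis.Diagnostic{
					Pos:     empty.Pos(),
					End:     empty.End(),
					Message: "redundant semicolon",
					SuggestedFixes: []analysis.SuggestedFix{{
						Message: "Remove redundant semicolon",
						TextEdits: []analysis.TextEdit{{
							Pos: empty.Pos(),
							End: empty.End(),
							// Empty NewText: the edit deletes the range.
						}},
					}},
				})
			}
			return true
		})
	}
	return nil, nil
}
```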
+ -### `stubMethods`: Declare missing methods of type +### `stubMissingInterfaceMethods`: Declare missing methods of I When a value of a concrete type is assigned to a variable of an interface type, but the concrete type does not possess all the @@ -165,6 +169,34 @@ position. client there, or a progress notification indicating that something happened.) +### `StubMissingCalledFunction`: Declare missing method T.f + +When you attempt to call a method on a type that does not have that method, +the compiler will report an error such as "type X has no field or method Y". +In this scenario, gopls now offers a quick fix to generate a stub declaration of +the missing method, inferring its type from the call. + +Consider the following code where `Foo` does not have a method `bar`: + +```go +type Foo struct{} + +func main() { + var s string + f := Foo{} + s = f.bar("str", 42) // error: f.bar undefined (type Foo has no field or method bar) +} +``` + +Gopls will offer a quick fix, "Declare missing method Foo.bar". +When invoked, it creates the following declaration: + +```go +func (f Foo) bar(s string, i int) string { + panic("unimplemented") +} +``` + +- [`source.organizeImports`](#source.organizeImports) +- [`source.assembly`](web.md#assembly) +- [`source.doc`](web.md#doc) +- [`source.freesymbols`](web.md#freesymbols) +- `source.test` (undocumented) +- [`gopls.doc.features`](README.md), which opens gopls' index of features in a browser +- [`refactor.extract.function`](#extract) +- [`refactor.extract.method`](#extract) +- [`refactor.extract.toNewFile`](#extract.toNewFile) +- [`refactor.extract.variable`](#extract) +- [`refactor.inline.call`](#refactor.inline.call) +- [`refactor.rewrite.changeQuote`](#refactor.rewrite.changeQuote) +- [`refactor.rewrite.fillStruct`](#refactor.rewrite.fillStruct) +- [`refactor.rewrite.fillSwitch`](#refactor.rewrite.fillSwitch) +- [`refactor.rewrite.invertIf`](#refactor.rewrite.invertIf) +- [`refactor.rewrite.joinLines`](#refactor.rewrite.joinLines) +- [`refactor.rewrite.removeUnusedParam`](#refactor.rewrite.removeUnusedParam) +- [`refactor.rewrite.splitLines`](#refactor.rewrite.splitLines) + Gopls reports some code actions twice, with two different kinds, so that they appear in multiple UI elements: simplifications, for example from `for _ = range m` to `for range m`, @@ -120,6 +142,7 @@ Client support for code actions: - **CLI**: `gopls codeaction -exec -kind k,... -diff file.go:#123-#456` executes code actions of the specified kinds (e.g. `refactor.inline`) on the selected range, specified using zero-based byte offsets, and displays the diff. + ## Formatting The LSP @@ -141,7 +164,8 @@ Client support: - **Emacs + eglot**: Use `M-x eglot-format-buffer` to format. Attach it to `before-save-hook` to format on save. For formatting combined with organize-imports, many users take the legacy approach of setting `"goimports"` as their `gofmt-command` using [go-mode](https://github.com/dominikh/go-mode.el), and adding `gofmt-before-save` to `before-save-hook`. An LSP-based solution requires code such as https://github.com/joaotavora/eglot/discussions/1409. - **CLI**: `gopls format file.go` -## Organize imports + +## `source.organizeImports`: Organize imports A `codeActions` request in a file whose imports are not organized will return an action of the standard kind `source.organizeImports`. 
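As a small illustration of the input and output (this example is ours, not from the gopls documentation), a file whose import group is unsorted yields a `source.organizeImports` action; applying it rewrites only the import declaration:

```go
package demo

import (
	"os"
	"fmt" // out of order: organize imports sorts the group
)

func demo() {
	fmt.Fprintln(os.Stderr, "hello")
}

// After applying source.organizeImports, the declaration becomes:
//
//	import (
//		"fmt"
//		"os"
//	)
```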
@@ -187,7 +211,8 @@ Client support: - **CLI**: `gopls fix -a file.go:#offset source.organizeImports` -## `refactor.rename`: Rename + +## Rename The LSP [`textDocument/rename`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_rename) @@ -268,7 +293,7 @@ Client support: - **CLI**: `gopls rename file.go:#offset newname` - + ## `refactor.extract`: Extract function/method/variable The `refactor.extract` family of code actions all return commands that @@ -326,7 +351,7 @@ The following Extract features are planned for 2024 but not yet supported: see golang/go#65721 and golang/go#46665. - + ## `refactor.extract.toNewFile`: Extract declarations to new file (Available from gopls/v0.17.0) @@ -343,7 +368,7 @@ first token of the declaration, such as `func` or `type`. ![After: the new file is based on the first symbol name](../assets/extract-to-new-file-after.png) - + ## `refactor.inline.call`: Inline call to function For a `codeActions` request where the selection is (or is within) a @@ -498,11 +523,13 @@ more detail. All of this is to say, it's a complex problem, and we aim for correctness first of all. We've already implemented a number of important "tidiness optimizations" and we expect more to follow. + ## `refactor.rewrite`: Miscellaneous rewrites This section covers a number of transformations that are accessible as code actions whose kinds are children of `refactor.rewrite`. + ### `refactor.rewrite.removeUnusedParam`: Remove unused parameter The [`unusedparams` analyzer](../analyzers.md#unusedparams) reports a @@ -538,6 +565,7 @@ Observe that in the first call, the argument `chargeCreditCard()` was not deleted because of potential side effects, whereas in the second call, the argument 2, a constant, was safely deleted. + ### `refactor.rewrite.changeQuote`: Convert string literal between raw and interpreted When the selection is a string literal, gopls offers a code action @@ -550,6 +578,7 @@ form (`"abc"`) where this is possible: Applying the code action a second time reverts back to the original form. + ### `refactor.rewrite.invertIf`: Invert 'if' condition When the selection is within an `if`/`else` statement that is not @@ -565,6 +594,8 @@ blocks. if the else block ends with a return statement; and thus applying the operation twice does not get you back to where you started. --> + + ### `refactor.rewrite.{split,join}Lines`: Split elements into separate lines When the selection is within a bracketed list of items such as: @@ -612,7 +643,7 @@ These code actions are not offered for lists containing `//`-style comments, which run to the end of the line. - + ### `refactor.rewrite.fillStruct`: Fill struct literal When the cursor is within a struct literal `S{}`, gopls offers the @@ -645,6 +676,7 @@ Caveats: or in other files in the package, are not considered; see golang/go#68224. 
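As a concrete illustration of `refactor.rewrite.fillStruct` (ours, not from the gopls documentation; the exact action title and output formatting may differ), filling the empty literal below populates each field with a zero-valued expression:

```go
package demo

type Server struct {
	Addr    string
	Port    int
	Verbose bool
}

// Before: with the cursor inside the braces, gopls offers the fill action.
var s = Server{}

// After applying the fix (approximate output):
//
//	var s = Server{
//		Addr:    "",
//		Port:    0,
//		Verbose: false,
//	}
```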
+ ### `refactor.rewrite.fillSwitch`: Fill switch When the cursor is within a switch statement whose operand type is an diff --git a/gopls/doc/generate/generate.go b/gopls/doc/generate/generate.go index 994933a3681..7d92b2629d5 100644 --- a/gopls/doc/generate/generate.go +++ b/gopls/doc/generate/generate.go @@ -533,7 +533,7 @@ func lowerFirst(x string) string { func fileForPos(pkg *packages.Package, pos token.Pos) (*ast.File, error) { fset := pkg.Fset for _, f := range pkg.Syntax { - if safetoken.StartPosition(fset, f.Pos()).Filename == safetoken.StartPosition(fset, pos).Filename { + if safetoken.StartPosition(fset, f.FileStart).Filename == safetoken.StartPosition(fset, pos).Filename { return f, nil } } diff --git a/gopls/doc/release/v0.17.0.md b/gopls/doc/release/v0.17.0.md index c57522973db..a3e8b1b34e0 100644 --- a/gopls/doc/release/v0.17.0.md +++ b/gopls/doc/release/v0.17.0.md @@ -1,16 +1,15 @@ - - # Configuration Changes -The `fieldalignment` analyzer, previously disabled by default, has -been removed: it is redundant with the hover size/offset information -displayed by v0.16.0 and its diagnostics were confusing. - -The kind (identifiers) of all of gopls' code actions have changed -to use more specific hierarchical names. For example, "Inline call" -has changed from `refactor.inline` to `refactor.inline.call`. -This allows clients to request particular code actions more precisely. -The user manual now includes the identifier in the documentation for each code action. +- The `fieldalignment` analyzer, previously disabled by default, has + been removed: it is redundant with the hover size/offset information + displayed by v0.16.0 and its diagnostics were confusing. +- The kinds (identifiers) of all of gopls' code actions have changed + to use more specific hierarchical names. For example, "Inline call" + has changed from `refactor.inline` to `refactor.inline.call`. + This allows clients to request particular code actions more precisely. + The user manual now includes the identifier in the documentation for each code action. +- The experimental `allowImplicitNetworkAccess` setting is removed, following + its deprecation in gopls@v0.16.0. See golang/go#66861 for details. # New features @@ -30,13 +29,29 @@ or by selecting a whole declaration or multiple declarations. In order to avoid ambiguity and surprise about what to extract, some kinds of partial selection of a declaration cannot invoke this code action. -## Standard library version information in Hover +## Pull diagnostics + +When initialized with the option `"pullDiagnostics": true`, gopls will advertise support for the +`textDocument.diagnostic` +[client capability](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_pullDiagnostics), +which allows editors to request diagnostics directly from gopls using a +`textDocument/diagnostic` request, rather than wait for a +`textDocument/publishDiagnostics` notification. This feature is off by default +until the performance of pull diagnostics is comparable to push diagnostics. + +## Hover improvements -Hovering over a standard library symbol now displays information about the first -Go release containing the symbol. For example, hovering over `errors.As` shows -"Added in go1.13". +The `textDocument/hover` response has slightly tweaked markdown rendering, and +includes the following additional information: + +- Hovering over a standard library symbol now displays information about the + first Go release containing the symbol.
For example, hovering over + `errors.As` shows "Added in go1.13". +- Hovering over the package name in a package declaration includes additional + package metadata. ## Semantic token modifiers of top-level constructor of types + The semantic tokens response now includes additional modifiers for the top-level constructor of the type of each symbol: `interface`, `struct`, `signature`, `pointer`, `array`, `map`, `slice`, `chan`, `string`, `number`, `bool`, and `invalid`. @@ -54,3 +69,12 @@ function's Go `func` declaration. If the function is implemented in C or assembly, the function has no body. Executing a second Definition query (while already at the Go declaration) will navigate you to the assembly implementation. + +## Generate missing method from function call + +When you attempt to call a method on a type that does not have that method, +the compiler will report an error like “type X has no field or method Y”. +Gopls now offers a new code action, “Declare missing method of T.f”, +where T is the concrete type and f is the undefined method. +The stub method's signature is inferred +from the context of the call. diff --git a/gopls/doc/settings.md b/gopls/doc/settings.md index db6092a980c..135fcca70af 100644 --- a/gopls/doc/settings.md +++ b/gopls/doc/settings.md @@ -119,17 +119,6 @@ gopls has to do to keep your workspace up to date. Default: `true`. - -### `allowImplicitNetworkAccess bool` - -**This setting is experimental and may be deleted.** - -allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module -downloads rather than requiring user action. This option will eventually -be removed. - -Default: `false`. - ### `standaloneTags []string` diff --git a/gopls/go.mod b/gopls/go.mod index a7bc7404a65..beea37161db 100644 --- a/gopls/go.mod +++ b/gopls/go.mod @@ -7,25 +7,24 @@ go 1.23.1 require ( github.com/google/go-cmp v0.6.0 github.com/jba/templatecheck v0.7.0 - golang.org/x/mod v0.21.0 - golang.org/x/sync v0.8.0 - golang.org/x/sys v0.26.0 - golang.org/x/telemetry v0.0.0-20240927184629-19675431963b - golang.org/x/text v0.19.0 - golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d + golang.org/x/mod v0.22.0 + golang.org/x/sync v0.9.0 + golang.org/x/sys v0.27.0 + golang.org/x/telemetry v0.0.0-20241106142447-58a1122356f5 + golang.org/x/text v0.20.0 + golang.org/x/tools v0.21.1-0.20240531212143-b6235391adb3 golang.org/x/vuln v1.0.4 gopkg.in/yaml.v3 v3.0.1 - honnef.co/go/tools v0.4.7 + honnef.co/go/tools v0.5.1 mvdan.cc/gofumpt v0.7.0 mvdan.cc/xurls/v2 v2.5.0 ) require ( - github.com/BurntSushi/toml v1.2.1 // indirect + github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect github.com/google/safehtml v0.1.0 // indirect - golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 // indirect + golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect - ) replace golang.org/x/tools => ../ diff --git a/gopls/go.sum b/gopls/go.sum index 2b92ae83594..3321a78e6f0 100644 --- a/gopls/go.sum +++ b/gopls/go.sum @@ -1,5 +1,5 @@ -github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= +github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/go-quicktest/qt v1.101.0 
h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= @@ -16,36 +16,36 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= -golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338 h1:2O2DON6y3XMJiQRAS1UWU+54aec2uopH3x7MAiqGW6Y= -golang.org/x/exp/typeparams v0.0.0-20221212164502-fae10dda9338/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 h1:1P7xPZEwZMoBoz0Yze5Nx2/4pxj6nw9ZqHWXqP0iRgQ= +golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0= -golang.org/x/telemetry v0.0.0-20240927184629-19675431963b h1:PfPrmVDHfPgLVpiYnf2R1uL8SCXBjkqT51+f/fQHR6Q= -golang.org/x/telemetry v0.0.0-20240927184629-19675431963b/go.mod h1:PsFMgI0jiuY7j+qwXANuh9a/x5kQESTSnRow3gapUyk= +golang.org/x/telemetry v0.0.0-20241106142447-58a1122356f5 h1:TCDqnvbBsFapViksHcHySl/sW4+rTGNIAoJJesHRuMM= +golang.org/x/telemetry v0.0.0-20241106142447-58a1122356f5/go.mod 
h1:8nZWdGp9pq73ZI//QJyckMQab3yq7hoWi7SI0UIusVI= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/vuln v1.0.4 h1:SP0mPeg2PmGCu03V+61EcQiOjmpri2XijexKdzv8Z1I= golang.org/x/vuln v1.0.4/go.mod h1:NbJdUQhX8jY++FtuhrXs2Eyx0yePo9pF7nPlIjo9aaQ= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -53,8 +53,8 @@ gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogR gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs= -honnef.co/go/tools v0.4.7/go.mod h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0= +honnef.co/go/tools v0.5.1 h1:4bH5o3b5ZULQ4UrBmP+63W9r7qIkqJClEA9ko5YKx+I= +honnef.co/go/tools v0.5.1/go.mod h1:e9irvo83WDG9/irijV44wr3tbhcFeRnfpVlRqVwpzMs= mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU= mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo= mvdan.cc/xurls/v2 v2.5.0 h1:lyBNOm8Wo71UknhUs4QTFUNNMyxy2JEIaKKo0RWOh+8= diff --git a/gopls/internal/analysis/fillreturns/fillreturns.go b/gopls/internal/analysis/fillreturns/fillreturns.go index 5e18c1c6642..5ebfc2013bd 100644 --- a/gopls/internal/analysis/fillreturns/fillreturns.go +++ b/gopls/internal/analysis/fillreturns/fillreturns.go @@ -45,7 +45,7 @@ outer: } var file *ast.File for _, f := range pass.Files { - if f.Pos() <= typeErr.Pos && typeErr.Pos <= f.End() { + if f.FileStart <= typeErr.Pos && typeErr.Pos <= f.FileEnd { file = f break } diff --git a/gopls/internal/analysis/infertypeargs/infertypeargs.go b/gopls/internal/analysis/infertypeargs/infertypeargs.go index 9a514ad620c..0ce43e67079 100644 --- a/gopls/internal/analysis/infertypeargs/infertypeargs.go +++ b/gopls/internal/analysis/infertypeargs/infertypeargs.go @@ -13,7 +13,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/typeparams" - "golang.org/x/tools/internal/versions" ) const Doc = `check for unnecessary type arguments in call expressions @@ -91,9 +90,9 @@ func diagnose(fset *token.FileSet, inspect *inspector.Inspector, start, end toke Rparen: call.Rparen, } info := &types.Info{ - Instances: make(map[*ast.Ident]types.Instance), + Instances: make(map[*ast.Ident]types.Instance), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) if err := types.CheckExpr(fset, pkg, call.Pos(), newCall, info); err != nil { // Most likely inference failed. 
break diff --git a/gopls/internal/analysis/nonewvars/nonewvars.go b/gopls/internal/analysis/nonewvars/nonewvars.go index b9c9b4d6f48..9e5d79df02c 100644 --- a/gopls/internal/analysis/nonewvars/nonewvars.go +++ b/gopls/internal/analysis/nonewvars/nonewvars.go @@ -47,7 +47,7 @@ func run(pass *analysis.Pass) (interface{}, error) { var file *ast.File for _, f := range pass.Files { - if f.Pos() <= assignStmt.Pos() && assignStmt.Pos() < f.End() { + if f.FileStart <= assignStmt.Pos() && assignStmt.Pos() < f.FileEnd { file = f break } diff --git a/gopls/internal/analysis/norangeoverfunc/norangeoverfunc.go b/gopls/internal/analysis/norangeoverfunc/norangeoverfunc.go deleted file mode 100644 index aa58e89d75b..00000000000 --- a/gopls/internal/analysis/norangeoverfunc/norangeoverfunc.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package norangeoverfunc - -// TODO(adonovan): delete this when #67237 and dominikh/go-tools#1494 are fixed. - -import ( - _ "embed" - "fmt" - "go/ast" - "go/types" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -var Analyzer = &analysis.Analyzer{ - Name: "norangeoverfunc", - Doc: `norangeoverfunc fails if a package uses go1.23 range-over-func - -Require it from any analyzer that cannot yet safely process this new feature.`, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: run, -} - -func run(pass *analysis.Pass) (any, error) { - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - filter := []ast.Node{(*ast.RangeStmt)(nil)} - - // TODO(adonovan): opt: short circuit if not using go1.23. 
- - var found *ast.RangeStmt - inspect.Preorder(filter, func(n ast.Node) { - if found == nil { - stmt := n.(*ast.RangeStmt) - if _, ok := pass.TypesInfo.TypeOf(stmt.X).Underlying().(*types.Signature); ok { - found = stmt - } - } - }) - if found != nil { - return nil, fmt.Errorf("package %q uses go1.23 range-over-func; cannot build SSA or IR (#67237)", - pass.Pkg.Path()) - } - - return nil, nil -} diff --git a/gopls/internal/analysis/noresultvalues/noresultvalues.go b/gopls/internal/analysis/noresultvalues/noresultvalues.go index a5cd424a762..118beb4568b 100644 --- a/gopls/internal/analysis/noresultvalues/noresultvalues.go +++ b/gopls/internal/analysis/noresultvalues/noresultvalues.go @@ -42,7 +42,7 @@ func run(pass *analysis.Pass) (interface{}, error) { var file *ast.File for _, f := range pass.Files { - if f.Pos() <= retStmt.Pos() && retStmt.Pos() < f.End() { + if f.FileStart <= retStmt.Pos() && retStmt.Pos() < f.FileEnd { file = f break } diff --git a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go index 6511477d254..15176cef1c8 100644 --- a/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go +++ b/gopls/internal/analysis/simplifycompositelit/simplifycompositelit.go @@ -38,7 +38,7 @@ func run(pass *analysis.Pass) (interface{}, error) { generated := make(map[*token.File]bool) for _, file := range pass.Files { if ast.IsGenerated(file) { - generated[pass.Fset.File(file.Pos())] = true + generated[pass.Fset.File(file.FileStart)] = true } } diff --git a/gopls/internal/analysis/simplifyrange/simplifyrange.go b/gopls/internal/analysis/simplifyrange/simplifyrange.go index 4071d1b6e8a..6d079059eb1 100644 --- a/gopls/internal/analysis/simplifyrange/simplifyrange.go +++ b/gopls/internal/analysis/simplifyrange/simplifyrange.go @@ -33,7 +33,7 @@ func run(pass *analysis.Pass) (interface{}, error) { generated := make(map[*token.File]bool) for _, file := range pass.Files { if ast.IsGenerated(file) { - generated[pass.Fset.File(file.Pos())] = true + generated[pass.Fset.File(file.FileStart)] = true } } diff --git a/gopls/internal/analysis/simplifyslice/simplifyslice.go b/gopls/internal/analysis/simplifyslice/simplifyslice.go index dc99580b07e..6755187afe5 100644 --- a/gopls/internal/analysis/simplifyslice/simplifyslice.go +++ b/gopls/internal/analysis/simplifyslice/simplifyslice.go @@ -42,7 +42,7 @@ func run(pass *analysis.Pass) (interface{}, error) { generated := make(map[*token.File]bool) for _, file := range pass.Files { if ast.IsGenerated(file) { - generated[pass.Fset.File(file.Pos())] = true + generated[pass.Fset.File(file.FileStart)] = true } } diff --git a/gopls/internal/analysis/undeclaredname/undeclared.go b/gopls/internal/analysis/undeclaredname/undeclared.go index 70b22881700..47027be07e4 100644 --- a/gopls/internal/analysis/undeclaredname/undeclared.go +++ b/gopls/internal/analysis/undeclaredname/undeclared.go @@ -59,7 +59,7 @@ func runForError(pass *analysis.Pass, err types.Error) { // Find file enclosing error. 
var file *ast.File for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { + if f.FileStart <= err.Pos && err.Pos < f.FileEnd { file = f break } diff --git a/gopls/internal/analysis/unusedvariable/unusedvariable.go b/gopls/internal/analysis/unusedvariable/unusedvariable.go index 8019cfe9eca..5e4dd52be7e 100644 --- a/gopls/internal/analysis/unusedvariable/unusedvariable.go +++ b/gopls/internal/analysis/unusedvariable/unusedvariable.go @@ -60,7 +60,7 @@ func run(pass *analysis.Pass) (interface{}, error) { func runForError(pass *analysis.Pass, err types.Error, name string) error { var file *ast.File for _, f := range pass.Files { - if f.Pos() <= err.Pos && err.Pos < f.End() { + if f.FileStart <= err.Pos && err.Pos < f.FileEnd { file = f break } diff --git a/gopls/internal/bloom/filter.go b/gopls/internal/bloom/filter.go new file mode 100644 index 00000000000..a8e2f1b8c6c --- /dev/null +++ b/gopls/internal/bloom/filter.go @@ -0,0 +1,105 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bloom + +import ( + "hash/maphash" + "math" +) + +// block is the element type of the filter bitfield. +type block = uint8 + +const blockBits = 8 + +// Filter is a bloom filter for a set of strings. +type Filter struct { + seeds []maphash.Seed + blocks []block +} + +// NewFilter constructs a new Filter with the given elements. +func NewFilter(elems []string) *Filter { + // Tolerate a 5% false positive rate. + nblocks, nseeds := calibrate(0.05, len(elems)) + f := &Filter{ + blocks: make([]block, nblocks), + seeds: make([]maphash.Seed, nseeds), + } + for i := range nseeds { + f.seeds[i] = maphash.MakeSeed() + } + for _, elem := range elems { + for _, seed := range f.seeds { + index, bit := f.locate(seed, elem) + f.blocks[index] |= bit + } + } + return f +} + +// locate returns the block index and bit corresponding to the given hash seed and +// string. +func (f *Filter) locate(seed maphash.Seed, s string) (index int, bit block) { + h := uint(maphash.String(seed, s)) + blk := h / blockBits % uint(len(f.blocks)) + bit = block(1 << (h % blockBits)) + return int(blk), bit +} + +func assert(cond bool, msg string) { + if !cond { + panic(msg) + } +} + +// calibrate approximates the number of blocks and seeds to use for a bloom +// filter with desired false positive rate fpRate, given n elements. +func calibrate(fpRate float64, n int) (blocks, seeds int) { + // We following the terms of https://en.wikipedia.org/wiki/Bloom_filter: + // - k is the number of hash functions, + // - m is the size of the bit field; + // - n is the number of set bits. + + assert(0 < fpRate && fpRate < 1, "invalid false positive rate") + assert(n >= 0, "invalid set size") + + if n == 0 { + // degenerate case; use the simplest filter + return 1, 1 + } + + // Calibrate the number of blocks based on the optimal number of bits per + // element. In this case we round up, as more bits leads to fewer false + // positives. + logFpRate := math.Log(fpRate) // reused for k below + m := -(float64(n) * logFpRate) / (math.Ln2 * math.Ln2) + blocks = int(m) / blockBits + if float64(blocks*blockBits) < m { + blocks += 1 + } + + // Estimate the number of hash functions (=seeds). This is imprecise, not + // least since the formula in the article above assumes that the number of + // bits per element is not rounded. 
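+ // For a concrete sense of these formulas (the 5% rate is the one NewFilter uses; n = 100 is an illustrative element count): m = -100*ln(0.05)/(ln 2)^2 ≈ 623.5 bits, so blocks = 78 after rounding 623.5/8 ≈ 77.9 up; and k = -ln(0.05)/ln 2 ≈ 4.3, giving 4 seeds after rounding to the nearest integer.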
+ // + // Here we round to the nearest integer (not unconditionally round up), since + // more hash functions do not always lead to better results. + k := -logFpRate / math.Ln2 + seeds = max(int(math.Round(k)), 1) + + return blocks, seeds +} + +// MayContain reports whether the filter may contain s. +func (f *Filter) MayContain(s string) bool { + for _, seed := range f.seeds { + index, bit := f.locate(seed, s) + if f.blocks[index]&bit == 0 { + return false + } + } + return true +} diff --git a/gopls/internal/bloom/filter_test.go b/gopls/internal/bloom/filter_test.go new file mode 100644 index 00000000000..6415eea15bb --- /dev/null +++ b/gopls/internal/bloom/filter_test.go @@ -0,0 +1,93 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bloom + +import ( + "math" + "math/rand/v2" + "testing" +) + +func TestFilter(t *testing.T) { + elems := []string{ + "a", "apple", "b", "banana", "an arbitrarily long string", "", "世界", + } + + // First, sanity check that the filter contains all the given elements. + f := NewFilter(elems) + for _, elem := range elems { + if got := f.MayContain(elem); !got { + t.Errorf("MayContain(%q) = %t, want true", elem, got) + } + } + + // Measure the false positives rate. + // + // Of course, we can't assert on the results, since they are probabilistic, + // but this can be useful for interactive use. + + fpRate := falsePositiveRate(len(f.blocks), len(f.seeds), len(elems)) + t.Logf("%d blocks, %d seeds, %.2g%% expected false positives", len(f.blocks), len(f.seeds), 100*fpRate) + + // In practice, all positives below will be false, but be precise anyway. + truePositive := make(map[string]bool) + for _, e := range elems { + truePositive[e] = true + } + + // Generate a large number of random strings to measure the false positive + // rate. + g := newStringGenerator() + const samples = 1000 + falsePositives := 0 + for range samples { + s := g.next() + got := f.MayContain(s) + if false { + t.Logf("MayContain(%q) = %t", s, got) + } + if got && !truePositive[s] { + falsePositives++ + } + } + t.Logf("false positives: %.1f%% (%d/%d)", 100*float64(falsePositives)/float64(samples), falsePositives, samples) +} + +// falsePositiveRate estimates the expected false positive rate for a filter +// with the given number of blocks, seeds, and elements. +func falsePositiveRate(block, seeds, elems int) float64 { + k, m, n := float64(seeds), float64(block*blockBits), float64(elems) + return math.Pow(1-math.Exp(-k*n/m), k) +} + +type stringGenerator struct { + r *rand.Rand +} + +func newStringGenerator() *stringGenerator { + return &stringGenerator{rand.New(rand.NewPCG(1, 2))} +} + +func (g *stringGenerator) next() string { + l := g.r.IntN(50) // length + var runes []rune + for range l { + runes = append(runes, rune(' '+rand.IntN('~'-' '))) + } + return string(runes) +} + +// TestDegenerateFilter checks that the degenerate filter with no elements +// results in no false positives. 
+func TestDegenerateFilter(t *testing.T) { + f := NewFilter(nil) + g := newStringGenerator() + for range 100 { + s := g.next() + if f.MayContain(s) { + t.Errorf("MayContain(%q) = true, want false", s) + } + } +} diff --git a/gopls/internal/cache/analysis.go b/gopls/internal/cache/analysis.go index 9debc609048..0964078f0e5 100644 --- a/gopls/internal/cache/analysis.go +++ b/gopls/internal/cache/analysis.go @@ -15,10 +15,10 @@ import ( "errors" "fmt" "go/ast" - "go/parser" "go/token" "go/types" "log" + "maps" urlpkg "net/url" "path/filepath" "reflect" @@ -34,7 +34,6 @@ import ( "golang.org/x/sync/errgroup" "golang.org/x/tools/go/analysis" "golang.org/x/tools/gopls/internal/cache/metadata" - "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/filecache" "golang.org/x/tools/gopls/internal/label" @@ -45,138 +44,73 @@ import ( "golang.org/x/tools/gopls/internal/util/bug" "golang.org/x/tools/gopls/internal/util/frob" "golang.org/x/tools/gopls/internal/util/moremaps" + "golang.org/x/tools/gopls/internal/util/persistent" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/facts" - "golang.org/x/tools/internal/gcimporter" - "golang.org/x/tools/internal/typesinternal" - "golang.org/x/tools/internal/versions" ) /* DESIGN - An analysis request (Snapshot.Analyze) is for a set of Analyzers and - PackageIDs. The result is the set of diagnostics for those - packages. Each request constructs a transitively closed DAG of - nodes, each representing a package, then works bottom up in - parallel postorder calling runCached to ensure that each node's - analysis summary is up to date. The summary contains the analysis - diagnostics as well as the intermediate results required by the - recursion, such as serialized types and facts. - - The entire DAG is ephemeral. Each node in the DAG records the set - of analyzers to run: the complete set for the root packages, and - the "facty" subset for dependencies. Each package is thus analyzed - at most once. The entire DAG shares a single FileSet for parsing - and importing. - - Each node is processed by runCached. It gets the source file - content hashes for package p, and the summaries of its "vertical" - dependencies (direct imports), and from them it computes a key - representing the unit of work (parsing, type-checking, and - analysis) that it has to do. The key is a cryptographic hash of the - "recipe" for this step, including the Metadata, the file contents, - the set of analyzers, and the type and fact information from the - vertical dependencies. - - The key is sought in a machine-global persistent file-system based - cache. If this gopls process, or another gopls process on the same - machine, has already performed this analysis step, runCached will - make a cache hit and load the serialized summary of the results. If - not, it will have to proceed to run() to parse and type-check the - package and then apply a set of analyzers to it. (The set of - analyzers applied to a single package itself forms a graph of - "actions", and it too is evaluated in parallel postorder; these - dependency edges within the same package are called "horizontal".) - Finally it writes a new cache entry. The entry contains serialized - types (export data) and analysis facts. 
- - Each node in the DAG acts like a go/types importer mapping, - providing a consistent view of packages and their objects: the - mapping for a node is a superset of its dependencies' mappings. - Every node has an associated *types.Package, initially nil. A - package is populated during run (cache miss) by type-checking its - syntax; but for a cache hit, the package is populated lazily, i.e. - not until it later becomes necessary because it is imported - directly or referenced by export data higher up in the DAG. - - For types, we use "shallow" export data. Historically, the Go - compiler always produced a summary of the types for a given package - that included types from other packages that it indirectly - referenced: "deep" export data. This had the advantage that the - compiler (and analogous tools such as gopls) need only load one - file per direct import. However, it meant that the files tended to - get larger based on the level of the package in the import - graph. For example, higher-level packages in the kubernetes module - have over 1MB of "deep" export data, even when they have almost no - content of their own, merely because they mention a major type that - references many others. In pathological cases the export data was - 300x larger than the source for a package due to this quadratic - growth. - - "Shallow" export data means that the serialized types describe only - a single package. If those types mention types from other packages, - the type checker may need to request additional packages beyond - just the direct imports. Type information for the entire transitive - closure of imports is provided (lazily) by the DAG. - - For correct dependency analysis, the digest used as a cache key - must reflect the "deep" export data, so it is derived recursively - from the transitive closure. As an optimization, we needn't include - every package of the transitive closure in the deep hash, only the - packages that were actually requested by the type checker. This - allows changes to a package that have no effect on its export data - to be "pruned". The direct consumer will need to be re-executed, - but if its export data is unchanged as a result, then indirect - consumers may not need to be re-executed. This allows, for example, - one to insert a print statement in a function and not "rebuild" the - whole application (though export data does record line numbers and - offsets of types which may be perturbed by otherwise insignificant - changes.) - - The summary must record whether a package is transitively - error-free (whether it would compile) because many analyzers are - not safe to run on packages with inconsistent types. - - For fact encoding, we use the same fact set as the unitchecker - (vet) to record and serialize analysis facts. The fact - serialization mechanism is analogous to "deep" export data. + An analysis request ([Snapshot.Analyze]) computes diagnostics for the + requested packages using the set of analyzers enabled in this view. Each + request constructs a transitively closed DAG of nodes, each representing a + package, then works bottom up in parallel postorder calling + [analysisNode.runCached] to ensure that each node's analysis summary is up + to date. The summary contains the analysis diagnostics and serialized facts. + + The entire DAG is ephemeral. Each node in the DAG records the set of + analyzers to run: the complete set for the root packages, and the "facty" + subset for dependencies. Each package is thus analyzed at most once. 
+ + Each node has a cryptographic key, which is either memoized in the Snapshot + or computed by [analysisNode.cacheKey]. This key is a hash of the "recipe" + for the analysis step, including the inputs into the type checked package + (and its reachable dependencies), the set of analyzers, and importable + facts. + + The key is sought in a machine-global persistent file-system based cache. If + this gopls process, or another gopls process on the same machine, has + already performed this analysis step, runCached will make a cache hit and + load the serialized summary of the results. If not, it will have to proceed + to run() to parse and type-check the package and then apply a set of + analyzers to it. (The set of analyzers applied to a single package itself + forms a graph of "actions", and it too is evaluated in parallel postorder; + these dependency edges within the same package are called "horizontal".) + Finally it writes a new cache entry containing serialized diagnostics and + analysis facts. + + The summary must record whether a package is transitively error-free + (whether it would compile) because many analyzers are not safe to run on + packages with inconsistent types. + + For fact encoding, we use the same fact set as the unitchecker (vet) to + record and serialize analysis facts. The fact serialization mechanism is + analogous to "deep" export data. */ // TODO(adonovan): // - Add a (white-box) test of pruning when a change doesn't affect export data. // - Optimise pruning based on subset of packages mentioned in exportdata. -// - Better logging so that it is possible to deduce why an analyzer -// is not being run--often due to very indirect failures. -// Even if the ultimate consumer decides to ignore errors, -// tests and other situations want to be assured of freedom from -// errors, not just missing results. This should be recorded. -// - Split this into a subpackage, gopls/internal/cache/driver, -// consisting of this file and three helpers from errors.go. -// The (*snapshot).Analyze method would stay behind and make calls -// to the driver package. -// Steps: -// - define a narrow driver.Snapshot interface with only these methods: -// Metadata(PackageID) Metadata -// ReadFile(Context, URI) (file.Handle, error) -// View() *View // for Options -// - share cache.{goVersionRx,parseGoImpl} +// - Better logging so that it is possible to deduce why an analyzer is not +// being run--often due to very indirect failures. Even if the ultimate +// consumer decides to ignore errors, tests and other situations want to be +// assured of freedom from errors, not just missing results. This should be +// recorded. // AnalysisProgressTitle is the title of the progress report for ongoing // analysis. It is sought by regression tests for the progress reporting // feature. const AnalysisProgressTitle = "Analyzing Dependencies" -// Analyze applies a set of analyzers to the package denoted by id, -// and returns their diagnostics for that package. -// -// The analyzers list must be duplicate free; order does not matter. +// Analyze applies the set of enabled analyzers to the packages in the pkgs +// map, and returns their diagnostics. // // Notifications of progress may be sent to the optional reporter. 
-func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Package, analyzers []*settings.Analyzer, reporter *progress.Tracker) ([]*Diagnostic, error) { +func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Package, reporter *progress.Tracker) ([]*Diagnostic, error) { start := time.Now() // for progress reporting var tagStr string // sorted comma-separated list of PackageIDs @@ -193,6 +127,7 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac // Filter and sort enabled root analyzers. // A disabled analyzer may still be run if required by another. + analyzers := analyzers(s.Options().Staticcheck) toSrc := make(map[*analysis.Analyzer]*settings.Analyzer) var enabledAnalyzers []*analysis.Analyzer // enabled subset + transitive requirements for _, a := range analyzers { @@ -231,8 +166,15 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac } facty = requiredAnalyzers(facty) - // File set for this batch (entire graph) of analysis. - fset := token.NewFileSet() + batch, release := s.acquireTypeChecking() + defer release() + + ids := moremaps.KeySlice(pkgs) + handles, err := s.getPackageHandles(ctx, ids) + if err != nil { + return nil, err + } + batch.addHandles(handles) // Starting from the root packages and following DepsByPkgPath, // build the DAG of packages we're going to analyze. @@ -247,21 +189,19 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac makeNode = func(from *analysisNode, id PackageID) (*analysisNode, error) { an, ok := nodes[id] if !ok { - mp := s.Metadata(id) - if mp == nil { + ph := handles[id] + if ph == nil { return nil, bug.Errorf("no metadata for %s", id) } // -- preorder -- an = &analysisNode{ - fset: fset, - fsource: struct{ file.Source }{s}, // expose only ReadFile - viewType: s.View().Type(), - mp: mp, + parseCache: s.view.parseCache, + fsource: s, // expose only ReadFile + batch: batch, + ph: ph, analyzers: facty, // all nodes run at least the facty analyzers - allDeps: make(map[PackagePath]*analysisNode), - exportDeps: make(map[PackagePath]*analysisNode), stableNames: stableNames, } nodes[id] = an @@ -269,47 +209,29 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac // -- recursion -- // Build subgraphs for dependencies. - an.succs = make(map[PackageID]*analysisNode, len(mp.DepsByPkgPath)) - for _, depID := range mp.DepsByPkgPath { + an.succs = make(map[PackageID]*analysisNode, len(ph.mp.DepsByPkgPath)) + for _, depID := range ph.mp.DepsByPkgPath { dep, err := makeNode(an, depID) if err != nil { return nil, err } an.succs[depID] = dep - - // Compute the union of all dependencies. - // (This step has quadratic complexity.) - for pkgPath, node := range dep.allDeps { - an.allDeps[pkgPath] = node - } } // -- postorder -- - an.allDeps[mp.PkgPath] = an // add self entry (reflexive transitive closure) - // Add leaf nodes (no successors) directly to queue. if len(an.succs) == 0 { leaves = append(leaves, an) } - - // Load the contents of each compiled Go file through - // the snapshot's cache. (These are all cache hits as - // files are pre-loaded following packages.Load) - an.files = make([]file.Handle, len(mp.CompiledGoFiles)) - for i, uri := range mp.CompiledGoFiles { - fh, err := s.ReadFile(ctx, uri) - if err != nil { - return nil, err - } - an.files[i] = fh - } } // Add edge from predecessor. 
if from != nil { from.unfinishedSuccs.Add(+1) // incref an.preds = append(an.preds, from) } + // Increment unfinishedPreds even for root nodes (from==nil), so that their + // Action summaries are never cleared. an.unfinishedPreds.Add(+1) return an, nil } @@ -325,20 +247,14 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac roots = append(roots, root) } - // Now that we have read all files, - // we no longer need the snapshot. - // (but options are needed for progress reporting) - options := s.Options() - s = nil - // Progress reporting. If supported, gopls reports progress on analysis // passes that are taking a long time. maybeReport := func(completed int64) {} // Enable progress reporting if enabled by the user // and we have a capable reporter. - if reporter != nil && reporter.SupportsWorkDoneProgress() && options.AnalysisProgressReporting { - var reportAfter = options.ReportAnalysisProgressAfter // tests may set this to 0 + if reporter != nil && reporter.SupportsWorkDoneProgress() && s.Options().AnalysisProgressReporting { + var reportAfter = s.Options().ReportAnalysisProgressAfter // tests may set this to 0 const reportEvery = 1 * time.Second ctx, cancel := context.WithCancel(ctx) @@ -399,10 +315,34 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac limiter <- unit{} defer func() { <-limiter }() - summary, err := an.runCached(ctx) + // Check to see if we already have a valid cache key. If not, compute it. + // + // The snapshot field that memoizes keys depends on whether this key is + // for the analysis result including all enabled analyzer, or just facty analyzers. + var keys *persistent.Map[PackageID, file.Hash] + if _, root := pkgs[an.ph.mp.ID]; root { + keys = s.fullAnalysisKeys + } else { + keys = s.factyAnalysisKeys + } + + // As keys is referenced by a snapshot field, it's guarded by s.mu. + s.mu.Lock() + key, keyFound := keys.Get(an.ph.mp.ID) + s.mu.Unlock() + + if !keyFound { + key = an.cacheKey() + s.mu.Lock() + keys.Set(an.ph.mp.ID, key, nil) + s.mu.Unlock() + } + + summary, err := an.runCached(ctx, key) if err != nil { return err // cancelled, or failed to produce a package } + maybeReport(completed.Add(1)) an.summary = summary @@ -491,6 +431,14 @@ func (s *Snapshot) Analyze(ctx context.Context, pkgs map[PackageID]*metadata.Pac return results, nil } +func analyzers(staticcheck bool) []*settings.Analyzer { + analyzers := slices.Collect(maps.Values(settings.DefaultAnalyzers)) + if staticcheck { + analyzers = slices.AppendSeq(analyzers, maps.Values(settings.StaticcheckAnalyzers)) + } + return analyzers +} + func (an *analysisNode) decrefPreds() { if an.unfinishedPreds.Add(-1) == 0 { an.summary.Actions = nil @@ -503,113 +451,43 @@ func (an *analysisNode) decrefPreds() { // realm of token.Pos or types.Object values. // // A complete DAG is created anew for each batch of analysis; -// subgraphs are not reused over time. Each node's *types.Package -// field is initially nil and is populated on demand, either from -// type-checking syntax trees (typeCheck) or from importing export -// data (_import). When this occurs, the typesOnce event becomes -// "done". -// -// Each node's allDeps map is a "view" of all its dependencies keyed by -// package path, which defines the types.Importer mapping used when -// populating the node's types.Package. Different nodes have different -// views (e.g. due to variants), but two nodes that are related by -// graph ordering have views that are consistent in their overlap. 
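// The key-memoization pattern used above (fullAnalysisKeys/factyAnalysisKeys),
// shown in isolation as a standalone sketch. The map is shared snapshot
// state, so it is only read or written under a mutex, while the potentially
// expensive key computation runs outside the critical section; two callers
// may occasionally compute the same key, which is harmless. Names here are
// illustrative, not the actual gopls types.
package sketch

import "sync"

type keyMemo struct {
	mu   sync.Mutex
	keys map[string][32]byte
}

func newKeyMemo() *keyMemo {
	return &keyMemo{keys: make(map[string][32]byte)}
}

func (m *keyMemo) get(id string, compute func() [32]byte) [32]byte {
	m.mu.Lock()
	key, ok := m.keys[id]
	m.mu.Unlock()
	if ok {
		return key
	}
	key = compute() // outside the lock: may duplicate work, never blocks others
	m.mu.Lock()
	m.keys[id] = key
	m.mu.Unlock()
	return key
}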
-// exportDeps is the subset actually referenced by export data; -// this is the set for which we attempt to decode facts. +// subgraphs are not reused over time. +// TODO(rfindley): with cached keys we can typically avoid building the full +// DAG, so as an optimization we should rewrite this using a top-down +// traversal, rather than bottom-up. // // Each node's run method is called in parallel postorder. On success, // its summary field is populated, either from the cache (hit), or by // type-checking and analyzing syntax (miss). type analysisNode struct { - fset *token.FileSet // file set shared by entire batch (DAG) + parseCache *parseCache // shared parse cache fsource file.Source // Snapshot.ReadFile, for use by Pass.ReadFile - viewType ViewType // type of view - mp *metadata.Package // metadata for this package - files []file.Handle // contents of CompiledGoFiles + batch *typeCheckBatch // type checking batch, for shared type checking + ph *packageHandle // package handle, for key and reachability analysis analyzers []*analysis.Analyzer // set of analyzers to run preds []*analysisNode // graph edges: succs map[PackageID]*analysisNode // (preds -> self -> succs) unfinishedSuccs atomic.Int32 unfinishedPreds atomic.Int32 // effectively a summary.Actions refcount - allDeps map[PackagePath]*analysisNode // all dependencies including self - exportDeps map[PackagePath]*analysisNode // subset of allDeps ref'd by export data (+self) summary *analyzeSummary // serializable result of analyzing this package stableNames map[*analysis.Analyzer]string // cross-process stable names for Analyzers - typesOnce sync.Once // guards lazy population of types and typesErr fields - types *types.Package // type information lazily imported from summary - typesErr error // an error producing type information - - depHashOnce sync.Once - _depHash file.Hash // memoized hash of data affecting dependents + summaryHashOnce sync.Once + _summaryHash file.Hash // memoized hash of data affecting dependents } -func (an *analysisNode) String() string { return string(an.mp.ID) } - -// _import imports this node's types.Package from export data, if not already done. -// Precondition: analysis was a success. -// Postcondition: an.types and an.exportDeps are populated. -func (an *analysisNode) _import() (*types.Package, error) { - an.typesOnce.Do(func() { - if an.mp.PkgPath == "unsafe" { - an.types = types.Unsafe - return - } - - an.types = types.NewPackage(string(an.mp.PkgPath), string(an.mp.Name)) - - // getPackages recursively imports each dependency - // referenced by the export data, in parallel. - getPackages := func(items []gcimporter.GetPackagesItem) error { - var g errgroup.Group - for i, item := range items { - path := PackagePath(item.Path) - dep, ok := an.allDeps[path] - if !ok { - // This early return bypasses Wait; that's ok. 
- return fmt.Errorf("%s: unknown dependency %q", an.mp, path) - } - an.exportDeps[path] = dep // record, for later fact decoding - if dep == an { - if an.typesErr != nil { - return an.typesErr - } else { - items[i].Pkg = an.types - } - } else { - i := i - g.Go(func() error { - depPkg, err := dep._import() - if err == nil { - items[i].Pkg = depPkg - } - return err - }) - } - } - return g.Wait() - } - pkg, err := gcimporter.IImportShallow(an.fset, getPackages, an.summary.Export, string(an.mp.PkgPath), bug.Reportf) - if err != nil { - an.typesErr = bug.Errorf("%s: invalid export data: %v", an.mp, err) - an.types = nil - } else if pkg != an.types { - log.Fatalf("%s: inconsistent packages", an.mp) - } - }) - return an.types, an.typesErr -} +func (an *analysisNode) String() string { return string(an.ph.mp.ID) } -// depHash computes the hash of node information that may affect other nodes -// depending on this node: the package path, export hash, and action results. +// summaryHash computes the hash of the node summary, which may affect other +// nodes depending on this node. // -// The result is memoized to avoid redundant work when analysing multiple +// The result is memoized to avoid redundant work when analyzing multiple // dependents. -func (an *analysisNode) depHash() file.Hash { - an.depHashOnce.Do(func() { +func (an *analysisNode) summaryHash() file.Hash { + an.summaryHashOnce.Do(func() { hasher := sha256.New() - fmt.Fprintf(hasher, "dep: %s\n", an.mp.PkgPath) - fmt.Fprintf(hasher, "export: %s\n", an.summary.DeepExportHash) + fmt.Fprintf(hasher, "dep: %s\n", an.ph.mp.PkgPath) + fmt.Fprintf(hasher, "compiles: %t\n", an.summary.Compiles) // action results: errors and facts actions := an.summary.Actions @@ -629,18 +507,16 @@ func (an *analysisNode) depHash() file.Hash { // from the key since they have no downstream effect. } } - hasher.Sum(an._depHash[:0]) + hasher.Sum(an._summaryHash[:0]) }) - return an._depHash + return an._summaryHash } // analyzeSummary is a gob-serializable summary of successfully // applying a list of analyzers to a package. type analyzeSummary struct { - Export []byte // encoded types of package - DeepExportHash file.Hash // hash of reflexive transitive closure of export data - Compiles bool // transitively free of list/parse/type errors - Actions actionMap // maps analyzer stablename to analysis results (*actionSummary) + Compiles bool // transitively free of list/parse/type errors + Actions actionMap // maps analyzer stablename to analysis results (*actionSummary) } // actionMap defines a stable Gob encoding for a map. @@ -691,6 +567,19 @@ type actionSummary struct { Err string // "" => success } +var ( + // inFlightAnalyses records active analysis operations so that later requests + // can be satisfied by joining onto earlier requests that are still active. + // + // Note that persistent=false, so results are cleared once they are delivered + // to awaiting goroutines. + inFlightAnalyses = newFutureCache[file.Hash, *analyzeSummary](false) + + // cacheLimit reduces parallelism of filecache updates. + // We allow more than typical GOMAXPROCS as it's a mix of CPU and I/O. + cacheLimit = make(chan unit, 32) +) + // runCached applies a list of analyzers (plus any others // transitively required by them) to a package. It succeeds as long // as it could produce a types.Package, even if there were direct or @@ -698,21 +587,14 @@ type actionSummary struct { // actions failed. 
It usually fails only if the package was unknown, // a file was missing, or the operation was cancelled. // -// Postcondition: runCached must not continue to use the snapshot -// (in background goroutines) after it has returned; see memoize.RefCounted. -func (an *analysisNode) runCached(ctx context.Context) (*analyzeSummary, error) { - // At this point we have the action results (serialized - // packages and facts) of our immediate dependencies, - // and the metadata and content of this package. - // - // We now compute a hash for all our inputs, and consult a - // global cache of promised results. If nothing material - // has changed, we'll make a hit in the shared cache. +// The provided key is the cache key for this package. +func (an *analysisNode) runCached(ctx context.Context, key file.Hash) (*analyzeSummary, error) { + // At this point we have the action results (serialized packages and facts) + // of our immediate dependencies, and the metadata and content of this + // package. // - // The hash of our inputs is based on the serialized export - // data and facts so that immaterial changes can be pruned - // without decoding. - key := an.cacheKey() + // We now consult a global cache of promised results. If nothing material has + // changed, we'll make a hit in the shared cache. // Access the cache. var summary *analyzeSummary @@ -727,45 +609,47 @@ func (an *analysisNode) runCached(ctx context.Context) (*analyzeSummary, error) return nil, bug.Errorf("internal error reading shared cache: %v", err) } else { // Cache miss: do the work. - var err error - summary, err = an.run(ctx) + cachedSummary, err := inFlightAnalyses.get(ctx, key, func(ctx context.Context) (*analyzeSummary, error) { + summary, err := an.run(ctx) + if err != nil { + return nil, err + } + if summary == nil { // debugging #66732 (can't happen) + bug.Reportf("analyzeNode.run returned nil *analyzeSummary") + } + go func() { + cacheLimit <- unit{} // acquire token + defer func() { <-cacheLimit }() // release token + + data := analyzeSummaryCodec.Encode(summary) + if false { + log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), an.ph.mp.ID) + } + if err := filecache.Set(cacheKind, key, data); err != nil { + event.Error(ctx, "internal error updating analysis shared cache", err) + } + }() + return summary, nil + }) if err != nil { return nil, err } - if summary == nil { // debugging #66732 (can't happen) - bug.Reportf("analyzeNode.run returned nil *analyzeSummary") - } - an.unfinishedPreds.Add(+1) // incref - go func() { - defer an.decrefPreds() //decref - - cacheLimit <- unit{} // acquire token - defer func() { <-cacheLimit }() // release token - - data := analyzeSummaryCodec.Encode(summary) - if false { - log.Printf("Set key=%d value=%d id=%s\n", len(key), len(data), an.mp.ID) - } - if err := filecache.Set(cacheKind, key, data); err != nil { - event.Error(ctx, "internal error updating analysis shared cache", err) - } - }() + // Copy the computed summary. In decrefPreds, we may zero out + // summary.actions, but can't mutate a shared result. + copy := *cachedSummary + summary = © } return summary, nil } -// cacheLimit reduces parallelism of cache updates. -// We allow more than typical GOMAXPROCS as it's a mix of CPU and I/O. 
-var cacheLimit = make(chan unit, 32) - // analysisCacheKey returns a cache key that is a cryptographic digest // of the all the values that might affect type checking and analysis: // the analyzer names, package metadata, names and contents of // compiled Go files, and vdeps (successor) information // (export data and facts). -func (an *analysisNode) cacheKey() [sha256.Size]byte { +func (an *analysisNode) cacheKey() file.Hash { hasher := sha256.New() // In principle, a key must be the hash of an @@ -778,40 +662,19 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { fmt.Fprintln(hasher, a.Name) } - // package metadata - mp := an.mp - fmt.Fprintf(hasher, "package: %s %s %s\n", mp.ID, mp.Name, mp.PkgPath) - fmt.Fprintf(hasher, "viewtype: %s\n", an.viewType) // (affects diagnostics) - - // We can ignore m.DepsBy{Pkg,Import}Path: although the logic - // uses those fields, we account for them by hashing vdeps. - - // type sizes - wordSize := an.mp.TypesSizes.Sizeof(types.Typ[types.Int]) - maxAlign := an.mp.TypesSizes.Alignof(types.NewPointer(types.Typ[types.Int64])) - fmt.Fprintf(hasher, "sizes: %d %d\n", wordSize, maxAlign) + // type checked package + fmt.Fprintf(hasher, "package: %s\n", an.ph.key) // metadata errors: used for 'compiles' field - fmt.Fprintf(hasher, "errors: %d", len(mp.Errors)) - - // module Go version - if mp.Module != nil && mp.Module.GoVersion != "" { - fmt.Fprintf(hasher, "go %s\n", mp.Module.GoVersion) - } - - // file names and contents - fmt.Fprintf(hasher, "files: %d\n", len(an.files)) - for _, fh := range an.files { - fmt.Fprintln(hasher, fh.Identity()) - } + fmt.Fprintf(hasher, "errors: %d", len(an.ph.mp.Errors)) // vdeps, in PackageID order for _, vdep := range moremaps.Sorted(an.succs) { - hash := vdep.depHash() + hash := vdep.summaryHash() hasher.Write(hash[:]) } - var hash [sha256.Size]byte + var hash file.Hash hasher.Sum(hash[:0]) return hash } @@ -822,70 +685,12 @@ func (an *analysisNode) cacheKey() [sha256.Size]byte { // Postcondition: on success, the analyzeSummary.Actions // key set is {a.Name for a in analyzers}. func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { - // Parse only the "compiled" Go files. - // Do the computation in parallel. - parsed := make([]*parsego.File, len(an.files)) - { - var group errgroup.Group - group.SetLimit(4) // not too much: run itself is already called in parallel - for i, fh := range an.files { - i, fh := i, fh - group.Go(func() error { - // Call parseGoImpl directly, not the caching wrapper, - // as cached ASTs require the global FileSet. - // ast.Object resolution is unfortunately an implied part of the - // go/analysis contract. - pgf, err := parseGoImpl(ctx, an.fset, fh, parsego.Full&^parser.SkipObjectResolution, false) - parsed[i] = pgf - return err - }) - } - if err := group.Wait(); err != nil { - return nil, err // cancelled, or catastrophic error (e.g. missing file) - } - } - // Type-check the package syntax. - pkg := an.typeCheck(parsed) - - // Publish the completed package. - an.typesOnce.Do(func() { an.types = pkg.types }) - if an.types != pkg.types { - log.Fatalf("typesOnce prematurely done") - } - - // Compute the union of exportDeps across our direct imports. - // This is the set that will be needed by the fact decoder. 
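// A reduced sketch of how a composite key like cacheKey above is formed:
// labelled inputs are streamed into one hash, so any change to an analyzer
// name, the type-checking key, or a dependency's summary hash produces a
// different digest. Callers must supply the inputs in a stable order (the
// real code sorts dependencies by package ID). Parameter names are
// illustrative.
package sketch

import (
	"crypto/sha256"
	"fmt"
)

func compositeKey(analyzers []string, pkgKey [32]byte, depHashes [][32]byte) [32]byte {
	h := sha256.New()
	for _, name := range analyzers {
		fmt.Fprintln(h, name)
	}
	fmt.Fprintf(h, "package: %x\n", pkgKey)
	for _, dep := range depHashes {
		h.Write(dep[:])
	}
	var key [32]byte
	h.Sum(key[:0])
	return key
}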
- allExportDeps := make(map[PackagePath]*analysisNode) - for _, succ := range an.succs { - for k, v := range succ.exportDeps { - allExportDeps[k] = v - } + pkg, err := an.typeCheck(ctx) + if err != nil { + return nil, err } - // The fact decoder needs a means to look up a Package by path. - pkg.factsDecoder = facts.NewDecoderFunc(pkg.types, func(path string) *types.Package { - // Note: Decode is called concurrently, and thus so is this function. - - // Does the fact relate to a package referenced by export data? - if dep, ok := allExportDeps[PackagePath(path)]; ok { - dep.typesOnce.Do(func() { log.Fatal("dep.types not populated") }) - if dep.typesErr == nil { - return dep.types - } - return nil - } - - // If the fact relates to a dependency not referenced - // by export data, it is safe to ignore it. - // (In that case dep.types exists but may be unpopulated - // or in the process of being populated from export data.) - if an.allDeps[PackagePath(path)] == nil { - log.Fatalf("fact package %q is not a dependency", path) - } - return nil - }) - // Poll cancellation state. if err := ctx.Err(); err != nil { return nil, err @@ -942,212 +747,136 @@ func (an *analysisNode) run(ctx context.Context) (*analyzeSummary, error) { } return &analyzeSummary{ - Export: pkg.export, - DeepExportHash: pkg.deepExportHash, - Compiles: pkg.compiles, - Actions: summaries, + Compiles: pkg.compiles, + Actions: summaries, }, nil } -// Postcondition: analysisPackage.types and an.exportDeps are populated. -func (an *analysisNode) typeCheck(parsed []*parsego.File) *analysisPackage { - mp := an.mp - - if false { // debugging - log.Println("typeCheck", mp.ID) - } - - pkg := &analysisPackage{ - mp: mp, - fset: an.fset, - parsed: parsed, - files: make([]*ast.File, len(parsed)), - compiles: len(mp.Errors) == 0, // false => list error - types: types.NewPackage(string(mp.PkgPath), string(mp.Name)), - typesInfo: &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), - Uses: make(map[*ast.Ident]types.Object), - }, - typesSizes: mp.TypesSizes, +func (an *analysisNode) typeCheck(ctx context.Context) (*analysisPackage, error) { + ppkg, err := an.batch.getPackage(ctx, an.ph) + if err != nil { + return nil, err } - versions.InitFileVersions(pkg.typesInfo) - // Unsafe has no syntax. - if mp.PkgPath == "unsafe" { - pkg.types = types.Unsafe - return pkg - } + compiles := len(an.ph.mp.Errors) == 0 && len(ppkg.TypeErrors()) == 0 - for i, p := range parsed { - pkg.files[i] = p.File + // The go/analysis framework implicitly promises to deliver + // trees with legacy ast.Object resolution. Do that now. + files := make([]*ast.File, len(ppkg.CompiledGoFiles())) + for i, p := range ppkg.CompiledGoFiles() { + p.Resolve() + files[i] = p.File if p.ParseErr != nil { - pkg.compiles = false // parse error + compiles = false // parse error } } - for _, vdep := range an.succs { - if !vdep.summary.Compiles { - pkg.compiles = false // transitive error - } - } + // The fact decoder needs a means to look up a Package by path. + pkgLookup := typesLookup(ppkg.Types()) + factsDecoder := facts.NewDecoderFunc(ppkg.Types(), func(path string) *types.Package { + // Note: Decode is called concurrently, and thus so is this function. 
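// As background for the Resolve call above: go/parser performs legacy
// ast.Object resolution unless parser.SkipObjectResolution is set, and (as
// the comment notes) go/analysis passes conventionally receive resolved
// files. A standalone sketch that makes the difference observable:
package sketch

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func countResolvedIdents(f *ast.File) (n int) {
	ast.Inspect(f, func(node ast.Node) bool {
		if id, ok := node.(*ast.Ident); ok && id.Obj != nil {
			n++
		}
		return true
	})
	return n
}

func demo() {
	const src = `package p; func f() { x := 1; _ = x }`
	fset := token.NewFileSet()
	resolved, _ := parser.ParseFile(fset, "a.go", src, 0)
	skipped, _ := parser.ParseFile(fset, "b.go", src, parser.SkipObjectResolution)
	fmt.Println(countResolvedIdents(resolved) > 0, countResolvedIdents(skipped) == 0) // true true
}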
- cfg := &types.Config{ - Sizes: mp.TypesSizes, - Error: func(e error) { - pkg.compiles = false // type error - - // Suppress type errors in files with parse errors - // as parser recovery can be quite lossy (#59888). - typeError := e.(types.Error) - for _, p := range parsed { - if p.ParseErr != nil && astutil.NodeContains(p.File, typeError.Pos) { - return - } - } - pkg.typeErrors = append(pkg.typeErrors, typeError) - }, - Importer: importerFunc(func(importPath string) (*types.Package, error) { - // Beware that returning an error from this function - // will cause the type checker to synthesize a fake - // package whose Path is importPath, potentially - // losing a vendor/ prefix. If type-checking errors - // are swallowed, these packages may be confusing. - - // Map ImportPath to ID. - id, ok := mp.DepsByImpPath[ImportPath(importPath)] - if !ok { - // The import syntax is inconsistent with the metadata. - // This could be because the import declaration was - // incomplete and the metadata only includes complete - // imports; or because the metadata ignores import - // edges that would lead to cycles in the graph. - return nil, fmt.Errorf("missing metadata for import of %q", importPath) - } + // Does the fact relate to a package reachable through imports? + if !an.ph.reachable.MayContain(path) { + return nil + } - // Map ID to node. (id may be "") - dep := an.succs[id] - if dep == nil { - // Analogous to (*snapshot).missingPkgError - // in the logic for regular type-checking, - // but without a snapshot we can't provide - // such detail, and anyway most analysis - // failures aren't surfaced in the UI. - return nil, fmt.Errorf("no required module provides analysis package %q (id=%q)", importPath, id) - } + return pkgLookup(path) + }) - // (Duplicates logic from check.go.) - if !metadata.IsValidImport(an.mp.PkgPath, dep.mp.PkgPath, an.viewType != GoPackagesDriverView) { - return nil, fmt.Errorf("invalid use of internal package %s", importPath) + var typeErrors []types.Error +filterErrors: + for _, typeError := range ppkg.TypeErrors() { + // Suppress type errors in files with parse errors + // as parser recovery can be quite lossy (#59888). + for _, p := range ppkg.CompiledGoFiles() { + if p.ParseErr != nil && astutil.NodeContains(p.File, typeError.Pos) { + continue filterErrors } - - return dep._import() - }), + } + typeErrors = append(typeErrors, typeError) } - // Set Go dialect. - if mp.Module != nil && mp.Module.GoVersion != "" { - goVersion := "go" + mp.Module.GoVersion - if validGoVersion(goVersion) { - cfg.GoVersion = goVersion + for _, vdep := range an.succs { + if !vdep.summary.Compiles { + compiles = false // transitive error } } - // We want to type check cgo code if go/types supports it. - // We passed typecheckCgo to go/packages when we Loaded. - // TODO(adonovan): do we actually need this?? - typesinternal.SetUsesCgo(cfg) - - check := types.NewChecker(cfg, pkg.fset, pkg.types, pkg.typesInfo) + return &analysisPackage{ + pkg: ppkg, + files: files, + typeErrors: typeErrors, + compiles: compiles, + factsDecoder: factsDecoder, + }, nil +} - // Type checking errors are handled via the config, so ignore them here. - _ = check.Files(pkg.files) +// typesLookup implements a concurrency safe depth-first traversal searching +// imports of pkg for a given package path. 
+func typesLookup(pkg *types.Package) func(string) *types.Package { + var ( + mu sync.Mutex // guards impMap and pending - // debugging (type errors are quite normal) - if false { - if pkg.typeErrors != nil { - log.Printf("package %s has type errors: %v", pkg.types.Path(), pkg.typeErrors) + // impMap memoizes the lookup of package paths. + impMap = map[string]*types.Package{ + pkg.Path(): pkg, + } + // pending is a FIFO queue of packages that have yet to have their + // dependencies fully scanned. + // Invariant: all entries in pending are already mapped in impMap. + pending = []*types.Package{pkg} + ) + + // search scans children the next package in pending, looking for pkgPath. + var search func(pkgPath string) (*types.Package, int) + search = func(pkgPath string) (sought *types.Package, numPending int) { + mu.Lock() + defer mu.Unlock() + + if p, ok := impMap[pkgPath]; ok { + return p, len(pending) } - } - // Emit the export data and compute the recursive hash. - export, err := gcimporter.IExportShallow(pkg.fset, pkg.types, bug.Reportf) - if err != nil { - // TODO(adonovan): in light of exporter bugs such as #57729, - // consider using bug.Report here and retrying the IExportShallow - // call here using an empty types.Package. - log.Fatalf("internal error writing shallow export data: %v", err) - } - pkg.export = export - - // Compute a recursive hash to account for the export data of - // this package and each dependency referenced by it. - // Also, populate exportDeps. - hash := sha256.New() - fmt.Fprintf(hash, "%s %d\n", mp.PkgPath, len(export)) - hash.Write(export) - paths, err := readShallowManifest(export) - if err != nil { - log.Fatalf("internal error: bad export data: %v", err) - } - for _, path := range paths { - dep, ok := an.allDeps[path] - if !ok { - log.Fatalf("%s: missing dependency: %q", an, path) + if len(pending) == 0 { + return nil, 0 } - fmt.Fprintf(hash, "%s %s\n", dep.mp.PkgPath, dep.summary.DeepExportHash) - an.exportDeps[path] = dep - } - an.exportDeps[mp.PkgPath] = an // self - hash.Sum(pkg.deepExportHash[:0]) - return pkg -} + pkg := pending[0] + pending = pending[1:] + for _, dep := range pkg.Imports() { + depPath := dep.Path() + if _, ok := impMap[depPath]; ok { + continue + } + impMap[depPath] = dep -// readShallowManifest returns the manifest of packages referenced by -// a shallow export data file for a package (excluding the package itself). -// TODO(adonovan): add a test. -func readShallowManifest(export []byte) ([]PackagePath, error) { - const selfPath = "" // dummy path - var paths []PackagePath - getPackages := func(items []gcimporter.GetPackagesItem) error { - paths = []PackagePath{} // non-nil - for _, item := range items { - if item.Path != selfPath { - paths = append(paths, PackagePath(item.Path)) + pending = append(pending, dep) + if depPath == pkgPath { + // Don't return early; finish processing pkg's deps. 
+ sought = dep } } - return errors.New("stop") // terminate importer + return sought, len(pending) } - _, err := gcimporter.IImportShallow(token.NewFileSet(), getPackages, export, selfPath, bug.Reportf) - if paths == nil { - if err != nil { - return nil, err // failed before getPackages callback + + return func(pkgPath string) *types.Package { + p, np := (*types.Package)(nil), 1 + for p == nil && np > 0 { + p, np = search(pkgPath) } - return nil, bug.Errorf("internal error: IImportShallow did not call getPackages") + return p } - return paths, nil // success } // analysisPackage contains information about a package, including // syntax trees, used transiently during its type-checking and analysis. type analysisPackage struct { - mp *metadata.Package - fset *token.FileSet // local to this package - parsed []*parsego.File - files []*ast.File // same as parsed[i].File - types *types.Package - compiles bool // package is transitively free of list/parse/type errors - factsDecoder *facts.Decoder - export []byte // encoding of types.Package - deepExportHash file.Hash // reflexive transitive hash of export data - typesInfo *types.Info - typeErrors []types.Error - typesSizes types.Sizes + pkg *Package + files []*ast.File // same as parsed[i].File + typeErrors []types.Error // filtered type checker errors + compiles bool // package is transitively free of list/parse/type errors + factsDecoder *facts.Decoder } // An action represents one unit of analysis work: the application of @@ -1170,7 +899,7 @@ type action struct { } func (act *action) String() string { - return fmt.Sprintf("%s@%s", act.a.Name, act.pkg.mp.ID) + return fmt.Sprintf("%s@%s", act.a.Name, act.pkg.pkg.metadata.ID) } // execActions executes a set of action graph nodes in parallel. @@ -1210,7 +939,7 @@ func execActions(ctx context.Context, actions []*action) { // completion and deliver a valid result. func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { analyzer := act.a - pkg := act.pkg + apkg := act.pkg hasFacts := len(analyzer.FactTypes) > 0 @@ -1233,8 +962,8 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { // Inv: all action dependencies succeeded. // Were there list/parse/type errors that might prevent analysis? - if !pkg.compiles && !analyzer.RunDespiteErrors { - return nil, nil, fmt.Errorf("skipping analysis %q because package %q does not compile", analyzer.Name, pkg.mp.ID) + if !apkg.compiles && !analyzer.RunDespiteErrors { + return nil, nil, fmt.Errorf("skipping analysis %q because package %q does not compile", analyzer.Name, apkg.pkg.metadata.ID) } // Inv: package is well-formed enough to proceed with analysis. @@ -1263,7 +992,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { // by "deep" export data. Better still, use a "shallow" approach. // Read and decode analysis facts for each direct import. 
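// typesLookup above answers "find the *types.Package with a given path
// somewhere in the import graph" incrementally, under a lock, so concurrent
// Decode calls can share one scan. The same search written sequentially and
// without memoizing across calls, as a point of comparison (go/types only;
// names illustrative):
package sketch

import "go/types"

func findImported(root *types.Package, path string) *types.Package {
	seen := map[string]bool{root.Path(): true}
	queue := []*types.Package{root}
	for len(queue) > 0 {
		pkg := queue[0]
		queue = queue[1:]
		if pkg.Path() == path {
			return pkg
		}
		for _, imp := range pkg.Imports() {
			if !seen[imp.Path()] {
				seen[imp.Path()] = true
				queue = append(queue, imp)
			}
		}
	}
	return nil // not reachable through imports
}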
- factset, err := pkg.factsDecoder.Decode(func(pkgPath string) ([]byte, error) { + factset, err := apkg.factsDecoder.Decode(func(pkgPath string) ([]byte, error) { if !hasFacts { return nil, nil // analyzer doesn't use facts, so no vdeps } @@ -1273,7 +1002,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { return nil, nil } - id, ok := pkg.mp.DepsByPkgPath[PackagePath(pkgPath)] + id, ok := apkg.pkg.metadata.DepsByPkgPath[PackagePath(pkgPath)] if !ok { // This may mean imp was synthesized by the type // checker because it failed to import it for any reason @@ -1291,7 +1020,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { vdep := act.vdeps[id] if vdep == nil { - return nil, bug.Errorf("internal error in %s: missing vdep for id=%s", pkg.types.Path(), id) + return nil, bug.Errorf("internal error in %s: missing vdep for id=%s", apkg.pkg.Types().Path(), id) } return vdep.summary.Actions[act.stableName].Facts, nil @@ -1309,7 +1038,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { // posToLocation converts from token.Pos to protocol form. posToLocation := func(start, end token.Pos) (protocol.Location, error) { - tokFile := pkg.fset.File(start) + tokFile := apkg.pkg.FileSet().File(start) // Find existing mapper by file name. // (Don't require an exact token.File match @@ -1318,7 +1047,7 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { mapper *protocol.Mapper fixed bool ) - for _, p := range pkg.parsed { + for _, p := range apkg.pkg.CompiledGoFiles() { if p.Tok.Name() == tokFile.Name() { mapper = p.Mapper fixed = p.Fixed() // suppress some assertions after parser recovery @@ -1399,14 +1128,14 @@ func (act *action) exec(ctx context.Context) (any, *actionSummary, error) { pass := &analysis.Pass{ Analyzer: analyzer, - Fset: pkg.fset, - Files: pkg.files, + Fset: apkg.pkg.FileSet(), + Files: apkg.files, OtherFiles: nil, // since gopls doesn't handle non-Go (e.g. asm) files IgnoredFiles: nil, // zero-config gopls should analyze these files in another view - Pkg: pkg.types, - TypesInfo: pkg.typesInfo, - TypesSizes: pkg.typesSizes, - TypeErrors: pkg.typeErrors, + Pkg: apkg.pkg.Types(), + TypesInfo: apkg.pkg.TypesInfo(), + TypesSizes: apkg.pkg.TypesSizes(), + TypeErrors: apkg.typeErrors, ResultOf: inputs, Report: func(d analysis.Diagnostic) { diagnostic, err := toGobDiagnostic(posToLocation, analyzer, d) diff --git a/gopls/internal/cache/check.go b/gopls/internal/cache/check.go index 08d57f4e657..0b9d4f8024d 100644 --- a/gopls/internal/cache/check.go +++ b/gopls/internal/cache/check.go @@ -24,6 +24,7 @@ import ( "golang.org/x/mod/module" "golang.org/x/sync/errgroup" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/bloom" "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/cache/typerefs" @@ -58,17 +59,20 @@ type typeCheckBatch struct { // handleMu guards _handles, which must only be accessed via addHandles or // getHandle. // - // TODO(rfindley): refactor such that we can simply prepare the type checking - // pass by ensuring that handles are present on the Snapshot, and access them - // directly, rather than copying maps for each caller. + // An alternative would be to simply verify that package handles are present + // on the Snapshot, and access them directly, rather than copying maps for + // each caller. 
However, handles are accessed very frequently during type + // checking, and ordinary go maps are measurably faster than the + // persistent.Map used to store handles on the snapshot. handleMu sync.Mutex _handles map[PackageID]*packageHandle - parseCache *parseCache - fset *token.FileSet // describes all parsed or imported files - cpulimit chan unit // concurrency limiter for CPU-bound operations - syntaxPackages *futureCache[PackageID, *Package] // transient cache of in-progress syntax futures - importPackages *futureCache[PackageID, *types.Package] // persistent cache of imports + parseCache *parseCache + fset *token.FileSet // describes all parsed or imported files + cpulimit chan unit // concurrency limiter for CPU-bound operations + syntaxPackages *futureCache[PackageID, *Package] // transient cache of in-progress syntax futures + importPackages *futureCache[PackageID, *types.Package] // persistent cache of imports + gopackagesdriver bool // for bug reporting: were packages loaded with a driver? } // addHandles is called by each goroutine joining the type check batch, to @@ -77,13 +81,9 @@ func (b *typeCheckBatch) addHandles(handles map[PackageID]*packageHandle) { b.handleMu.Lock() defer b.handleMu.Unlock() for id, ph := range handles { - assert(ph.state == validKey, "invalid handle") - if alt, ok := b._handles[id]; ok { - // Once handles have been reevaluated, they should not change. Therefore, - // we should only ever encounter exactly one handle instance for a given - // ID. - assert(alt == ph, "mismatching handle") - } else { + assert(ph.state >= validKey, "invalid handle") + + if alt, ok := b._handles[id]; !ok || alt.state < ph.state { b._handles[id] = ph } } @@ -136,209 +136,6 @@ func (s *Snapshot) TypeCheck(ctx context.Context, ids ...PackageID) ([]*Package, return pkgs, s.forEachPackage(ctx, ids, nil, post) } -// getImportGraph returns a shared import graph use for this snapshot, or nil. -// -// This is purely an optimization: holding on to more imports allows trading -// memory for CPU and latency. Currently, getImportGraph returns an import -// graph containing all packages imported by open packages, since these are -// highly likely to be needed when packages change. -// -// Furthermore, since we memoize active packages, including their imports in -// the shared import graph means we don't run the risk of pinning duplicate -// copies of common imports, if active packages are computed in separate type -// checking batches. -func (s *Snapshot) getImportGraph(ctx context.Context) *importGraph { - if !preserveImportGraph { - return nil - } - s.mu.Lock() - - // Evaluate the shared import graph for the snapshot. There are three major - // codepaths here: - // - // 1. importGraphDone == nil, importGraph == nil: it is this goroutine's - // responsibility to type-check the shared import graph. - // 2. importGraphDone == nil, importGraph != nil: it is this goroutine's - // responsibility to resolve the import graph, which may result in - // type-checking only if the existing importGraph (carried over from the - // preceding snapshot) is invalid. - // 3. importGraphDone != nil: some other goroutine is doing (1) or (2), wait - // for the work to be done. 
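// The merge performed by addHandles above keeps, for each package ID, the
// handle in the most advanced state, so goroutines joining the batch can only
// add information and never regress it. The same rule over plain maps (the
// handle type here is illustrative):
type handle struct{ state int } // states are ordered; higher means more complete

func mergeHandles(dst, src map[string]*handle) {
	for id, h := range src {
		if old, ok := dst[id]; !ok || old.state < h.state {
			dst[id] = h // src has strictly more information for id
		}
	}
}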
- done := s.importGraphDone - if done == nil { - done = make(chan unit) - s.importGraphDone = done - release := s.Acquire() // must acquire to use the snapshot asynchronously - go func() { - defer release() - importGraph, err := s.resolveImportGraph() // may be nil - if err != nil { - // resolveImportGraph operates on the background context, because it is - // a shared resource across the snapshot. If the snapshot is cancelled, - // don't log an error. - if s.backgroundCtx.Err() == nil { - event.Error(ctx, "computing the shared import graph", err) - } - importGraph = nil - } - s.mu.Lock() - s.importGraph = importGraph - s.mu.Unlock() - close(done) - }() - } - s.mu.Unlock() - - select { - case <-done: - return s.importGraph - case <-ctx.Done(): - return nil - } -} - -// resolveImportGraph evaluates the shared import graph to use for -// type-checking in this snapshot. This may involve re-using the import graph -// of the previous snapshot (stored in s.importGraph), or computing a fresh -// import graph. -// -// resolveImportGraph should only be called from getImportGraph. -// -// TODO(rfindley): resolveImportGraph can be eliminated (greatly simplifying -// things) by instead holding on to imports of open packages after each type -// checking pass. -func (s *Snapshot) resolveImportGraph() (*importGraph, error) { - ctx := s.backgroundCtx - ctx, done := event.Start(event.Detach(ctx), "cache.resolveImportGraph") - defer done() - - s.mu.Lock() - lastImportGraph := s.importGraph - g := s.meta - s.mu.Unlock() - - openPackages := make(map[PackageID]bool) - for _, fh := range s.Overlays() { - // golang/go#66145: don't call MetadataForFile here. This function, which - // builds a shared import graph, is an optimization. We don't want it to - // have the side effect of triggering a load. - // - // In the past, a call to MetadataForFile here caused a bunch of - // unnecessary loads in multi-root workspaces (and as a result, spurious - // diagnostics). - var mps []*metadata.Package - for _, id := range g.IDs[fh.URI()] { - mps = append(mps, g.Packages[id]) - } - metadata.RemoveIntermediateTestVariants(&mps) - for _, mp := range mps { - openPackages[mp.ID] = true - } - } - - var openPackageIDs []PackageID - for id := range openPackages { - openPackageIDs = append(openPackageIDs, id) - } - - // Subtlety: we erase the upward cone of open packages from the shared import - // graph, to increase reusability. - // - // This is easiest to understand via an example: suppose A imports B, and B - // imports C. Now suppose A and B are open. If we preserve the entire set of - // shared deps by open packages, deps will be {B, C}. But this means that any - // change to the open package B will invalidate the shared import graph, - // meaning we will experience no benefit from sharing when B is edited. - // Consider that this will be a common scenario, when A is foo_test and B is - // foo. Better to just preserve the shared import C. - // - // With precise pruning, we may want to truncate this search based on - // reachability. - // - // TODO(rfindley): this logic could use a unit test. - volatile := make(map[PackageID]bool) - var isVolatile func(PackageID) bool - isVolatile = func(id PackageID) (v bool) { - v, ok := volatile[id] - if !ok { - volatile[id] = false // defensive: break cycles - for _, dep := range g.Packages[id].DepsByPkgPath { - if isVolatile(dep) { - v = true - // Keep going, to ensure that we traverse all dependencies. 
- } - } - if openPackages[id] { - v = true - } - volatile[id] = v - } - return v - } - for _, id := range openPackageIDs { - if ctx.Err() != nil { - return nil, ctx.Err() - } - _ = isVolatile(id) // populate volatile map - } - - var ids []PackageID - for id, v := range volatile { - if !v { - ids = append(ids, id) - } - } - - handles, err := s.getPackageHandles(ctx, ids) - if err != nil { - return nil, err - } - - // We reuse the last import graph if and only if none of the dependencies - // have changed. Doing better would involve analyzing dependencies to find - // subgraphs that are still valid. Not worth it, especially when in the - // common case nothing has changed. - unchanged := lastImportGraph != nil && len(ids) == len(lastImportGraph.depKeys) - depKeys := make(map[PackageID]file.Hash) - for id, ph := range handles { - depKeys[id] = ph.key - if unchanged { - prevKey, ok := lastImportGraph.depKeys[id] - unchanged = ok && prevKey == ph.key - } - } - - if unchanged { - return lastImportGraph, nil - } - - b := newTypeCheckBatch(s.view.parseCache, nil) - if err := b.query(ctx, ids, nil, nil, nil, handles); err != nil { - return nil, err - } - - next := &importGraph{ - fset: b.fset, - depKeys: depKeys, - imports: make(map[PackageID]pkgOrErr), - } - for id, fut := range b.importPackages.cache { - if fut.v == nil && fut.err == nil { - panic(fmt.Sprintf("internal error: import node %s is not evaluated", id)) - } - next.imports[id] = pkgOrErr{fut.v, fut.err} - } - return next, nil -} - -// An importGraph holds selected results of a type-checking pass, to be re-used -// by subsequent snapshots. -type importGraph struct { - fset *token.FileSet // fileset used for type checking imports - depKeys map[PackageID]file.Hash // hash of direct dependencies for this graph - imports map[PackageID]pkgOrErr // results of type checking -} - // Package visiting functions used by forEachPackage; see the documentation of // forEachPackage for details. type ( @@ -376,8 +173,11 @@ func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preT // requests for package information for the modified package (semantic // tokens, code lens, inlay hints, etc.) for i, id := range ids { - if pkg := s.getActivePackage(id); pkg != nil { - post(i, pkg) + s.mu.Lock() + ph, ok := s.packages.Get(id) + s.mu.Unlock() + if ok && ph.state >= validPackage { + post(i, ph.pkgData.pkg) } else { needIDs = append(needIDs, id) indexes = append(indexes, i) @@ -388,6 +188,14 @@ func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preT return nil // short cut: many call sites do not handle empty ids } + b, release := s.acquireTypeChecking() + defer release() + + handles, err := s.getPackageHandles(ctx, needIDs) + if err != nil { + return err + } + // Wrap the pre- and post- funcs to translate indices. var pre2 preTypeCheck if pre != nil { @@ -396,18 +204,28 @@ func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preT } } post2 := func(i int, pkg *Package) { - s.setActivePackage(pkg.metadata.ID, pkg) - post(indexes[i], pkg) - } + id := pkg.metadata.ID + if ph := handles[id]; ph.isOpen && ph.state < validPackage { + // Cache open type checked packages. 
+ ph = ph.clone() + ph.pkgData = &packageData{ + fset: pkg.FileSet(), + imports: pkg.Types().Imports(), + pkg: pkg, + } + ph.state = validPackage - b, release := s.acquireTypeChecking(ctx) - defer release() + s.mu.Lock() + if alt, ok := s.packages.Get(id); !ok || alt.state < ph.state { + s.packages.Set(id, ph, nil) + } + s.mu.Unlock() + } - handles, err := s.getPackageHandles(ctx, needIDs) - if err != nil { - return err + post(indexes[i], pkg) } - return b.query(ctx, nil, needIDs, pre2, post2, handles) + + return b.query(ctx, needIDs, pre2, post2, handles) } // acquireTypeChecking joins or starts a concurrent type checking batch. @@ -415,14 +233,13 @@ func (s *Snapshot) forEachPackage(ctx context.Context, ids []PackageID, pre preT // The batch may be queried for package information using [typeCheckBatch.query]. // The second result must be called when the batch is no longer needed, to // release the resource. -func (s *Snapshot) acquireTypeChecking(ctx context.Context) (*typeCheckBatch, func()) { +func (s *Snapshot) acquireTypeChecking() (*typeCheckBatch, func()) { s.typeCheckMu.Lock() defer s.typeCheckMu.Unlock() if s.batch == nil { assert(s.batchRef == 0, "miscounted type checking") - impGraph := s.getImportGraph(ctx) - s.batch = newTypeCheckBatch(s.view.parseCache, impGraph) + s.batch = newTypeCheckBatch(s.view.parseCache, s.view.typ == GoPackagesDriverView) } s.batchRef++ @@ -441,29 +258,16 @@ func (s *Snapshot) acquireTypeChecking(ctx context.Context) (*typeCheckBatch, fu // shared parseCache. // // If a non-nil importGraph is provided, imports in this graph will be reused. -func newTypeCheckBatch(parseCache *parseCache, importGraph *importGraph) *typeCheckBatch { - b := &typeCheckBatch{ - _handles: make(map[PackageID]*packageHandle), - parseCache: parseCache, - fset: fileSetWithBase(reservedForParsing), - cpulimit: make(chan unit, runtime.GOMAXPROCS(0)), - syntaxPackages: newFutureCache[PackageID, *Package](false), // don't persist syntax packages - importPackages: newFutureCache[PackageID, *types.Package](true), // ...but DO persist imports - } - - if importGraph != nil { - // Clone the file set every time, to ensure we do not leak files. - b.fset = tokeninternal.CloneFileSet(importGraph.fset) - // Pre-populate future cache with 'done' futures. - done := make(chan unit) - close(done) - for id, res := range importGraph.imports { - b.importPackages.cache[id] = &future[*types.Package]{done: done, v: res.pkg, err: res.err} - } - } else { - b.fset = fileSetWithBase(reservedForParsing) +func newTypeCheckBatch(parseCache *parseCache, gopackagesdriver bool) *typeCheckBatch { + return &typeCheckBatch{ + _handles: make(map[PackageID]*packageHandle), + parseCache: parseCache, + fset: fileSetWithBase(reservedForParsing), + cpulimit: make(chan unit, runtime.GOMAXPROCS(0)), + syntaxPackages: newFutureCache[PackageID, *Package](false), // don't persist syntax packages + importPackages: newFutureCache[PackageID, *types.Package](true), // ...but DO persist imports + gopackagesdriver: gopackagesdriver, } - return b } // query executes a traversal of package information in the given typeCheckBatch. @@ -478,7 +282,7 @@ func newTypeCheckBatch(parseCache *parseCache, importGraph *importGraph) *typeCh // // TODO(rfindley): simplify this API by clarifying shared import graph and // package handle logic. 
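// acquireTypeChecking above hands out a shared batch together with a release
// function: the batch is created on first use and reference-counted so it can
// be dropped once no goroutine holds it. The same shape as a generic
// standalone sketch (names illustrative):
package sketch

import "sync"

type sharedResource[T any] struct {
	mu   sync.Mutex
	val  *T
	refs int
}

func (s *sharedResource[T]) acquire(create func() *T) (*T, func()) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.refs == 0 {
		s.val = create() // first holder creates the shared value
	}
	s.refs++
	release := func() {
		s.mu.Lock()
		defer s.mu.Unlock()
		if s.refs--; s.refs == 0 {
			s.val = nil // last holder: allow the value to be collected
		}
	}
	return s.val, release
}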
-func (b *typeCheckBatch) query(ctx context.Context, importIDs, syntaxIDs []PackageID, pre preTypeCheck, post postTypeCheck, handles map[PackageID]*packageHandle) error { +func (b *typeCheckBatch) query(ctx context.Context, syntaxIDs []PackageID, pre preTypeCheck, post postTypeCheck, handles map[PackageID]*packageHandle) error { b.addHandles(handles) // Start a single goroutine for each requested package. @@ -486,15 +290,6 @@ func (b *typeCheckBatch) query(ctx context.Context, importIDs, syntaxIDs []Packa // Other packages are reached recursively, and will not be evaluated if they // are not needed. var g errgroup.Group - for _, id := range importIDs { - g.Go(func() error { - if ctx.Err() != nil { - return ctx.Err() - } - _, err := b.getImportPackage(ctx, id) - return err - }) - } for i, id := range syntaxIDs { g.Go(func() error { if ctx.Err() != nil { @@ -554,13 +349,47 @@ func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id Pack return nil // skip: not needed } - pkg, err := b.syntaxPackages.get(ctx, id, func(ctx context.Context) (*Package, error) { + // Check if we have a syntax package stored on ph. + // + // This was checked in [Snapshot.forEachPackage], but may have since changed. + if ph.state >= validPackage { + post(i, ph.pkgData.pkg) + return nil + } + + pkg, err := b.getPackage(ctx, ph) + if err != nil { + return err + } + + post(i, pkg) + return nil +} + +// getPackage type checks one [Package] in the batch. +func (b *typeCheckBatch) getPackage(ctx context.Context, ph *packageHandle) (*Package, error) { + return b.syntaxPackages.get(ctx, ph.mp.ID, func(ctx context.Context) (*Package, error) { // Wait for predecessors. - { + // Record imports of this package to avoid redundant work in typesConfig. + imports := make(map[PackagePath]*types.Package) + fset := b.fset + if ph.state >= validImports { + for _, imp := range ph.pkgData.imports { + imports[PackagePath(imp.Path())] = imp + } + // Reusing imports requires that their positions are mapped by the FileSet. + fset = tokeninternal.CloneFileSet(ph.pkgData.fset) + } else { + var impMu sync.Mutex var g errgroup.Group - for _, depID := range ph.mp.DepsByPkgPath { + for depPath, depID := range ph.mp.DepsByPkgPath { g.Go(func() error { - _, err := b.getImportPackage(ctx, depID) + imp, err := b.getImportPackage(ctx, depID) + if err == nil { + impMu.Lock() + imports[depPath] = imp + impMu.Unlock() + } return err }) } @@ -588,7 +417,7 @@ func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id Pack } // Compute the syntax package. - p, err := b.checkPackage(ctx, ph) + p, err := b.checkPackage(ctx, fset, ph, imports) if err != nil { return nil, err // e.g. I/O error, cancelled } @@ -597,12 +426,6 @@ func (b *typeCheckBatch) handleSyntaxPackage(ctx context.Context, i int, id Pack go storePackageResults(ctx, ph, p) // ...and write all packages to disk return p, nil }) - if err != nil { - return err - } - - post(i, pkg) - return nil } // storePackageResults serializes and writes information derived from p to the @@ -632,13 +455,22 @@ func storePackageResults(ctx context.Context, ph *packageHandle, p *Package) { } } +// Metadata implements the [metadata.Source] interface. +func (b *typeCheckBatch) Metadata(id PackageID) *metadata.Package { + ph := b.getHandle(id) + if ph == nil { + return nil + } + return ph.mp +} + // importPackage loads the given package from its export data in p.exportData // (which must already be populated). 
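// getPackage above funnels all requests for one package through a single
// computation via the batch's future cache. A minimal single-flight map in
// the same spirit, as a standalone sketch (no persistence and no context
// cancellation; the types and names here are illustrative, not the real
// futureCache):
package sketch

import "sync"

type result[V any] struct {
	done chan struct{}
	v    V
	err  error
}

type futures[K comparable, V any] struct {
	mu sync.Mutex
	m  map[K]*result[V]
}

func newFutures[K comparable, V any]() *futures[K, V] {
	return &futures[K, V]{m: make(map[K]*result[V])}
}

func (f *futures[K, V]) get(key K, compute func() (V, error)) (V, error) {
	f.mu.Lock()
	r, ok := f.m[key]
	if !ok {
		r = &result[V]{done: make(chan struct{})}
		f.m[key] = r
		f.mu.Unlock()
		r.v, r.err = compute() // only the first caller computes
		close(r.done)          // publish to any waiters
	} else {
		f.mu.Unlock()
		<-r.done // later callers wait for the first to finish
	}
	return r.v, r.err
}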
func (b *typeCheckBatch) importPackage(ctx context.Context, mp *metadata.Package, data []byte) (*types.Package, error) { ctx, done := event.Start(ctx, "cache.typeCheckBatch.importPackage", label.Package.Of(string(mp.ID))) defer done() - importLookup := b.importLookup(mp) + importLookup := importLookup(mp, b) thisPackage := types.NewPackage(string(mp.PkgPath), string(mp.Name)) getPackages := func(items []gcimporter.GetPackagesItem) error { @@ -655,8 +487,14 @@ func (b *typeCheckBatch) importPackage(ctx context.Context, mp *metadata.Package // manifest in the export data of mp.PkgPath is // inconsistent with mp.Name. Or perhaps there // are duplicate PkgPath items in the manifest? - return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", - pkg.Name(), item.Name, id, item.Path) + if b.gopackagesdriver { + return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904) (using GOPACKAGESDRIVER)", + pkg.Name(), item.Name, id, item.Path) + } else { + return bug.Errorf("internal error: package name is %q, want %q (id=%q, path=%q) (see issue #60904)", + pkg.Name(), item.Name, id, item.Path) + + } } } else { id = importLookup(PackagePath(item.Path)) @@ -707,7 +545,7 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH onError := func(e error) { // Ignore errors for exporting. } - cfg := b.typesConfig(ctx, ph.localInputs, onError) + cfg := b.typesConfig(ctx, ph.localInputs, nil, onError) cfg.IgnoreFuncBodies = true // Parse the compiled go files, bypassing the parse cache as packages checked @@ -777,7 +615,9 @@ func (b *typeCheckBatch) checkPackageForImport(ctx context.Context, ph *packageH // package (id). // // The resulting function is not concurrency safe. -func (b *typeCheckBatch) importLookup(mp *metadata.Package) func(PackagePath) PackageID { +func importLookup(mp *metadata.Package, source metadata.Source) func(PackagePath) PackageID { + assert(mp != nil, "nil metadata") + // This function implements an incremental depth first scan through the // package imports. Previous implementations of import mapping built the // entire PackagePath->PackageID mapping eagerly, but that resulted in a @@ -788,6 +628,7 @@ func (b *typeCheckBatch) importLookup(mp *metadata.Package) func(PackagePath) Pa impMap := map[PackagePath]PackageID{ mp.PkgPath: mp.ID, } + // pending is a FIFO queue of package metadata that has yet to have its // dependencies fully scanned. // Invariant: all entries in pending are already mapped in impMap. @@ -808,9 +649,11 @@ func (b *typeCheckBatch) importLookup(mp *metadata.Package) func(PackagePath) Pa continue } impMap[depPath] = depID - // TODO(rfindley): express this as an operation on the import graph - // itself, rather than the set of package handles. - pending = append(pending, b.getHandle(depID).mp) + + dep := source.Metadata(depID) + assert(dep != nil, "missing dep metadata") + + pending = append(pending, dep) if depPath == pkgPath { // Don't return early; finish processing pkg's deps. 
id = depID @@ -837,9 +680,11 @@ func (b *typeCheckBatch) importLookup(mp *metadata.Package) func(PackagePath) Pa type packageState uint8 const ( - validMetadata packageState = iota // the package has valid metadata (initial state) + validMetadata packageState = iota // the package has valid metadata validLocalData // local package files have been analyzed validKey // dependencies have been analyzed, and key produced + validImports // pkgData.fset and pkgData.imports are valid + validPackage // pkgData.pkg is valid ) // A packageHandle holds information derived from a metadata.Package, and @@ -866,12 +711,20 @@ const ( // we sometimes refer to as "precise pruning", or fine-grained invalidation: // https://go.dev/blog/gopls-scalability#invalidation // +// After type checking, package information for open packages is cached in the +// pkgData field (validPackage), to optimize subsequent requests oriented +// around open files. +// // Following a change, the packageHandle is cloned in the new snapshot with a // new state set to its least known valid state, as described above: if package // files changed, it is reset to validMetadata; if dependencies changed, it is // reset to validLocalData. However, the derived data from its previous state // is not yet removed, as keys may not have changed after they are reevaluated, -// in which case we can avoid recomputing the derived data. +// in which case we can avoid recomputing the derived data. In particular, if +// the cache key did not change, the pkgData field (if set) remains valid. As a +// special case, if the cache key did change, but none of the keys of +// dependencies changed, the pkgData.fset and pkgData.imports fields are still +// valid, though the pkgData.pkg field is not (validImports). // // See [packageHandleBuilder.evaluatePackageHandle] for more details of the // reevaluation algorithm. @@ -909,6 +762,8 @@ type packageHandle struct { // localInputs holds all local type-checking localInputs, excluding // dependencies. localInputs *typeCheckInputs + // isOpen reports whether the package has any open files. + isOpen bool // localKey is a hash of localInputs. localKey file.Hash // refs is the result of syntactic dependency analysis produced by the @@ -920,11 +775,34 @@ type packageHandle struct { // depKeys records the key of each dependency that was used to calculate the // key below. If state < validKey, we must re-check that each still matches. depKeys map[PackageID]file.Hash + + // reachable is used to filter reachable package paths for go/analysis fact + // importing. + reachable *bloom.Filter + // key is the hashed key for the package. // // It includes the all bits of the transitive closure of // dependencies's sources. key file.Hash + + // pkgData caches data derived from type checking the package. + // This data is set during [Snapshot.forEachPackage], and may be partially + // invalidated in [packageHandleBuilder.evaluatePackageHandle]. + // + // If state == validPackage, all fields of pkgData are valid. If state == + // validImports, only fset and imports are valid. + pkgData *packageData +} + +// packageData holds the (possibly partial) result of type checking this +// package. See the pkgData field of [packageHandle]. +// +// packageData instances are immutable. +type packageData struct { + fset *token.FileSet // pkg.FileSet() + imports []*types.Package // pkg.Types().Imports() + pkg *Package // pkg, if state==validPackage; nil in lower states } // clone returns a shallow copy of the receiver. 
@@ -1178,51 +1056,53 @@ func (b *packageHandleBuilder) getOneTransitiveRefLocked(sym typerefs.Symbol) *t // // evaluatePackageHandle must only be called from getPackageHandles. func (b *packageHandleBuilder) evaluatePackageHandle(ctx context.Context, n *handleNode) (err error) { - // Initialize n.ph. - var hit bool b.s.mu.Lock() - n.ph, hit = b.s.packages.Get(n.mp.ID) + ph, hit := b.s.packages.Get(n.mp.ID) b.s.mu.Unlock() - if hit && n.ph.state >= validKey { - return nil // already valid - } else { - // We'll need to update the package handle. Since this could happen - // concurrently, make a copy. - if hit { - n.ph = n.ph.clone() - } else { - n.ph = &packageHandle{ - mp: n.mp, - state: validMetadata, - } - } - } - defer func() { if err == nil { - assert(n.ph.state == validKey, "invalid handle") + assert(ph.state >= validKey, "invalid handle") + // Record the now valid key in the snapshot. // There may be a race, so avoid the write if the recorded handle is // already valid. b.s.mu.Lock() - if alt, ok := b.s.packages.Get(n.mp.ID); !ok || alt.state < n.ph.state { - b.s.packages.Set(n.mp.ID, n.ph, nil) + if alt, ok := b.s.packages.Get(n.mp.ID); !ok || alt.state < ph.state { + b.s.packages.Set(n.mp.ID, ph, nil) } else { - n.ph = alt + ph = alt } b.s.mu.Unlock() + + // Initialize n.ph. + n.ph = ph } }() - // Invariant: n.ph is either + if hit && ph.state >= validKey { + return nil // already valid + } else { + // We'll need to update the package handle. Since this could happen + // concurrently, make a copy. + if hit { + ph = ph.clone() // state < validKey + } else { + ph = &packageHandle{ + mp: n.mp, + state: validMetadata, + } + } + } + + // Invariant: ph is either // - a new handle in state validMetadata, or // - a clone of an existing handle in state validMetadata or validLocalData. // State transition: validMetadata -> validLocalInputs. localKeyChanged := false - if n.ph.state < validLocalData { - prevLocalKey := n.ph.localKey // may be zero + if ph.state < validLocalData { + prevLocalKey := ph.localKey // may be zero // No package handle: read and analyze the package syntax. inputs, err := b.s.typeCheckInputs(ctx, n.mp) if err != nil { @@ -1232,15 +1112,47 @@ func (b *packageHandleBuilder) evaluatePackageHandle(ctx context.Context, n *han if err != nil { return err } - n.ph.loadDiagnostics = computeLoadDiagnostics(ctx, b.s, n.mp) - n.ph.localInputs = inputs - n.ph.localKey = localPackageKey(inputs) - n.ph.refs = refs - n.ph.state = validLocalData - localKeyChanged = n.ph.localKey != prevLocalKey + ph.loadDiagnostics = computeLoadDiagnostics(ctx, b.s, n.mp) + ph.localInputs = inputs + + checkOpen: + for _, files := range [][]file.Handle{inputs.goFiles, inputs.compiledGoFiles} { + for _, fh := range files { + if _, ok := fh.(*overlay); ok { + ph.isOpen = true + break checkOpen + } + } + } + if !ph.isOpen { + // ensure we don't hold data for closed packages + ph.pkgData = nil + } + ph.localKey = localPackageKey(inputs) + ph.refs = refs + ph.state = validLocalData + localKeyChanged = ph.localKey != prevLocalKey } - assert(n.ph.state == validLocalData, "unexpected handle state") + assert(ph.state == validLocalData, "unexpected handle state") + + // State transition: validLocalInputs -> validKey + + // Check if any dependencies have actually changed. 
+ depsChanged := true + if ph.depKeys != nil { // ph was previously evaluated + depsChanged = len(ph.depKeys) != len(n.succs) + if !depsChanged { + for id, succ := range n.succs { + oldKey, ok := ph.depKeys[id] + assert(ok, "missing dep") + if oldKey != succ.ph.key { + depsChanged = true + break + } + } + } + } // Optimization: if the local package information did not change, nor did any // of the dependencies, we don't need to re-run the reachability algorithm. @@ -1252,73 +1164,86 @@ func (b *packageHandleBuilder) evaluatePackageHandle(ctx context.Context, n *han // package key of B will not change. We still need to re-run the reachability // algorithm on B to confirm. But if the key of B did not change, we don't // even need to run the reachability algorithm on A. - if !localKeyChanged && - n.ph.depKeys != nil && // n.ph was previously evaluated - len(n.ph.depKeys) == len(n.succs) { - - unchanged := true - for id, succ := range n.succs { - oldKey, ok := n.ph.depKeys[id] - assert(ok, "missing dep") - if oldKey != succ.ph.key { - unchanged = false - break + if !localKeyChanged && !depsChanged { + ph.state = validKey + } + + keyChanged := false + if ph.state < validKey { + prevKey := ph.key + + // If we get here, it must be the case that deps have changed, so we must + // run the reachability algorithm. + ph.depKeys = make(map[PackageID]file.Hash) + + // See the typerefs package: the reachable set of packages is defined to be + // the set of packages containing syntax that is reachable through the + // symbol reference graph starting at the exported symbols in the + // dependencies of ph. + reachable := b.s.view.pkgIndex.NewSet() + for depID, succ := range n.succs { + ph.depKeys[depID] = succ.ph.key + reachable.Add(succ.idxID) + trefs := b.getTransitiveRefs(succ.mp.ID) + assert(trefs != nil, "nil trefs") + for _, set := range trefs { + reachable.Union(set) } } - if unchanged { - n.ph.state = validKey - return nil - } - } - // State transition: validLocalInputs -> validKey - // - // If we get here, it must be the case that deps have changed, so we must - // run the reachability algorithm. - n.ph.depKeys = make(map[PackageID]file.Hash) - - // See the typerefs package: the reachable set of packages is defined to be - // the set of packages containing syntax that is reachable through the - // exported symbols in the dependencies of n.ph. - reachable := b.s.view.pkgIndex.NewSet() - for depID, succ := range n.succs { - n.ph.depKeys[depID] = succ.ph.key - reachable.Add(succ.idxID) - trefs := b.getTransitiveRefs(succ.mp.ID) - assert(trefs != nil, "nil trefs") - for _, set := range trefs { - reachable.Union(set) - } - } - - // Collect reachable nodes. - var reachableNodes []*handleNode - // In the presence of context cancellation, any package may be missing. - // We need all dependencies to produce a valid key. - reachable.Elems(func(id typerefs.IndexID) { - dh := b.nodes[id] - if dh == nil { - // Previous code reported an error (not a bug) here. - bug.Reportf("missing reachable node for %q", id) - } else { - reachableNodes = append(reachableNodes, dh) + // Collect reachable nodes. + var reachableNodes []*handleNode + // In the presence of context cancellation, any package may be missing. + // We need all dependencies to produce a key. + reachable.Elems(func(id typerefs.IndexID) { + dh := b.nodes[id] + if dh == nil { + // Previous code reported an error (not a bug) here. 
+ bug.Reportf("missing reachable node for %q", id) + } else { + reachableNodes = append(reachableNodes, dh) + } + }) + + // Sort for stability. + sort.Slice(reachableNodes, func(i, j int) bool { + return reachableNodes[i].mp.ID < reachableNodes[j].mp.ID + }) + + // Key is the hash of the local key of this package, and the local key of + // all reachable packages. + depHasher := sha256.New() + depHasher.Write(ph.localKey[:]) + reachablePaths := make([]string, len(reachableNodes)) + for i, dh := range reachableNodes { + depHasher.Write(dh.ph.localKey[:]) + reachablePaths[i] = string(dh.ph.mp.PkgPath) } - }) + depHasher.Sum(ph.key[:0]) + ph.reachable = bloom.NewFilter(reachablePaths) + ph.state = validKey + keyChanged = ph.key != prevKey + } - // Sort for stability. - sort.Slice(reachableNodes, func(i, j int) bool { - return reachableNodes[i].mp.ID < reachableNodes[j].mp.ID - }) + assert(ph.state == validKey, "unexpected handle state") - // Key is the hash of the local key, and the local key of all reachable - // packages. - depHasher := sha256.New() - depHasher.Write(n.ph.localKey[:]) - for _, dh := range reachableNodes { - depHasher.Write(dh.ph.localKey[:]) + // Validate ph.pkgData, upgrading state if the package or its imports are + // still valid. + if ph.pkgData != nil { + pkgData := *ph.pkgData // make a copy + ph.pkgData = &pkgData + ph.state = validPackage + if keyChanged || ph.pkgData.pkg == nil { + ph.pkgData.pkg = nil // ensure we don't hold on to stale packages + ph.state = validImports + } + if depsChanged { + ph.pkgData = nil + ph.state = validKey + } } - depHasher.Sum(n.ph.key[:0]) - n.ph.state = validKey + + // Postcondition: state >= validKey return nil } @@ -1533,36 +1458,36 @@ func localPackageKey(inputs *typeCheckInputs) file.Hash { // checkPackage type checks the parsed source files in compiledGoFiles. // (The resulting pkg also holds the parsed but not type-checked goFiles.) // deps holds the future results of type-checking the direct dependencies. -func (b *typeCheckBatch) checkPackage(ctx context.Context, ph *packageHandle) (*Package, error) { +func (b *typeCheckBatch) checkPackage(ctx context.Context, fset *token.FileSet, ph *packageHandle, imports map[PackagePath]*types.Package) (*Package, error) { inputs := ph.localInputs ctx, done := event.Start(ctx, "cache.typeCheckBatch.checkPackage", label.Package.Of(string(inputs.id))) defer done() pkg := &syntaxPackage{ id: inputs.id, - fset: b.fset, // must match parse call below + fset: fset, // must match parse call below types: types.NewPackage(string(inputs.pkgPath), string(inputs.name)), typesSizes: inputs.sizes, typesInfo: &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Scopes: make(map[ast.Node]*types.Scope), + FileVersions: make(map[*ast.File]string), }, } - versions.InitFileVersions(pkg.typesInfo) // Collect parsed files from the type check pass, capturing parse errors from // compiled files. 
var err error - pkg.goFiles, err = b.parseCache.parseFiles(ctx, b.fset, parsego.Full, false, inputs.goFiles...) + pkg.goFiles, err = b.parseCache.parseFiles(ctx, pkg.fset, parsego.Full, false, inputs.goFiles...) if err != nil { return nil, err } - pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, b.fset, parsego.Full, false, inputs.compiledGoFiles...) + pkg.compiledGoFiles, err = b.parseCache.parseFiles(ctx, pkg.fset, parsego.Full, false, inputs.compiledGoFiles...) if err != nil { return nil, err } @@ -1591,7 +1516,7 @@ func (b *typeCheckBatch) checkPackage(ctx context.Context, ph *packageHandle) (* onError := func(e error) { pkg.typeErrors = append(pkg.typeErrors, e.(types.Error)) } - cfg := b.typesConfig(ctx, inputs, onError) + cfg := b.typesConfig(ctx, inputs, imports, onError) check := types.NewChecker(cfg, pkg.fset, pkg.types, pkg.typesInfo) var files []*ast.File @@ -1677,7 +1602,7 @@ func (b *typeCheckBatch) checkPackage(ctx context.Context, ph *packageHandle) (* // e.g. "go1" or "go1.2" or "go1.2.3" var goVersionRx = regexp.MustCompile(`^go[1-9][0-9]*(?:\.(0|[1-9][0-9]*)){0,2}$`) -func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs *typeCheckInputs, onError func(e error)) *types.Config { +func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs *typeCheckInputs, imports map[PackagePath]*types.Package, onError func(e error)) *types.Config { cfg := &types.Config{ Sizes: inputs.sizes, Error: onError, @@ -1701,6 +1626,19 @@ func (b *typeCheckBatch) typesConfig(ctx context.Context, inputs *typeCheckInput if !metadata.IsValidImport(inputs.pkgPath, depPH.mp.PkgPath, inputs.viewType != GoPackagesDriverView) { return nil, fmt.Errorf("invalid use of internal package %q", path) } + // For syntax packages, the set of required imports is known and + // precomputed. For import packages (checkPackageForImport), imports are + // constructed lazily, because they may not have been needed if we could + // have imported from export data. + // + // TODO(rfindley): refactor to move this logic to the callsite. + if imports != nil { + imp, ok := imports[depPH.mp.PkgPath] + if !ok { + return nil, fmt.Errorf("missing import %s", id) + } + return imp, nil + } return b.getImportPackage(ctx, id) }), } @@ -1961,7 +1899,11 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ // over fixed syntax, which overflowed its file. So it's definitely // possible that we get here (it's hard to reason about fixing up the // AST). Nevertheless, it's a bug. - bug.Reportf("internal error: type checker error %q outside its Fset", e) + if pkg.hasFixedFiles() { + bug.Reportf("internal error: type checker error %q outside its Fset (fixed files)", e) + } else { + bug.Reportf("internal error: type checker error %q outside its Fset", e) + } continue } pgf, err := pkg.File(protocol.URIFromPath(posn.Filename)) @@ -1973,12 +1915,16 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ // package (the message would be rather confusing), but we do want to // report an error in the current package (golang/go#59005). 
if i == 0 { - bug.Reportf("internal error: could not locate file for primary type checker error %v: %v", e, err) + if pkg.hasFixedFiles() { + bug.Reportf("internal error: could not locate file for primary type checker error %v: %v (fixed files)", e, err) + } else { + bug.Reportf("internal error: could not locate file for primary type checker error %v: %v", e, err) + } } continue } - // debugging #65960 + // debugging golang/go#65960 // // At this point, we know 'start' IsValid, and // StartPosition(start) worked (with e.Fset). @@ -1987,21 +1933,33 @@ func typeErrorsToDiagnostics(pkg *syntaxPackage, inputs *typeCheckInputs, errs [ // is also in range for pgf.Tok, which means // the PosRange failure must be caused by 'end'. if pgf.Tok != e.Fset.File(start) { - bug.Reportf("internal error: inconsistent token.Files for pos") + if pkg.hasFixedFiles() { + bug.Reportf("internal error: inconsistent token.Files for pos (fixed files)") + } else { + bug.Reportf("internal error: inconsistent token.Files for pos") + } } if end == start { // Expand the end position to a more meaningful span. end = analysisinternal.TypeErrorEndPos(e.Fset, pgf.Src, start) - // debugging #65960 + // debugging golang/go#65960 if _, err := safetoken.Offset(pgf.Tok, end); err != nil { - bug.Reportf("TypeErrorEndPos returned invalid end: %v", err) + if pkg.hasFixedFiles() { + bug.Reportf("TypeErrorEndPos returned invalid end: %v (fixed files)", err) + } else { + bug.Reportf("TypeErrorEndPos returned invalid end: %v", err) + } } } else { - // debugging #65960 + // debugging golang/go#65960 if _, err := safetoken.Offset(pgf.Tok, end); err != nil { - bug.Reportf("ReadGo116ErrorData returned invalid end: %v", err) + if pkg.hasFixedFiles() { + bug.Reportf("ReadGo116ErrorData returned invalid end: %v (fixed files)", err) + } else { + bug.Reportf("ReadGo116ErrorData returned invalid end: %v", err) + } } } diff --git a/gopls/internal/cache/diagnostics.go b/gopls/internal/cache/diagnostics.go index 797ce961cd8..95b1b9f1c18 100644 --- a/gopls/internal/cache/diagnostics.go +++ b/gopls/internal/cache/diagnostics.go @@ -5,9 +5,11 @@ package cache import ( + "crypto/sha256" "encoding/json" "fmt" + "golang.org/x/tools/gopls/internal/file" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/bug" ) @@ -69,6 +71,30 @@ func (d *Diagnostic) String() string { return fmt.Sprintf("%v: %s", d.Range, d.Message) } +// Hash computes a hash to identify the diagnostic. +// The hash is for deduplicating within a file, so does not incorporate d.URI. 
+func (d *Diagnostic) Hash() file.Hash { + h := sha256.New() + for _, t := range d.Tags { + fmt.Fprintf(h, "tag: %s\n", t) + } + for _, r := range d.Related { + fmt.Fprintf(h, "related: %s %s %s\n", r.Location.URI, r.Message, r.Location.Range) + } + fmt.Fprintf(h, "code: %s\n", d.Code) + fmt.Fprintf(h, "codeHref: %s\n", d.CodeHref) + fmt.Fprintf(h, "message: %s\n", d.Message) + fmt.Fprintf(h, "range: %s\n", d.Range) + fmt.Fprintf(h, "severity: %s\n", d.Severity) + fmt.Fprintf(h, "source: %s\n", d.Source) + if d.BundledFixes != nil { + fmt.Fprintf(h, "fixes: %s\n", *d.BundledFixes) + } + var hash [sha256.Size]byte + h.Sum(hash[:0]) + return hash +} + type DiagnosticSource string const ( diff --git a/gopls/internal/cache/load.go b/gopls/internal/cache/load.go index 9373766b413..9987def6392 100644 --- a/gopls/internal/cache/load.go +++ b/gopls/internal/cache/load.go @@ -156,6 +156,10 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc } } + if err != nil { + return fmt.Errorf("packages.Load error: %w", err) + } + if standalone { // Handle standalone package result. // @@ -173,20 +177,29 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc if s.view.typ == GoPackagesDriverView { errorf = fmt.Errorf // all bets are off } + for _, pkg := range pkgs { + // Don't report bugs if any packages have errors. + // For example: given go list errors, go/packages may synthesize a + // package with ID equal to the query. + if len(pkg.Errors) > 0 { + errorf = fmt.Errorf + break + } + } var standalonePkg *packages.Package for _, pkg := range pkgs { if pkg.ID == "command-line-arguments" { if standalonePkg != nil { - return errorf("internal error: go/packages returned multiple standalone packages") + return errorf("go/packages returned multiple standalone packages") } standalonePkg = pkg } else if packagesinternal.GetForTest(pkg) == "" && !strings.HasSuffix(pkg.ID, ".test") { - return errorf("internal error: go/packages returned unexpected package %q for standalone file", pkg.ID) + return errorf("go/packages returned unexpected package %q for standalone file", pkg.ID) } } if standalonePkg == nil { - return errorf("internal error: go/packages failed to return non-test standalone package") + return errorf("go/packages failed to return non-test standalone package") } if len(standalonePkg.CompiledGoFiles) > 0 { pkgs = []*packages.Package{standalonePkg} @@ -196,10 +209,7 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc } if len(pkgs) == 0 { - if err == nil { - err = errNoPackages - } - return fmt.Errorf("packages.Load error: %w", err) + return fmt.Errorf("packages.Load error: %w", errNoPackages) } moduleErrs := make(map[string][]packages.Error) // module path -> errors @@ -249,6 +259,12 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc s.setBuiltin(pkg.GoFiles[0]) continue } + if packagesinternal.GetForTest(pkg) == "builtin" { + // We don't care about test variants of builtin. This caused test + // failures in https://go.dev/cl/620196, when a test file was added to + // builtin. + continue + } // Skip test main packages. 
if isTestMain(pkg, s.view.folder.Env.GOCACHE) { continue @@ -301,7 +317,6 @@ func (s *Snapshot) load(ctx context.Context, allowNetwork bool, scopes ...loadSc workspacePackages := computeWorkspacePackagesLocked(ctx, s, meta) s.meta = meta s.workspacePackages = workspacePackages - s.resetActivePackagesLocked() s.mu.Unlock() diff --git a/gopls/internal/cache/metadata/graph.go b/gopls/internal/cache/metadata/graph.go index f09822d3575..4b846df53be 100644 --- a/gopls/internal/cache/metadata/graph.go +++ b/gopls/internal/cache/metadata/graph.go @@ -27,6 +27,11 @@ type Graph struct { IDs map[protocol.DocumentURI][]PackageID } +// Metadata implements the [Source] interface +func (g *Graph) Metadata(id PackageID) *Package { + return g.Packages[id] +} + // Update creates a new Graph containing the result of applying the given // updates to the receiver, though the receiver is not itself mutated. As a // special case, if updates is empty, Update just returns the receiver. diff --git a/gopls/internal/cache/metadata/metadata.go b/gopls/internal/cache/metadata/metadata.go index e42aac304f6..81b6dc57e1f 100644 --- a/gopls/internal/cache/metadata/metadata.go +++ b/gopls/internal/cache/metadata/metadata.go @@ -167,9 +167,6 @@ func (mp *Package) IsIntermediateTestVariant() bool { } // A Source maps package IDs to metadata for the packages. -// -// TODO(rfindley): replace this with a concrete metadata graph, once it is -// exposed from the snapshot. type Source interface { // Metadata returns the [Package] for the given package ID, or nil if it does // not exist. diff --git a/gopls/internal/cache/mod_tidy.go b/gopls/internal/cache/mod_tidy.go index 8532d1c7497..67a3e9c7eb9 100644 --- a/gopls/internal/cache/mod_tidy.go +++ b/gopls/internal/cache/mod_tidy.go @@ -267,7 +267,7 @@ func missingModuleDiagnostics(ctx context.Context, snapshot *Snapshot, pm *Parse // Example: // // import ( - // "golang.org/x/tools/go/expect" + // "golang.org/x/tools/internal/expect" // "golang.org/x/tools/go/packages" // ) // They both are related to the same module: "golang.org/x/tools". diff --git a/gopls/internal/cache/package.go b/gopls/internal/cache/package.go index 5c0da7e6af0..3477d522cee 100644 --- a/gopls/internal/cache/package.go +++ b/gopls/internal/cache/package.go @@ -10,6 +10,7 @@ import ( "go/scanner" "go/token" "go/types" + "slices" "sync" "golang.org/x/tools/gopls/internal/cache/metadata" @@ -87,6 +88,14 @@ func (p *syntaxPackage) tests() *testfuncs.Index { return p._tests } +// hasFixedFiles reports whether there are any 'fixed' compiled go files in the +// package. +// +// Intended to be used to refine bug reports. 
+func (p *syntaxPackage) hasFixedFiles() bool { + return slices.ContainsFunc(p.compiledGoFiles, (*parsego.File).Fixed) +} + func (p *Package) String() string { return string(p.metadata.ID) } func (p *Package) Metadata() *metadata.Package { return p.metadata } diff --git a/gopls/internal/cache/parse.go b/gopls/internal/cache/parse.go index 56130c6e1fb..d733ca76799 100644 --- a/gopls/internal/cache/parse.go +++ b/gopls/internal/cache/parse.go @@ -40,6 +40,6 @@ func parseGoImpl(ctx context.Context, fset *token.FileSet, fh file.Handle, mode if ctx.Err() != nil { return nil, ctx.Err() } - pgf, _ := parsego.Parse(ctx, fset, fh.URI(), content, mode, purgeFuncBodies) + pgf, _ := parsego.Parse(ctx, fset, fh.URI(), content, mode, purgeFuncBodies) // ignore 'fixes' return pgf, nil } diff --git a/gopls/internal/cache/parsego/file.go b/gopls/internal/cache/parsego/file.go index b03929e6c86..1dc46da823a 100644 --- a/gopls/internal/cache/parsego/file.go +++ b/gopls/internal/cache/parsego/file.go @@ -9,6 +9,7 @@ import ( "go/parser" "go/scanner" "go/token" + "sync" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/safetoken" @@ -18,6 +19,12 @@ import ( type File struct { URI protocol.DocumentURI Mode parser.Mode + + // File is the file resulting from parsing. It is always non-nil. + // + // Clients must not access the AST's legacy ast.Object-related + // fields without first ensuring that [File.Resolve] was + // already called. File *ast.File Tok *token.File // Source code used to build the AST. It may be different from the @@ -39,13 +46,16 @@ type File struct { fixedAST bool Mapper *protocol.Mapper // may map fixed Src, not file content ParseErr scanner.ErrorList + + // resolveOnce guards the lazy ast.Object resolution. See [File.Resolve]. + resolveOnce sync.Once } -func (pgf File) String() string { return string(pgf.URI) } +func (pgf *File) String() string { return string(pgf.URI) } // Fixed reports whether p was "Fixed", meaning that its source or positions // may not correlate with the original file. -func (pgf File) Fixed() bool { +func (pgf *File) Fixed() bool { return pgf.fixedSrc || pgf.fixedAST } @@ -81,6 +91,11 @@ func (pgf *File) NodeRange(node ast.Node) (protocol.Range, error) { return pgf.Mapper.NodeRange(pgf.Tok, node) } +// NodeOffsets returns offsets for the ast.Node. +func (pgf *File) NodeOffsets(node ast.Node) (start int, end int, _ error) { + return safetoken.Offsets(pgf.Tok, node.Pos(), node.End()) +} + // NodeMappedRange returns a MappedRange for the ast.Node interval in this file. // A MappedRange can be converted to any other form. func (pgf *File) NodeMappedRange(node ast.Node) (protocol.MappedRange, error) { @@ -100,3 +115,28 @@ func (pgf *File) RangePos(r protocol.Range) (token.Pos, token.Pos, error) { } return pgf.Tok.Pos(start), pgf.Tok.Pos(end), nil } + +// Resolve lazily resolves ast.Ident.Objects in the enclosed syntax tree. +// +// Resolve must be called before accessing any of: +// - pgf.File.Scope +// - pgf.File.Unresolved +// - Ident.Obj, for any Ident in pgf.File +func (pgf *File) Resolve() { + pgf.resolveOnce.Do(func() { + if pgf.File.Scope != nil { + return // already resolved by parsing without SkipObjectResolution. + } + defer func() { + // (panic handler duplicated from go/parser) + if e := recover(); e != nil { + // A bailout indicates the resolution stack has exceeded max depth. 
+ if _, ok := e.(bailout); !ok { + panic(e) + } + } + }() + declErr := func(token.Pos, string) {} + resolveFile(pgf.File, pgf.Tok, declErr) + }) +} diff --git a/gopls/internal/cache/parsego/parse.go b/gopls/internal/cache/parsego/parse.go index 82f3eeebeec..08f9c6bbe85 100644 --- a/gopls/internal/cache/parsego/parse.go +++ b/gopls/internal/cache/parsego/parse.go @@ -2,6 +2,14 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:generate go run resolver_gen.go + +// The parsego package defines the [File] type, a wrapper around a go/ast.File +// that is useful for answering LSP queries. Notably, it bundles the +// *token.File and *protocol.Mapper necessary for token.Pos locations to and +// from UTF-16 LSP positions. +// +// Run `go generate` to update resolver.go from GOROOT. package parsego import ( @@ -51,16 +59,36 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s // We passed a byte slice, so the only possible error is a parse error. parseErr = err.(scanner.ErrorList) } + // Inv: file != nil. - tok := fset.File(file.Pos()) - if tok == nil { - // file.Pos is the location of the package declaration (issue #53202). If there was - // none, we can't find the token.File that ParseFile created, and we - // have no choice but to recreate it. - tok = fset.AddFile(uri.Path(), -1, len(src)) - tok.SetLinesForContent(src) + // Workaround for #70162 (missing File{Start,End} when + // parsing empty file) with go1.23. + // + // When parsing an empty file, or one without a valid + // package declaration, the go1.23 parser bails out before + // setting FileStart and End. + // + // This leaves us no way to find the original + // token.File that ParseFile created, so as a + // workaround, we recreate the token.File, and + // populate the FileStart and FileEnd fields. + // + // See also #53202. + tokenFile := func(file *ast.File) *token.File { + tok := fset.File(file.FileStart) + if tok == nil { + tok = fset.AddFile(uri.Path(), -1, len(src)) + tok.SetLinesForContent(src) + if file.FileStart.IsValid() { + file.FileStart = token.Pos(tok.Base()) + file.FileEnd = token.Pos(tok.Base() + tok.Size()) + } + } + return tok } + tok := tokenFile(file) + fixedSrc := false fixedAST := false // If there were parse errors, attempt to fix them up. @@ -88,15 +116,13 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s } newFile, newErr := parser.ParseFile(fset, uri.Path(), newSrc, mode) - if newFile == nil { - break // no progress - } + assert(newFile != nil, "ParseFile returned nil") // I/O error can't happen // Maintain the original parseError so we don't try formatting the // doctored file. file = newFile src = newSrc - tok = fset.File(file.Pos()) + tok = tokenFile(file) // Only now that we accept the fix do we record the src fix from above. fixes = append(fixes, srcFix) @@ -114,6 +140,7 @@ func Parse(ctx context.Context, fset *token.FileSet, uri protocol.DocumentURI, s } } } + assert(file != nil, "nil *ast.File") return &File{ URI: uri, diff --git a/gopls/internal/cache/parsego/resolver.go b/gopls/internal/cache/parsego/resolver.go new file mode 100644 index 00000000000..450fcc0a293 --- /dev/null +++ b/gopls/internal/cache/parsego/resolver.go @@ -0,0 +1,614 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by resolver_gen.go. DO NOT EDIT. 
+ +package parsego + +import ( + "fmt" + "go/ast" + "go/token" + "strings" +) + +const debugResolve = false + +// resolveFile walks the given file to resolve identifiers within the file +// scope, updating ast.Ident.Obj fields with declaration information. +// +// If declErr is non-nil, it is used to report declaration errors during +// resolution. tok is used to format position in error messages. +func resolveFile(file *ast.File, handle *token.File, declErr func(token.Pos, string)) { + pkgScope := ast.NewScope(nil) + r := &resolver{ + handle: handle, + declErr: declErr, + topScope: pkgScope, + pkgScope: pkgScope, + depth: 1, + } + + for _, decl := range file.Decls { + ast.Walk(r, decl) + } + + r.closeScope() + assert(r.topScope == nil, "unbalanced scopes") + assert(r.labelScope == nil, "unbalanced label scopes") + + // resolve global identifiers within the same file + i := 0 + for _, ident := range r.unresolved { + // i <= index for current ident + assert(ident.Obj == unresolved, "object already resolved") + ident.Obj = r.pkgScope.Lookup(ident.Name) // also removes unresolved sentinel + if ident.Obj == nil { + r.unresolved[i] = ident + i++ + } else if debugResolve { + pos := ident.Obj.Decl.(interface{ Pos() token.Pos }).Pos() + r.trace("resolved %s@%v to package object %v", ident.Name, ident.Pos(), pos) + } + } + file.Scope = r.pkgScope + file.Unresolved = r.unresolved[0:i] +} + +const maxScopeDepth int = 1e3 + +type resolver struct { + handle *token.File + declErr func(token.Pos, string) + + // Ordinary identifier scopes + pkgScope *ast.Scope // pkgScope.Outer == nil + topScope *ast.Scope // top-most scope; may be pkgScope + unresolved []*ast.Ident // unresolved identifiers + depth int // scope depth + + // Label scopes + // (maintained by open/close LabelScope) + labelScope *ast.Scope // label scope for current function + targetStack [][]*ast.Ident // stack of unresolved labels +} + +func (r *resolver) trace(format string, args ...any) { + fmt.Println(strings.Repeat(". ", r.depth) + r.sprintf(format, args...)) +} + +func (r *resolver) sprintf(format string, args ...any) string { + for i, arg := range args { + switch arg := arg.(type) { + case token.Pos: + args[i] = r.handle.Position(arg) + } + } + return fmt.Sprintf(format, args...) 
+} + +func (r *resolver) openScope(pos token.Pos) { + r.depth++ + if r.depth > maxScopeDepth { + panic(bailout{pos: pos, msg: "exceeded max scope depth during object resolution"}) + } + if debugResolve { + r.trace("opening scope @%v", pos) + } + r.topScope = ast.NewScope(r.topScope) +} + +func (r *resolver) closeScope() { + r.depth-- + if debugResolve { + r.trace("closing scope") + } + r.topScope = r.topScope.Outer +} + +func (r *resolver) openLabelScope() { + r.labelScope = ast.NewScope(r.labelScope) + r.targetStack = append(r.targetStack, nil) +} + +func (r *resolver) closeLabelScope() { + // resolve labels + n := len(r.targetStack) - 1 + scope := r.labelScope + for _, ident := range r.targetStack[n] { + ident.Obj = scope.Lookup(ident.Name) + if ident.Obj == nil && r.declErr != nil { + r.declErr(ident.Pos(), fmt.Sprintf("label %s undefined", ident.Name)) + } + } + // pop label scope + r.targetStack = r.targetStack[0:n] + r.labelScope = r.labelScope.Outer +} + +func (r *resolver) declare(decl, data any, scope *ast.Scope, kind ast.ObjKind, idents ...*ast.Ident) { + for _, ident := range idents { + if ident.Obj != nil { + panic(fmt.Sprintf("%v: identifier %s already declared or resolved", ident.Pos(), ident.Name)) + } + obj := ast.NewObj(kind, ident.Name) + // remember the corresponding declaration for redeclaration + // errors and global variable resolution/typechecking phase + obj.Decl = decl + obj.Data = data + // Identifiers (for receiver type parameters) are written to the scope, but + // never set as the resolved object. See go.dev/issue/50956. + if _, ok := decl.(*ast.Ident); !ok { + ident.Obj = obj + } + if ident.Name != "_" { + if debugResolve { + r.trace("declaring %s@%v", ident.Name, ident.Pos()) + } + if alt := scope.Insert(obj); alt != nil && r.declErr != nil { + prevDecl := "" + if pos := alt.Pos(); pos.IsValid() { + prevDecl = r.sprintf("\n\tprevious declaration at %v", pos) + } + r.declErr(ident.Pos(), fmt.Sprintf("%s redeclared in this block%s", ident.Name, prevDecl)) + } + } + } +} + +func (r *resolver) shortVarDecl(decl *ast.AssignStmt) { + // Go spec: A short variable declaration may redeclare variables + // provided they were originally declared in the same block with + // the same type, and at least one of the non-blank variables is new. + n := 0 // number of new variables + for _, x := range decl.Lhs { + if ident, isIdent := x.(*ast.Ident); isIdent { + assert(ident.Obj == nil, "identifier already declared or resolved") + obj := ast.NewObj(ast.Var, ident.Name) + // remember corresponding assignment for other tools + obj.Decl = decl + ident.Obj = obj + if ident.Name != "_" { + if debugResolve { + r.trace("declaring %s@%v", ident.Name, ident.Pos()) + } + if alt := r.topScope.Insert(obj); alt != nil { + ident.Obj = alt // redeclaration + } else { + n++ // new declaration + } + } + } + } + if n == 0 && r.declErr != nil { + r.declErr(decl.Lhs[0].Pos(), "no new variables on left side of :=") + } +} + +// The unresolved object is a sentinel to mark identifiers that have been added +// to the list of unresolved identifiers. The sentinel is only used for verifying +// internal consistency. +var unresolved = new(ast.Object) + +// If x is an identifier, resolve attempts to resolve x by looking up +// the object it denotes. If no object is found and collectUnresolved is +// set, x is marked as unresolved and collected in the list of unresolved +// identifiers. 
+func (r *resolver) resolve(ident *ast.Ident, collectUnresolved bool) { + if ident.Obj != nil { + panic(r.sprintf("%v: identifier %s already declared or resolved", ident.Pos(), ident.Name)) + } + // '_' should never refer to existing declarations, because it has special + // handling in the spec. + if ident.Name == "_" { + return + } + for s := r.topScope; s != nil; s = s.Outer { + if obj := s.Lookup(ident.Name); obj != nil { + if debugResolve { + r.trace("resolved %v:%s to %v", ident.Pos(), ident.Name, obj) + } + assert(obj.Name != "", "obj with no name") + // Identifiers (for receiver type parameters) are written to the scope, + // but never set as the resolved object. See go.dev/issue/50956. + if _, ok := obj.Decl.(*ast.Ident); !ok { + ident.Obj = obj + } + return + } + } + // all local scopes are known, so any unresolved identifier + // must be found either in the file scope, package scope + // (perhaps in another file), or universe scope --- collect + // them so that they can be resolved later + if collectUnresolved { + ident.Obj = unresolved + r.unresolved = append(r.unresolved, ident) + } +} + +func (r *resolver) walkExprs(list []ast.Expr) { + for _, node := range list { + ast.Walk(r, node) + } +} + +func (r *resolver) walkLHS(list []ast.Expr) { + for _, expr := range list { + expr := ast.Unparen(expr) + if _, ok := expr.(*ast.Ident); !ok && expr != nil { + ast.Walk(r, expr) + } + } +} + +func (r *resolver) walkStmts(list []ast.Stmt) { + for _, stmt := range list { + ast.Walk(r, stmt) + } +} + +func (r *resolver) Visit(node ast.Node) ast.Visitor { + if debugResolve && node != nil { + r.trace("node %T@%v", node, node.Pos()) + } + + switch n := node.(type) { + + // Expressions. + case *ast.Ident: + r.resolve(n, true) + + case *ast.FuncLit: + r.openScope(n.Pos()) + defer r.closeScope() + r.walkFuncType(n.Type) + r.walkBody(n.Body) + + case *ast.SelectorExpr: + ast.Walk(r, n.X) + // Note: don't try to resolve n.Sel, as we don't support qualified + // resolution. + + case *ast.StructType: + r.openScope(n.Pos()) + defer r.closeScope() + r.walkFieldList(n.Fields, ast.Var) + + case *ast.FuncType: + r.openScope(n.Pos()) + defer r.closeScope() + r.walkFuncType(n) + + case *ast.CompositeLit: + if n.Type != nil { + ast.Walk(r, n.Type) + } + for _, e := range n.Elts { + if kv, _ := e.(*ast.KeyValueExpr); kv != nil { + // See go.dev/issue/45160: try to resolve composite lit keys, but don't + // collect them as unresolved if resolution failed. This replicates + // existing behavior when resolving during parsing. 
+ if ident, _ := kv.Key.(*ast.Ident); ident != nil { + r.resolve(ident, false) + } else { + ast.Walk(r, kv.Key) + } + ast.Walk(r, kv.Value) + } else { + ast.Walk(r, e) + } + } + + case *ast.InterfaceType: + r.openScope(n.Pos()) + defer r.closeScope() + r.walkFieldList(n.Methods, ast.Fun) + + // Statements + case *ast.LabeledStmt: + r.declare(n, nil, r.labelScope, ast.Lbl, n.Label) + ast.Walk(r, n.Stmt) + + case *ast.AssignStmt: + r.walkExprs(n.Rhs) + if n.Tok == token.DEFINE { + r.shortVarDecl(n) + } else { + r.walkExprs(n.Lhs) + } + + case *ast.BranchStmt: + // add to list of unresolved targets + if n.Tok != token.FALLTHROUGH && n.Label != nil { + depth := len(r.targetStack) - 1 + r.targetStack[depth] = append(r.targetStack[depth], n.Label) + } + + case *ast.BlockStmt: + r.openScope(n.Pos()) + defer r.closeScope() + r.walkStmts(n.List) + + case *ast.IfStmt: + r.openScope(n.Pos()) + defer r.closeScope() + if n.Init != nil { + ast.Walk(r, n.Init) + } + ast.Walk(r, n.Cond) + ast.Walk(r, n.Body) + if n.Else != nil { + ast.Walk(r, n.Else) + } + + case *ast.CaseClause: + r.walkExprs(n.List) + r.openScope(n.Pos()) + defer r.closeScope() + r.walkStmts(n.Body) + + case *ast.SwitchStmt: + r.openScope(n.Pos()) + defer r.closeScope() + if n.Init != nil { + ast.Walk(r, n.Init) + } + if n.Tag != nil { + // The scope below reproduces some unnecessary behavior of the parser, + // opening an extra scope in case this is a type switch. It's not needed + // for expression switches. + // TODO: remove this once we've matched the parser resolution exactly. + if n.Init != nil { + r.openScope(n.Tag.Pos()) + defer r.closeScope() + } + ast.Walk(r, n.Tag) + } + if n.Body != nil { + r.walkStmts(n.Body.List) + } + + case *ast.TypeSwitchStmt: + if n.Init != nil { + r.openScope(n.Pos()) + defer r.closeScope() + ast.Walk(r, n.Init) + } + r.openScope(n.Assign.Pos()) + defer r.closeScope() + ast.Walk(r, n.Assign) + // s.Body consists only of case clauses, so does not get its own + // scope. + if n.Body != nil { + r.walkStmts(n.Body.List) + } + + case *ast.CommClause: + r.openScope(n.Pos()) + defer r.closeScope() + if n.Comm != nil { + ast.Walk(r, n.Comm) + } + r.walkStmts(n.Body) + + case *ast.SelectStmt: + // as for switch statements, select statement bodies don't get their own + // scope. + if n.Body != nil { + r.walkStmts(n.Body.List) + } + + case *ast.ForStmt: + r.openScope(n.Pos()) + defer r.closeScope() + if n.Init != nil { + ast.Walk(r, n.Init) + } + if n.Cond != nil { + ast.Walk(r, n.Cond) + } + if n.Post != nil { + ast.Walk(r, n.Post) + } + ast.Walk(r, n.Body) + + case *ast.RangeStmt: + r.openScope(n.Pos()) + defer r.closeScope() + ast.Walk(r, n.X) + var lhs []ast.Expr + if n.Key != nil { + lhs = append(lhs, n.Key) + } + if n.Value != nil { + lhs = append(lhs, n.Value) + } + if len(lhs) > 0 { + if n.Tok == token.DEFINE { + // Note: we can't exactly match the behavior of object resolution + // during the parsing pass here, as it uses the position of the RANGE + // token for the RHS OpPos. That information is not contained within + // the AST. + as := &ast.AssignStmt{ + Lhs: lhs, + Tok: token.DEFINE, + TokPos: n.TokPos, + Rhs: []ast.Expr{&ast.UnaryExpr{Op: token.RANGE, X: n.X}}, + } + // TODO(rFindley): this walkLHS reproduced the parser resolution, but + // is it necessary? By comparison, for a normal AssignStmt we don't + // walk the LHS in case there is an invalid identifier list. 
+ r.walkLHS(lhs) + r.shortVarDecl(as) + } else { + r.walkExprs(lhs) + } + } + ast.Walk(r, n.Body) + + // Declarations + case *ast.GenDecl: + switch n.Tok { + case token.CONST, token.VAR: + for i, spec := range n.Specs { + spec := spec.(*ast.ValueSpec) + kind := ast.Con + if n.Tok == token.VAR { + kind = ast.Var + } + r.walkExprs(spec.Values) + if spec.Type != nil { + ast.Walk(r, spec.Type) + } + r.declare(spec, i, r.topScope, kind, spec.Names...) + } + case token.TYPE: + for _, spec := range n.Specs { + spec := spec.(*ast.TypeSpec) + // Go spec: The scope of a type identifier declared inside a function begins + // at the identifier in the TypeSpec and ends at the end of the innermost + // containing block. + r.declare(spec, nil, r.topScope, ast.Typ, spec.Name) + if spec.TypeParams != nil { + r.openScope(spec.Pos()) + defer r.closeScope() + r.walkTParams(spec.TypeParams) + } + ast.Walk(r, spec.Type) + } + } + + case *ast.FuncDecl: + // Open the function scope. + r.openScope(n.Pos()) + defer r.closeScope() + + r.walkRecv(n.Recv) + + // Type parameters are walked normally: they can reference each other, and + // can be referenced by normal parameters. + if n.Type.TypeParams != nil { + r.walkTParams(n.Type.TypeParams) + // TODO(rFindley): need to address receiver type parameters. + } + + // Resolve and declare parameters in a specific order to get duplicate + // declaration errors in the correct location. + r.resolveList(n.Type.Params) + r.resolveList(n.Type.Results) + r.declareList(n.Recv, ast.Var) + r.declareList(n.Type.Params, ast.Var) + r.declareList(n.Type.Results, ast.Var) + + r.walkBody(n.Body) + if n.Recv == nil && n.Name.Name != "init" { + r.declare(n, nil, r.pkgScope, ast.Fun, n.Name) + } + + default: + return r + } + + return nil +} + +func (r *resolver) walkFuncType(typ *ast.FuncType) { + // typ.TypeParams must be walked separately for FuncDecls. + r.resolveList(typ.Params) + r.resolveList(typ.Results) + r.declareList(typ.Params, ast.Var) + r.declareList(typ.Results, ast.Var) +} + +func (r *resolver) resolveList(list *ast.FieldList) { + if list == nil { + return + } + for _, f := range list.List { + if f.Type != nil { + ast.Walk(r, f.Type) + } + } +} + +func (r *resolver) declareList(list *ast.FieldList, kind ast.ObjKind) { + if list == nil { + return + } + for _, f := range list.List { + r.declare(f, nil, r.topScope, kind, f.Names...) + } +} + +func (r *resolver) walkRecv(recv *ast.FieldList) { + // If our receiver has receiver type parameters, we must declare them before + // trying to resolve the rest of the receiver, and avoid re-resolving the + // type parameter identifiers. + if recv == nil || len(recv.List) == 0 { + return // nothing to do + } + typ := recv.List[0].Type + if ptr, ok := typ.(*ast.StarExpr); ok { + typ = ptr.X + } + + var declareExprs []ast.Expr // exprs to declare + var resolveExprs []ast.Expr // exprs to resolve + switch typ := typ.(type) { + case *ast.IndexExpr: + declareExprs = []ast.Expr{typ.Index} + resolveExprs = append(resolveExprs, typ.X) + case *ast.IndexListExpr: + declareExprs = typ.Indices + resolveExprs = append(resolveExprs, typ.X) + default: + resolveExprs = append(resolveExprs, typ) + } + for _, expr := range declareExprs { + if id, _ := expr.(*ast.Ident); id != nil { + r.declare(expr, nil, r.topScope, ast.Typ, id) + } else { + // The receiver type parameter expression is invalid, but try to resolve + // it anyway for consistency. 
+ resolveExprs = append(resolveExprs, expr) + } + } + for _, expr := range resolveExprs { + if expr != nil { + ast.Walk(r, expr) + } + } + // The receiver is invalid, but try to resolve it anyway for consistency. + for _, f := range recv.List[1:] { + if f.Type != nil { + ast.Walk(r, f.Type) + } + } +} + +func (r *resolver) walkFieldList(list *ast.FieldList, kind ast.ObjKind) { + if list == nil { + return + } + r.resolveList(list) + r.declareList(list, kind) +} + +// walkTParams is like walkFieldList, but declares type parameters eagerly so +// that they may be resolved in the constraint expressions held in the field +// Type. +func (r *resolver) walkTParams(list *ast.FieldList) { + r.declareList(list, ast.Typ) + r.resolveList(list) +} + +func (r *resolver) walkBody(body *ast.BlockStmt) { + if body == nil { + return + } + r.openLabelScope() + defer r.closeLabelScope() + r.walkStmts(body.List) +} diff --git a/gopls/internal/cache/parsego/resolver_compat.go b/gopls/internal/cache/parsego/resolver_compat.go new file mode 100644 index 00000000000..0d9a3e19e3b --- /dev/null +++ b/gopls/internal/cache/parsego/resolver_compat.go @@ -0,0 +1,24 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains declarations needed for compatibility with resolver.go +// copied from GOROOT. + +package parsego + +import "go/token" + +// assert panics with the given msg if cond is not true. +func assert(cond bool, msg string) { + if !cond { + panic(msg) + } +} + +// A bailout panic is raised to indicate early termination. pos and msg are +// only populated when bailing out of object resolution. +type bailout struct { + pos token.Pos + msg string +} diff --git a/gopls/internal/cache/parsego/resolver_gen.go b/gopls/internal/cache/parsego/resolver_gen.go new file mode 100644 index 00000000000..7eb9f563193 --- /dev/null +++ b/gopls/internal/cache/parsego/resolver_gen.go @@ -0,0 +1,32 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build ignore + +package main + +import ( + "bytes" + "log" + "os" + "os/exec" + "path/filepath" + "strings" +) + +func main() { + output, err := exec.Command("go", "env", "GOROOT").Output() + if err != nil { + log.Fatalf("resolving GOROOT: %v", err) + } + goroot := strings.TrimSpace(string(output)) + data, err := os.ReadFile(filepath.Join(goroot, "src/go/parser/resolver.go")) + if err != nil { + log.Fatalf("reading resolver.go: %v", err) + } + data = bytes.Replace(data, []byte("\npackage parser"), []byte("\n// Code generated by resolver_gen.go. DO NOT EDIT.\n\npackage parsego"), 1) + if err := os.WriteFile("resolver.go", data, 0666); err != nil { + log.Fatalf("writing resolver.go: %v", err) + } +} diff --git a/gopls/internal/cache/parsego/resolver_test.go b/gopls/internal/cache/parsego/resolver_test.go new file mode 100644 index 00000000000..44908b7ec88 --- /dev/null +++ b/gopls/internal/cache/parsego/resolver_test.go @@ -0,0 +1,158 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package parsego + +import ( + "go/ast" + "go/types" + "os" + "strings" + "testing" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/internal/testenv" +) + +// TestGoplsSourceDoesNotUseObjectResolution verifies that gopls does not +// read fields that are set during syntactic object resolution, except in +// locations where we can guarantee that object resolution has occurred. This +// is achieved via static analysis of gopls source code to find references to +// the legacy Object symbols, checking the results against an allowlist +// +// Reading these fields would introduce a data race, due to the lazy +// resolution implemented by File.Resolve. +func TestGoplsSourceDoesNotUseObjectResolution(t *testing.T) { + + testenv.NeedsGoPackages(t) + testenv.NeedsLocalXTools(t) + + cfg := &packages.Config{ + Mode: packages.NeedName | packages.NeedModule | packages.NeedCompiledGoFiles | packages.NeedTypes | packages.NeedTypesInfo | packages.NeedSyntax | packages.NeedImports | packages.NeedDeps, + } + cfg.Env = os.Environ() + cfg.Env = append(cfg.Env, + "GOPACKAGESDRIVER=off", + "GOWORK=off", // necessary for -mod=mod below + "GOFLAGS=-mod=mod", + ) + + pkgs, err := packages.Load(cfg, + "go/ast", + "golang.org/x/tools/go/ast/astutil", + "golang.org/x/tools/gopls/...") + + if err != nil { + t.Fatal(err) + } + var astPkg, astutilPkg *packages.Package + for _, pkg := range pkgs { + switch pkg.PkgPath { + case "go/ast": + astPkg = pkg + case "golang.org/x/tools/go/ast/astutil": + astutilPkg = pkg + } + } + if astPkg == nil { + t.Fatal("missing package go/ast") + } + if astutilPkg == nil { + t.Fatal("missing package golang.org/x/tools/go/ast/astutil") + } + + File := astPkg.Types.Scope().Lookup("File").Type() + Ident := astPkg.Types.Scope().Lookup("Ident").Type() + + Scope, _, _ := types.LookupFieldOrMethod(File, true, astPkg.Types, "Scope") + assert(Scope != nil, "nil Scope") + Unresolved, _, _ := types.LookupFieldOrMethod(File, true, astPkg.Types, "Unresolved") + assert(Unresolved != nil, "nil unresolved") + Obj, _, _ := types.LookupFieldOrMethod(Ident, true, astPkg.Types, "Obj") + assert(Obj != nil, "nil Obj") + UsesImport := astutilPkg.Types.Scope().Lookup("UsesImport") + assert(UsesImport != nil, "nil UsesImport") + + disallowed := map[types.Object]bool{ + Scope: true, + Unresolved: true, + Obj: true, + UsesImport: true, + } + + // exceptions catalogues packages or declarations that are allowed to use + // forbidden symbols, with a rationale. + // + // - If the exception ends with '/', it is a prefix. + // - If it ends with a qualified name, it is a declaration. + // - Otherwise, it is an exact package path. + // + // TODO(rfindley): some sort of callgraph analysis would make these + // exceptions much easier to maintain. 
+ exceptions := []string{ + "golang.org/x/tools/go/analysis/passes/", // analyzers may rely on object resolution + "golang.org/x/tools/gopls/internal/analysis/simplifyslice", // restrict ourselves to one blessed analyzer + "golang.org/x/tools/gopls/internal/cache/parsego", // used by parsego.File.Resolve, of course + "golang.org/x/tools/gopls/internal/golang.builtinDecl", // the builtin file is resolved + "golang.org/x/tools/gopls/internal/golang.NewBuiltinSignature", // ditto + "golang.org/x/tools/gopls/internal/golang/completion.builtinArgKind", // ditto + "golang.org/x/tools/internal/imports", // goimports does its own parsing + "golang.org/x/tools/go/ast/astutil.UsesImport", // disallowed + "golang.org/x/tools/go/ast/astutil.isTopName", // only reached from astutil.UsesImport + "go/ast", + "go/parser", + "go/doc", // manually verified that our usage is safe + } + + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + for _, exception := range exceptions { + if strings.HasSuffix(exception, "/") { + if strings.HasPrefix(pkg.PkgPath, exception) { + return + } + } else if pkg.PkgPath == exception { + return + } + } + + searchUses: + for ident, obj := range pkg.TypesInfo.Uses { + if disallowed[obj] { + decl := findEnclosingFuncDecl(ident, pkg) + if decl == "" { + posn := safetoken.Position(pkg.Fset.File(ident.Pos()), ident.Pos()) + t.Fatalf("%s: couldn't find enclosing decl for use of %s", posn, ident.Name) + } + qualified := pkg.PkgPath + "." + decl + for _, exception := range exceptions { + if exception == qualified { + continue searchUses + } + } + posn := safetoken.StartPosition(pkg.Fset, ident.Pos()) + t.Errorf("%s: forbidden use of %v in %s", posn, obj, qualified) + } + } + }) +} + +// findEnclosingFuncDecl finds the name of the func decl enclosing the usage, +// or "". +// +// (Usage could theoretically exist in e.g. var initializers, but that would be +// odd.) +func findEnclosingFuncDecl(ident *ast.Ident, pkg *packages.Package) string { + for _, file := range pkg.Syntax { + if file.FileStart <= ident.Pos() && ident.Pos() < file.FileEnd { + path, _ := astutil.PathEnclosingInterval(file, ident.Pos(), ident.End()) + decl, ok := path[len(path)-2].(*ast.FuncDecl) + if ok { + return decl.Name.Name + } + } + } + return "" +} diff --git a/gopls/internal/cache/port.go b/gopls/internal/cache/port.go index e62ebe29903..40005bcf6d4 100644 --- a/gopls/internal/cache/port.go +++ b/gopls/internal/cache/port.go @@ -141,6 +141,11 @@ var ( func (p port) matches(path string, content []byte) bool { ctxt := build.Default // make a copy ctxt.UseAllFiles = false + path = filepath.Clean(path) + if !filepath.IsAbs(path) { + bug.Reportf("non-abs file path %q", path) + return false // fail closed + } dir, name := filepath.Split(path) // The only virtualized operation called by MatchFile is OpenFile. diff --git a/gopls/internal/cache/session.go b/gopls/internal/cache/session.go index 65ba7e69d0a..5947b373b16 100644 --- a/gopls/internal/cache/session.go +++ b/gopls/internal/cache/session.go @@ -240,26 +240,27 @@ func (s *Session) createView(ctx context.Context, def *viewDefinition) (*View, * s.snapshotWG.Add(1) v.snapshot = &Snapshot{ - view: v, - backgroundCtx: backgroundCtx, - cancel: cancel, - store: s.cache.store, - refcount: 1, // Snapshots are born referenced. 
- done: s.snapshotWG.Done, - packages: new(persistent.Map[PackageID, *packageHandle]), - meta: new(metadata.Graph), - files: newFileMap(), - activePackages: new(persistent.Map[PackageID, *Package]), - symbolizeHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - shouldLoad: new(persistent.Map[PackageID, []PackagePath]), - unloadableFiles: new(persistent.Set[protocol.DocumentURI]), - parseModHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - parseWorkHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - modTidyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - modVulnHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - modWhyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), - moduleUpgrades: new(persistent.Map[protocol.DocumentURI, map[string]string]), - vulns: new(persistent.Map[protocol.DocumentURI, *vulncheck.Result]), + view: v, + backgroundCtx: backgroundCtx, + cancel: cancel, + store: s.cache.store, + refcount: 1, // Snapshots are born referenced. + done: s.snapshotWG.Done, + packages: new(persistent.Map[PackageID, *packageHandle]), + fullAnalysisKeys: new(persistent.Map[PackageID, file.Hash]), + factyAnalysisKeys: new(persistent.Map[PackageID, file.Hash]), + meta: new(metadata.Graph), + files: newFileMap(), + symbolizeHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + shouldLoad: new(persistent.Map[PackageID, []PackagePath]), + unloadableFiles: new(persistent.Set[protocol.DocumentURI]), + parseModHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + parseWorkHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modTidyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modVulnHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + modWhyHandles: new(persistent.Map[protocol.DocumentURI, *memoize.Promise]), + moduleUpgrades: new(persistent.Map[protocol.DocumentURI, map[string]string]), + vulns: new(persistent.Map[protocol.DocumentURI, *vulncheck.Result]), } // Snapshots must observe all open files, as there are some caching diff --git a/gopls/internal/cache/session_test.go b/gopls/internal/cache/session_test.go index fe4e55e3d74..5f9a59a4945 100644 --- a/gopls/internal/cache/session_test.go +++ b/gopls/internal/cache/session_test.go @@ -85,7 +85,8 @@ func TestZeroConfigAlgorithm(t *testing.T) { options: func(dir string) map[string]any { return map[string]any{ "env": map[string]any{ - "GOPATH": dir, + "GO111MODULE": "", // golang/go#70196: must be unset + "GOPATH": dir, }, } }, @@ -340,12 +341,14 @@ replace ( t.Fatal(err) } } - env, err := FetchGoEnv(ctx, toURI(f.dir), opts) + uri := toURI(f.dir) + env, err := FetchGoEnv(ctx, uri, opts) if err != nil { t.Fatalf("FetchGoEnv failed: %v", err) } + t.Logf("FetchGoEnv(%q) = %+v", uri, env) folders = append(folders, &Folder{ - Dir: toURI(f.dir), + Dir: uri, Name: path.Base(f.dir), Options: opts, Env: *env, diff --git a/gopls/internal/cache/snapshot.go b/gopls/internal/cache/snapshot.go index 004dc5279c0..63aed7be2e6 100644 --- a/gopls/internal/cache/snapshot.go +++ b/gopls/internal/cache/snapshot.go @@ -144,11 +144,15 @@ type Snapshot struct { // be in packages, unless there is a missing import packages *persistent.Map[PackageID, *packageHandle] - // activePackages maps a package ID to a memoized active package, or nil if - // the package is known not to be open. 
+ // fullAnalysisKeys and factyAnalysisKeys hold memoized cache keys for + // analysis packages. "full" refers to the cache key including all enabled + // analyzers, whereas "facty" is the key including only the subset of enabled + // analyzers that produce facts, such as is required for transitively + // imported packages. // - // IDs not contained in the map are not known to be open or not open. - activePackages *persistent.Map[PackageID, *Package] + // These keys are memoized because they can be quite expensive to compute. + fullAnalysisKeys *persistent.Map[PackageID, file.Hash] + factyAnalysisKeys *persistent.Map[PackageID, file.Hash] // workspacePackages contains the workspace's packages, which are loaded // when the view is created. It does not contain intermediate test variants. @@ -182,14 +186,6 @@ type Snapshot struct { modWhyHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modWhyResult] modVulnHandles *persistent.Map[protocol.DocumentURI, *memoize.Promise] // *memoize.Promise[modVulnResult] - // importGraph holds a shared import graph to use for type-checking. Adding - // more packages to this import graph can speed up type checking, at the - // expense of in-use memory. - // - // See getImportGraph for additional documentation. - importGraphDone chan struct{} // closed when importGraph is set; may be nil - importGraph *importGraph // copied from preceding snapshot and re-evaluated - // moduleUpgrades tracks known upgrades for module paths in each modfile. // Each modfile has a map of module name to upgrade version. moduleUpgrades *persistent.Map[protocol.DocumentURI, map[string]string] @@ -245,7 +241,6 @@ func (s *Snapshot) decref() { s.refcount-- if s.refcount == 0 { s.packages.Destroy() - s.activePackages.Destroy() s.files.destroy() s.symbolizeHandles.Destroy() s.parseModHandles.Destroy() @@ -531,7 +526,7 @@ func (s *Snapshot) GoCommandInvocation(allowNetwork bool, inv *gocommand.Invocat ) inv.BuildFlags = slices.Clone(s.Options().BuildFlags) - if !allowNetwork && !s.Options().AllowImplicitNetworkAccess { + if !allowNetwork { inv.Env = append(inv.Env, "GOPROXY=off") } @@ -844,50 +839,6 @@ func (s *Snapshot) ReverseDependencies(ctx context.Context, id PackageID, transi return rdeps, nil } -// -- Active package tracking -- -// -// We say a package is "active" if any of its files are open. -// This is an optimization: the "active" concept is an -// implementation detail of the cache and is not exposed -// in the source or Snapshot API. -// After type-checking we keep active packages in memory. -// The activePackages persistent map does bookkeeping for -// the set of active packages. - -// getActivePackage returns a the memoized active package for id, if it exists. -// If id is not active or has not yet been type-checked, it returns nil. -func (s *Snapshot) getActivePackage(id PackageID) *Package { - s.mu.Lock() - defer s.mu.Unlock() - - if value, ok := s.activePackages.Get(id); ok { - return value - } - return nil -} - -// setActivePackage checks if pkg is active, and if so either records it in -// the active packages map or returns the existing memoized active package for id. 
-func (s *Snapshot) setActivePackage(id PackageID, pkg *Package) { - s.mu.Lock() - defer s.mu.Unlock() - - if _, ok := s.activePackages.Get(id); ok { - return // already memoized - } - - if containsOpenFileLocked(s, pkg.Metadata()) { - s.activePackages.Set(id, pkg, nil) - } else { - s.activePackages.Set(id, (*Package)(nil), nil) // remember that pkg is not open - } -} - -func (s *Snapshot) resetActivePackagesLocked() { - s.activePackages.Destroy() - s.activePackages = new(persistent.Map[PackageID, *Package]) -} - // See Session.FileWatchingGlobPatterns for a description of gopls' file // watching heuristic. func (s *Snapshot) fileWatchingGlobPatterns() map[protocol.RelativePattern]unit { @@ -1670,7 +1621,8 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f initialized: s.initialized, initialErr: s.initialErr, packages: s.packages.Clone(), - activePackages: s.activePackages.Clone(), + fullAnalysisKeys: s.fullAnalysisKeys.Clone(), + factyAnalysisKeys: s.factyAnalysisKeys.Clone(), files: s.files.clone(changedFiles), symbolizeHandles: cloneWithout(s.symbolizeHandles, changedFiles, nil), workspacePackages: s.workspacePackages, @@ -1681,7 +1633,6 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f modTidyHandles: cloneWithout(s.modTidyHandles, changedFiles, &needsDiagnosis), modWhyHandles: cloneWithout(s.modWhyHandles, changedFiles, &needsDiagnosis), modVulnHandles: cloneWithout(s.modVulnHandles, changedFiles, &needsDiagnosis), - importGraph: s.importGraph, moduleUpgrades: cloneWith(s.moduleUpgrades, changed.ModuleUpgrades), vulns: cloneWith(s.vulns, changed.Vulns), } @@ -1949,6 +1900,12 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f // invalidation. if ph, ok := result.packages.Get(id); ok { needsDiagnosis = true + + // Always invalidate analysis keys, as we do not implement fine-grained + // invalidation for analysis. + result.fullAnalysisKeys.Delete(id) + result.factyAnalysisKeys.Delete(id) + if invalidateMetadata { result.packages.Delete(id) } else { @@ -1963,9 +1920,6 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f result.packages.Set(id, ph, nil) } } - if result.activePackages.Delete(id) { - needsDiagnosis = true - } } // Compute which metadata updates are required. We only need to invalidate @@ -2006,7 +1960,6 @@ func (s *Snapshot) clone(ctx, bgCtx context.Context, changed StateChange, done f if result.meta != s.meta || anyFileOpenedOrClosed { needsDiagnosis = true result.workspacePackages = computeWorkspacePackagesLocked(ctx, result, result.meta) - result.resetActivePackagesLocked() } else { result.workspacePackages = s.workspacePackages } @@ -2190,8 +2143,8 @@ func metadataChanges(ctx context.Context, lockedSnapshot *Snapshot, oldFH, newFH // Check whether package imports have changed. Only consider potentially // valid imports paths. - oldImports := validImports(oldHead.File.Imports) - newImports := validImports(newHead.File.Imports) + oldImports := validImportPaths(oldHead.File.Imports) + newImports := validImportPaths(newHead.File.Imports) for path := range newImports { if _, ok := oldImports[path]; ok { @@ -2240,8 +2193,8 @@ func magicCommentsChanged(original *ast.File, current *ast.File) bool { return false } -// validImports extracts the set of valid import paths from imports. -func validImports(imports []*ast.ImportSpec) map[string]struct{} { +// validImportPaths extracts the set of valid import paths from imports. 
+func validImportPaths(imports []*ast.ImportSpec) map[string]struct{} { m := make(map[string]struct{}) for _, spec := range imports { if path := spec.Path.Value; validImportPath(path) { diff --git a/gopls/internal/cache/testfuncs/tests.go b/gopls/internal/cache/testfuncs/tests.go index cfef3c54164..cfc7daab15c 100644 --- a/gopls/internal/cache/testfuncs/tests.go +++ b/gopls/internal/cache/testfuncs/tests.go @@ -156,7 +156,7 @@ func (b *indexBuilder) findSubtests(parent gobTest, typ *ast.FuncType, body *ast continue // subtest has wrong signature } - val := info.Types[call.Args[0]].Value + val := info.Types[call.Args[0]].Value // may be zero if val == nil || val.Kind() != constant.String { continue } diff --git a/gopls/internal/cache/view.go b/gopls/internal/cache/view.go index 93612a763fb..5c8f4faec9e 100644 --- a/gopls/internal/cache/view.go +++ b/gopls/internal/cache/view.go @@ -69,6 +69,7 @@ type GoEnv struct { GOFLAGS string GO111MODULE string GOTOOLCHAIN string + GOROOT string // Go version output. GoVersion int // The X in Go 1.X @@ -998,6 +999,7 @@ func FetchGoEnv(ctx context.Context, folder protocol.DocumentURI, opts *settings "GOFLAGS": &env.GOFLAGS, "GO111MODULE": &env.GO111MODULE, "GOTOOLCHAIN": &env.GOTOOLCHAIN, + "GOROOT": &env.GOROOT, } if err := loadGoEnv(ctx, dir, opts.EnvSlice(), runner, envvars); err != nil { return nil, err diff --git a/gopls/internal/cmd/cmd.go b/gopls/internal/cmd/cmd.go index 4afac6a7aff..91aca4683b5 100644 --- a/gopls/internal/cmd/cmd.go +++ b/gopls/internal/cmd/cmd.go @@ -12,7 +12,6 @@ import ( "flag" "fmt" "log" - "math/rand" "os" "path/filepath" "reflect" @@ -391,35 +390,13 @@ type connection struct { client *cmdClient } -// registerProgressHandler registers a handler for progress notifications. -// The caller must call unregister when the handler is no longer needed. -func (cli *cmdClient) registerProgressHandler(handler func(*protocol.ProgressParams)) (token protocol.ProgressToken, unregister func()) { - token = fmt.Sprintf("tok%d", rand.Uint64()) - - // register - cli.progressHandlersMu.Lock() - if cli.progressHandlers == nil { - cli.progressHandlers = make(map[protocol.ProgressToken]func(*protocol.ProgressParams)) - } - cli.progressHandlers[token] = handler - cli.progressHandlersMu.Unlock() - - unregister = func() { - cli.progressHandlersMu.Lock() - delete(cli.progressHandlers, token) - cli.progressHandlersMu.Unlock() - } - return token, unregister -} - // cmdClient defines the protocol.Client interface behavior of the gopls CLI tool. 
type cmdClient struct { app *Application - progressHandlersMu sync.Mutex - progressHandlers map[protocol.ProgressToken]func(*protocol.ProgressParams) - iwlToken protocol.ProgressToken - iwlDone chan struct{} + progressMu sync.Mutex + iwlToken protocol.ProgressToken + iwlDone chan struct{} filesMu sync.Mutex // guards files map files map[protocol.DocumentURI]*cmdFile @@ -698,41 +675,33 @@ func (c *cmdClient) PublishDiagnostics(ctx context.Context, p *protocol.PublishD } func (c *cmdClient) Progress(_ context.Context, params *protocol.ProgressParams) error { - token, ok := params.Token.(string) - if !ok { + if _, ok := params.Token.(string); !ok { return fmt.Errorf("unexpected progress token: %[1]T %[1]v", params.Token) } - c.progressHandlersMu.Lock() - handler := c.progressHandlers[token] - c.progressHandlersMu.Unlock() - if handler == nil { - handler = c.defaultProgressHandler - } - handler(params) - return nil -} - -// defaultProgressHandler is the default handler of progress messages, -// used during the initialize request. -func (c *cmdClient) defaultProgressHandler(params *protocol.ProgressParams) { switch v := params.Value.(type) { case *protocol.WorkDoneProgressBegin: if v.Title == server.DiagnosticWorkTitle(server.FromInitialWorkspaceLoad) { - c.progressHandlersMu.Lock() + c.progressMu.Lock() c.iwlToken = params.Token - c.progressHandlersMu.Unlock() + c.progressMu.Unlock() + } + + case *protocol.WorkDoneProgressReport: + if c.app.Verbose { + fmt.Fprintln(os.Stderr, v.Message) } case *protocol.WorkDoneProgressEnd: - c.progressHandlersMu.Lock() + c.progressMu.Lock() iwlToken := c.iwlToken - c.progressHandlersMu.Unlock() + c.progressMu.Unlock() if params.Token == iwlToken { close(c.iwlDone) } } + return nil } func (c *cmdClient) ShowDocument(ctx context.Context, params *protocol.ShowDocumentParams) (*protocol.ShowDocumentResult, error) { diff --git a/gopls/internal/cmd/execute.go b/gopls/internal/cmd/execute.go index 96b3cf3b81d..967e97ed50f 100644 --- a/gopls/internal/cmd/execute.go +++ b/gopls/internal/cmd/execute.go @@ -10,12 +10,10 @@ import ( "flag" "fmt" "log" - "os" "slices" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/command" - "golang.org/x/tools/gopls/internal/server" "golang.org/x/tools/internal/tool" ) @@ -98,38 +96,11 @@ func (e *execute) Run(ctx context.Context, args ...string) error { // executeCommand executes a protocol.Command, displaying progress // messages and awaiting completion of asynchronous commands. +// +// TODO(rfindley): inline away all calls, ensuring they inline idiomatically. func (conn *connection) executeCommand(ctx context.Context, cmd *protocol.Command) (any, error) { - endStatus := make(chan string, 1) - token, unregister := conn.client.registerProgressHandler(func(params *protocol.ProgressParams) { - switch v := params.Value.(type) { - case *protocol.WorkDoneProgressReport: - fmt.Fprintln(os.Stderr, v.Message) // combined std{out,err} - - case *protocol.WorkDoneProgressEnd: - endStatus <- v.Message // = canceled | failed | completed - } - }) - defer unregister() - - res, err := conn.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + return conn.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ Command: cmd.Command, Arguments: cmd.Arguments, - WorkDoneProgressParams: protocol.WorkDoneProgressParams{ - WorkDoneToken: token, - }, }) - if err != nil { - return nil, err - } - - // Some commands are asynchronous, so clients - // must wait for the "end" progress notification. 
- if command.Command(cmd.Command).IsAsync() { - status := <-endStatus - if status != server.CommandCompleted { - return nil, fmt.Errorf("command %s", status) - } - } - - return res, nil } diff --git a/gopls/internal/cmd/integration_test.go b/gopls/internal/cmd/integration_test.go index 39698f37334..15888b21f68 100644 --- a/gopls/internal/cmd/integration_test.go +++ b/gopls/internal/cmd/integration_test.go @@ -224,7 +224,7 @@ func TestFail(t *testing.T) { t.Fatal("fail") } } // run the passing test { - res := gopls(t, tree, "codelens", "-exec", "./a/a_test.go:3", "run test") + res := gopls(t, tree, "-v", "codelens", "-exec", "./a/a_test.go:3", "run test") res.checkExit(true) res.checkStderr(`PASS: TestPass`) // from go test res.checkStderr("Info: all tests passed") // from gopls.test diff --git a/gopls/internal/doc/api.json b/gopls/internal/doc/api.json index b076abd26b0..298c3ab49e1 100644 --- a/gopls/internal/doc/api.json +++ b/gopls/internal/doc/api.json @@ -79,19 +79,6 @@ "Status": "experimental", "Hierarchy": "build" }, - { - "Name": "allowImplicitNetworkAccess", - "Type": "bool", - "Doc": "allowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module\ndownloads rather than requiring user action. This option will eventually\nbe removed.\n", - "EnumKeys": { - "ValueType": "", - "Keys": null - }, - "EnumValues": null, - "Default": "false", - "Status": "experimental", - "Hierarchy": "build" - }, { "Name": "standaloneTags", "Type": "[]string", @@ -479,7 +466,7 @@ }, { "Name": "\"lostcancel\"", - "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)", + "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", "Default": "true" }, { @@ -1127,7 +1114,7 @@ }, { "Name": "lostcancel", - "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nand WithDeadline must be called or the new context will remain live\nuntil its parent context is cancelled.\n(The background context is never cancelled.)", + "Doc": "check cancel func returned by context.WithCancel is called\n\nThe cancellation function returned by context.WithCancel, WithTimeout,\nWithDeadline and variants such as WithCancelCause must be called,\nor the new context will remain live until its parent context is cancelled.\n(The background context is never cancelled.)", "URL": "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel", "Default": true }, diff --git a/gopls/internal/golang/add_import.go b/gopls/internal/golang/add_import.go index a43256a6a08..7581bc02dbd 100644 --- a/gopls/internal/golang/add_import.go +++ b/gopls/internal/golang/add_import.go @@ -20,7 +20,7 @@ func AddImport(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, im if err != nil { return nil, err } - return ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ + return ComputeImportFixEdits(snapshot.Options().Local, pgf.Src, &imports.ImportFix{ StmtInfo: imports.ImportInfo{ ImportPath: importPath, }, diff --git 
a/gopls/internal/golang/addtest.go b/gopls/internal/golang/addtest.go new file mode 100644 index 00000000000..bf4dfed0acf --- /dev/null +++ b/gopls/internal/golang/addtest.go @@ -0,0 +1,395 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package golang + +// This file defines the behavior of the "Add test for FUNC" command. + +import ( + "bytes" + "context" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "html/template" + "os" + "path/filepath" + "strconv" + "strings" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/gopls/internal/cache" + "golang.org/x/tools/gopls/internal/cache/parsego" + "golang.org/x/tools/gopls/internal/protocol" + goplsastutil "golang.org/x/tools/gopls/internal/util/astutil" + "golang.org/x/tools/internal/typesinternal" +) + +const testTmplString = `func {{.TestFuncName}}(t *testing.T) { + {{- /* Functions/methods input parameters struct declaration. */}} + {{- if gt (len .Args) 1}} + type args struct { + {{- range .Args}} + {{.Name}} {{.Type}} + {{- end}} + } + {{- end}} + + {{- /* Test cases struct declaration and empty initialization. */}} + tests := []struct { + name string // description of this test case + {{- if gt (len .Args) 1}} + args args + {{- end}} + {{- if eq (len .Args) 1}} + arg {{(index .Args 0).Type}} + {{- end}} + {{- range $index, $res := .Results}} + {{- if eq $res.Name "gotErr"}} + wantErr bool + {{- else if eq $index 0}} + want {{$res.Type}} + {{- else}} + want{{add $index 1}} {{$res.Type}} + {{- end}} + {{- end}} + }{ + // TODO: Add test cases. + } + + {{- /* Loop over all the test cases. */}} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + {{/* Got variables. */}} + {{- if .Results}}{{fieldNames .Results ""}} := {{end}} + + {{- /* Call expression. In xtest package test, call function by PACKAGE.FUNC. */}} + {{- /* TODO(hxjiang): consider any renaming in existing xtest package imports. E.g. import renamedfoo "foo". */}} + {{- /* TODO(hxjiang): support add test for methods by calling the right constructor. */}} + {{- if .PackageName}}{{.PackageName}}.{{end}}{{.FuncName}} + + {{- /* Input parameters. */ -}} + ({{- if eq (len .Args) 1}}tt.arg{{end}}{{if gt (len .Args) 1}}{{fieldNames .Args "tt.args."}}{{end}}) + + {{- /* Handles the returned error before the rest of return value. */}} + {{- $last := index .Results (add (len .Results) -1)}} + {{- if eq $last.Name "gotErr"}} + if gotErr != nil { + if !tt.wantErr { + t.Errorf("{{$.FuncName}}() failed: %v", gotErr) + } + return + } + if tt.wantErr { + t.Fatal("{{$.FuncName}}() succeeded unexpectedly") + } + {{- end}} + + {{- /* Compare the returned values except for the last returned error. */}} + {{- if or (and .Results (ne $last.Name "gotErr")) (and (gt (len .Results) 1) (eq $last.Name "gotErr"))}} + // TODO: update the condition below to compare got with tt.want. 
+ {{- range $index, $res := .Results}} + {{- if ne $res.Name "gotErr"}} + if true { + t.Errorf("{{$.FuncName}}() = %v, want %v", {{.Name}}, tt.{{if eq $index 0}}want{{else}}want{{add $index 1}}{{end}}) + } + {{- end}} + {{- end}} + {{- end}} + }) + } +} +` + +type field struct { + Name, Type string +} + +type testInfo struct { + PackageName string + FuncName string + TestFuncName string + Args []field + Results []field +} + +var testTmpl = template.Must(template.New("test").Funcs(template.FuncMap{ + "add": func(a, b int) int { return a + b }, + "fieldNames": func(fields []field, qualifier string) (res string) { + var names []string + for _, f := range fields { + names = append(names, qualifier+f.Name) + } + return strings.Join(names, ", ") + }, +}).Parse(testTmplString)) + +// AddTestForFunc adds a test for the function enclosing the given input range. +// It creates a _test.go file if one does not already exist. +func AddTestForFunc(ctx context.Context, snapshot *cache.Snapshot, loc protocol.Location) (changes []protocol.DocumentChange, _ error) { + pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, loc.URI) + if err != nil { + return nil, err + } + + if errors := pkg.ParseErrors(); len(errors) > 0 { + return nil, fmt.Errorf("package has parse errors: %v", errors[0]) + } + if errors := pkg.TypeErrors(); len(errors) > 0 { + return nil, fmt.Errorf("package has type errors: %v", errors[0]) + } + + // imports is a map from package path to local package name. + var imports = make(map[string]string) + + var collectImports = func(file *ast.File) error { + for _, spec := range file.Imports { + // TODO(hxjiang): support dot imports. + if spec.Name != nil && spec.Name.Name == "." { + return fmt.Errorf("\"add a test for FUNC\" does not support files containing dot imports") + } + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + return err + } + if spec.Name != nil && spec.Name.Name != "_" { + imports[path] = spec.Name.Name + } else { + imports[path] = filepath.Base(path) + } + } + return nil + } + + // Collect all the imports from the x.go, keep track of the local package name. + if err := collectImports(pgf.File); err != nil { + return nil, err + } + + testBase := strings.TrimSuffix(filepath.Base(loc.URI.Path()), ".go") + "_test.go" + goTestFileURI := protocol.URIFromPath(filepath.Join(loc.URI.Dir().Path(), testBase)) + + testFH, err := snapshot.ReadFile(ctx, goTestFileURI) + if err != nil { + return nil, err + } + + // TODO(hxjiang): use a fresh name if the same test function name already + // exist. + + var ( + eofRange protocol.Range // empty selection at end of new file + // edits contains all the text edits to be applied to the test file. + edits []protocol.TextEdit + // xtest indicates whether the test file use package x or x_test. + // TODO(hxjiang): For now, we try to interpret the user's intention by + // reading the foo_test.go's package name. Instead, we can discuss the option + // to interpret the user's intention by which function they are selecting. + // Have one file for x_test package testing, one file for x package testing. + xtest = true + ) + + if testPGF, err := snapshot.ParseGo(ctx, testFH, parsego.Header); err != nil { + if !errors.Is(err, os.ErrNotExist) { + return nil, err + } + changes = append(changes, protocol.DocumentChangeCreate(goTestFileURI)) + + // header is the buffer containing the text to add to the beginning of the file. 
+ var header bytes.Buffer
+
+ // If this test file was created by gopls, add a copyright header and
+ // package decl based on the originating file.
+ // Search for something that looks like a copyright header, to replicate
+ // in the new file.
+ if groups := pgf.File.Comments; len(groups) > 0 {
+ // Copyright should appear before package decl and must be the first
+ // comment group.
+ // Avoid copying any other comment like package doc or directive comment.
+ if c := groups[0]; c.Pos() < pgf.File.Package && c != pgf.File.Doc &&
+ !isDirective(c.List[0].Text) &&
+ strings.Contains(strings.ToLower(c.List[0].Text), "copyright") {
+ start, end, err := pgf.NodeOffsets(c)
+ if err != nil {
+ return nil, err
+ }
+ header.Write(pgf.Src[start:end])
+ // One empty line between copyright header and package decl.
+ header.WriteString("\n\n")
+ }
+ }
+ // One empty line between package decl and rest of the file.
+ fmt.Fprintf(&header, "package %s_test\n\n", pkg.Types().Name())
+
+ // Write the copyright and package decl to the beginning of the file.
+ edits = append(edits, protocol.TextEdit{
+ Range: protocol.Range{},
+ NewText: header.String(),
+ })
+ } else { // existing _test.go file.
+ if testPGF.File.Name == nil || testPGF.File.Name.NamePos == token.NoPos {
+ return nil, fmt.Errorf("missing package declaration")
+ }
+ switch testPGF.File.Name.Name {
+ case pgf.File.Name.Name:
+ xtest = false
+ case pgf.File.Name.Name + "_test":
+ xtest = true
+ default:
+ return nil, fmt.Errorf("invalid package declaration %q in test file %q", testPGF.File.Name, testPGF)
+ }
+
+ eofRange, err = testPGF.PosRange(testPGF.File.FileEnd, testPGF.File.FileEnd)
+ if err != nil {
+ return nil, err
+ }
+
+ // Collect all the imports from the x_test.go, overwriting the local package
+ // name collected from x.go.
+ if err := collectImports(testPGF.File); err != nil {
+ return nil, err
+ }
+ }
+
+ // qf qualifier returns the local package name needed in x_test.go, by
+ // consulting the consolidated imports map.
+ qf := func(p *types.Package) string {
+ // When generating a test in the x package itself, any type/function defined
+ // in the same package can omit the package name.
+ if !xtest && p == pkg.Types() {
+ return ""
+ }
+ if local, ok := imports[p.Path()]; ok {
+ return local
+ }
+ return p.Name()
+ }
+
+ // TODO(hxjiang): modify existing imports or add new imports.
+
+ start, end, err := pgf.RangePos(loc.Range)
+ if err != nil {
+ return nil, err
+ }
+
+ path, _ := astutil.PathEnclosingInterval(pgf.File, start, end)
+ if len(path) < 2 {
+ return nil, fmt.Errorf("no enclosing function")
+ }
+
+ decl, ok := path[len(path)-2].(*ast.FuncDecl)
+ if !ok {
+ return nil, fmt.Errorf("no enclosing function")
+ }
+
+ fn := pkg.TypesInfo().Defs[decl.Name].(*types.Func)
+ sig := fn.Signature()
+
+ if xtest {
+ // Reject if function/method is unexported.
+ if !fn.Exported() {
+ return nil, fmt.Errorf("cannot add test of unexported function %s to external test package %s_test", decl.Name, pgf.File.Name)
+ }
+
+ // Reject if receiver is unexported.
+ if sig.Recv() != nil {
+ if _, ident, _ := goplsastutil.UnpackRecv(decl.Recv.List[0].Type); !ident.IsExported() {
+ return nil, fmt.Errorf("cannot add external test for method %s.%s as receiver type is not exported", ident.Name, decl.Name)
+ }
+ }
+ // TODO(hxjiang): reject if any input parameter type is unexported.
+ // TODO(hxjiang): reject if any return value type is unexported. Explore
+ // the option to drop the return value if the type is unexported.
+ } + + testName, err := testName(fn) + if err != nil { + return nil, err + } + data := testInfo{ + FuncName: fn.Name(), + TestFuncName: testName, + } + + if sig.Recv() == nil && xtest { + data.PackageName = qf(pkg.Types()) + } + + for i := range sig.Params().Len() { + if i == 0 { + data.Args = append(data.Args, field{ + Name: "in", + Type: types.TypeString(sig.Params().At(i).Type(), qf), + }) + } else { + data.Args = append(data.Args, field{ + Name: fmt.Sprintf("in%d", i+1), + Type: types.TypeString(sig.Params().At(i).Type(), qf), + }) + } + } + + errorType := types.Universe.Lookup("error").Type() + for i := range sig.Results().Len() { + name := "got" + if i == sig.Results().Len()-1 && types.Identical(sig.Results().At(i).Type(), errorType) { + name = "gotErr" + } else if i > 0 { + name = fmt.Sprintf("got%d", i+1) + } + data.Results = append(data.Results, field{ + Name: name, + Type: types.TypeString(sig.Results().At(i).Type(), qf), + }) + } + + var test bytes.Buffer + if err := testTmpl.Execute(&test, data); err != nil { + return nil, err + } + + edits = append(edits, protocol.TextEdit{ + Range: eofRange, + NewText: test.String(), + }) + + return append(changes, protocol.DocumentChangeEdit(testFH, edits)), nil +} + +// testName returns the name of the function to use for the new function that +// tests fn. +// Returns empty string if the fn is ill typed or nil. +func testName(fn *types.Func) (string, error) { + if fn == nil { + return "", fmt.Errorf("input nil function") + } + testName := "Test" + if recv := fn.Signature().Recv(); recv != nil { // method declaration. + // Retrieve the unpointered receiver type to ensure the test name is based + // on the topmost alias or named type, not the alias' RHS type (potentially + // unexported) type. + // For example: + // type Foo = foo // Foo is an exported alias for the unexported type foo + recvType := recv.Type() + if ptr, ok := recv.Type().(*types.Pointer); ok { + recvType = ptr.Elem() + } + + t, ok := recvType.(typesinternal.NamedOrAlias) + if !ok { + return "", fmt.Errorf("receiver type is not named type or alias type") + } + + if !t.Obj().Exported() { + testName += "_" + } + + testName += t.Obj().Name() + "_" + } else if !fn.Exported() { // unexported function declaration. 
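+ // e.g. (hypothetical names) an unexported function parse yields Test_parse;
+ // an exported method M on unexported type t (handled above) yields Test_t_M.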
+ testName += "_" + } + return testName + fn.Name(), nil +} diff --git a/gopls/internal/golang/change_signature.go b/gopls/internal/golang/change_signature.go index 72cbe4c2d90..41c56ba6c2c 100644 --- a/gopls/internal/golang/change_signature.go +++ b/gopls/internal/golang/change_signature.go @@ -28,7 +28,6 @@ import ( "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/tokeninternal" "golang.org/x/tools/internal/typesinternal" - "golang.org/x/tools/internal/versions" ) // RemoveUnusedParameter computes a refactoring to remove the parameter @@ -482,15 +481,15 @@ func rewriteCalls(ctx context.Context, rw signatureRewrite) (map[protocol.Docume func reTypeCheck(logf func(string, ...any), orig *cache.Package, fileMask map[protocol.DocumentURI]*ast.File, expectErrors bool) (*types.Package, *types.Info, error) { pkg := types.NewPackage(string(orig.Metadata().PkgPath), string(orig.Metadata().Name)) info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), - Instances: make(map[*ast.Ident]types.Instance), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Scopes: make(map[ast.Node]*types.Scope), + Instances: make(map[*ast.Ident]types.Instance), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) { var files []*ast.File for _, pgf := range orig.CompiledGoFiles() { diff --git a/gopls/internal/golang/codeaction.go b/gopls/internal/golang/codeaction.go index 3c916628a1a..3e4f3113f9e 100644 --- a/gopls/internal/golang/codeaction.go +++ b/gopls/internal/golang/codeaction.go @@ -227,6 +227,7 @@ type codeActionProducer struct { var codeActionProducers = [...]codeActionProducer{ {kind: protocol.QuickFix, fn: quickFix, needPkg: true}, {kind: protocol.SourceOrganizeImports, fn: sourceOrganizeImports}, + {kind: settings.AddTest, fn: addTest, needPkg: true}, {kind: settings.GoAssembly, fn: goAssembly, needPkg: true}, {kind: settings.GoDoc, fn: goDoc, needPkg: true}, {kind: settings.GoFreeSymbols, fn: goFreeSymbols}, @@ -301,20 +302,32 @@ func quickFix(ctx context.Context, req *codeActionsRequest) error { continue } + msg := typeError.Error() + switch { // "Missing method" error? (stubmethods) // Offer a "Declare missing methods of INTERFACE" code action. - // See [stubMethodsFixer] for command implementation. - msg := typeError.Error() - if strings.Contains(msg, "missing method") || - strings.HasPrefix(msg, "cannot convert") || - strings.Contains(msg, "not implement") { + // See [stubMissingInterfaceMethodsFixer] for command implementation. 
+ case strings.Contains(msg, "missing method"), + strings.HasPrefix(msg, "cannot convert"), + strings.Contains(msg, "not implement"): path, _ := astutil.PathEnclosingInterval(req.pgf.File, start, end) - si := stubmethods.GetStubInfo(req.pkg.FileSet(), info, path, start) + si := stubmethods.GetIfaceStubInfo(req.pkg.FileSet(), info, path, start) if si != nil { qf := typesutil.FileQualifier(req.pgf.File, si.Concrete.Obj().Pkg(), info) iface := types.TypeString(si.Interface.Type(), qf) msg := fmt.Sprintf("Declare missing methods of %s", iface) - req.addApplyFixAction(msg, fixStubMethods, req.loc) + req.addApplyFixAction(msg, fixMissingInterfaceMethods, req.loc) + } + + // "type X has no field or method Y" compiler error. + // Offer a "Declare missing method T.f" code action. + // See [stubMissingCalledFunctionFixer] for command implementation. + case strings.Contains(msg, "has no field or method"): + path, _ := astutil.PathEnclosingInterval(req.pgf.File, start, end) + si := stubmethods.GetCallStubInfo(req.pkg.FileSet(), info, path, start) + if si != nil { + msg := fmt.Sprintf("Declare missing method %s.%s", si.Receiver.Obj().Name(), si.MethodName) + req.addApplyFixAction(msg, fixMissingCalledFunction, req.loc) } } } @@ -455,6 +468,41 @@ func refactorExtractToNewFile(ctx context.Context, req *codeActionsRequest) erro return nil } +// addTest produces "Add a test for FUNC" code actions. +// See [server.commandHandler.AddTest] for command implementation. +func addTest(ctx context.Context, req *codeActionsRequest) error { + // Reject if the feature is turned off. + if !req.snapshot.Options().AddTestSourceCodeAction { + return nil + } + + // Reject test package. + if req.pkg.Metadata().ForTest != "" { + return nil + } + + path, _ := astutil.PathEnclosingInterval(req.pgf.File, req.start, req.end) + if len(path) < 2 { + return nil + } + + decl, ok := path[len(path)-2].(*ast.FuncDecl) + if !ok { + return nil + } + + // Don't offer to create tests of "init" or "_". + if decl.Name.Name == "_" || decl.Name.Name == "init" { + return nil + } + + cmd := command.NewAddTestCommand("Add a test for "+decl.Name.String(), req.loc) + req.addCommandAction(cmd, true) + + // TODO(hxjiang): add code action for generate test for package/file. + return nil +} + // refactorRewriteRemoveUnusedParam produces "Remove unused parameter" code actions. // See [server.commandHandler.ChangeSignature] for command implementation. func refactorRewriteRemoveUnusedParam(ctx context.Context, req *codeActionsRequest) error { diff --git a/gopls/internal/golang/comment.go b/gopls/internal/golang/comment.go index 3a0d8153665..e1d154feac5 100644 --- a/gopls/internal/golang/comment.go +++ b/gopls/internal/golang/comment.go @@ -18,26 +18,29 @@ import ( "golang.org/x/tools/gopls/internal/cache/parsego" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/settings" + "golang.org/x/tools/gopls/internal/util/astutil" "golang.org/x/tools/gopls/internal/util/safetoken" ) var errNoCommentReference = errors.New("no comment reference found") -// CommentToMarkdown converts comment text to formatted markdown. -// The comment was prepared by DocReader, -// so it is known not to have leading, trailing blank lines -// nor to have trailing spaces at the end of lines. -// The comment markers have already been removed. 
-func CommentToMarkdown(text string, options *settings.Options) string { - var p comment.Parser - doc := p.Parse(text) - var pr comment.Printer +// DocCommentToMarkdown converts the text of a [doc comment] to Markdown. +// +// TODO(adonovan): provide a package (or file imports) as context for +// proper rendering of doc links; see [newDocCommentParser] and golang/go#61677. +// +// [doc comment]: https://go.dev/doc/comment +func DocCommentToMarkdown(text string, options *settings.Options) string { + var parser comment.Parser + doc := parser.Parse(text) + + var printer comment.Printer // The default produces {#Hdr-...} tags for headings. // vscode displays thems, which is undesirable. // The godoc for comment.Printer says the tags // avoid a security problem. - pr.HeadingID = func(*comment.Heading) string { return "" } - pr.DocLinkURL = func(link *comment.DocLink) string { + printer.HeadingID = func(*comment.Heading) string { return "" } + printer.DocLinkURL = func(link *comment.DocLink) string { msg := fmt.Sprintf("https://%s/%s", options.LinkTarget, link.ImportPath) if link.Name != "" { msg += "#" @@ -48,8 +51,8 @@ func CommentToMarkdown(text string, options *settings.Options) string { } return msg } - easy := pr.Markdown(doc) - return string(easy) + + return string(printer.Markdown(doc)) } // docLinkDefinition finds the definition of the doc link in comments at pos. @@ -199,3 +202,70 @@ func lookupDocLinkSymbol(pkg *cache.Package, pgf *parsego.File, name string) typ // package-level symbol return scope.Lookup(name) } + +// newDocCommentParser returns a function that parses [doc comments], +// with context for Doc Links supplied by the specified package. +// +// Imported symbols are rendered using the import mapping for the file +// that encloses fileNode. +// +// The resulting function is not concurrency safe. +// +// See issue #61677 for how this might be generalized to support +// correct contextual parsing of doc comments in Hover too. +// +// [doc comment]: https://go.dev/doc/comment +func newDocCommentParser(pkg *cache.Package) func(fileNode ast.Node, text string) *comment.Doc { + var currentFileNode ast.Node // node whose enclosing file's import mapping should be used + parser := &comment.Parser{ + LookupPackage: func(name string) (importPath string, ok bool) { + for _, f := range pkg.Syntax() { + // Different files in the same package have + // different import mappings. Use the provided + // syntax node to find the correct file. + if astutil.NodeContains(f, currentFileNode.Pos()) { + // First try the actual imported package name. + for _, imp := range f.Imports { + pkgName := pkg.TypesInfo().PkgNameOf(imp) + if pkgName != nil && pkgName.Name() == name { + return pkgName.Imported().Path(), true + } + } + + // Then try the imported package name, as some + // packages are typically imported under a + // non-default name (e.g. pathpkg "path") but + // may be referred to in doc links using their + // canonical name. + for _, imp := range f.Imports { + pkgName := pkg.TypesInfo().PkgNameOf(imp) + if pkgName != nil && pkgName.Imported().Name() == name { + return pkgName.Imported().Path(), true + } + } + + break + } + } + return "", false + }, + LookupSym: func(recv, name string) (ok bool) { + // package-level decl? + if recv == "" { + return pkg.Types().Scope().Lookup(name) != nil + } + + // method? 
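+ // For a doc link such as [T.M], recv is "T" and name is "M": resolve T in
+ // the package scope, then check that M names a method of T's type.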
+ tname, ok := pkg.Types().Scope().Lookup(recv).(*types.TypeName) + if !ok { + return false + } + m, _, _ := types.LookupFieldOrMethod(tname.Type(), true, pkg.Types(), name) + return is[*types.Func](m) + }, + } + return func(fileNode ast.Node, text string) *comment.Doc { + currentFileNode = fileNode + return parser.Parse(text) + } +} diff --git a/gopls/internal/golang/completion/completion.go b/gopls/internal/golang/completion/completion.go index cf398693113..6bf8ad8acde 100644 --- a/gopls/internal/golang/completion/completion.go +++ b/gopls/internal/golang/completion/completion.go @@ -1713,7 +1713,7 @@ func (c *completer) injectType(ctx context.Context, t types.Type) { // considered via a lexical search, so we need to directly inject // them. Also allow generic types since lexical search does not // infer instantiated versions of them. - if named, ok := types.Unalias(t).(*types.Named); !ok || named.TypeParams().Len() > 0 { + if pnt, ok := t.(typesinternal.NamedOrAlias); !ok || typesinternal.TypeParams(pnt).Len() > 0 { // If our expected type is "[]int", this will add a literal // candidate of "[]int{}". c.literal(ctx, t, nil) @@ -2508,8 +2508,8 @@ func (c *completer) expectedCallParamType(inf candidateInference, node *ast.Call func expectedConstraint(t types.Type, idx int) types.Type { var tp *types.TypeParamList - if named, _ := t.(*types.Named); named != nil { - tp = named.TypeParams() + if pnt, ok := t.(typesinternal.NamedOrAlias); ok { + tp = typesinternal.TypeParams(pnt) } else if sig, _ := t.Underlying().(*types.Signature); sig != nil { tp = sig.TypeParams() } diff --git a/gopls/internal/golang/completion/format.go b/gopls/internal/golang/completion/format.go index c2b955ca7e9..baf0890497b 100644 --- a/gopls/internal/golang/completion/format.go +++ b/gopls/internal/golang/completion/format.go @@ -17,8 +17,10 @@ import ( "golang.org/x/tools/gopls/internal/golang/completion/snippet" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/util/safetoken" + "golang.org/x/tools/gopls/internal/util/typesutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/imports" + "golang.org/x/tools/internal/typesinternal" ) var ( @@ -59,12 +61,10 @@ func (c *completer) item(ctx context.Context, cand candidate) (CompletionItem, e detail = "" } if isTypeName(obj) && c.wantTypeParams() { - x := cand.obj.(*types.TypeName) - if named, ok := types.Unalias(x.Type()).(*types.Named); ok { - tp := named.TypeParams() - label += golang.FormatTypeParams(tp) - insert = label // maintain invariant above (label == insert) - } + // obj is a *types.TypeName, so its type must be Alias|Named. 
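+ // typesinternal.TypeParams yields the type parameter list for either form,
+ // so generic aliases are handled the same way as generic named types.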
+ tparams := typesinternal.TypeParams(obj.Type().(typesinternal.NamedOrAlias)) + label += typesutil.FormatTypeParams(tparams) + insert = label // maintain invariant above (label == insert) } snip.WriteText(insert) @@ -299,7 +299,7 @@ func (c *completer) importEdits(imp *importInfo) ([]protocol.TextEdit, error) { return nil, err } - return golang.ComputeOneImportFixEdits(c.snapshot, pgf, &imports.ImportFix{ + return golang.ComputeImportFixEdits(c.snapshot.Options().Local, pgf.Src, &imports.ImportFix{ StmtInfo: imports.ImportInfo{ ImportPath: imp.importPath, Name: imp.name, diff --git a/gopls/internal/golang/completion/printf.go b/gopls/internal/golang/completion/printf.go index 958d77efe2e..c9db1de0147 100644 --- a/gopls/internal/golang/completion/printf.go +++ b/gopls/internal/golang/completion/printf.go @@ -40,7 +40,7 @@ func printfArgKind(info *types.Info, call *ast.CallExpr, argIdx int) objKind { } // Format string must be a constant. - strArg := info.Types[call.Args[numParams-2]].Value + strArg := info.Types[call.Args[numParams-2]].Value // may be zero if strArg == nil || strArg.Kind() != constant.String { return kindAny } diff --git a/gopls/internal/golang/diagnostics.go b/gopls/internal/golang/diagnostics.go index 1c6da2e9d4e..f65ca4f7047 100644 --- a/gopls/internal/golang/diagnostics.go +++ b/gopls/internal/golang/diagnostics.go @@ -6,17 +6,43 @@ package golang import ( "context" - "maps" - "slices" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/cache/metadata" "golang.org/x/tools/gopls/internal/progress" "golang.org/x/tools/gopls/internal/protocol" - "golang.org/x/tools/gopls/internal/settings" "golang.org/x/tools/gopls/internal/util/moremaps" ) +// DiagnoseFile returns pull-based diagnostics for the given file. +func DiagnoseFile(ctx context.Context, snapshot *cache.Snapshot, uri protocol.DocumentURI) ([]*cache.Diagnostic, error) { + mp, err := NarrowestMetadataForFile(ctx, snapshot, uri) + if err != nil { + return nil, err + } + + // TODO(rfindley): consider analysing the package concurrently to package + // diagnostics. + + // Get package (list/parse/type check) diagnostics. + pkgDiags, err := snapshot.PackageDiagnostics(ctx, mp.ID) + if err != nil { + return nil, err + } + diags := pkgDiags[uri] + + // Get analysis diagnostics. + pkgAnalysisDiags, err := snapshot.Analyze(ctx, map[PackageID]*metadata.Package{mp.ID: mp}, nil) + if err != nil { + return nil, err + } + analysisDiags := moremaps.Group(pkgAnalysisDiags, byURI)[uri] + + // Return the merged set of file diagnostics, combining type error analyses + // with type error diagnostics. + return CombineDiagnostics(diags, analysisDiags), nil +} + // Analyze reports go/analysis-framework diagnostics in the specified package. // // If the provided tracker is non-nil, it may be used to provide notifications @@ -30,15 +56,54 @@ func Analyze(ctx context.Context, snapshot *cache.Snapshot, pkgIDs map[PackageID return nil, ctx.Err() } - analyzers := slices.Collect(maps.Values(settings.DefaultAnalyzers)) - if snapshot.Options().Staticcheck { - analyzers = slices.AppendSeq(analyzers, maps.Values(settings.StaticcheckAnalyzers)) - } - - analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, analyzers, tracker) + analysisDiagnostics, err := snapshot.Analyze(ctx, pkgIDs, tracker) if err != nil { return nil, err } - byURI := func(d *cache.Diagnostic) protocol.DocumentURI { return d.URI } return moremaps.Group(analysisDiagnostics, byURI), nil } + +// byURI is used for grouping diagnostics. 
+func byURI(d *cache.Diagnostic) protocol.DocumentURI { return d.URI }
+
+// CombineDiagnostics combines and filters list/parse/type diagnostics from
+// tdiags with the analysis adiags, returning the resulting combined set.
+//
+// Type-error analyzers produce diagnostics that are redundant with type
+// checker diagnostics, but more detailed (e.g. fixes). Rather than report two
+// diagnostics for the same problem, we combine them by augmenting the
+// type-checker diagnostic and discarding the analyzer diagnostic.
+//
+// If an analysis diagnostic has the same range and message as a
+// list/parse/type diagnostic, the suggested fix information (et al) of the
+// analysis diagnostic is merged into a copy of the list/parse/type
+// diagnostic. This handles the case where a type-error analyzer suggests a
+// fix to a type error, and avoids duplication.
+//
+// The arguments are not modified.
+func CombineDiagnostics(tdiags []*cache.Diagnostic, adiags []*cache.Diagnostic) []*cache.Diagnostic {
+ // Build index of (list+parse+)type errors.
+ type key struct {
+ Range protocol.Range
+ message string
+ }
+ combined := make([]*cache.Diagnostic, len(tdiags))
+ index := make(map[key]int) // maps (Range,Message) to index in tdiags slice
+ for i, diag := range tdiags {
+ index[key{diag.Range, diag.Message}] = i
+ combined[i] = diag
+ }
+
+ // Filter out analysis diagnostics that match type errors,
+ // retaining their suggested fix (etc) fields.
+ for _, diag := range adiags {
+ if i, ok := index[key{diag.Range, diag.Message}]; ok {
+ copy := *tdiags[i]
+ copy.SuggestedFixes = diag.SuggestedFixes
+ copy.Tags = diag.Tags
+ combined[i] = &copy
+ continue
+ }
+ combined = append(combined, diag)
+ }
+ return combined
+}
diff --git a/gopls/internal/golang/extract.go b/gopls/internal/golang/extract.go
index 6ea011e220e..2edda76b6c5 100644
--- a/gopls/internal/golang/extract.go
+++ b/gopls/internal/golang/extract.go
@@ -24,7 +24,7 @@ import (
)
func extractVariable(fset *token.FileSet, start, end token.Pos, src []byte, file *ast.File, pkg *types.Package, info *types.Info) (*token.FileSet, *analysis.SuggestedFix, error) {
- tokFile := fset.File(file.Pos())
+ tokFile := fset.File(file.FileStart)
expr, path, ok, err := canExtractVariable(start, end, file)
if !ok {
return nil, nil, fmt.Errorf("extractVariable: cannot extract %s: %v", safetoken.StartPosition(fset, start), err)
}
@@ -214,7 +214,7 @@ func extractFunctionMethod(fset *token.FileSet, start, end token.Pos, src []byte
errorPrefix = "extractMethod"
}
- tok := fset.File(file.Pos())
+ tok := fset.File(file.FileStart)
if tok == nil {
return nil, nil, bug.Errorf("no file for position")
}
@@ -821,7 +821,7 @@ func collectFreeVars(info *types.Info, file *ast.File, fileScope, pkgScope *type
if _, ok := obj.(*types.PkgName); ok {
return nil, false // imported package
}
- if !(file.Pos() <= obj.Pos() && obj.Pos() <= file.End()) {
+ if !(file.FileStart <= obj.Pos() && obj.Pos() <= file.FileEnd) {
return nil, false // not defined in this file
}
scope := obj.Parent()
diff --git a/gopls/internal/golang/extracttofile.go b/gopls/internal/golang/extracttofile.go
index 0a1d74408d7..ae26738a5c3 100644
--- a/gopls/internal/golang/extracttofile.go
+++ b/gopls/internal/golang/extracttofile.go
@@ -80,7 +80,7 @@ func findImportEdits(file *ast.File, info *types.Info, start, end token.Pos) (ad
}
// ExtractToNewFile moves selected declarations into a new file.
-func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) (*protocol.WorkspaceEdit, error) { +func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, rng protocol.Range) ([]protocol.DocumentChange, error) { errorPrefix := "ExtractToNewFile" pkg, pgf, err := NarrowestPackageForFile(ctx, snapshot, fh.URI()) @@ -160,7 +160,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han return nil, err } - return protocol.NewWorkspaceEdit( + return []protocol.DocumentChange{ // edit the original file protocol.DocumentChangeEdit(fh, append(importDeletes, protocol.TextEdit{Range: replaceRange, NewText: ""})), // create a new file @@ -168,7 +168,7 @@ func ExtractToNewFile(ctx context.Context, snapshot *cache.Snapshot, fh file.Han // edit the created file protocol.DocumentChangeEdit(newFile, []protocol.TextEdit{ {Range: protocol.Range{}, NewText: string(newFileContent)}, - })), nil + })}, nil } // chooseNewFile chooses a new filename in dir, based on the name of the diff --git a/gopls/internal/golang/fix.go b/gopls/internal/golang/fix.go index 7c44aa4d273..a20658fce7c 100644 --- a/gopls/internal/golang/fix.go +++ b/gopls/internal/golang/fix.go @@ -58,14 +58,15 @@ func singleFile(fixer1 singleFileFixer) fixer { // Names of ApplyFix.Fix created directly by the CodeAction handler. const ( - fixExtractVariable = "extract_variable" - fixExtractFunction = "extract_function" - fixExtractMethod = "extract_method" - fixInlineCall = "inline_call" - fixInvertIfCondition = "invert_if_condition" - fixSplitLines = "split_lines" - fixJoinLines = "join_lines" - fixStubMethods = "stub_methods" + fixExtractVariable = "extract_variable" + fixExtractFunction = "extract_function" + fixExtractMethod = "extract_method" + fixInlineCall = "inline_call" + fixInvertIfCondition = "invert_if_condition" + fixSplitLines = "split_lines" + fixJoinLines = "join_lines" + fixMissingInterfaceMethods = "stub_missing_interface_method" + fixMissingCalledFunction = "stub_missing_called_function" ) // ApplyFix applies the specified kind of suggested fix to the given @@ -102,14 +103,15 @@ func ApplyFix(ctx context.Context, fix string, snapshot *cache.Snapshot, fh file // Ad-hoc fixers: these are used when the command is // constructed directly by logic in server/code_action. - fixExtractFunction: singleFile(extractFunction), - fixExtractMethod: singleFile(extractMethod), - fixExtractVariable: singleFile(extractVariable), - fixInlineCall: inlineCall, - fixInvertIfCondition: singleFile(invertIfCondition), - fixSplitLines: singleFile(splitLines), - fixJoinLines: singleFile(joinLines), - fixStubMethods: stubMethodsFixer, + fixExtractFunction: singleFile(extractFunction), + fixExtractMethod: singleFile(extractMethod), + fixExtractVariable: singleFile(extractVariable), + fixInlineCall: inlineCall, + fixInvertIfCondition: singleFile(invertIfCondition), + fixSplitLines: singleFile(splitLines), + fixJoinLines: singleFile(joinLines), + fixMissingInterfaceMethods: stubMissingInterfaceMethodsFixer, + fixMissingCalledFunction: stubMissingCalledFunctionFixer, } fixer, ok := fixers[fix] if !ok { @@ -186,7 +188,7 @@ func suggestedFixToDocumentChange(ctx context.Context, snapshot *cache.Snapshot, // addEmbedImport adds a missing embed "embed" import with blank name. 
func addEmbedImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, _, _ token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { // Like golang.AddImport, but with _ as Name and using our pgf. - protoEdits, err := ComputeOneImportFixEdits(snapshot, pgf, &imports.ImportFix{ + protoEdits, err := ComputeImportFixEdits(snapshot.Options().Local, pgf.Src, &imports.ImportFix{ StmtInfo: imports.ImportInfo{ ImportPath: "embed", Name: "_", diff --git a/gopls/internal/golang/format.go b/gopls/internal/golang/format.go index 8f735f38cf4..fa255e6b1c6 100644 --- a/gopls/internal/golang/format.go +++ b/gopls/internal/golang/format.go @@ -120,7 +120,7 @@ func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego defer done() if err := snapshot.RunProcessEnvFunc(ctx, func(ctx context.Context, opts *imports.Options) error { - allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, opts) + allFixEdits, editsPerFix, err = computeImportEdits(ctx, pgf, snapshot.View().Folder().Env.GOROOT, opts) return err }); err != nil { return nil, nil, fmt.Errorf("allImportsFixes: %v", err) @@ -130,16 +130,17 @@ func allImportsFixes(ctx context.Context, snapshot *cache.Snapshot, pgf *parsego // computeImportEdits computes a set of edits that perform one or all of the // necessary import fixes. -func computeImportEdits(ctx context.Context, pgf *parsego.File, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { +func computeImportEdits(ctx context.Context, pgf *parsego.File, goroot string, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*importFix, err error) { filename := pgf.URI.Path() // Build up basic information about the original file. - allFixes, err := imports.FixImports(ctx, filename, pgf.Src, options) + isource, err := imports.NewProcessEnvSource(options.Env, filename, pgf.File.Name.Name) + allFixes, err := imports.FixImports(ctx, filename, pgf.Src, goroot, options.Env.Logf, isource) if err != nil { return nil, nil, err } - allFixEdits, err = computeFixEdits(pgf, options, allFixes) + allFixEdits, err = computeFixEdits(pgf.Src, options, allFixes) if err != nil { return nil, nil, err } @@ -147,7 +148,7 @@ func computeImportEdits(ctx context.Context, pgf *parsego.File, options *imports // Apply all of the import fixes to the file. // Add the edits for each fix to the result. for _, fix := range allFixes { - edits, err := computeFixEdits(pgf, options, []*imports.ImportFix{fix}) + edits, err := computeFixEdits(pgf.Src, options, []*imports.ImportFix{fix}) if err != nil { return nil, nil, err } @@ -159,10 +160,10 @@ func computeImportEdits(ctx context.Context, pgf *parsego.File, options *imports return allFixEdits, editsPerFix, nil } -// ComputeOneImportFixEdits returns text edits for a single import fix. -func ComputeOneImportFixEdits(snapshot *cache.Snapshot, pgf *parsego.File, fix *imports.ImportFix) ([]protocol.TextEdit, error) { +// ComputeImportFixEdits returns text edits for a single import fix. +func ComputeImportFixEdits(localPrefix string, src []byte, fixes ...*imports.ImportFix) ([]protocol.TextEdit, error) { options := &imports.Options{ - LocalPrefix: snapshot.Options().Local, + LocalPrefix: localPrefix, // Defaults. 
AllErrors: true, Comments: true, @@ -171,18 +172,18 @@ func ComputeOneImportFixEdits(snapshot *cache.Snapshot, pgf *parsego.File, fix * TabIndent: true, TabWidth: 8, } - return computeFixEdits(pgf, options, []*imports.ImportFix{fix}) + return computeFixEdits(src, options, fixes) } -func computeFixEdits(pgf *parsego.File, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) { +func computeFixEdits(src []byte, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) { // trim the original data to match fixedData - left, err := importPrefix(pgf.Src) + left, err := importPrefix(src) if err != nil { return nil, err } extra := !strings.Contains(left, "\n") // one line may have more than imports if extra { - left = string(pgf.Src) + left = string(src) } if len(left) > 0 && left[len(left)-1] != '\n' { left += "\n" @@ -194,7 +195,7 @@ func computeFixEdits(pgf *parsego.File, options *imports.Options, fixes []*impor // used all of origData above, use all of it here too flags = 0 } - fixedData, err := imports.ApplyFixes(fixes, "", pgf.Src, options, flags) + fixedData, err := imports.ApplyFixes(fixes, "", src, options, flags) if err != nil { return nil, err } @@ -215,7 +216,7 @@ func importPrefix(src []byte) (string, error) { if err != nil { // This can happen if 'package' is misspelled return "", fmt.Errorf("importPrefix: failed to parse: %s", err) } - tok := fset.File(f.Pos()) + tok := fset.File(f.FileStart) var importEnd int for _, d := range f.Decls { if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT { diff --git a/gopls/internal/golang/freesymbols.go b/gopls/internal/golang/freesymbols.go index 0e2422d421b..bbda8f7d948 100644 --- a/gopls/internal/golang/freesymbols.go +++ b/gopls/internal/golang/freesymbols.go @@ -372,7 +372,11 @@ func freeRefs(pkg *types.Package, info *types.Info, file *ast.File, start, end t if ref != nil { ref.expr = n.(ast.Expr) - ref.typ = info.Types[n.(ast.Expr)].Type + if tv, ok := info.Types[ref.expr]; ok { + ref.typ = tv.Type + } else { + ref.typ = types.Typ[types.Invalid] + } free = append(free, ref) } diff --git a/gopls/internal/golang/highlight.go b/gopls/internal/golang/highlight.go index f53e73f3053..1174ce7f7d4 100644 --- a/gopls/internal/golang/highlight.go +++ b/gopls/internal/golang/highlight.go @@ -558,8 +558,6 @@ func highlightIdentifier(id *ast.Ident, file *ast.File, info *types.Info, result highlightWriteInExpr(n.Chan) case *ast.CompositeLit: t := info.TypeOf(n) - // Every expression should have a type; - // work around https://github.com/golang/go/issues/69092. if t == nil { t = types.Typ[types.Invalid] } diff --git a/gopls/internal/golang/hover.go b/gopls/internal/golang/hover.go index a0622fd764e..8e7febeaab3 100644 --- a/gopls/internal/golang/hover.go +++ b/gopls/internal/golang/hover.go @@ -15,6 +15,7 @@ import ( "go/format" "go/token" "go/types" + "go/version" "io/fs" "path/filepath" "sort" @@ -52,6 +53,10 @@ import ( // TODO(adonovan): see if we can wean all clients of this interface. type hoverJSON struct { // Synopsis is a single sentence synopsis of the symbol's documentation. + // + // TODO(adonovan): in what syntax? It (usually) comes from doc.Synopsis, + // which produces "Text" form, but it may be fed to + // DocCommentToMarkdown, which expects doc comment syntax. Synopsis string `json:"synopsis"` // FullDocumentation is the symbol's full documentation. @@ -76,10 +81,6 @@ type hoverJSON struct { // For example, the "Node" part of "pkg.go.dev/go/ast#Node". 
LinkAnchor string `json:"linkAnchor"` - // stdVersion is the Go release version at which this symbol became available. - // It is nil for non-std library. - stdVersion *stdlib.Version - // New fields go below, and are unexported. The existing // exported fields are underspecified and have already // constrained our movements too much. A detailed JSON @@ -99,6 +100,10 @@ type hoverJSON struct { // fields of a (struct) type that were promoted through an // embedded field. promotedFields string + + // footer is additional content to insert at the bottom of the hover + // documentation, before the pkgdoc link. + footer string } // Hover implements the "textDocument/hover" RPC for Go files. @@ -598,9 +603,9 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro linkPath = strings.Replace(linkPath, mod.Path, mod.Path+"@"+mod.Version, 1) } - var version *stdlib.Version - if symbol := StdSymbolOf(obj); symbol != nil { - version = &symbol.Version + var footer string + if sym := StdSymbolOf(obj); sym != nil && sym.Version > 0 { + footer = fmt.Sprintf("Added in %v", sym.Version) } return *hoverRange, &hoverJSON{ @@ -614,7 +619,7 @@ func hover(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pp pro typeDecl: typeDecl, methods: methods, promotedFields: fields, - stdVersion: version, + footer: footer, }, nil } @@ -729,6 +734,7 @@ func hoverImport(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Packa docText := comment.Text() return rng, &hoverJSON{ + Signature: "package " + string(impMetadata.Name), Synopsis: doc.Synopsis(docText), FullDocumentation: docText, }, nil @@ -749,11 +755,47 @@ func hoverPackageName(pkg *cache.Package, pgf *parsego.File) (protocol.Range, *h return protocol.Range{}, nil, err } docText := comment.Text() + + // List some package attributes at the bottom of the documentation, if + // applicable. + type attr struct{ title, value string } + var attrs []attr + + if !metadata.IsCommandLineArguments(pkg.Metadata().ID) { + attrs = append(attrs, attr{"Package path", string(pkg.Metadata().PkgPath)}) + } + + if pkg.Metadata().Module != nil { + attrs = append(attrs, attr{"Module", pkg.Metadata().Module.Path}) + } + + // Show the effective language version for this package. + if v := pkg.TypesInfo().FileVersions[pgf.File]; v != "" { + attr := attr{value: version.Lang(v)} + if v == pkg.Types().GoVersion() { + attr.title = "Language version" + } else { + attr.title = "Language version (current file)" + } + attrs = append(attrs, attr) + } + + // TODO(rfindley): consider exec'ing go here to compute DefaultGODEBUG, or + // propose adding GODEBUG info to go/packages. + + var footer string + for i, attr := range attrs { + if i > 0 { + footer += "\n" + } + footer += fmt.Sprintf(" - %s: %s", attr.title, attr.value) + } + return rng, &hoverJSON{ + Signature: "package " + string(pkg.Metadata().Name), Synopsis: doc.Synopsis(docText), FullDocumentation: docText, - // Note: including a signature is redundant, since the cursor is already on the - // package name. + footer: footer, }, nil } @@ -1145,8 +1187,9 @@ func parseFull(ctx context.Context, snapshot *cache.Snapshot, fset *token.FileSe // If pkgURL is non-nil, it should be used to generate doc links. 
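The attribute list assembled in hoverPackageName above is rendered as a plain footer beneath the package documentation. As a rough illustration only (the package path, module, and version are invented for the example), hovering over a package clause might now end with:

	 - Package path: example.com/mod/internal/widget
	 - Module: example.com/mod
	 - Language version: go1.22

When the file's effective version differs from the package's GoVersion, the title switches to "Language version (current file)", as in the code above.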
func formatHover(h *hoverJSON, options *settings.Options, pkgURL func(path PackagePath, fragment string) protocol.URI) (string, error) { - maybeMarkdown := func(s string) string { - if s != "" && options.PreferredContentFormat == protocol.Markdown { + markdown := options.PreferredContentFormat == protocol.Markdown + maybeFenced := func(s string) string { + if s != "" && markdown { s = fmt.Sprintf("```go\n%s\n```", strings.Trim(s, "\n")) } return s @@ -1157,7 +1200,7 @@ func formatHover(h *hoverJSON, options *settings.Options, pkgURL func(path Packa return h.SingleLine, nil case settings.NoDocumentation: - return maybeMarkdown(h.Signature), nil + return maybeFenced(h.Signature), nil case settings.Structured: b, err := json.Marshal(h) @@ -1166,42 +1209,70 @@ func formatHover(h *hoverJSON, options *settings.Options, pkgURL func(path Packa } return string(b), nil - case settings.SynopsisDocumentation, - settings.FullDocumentation: + case settings.SynopsisDocumentation, settings.FullDocumentation: + var sections [][]string // assembled below + + // Signature section. + // // For types, we display TypeDecl and Methods, // but not Signature, which is redundant (= TypeDecl + "\n" + Methods). // For all other symbols, we display Signature; // TypeDecl and Methods are empty. // (This awkwardness is to preserve JSON compatibility.) - parts := []string{ - maybeMarkdown(h.Signature), - maybeMarkdown(h.typeDecl), - formatDoc(h, options), - maybeMarkdown(h.promotedFields), - maybeMarkdown(h.methods), - fmt.Sprintf("Added in %v", h.stdVersion), - formatLink(h, options, pkgURL), - } if h.typeDecl != "" { - parts[0] = "" // type: suppress redundant Signature + sections = append(sections, []string{maybeFenced(h.typeDecl)}) + } else { + sections = append(sections, []string{maybeFenced(h.Signature)}) } - if h.stdVersion == nil || *h.stdVersion == stdlib.Version(0) { - parts[5] = "" // suppress stdlib version if not applicable or initial version 1.0 + + // Doc section. + var doc string + switch options.HoverKind { + case settings.SynopsisDocumentation: + doc = h.Synopsis + case settings.FullDocumentation: + doc = h.FullDocumentation } + if options.PreferredContentFormat == protocol.Markdown { + doc = DocCommentToMarkdown(doc, options) + } + sections = append(sections, []string{ + doc, + maybeFenced(h.promotedFields), + maybeFenced(h.methods), + }) + + // Footer section. + sections = append(sections, []string{ + h.footer, + formatLink(h, options, pkgURL), + }) var b strings.Builder - for _, part := range parts { - if part == "" { - continue + newline := func() { + if options.PreferredContentFormat == protocol.Markdown { + b.WriteString("\n\n") + } else { + b.WriteByte('\n') } - if b.Len() > 0 { - if options.PreferredContentFormat == protocol.Markdown { - b.WriteString("\n\n") - } else { - b.WriteByte('\n') + } + for _, section := range sections { + start := b.Len() + for _, part := range section { + if part == "" { + continue + } + // When markdown is a available, insert an hline before the start of + // the section, if there is content above. 
+ if markdown && b.Len() == start && start > 0 { + newline() + b.WriteString("---") + } + if b.Len() > 0 { + newline() } + b.WriteString(part) } - b.WriteString(part) } return b.String(), nil @@ -1309,20 +1380,6 @@ func formatLink(h *hoverJSON, options *settings.Options, pkgURL func(path Packag } } -func formatDoc(h *hoverJSON, options *settings.Options) string { - var doc string - switch options.HoverKind { - case settings.SynopsisDocumentation: - doc = h.Synopsis - case settings.FullDocumentation: - doc = h.FullDocumentation - } - if options.PreferredContentFormat == protocol.Markdown { - return CommentToMarkdown(doc, options) - } - return doc -} - // findDeclInfo returns the syntax nodes involved in the declaration of the // types.Object with position pos, searching the given list of file syntax // trees. diff --git a/gopls/internal/golang/identifier.go b/gopls/internal/golang/identifier.go index 813b3261f87..fcfc6eb682f 100644 --- a/gopls/internal/golang/identifier.go +++ b/gopls/internal/golang/identifier.go @@ -175,7 +175,7 @@ Outer: var typ types.Type if assign, ok := ts.Assign.(*ast.AssignStmt); ok && len(assign.Rhs) == 1 { if rhs := assign.Rhs[0].(*ast.TypeAssertExpr); ok { - typ = info.TypeOf(rhs.X) + typ = info.TypeOf(rhs.X) // may be nil } } return objs, typ diff --git a/gopls/internal/golang/identifier_test.go b/gopls/internal/golang/identifier_test.go index d78d8fe99f5..8206d8731ae 100644 --- a/gopls/internal/golang/identifier_test.go +++ b/gopls/internal/golang/identifier_test.go @@ -11,8 +11,6 @@ import ( "go/token" "go/types" "testing" - - "golang.org/x/tools/internal/versions" ) func TestSearchForEnclosing(t *testing.T) { @@ -95,13 +93,13 @@ func posAt(line, column int, fset *token.FileSet, fname string) token.Pos { // newInfo returns a types.Info with all maps populated. func newInfo() *types.Info { info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - Scopes: make(map[ast.Node]*types.Scope), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + Scopes: make(map[ast.Node]*types.Scope), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) return info } diff --git a/gopls/internal/golang/inlay_hint.go b/gopls/internal/golang/inlay_hint.go index 6e2b7f40d33..478843fac98 100644 --- a/gopls/internal/golang/inlay_hint.go +++ b/gopls/internal/golang/inlay_hint.go @@ -51,7 +51,9 @@ func InlayHint(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle, pR q := typesutil.FileQualifier(pgf.File, pkg.Types(), info) // Set the range to the full file if the range is not valid. - start, end := pgf.File.Pos(), pgf.File.End() + start, end := pgf.File.FileStart, pgf.File.FileEnd + + // TODO(adonovan): this condition looks completely wrong! if pRng.Start.Line < pRng.End.Line || pRng.Start.Character < pRng.End.Character { // Adjust start and end for the specified range. 
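Several hunks in this change replace f.Pos() and f.End() with the ast.File fields FileStart and FileEnd. A minimal, self-contained sketch of the distinction (the source text here is made up for illustration):

package main

import (
	"fmt"
	"go/parser"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	src := "// Copyright notice.\n\npackage x\n"
	f, err := parser.ParseFile(fset, "x.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}
	// f.Pos() is the position of the "package" keyword and f.End() is the end
	// of the last declaration, so leading and trailing comments fall outside
	// [f.Pos(), f.End()). FileStart and FileEnd span the entire file.
	fmt.Println(f.FileStart < f.Pos()) // true: the copyright comment precedes the package clause
	fmt.Println(f.End() < f.FileEnd)   // true: the final newline follows the last token
}

Using FileStart also sidesteps positions that may be missing when a file lacks a valid package clause.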
var err error diff --git a/gopls/internal/golang/pkgdoc.go b/gopls/internal/golang/pkgdoc.go index ed8f1b388f0..3df2019a1f9 100644 --- a/gopls/internal/golang/pkgdoc.go +++ b/gopls/internal/golang/pkgdoc.go @@ -282,7 +282,7 @@ func PackageDocHTML(viewID string, pkg *cache.Package, web Web) ([]byte, error) // TODO(adonovan): simulate that too. fileMap := make(map[string]*ast.File) for _, f := range pkg.Syntax() { - fileMap[pkg.FileSet().File(f.Pos()).Name()] = f + fileMap[pkg.FileSet().File(f.FileStart).Name()] = f } astpkg := &ast.Package{ Name: pkg.Types().Name(), @@ -326,68 +326,34 @@ func PackageDocHTML(viewID string, pkg *cache.Package, web Web) ([]byte, error) }) } - var docHTML func(comment string) []byte + // docHTML renders the doc comment as Markdown. + // The fileNode is used to deduce the enclosing file + // for the correct import mapping. + // + // It is not concurrency-safe. + var docHTML func(fileNode ast.Node, comment string) []byte { // Adapt doc comment parser and printer // to our representation of Go packages // so that doc links (e.g. "[fmt.Println]") // become valid links. - - printer := docpkg.Printer() - printer.DocLinkURL = func(link *comment.DocLink) string { - path := pkg.Metadata().PkgPath - if link.ImportPath != "" { - path = PackagePath(link.ImportPath) - } - fragment := link.Name - if link.Recv != "" { - fragment = link.Recv + "." + link.Name - } - return web.PkgURL(viewID, path, fragment) - } - parser := docpkg.Parser() - parser.LookupPackage = func(name string) (importPath string, ok bool) { - // Ambiguous: different files in the same - // package may have different import mappings, - // but the hook doesn't provide the file context. - // TODO(adonovan): conspire with docHTML to - // pass the doc comment's enclosing file through - // a shared variable, so that we can compute - // the correct per-file mapping. - // - // TODO(adonovan): check for PkgName.Name - // matches, but also check for - // PkgName.Imported.Namer matches, since some - // packages are typically imported under a - // non-default name (e.g. pathpkg "path") but - // may be referred to in doc links using their - // canonical name. - for _, f := range pkg.Syntax() { - for _, imp := range f.Imports { - pkgName := pkg.TypesInfo().PkgNameOf(imp) - if pkgName != nil && pkgName.Name() == name { - return pkgName.Imported().Path(), true - } + printer := &comment.Printer{ + DocLinkURL: func(link *comment.DocLink) string { + path := pkg.Metadata().PkgPath + if link.ImportPath != "" { + path = PackagePath(link.ImportPath) } - } - return "", false - } - parser.LookupSym = func(recv, name string) (ok bool) { - // package-level decl? - if recv == "" { - return pkg.Types().Scope().Lookup(name) != nil - } - - // method? - tname, ok := pkg.Types().Scope().Lookup(recv).(*types.TypeName) - if !ok { - return false - } - m, _, _ := types.LookupFieldOrMethod(tname.Type(), true, pkg.Types(), name) - return is[*types.Func](m) + fragment := link.Name + if link.Recv != "" { + fragment = link.Recv + "." + link.Name + } + return web.PkgURL(viewID, path, fragment) + }, } - docHTML = func(comment string) []byte { - return printer.HTML(parser.Parse(comment)) + parse := newDocCommentParser(pkg) + docHTML = func(fileNode ast.Node, comment string) []byte { + doc := parse(fileNode, comment) + return printer.HTML(doc) } } @@ -715,7 +681,12 @@ window.addEventListener('load', function() { "https://pkg.go.dev/"+string(pkg.Types().Path())) // package doc - fmt.Fprintf(&buf, "
%s\n", docHTML(docpkg.Doc))
+	for _, f := range pkg.Syntax() {
+		if f.Doc != nil {
+			fmt.Fprintf(&buf, "%s\n", docHTML(f.Doc, docpkg.Doc))
+			break
+		}
+	}
	// symbol index
	fmt.Fprintf(&buf, "Index\n")
@@ -773,7 +744,7 @@ window.addEventListener('load', function() {
	fmt.Fprintf(&buf, "%s\n", nodeHTML(&decl2))
	// comment (if any)
-	fmt.Fprintf(&buf, "%s\n", docHTML(v.Doc))
+	fmt.Fprintf(&buf, "%s\n", docHTML(v.Decl, v.Doc))
+	}
	}
	fmt.Fprintf(&buf, "Constants\n")
@@ -814,7 +785,7 @@
	nodeHTML(docfn.Decl.Type))
	// comment (if any)
-	fmt.Fprintf(&buf, "%s\n", docHTML(docfn.Doc))
+	fmt.Fprintf(&buf, "%s\n", docHTML(docfn.Decl, docfn.Doc))
+	}
	}
	funcs(docpkg.Funcs)
@@ -835,7 +806,7 @@
	fmt.Fprintf(&buf, "%s\n", nodeHTML(&decl2))
	// comment (if any)
-	fmt.Fprintf(&buf, "%s\n", docHTML(doctype.Doc))
+	fmt.Fprintf(&buf, "%s\n", docHTML(doctype.Decl, doctype.Doc))
+	// subelements
	values(doctype.Consts) // constants of type T
@@ -856,7 +827,7 @@
	// comment (if any)
	fmt.Fprintf(&buf, "%s
\n", - docHTML(docmethod.Doc)) + docHTML(docmethod.Decl, docmethod.Doc)) } } diff --git a/gopls/internal/golang/rename.go b/gopls/internal/golang/rename.go index c2633dcd315..7ff5857f186 100644 --- a/gopls/internal/golang/rename.go +++ b/gopls/internal/golang/rename.go @@ -1420,7 +1420,7 @@ func parsePackageNameDecl(ctx context.Context, snapshot *cache.Snapshot, fh file // enclosingFile returns the CompiledGoFile of pkg that contains the specified position. func enclosingFile(pkg *cache.Package, pos token.Pos) (*parsego.File, bool) { for _, pgf := range pkg.CompiledGoFiles() { - if pgf.File.Pos() <= pos && pos <= pgf.File.End() { + if pgf.File.FileStart <= pos && pos <= pgf.File.FileEnd { return pgf, true } } diff --git a/gopls/internal/golang/semtok.go b/gopls/internal/golang/semtok.go index e008d8cdaea..4e24dafc23f 100644 --- a/gopls/internal/golang/semtok.go +++ b/gopls/internal/golang/semtok.go @@ -556,7 +556,7 @@ func (tv *tokenVisitor) ident(id *ast.Ident) { case *types.Builtin: emit(semtok.TokFunction, "defaultLibrary") case *types.Const: - if is[*types.Named](obj.Type()) && + if is[*types.Basic](obj.Type()) && (id.Name == "iota" || id.Name == "true" || id.Name == "false") { emit(semtok.TokVariable, "readonly", "defaultLibrary") } else { diff --git a/gopls/internal/golang/signature_help.go b/gopls/internal/golang/signature_help.go index dfdce041ff6..6680a14378c 100644 --- a/gopls/internal/golang/signature_help.go +++ b/gopls/internal/golang/signature_help.go @@ -223,7 +223,7 @@ func stringToSigInfoDocumentation(s string, options *settings.Options) *protocol v := s k := protocol.PlainText if options.PreferredContentFormat == protocol.Markdown { - v = CommentToMarkdown(s, options) + v = DocCommentToMarkdown(s, options) // whether or not content is newline terminated may not matter for LSP clients, // but our tests expect trailing newlines to be stripped. v = strings.TrimSuffix(v, "\n") // TODO(pjw): change the golden files diff --git a/gopls/internal/golang/stub.go b/gopls/internal/golang/stub.go index ca5f0055c3b..036c1f959eb 100644 --- a/gopls/internal/golang/stub.go +++ b/gopls/internal/golang/stub.go @@ -12,7 +12,6 @@ import ( "go/parser" "go/token" "go/types" - "io" pathpkg "path" "strings" @@ -28,99 +27,60 @@ import ( "golang.org/x/tools/internal/tokeninternal" ) -// stubMethodsFixer returns a suggested fix to declare the missing +// stubMissingInterfaceMethodsFixer returns a suggested fix to declare the missing // methods of the concrete type that is assigned to an interface type // at the cursor position. -func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { +func stubMissingInterfaceMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) - si := stubmethods.GetStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start) + si := stubmethods.GetIfaceStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start) if si == nil { return nil, nil, fmt.Errorf("nil interface request") } + return insertDeclsAfter(ctx, snapshot, pkg.Metadata(), si.Fset, si.Concrete.Obj(), si.Emit) +} - // A function-local type cannot be stubbed - // since there's nowhere to put the methods. - // TODO(adonovan): move this check into GetStubInfo instead of offering a bad fix. 
- conc := si.Concrete.Obj() - if conc.Parent() != conc.Pkg().Scope() { - return nil, nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name()) +// stubMissingCalledFunctionFixer returns a suggested fix to declare the missing +// method that the user may want to generate based on CallExpr +// at the cursor position. +func stubMissingCalledFunctionFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache.Package, pgf *parsego.File, start, end token.Pos) (*token.FileSet, *analysis.SuggestedFix, error) { + nodes, _ := astutil.PathEnclosingInterval(pgf.File, start, end) + si := stubmethods.GetCallStubInfo(pkg.FileSet(), pkg.TypesInfo(), nodes, start) + if si == nil { + return nil, nil, fmt.Errorf("invalid type request") } + return insertDeclsAfter(ctx, snapshot, pkg.Metadata(), si.Fset, si.After, si.Emit) +} - // Parse the file declaring the concrete type. +// An emitter writes new top-level declarations into an existing +// file. References to symbols should be qualified using qual, which +// respects the local import environment. +type emitter = func(out *bytes.Buffer, qual types.Qualifier) error + +// insertDeclsAfter locates the file that declares symbol sym, +// (which must be among the dependencies of mp), +// calls the emit function to generate new declarations, +// respecting the local import environment, +// and splices those declarations into the file after the declaration of sym, +// updating imports as needed. +// +// fset must provide the position of sym. +func insertDeclsAfter(ctx context.Context, snapshot *cache.Snapshot, mp *metadata.Package, fset *token.FileSet, sym types.Object, emit emitter) (*token.FileSet, *analysis.SuggestedFix, error) { + // Parse the file declaring the sym. // // Beware: declPGF is not necessarily covered by pkg.FileSet() or si.Fset. - declPGF, _, err := parseFull(ctx, snapshot, si.Fset, conc.Pos()) + declPGF, _, err := parseFull(ctx, snapshot, fset, sym.Pos()) if err != nil { - return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation type: %w", declPGF.URI, err) + return nil, nil, fmt.Errorf("failed to parse file %q declaring implementation symbol: %w", declPGF.URI, err) } if declPGF.Fixed() { return nil, nil, fmt.Errorf("file contains parse errors: %s", declPGF.URI) } - // Find metadata for the concrete type's declaring package + // Find metadata for the symbol's declaring package // as we'll need its import mapping. - declMeta := findFileInDeps(snapshot, pkg.Metadata(), declPGF.URI) + declMeta := findFileInDeps(snapshot, mp, declPGF.URI) if declMeta == nil { - return nil, nil, bug.Errorf("can't find metadata for file %s among dependencies of %s", declPGF.URI, pkg) - } - - // Record all direct methods of the current object - concreteFuncs := make(map[string]struct{}) - for i := 0; i < si.Concrete.NumMethods(); i++ { - concreteFuncs[si.Concrete.Method(i).Name()] = struct{}{} - } - - // Find subset of interface methods that the concrete type lacks. 
- ifaceType := si.Interface.Type().Underlying().(*types.Interface) - - type missingFn struct { - fn *types.Func - needSubtle string - } - - var ( - missing []missingFn - concreteStruct, isStruct = si.Concrete.Origin().Underlying().(*types.Struct) - ) - - for i := 0; i < ifaceType.NumMethods(); i++ { - imethod := ifaceType.Method(i) - cmethod, index, _ := types.LookupFieldOrMethod(si.Concrete, si.Pointer, imethod.Pkg(), imethod.Name()) - if cmethod == nil { - missing = append(missing, missingFn{fn: imethod}) - continue - } - - if _, ok := cmethod.(*types.Var); ok { - // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow. - return nil, nil, fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field", - conc.Name(), imethod.Name()) - } - - if _, exist := concreteFuncs[imethod.Name()]; exist { - if !types.Identical(cmethod.Type(), imethod.Type()) { - return nil, nil, fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s", - conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type()) - } - continue - } - - mf := missingFn{fn: imethod} - if isStruct && len(index) > 0 { - field := concreteStruct.Field(index[0]) - - fn := field.Name() - if is[*types.Pointer](field.Type()) { - fn = "*" + fn - } - - mf.needSubtle = fmt.Sprintf("// Subtle: this method shadows the method (%s).%s of %s.%s.\n", fn, imethod.Name(), si.Concrete.Obj().Name(), field.Name()) - } - - missing = append(missing, mf) - } - if len(missing) == 0 { - return nil, nil, fmt.Errorf("no missing methods found") + return nil, nil, bug.Errorf("can't find metadata for file %s among dependencies of %s", declPGF.URI, mp) } // Build import environment for the declaring file. @@ -167,7 +127,7 @@ func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache. // TODO(adonovan): don't ignore vendor prefix. // // Ignore the current package import. - if pkg.Path() == conc.Pkg().Path() { + if pkg.Path() == sym.Pkg().Path() { return "" } @@ -178,7 +138,7 @@ func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache. // // TODO(adonovan): resolve conflict between declared // name and existing file-level (declPGF.File.Imports) - // or package-level (si.Concrete.Pkg.Scope) decls by + // or package-level (sym.Pkg.Scope) decls by // generating a fresh name. name = pkg.Name() importEnv[importPath] = name @@ -193,71 +153,13 @@ func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache. return name } - // Format interface name (used only in a comment). - iface := si.Interface.Name() - if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() { - iface = ipkg.Name() + "." + iface - } - - // Pointer receiver? - var star string - if si.Pointer { - star = "*" - } - - // If there are any that have named receiver, choose the first one. - // Otherwise, use lowercase for the first letter of the object. - rn := strings.ToLower(si.Concrete.Obj().Name()[0:1]) - for i := 0; i < si.Concrete.NumMethods(); i++ { - if recv := si.Concrete.Method(i).Signature().Recv(); recv.Name() != "" { - rn = recv.Name() - break - } - } - - // Check for receiver name conflicts - checkRecvName := func(tuple *types.Tuple) bool { - for i := 0; i < tuple.Len(); i++ { - if rn == tuple.At(i).Name() { - return true - } - } - return false - } - - // Format the new methods. 
- var newMethods bytes.Buffer - - for index := range missing { - mrn := rn + " " - sig := missing[index].fn.Signature() - if checkRecvName(sig.Params()) || checkRecvName(sig.Results()) { - mrn = "" - } - - fmt.Fprintf(&newMethods, `// %s implements %s. -%sfunc (%s%s%s%s) %s%s { - panic("unimplemented") -} -`, - missing[index].fn.Name(), - iface, - missing[index].needSubtle, - mrn, - star, - si.Concrete.Obj().Name(), - FormatTypeParams(si.Concrete.TypeParams()), - missing[index].fn.Name(), - strings.TrimPrefix(types.TypeString(missing[index].fn.Type(), qual), "func")) - } - - // Compute insertion point for new methods: + // Compute insertion point for new declarations: // after the top-level declaration enclosing the (package-level) type. insertOffset, err := safetoken.Offset(declPGF.Tok, declPGF.File.End()) if err != nil { return nil, nil, bug.Errorf("internal error: end position outside file bounds: %v", err) } - concOffset, err := safetoken.Offset(si.Fset.File(conc.Pos()), conc.Pos()) + symOffset, err := safetoken.Offset(fset.File(sym.Pos()), sym.Pos()) if err != nil { return nil, nil, bug.Errorf("internal error: finding type decl offset: %v", err) } @@ -266,22 +168,25 @@ func stubMethodsFixer(ctx context.Context, snapshot *cache.Snapshot, pkg *cache. if err != nil { return nil, nil, bug.Errorf("internal error: finding decl offset: %v", err) } - if declEndOffset > concOffset { + if declEndOffset > symOffset { insertOffset = declEndOffset break } } - // Splice the new methods into the file content. + // Splice the new declarations into the file content. var buf bytes.Buffer input := declPGF.Mapper.Content // unfixed content of file buf.Write(input[:insertOffset]) buf.WriteByte('\n') - io.Copy(&buf, &newMethods) + err = emit(&buf, qual) + if err != nil { + return nil, nil, err + } buf.Write(input[insertOffset:]) // Re-parse the file. - fset := token.NewFileSet() + fset = token.NewFileSet() newF, err := parser.ParseFile(fset, declPGF.URI.Path(), buf.Bytes(), parser.ParseComments|parser.SkipObjectResolution) if err != nil { return nil, nil, fmt.Errorf("could not reparse file: %w", err) diff --git a/gopls/internal/golang/stubmethods/stubcalledfunc.go b/gopls/internal/golang/stubmethods/stubcalledfunc.go new file mode 100644 index 00000000000..0b6c1052182 --- /dev/null +++ b/gopls/internal/golang/stubmethods/stubcalledfunc.go @@ -0,0 +1,369 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stubmethods + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + "unicode" + + "golang.org/x/tools/gopls/internal/util/typesutil" + "golang.org/x/tools/internal/typesinternal" +) + +var anyType = types.Universe.Lookup("any").Type() + +// CallStubInfo represents a missing method +// that a receiver type is about to generate +// which has "type X has no field or method Y" error +type CallStubInfo struct { + Fset *token.FileSet // the FileSet used to type-check the types below + Receiver typesinternal.NamedOrAlias // the method's receiver type + MethodName string + After types.Object // decl after which to insert the new decl + pointer bool + info *types.Info + path []ast.Node // path enclosing the CallExpr +} + +// GetCallStubInfo extracts necessary information to generate a method definition from +// a CallExpr. 
+func GetCallStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *CallStubInfo { + for i, n := range path { + switch n := n.(type) { + case *ast.CallExpr: + s, ok := n.Fun.(*ast.SelectorExpr) + // TODO: support generating stub functions in the same way. + if !ok { + return nil + } + + // If recvExpr is a package name, compiler error would be + // e.g., "undefined: http.bar", thus will not hit this code path. + recvExpr := s.X + recvType, pointer := concreteType(recvExpr, info) + + if recvType == nil || recvType.Obj().Pkg() == nil { + return nil + } + + // A method of a function-local type cannot be stubbed + // since there's nowhere to put the methods. + recv := recvType.Obj() + if recv.Parent() != recv.Pkg().Scope() { + return nil + } + + after := types.Object(recv) + // If the enclosing function declaration is a method declaration, + // and matches the receiver type of the diagnostic, + // insert after the enclosing method. + decl, ok := path[len(path)-2].(*ast.FuncDecl) + if ok && decl.Recv != nil { + if len(decl.Recv.List) != 1 { + return nil + } + mrt := info.TypeOf(decl.Recv.List[0].Type) + if mrt != nil && types.Identical(types.Unalias(typesinternal.Unpointer(mrt)), recv.Type()) { + after = info.ObjectOf(decl.Name) + } + } + return &CallStubInfo{ + Fset: fset, + Receiver: recvType, + MethodName: s.Sel.Name, + After: after, + pointer: pointer, + path: path[i:], + info: info, + } + } + } + return nil +} + +// Emit writes to out the missing method based on type info of si.Receiver and CallExpr. +func (si *CallStubInfo) Emit(out *bytes.Buffer, qual types.Qualifier) error { + params := si.collectParams() + rets := typesFromContext(si.info, si.path, si.path[0].Pos()) + recv := si.Receiver.Obj() + // Pointer receiver? + var star string + if si.pointer { + star = "*" + } + + // Choose receiver name. + // If any method has a named receiver, choose the first one. + // Otherwise, use lowercase for the first letter of the object. + recvName := strings.ToLower(fmt.Sprintf("%.1s", recv.Name())) + if named, ok := types.Unalias(si.Receiver).(*types.Named); ok { + for i := 0; i < named.NumMethods(); i++ { + if recv := named.Method(i).Type().(*types.Signature).Recv(); recv.Name() != "" { + recvName = recv.Name() + break + } + } + } + + // Emit method declaration. + fmt.Fprintf(out, "\nfunc (%s %s%s%s) %s", + recvName, + star, + recv.Name(), + typesutil.FormatTypeParams(typesinternal.TypeParams(si.Receiver)), + si.MethodName) + + // Emit parameters, avoiding name conflicts. + seen := map[string]bool{recvName: true} + out.WriteString("(") + for i, param := range params { + name := param.name + if seen[name] { + name = fmt.Sprintf("param%d", i+1) + } + seen[name] = true + + if i > 0 { + out.WriteString(", ") + } + fmt.Fprintf(out, "%s %s", name, types.TypeString(param.typ, qual)) + } + out.WriteString(") ") + + // Emit result types. + if len(rets) > 1 { + out.WriteString("(") + } + for i, r := range rets { + if i > 0 { + out.WriteString(", ") + } + out.WriteString(types.TypeString(r, qual)) + } + if len(rets) > 1 { + out.WriteString(")") + } + + // Emit body. + out.WriteString(` { + panic("unimplemented") +}`) + return nil +} + +type param struct { + name string + typ types.Type // the type of param, inferred from CallExpr +} + +// collectParams gathers the parameter information needed to generate a method stub. +// The param's type default to any if there is a type error in the argument. 
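To make the intent of GetCallStubInfo, collectParams, and typesFromContext concrete, here is a small sketch of the kind of fix they aim to produce; the type, method, and variable names are invented for the example:

package demo

type Fetcher struct{}

func use(f *Fetcher) (string, error) {
	var name string
	var err error
	// Before the fix, this call reports
	// "f.Lookup undefined (type *Fetcher has no field or method Lookup)".
	name, err = f.Lookup("record", 7)
	return name, err
}

// Lookup is roughly the declaration the fix inserts after the receiver type:
// parameter names and types come from the call arguments, and result types
// from the left-hand side of the assignment.
func (f *Fetcher) Lookup(s string, i int) (string, error) {
	panic("unimplemented")
}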
+func (si *CallStubInfo) collectParams() []param { + var params []param + appendParam := func(e ast.Expr, t types.Type) { + p := param{"param", anyType} + if t != nil && !containsInvalid(t) { + t = types.Default(t) + p = param{paramName(e, t), t} + } + params = append(params, p) + } + + args := si.path[0].(*ast.CallExpr).Args + for _, arg := range args { + t := si.info.TypeOf(arg) + switch t := t.(type) { + // This is the case where another function call returning multiple + // results is used as an argument. + case *types.Tuple: + for ti := 0; ti < t.Len(); ti++ { + appendParam(arg, t.At(ti).Type()) + } + default: + appendParam(arg, t) + } + } + return params +} + +// typesFromContext returns the type (or perhaps zero or multiple types) +// of the "hole" into which the expression identified by path must fit. +// +// For example, given +// +// s, i := "", 0 +// s, i = EXPR +// +// the hole that must be filled by EXPR has type (string, int). +// +// It returns nil on failure. +func typesFromContext(info *types.Info, path []ast.Node, pos token.Pos) []types.Type { + var typs []types.Type + parent := parentNode(path) + if parent == nil { + return nil + } + switch parent := parent.(type) { + case *ast.AssignStmt: + // Append all lhs's type + if len(parent.Rhs) == 1 { + for _, lhs := range parent.Lhs { + t := info.TypeOf(lhs) + if t != nil && !containsInvalid(t) { + t = types.Default(t) + } else { + t = anyType + } + typs = append(typs, t) + } + break + } + + // Lhs and Rhs counts do not match, give up + if len(parent.Lhs) != len(parent.Rhs) { + break + } + + // Append corresponding index of lhs's type + for i, rhs := range parent.Rhs { + if rhs.Pos() <= pos && pos <= rhs.End() { + t := info.TypeOf(parent.Lhs[i]) + if t != nil && !containsInvalid(t) { + t = types.Default(t) + } else { + t = anyType + } + typs = append(typs, t) + break + } + } + case *ast.CallExpr: + // Find argument containing pos. + argIdx := -1 + for i, callArg := range parent.Args { + if callArg.Pos() <= pos && pos <= callArg.End() { + argIdx = i + break + } + } + if argIdx == -1 { + break + } + + t := info.TypeOf(parent.Fun) + if t == nil { + break + } + + if sig, ok := t.Underlying().(*types.Signature); ok { + var paramType types.Type + if sig.Variadic() && argIdx >= sig.Params().Len()-1 { + v := sig.Params().At(sig.Params().Len() - 1) + if s, _ := v.Type().(*types.Slice); s != nil { + paramType = s.Elem() + } + } else if argIdx < sig.Params().Len() { + paramType = sig.Params().At(argIdx).Type() + } else { + break + } + if paramType == nil || containsInvalid(paramType) { + paramType = anyType + } + typs = append(typs, paramType) + } + default: + // TODO: support other common kinds of "holes", e.g. + // x + EXPR => typeof(x) + // !EXPR => bool + // var x int = EXPR => int + // etc. + } + return typs +} + +// parentNode returns the nodes immediately enclosing path[0], +// ignoring parens. +func parentNode(path []ast.Node) ast.Node { + if len(path) <= 1 { + return nil + } + for _, n := range path[1:] { + if _, ok := n.(*ast.ParenExpr); !ok { + return n + } + } + return nil +} + +// containsInvalid checks if the type name contains "invalid type", +// which is not a valid syntax to generate. +func containsInvalid(t types.Type) bool { + typeString := types.TypeString(t, nil) + return strings.Contains(typeString, types.Typ[types.Invalid].String()) +} + +// paramName heuristically chooses a parameter name from +// its argument expression and type. Caller should ensure +// typ is non-nil. 
+func paramName(e ast.Expr, typ types.Type) string { + if typ == types.Universe.Lookup("error").Type() { + return "err" + } + switch t := e.(type) { + // Use the identifier's name as the argument name. + case *ast.Ident: + return t.Name + // Use the Sel.Name's last section as the argument name. + case *ast.SelectorExpr: + return lastSection(t.Sel.Name) + } + + typ = typesinternal.Unpointer(typ) + switch t := typ.(type) { + // Use the first character of the type name as the argument name for builtin types + case *types.Basic: + return t.Name()[:1] + case *types.Slice: + return paramName(e, t.Elem()) + case *types.Array: + return paramName(e, t.Elem()) + case *types.Signature: + return "f" + case *types.Map: + return "m" + case *types.Chan: + return "ch" + case *types.Named: + return lastSection(t.Obj().Name()) + default: + return lastSection(t.String()) + } +} + +// lastSection find the position of the last uppercase letter, +// extract the substring from that point onward, +// and convert it to lowercase. +// +// Example: lastSection("registryManagerFactory") = "factory" +func lastSection(identName string) string { + lastUpperIndex := -1 + for i, r := range identName { + if unicode.IsUpper(r) { + lastUpperIndex = i + } + } + if lastUpperIndex != -1 { + last := identName[lastUpperIndex:] + return strings.ToLower(last) + } else { + return identName + } +} diff --git a/gopls/internal/golang/stubmethods/stubmethods.go b/gopls/internal/golang/stubmethods/stubmethods.go index ee7b525a6a0..dbfcefd9e16 100644 --- a/gopls/internal/golang/stubmethods/stubmethods.go +++ b/gopls/internal/golang/stubmethods/stubmethods.go @@ -8,18 +8,23 @@ package stubmethods import ( + "bytes" "fmt" "go/ast" "go/token" "go/types" + "golang.org/x/tools/internal/typesinternal" + "strings" + + "golang.org/x/tools/gopls/internal/util/typesutil" ) // TODO(adonovan): eliminate the confusing Fset parameter; only the // file name and byte offset of Concrete are needed. -// StubInfo represents a concrete type +// IfaceStubInfo represents a concrete type // that wants to stub out an interface type -type StubInfo struct { +type IfaceStubInfo struct { // Interface is the interface that the client wants to implement. // When the interface is defined, the underlying object will be a TypeName. // Note that we keep track of types.Object instead of types.Type in order @@ -29,11 +34,11 @@ type StubInfo struct { // TODO(marwan-at-work): implement interface literals. Fset *token.FileSet // the FileSet used to type-check the types below Interface *types.TypeName - Concrete *types.Named - Pointer bool + Concrete typesinternal.NamedOrAlias + pointer bool } -// GetStubInfo determines whether the "missing method error" +// GetIfaceStubInfo determines whether the "missing method error" // can be used to deduced what the concrete and interface types are. // // TODO(adonovan): this function (and its following 5 helpers) tries @@ -42,7 +47,7 @@ type StubInfo struct { // function call. This is essentially what the refactor/satisfy does, // more generally. Refactor to share logic, after auditing 'satisfy' // for safety on ill-typed code. 
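By contrast, the interface-based path below (GetIfaceStubInfo and its Emit method, whose body is moved here from the old stubMethodsFixer) fills in whichever interface methods a concrete type still lacks. A sketch of the before and after, with invented names:

package demo

import "io"

type logFile struct{}

// An assignment like this is one of the sites GetIfaceStubInfo recognizes;
// before the fix, *logFile is missing Read and Close.
var _ io.ReadCloser = &logFile{}

// Emit appends declarations shaped roughly like these after the type:

// Read implements io.ReadCloser.
func (l *logFile) Read(p []byte) (n int, err error) {
	panic("unimplemented")
}

// Close implements io.ReadCloser.
func (l *logFile) Close() error {
	panic("unimplemented")
}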
-func GetStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *StubInfo { +func GetIfaceStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos token.Pos) *IfaceStubInfo { for _, n := range path { switch n := n.(type) { case *ast.ValueSpec: @@ -70,10 +75,133 @@ func GetStubInfo(fset *token.FileSet, info *types.Info, path []ast.Node, pos tok return nil } +// Emit writes to out the missing methods of si.Concrete required for it to implement si.Interface +func (si *IfaceStubInfo) Emit(out *bytes.Buffer, qual types.Qualifier) error { + conc := si.Concrete.Obj() + // Record all direct methods of the current object + concreteFuncs := make(map[string]struct{}) + if named, ok := types.Unalias(si.Concrete).(*types.Named); ok { + for i := 0; i < named.NumMethods(); i++ { + concreteFuncs[named.Method(i).Name()] = struct{}{} + } + } + + // Find subset of interface methods that the concrete type lacks. + ifaceType := si.Interface.Type().Underlying().(*types.Interface) + + type missingFn struct { + fn *types.Func + needSubtle string + } + + var ( + missing []missingFn + concreteStruct, isStruct = typesinternal.Origin(si.Concrete).Underlying().(*types.Struct) + ) + + for i := 0; i < ifaceType.NumMethods(); i++ { + imethod := ifaceType.Method(i) + cmethod, index, _ := types.LookupFieldOrMethod(si.Concrete, si.pointer, imethod.Pkg(), imethod.Name()) + if cmethod == nil { + missing = append(missing, missingFn{fn: imethod}) + continue + } + + if _, ok := cmethod.(*types.Var); ok { + // len(LookupFieldOrMethod.index) = 1 => conflict, >1 => shadow. + return fmt.Errorf("adding method %s.%s would conflict with (or shadow) existing field", + conc.Name(), imethod.Name()) + } + + if _, exist := concreteFuncs[imethod.Name()]; exist { + if !types.Identical(cmethod.Type(), imethod.Type()) { + return fmt.Errorf("method %s.%s already exists but has the wrong type: got %s, want %s", + conc.Name(), imethod.Name(), cmethod.Type(), imethod.Type()) + } + continue + } + + mf := missingFn{fn: imethod} + if isStruct && len(index) > 0 { + field := concreteStruct.Field(index[0]) + + fn := field.Name() + if _, ok := field.Type().(*types.Pointer); ok { + fn = "*" + fn + } + + mf.needSubtle = fmt.Sprintf("// Subtle: this method shadows the method (%s).%s of %s.%s.\n", fn, imethod.Name(), si.Concrete.Obj().Name(), field.Name()) + } + + missing = append(missing, mf) + } + if len(missing) == 0 { + return fmt.Errorf("no missing methods found") + } + + // Format interface name (used only in a comment). + iface := si.Interface.Name() + if ipkg := si.Interface.Pkg(); ipkg != nil && ipkg != conc.Pkg() { + iface = ipkg.Name() + "." + iface + } + + // Pointer receiver? + var star string + if si.pointer { + star = "*" + } + + // If there are any that have named receiver, choose the first one. + // Otherwise, use lowercase for the first letter of the object. 
+ rn := strings.ToLower(si.Concrete.Obj().Name()[0:1]) + if named, ok := types.Unalias(si.Concrete).(*types.Named); ok { + for i := 0; i < named.NumMethods(); i++ { + if recv := named.Method(i).Type().(*types.Signature).Recv(); recv.Name() != "" { + rn = recv.Name() + break + } + } + } + + // Check for receiver name conflicts + checkRecvName := func(tuple *types.Tuple) bool { + for i := 0; i < tuple.Len(); i++ { + if rn == tuple.At(i).Name() { + return true + } + } + return false + } + + for index := range missing { + mrn := rn + " " + sig := missing[index].fn.Signature() + if checkRecvName(sig.Params()) || checkRecvName(sig.Results()) { + mrn = "" + } + + fmt.Fprintf(out, `// %s implements %s. +%sfunc (%s%s%s%s) %s%s { + panic("unimplemented") +} +`, + missing[index].fn.Name(), + iface, + missing[index].needSubtle, + mrn, + star, + si.Concrete.Obj().Name(), + typesutil.FormatTypeParams(typesinternal.TypeParams(si.Concrete)), + missing[index].fn.Name(), + strings.TrimPrefix(types.TypeString(missing[index].fn.Type(), qual), "func")) + } + return nil +} + // fromCallExpr tries to find an *ast.CallExpr's function declaration and // analyzes a function call's signature against the passed in parameter to deduce // the concrete and interface types. -func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *ast.CallExpr) *StubInfo { +func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *ast.CallExpr) *IfaceStubInfo { // Find argument containing pos. argIdx := -1 var arg ast.Expr @@ -116,10 +244,10 @@ func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *as if iface == nil { return nil } - return &StubInfo{ + return &IfaceStubInfo{ Fset: fset, Concrete: concType, - Pointer: pointer, + pointer: pointer, Interface: iface, } } @@ -128,8 +256,8 @@ func fromCallExpr(fset *token.FileSet, info *types.Info, pos token.Pos, call *as // a concrete type that is trying to be returned as an interface type. // // For example, func() io.Writer { return myType{} } -// would return StubInfo with the interface being io.Writer and the concrete type being myType{}. -func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path []ast.Node, ret *ast.ReturnStmt) (*StubInfo, error) { +// would return StubIfaceInfo with the interface being io.Writer and the concrete type being myType{}. +func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path []ast.Node, ret *ast.ReturnStmt) (*IfaceStubInfo, error) { // Find return operand containing pos. 
returnIdx := -1 for i, r := range ret.Results { @@ -146,6 +274,11 @@ func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path [ if concType == nil || concType.Obj().Pkg() == nil { return nil, nil } + conc := concType.Obj() + if conc.Parent() != conc.Pkg().Scope() { + return nil, fmt.Errorf("local type %q cannot be stubbed", conc.Name()) + } + funcType := enclosingFunction(path, info) if funcType == nil { return nil, fmt.Errorf("could not find the enclosing function of the return statement") @@ -159,17 +292,17 @@ func fromReturnStmt(fset *token.FileSet, info *types.Info, pos token.Pos, path [ if iface == nil { return nil, nil } - return &StubInfo{ + return &IfaceStubInfo{ Fset: fset, Concrete: concType, - Pointer: pointer, + pointer: pointer, Interface: iface, }, nil } -// fromValueSpec returns *StubInfo from a variable declaration such as +// fromValueSpec returns *StubIfaceInfo from a variable declaration such as // var x io.Writer = &T{} -func fromValueSpec(fset *token.FileSet, info *types.Info, spec *ast.ValueSpec, pos token.Pos) *StubInfo { +func fromValueSpec(fset *token.FileSet, info *types.Info, spec *ast.ValueSpec, pos token.Pos) *IfaceStubInfo { // Find RHS element containing pos. var rhs ast.Expr for _, r := range spec.Values { @@ -193,22 +326,27 @@ func fromValueSpec(fset *token.FileSet, info *types.Info, spec *ast.ValueSpec, p if concType == nil || concType.Obj().Pkg() == nil { return nil } + conc := concType.Obj() + if conc.Parent() != conc.Pkg().Scope() { + return nil + } + ifaceObj := ifaceType(ifaceNode, info) if ifaceObj == nil { return nil } - return &StubInfo{ + return &IfaceStubInfo{ Fset: fset, Concrete: concType, Interface: ifaceObj, - Pointer: pointer, + pointer: pointer, } } -// fromAssignStmt returns *StubInfo from a variable assignment such as +// fromAssignStmt returns *StubIfaceInfo from a variable assignment such as // var x io.Writer // x = &T{} -func fromAssignStmt(fset *token.FileSet, info *types.Info, assign *ast.AssignStmt, pos token.Pos) *StubInfo { +func fromAssignStmt(fset *token.FileSet, info *types.Info, assign *ast.AssignStmt, pos token.Pos) *IfaceStubInfo { // The interface conversion error in an assignment is against the RHS: // // var x io.Writer @@ -243,11 +381,15 @@ func fromAssignStmt(fset *token.FileSet, info *types.Info, assign *ast.AssignStm if concType == nil || concType.Obj().Pkg() == nil { return nil } - return &StubInfo{ + conc := concType.Obj() + if conc.Parent() != conc.Pkg().Scope() { + return nil + } + return &IfaceStubInfo{ Fset: fset, Concrete: concType, Interface: ifaceObj, - Pointer: pointer, + pointer: pointer, } } diff --git a/gopls/internal/golang/types_format.go b/gopls/internal/golang/types_format.go index 41828244e11..55abb06a0ea 100644 --- a/gopls/internal/golang/types_format.go +++ b/gopls/internal/golang/types_format.go @@ -177,25 +177,6 @@ func formatFieldList(ctx context.Context, fset *token.FileSet, list *ast.FieldLi return result, writeResultParens } -// FormatTypeParams turns TypeParamList into its Go representation, such as: -// [T, Y]. Note that it does not print constraints as this is mainly used for -// formatting type params in method receivers. 
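The FormatTypeParams helper removed below printed only the type parameter names; its remaining callers now use typesutil.FormatTypeParams, as in the stubmethods hunks above. A short sketch of why constraints are deliberately omitted, using an invented generic type:

package demo

// For a generic type such as:
type Cache[K comparable, V any] struct{}

// a generated method needs only the parameter names in its receiver,
// which is what a "[K, V]" rendering provides; constraints would be
// illegal in that position.
func (c *Cache[K, V]) Get(k K) (V, bool) {
	panic("unimplemented")
}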
-func FormatTypeParams(tparams *types.TypeParamList) string { - if tparams == nil || tparams.Len() == 0 { - return "" - } - var buf bytes.Buffer - buf.WriteByte('[') - for i := 0; i < tparams.Len(); i++ { - if i > 0 { - buf.WriteString(", ") - } - buf.WriteString(tparams.At(i).Obj().Name()) - } - buf.WriteByte(']') - return buf.String() -} - // NewSignature returns formatted signature for a types.Signature struct. func NewSignature(ctx context.Context, s *cache.Snapshot, pkg *cache.Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) { var tparams []string diff --git a/gopls/internal/licenses/licenses_test.go b/gopls/internal/licenses/licenses_test.go index 00b6b6c94f7..c31b4e9e659 100644 --- a/gopls/internal/licenses/licenses_test.go +++ b/gopls/internal/licenses/licenses_test.go @@ -10,16 +10,9 @@ import ( "os/exec" "runtime" "testing" - - "golang.org/x/tools/internal/testenv" ) func TestLicenses(t *testing.T) { - // License text differs for older Go versions because staticcheck or gofumpt - // isn't supported for those versions, and this fails for unknown, unrelated - // reasons on Kokoro legacy CI. - testenv.NeedsGo1Point(t, 21) - if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { t.Skip("generating licenses only works on Unixes") } diff --git a/gopls/internal/mod/diagnostics.go b/gopls/internal/mod/diagnostics.go index 8da69313e49..a89c148d7a7 100644 --- a/gopls/internal/mod/diagnostics.go +++ b/gopls/internal/mod/diagnostics.go @@ -31,7 +31,7 @@ func ParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protoc ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) defer done() - return collectDiagnostics(ctx, snapshot, ModParseDiagnostics) + return collectDiagnostics(ctx, snapshot, parseDiagnostics) } // Diagnostics returns diagnostics from running go mod tidy. @@ -39,7 +39,7 @@ func TidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[protoco ctx, done := event.Start(ctx, "mod.Diagnostics", snapshot.Labels()...) defer done() - return collectDiagnostics(ctx, snapshot, ModTidyDiagnostics) + return collectDiagnostics(ctx, snapshot, tidyDiagnostics) } // UpgradeDiagnostics returns upgrade diagnostics for the modules in the @@ -48,7 +48,7 @@ func UpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (map[prot ctx, done := event.Start(ctx, "mod.UpgradeDiagnostics", snapshot.Labels()...) defer done() - return collectDiagnostics(ctx, snapshot, ModUpgradeDiagnostics) + return collectDiagnostics(ctx, snapshot, upgradeDiagnostics) } // VulnerabilityDiagnostics returns vulnerability diagnostics for the active modules in the @@ -57,7 +57,7 @@ func VulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot) (ma ctx, done := event.Start(ctx, "mod.VulnerabilityDiagnostics", snapshot.Labels()...) defer done() - return collectDiagnostics(ctx, snapshot, ModVulnerabilityDiagnostics) + return collectDiagnostics(ctx, snapshot, vulnerabilityDiagnostics) } func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn func(context.Context, *cache.Snapshot, file.Handle) ([]*cache.Diagnostic, error)) (map[protocol.DocumentURI][]*cache.Diagnostic, error) { @@ -94,8 +94,8 @@ func collectDiagnostics(ctx context.Context, snapshot *cache.Snapshot, diagFn fu return reports, nil } -// ModParseDiagnostics reports diagnostics from parsing the mod file. 
-func ModParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (diagnostics []*cache.Diagnostic, err error) { +// parseDiagnostics reports diagnostics from parsing the mod file. +func parseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (diagnostics []*cache.Diagnostic, err error) { pm, err := snapshot.ParseMod(ctx, fh) if err != nil { if pm == nil || len(pm.ParseErrors) == 0 { @@ -106,8 +106,8 @@ func ModParseDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file. return nil, nil } -// ModTidyDiagnostics reports diagnostics from running go mod tidy. -func ModTidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { +// tidyDiagnostics reports diagnostics from running go mod tidy. +func tidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) ([]*cache.Diagnostic, error) { pm, err := snapshot.ParseMod(ctx, fh) // memoized if err != nil { return nil, nil // errors reported by ModDiagnostics above @@ -132,9 +132,9 @@ func ModTidyDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.H return tidied.Diagnostics, nil } -// ModUpgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades +// upgradeDiagnostics adds upgrade quick fixes for individual modules if the upgrades // are recorded in the view. -func ModUpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (upgradeDiagnostics []*cache.Diagnostic, err error) { +func upgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (upgradeDiagnostics []*cache.Diagnostic, err error) { pm, err := snapshot.ParseMod(ctx, fh) if err != nil { // Don't return an error if there are parse error diagnostics to be shown, but also do not @@ -177,9 +177,9 @@ func ModUpgradeDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh fil const upgradeCodeActionPrefix = "Upgrade to " -// ModVulnerabilityDiagnostics adds diagnostics for vulnerabilities in individual modules +// vulnerabilityDiagnostics adds diagnostics for vulnerabilities in individual modules // if the vulnerability is recorded in the view. 
-func ModVulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (vulnDiagnostics []*cache.Diagnostic, err error) { +func vulnerabilityDiagnostics(ctx context.Context, snapshot *cache.Snapshot, fh file.Handle) (vulnDiagnostics []*cache.Diagnostic, err error) { pm, err := snapshot.ParseMod(ctx, fh) if err != nil { // Don't return an error if there are parse error diagnostics to be shown, but also do not diff --git a/gopls/internal/protocol/command/command_gen.go b/gopls/internal/protocol/command/command_gen.go index 10c6c043a09..829a3824bc0 100644 --- a/gopls/internal/protocol/command/command_gen.go +++ b/gopls/internal/protocol/command/command_gen.go @@ -27,6 +27,7 @@ const ( AddDependency Command = "gopls.add_dependency" AddImport Command = "gopls.add_import" AddTelemetryCounters Command = "gopls.add_telemetry_counters" + AddTest Command = "gopls.add_test" ApplyFix Command = "gopls.apply_fix" Assembly Command = "gopls.assembly" ChangeSignature Command = "gopls.change_signature" @@ -71,6 +72,7 @@ var Commands = []Command{ AddDependency, AddImport, AddTelemetryCounters, + AddTest, ApplyFix, Assembly, ChangeSignature, @@ -131,6 +133,12 @@ func Dispatch(ctx context.Context, params *protocol.ExecuteCommandParams, s Inte return nil, err } return nil, s.AddTelemetryCounters(ctx, a0) + case AddTest: + var a0 protocol.Location + if err := UnmarshalArgs(params.Arguments, &a0); err != nil { + return nil, err + } + return s.AddTest(ctx, a0) case ApplyFix: var a0 ApplyFixArgs if err := UnmarshalArgs(params.Arguments, &a0); err != nil { @@ -372,6 +380,14 @@ func NewAddTelemetryCountersCommand(title string, a0 AddTelemetryCountersArgs) * } } +func NewAddTestCommand(title string, a0 protocol.Location) *protocol.Command { + return &protocol.Command{ + Title: title, + Command: AddTest.String(), + Arguments: MustMarshalArgs(a0), + } +} + func NewApplyFixCommand(title string, a0 ApplyFixArgs) *protocol.Command { return &protocol.Command{ Title: title, diff --git a/gopls/internal/protocol/command/commandmeta/meta.go b/gopls/internal/protocol/command/commandmeta/meta.go index dcc366521e5..166fae61e04 100644 --- a/gopls/internal/protocol/command/commandmeta/meta.go +++ b/gopls/internal/protocol/command/commandmeta/meta.go @@ -248,7 +248,7 @@ func findField(pkg *packages.Package, pos token.Pos) (*ast.Field, error) { fset := pkg.Fset var file *ast.File for _, f := range pkg.Syntax { - if fset.File(f.Pos()).Name() == fset.File(pos).Name() { + if fset.File(f.FileStart).Name() == fset.File(pos).Name() { file = f break } diff --git a/gopls/internal/protocol/command/interface.go b/gopls/internal/protocol/command/interface.go index 98c5e6a061b..258e1008395 100644 --- a/gopls/internal/protocol/command/interface.go +++ b/gopls/internal/protocol/command/interface.go @@ -224,6 +224,9 @@ type Interface interface { // to avoid conflicts with other counters gopls collects. AddTelemetryCounters(context.Context, AddTelemetryCountersArgs) error + // AddTest: add a test for the selected function + AddTest(context.Context, protocol.Location) (*protocol.WorkspaceEdit, error) + // MaybePromptForTelemetry: Prompt user to enable telemetry // // Checks for the right conditions, and then prompts the user @@ -503,23 +506,15 @@ type VulncheckArgs struct { type RunVulncheckResult struct { // Token holds the progress token for LSP workDone reporting of the vulncheck // invocation. + // + // Deprecated: previously, this was used as a signal to retrieve the result + // using gopls.fetch_vulncheck_result. 
Clients should ignore this field: + // gopls.vulncheck now runs synchronously, and returns a result in the Result + // field. Token protocol.ProgressToken -} -// CallStack models a trace of function calls starting -// with a client function or method and ending with a -// call to a vulnerable symbol. -type CallStack []StackEntry - -// StackEntry models an element of a call stack. -type StackEntry struct { - // See golang.org/x/exp/vulncheck.StackEntry. - - // User-friendly representation of function/method names. - // e.g. package.funcName, package.(recvType).methodName, ... - Name string - URI protocol.DocumentURI - Pos protocol.Position // Start position. (0-based. Column is always 0) + // Result holds the result of running vulncheck. + Result *vulncheck.Result } // MemStatsResult holds selected fields from runtime.MemStats. diff --git a/gopls/internal/protocol/command/util.go b/gopls/internal/protocol/command/util.go index 7cd5662e3e1..d07cd863f1c 100644 --- a/gopls/internal/protocol/command/util.go +++ b/gopls/internal/protocol/command/util.go @@ -15,18 +15,6 @@ type Command string func (c Command) String() string { return string(c) } -// IsAsync reports whether the command is asynchronous: -// clients must wait for the "end" progress notification. -func (c Command) IsAsync() bool { - switch string(c) { - // TODO(adonovan): derive this list from interface.go somewhow. - // Unfortunately we can't even reference the enum from here... - case "gopls.run_tests", "gopls.run_govulncheck", "gopls.test": - return true - } - return false -} - // MarshalArgs encodes the given arguments to json.RawMessages. This function // is used to construct arguments to a protocol.Command. // diff --git a/gopls/internal/protocol/generate/tables.go b/gopls/internal/protocol/generate/tables.go index 5ac5d473580..2036e701d48 100644 --- a/gopls/internal/protocol/generate/tables.go +++ b/gopls/internal/protocol/generate/tables.go @@ -120,8 +120,6 @@ var usedDisambiguate = make(map[string]bool) var goplsType = map[string]string{ "And_RegOpt_textDocument_colorPresentation": "WorkDoneProgressOptionsAndTextDocumentRegistrationOptions", "ConfigurationParams": "ParamConfiguration", - "DocumentDiagnosticParams": "string", - "DocumentDiagnosticReport": "string", "DocumentUri": "DocumentURI", "InitializeParams": "ParamInitialize", "LSPAny": "interface{}", diff --git a/gopls/internal/protocol/tsprotocol.go b/gopls/internal/protocol/tsprotocol.go index 65b97f5b164..b0b01a4b69a 100644 --- a/gopls/internal/protocol/tsprotocol.go +++ b/gopls/internal/protocol/tsprotocol.go @@ -1747,6 +1747,16 @@ type DocumentDiagnosticParams struct { WorkDoneProgressParams PartialResultParams } + +// The result of a document diagnostic pull request. A report can +// either be a full report containing all diagnostics for the +// requested document or an unchanged report indicating that nothing +// has changed in terms of diagnostics in comparison to the last +// pull request. +// +// @since 3.17.0 +// +// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#documentDiagnosticReport type DocumentDiagnosticReport = Or_DocumentDiagnosticReport // (alias) // The document diagnostic report kinds. 
// diff --git a/gopls/internal/protocol/tsserver.go b/gopls/internal/protocol/tsserver.go index b405aae1b89..4e7df50cae1 100644 --- a/gopls/internal/protocol/tsserver.go +++ b/gopls/internal/protocol/tsserver.go @@ -64,7 +64,7 @@ type Server interface { // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_definition Definition(context.Context, *DefinitionParams) ([]Location, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_diagnostic - Diagnostic(context.Context, *string) (*string, error) + Diagnostic(context.Context, *DocumentDiagnosticParams) (*DocumentDiagnosticReport, error) // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didChange DidChange(context.Context, *DidChangeTextDocumentParams) error // See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification#textDocument_didClose @@ -387,7 +387,7 @@ func serverDispatch(ctx context.Context, server Server, reply jsonrpc2.Replier, return true, reply(ctx, resp, nil) case "textDocument/diagnostic": - var params string + var params DocumentDiagnosticParams if err := UnmarshalJSON(r.Params(), ¶ms); err != nil { return true, sendParseError(ctx, reply, err) } @@ -1030,8 +1030,8 @@ func (s *serverDispatcher) Definition(ctx context.Context, params *DefinitionPar } return result, nil } -func (s *serverDispatcher) Diagnostic(ctx context.Context, params *string) (*string, error) { - var result *string +func (s *serverDispatcher) Diagnostic(ctx context.Context, params *DocumentDiagnosticParams) (*DocumentDiagnosticReport, error) { + var result *DocumentDiagnosticReport if err := s.sender.Call(ctx, "textDocument/diagnostic", params, &result); err != nil { return nil, err } diff --git a/gopls/internal/server/command.go b/gopls/internal/server/command.go index 4f6f24d869f..403eadf0d2c 100644 --- a/gopls/internal/server/command.go +++ b/gopls/internal/server/command.go @@ -11,7 +11,6 @@ import ( "errors" "fmt" "io" - "log" "os" "path/filepath" "regexp" @@ -41,6 +40,7 @@ import ( "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/tokeninternal" "golang.org/x/tools/internal/xcontext" ) @@ -275,10 +275,28 @@ func (*commandHandler) AddTelemetryCounters(_ context.Context, args command.AddT return nil } +func (c *commandHandler) AddTest(ctx context.Context, loc protocol.Location) (*protocol.WorkspaceEdit, error) { + var result *protocol.WorkspaceEdit + err := c.run(ctx, commandConfig{ + forURI: loc.URI, + }, func(ctx context.Context, deps commandDeps) error { + if deps.snapshot.FileKind(deps.fh) != file.Go { + return fmt.Errorf("can't add test for non-Go file") + } + docedits, err := golang.AddTestForFunc(ctx, deps.snapshot, loc) + if err != nil { + return err + } + return applyChanges(ctx, c.s.client, docedits) + }) + // TODO(hxjiang): move the cursor to the new test once edits applied. + return result, err +} + // commandConfig configures common command set-up and execution. type commandConfig struct { requireSave bool // whether all files must be saved for the command to work - progress string // title to use for progress reporting. If empty, no progress will be reported. Mandatory for async commands. + progress string // title to use for progress reporting. If empty, no progress will be reported. 
forView string // view to resolve to a snapshot; incompatible with forURI forURI protocol.DocumentURI // URI to resolve to a snapshot. If unset, snapshot will be nil. } @@ -370,18 +388,6 @@ func (c *commandHandler) run(ctx context.Context, cfg commandConfig, run command return err } - if enum := command.Command(c.params.Command); enum.IsAsync() { - if cfg.progress == "" { - log.Fatalf("asynchronous command %q does not enable progress reporting", - enum) - } - go func() { - if err := runcmd(); err != nil { - showMessage(ctx, c.s.client, protocol.Error, err.Error()) - } - }() - return nil - } return runcmd() } @@ -400,16 +406,7 @@ func (c *commandHandler) ApplyFix(ctx context.Context, args command.ApplyFixArgs result = wsedit return nil } - resp, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: *wsedit, - }) - if err != nil { - return err - } - if !resp.Applied { - return errors.New(resp.FailureReason) - } - return nil + return applyChanges(ctx, c.s.client, changes) }) return result, err } @@ -634,17 +631,7 @@ func (c *commandHandler) RemoveDependency(ctx context.Context, args command.Remo if err != nil { return err } - response, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: *protocol.NewWorkspaceEdit( - protocol.DocumentChangeEdit(deps.fh, edits)), - }) - if err != nil { - return err - } - if !response.Applied { - return fmt.Errorf("edits not applied because of %s", response.FailureReason) - } - return nil + return applyChanges(ctx, c.s.client, []protocol.DocumentChange{protocol.DocumentChangeEdit(deps.fh, edits)}) }) } @@ -725,6 +712,7 @@ func (c *commandHandler) RunTests(ctx context.Context, args command.RunTestsArgs requireSave: true, // go test honors overlays, but tests themselves cannot forURI: args.URI, }, func(ctx context.Context, deps commandDeps) error { + jsonrpc2.Async(ctx) // don't block RPCs behind this command, since it can take a while return c.runTests(ctx, deps.snapshot, deps.work, args.URI, args.Tests, args.Benchmarks) }) } @@ -1118,17 +1106,7 @@ func (c *commandHandler) AddImport(ctx context.Context, args command.AddImportAr if err != nil { return fmt.Errorf("could not add import: %v", err) } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: *protocol.NewWorkspaceEdit( - protocol.DocumentChangeEdit(deps.fh, edits)), - }) - if err != nil { - return fmt.Errorf("could not apply import edits: %v", err) - } - if !r.Applied { - return fmt.Errorf("failed to apply edits: %v", r.FailureReason) - } - return nil + return applyChanges(ctx, c.s.client, []protocol.DocumentChange{protocol.DocumentChangeEdit(deps.fh, edits)}) }) } @@ -1137,18 +1115,11 @@ func (c *commandHandler) ExtractToNewFile(ctx context.Context, args protocol.Loc progress: "Extract to a new file", forURI: args.URI, }, func(ctx context.Context, deps commandDeps) error { - edit, err := golang.ExtractToNewFile(ctx, deps.snapshot, deps.fh, args.Range) + changes, err := golang.ExtractToNewFile(ctx, deps.snapshot, deps.fh, args.Range) if err != nil { return err } - resp, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{Edit: *edit}) - if err != nil { - return fmt.Errorf("could not apply edits: %v", err) - } - if !resp.Applied { - return fmt.Errorf("edits not applied: %s", resp.FailureReason) - } - return nil + return applyChanges(ctx, c.s.client, changes) }) } @@ -1233,23 +1204,25 @@ func (c *commandHandler) FetchVulncheckResult(ctx context.Context, arg command.U return ret, err } +const GoVulncheckCommandTitle = 
"govulncheck" + func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.VulncheckArgs) (command.RunVulncheckResult, error) { if args.URI == "" { return command.RunVulncheckResult{}, errors.New("VulncheckArgs is missing URI field") } - // Return the workdone token so that clients can identify when this - // vulncheck invocation is complete. - // - // Since the run function executes asynchronously, we use a channel to - // synchronize the start of the run and return the token. - tokenChan := make(chan protocol.ProgressToken, 1) + var commandResult command.RunVulncheckResult err := c.run(ctx, commandConfig{ - progress: "govulncheck", // (asynchronous) - requireSave: true, // govulncheck cannot honor overlays + progress: GoVulncheckCommandTitle, + requireSave: true, // govulncheck cannot honor overlays forURI: args.URI, }, func(ctx context.Context, deps commandDeps) error { - tokenChan <- deps.work.Token() + // For compatibility with the legacy asynchronous API, return the workdone + // token that clients used to use to identify when this vulncheck + // invocation is complete. + commandResult.Token = deps.work.Token() + + jsonrpc2.Async(ctx) // run this in parallel with other requests: vulncheck can be slow. workDoneWriter := progress.NewWorkDoneWriter(ctx, deps.work) dir := filepath.Dir(args.URI.Path()) @@ -1259,6 +1232,7 @@ func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.Vulnch if err != nil { return err } + commandResult.Result = result snapshot, release, err := c.s.session.InvalidateView(ctx, deps.snapshot.View(), cache.StateChange{ Vulns: map[protocol.DocumentURI]*vulncheck.Result{args.URI: result}, @@ -1295,12 +1269,7 @@ func (c *commandHandler) RunGovulncheck(ctx context.Context, args command.Vulnch if err != nil { return command.RunVulncheckResult{}, err } - select { - case <-ctx.Done(): - return command.RunVulncheckResult{}, ctx.Err() - case token := <-tokenChan: - return command.RunVulncheckResult{Token: token}, nil - } + return commandResult, nil } // MemStats implements the MemStats command. 
It returns an error as a @@ -1556,13 +1525,7 @@ func (c *commandHandler) ChangeSignature(ctx context.Context, args command.Chang result = wsedit return nil } - r, err := c.s.client.ApplyEdit(ctx, &protocol.ApplyWorkspaceEditParams{ - Edit: *wsedit, - }) - if !r.Applied { - return fmt.Errorf("failed to apply edits: %v", r.FailureReason) - } - return nil + return applyChanges(ctx, c.s.client, docedits) }) return result, err } diff --git a/gopls/internal/server/completion.go b/gopls/internal/server/completion.go index 079db865fb5..6c185e93717 100644 --- a/gopls/internal/server/completion.go +++ b/gopls/internal/server/completion.go @@ -143,7 +143,7 @@ func toProtocolCompletionItems(candidates []completion.CompletionItem, surroundi doc := &protocol.Or_CompletionItem_documentation{ Value: protocol.MarkupContent{ Kind: protocol.Markdown, - Value: golang.CommentToMarkdown(candidate.Documentation, options), + Value: golang.DocCommentToMarkdown(candidate.Documentation, options), }, } if options.PreferredContentFormat != protocol.Markdown { diff --git a/gopls/internal/server/diagnostics.go b/gopls/internal/server/diagnostics.go index f4a32d708e2..22eafaf2d2e 100644 --- a/gopls/internal/server/diagnostics.go +++ b/gopls/internal/server/diagnostics.go @@ -6,7 +6,6 @@ package server import ( "context" - "crypto/sha256" "errors" "fmt" "os" @@ -30,8 +29,52 @@ import ( "golang.org/x/tools/gopls/internal/work" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/jsonrpc2" ) +// Diagnostic implements the textDocument/diagnostic LSP request, reporting +// diagnostics for the given file. +// +// This is a work in progress. +// TODO(rfindley): +// - support RelatedDocuments? If so, how? Maybe include other package diagnostics? +// - support resultID (=snapshot ID) +// - support multiple views +// - add orphaned file diagnostics +// - support go.mod, go.work files +func (s *server) Diagnostic(ctx context.Context, params *protocol.DocumentDiagnosticParams) (*protocol.DocumentDiagnosticReport, error) { + ctx, done := event.Start(ctx, "server.Diagnostic") + defer done() + + fh, snapshot, release, err := s.fileOf(ctx, params.TextDocument.URI) + if err != nil { + return nil, err + } + defer release() + + jsonrpc2.Async(ctx) // allow asynchronous collection of diagnostics + + uri := fh.URI() + kind := snapshot.FileKind(fh) + var diagnostics []*cache.Diagnostic + switch kind { + case file.Go: + diagnostics, err = golang.DiagnoseFile(ctx, snapshot, uri) + if err != nil { + return nil, err + } + default: + return nil, fmt.Errorf("pull diagnostics not supported for this file kind") + } + return &protocol.DocumentDiagnosticReport{ + Value: protocol.RelatedFullDocumentDiagnosticReport{ + FullDocumentDiagnosticReport: protocol.FullDocumentDiagnosticReport{ + Items: toProtocolDiagnostics(diagnostics), + }, + }, + }, nil +} + // fileDiagnostics holds the current state of published diagnostics for a file. type fileDiagnostics struct { publishedHash file.Hash // hash of the last set of diagnostics published for this URI @@ -62,31 +105,6 @@ type ( diagMap = map[protocol.DocumentURI][]*cache.Diagnostic ) -// hashDiagnostic computes a hash to identify a diagnostic. -// The hash is for deduplicating within a file, -// so it need not incorporate d.URI. 
-func hashDiagnostic(d *cache.Diagnostic) file.Hash { - h := sha256.New() - for _, t := range d.Tags { - fmt.Fprintf(h, "tag: %s\n", t) - } - for _, r := range d.Related { - fmt.Fprintf(h, "related: %s %s %s\n", r.Location.URI, r.Message, r.Location.Range) - } - fmt.Fprintf(h, "code: %s\n", d.Code) - fmt.Fprintf(h, "codeHref: %s\n", d.CodeHref) - fmt.Fprintf(h, "message: %s\n", d.Message) - fmt.Fprintf(h, "range: %s\n", d.Range) - fmt.Fprintf(h, "severity: %s\n", d.Severity) - fmt.Fprintf(h, "source: %s\n", d.Source) - if d.BundledFixes != nil { - fmt.Fprintf(h, "fixes: %s\n", *d.BundledFixes) - } - var hash [sha256.Size]byte - h.Sum(hash[:0]) - return hash -} - func sortDiagnostics(d []*cache.Diagnostic) { sort.Slice(d, func(i int, j int) bool { a, b := d[i], d[j] @@ -503,15 +521,17 @@ func (s *server) diagnose(ctx context.Context, snapshot *cache.Snapshot) (diagMa // Merge analysis diagnostics with package diagnostics, and store the // resulting analysis diagnostics. + combinedDiags := make(diagMap) for uri, adiags := range analysisDiags { tdiags := pkgDiags[uri] - var tdiags2, adiags2 []*cache.Diagnostic - combineDiagnostics(tdiags, adiags, &tdiags2, &adiags2) - pkgDiags[uri] = tdiags2 - analysisDiags[uri] = adiags2 + combinedDiags[uri] = golang.CombineDiagnostics(tdiags, adiags) } - store("type checking", pkgDiags, nil) // error reported above - store("analyzing packages", analysisDiags, nil) // error reported above + for uri, tdiags := range pkgDiags { + if _, ok := combinedDiags[uri]; !ok { + combinedDiags[uri] = tdiags + } + } + store("type checking and analysing", combinedDiags, nil) // error reported above return diagnostics, nil } @@ -546,55 +566,6 @@ func (s *server) gcDetailsDiagnostics(ctx context.Context, snapshot *cache.Snaps return diagnostics, nil } -// combineDiagnostics combines and filters list/parse/type diagnostics from -// tdiags with adiags, and appends the two lists to *outT and *outA, -// respectively. -// -// Type-error analyzers produce diagnostics that are redundant -// with type checker diagnostics, but more detailed (e.g. fixes). -// Rather than report two diagnostics for the same problem, -// we combine them by augmenting the type-checker diagnostic -// and discarding the analyzer diagnostic. -// -// If an analysis diagnostic has the same range and message as -// a list/parse/type diagnostic, the suggested fix information -// (et al) of the latter is merged into a copy of the former. -// This handles the case where a type-error analyzer suggests -// a fix to a type error, and avoids duplication. -// -// The use of out-slices, though irregular, allows the caller to -// easily choose whether to keep the results separate or combined. -// -// The arguments are not modified. -func combineDiagnostics(tdiags []*cache.Diagnostic, adiags []*cache.Diagnostic, outT, outA *[]*cache.Diagnostic) { - - // Build index of (list+parse+)type errors. - type key struct { - Range protocol.Range - message string - } - index := make(map[key]int) // maps (Range,Message) to index in tdiags slice - for i, diag := range tdiags { - index[key{diag.Range, diag.Message}] = i - } - - // Filter out analysis diagnostics that match type errors, - // retaining their suggested fix (etc) fields. - for _, diag := range adiags { - if i, ok := index[key{diag.Range, diag.Message}]; ok { - copy := *tdiags[i] - copy.SuggestedFixes = diag.SuggestedFixes - copy.Tags = diag.Tags - tdiags[i] = © - continue - } - - *outA = append(*outA, diag) - } - - *outT = append(*outT, tdiags...) 
-} - // mustPublishDiagnostics marks the uri as needing publication, independent of // whether the published contents have changed. // @@ -815,7 +786,7 @@ func (s *server) publishFileDiagnosticsLocked(ctx context.Context, views viewSet // diagSuffixes records the set of view suffixes for a given diagnostic. diagSuffixes := make(map[file.Hash][]diagSuffix) add := func(diag *cache.Diagnostic, suffix string) { - h := hashDiagnostic(diag) + h := diag.Hash() diagSuffixes[h] = append(diagSuffixes[h], diagSuffix{diag, suffix}) } @@ -901,7 +872,7 @@ func (s *server) publishFileDiagnosticsLocked(ctx context.Context, views viewSet diag2 := *first.diag // shallow copy diag2.Message += first.suffix first.diag = &diag2 - h = hashDiagnostic(&diag2) // update the hash + h = diag2.Hash() // update the hash } hash.XORWith(h) @@ -925,6 +896,9 @@ func (s *server) publishFileDiagnosticsLocked(ctx context.Context, views viewSet } func toProtocolDiagnostics(diagnostics []*cache.Diagnostic) []protocol.Diagnostic { + // TODO(rfindley): support bundling edits, and bundle all suggested fixes here. + // (see cache.bundleLazyFixes). + reports := []protocol.Diagnostic{} for _, diag := range diagnostics { pdiag := protocol.Diagnostic{ diff --git a/gopls/internal/server/general.go b/gopls/internal/server/general.go index e330bd5bbc3..92e9729a0a6 100644 --- a/gopls/internal/server/general.go +++ b/gopls/internal/server/general.go @@ -108,6 +108,17 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ ResolveProvider: true, } } + + var diagnosticProvider *protocol.Or_ServerCapabilities_diagnosticProvider + if options.PullDiagnostics { + diagnosticProvider = &protocol.Or_ServerCapabilities_diagnosticProvider{ + Value: protocol.DiagnosticOptions{ + InterFileDependencies: true, + WorkspaceDiagnostics: false, // we don't support workspace/diagnostic + }, + } + } + var renameOpts interface{} = true if r := params.Capabilities.TextDocument.Rename; r != nil && r.PrepareSupport { renameOpts = protocol.RenameOptions{ @@ -144,6 +155,7 @@ func (s *server) Initialize(ctx context.Context, params *protocol.ParamInitializ DocumentHighlightProvider: &protocol.Or_ServerCapabilities_documentHighlightProvider{Value: true}, DocumentLinkProvider: &protocol.DocumentLinkOptions{}, InlayHintProvider: protocol.InlayHintOptions{}, + DiagnosticProvider: diagnosticProvider, ReferencesProvider: &protocol.Or_ServerCapabilities_referencesProvider{Value: true}, RenameProvider: renameOpts, SelectionRangeProvider: &protocol.Or_ServerCapabilities_selectionRangeProvider{Value: true}, diff --git a/gopls/internal/server/prompt.go b/gopls/internal/server/prompt.go index 66329784a6f..7eb400cfbe0 100644 --- a/gopls/internal/server/prompt.go +++ b/gopls/internal/server/prompt.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" "strconv" + "testing" "time" "golang.org/x/telemetry" @@ -64,19 +65,6 @@ func (s *server) getenv(key string) string { return os.Getenv(key) } -// configDir returns the root of the gopls configuration dir. By default this -// is os.UserConfigDir/gopls, but it may be overridden for tests. -func (s *server) configDir() (string, error) { - if d := s.getenv(GoplsConfigDirEnvvar); d != "" { - return d, nil - } - userDir, err := os.UserConfigDir() - if err != nil { - return "", err - } - return filepath.Join(userDir, "gopls"), nil -} - // telemetryMode returns the current effective telemetry mode. // By default this is x/telemetry.Mode(), but it may be overridden for tests. 
func (s *server) telemetryMode() string { @@ -119,11 +107,20 @@ func (s *server) maybePromptForTelemetry(ctx context.Context, enabled bool) { } // Only prompt if we can read/write the prompt config file. - configDir, err := s.configDir() - if err != nil { - errorf("unable to determine config dir: %v", err) + configDir := s.getenv(GoplsConfigDirEnvvar) // set for testing + if configDir == "" && testing.Testing() { + // Unless tests set GoplsConfigDirEnvvar, the prompt is a no op. + // We don't want tests to interact with os.UserConfigDir(). return } + if configDir == "" { + userDir, err := os.UserConfigDir() + if err != nil { + errorf("unable to determine user config dir: %v", err) + return + } + configDir = filepath.Join(userDir, "gopls") + } // Read the current prompt file. diff --git a/gopls/internal/server/unimplemented.go b/gopls/internal/server/unimplemented.go index c293ee167a7..9347f42c42e 100644 --- a/gopls/internal/server/unimplemented.go +++ b/gopls/internal/server/unimplemented.go @@ -22,10 +22,6 @@ func (s *server) Declaration(context.Context, *protocol.DeclarationParams) (*pro return nil, notImplemented("Declaration") } -func (s *server) Diagnostic(context.Context, *string) (*string, error) { - return nil, notImplemented("Diagnostic") -} - func (s *server) DiagnosticWorkspace(context.Context, *protocol.WorkspaceDiagnosticParams) (*protocol.WorkspaceDiagnosticReport, error) { return nil, notImplemented("DiagnosticWorkspace") } diff --git a/gopls/internal/settings/analysis.go b/gopls/internal/settings/analysis.go index 86fa4766b51..6bb85f1beca 100644 --- a/gopls/internal/settings/analysis.go +++ b/gopls/internal/settings/analysis.go @@ -50,7 +50,6 @@ import ( "golang.org/x/tools/gopls/internal/analysis/fillreturns" "golang.org/x/tools/gopls/internal/analysis/infertypeargs" "golang.org/x/tools/gopls/internal/analysis/nonewvars" - "golang.org/x/tools/gopls/internal/analysis/norangeoverfunc" "golang.org/x/tools/gopls/internal/analysis/noresultvalues" "golang.org/x/tools/gopls/internal/analysis/simplifycompositelit" "golang.org/x/tools/gopls/internal/analysis/simplifyrange" @@ -60,7 +59,6 @@ import ( "golang.org/x/tools/gopls/internal/analysis/unusedvariable" "golang.org/x/tools/gopls/internal/analysis/useany" "golang.org/x/tools/gopls/internal/protocol" - "honnef.co/go/tools/staticcheck" ) // Analyzer augments a [analysis.Analyzer] with additional LSP configuration. @@ -108,32 +106,7 @@ func (a *Analyzer) String() string { return a.analyzer.String() } var DefaultAnalyzers = make(map[string]*Analyzer) // initialized below func init() { - // Emergency workaround for #67237 to allow standard library - // to use range over func: disable SSA-based analyses of - // go1.23 packages that use range-over-func. - suppressOnRangeOverFunc := func(a *analysis.Analyzer) { - a.Requires = append(a.Requires, norangeoverfunc.Analyzer) - } - // buildir is non-exported so we have to scan the Analysis.Requires graph to find it. - var buildir *analysis.Analyzer - for _, a := range staticcheck.Analyzers { - for _, req := range a.Analyzer.Requires { - if req.Name == "buildir" { - buildir = req - } - } - - // Temporarily disable SA4004 CheckIneffectiveLoop as - // it crashes when encountering go1.23 range-over-func - // (#67237, dominikh/go-tools#1494). 
- if a.Analyzer.Name == "SA4004" { - suppressOnRangeOverFunc(a.Analyzer) - } - } - if buildir != nil { - suppressOnRangeOverFunc(buildir) - } - + // The traditional vet suite: analyzers := []*Analyzer{ // The traditional vet suite: {analyzer: appends.Analyzer, enabled: true}, diff --git a/gopls/internal/settings/codeactionkind.go b/gopls/internal/settings/codeactionkind.go index fa06b90e7e3..16a2eecb2cb 100644 --- a/gopls/internal/settings/codeactionkind.go +++ b/gopls/internal/settings/codeactionkind.go @@ -79,12 +79,9 @@ const ( GoDoc protocol.CodeActionKind = "source.doc" GoFreeSymbols protocol.CodeActionKind = "source.freesymbols" GoTest protocol.CodeActionKind = "source.test" + AddTest protocol.CodeActionKind = "source.addTest" // gopls - // TODO(adonovan): we should not use this category as it will - // never be requested now that we no longer interpret "no kind - // restriction" as "quickfix" instead of "all kinds". - // We need another way to make docs discoverable. GoplsDocFeatures protocol.CodeActionKind = "gopls.doc.features" // refactor.rewrite @@ -105,5 +102,8 @@ const ( RefactorExtractVariable protocol.CodeActionKind = "refactor.extract.variable" RefactorExtractToNewFile protocol.CodeActionKind = "refactor.extract.toNewFile" - // Note: add new kinds to the SupportedCodeActions map in defaults.go too. + // Note: add new kinds to: + // - the SupportedCodeActions map in default.go + // - the codeActionProducers table in ../golang/codeaction.go + // - the docs in ../../doc/features/transformation.md ) diff --git a/gopls/internal/settings/default.go b/gopls/internal/settings/default.go index 25f3eae80f5..2f637f3d16d 100644 --- a/gopls/internal/settings/default.go +++ b/gopls/internal/settings/default.go @@ -136,6 +136,7 @@ func DefaultOptions(overrides ...func(*Options)) *Options { LinkifyShowMessage: false, IncludeReplaceInWorkspace: false, ZeroConfig: true, + AddTestSourceCodeAction: false, }, } }) diff --git a/gopls/internal/settings/settings.go b/gopls/internal/settings/settings.go index 719d0690b5a..02c59163609 100644 --- a/gopls/internal/settings/settings.go +++ b/gopls/internal/settings/settings.go @@ -135,11 +135,6 @@ type BuildOptions struct { // gopls has to do to keep your workspace up to date. ExpandWorkspaceToModule bool `status:"experimental"` - // AllowImplicitNetworkAccess disables GOPROXY=off, allowing implicit module - // downloads rather than requiring user action. This option will eventually - // be removed. - AllowImplicitNetworkAccess bool `status:"experimental"` - // StandaloneTags specifies a set of build constraints that identify // individual Go source files that make up the entire main package of an // executable. @@ -699,6 +694,17 @@ type InternalOptions struct { // dynamically creating build configurations for different modules, // directories, and GOOS/GOARCH combinations to cover open files. ZeroConfig bool + + // PullDiagnostics enables support for pull diagnostics. + // + // TODO(rfindley): make pull diagnostics robust, and remove this option, + // allowing pull diagnostics by default. + PullDiagnostics bool + + // AddTestSourceCodeAction enables support for adding test as a source code + // action. + // TODO(hxjiang): remove this option once the feature is implemented. 
+ AddTestSourceCodeAction bool } type SubdirWatchPatterns string @@ -979,6 +985,8 @@ func (o *Options) setOne(name string, value any) error { return setBool(&o.DeepCompletion, value) case "completeUnimported": return setBool(&o.CompleteUnimported, value) + case "addTestSourceCodeAction": + return setBool(&o.AddTestSourceCodeAction, value) case "completionBudget": return setDuration(&o.CompletionBudget, value) case "matcher": @@ -1132,10 +1140,7 @@ func (o *Options) setOne(name string, value any) error { return setBool(&o.AnalysisProgressReporting, value) case "allowImplicitNetworkAccess": - if err := setBool(&o.AllowImplicitNetworkAccess, value); err != nil { - return err - } - return softErrorf("gopls setting \"allowImplicitNetworkAccess\" is deprecated.\nPlease comment on https://go.dev/issue/66861 if this impacts your workflow.") + return deprecatedError("") case "standaloneTags": return setStringSlice(&o.StandaloneTags, value) @@ -1161,6 +1166,9 @@ func (o *Options) setOne(name string, value any) error { case "zeroConfig": return setBool(&o.ZeroConfig, value) + case "pullDiagnostics": + return setBool(&o.PullDiagnostics, value) + // deprecated and renamed settings // // These should never be deleted: there is essentially no cost diff --git a/gopls/internal/telemetry/cmd/stacks/stacks.go b/gopls/internal/telemetry/cmd/stacks/stacks.go index f3e7fae359b..1888267c021 100644 --- a/gopls/internal/telemetry/cmd/stacks/stacks.go +++ b/gopls/internal/telemetry/cmd/stacks/stacks.go @@ -324,8 +324,8 @@ func main() { } if prev := claimedBy[id]; prev != nil && prev != issue { - log.Printf("stack %s is claimed by issues #%d and #%d", - id, prev.Number, issue.Number) + log.Printf("stack %s is claimed by issues #%d and #%d:%s", + id, prev.Number, issue.Number, strings.ReplaceAll("\n"+stack, "\n", "\n- ")) continue } if false { @@ -788,7 +788,13 @@ func readPCLineTable(info Info) (map[string]FileLine, error) { // shallow-cloning just the desired revision. // (Skip if it's already cloned.) revDir := filepath.Join(stacksDir, info.Version) - if !fileExists(revDir) { + if !fileExists(filepath.Join(revDir, "go.mod")) { + // We check for presence of the go.mod file, + // not just the directory itself, as the /tmp reaper + // often removes stale files before removing their directories. + // Remove those stale directories now. + _ = os.RemoveAll(revDir) // ignore errors + log.Printf("cloning tools@gopls/%s", info.Version) if err := shallowClone(revDir, "https://go.googlesource.com/tools", "gopls/"+info.Version); err != nil { _ = os.RemoveAll(revDir) // ignore errors diff --git a/gopls/internal/template/completion.go b/gopls/internal/template/completion.go index dfacefc938e..dbb80cf2e3a 100644 --- a/gopls/internal/template/completion.go +++ b/gopls/internal/template/completion.go @@ -84,13 +84,15 @@ func inTemplate(fc *Parsed, pos protocol.Position) int { offset := fc.FromPosition(pos) // this could be a binary search, as the tokens are ordered for _, tk := range fc.tokens { - if tk.Start < offset && offset <= tk.End { + if tk.Start+len(Left) <= offset && offset+len(Right) <= tk.End { return tk.Start } } for _, x := range fc.elided { - if x > offset { - // fc.elided is sorted + if x+len(Left) > offset { + // fc.elided is sorted, and x is the position where a '{{' was replaced + // by ' '. We consider only cases where the replaced {{ is to the left + // of the cursor. 
break } // If the interval [x,offset] does not contain Left or Right diff --git a/gopls/internal/test/integration/bench/diagnostic_test.go b/gopls/internal/test/integration/bench/diagnostic_test.go new file mode 100644 index 00000000000..ce8a84d9eb2 --- /dev/null +++ b/gopls/internal/test/integration/bench/diagnostic_test.go @@ -0,0 +1,80 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bench + +import ( + "sync" + "testing" + + "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" + "golang.org/x/tools/gopls/internal/test/integration/fake" +) + +// BenchmarkDiagnosePackageFiles measures how long it takes to request +// diagnostics for 10 files in a single package, following a change to that +// package. +// +// This can be used to measure the efficiency of pull diagnostics +// (golang/go#53275). +func BenchmarkDiagnosePackageFiles(b *testing.B) { + if testing.Short() { + b.Skip("pull diagnostics are not supported by the benchmark dashboard baseline") + } + + env := getRepo(b, "kubernetes").newEnv(b, fake.EditorConfig{ + Settings: map[string]any{ + "pullDiagnostics": true, // currently required for pull diagnostic support + }, + }, "diagnosePackageFiles", false) + + // 10 arbitrary files in a single package. + files := []string{ + "pkg/kubelet/active_deadline.go", // 98 lines + "pkg/kubelet/active_deadline_test.go", // 95 lines + "pkg/kubelet/kubelet.go", // 2439 lines + "pkg/kubelet/kubelet_pods.go", // 2061 lines + "pkg/kubelet/kubelet_network.go", // 70 lines + "pkg/kubelet/kubelet_network_test.go", // 46 lines + "pkg/kubelet/pod_workers.go", // 1323 lines + "pkg/kubelet/pod_workers_test.go", // 1758 lines + "pkg/kubelet/runonce.go", // 175 lines + "pkg/kubelet/volume_host.go", // 297 lines + } + + env.Await(InitialWorkspaceLoad) + + for _, file := range files { + env.OpenFile(file) + } + + env.AfterChange() + + edit := makeEditFunc(env, files[0]) + + if stopAndRecord := startProfileIfSupported(b, env, qualifiedName("kubernetes", "diagnosePackageFiles")); stopAndRecord != nil { + defer stopAndRecord() + } + + b.ResetTimer() + + for i := 0; i < b.N; i++ { + edit() + var wg sync.WaitGroup + for _, file := range files { + wg.Add(1) + go func() { + defer wg.Done() + fileDiags := env.Diagnostics(file) + for _, d := range fileDiags { + if d.Severity == protocol.SeverityError { + b.Errorf("unexpected error diagnostic: %s", d.Message) + } + } + }() + } + wg.Wait() + } +} diff --git a/gopls/internal/test/integration/bench/didchange_test.go b/gopls/internal/test/integration/bench/didchange_test.go index 22e7ca2a11b..57ed01bbcd6 100644 --- a/gopls/internal/test/integration/bench/didchange_test.go +++ b/gopls/internal/test/integration/bench/didchange_test.go @@ -11,6 +11,7 @@ import ( "time" "golang.org/x/tools/gopls/internal/protocol" + . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/test/integration/fake" ) @@ -48,30 +49,49 @@ func BenchmarkDidChange(b *testing.B) { defer closeBuffer(b, env, test.file) // Insert the text we'll be modifying at the top of the file. 
- env.EditBuffer(test.file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) - env.AfterChange() - b.ResetTimer() + edit := makeEditFunc(env, test.file) if stopAndRecord := startProfileIfSupported(b, env, qualifiedName(test.repo, "didchange")); stopAndRecord != nil { defer stopAndRecord() } + b.ResetTimer() for i := 0; i < b.N; i++ { - edits := atomic.AddInt64(&editID, 1) - env.EditBuffer(test.file, protocol.TextEdit{ - Range: protocol.Range{ - Start: protocol.Position{Line: 0, Character: 0}, - End: protocol.Position{Line: 1, Character: 0}, - }, - // Increment the placeholder text, to ensure cache misses. - NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), - }) + edit() env.Await(env.StartedChange()) } }) } } +// makeEditFunc prepares the given file for incremental editing, by inserting a +// placeholder comment that will be overwritten with a new unique value by each +// call to the resulting function. While makeEditFunc awaits gopls to finish +// processing the initial edit, the callback for incremental edits does not +// await any gopls state. +// +// This is used for benchmarks that must repeatedly invalidate a file's +// contents. +// +// TODO(rfindley): use this throughout. +func makeEditFunc(env *Env, file string) func() { + // Insert the text we'll be modifying at the top of the file. + env.EditBuffer(file, protocol.TextEdit{NewText: "// __TEST_PLACEHOLDER_0__\n"}) + env.AfterChange() + + return func() { + edits := atomic.AddInt64(&editID, 1) + env.EditBuffer(file, protocol.TextEdit{ + Range: protocol.Range{ + Start: protocol.Position{Line: 0, Character: 0}, + End: protocol.Position{Line: 1, Character: 0}, + }, + // Increment the placeholder text, to ensure cache misses. + NewText: fmt.Sprintf("// __TEST_PLACEHOLDER_%d__\n", edits), + }) + } +} + func BenchmarkDiagnoseChange(b *testing.B) { for _, test := range didChangeTests { runChangeDiagnosticsBenchmark(b, test, false, "diagnoseChange") diff --git a/gopls/internal/test/integration/codelens/codelens_test.go b/gopls/internal/test/integration/codelens/codelens_test.go index 75b9fda1fbf..bb8ad95ee19 100644 --- a/gopls/internal/test/integration/codelens/codelens_test.go +++ b/gopls/internal/test/integration/codelens/codelens_test.go @@ -182,10 +182,10 @@ require golang.org/x/hello v1.2.3 if !found { t.Fatalf("found no command with the title %s", commandTitle) } - if _, err := env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ + if err := env.Editor.ExecuteCommand(env.Ctx, &protocol.ExecuteCommandParams{ Command: lens.Command.Command, Arguments: lens.Command.Arguments, - }); err != nil { + }, nil); err != nil { t.Fatal(err) } env.AfterChange() @@ -252,7 +252,8 @@ func TestUpgradeCodelens_ModVendor(t *testing.T) { // This test checks the regression of golang/go#66055. The upgrade codelens // should work in a mod vendor context (the test above using a go.work file // was not broken). - testenv.NeedsGo1Point(t, 22) + testenv.NeedsGoCommand1Point(t, 22) + const shouldUpdateDep = ` -- go.mod -- module mod.com/a diff --git a/gopls/internal/test/integration/codelens/gcdetails_test.go b/gopls/internal/test/integration/codelens/gcdetails_test.go index 359a7804ec4..67750382de0 100644 --- a/gopls/internal/test/integration/codelens/gcdetails_test.go +++ b/gopls/internal/test/integration/codelens/gcdetails_test.go @@ -14,16 +14,12 @@ import ( . 
"golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/test/integration/fake" "golang.org/x/tools/gopls/internal/util/bug" - "golang.org/x/tools/internal/testenv" ) func TestGCDetails_Toggle(t *testing.T) { if runtime.GOOS == "android" { t.Skipf("the gc details code lens doesn't work on Android") } - // The overlay portion of the test fails with go1.19. - // I'm not sure why and not inclined to investigate. - testenv.NeedsGo1Point(t, 20) const mod = ` -- go.mod -- diff --git a/gopls/internal/test/integration/diagnostics/diagnostics_test.go b/gopls/internal/test/integration/diagnostics/diagnostics_test.go index 195089ffce3..0b8895b3d31 100644 --- a/gopls/internal/test/integration/diagnostics/diagnostics_test.go +++ b/gopls/internal/test/integration/diagnostics/diagnostics_test.go @@ -72,7 +72,11 @@ module mod.com go 1.12 ` - Run(t, onlyMod, func(t *testing.T, env *Env) { + WithOptions( + Settings{ + "pullDiagnostics": true, + }, + ).Run(t, onlyMod, func(t *testing.T, env *Env) { env.CreateBuffer("main.go", `package main func m() { @@ -81,6 +85,9 @@ func m() { `) env.AfterChange(Diagnostics(env.AtRegexp("main.go", "log"))) env.SaveBuffer("main.go") + if got := env.Diagnostics("main.go"); len(got) != 0 { + t.Errorf("got %d diagnostics, want 0", len(got)) + } env.AfterChange(NoDiagnostics(ForFile("main.go"))) }) } @@ -121,8 +128,18 @@ const a = 2 ` func TestDiagnosticClearingOnEdit(t *testing.T) { - Run(t, badPackage, func(t *testing.T, env *Env) { + WithOptions( + Settings{ + "pullDiagnostics": true, + }, + ).Run(t, badPackage, func(t *testing.T, env *Env) { env.OpenFile("b.go") + + for _, f := range []string{"a.go", "b.go"} { + if got := env.Diagnostics(f); len(got) != 1 { + t.Errorf("textDocument/diagnostic(%s) returned %d diagnostics, want 1. Got %v", f, len(got), got) + } + } env.AfterChange( Diagnostics(env.AtRegexp("a.go", "a = 1")), Diagnostics(env.AtRegexp("b.go", "a = 2")), @@ -130,6 +147,11 @@ func TestDiagnosticClearingOnEdit(t *testing.T) { // Fix the error by editing the const name in b.go to `b`. env.RegexpReplace("b.go", "(a) = 2", "b") + for _, f := range []string{"a.go", "b.go"} { + if got := env.Diagnostics(f); len(got) != 0 { + t.Errorf("textDocument/diagnostic(%s) returned %d diagnostics, want 0. Got %v", f, len(got), got) + } + } env.AfterChange( NoDiagnostics(ForFile("a.go")), NoDiagnostics(ForFile("b.go")), @@ -313,7 +335,7 @@ func Hello() { InitialWorkspaceLoad, Diagnostics(env.AtRegexp("main.go", `"mod.com/bob"`)), ) - if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { + if _, err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { t.Fatal(err) } env.AfterChange( diff --git a/gopls/internal/test/integration/expectation.go b/gopls/internal/test/integration/expectation.go index 858daeee18a..f68f1de5e02 100644 --- a/gopls/internal/test/integration/expectation.go +++ b/gopls/internal/test/integration/expectation.go @@ -452,17 +452,27 @@ type WorkStatus struct { EndMsg string } -// CompletedProgress expects that workDone progress is complete for the given -// progress token. When non-nil WorkStatus is provided, it will be filled -// when the expectation is met. +// CompletedProgress expects that there is exactly one workDone progress with +// the given title, and is satisfied when that progress completes. If it is +// met, the corresponding status is written to the into argument. 
// -// If the token is not a progress token that the client has seen, this -// expectation is Unmeetable. -func CompletedProgress(token protocol.ProgressToken, into *WorkStatus) Expectation { +// TODO(rfindley): refactor to eliminate the redundancy with CompletedWork. +// This expectation is a vestige of older workarounds for asynchronous command +// execution. +func CompletedProgress(title string, into *WorkStatus) Expectation { check := func(s State) Verdict { - work, ok := s.work[token] - if !ok { - return Unmeetable // TODO(rfindley): refactor to allow the verdict to explain this result + var work *workProgress + for _, w := range s.work { + if w.title == title { + if work != nil { + // TODO(rfindley): refactor to allow the verdict to explain this result + return Unmeetable // multiple matches + } + work = w + } + } + if work == nil { + return Unmeetable // zero matches } if work.complete { if into != nil { @@ -473,7 +483,7 @@ func CompletedProgress(token protocol.ProgressToken, into *WorkStatus) Expectati } return Unmet } - desc := fmt.Sprintf("completed work for token %v", token) + desc := fmt.Sprintf("exactly 1 completed workDoneProgress with title %v", title) return Expectation{ Check: check, Description: desc, diff --git a/gopls/internal/test/integration/fake/editor.go b/gopls/internal/test/integration/fake/editor.go index 876d055da21..466e833f269 100644 --- a/gopls/internal/test/integration/fake/editor.go +++ b/gopls/internal/test/integration/fake/editor.go @@ -388,6 +388,12 @@ func clientCapabilities(cfg EditorConfig) (protocol.ClientCapabilities, error) { return capabilities, nil } +// Returns the connected LSP server's capabilities. +// Only populated after a call to [Editor.Connect]. +func (e *Editor) ServerCapabilities() protocol.ServerCapabilities { + return e.serverCapabilities +} + // marshalUnmarshal is a helper to json Marshal and then Unmarshal as a // different type. Used to work around cases where our protocol types are not // specific. @@ -1008,10 +1014,10 @@ func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction // Execute any commands. The specification says that commands are // executed after edits are applied. if action.Command != nil { - if _, err := e.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + if err := e.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ Command: action.Command.Command, Arguments: action.Command.Arguments, - }); err != nil { + }, nil); err != nil { return err } } @@ -1019,6 +1025,42 @@ func (e *Editor) ApplyCodeAction(ctx context.Context, action protocol.CodeAction return e.sandbox.Workdir.CheckForFileChanges(ctx) } +func (e *Editor) Diagnostics(ctx context.Context, path string) ([]protocol.Diagnostic, error) { + if e.Server == nil { + return nil, errors.New("not connected") + } + e.mu.Lock() + capabilities := e.serverCapabilities.DiagnosticProvider + e.mu.Unlock() + + if capabilities == nil { + return nil, errors.New("server does not support pull diagnostics") + } + switch capabilities.Value.(type) { + case nil: + return nil, errors.New("server does not support pull diagnostics") + case protocol.DiagnosticOptions: + case protocol.DiagnosticRegistrationOptions: + // We could optionally check TextDocumentRegistrationOptions here to + // see if any filters apply to path. 
+ default: + panic(fmt.Sprintf("unknown DiagnosticsProvider type %T", capabilities.Value)) + } + + params := &protocol.DocumentDiagnosticParams{ + TextDocument: e.TextDocumentIdentifier(path), + } + result, err := e.Server.Diagnostic(ctx, params) + if err != nil { + return nil, err + } + report, ok := result.Value.(protocol.RelatedFullDocumentDiagnosticReport) + if !ok { + return nil, fmt.Errorf("unexpected diagnostics report type %T", result) + } + return report.Items, nil +} + // GetQuickFixes returns the available quick fix code actions. func (e *Editor) GetQuickFixes(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic) ([]protocol.CodeAction, error) { return e.CodeActions(ctx, loc, diagnostics, protocol.QuickFix, protocol.SourceFixAll) @@ -1042,6 +1084,8 @@ func (e *Editor) applyCodeActions(ctx context.Context, loc protocol.Location, di return applied, nil } +// TODO(rfindley): add missing documentation to exported methods here. + func (e *Editor) CodeActions(ctx context.Context, loc protocol.Location, diagnostics []protocol.Diagnostic, only ...protocol.CodeActionKind) ([]protocol.CodeAction, error) { if e.Server == nil { return nil, nil @@ -1056,9 +1100,35 @@ func (e *Editor) CodeActions(ctx context.Context, loc protocol.Location, diagnos return e.Server.CodeAction(ctx, params) } -func (e *Editor) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams) (interface{}, error) { +func (e *Editor) ExecuteCodeLensCommand(ctx context.Context, path string, cmd command.Command, result any) error { + lenses, err := e.CodeLens(ctx, path) + if err != nil { + return err + } + var lens protocol.CodeLens + var found bool + for _, l := range lenses { + if l.Command.Command == cmd.String() { + lens = l + found = true + } + } + if !found { + return fmt.Errorf("found no command with the ID %s", cmd) + } + return e.ExecuteCommand(ctx, &protocol.ExecuteCommandParams{ + Command: lens.Command.Command, + Arguments: lens.Command.Arguments, + }, result) +} + +// ExecuteCommand makes a workspace/executeCommand request to the connected LSP +// server, if any. +// +// Result contains a pointer to a variable to be populated by json.Unmarshal. +func (e *Editor) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCommandParams, result any) error { if e.Server == nil { - return nil, nil + return nil } var match bool if e.serverCapabilities.ExecuteCommandProvider != nil { @@ -1071,18 +1141,37 @@ func (e *Editor) ExecuteCommand(ctx context.Context, params *protocol.ExecuteCom } } if !match { - return nil, fmt.Errorf("unsupported command %q", params.Command) + return fmt.Errorf("unsupported command %q", params.Command) } - result, err := e.Server.ExecuteCommand(ctx, params) + response, err := e.Server.ExecuteCommand(ctx, params) if err != nil { - return nil, err + return err } // Some commands use the go command, which writes directly to disk. // For convenience, check for those changes. if err := e.sandbox.Workdir.CheckForFileChanges(ctx); err != nil { - return nil, fmt.Errorf("checking for file changes: %v", err) + return fmt.Errorf("checking for file changes: %v", err) + } + if result != nil { + // ExecuteCommand already unmarshalled the response without knowing + // its schema, using the generic map[string]any representation. + // Encode and decode again, this time into a typed variable. 
+ // + // This could be improved by generating a jsonrpc2 command client from the + // command.Interface, but that should only be done if we're consolidating + // this part of the tsprotocol generation. + // + // TODO(rfindley): we could also improve this by having ExecuteCommand return + // a json.RawMessage, similar to what we do with arguments. + data, err := json.Marshal(response) + if err != nil { + return bug.Errorf("marshalling response: %v", err) + } + if err := json.Unmarshal(data, result); err != nil { + return fmt.Errorf("unmarshalling response: %v", err) + } } - return result, nil + return nil } // FormatBuffer gofmts a Go file. @@ -1141,7 +1230,7 @@ func (e *Editor) RunGenerate(ctx context.Context, dir string) error { Command: cmd.Command, Arguments: cmd.Arguments, } - if _, err := e.ExecuteCommand(ctx, params); err != nil { + if err := e.ExecuteCommand(ctx, params, nil); err != nil { return fmt.Errorf("running generate: %v", err) } // Unfortunately we can't simply poll the workdir for file changes here, diff --git a/gopls/internal/test/integration/fake/sandbox.go b/gopls/internal/test/integration/fake/sandbox.go index fcaa50f0a76..7adf3e3e4a9 100644 --- a/gopls/internal/test/integration/fake/sandbox.go +++ b/gopls/internal/test/integration/fake/sandbox.go @@ -234,10 +234,10 @@ func (sb *Sandbox) goCommandInvocation() gocommand.Invocation { return inv } -// RunGoCommand executes a go command in the sandbox. If checkForFileChanges is -// true, the sandbox scans the working directory and emits file change events -// for any file changes it finds. -func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env []string, checkForFileChanges bool) error { +// RunGoCommand executes a go command in the sandbox and returns its standard +// output. If checkForFileChanges is true, the sandbox scans the working +// directory and emits file change events for any file changes it finds. +func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env []string, checkForFileChanges bool) ([]byte, error) { inv := sb.goCommandInvocation() inv.Verb = verb inv.Args = args @@ -247,7 +247,7 @@ func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env } stdout, stderr, _, err := sb.goCommandRunner.RunRaw(ctx, inv) if err != nil { - return fmt.Errorf("go command failed (stdout: %s) (stderr: %s): %v", stdout.String(), stderr.String(), err) + return nil, fmt.Errorf("go command failed (stdout: %s) (stderr: %s): %v", stdout.String(), stderr.String(), err) } // Since running a go command may result in changes to workspace files, // check if we need to send any "watched" file events. @@ -256,10 +256,10 @@ func (sb *Sandbox) RunGoCommand(ctx context.Context, dir, verb string, args, env // for benchmarks. Consider refactoring. if sb.Workdir != nil && checkForFileChanges { if err := sb.Workdir.CheckForFileChanges(ctx); err != nil { - return fmt.Errorf("checking for file changes: %w", err) + return nil, fmt.Errorf("checking for file changes: %w", err) } } - return nil + return stdout.Bytes(), nil } // GoVersion checks the version of the go command. 
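As an aside, a minimal sketch of how the updated Sandbox.RunGoCommand above might be used now that it returns the go command's standard output; the helper name sandboxGoModcache, the `go env GOMODCACHE` invocation, and the assumed imports (context, strings, and the fake package from this repository) are illustrative assumptions, not part of this change:
// sandboxGoModcache reports the sandbox's GOMODCACHE directory by running
// `go env GOMODCACHE` inside the sandbox and trimming the trailing newline
// from the captured stdout. (Sketch only, under the assumptions stated above.)
func sandboxGoModcache(ctx context.Context, sb *fake.Sandbox) (string, error) {
	out, err := sb.RunGoCommand(ctx, "", "env", []string{"GOMODCACHE"}, nil, false)
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(out)), nil
}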
@@ -275,7 +275,7 @@ func (sb *Sandbox) Close() error { if sb.gopath != "" { // Important: run this command in RootDir so that it doesn't interact with // any toolchain downloads that may occur - goCleanErr = sb.RunGoCommand(context.Background(), sb.RootDir(), "clean", []string{"-modcache"}, nil, false) + _, goCleanErr = sb.RunGoCommand(context.Background(), sb.RootDir(), "clean", []string{"-modcache"}, nil, false) } err := robustio.RemoveAll(sb.rootdir) if err != nil || goCleanErr != nil { diff --git a/gopls/internal/test/integration/fake/workdir.go b/gopls/internal/test/integration/fake/workdir.go index be3cb3bcf15..54fabb358c3 100644 --- a/gopls/internal/test/integration/fake/workdir.go +++ b/gopls/internal/test/integration/fake/workdir.go @@ -73,7 +73,7 @@ func writeFileData(path string, content []byte, rel RelativeTo) error { // isWindowsErrLockViolation reports whether err is ERROR_LOCK_VIOLATION // on Windows. -var isWindowsErrLockViolation = func(err error) bool { return false } +var isWindowsErrLockViolation = func(error) bool { return false } // Workdir is a temporary working directory for tests. It exposes file // operations in terms of relative paths, and fakes file watching by triggering diff --git a/gopls/internal/test/integration/misc/configuration_test.go b/gopls/internal/test/integration/misc/configuration_test.go index e96fe5dd806..1077c21ac36 100644 --- a/gopls/internal/test/integration/misc/configuration_test.go +++ b/gopls/internal/test/integration/misc/configuration_test.go @@ -15,11 +15,6 @@ import ( // Test that enabling and disabling produces the expected results of showing // and hiding staticcheck analysis results. func TestChangeConfiguration(t *testing.T) { - // Staticcheck only supports Go versions >= 1.20. - // Note: keep this in sync with TestStaticcheckWarning. Below this version we - // should get an error when setting staticcheck configuration. - testenv.NeedsGo1Point(t, 20) - const files = ` -- go.mod -- module mod.com @@ -164,8 +159,6 @@ type B struct { // // Gopls should not get confused about buffer content when recreating the view. func TestMajorOptionsChange(t *testing.T) { - testenv.NeedsGo1Point(t, 20) // needs staticcheck - const files = ` -- go.mod -- module mod.com diff --git a/gopls/internal/test/integration/misc/definition_test.go b/gopls/internal/test/integration/misc/definition_test.go index 71f255b52e2..95054977e14 100644 --- a/gopls/internal/test/integration/misc/definition_test.go +++ b/gopls/internal/test/integration/misc/definition_test.go @@ -13,6 +13,7 @@ import ( "strings" "testing" + "github.com/google/go-cmp/cmp" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/test/compare" . "golang.org/x/tools/gopls/internal/test/integration" @@ -642,3 +643,48 @@ var _ = foo(123) // call } }) } + +func TestPackageKeyInvalidationAfterSave(t *testing.T) { + // This test is a little subtle, but catches a bug that slipped through + // testing of https://go.dev/cl/614165, which moved active packages to the + // packageHandle. + // + // The bug was that after a format-and-save operation, the save marks the + // package as dirty but doesn't change its identity. In other words, this is + // the sequence of change: + // + // S_0 --format--> S_1 --save--> S_2 + // + // A package is computed on S_0, invalidated in S_1 and immediately + // invalidated again in S_2. 
Due to an invalidation bug, the validity of the + // package from S_0 was checked by comparing the identical keys of S_1 and + // S_2, and so the stale package from S_0 was marked as valid. + const src = ` +-- go.mod -- +module mod.com + +-- a.go -- +package a + +func Foo() { +} +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("a.go") + + fooLoc := env.RegexpSearch("a.go", "()Foo") + loc0 := env.GoToDefinition(fooLoc) + + // Insert a space that will be removed by formatting. + env.EditBuffer("a.go", protocol.TextEdit{ + Range: fooLoc.Range, + NewText: " ", + }) + env.SaveBuffer("a.go") // reformats the file before save + env.AfterChange() + loc1 := env.GoToDefinition(env.RegexpSearch("a.go", "Foo")) + if diff := cmp.Diff(loc0, loc1); diff != "" { + t.Errorf("mismatching locations (-want +got):\n%s", diff) + } + }) +} diff --git a/gopls/internal/test/integration/misc/formatting_test.go b/gopls/internal/test/integration/misc/formatting_test.go index 1808dbc8791..a0f86d3530c 100644 --- a/gopls/internal/test/integration/misc/formatting_test.go +++ b/gopls/internal/test/integration/misc/formatting_test.go @@ -10,7 +10,6 @@ import ( "golang.org/x/tools/gopls/internal/test/compare" . "golang.org/x/tools/gopls/internal/test/integration" - "golang.org/x/tools/internal/testenv" ) const unformattedProgram = ` @@ -303,7 +302,6 @@ func main() { } func TestGofumptFormatting(t *testing.T) { - testenv.NeedsGo1Point(t, 20) // gofumpt requires go 1.20+ // Exercise some gofumpt formatting rules: // - No empty lines following an assignment operator // - Octal integer literals should use the 0o prefix on modules using Go @@ -367,8 +365,6 @@ const Bar = 42 } func TestGofumpt_Issue61692(t *testing.T) { - testenv.NeedsGo1Point(t, 21) - const input = ` -- go.mod -- module foo diff --git a/gopls/internal/test/integration/misc/hover_test.go b/gopls/internal/test/integration/misc/hover_test.go index 9c679f02d53..47a1cb066f8 100644 --- a/gopls/internal/test/integration/misc/hover_test.go +++ b/gopls/internal/test/integration/misc/hover_test.go @@ -14,7 +14,6 @@ import ( "golang.org/x/tools/gopls/internal/protocol" . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/test/integration/fake" - "golang.org/x/tools/internal/testenv" ) func TestHoverUnexported(t *testing.T) { @@ -282,7 +281,6 @@ go 1.16 } func TestHoverCompletionMarkdown(t *testing.T) { - testenv.NeedsGo1Point(t, 19) const source = ` -- go.mod -- module mod.com @@ -343,7 +341,6 @@ func Hello() string { // Test that the generated markdown contains links for Go references. // https://github.com/golang/go/issues/58352 func TestHoverLinks(t *testing.T) { - testenv.NeedsGo1Point(t, 19) const input = ` -- go.mod -- go 1.19 @@ -465,7 +462,6 @@ SKIPPED ` func TestHoverEmbedDirective(t *testing.T) { - testenv.NeedsGo1Point(t, 19) Run(t, embedHover, func(t *testing.T, env *Env) { env.OpenFile("main.go") from := env.RegexpSearch("main.go", `\*.txt`) @@ -606,8 +602,6 @@ func main() { } func TestHoverBuiltinFile(t *testing.T) { - testenv.NeedsGo1Point(t, 21) // uses 'min' - // This test verifies that hovering in the builtin file provides the same // hover content as hovering over a use of a builtin. 
diff --git a/gopls/internal/test/integration/misc/imports_test.go b/gopls/internal/test/integration/misc/imports_test.go index 15fbd87e0fd..30a161017dc 100644 --- a/gopls/internal/test/integration/misc/imports_test.go +++ b/gopls/internal/test/integration/misc/imports_test.go @@ -292,9 +292,9 @@ var _, _ = x.X, y.Y // inclined to undertake. func cleanModCache(t *testing.T, modcache string) { cmd := exec.Command("go", "clean", "-modcache") - cmd.Env = append(os.Environ(), "GOMODCACHE="+modcache) - if err := cmd.Run(); err != nil { - t.Errorf("cleaning modcache: %v", err) + cmd.Env = append(os.Environ(), "GOMODCACHE="+modcache, "GOTOOLCHAIN=local") + if output, err := cmd.CombinedOutput(); err != nil { + t.Errorf("cleaning modcache: %v\noutput:\n%s", err, string(output)) } } diff --git a/gopls/internal/test/integration/misc/staticcheck_test.go b/gopls/internal/test/integration/misc/staticcheck_test.go index 31302393252..5af0cb42a10 100644 --- a/gopls/internal/test/integration/misc/staticcheck_test.go +++ b/gopls/internal/test/integration/misc/staticcheck_test.go @@ -7,14 +7,10 @@ package misc import ( "testing" - "golang.org/x/tools/internal/testenv" - . "golang.org/x/tools/gopls/internal/test/integration" ) func TestStaticcheckGenerics(t *testing.T) { - testenv.NeedsGo1Point(t, 20) // staticcheck requires go1.20+ - // CL 583778 causes buildir not to run on packages that use // range-over-func, since it might otherwise crash. But nearly // all packages will soon meet this description, so the @@ -85,8 +81,6 @@ var FooErr error = errors.New("foo") // Test for golang/go#56270: an analysis with related info should not panic if // analysis.RelatedInformation.End is not set. func TestStaticcheckRelatedInfo(t *testing.T) { - testenv.NeedsGo1Point(t, 20) // staticcheck is only supported at Go 1.20+ - // CL 583778 causes buildir not to run on packages that use // range-over-func, since it might otherwise crash. But nearly // all packages will soon meet this description, so the diff --git a/gopls/internal/test/integration/misc/vuln_test.go b/gopls/internal/test/integration/misc/vuln_test.go index 7be02b3ceb3..05cdbe8594f 100644 --- a/gopls/internal/test/integration/misc/vuln_test.go +++ b/gopls/internal/test/integration/misc/vuln_test.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/protocol" "golang.org/x/tools/gopls/internal/protocol/command" + "golang.org/x/tools/gopls/internal/server" "golang.org/x/tools/gopls/internal/test/compare" . "golang.org/x/tools/gopls/internal/test/integration" "golang.org/x/tools/gopls/internal/vulncheck" @@ -41,10 +42,11 @@ package foo Arguments: cmd.Arguments, } - response, err := env.Editor.ExecuteCommand(env.Ctx, params) + var result any + err := env.Editor.ExecuteCommand(env.Ctx, params, &result) // We want an error! if err == nil { - t.Errorf("got success, want invalid file URL error: %v", response) + t.Errorf("got success, want invalid file URL error. 
Result: %v", result) } }) } @@ -72,13 +74,16 @@ func F() { // build error incomplete ).Run(t, files, func(t *testing.T, env *Env) { env.OpenFile("go.mod") var result command.RunVulncheckResult - env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) + err := env.Editor.ExecuteCodeLensCommand(env.Ctx, "go.mod", command.RunGovulncheck, &result) + if err == nil { + t.Fatalf("govulncheck succeeded unexpectedly: %v", result) + } var ws WorkStatus env.Await( - CompletedProgress(result.Token, &ws), + CompletedProgress(server.GoVulncheckCommandTitle, &ws), ) wantEndMsg, wantMsgPart := "failed", "There are errors with the provided package patterns:" - if ws.EndMsg != "failed" || !strings.Contains(ws.Msg, wantMsgPart) { + if ws.EndMsg != "failed" || !strings.Contains(ws.Msg, wantMsgPart) || !strings.Contains(err.Error(), wantMsgPart) { t.Errorf("work status = %+v, want {EndMessage: %q, Message: %q}", ws, wantEndMsg, wantMsgPart) } }) } @@ -203,14 +208,16 @@ func main() { env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) env.OnceMet( - CompletedProgress(result.Token, nil), + CompletedProgress(server.GoVulncheckCommandTitle, nil), ShownMessage("Found GOSTDLIB"), NoDiagnostics(ForFile("go.mod")), ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{ - "go.mod": {IDs: []string{"GOSTDLIB"}, Mode: vulncheck.ModeGovulncheck}}) + testFetchVulncheckResult(t, env, "go.mod", result.Result, map[string]fetchVulncheckResult{ + "go.mod": {IDs: []string{"GOSTDLIB"}, Mode: vulncheck.ModeGovulncheck}, + }) }) } + func TestFetchVulncheckResultStd(t *testing.T) { const files = ` -- go.mod -- @@ -252,7 +259,7 @@ func main() { NoDiagnostics(ForFile("go.mod")), // we don't publish diagnostics for standard library vulnerability yet. ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{ + testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{ "go.mod": { IDs: []string{"GOSTDLIB"}, Mode: vulncheck.ModeImports, @@ -261,12 +268,28 @@ func main() { }) } +// fetchVulncheckResult summarizes a vulncheck result for a single file. type fetchVulncheckResult struct { IDs []string Mode vulncheck.AnalysisMode } -func testFetchVulncheckResult(t *testing.T, env *Env, want map[string]fetchVulncheckResult) { +// testFetchVulncheckResult checks that calling gopls.fetch_vulncheck_result +// returns the expected summarized results contained in the want argument. +// +// If fromRun is non-nil, it is the result of running vulncheck for +// runPath, and testFetchVulncheckResult also checks that the fetched result +// for runPath matches fromRun. +// +// This awkward factoring is an artifact of a transition from fetching +// vulncheck results asynchronously, to allowing the command to run +// asynchronously, yet returning the result synchronously from the client's +// perspective. +// +// TODO(rfindley): once VS Code no longer depends on fetching results +// asynchronously, we can remove gopls.fetch_vulncheck_result, and simplify or +// remove this helper.
+func testFetchVulncheckResult(t *testing.T, env *Env, runPath string, fromRun *vulncheck.Result, want map[string]fetchVulncheckResult) { t.Helper() var result map[protocol.DocumentURI]*vulncheck.Result @@ -281,8 +304,7 @@ func testFetchVulncheckResult(t *testing.T, env *Env, want map[string]fetchVulnc for _, v := range want { sort.Strings(v.IDs) } - got := map[string]fetchVulncheckResult{} - for k, r := range result { + summarize := func(r *vulncheck.Result) fetchVulncheckResult { osv := map[string]bool{} for _, v := range r.Findings { osv[v.OSV] = true @@ -292,14 +314,23 @@ func testFetchVulncheckResult(t *testing.T, env *Env, want map[string]fetchVulnc ids = append(ids, id) } sort.Strings(ids) - modfile := env.Sandbox.Workdir.RelPath(k.Path()) - got[modfile] = fetchVulncheckResult{ + return fetchVulncheckResult{ IDs: ids, Mode: r.Mode, } } - if diff := cmp.Diff(want, got); diff != "" { - t.Errorf("fetch vulnchheck result = got %v, want %v: diff %v", got, want, diff) + got := map[string]fetchVulncheckResult{} + for k, r := range result { + modfile := env.Sandbox.Workdir.RelPath(k.Path()) + got[modfile] = summarize(r) + } + if fromRun != nil { + if diff := cmp.Diff(want, got); diff != "" { + t.Errorf("fetch vulncheck result = got %v, want %v: diff %v", got, want, diff) + } + if diff := cmp.Diff(summarize(fromRun), got[runPath]); diff != "" { + t.Errorf("fetched vulncheck result differs from returned (-returned, +fetched):\n%s", diff) + } } } @@ -463,7 +494,7 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { ReadDiagnostics("go.mod", gotDiagnostics), ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{ + testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{ "go.mod": { IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03"}, Mode: vulncheck.ModeImports, @@ -531,7 +562,7 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { if len(gotDiagnostics.Diagnostics) > 0 { t.Errorf("Unexpected diagnostics: %v", stringify(gotDiagnostics)) } - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{}) + testFetchVulncheckResult(t, env, "", nil, map[string]fetchVulncheckResult{}) } for _, tc := range []struct { @@ -561,7 +592,7 @@ func TestRunVulncheckPackageDiagnostics(t *testing.T) { env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) gotDiagnostics := &protocol.PublishDiagnosticsParams{} env.OnceMet( - CompletedProgress(result.Token, nil), + CompletedProgress(server.GoVulncheckCommandTitle, nil), ShownMessage("Found"), ) env.OnceMet( @@ -609,7 +640,7 @@ func TestRunGovulncheck_Expiry(t *testing.T) { var result command.RunVulncheckResult env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) env.OnceMet( - CompletedProgress(result.Token, nil), + CompletedProgress(server.GoVulncheckCommandTitle, nil), ShownMessage("Found"), ) // Sleep long enough for the results to expire. @@ -640,7 +671,7 @@ func TestRunVulncheckWarning(t *testing.T) { env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) gotDiagnostics := &protocol.PublishDiagnosticsParams{} env.OnceMet( - CompletedProgress(result.Token, nil), + CompletedProgress(server.GoVulncheckCommandTitle, nil), ShownMessage("Found"), ) // Vulncheck diagnostics asynchronous to the vulncheck command. 
@@ -649,7 +680,7 @@ func TestRunVulncheckWarning(t *testing.T) { ReadDiagnostics("go.mod", gotDiagnostics), ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{ + testFetchVulncheckResult(t, env, "go.mod", result.Result, map[string]fetchVulncheckResult{ // All vulnerabilities (symbol-level, import-level, module-level) are reported. "go.mod": {IDs: []string{"GO-2022-01", "GO-2022-02", "GO-2022-03", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}, }) @@ -795,7 +826,7 @@ func TestGovulncheckInfo(t *testing.T) { env.ExecuteCodeLensCommand("go.mod", command.RunGovulncheck, &result) gotDiagnostics := &protocol.PublishDiagnosticsParams{} env.OnceMet( - CompletedProgress(result.Token, nil), + CompletedProgress(server.GoVulncheckCommandTitle, nil), ShownMessage("No vulnerabilities found"), // only count affecting vulnerabilities. ) @@ -805,7 +836,9 @@ func TestGovulncheckInfo(t *testing.T) { ReadDiagnostics("go.mod", gotDiagnostics), ) - testFetchVulncheckResult(t, env, map[string]fetchVulncheckResult{"go.mod": {IDs: []string{"GO-2022-02", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}}) + testFetchVulncheckResult(t, env, "go.mod", result.Result, map[string]fetchVulncheckResult{ + "go.mod": {IDs: []string{"GO-2022-02", "GO-2022-04"}, Mode: vulncheck.ModeGovulncheck}, + }) // wantDiagnostics maps a module path in the require // section of a go.mod to diagnostics that will be returned // when running vulncheck. diff --git a/gopls/internal/test/integration/misc/webserver_test.go b/gopls/internal/test/integration/misc/webserver_test.go index 24518145721..d5a051ea348 100644 --- a/gopls/internal/test/integration/misc/webserver_test.go +++ b/gopls/internal/test/integration/misc/webserver_test.go @@ -21,6 +21,8 @@ import ( "golang.org/x/tools/internal/testenv" ) +// TODO(adonovan): define marker test verbs for checking package docs. + // TestWebServer exercises the web server created on demand // for code actions such as "Browse package documentation". func TestWebServer(t *testing.T) { @@ -267,6 +269,71 @@ func (*T) M() { /*in T.M*/} }) } +// TestPkgDocFileImports tests that the doc links are rendered +// as URLs based on the correct import mapping for the file in +// which they appear. +func TestPkgDocFileImports(t *testing.T) { + const files = ` +-- go.mod -- +module mod.com +go 1.20 + +-- a/a1.go -- +package a + +import "b" +import alias "d" + +// [b.T] indeed refers to b.T. +// +// [alias.D] refers to d.D +// but [d.D] also refers to d.D. +type A1 int + +-- a/a2.go -- +package a + +import b "c" + +// [b.U] actually refers to c.U. +type A2 int + +-- b/b.go -- +package b + +type T int +type U int + +-- c/c.go -- +package c + +type T int +type U int + +-- d/d.go -- +package d + +type D int +` + Run(t, files, func(t *testing.T, env *Env) { + env.OpenFile("a/a1.go") + uri1 := viewPkgDoc(t, env, env.Sandbox.Workdir.EntireFile("a/a1.go")) + doc := get(t, uri1) + + // Check that the doc links are resolved using the + // appropriate import mapping for the file in which + // they appear. + checkMatch(t, true, doc, `pkg/b\?.*#T">b.T indeed refers to b.T`) + checkMatch(t, true, doc, `pkg/c\?.*#U">b.U actually refers to c.U`) + + // Check that doc links can be resolved using either + // the original or the local name when they refer to a + // renaming import. (Local names are preferred.) 
+ checkMatch(t, true, doc, `pkg/d\?.*#D">alias.D refers to d.D`) + checkMatch(t, true, doc, `pkg/d\?.*#D">d.D also refers to d.D`) + }) +} + // viewPkgDoc invokes the "Browse package documentation" code action // at the specified location. It returns the URI of the document, or // fails the test. @@ -361,7 +428,7 @@ func f(buf bytes.Buffer, greeting string) { // TestAssembly is a basic test of the web-based assembly listing. func TestAssembly(t *testing.T) { - testenv.NeedsGo1Point(t, 22) // for up-to-date assembly listing + testenv.NeedsGoCommand1Point(t, 22) // for up-to-date assembly listing const files = ` -- go.mod -- diff --git a/gopls/internal/test/integration/regtest.go b/gopls/internal/test/integration/regtest.go index b676fd4c500..dc9600af7df 100644 --- a/gopls/internal/test/integration/regtest.go +++ b/gopls/internal/test/integration/regtest.go @@ -9,7 +9,10 @@ import ( "flag" "fmt" "os" + "os/exec" + "path/filepath" "runtime" + "strings" "testing" "time" @@ -189,5 +192,56 @@ func Main(m *testing.M) (code int) { } runner.tempDir = dir + FilterToolchainPathAndGOROOT() + return m.Run() } + +// FilterToolchainPathAndGOROOT updates the PATH and GOROOT environment +// variables for the current process to effectively revert the changes made by +// the go command when performing a toolchain switch in the context of `go +// test` (see golang/go#68005). +// +// It does this by looking through PATH for a go command that is NOT a +// toolchain go command, and adjusting PATH to find that go command. Then it +// unsets GOROOT in order to use the default GOROOT for that go command. +// +// TODO(rfindley): this is very much a hack, so that our 1.21 and 1.22 builders +// actually exercise integration with older go commands. In golang/go#69321, we +// hope to do better. +func FilterToolchainPathAndGOROOT() { + if localGo, first := findLocalGo(); localGo != "" && !first { + dir := filepath.Dir(localGo) + path := os.Getenv("PATH") + os.Setenv("PATH", dir+string(os.PathListSeparator)+path) + os.Unsetenv("GOROOT") // Remove the GOROOT value that was added by toolchain switch. + } +} + +// findLocalGo returns a path to a local (=non-toolchain) Go version, or the +// empty string if none is found. +// +// The second result reports if path matches the result of exec.LookPath. +func findLocalGo() (path string, first bool) { + paths := filepath.SplitList(os.Getenv("PATH")) + for _, path := range paths { + // Use a simple heuristic to filter out toolchain paths. + if strings.Contains(path, "toolchain@v0.0.1-go") && filepath.Base(path) == "bin" { + continue // toolchain path + } + fullPath := filepath.Join(path, "go") + fi, err := os.Stat(fullPath) + if err != nil { + continue + } + if fi.Mode()&0111 != 0 { + first := false + pathGo, err := exec.LookPath("go") + if err == nil { + first = fullPath == pathGo + } + return fullPath, first + } + } + return "", false +} diff --git a/gopls/internal/test/integration/runner.go b/gopls/internal/test/integration/runner.go index 7b3b757536f..6d10b16cab3 100644 --- a/gopls/internal/test/integration/runner.go +++ b/gopls/internal/test/integration/runner.go @@ -203,7 +203,7 @@ func (r *Runner) Run(t *testing.T, files string, test TestFunc, opts ...RunOptio // Write the go.sum file for the requested directories, before starting the server. 
for _, dir := range config.writeGoSum { - if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "./..."}, []string{"GOWORK=off"}, true); err != nil { + if _, err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "./..."}, []string{"GOWORK=off"}, true); err != nil { t.Fatal(err) } } diff --git a/gopls/internal/test/integration/template/template_test.go b/gopls/internal/test/integration/template/template_test.go index 47398f5a3a2..3087e1d60fd 100644 --- a/gopls/internal/test/integration/template/template_test.go +++ b/gopls/internal/test/integration/template/template_test.go @@ -228,4 +228,30 @@ func shorten(fn protocol.DocumentURI) string { return pieces[j-2] + "/" + pieces[j-1] } +func TestCompletionPanic_Issue57621(t *testing.T) { + const src = ` +-- go.mod -- +module mod.com + +go 1.12 +-- hello.tmpl -- +{{range .Planets}} +Hello {{ +{{end}} +` + Run(t, src, func(t *testing.T, env *Env) { + env.OpenFile("hello.tmpl") + // None of these should panic. + env.Completion(env.RegexpSearch("hello.tmpl", `Hello ()\{\{`)) + env.Completion(env.RegexpSearch("hello.tmpl", `Hello \{()\{`)) + env.Completion(env.RegexpSearch("hello.tmpl", `Hello \{\{()`)) + env.Completion(env.RegexpSearch("hello.tmpl", `()\{\{range`)) + env.Completion(env.RegexpSearch("hello.tmpl", `\{()\{range`)) + env.Completion(env.RegexpSearch("hello.tmpl", `\{\{()range`)) + env.Completion(env.RegexpSearch("hello.tmpl", `Planets()}}`)) + env.Completion(env.RegexpSearch("hello.tmpl", `Planets}()}`)) + env.Completion(env.RegexpSearch("hello.tmpl", `Planets}}()`)) + }) +} + // Hover needs tests diff --git a/gopls/internal/test/integration/watch/watch_test.go b/gopls/internal/test/integration/watch/watch_test.go index 7f41511d140..3fb1ab546a6 100644 --- a/gopls/internal/test/integration/watch/watch_test.go +++ b/gopls/internal/test/integration/watch/watch_test.go @@ -588,7 +588,7 @@ func main() { env.AfterChange( NoDiagnostics(ForFile("main.go")), ) - if err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { + if _, err := env.Sandbox.RunGoCommand(env.Ctx, "", "mod", []string{"init", "mod.com"}, nil, true); err != nil { t.Fatal(err) } diff --git a/gopls/internal/test/integration/workspace/broken_test.go b/gopls/internal/test/integration/workspace/broken_test.go index 8f00be775e4..33b0b834eb6 100644 --- a/gopls/internal/test/integration/workspace/broken_test.go +++ b/gopls/internal/test/integration/workspace/broken_test.go @@ -10,7 +10,6 @@ import ( "golang.org/x/tools/gopls/internal/server" . "golang.org/x/tools/gopls/internal/test/integration" - "golang.org/x/tools/internal/testenv" ) // This file holds various tests for UX with respect to broken workspaces. @@ -23,10 +22,6 @@ import ( // Test for golang/go#53933 func TestBrokenWorkspace_DuplicateModules(t *testing.T) { - // The go command error message was improved in Go 1.20 to mention multiple - // modules. - testenv.NeedsGo1Point(t, 20) - // This proxy module content is replaced by the workspace, but is still // required for module resolution to function in the Go command. 
const proxy = ` diff --git a/gopls/internal/test/integration/workspace/quickfix_test.go b/gopls/internal/test/integration/workspace/quickfix_test.go index 6f7c8e854d0..c39e5ca3542 100644 --- a/gopls/internal/test/integration/workspace/quickfix_test.go +++ b/gopls/internal/test/integration/workspace/quickfix_test.go @@ -341,7 +341,7 @@ func main() {} } func TestStubMethods64087(t *testing.T) { - // We can't use the @fix or @suggestedfixerr or @codeactionerr + // We can't use the @fix or @quickfixerr or @codeactionerr // because the error now reported by the corrected logic // is internal and silently causes no fix to be offered. // @@ -404,7 +404,7 @@ type myerror struct{any} } func TestStubMethods64545(t *testing.T) { - // We can't use the @fix or @suggestedfixerr or @codeactionerr + // We can't use the @fix or @quickfixerr or @codeactionerr // because the error now reported by the corrected logic // is internal and silently causes no fix to be offered. // diff --git a/gopls/internal/test/integration/workspace/std_test.go b/gopls/internal/test/integration/workspace/std_test.go index 9c021fef4f3..8230d9de610 100644 --- a/gopls/internal/test/integration/workspace/std_test.go +++ b/gopls/internal/test/integration/workspace/std_test.go @@ -5,6 +5,7 @@ package workspace import ( + "os" "os/exec" "path/filepath" "runtime" @@ -36,7 +37,12 @@ func TestStdWorkspace(t *testing.T) { // Query GOROOT. This is slightly more precise than e.g. runtime.GOROOT, as // it queries the go command in the environment. - goroot, err := exec.Command("go", "env", "GOROOT").Output() + cmd := exec.Command("go", "env", "GOROOT") + // Run with GOTOOLCHAIN=local so as to not be affected by toolchain upgrades + // in the current directory (which is affected by gopls' go.mod file). + // This was golang/go#70187 + cmd.Env = append(os.Environ(), "GOTOOLCHAIN=local") + goroot, err := cmd.Output() if err != nil { t.Fatal(err) } diff --git a/gopls/internal/test/integration/workspace/workspace_test.go b/gopls/internal/test/integration/workspace/workspace_test.go index ac74e6deed5..587ac522c41 100644 --- a/gopls/internal/test/integration/workspace/workspace_test.go +++ b/gopls/internal/test/integration/workspace/workspace_test.go @@ -255,7 +255,7 @@ func TestAutomaticWorkspaceModule_Interdependent(t *testing.T) { } func TestWorkspaceVendoring(t *testing.T) { - testenv.NeedsGo1Point(t, 22) + testenv.NeedsGoCommand1Point(t, 22) WithOptions( ProxyFiles(workspaceModuleProxy), ).Run(t, multiModule, func(t *testing.T, env *Env) { diff --git a/gopls/internal/test/integration/wrappers.go b/gopls/internal/test/integration/wrappers.go index ddff4da979b..989ae913acf 100644 --- a/gopls/internal/test/integration/wrappers.go +++ b/gopls/internal/test/integration/wrappers.go @@ -5,7 +5,6 @@ package integration import ( - "encoding/json" "errors" "os" "path" @@ -238,7 +237,7 @@ func (e *Env) ApplyQuickFixes(path string, diagnostics []protocol.Diagnostic) { } } -// ApplyCodeAction applies the given code action. +// ApplyCodeAction applies the given code action, calling t.Fatal on any error. func (e *Env) ApplyCodeAction(action protocol.CodeAction) { e.T.Helper() if err := e.Editor.ApplyCodeAction(e.Ctx, action); err != nil { @@ -246,7 +245,19 @@ func (e *Env) ApplyCodeAction(action protocol.CodeAction) { } } -// GetQuickFixes returns the available quick fix code actions. +// Diagnostics returns diagnostics for the given file, calling t.Fatal on any +// error. 
+func (e *Env) Diagnostics(name string) []protocol.Diagnostic { + e.T.Helper() + diags, err := e.Editor.Diagnostics(e.Ctx, name) + if err != nil { + e.T.Fatal(err) + } + return diags +} + +// GetQuickFixes returns the available quick fix code actions, calling t.Fatal +// on any error. func (e *Env) GetQuickFixes(path string, diagnostics []protocol.Diagnostic) []protocol.CodeAction { e.T.Helper() loc := e.Sandbox.Workdir.EntireFile(path) @@ -303,18 +314,20 @@ func (e *Env) RunGenerate(dir string) { // RunGoCommand runs the given command in the sandbox's default working // directory. -func (e *Env) RunGoCommand(verb string, args ...string) { +func (e *Env) RunGoCommand(verb string, args ...string) []byte { e.T.Helper() - if err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true); err != nil { + out, err := e.Sandbox.RunGoCommand(e.Ctx, "", verb, args, nil, true) + if err != nil { e.T.Fatal(err) } + return out } // RunGoCommandInDir is like RunGoCommand, but executes in the given // relative directory of the sandbox. func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) { e.T.Helper() - if err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil { + if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, nil, true); err != nil { e.T.Fatal(err) } } @@ -323,7 +336,7 @@ func (e *Env) RunGoCommandInDir(dir, verb string, args ...string) { // relative directory of the sandbox with the given additional environment variables. func (e *Env) RunGoCommandInDirWithEnv(dir string, env []string, verb string, args ...string) { e.T.Helper() - if err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, env, true); err != nil { + if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, verb, args, env, true); err != nil { e.T.Fatal(err) } } @@ -344,7 +357,7 @@ func (e *Env) GoVersion() int { func (e *Env) DumpGoSum(dir string) { e.T.Helper() - if err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { + if _, err := e.Sandbox.RunGoCommand(e.Ctx, dir, "list", []string{"-mod=mod", "./..."}, nil, true); err != nil { e.T.Fatal(err) } sumFile := path.Join(dir, "go.sum") @@ -375,46 +388,22 @@ func (e *Env) CodeLens(path string) []protocol.CodeLens { // ExecuteCodeLensCommand executes the command for the code lens matching the // given command name. -func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result interface{}) { - e.T.Helper() - lenses := e.CodeLens(path) - var lens protocol.CodeLens - var found bool - for _, l := range lenses { - if l.Command.Command == cmd.String() { - lens = l - found = true - } - } - if !found { - e.T.Fatalf("found no command with the ID %s", cmd) +// +// result is a pointer to a variable to be populated by json.Unmarshal. +func (e *Env) ExecuteCodeLensCommand(path string, cmd command.Command, result any) { + e.T.Helper() + if err := e.Editor.ExecuteCodeLensCommand(e.Ctx, path, cmd, result); err != nil { + e.T.Fatal(err) } - e.ExecuteCommand(&protocol.ExecuteCommandParams{ - Command: lens.Command.Command, - Arguments: lens.Command.Arguments, - }, result) } -func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result interface{}) { +// ExecuteCommand executes the requested command in the editor, calling t.Fatal +// on any error. +// +// result is a pointer to a variable to be populated by json.Unmarshal. 
+func (e *Env) ExecuteCommand(params *protocol.ExecuteCommandParams, result any) { e.T.Helper() - response, err := e.Editor.ExecuteCommand(e.Ctx, params) - if err != nil { - e.T.Fatal(err) - } - if result == nil { - return - } - // Hack: The result of an executeCommand request will be unmarshaled into - // maps. Re-marshal and unmarshal into the type we expect. - // - // This could be improved by generating a jsonrpc2 command client from the - // command.Interface, but that should only be done if we're consolidating - // this part of the tsprotocol generation. - data, err := json.Marshal(response) - if err != nil { - e.T.Fatal(err) - } - if err := json.Unmarshal(data, result); err != nil { + if err := e.Editor.ExecuteCommand(e.Ctx, params, result); err != nil { e.T.Fatal(err) } } diff --git a/gopls/internal/test/marker/doc.go b/gopls/internal/test/marker/doc.go index bd23a4f12ef..509791d509c 100644 --- a/gopls/internal/test/marker/doc.go +++ b/gopls/internal/test/marker/doc.go @@ -10,7 +10,7 @@ Use this command to run the tests: $ go test ./gopls/internal/test/marker [-update] -A marker test uses the '//@' marker syntax of the x/tools/go/expect package +A marker test uses the '//@' marker syntax of the x/tools/internal/expect package to annotate source code with various information such as locations and arguments of LSP operations to be executed by the test. The syntax following '@' is parsed as a comma-separated list of ordinary Go function calls, for @@ -127,13 +127,13 @@ The following markers are supported within marker tests: - complete(location, ...items): specifies expected completion results at the given location. Must be used in conjunction with @item. - - diag(location, regexp): specifies an expected diagnostic matching the - given regexp at the given location. The test runner requires - a 1:1 correspondence between observed diagnostics and diag annotations. - The diagnostics source and kind fields are ignored, to reduce fuss. + - diag(location, regexp, exact=bool): specifies an expected diagnostic + matching the given regexp at the given location. The test runner requires a + 1:1 correspondence between observed diagnostics and diag annotations. The + diagnostics source and kind fields are ignored, to reduce fuss. The specified location must match the start position of the diagnostic, - but end positions are ignored. + but end positions are ignored unless exact=true. TODO(adonovan): in the older marker framework, the annotation asserted two additional fields (source="compiler", kind="error"). Restore them? @@ -219,14 +219,14 @@ The following markers are supported within marker tests: signatureHelp at the given location should match the provided string, with the active parameter (an index) highlighted. - - suggestedfix(location, regexp, golden): like diag, the location and + - quickfix(location, regexp, golden): like diag, the location and regexp identify an expected diagnostic, which must have exactly one associated "quickfix" code action. This action is executed for its editing effects on the source files. Like rename, the golden directory contains the expected transformed files. - - suggestedfixerr(location, regexp, wantError): specifies that the - suggestedfix operation should fail with an error that matches the expectation. + - quickfixerr(location, regexp, wantError): specifies that the + quickfix operation should fail with an error that matches the expectation. 
(Failures in the computation to offer a fix do not generally result in LSP errors, so this marker is not appropriate for testing them.) @@ -277,7 +277,7 @@ The following markers are supported within marker tests: # Argument conversion -Marker arguments are first parsed by the go/expect package, which accepts +Marker arguments are first parsed by the internal/expect package, which accepts the following tokens as defined by the Go spec: - string, int64, float64, and rune literals - true and false @@ -372,6 +372,6 @@ Note that -update does not cause missing @diag or @loc markers to be added. - If possible, improve handling for optional arguments. Rather than have multiple variations of a marker, it would be nice to support a more flexible signature: can codeaction, codeactionedit, codeactionerr, and - suggestedfix be consolidated? + quickfix be consolidated? */ package marker diff --git a/gopls/internal/test/marker/marker_test.go b/gopls/internal/test/marker/marker_test.go index 1478fe631c7..272809c3384 100644 --- a/gopls/internal/test/marker/marker_test.go +++ b/gopls/internal/test/marker/marker_test.go @@ -27,11 +27,9 @@ import ( "sort" "strings" "testing" - "time" "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" - "golang.org/x/tools/go/expect" "golang.org/x/tools/gopls/internal/cache" "golang.org/x/tools/gopls/internal/debug" "golang.org/x/tools/gopls/internal/lsprpc" @@ -43,6 +41,7 @@ import ( "golang.org/x/tools/gopls/internal/util/safetoken" "golang.org/x/tools/internal/diff" "golang.org/x/tools/internal/diff/myers" + "golang.org/x/tools/internal/expect" "golang.org/x/tools/internal/jsonrpc2" "golang.org/x/tools/internal/jsonrpc2/servertest" "golang.org/x/tools/internal/testenv" @@ -56,6 +55,7 @@ func TestMain(m *testing.M) { testenv.ExitIfSmallMachine() // Disable GOPACKAGESDRIVER, as it can cause spurious test failures. os.Setenv("GOPACKAGESDRIVER", "off") + integration.FilterToolchainPathAndGOROOT() os.Exit(m.Run()) } @@ -108,10 +108,6 @@ func Test(t *testing.T) { // Opt: use a shared cache. cache := cache.New(nil) - // Opt: seed the cache and file cache by type-checking and analyzing common - // standard library packages. 
- seedCache(t, cache) - for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { @@ -135,12 +131,19 @@ func Test(t *testing.T) { } testenv.NeedsGo1Point(t, go1point) } - if test.maxGoVersion != "" { + if test.minGoCommandVersion != "" { var go1point int - if _, err := fmt.Sscanf(test.maxGoVersion, "go1.%d", &go1point); err != nil { - t.Fatalf("parsing -max_go version: %v", err) + if _, err := fmt.Sscanf(test.minGoCommandVersion, "go1.%d", &go1point); err != nil { + t.Fatalf("parsing -min_go_command version: %v", err) } - testenv.SkipAfterGo1Point(t, go1point) + testenv.NeedsGoCommand1Point(t, go1point) + } + if test.maxGoCommandVersion != "" { + var go1point int + if _, err := fmt.Sscanf(test.maxGoCommandVersion, "go1.%d", &go1point); err != nil { + t.Fatalf("parsing -max_go_command version: %v", err) + } + testenv.SkipAfterGoCommand1Point(t, go1point) } if test.cgo { testenv.NeedsTool(t, "cgo") @@ -150,12 +153,14 @@ func Test(t *testing.T) { CapabilitiesJSON: test.capabilities, Env: test.env, } + if _, ok := config.Settings["diagnosticsDelay"]; !ok { if config.Settings == nil { config.Settings = make(map[string]any) } config.Settings["diagnosticsDelay"] = "10ms" } + // inv: config.Settings != nil run := &markerTestRun{ @@ -179,13 +184,30 @@ func Test(t *testing.T) { run.env.OpenFile(file) } - // Wait for the didOpen notifications to be processed, then collect - // diagnostics. - var diags map[string]*protocol.PublishDiagnosticsParams - run.env.AfterChange(integration.ReadAllDiagnostics(&diags)) - for path, params := range diags { + allDiags := make(map[string][]protocol.Diagnostic) + if run.env.Editor.ServerCapabilities().DiagnosticProvider != nil { + for name := range test.files { + // golang/go#53275: support pull diagnostics for go.mod and go.work + // files. + if strings.HasSuffix(name, ".go") { + allDiags[name] = run.env.Diagnostics(name) + } + } + } else { + // Wait for the didOpen notifications to be processed, then collect + // diagnostics. + + run.env.AfterChange() + var diags map[string]*protocol.PublishDiagnosticsParams + run.env.AfterChange(integration.ReadAllDiagnostics(&diags)) + for path, params := range diags { + allDiags[path] = params.Diagnostics + } + } + + for path, diags := range allDiags { uri := run.env.Sandbox.Workdir.URI(path) - for _, diag := range params.Diagnostics { + for _, diag := range diags { loc := protocol.Location{ URI: uri, Range: protocol.Range{ @@ -222,7 +244,13 @@ func Test(t *testing.T) { if !test.ignoreExtraDiags { for loc, diags := range run.diags { for _, diag := range diags { - t.Errorf("%s: unexpected diagnostic: %q", run.fmtLoc(loc), diag.Message) + // Note that loc is collapsed (start==end). + // For formatting, show the exact span. + exactLoc := protocol.Location{ + URI: loc.URI, + Range: diag.Range, + } + t.Errorf("%s: unexpected diagnostic: %q", run.fmtLoc(exactLoc), diag.Message) } } } @@ -269,58 +297,6 @@ func Test(t *testing.T) { } } -// seedCache populates the file cache by type checking and analyzing standard -// library packages that are reachable from tests. -// -// Most tests are themselves small codebases, and yet may reference large -// amounts of standard library code. Since tests are heavily parallelized, they -// naively end up type checking and analyzing many of the same standard library -// packages. By seeding the cache, we ensure cache hits for these standard -// library packages, significantly reducing the amount of work done by each -// test. 
-// -// The following command was used to determine the set of packages to import -// below: -// -// rm -rf ~/.cache/gopls && \ -// go test -count=1 ./internal/test/marker -cpuprofile=prof -v -// -// Look through the individual test timings to see which tests are slow, then -// look through the imports of slow tests to see which standard library -// packages are imported. Choose high level packages such as go/types that -// import others such as fmt or go/ast. After doing so, re-run the command and -// verify that the total samples in the collected profile decreased. -func seedCache(t *testing.T, cache *cache.Cache) { - start := time.Now() - - // The the doc string for details on how this seed was produced. - seed := `package p -import ( - _ "net/http" - _ "sort" - _ "go/types" - _ "testing" -) -` - - // Create a test environment for the seed file. - env := newEnv(t, cache, map[string][]byte{"p.go": []byte(seed)}, nil, nil, fake.EditorConfig{}) - // See other TODO: this cleanup logic is too messy. - defer env.Editor.Shutdown(context.Background()) // ignore error - defer env.Sandbox.Close() // ignore error - env.Awaiter.Await(context.Background(), integration.InitialWorkspaceLoad) - - // Opening the file is necessary to trigger analysis. - env.OpenFile("p.go") - - // As a checksum, verify that the file has no errors after analysis. - // This isn't strictly necessary, but helps avoid incorrect seeding due to - // typos. - env.AfterChange(integration.NoDiagnostics()) - - t.Logf("warming the cache took %s", time.Since(start)) -} - // A marker holds state for the execution of a single @marker // annotation in the source. type marker struct { @@ -432,10 +408,11 @@ func valueMarkerFunc(fn any) func(marker) { // called during the processing of action markers (e.g. @action("abc", 123)) // with marker arguments converted to function parameters. The provided // function's first parameter must be of type 'marker', and it must not return -// any values. +// any values. Any named arguments that may be used by the marker func must be +// listed in allowedNames. // // The provided fn should not mutate the test environment. -func actionMarkerFunc(fn any) func(marker) { +func actionMarkerFunc(fn any, allowedNames ...string) func(marker) { ftype := reflect.TypeOf(fn) if ftype.NumIn() == 0 || ftype.In(0) != markerType { panic(fmt.Sprintf("action marker function %#v must accept marker as its first argument", ftype)) @@ -444,7 +421,21 @@ func actionMarkerFunc(fn any) func(marker) { panic(fmt.Sprintf("action marker function %#v cannot have results", ftype)) } + var allowed map[string]bool + if len(allowedNames) > 0 { + allowed = make(map[string]bool) + for _, name := range allowedNames { + allowed[name] = true + } + } + return func(mark marker) { + for name := range mark.note.NamedArgs { + if !allowed[name] { + mark.errorf("unexpected named argument %q", name) + } + } + args := append([]any{mark}, mark.note.Args...) argValues, err := convertArgs(mark, ftype, args) if err != nil { @@ -498,6 +489,18 @@ func convertArgs(mark marker, ftype reflect.Type, args []any) ([]reflect.Value, return argValues, nil } +// namedArg returns the named argument for name, or the default value. +func namedArg[T any](mark marker, name string, dflt T) T { + if v, ok := mark.note.NamedArgs[name]; ok { + if e, ok := v.(T); ok { + return e + } else { + mark.errorf("invalid value for %q: %v", name, v) + } + } + return dflt +} + // is reports whether arg is a T. 
func is[T any](arg any) bool { _, ok := arg.(T) @@ -520,7 +523,7 @@ var actionMarkerFuncs = map[string]func(marker){ "codelenses": actionMarkerFunc(codeLensesMarker), "complete": actionMarkerFunc(completeMarker), "def": actionMarkerFunc(defMarker), - "diag": actionMarkerFunc(diagMarker), + "diag": actionMarkerFunc(diagMarker, "exact"), "documentlink": actionMarkerFunc(documentLinkMarker), "foldingrange": actionMarkerFunc(foldingRangeMarker), "format": actionMarkerFunc(formatMarker), @@ -540,8 +543,8 @@ var actionMarkerFuncs = map[string]func(marker){ "selectionrange": actionMarkerFunc(selectionRangeMarker), "signature": actionMarkerFunc(signatureMarker), "snippet": actionMarkerFunc(snippetMarker), - "suggestedfix": actionMarkerFunc(suggestedfixMarker), - "suggestedfixerr": actionMarkerFunc(suggestedfixErrMarker), + "quickfix": actionMarkerFunc(quickfixMarker), + "quickfixerr": actionMarkerFunc(quickfixErrMarker), "symbol": actionMarkerFunc(symbolMarker), "token": actionMarkerFunc(tokenMarker), "typedef": actionMarkerFunc(typedefMarker), @@ -569,16 +572,17 @@ type markerTest struct { flags []string // flags extracted from the special "flags" archive file. // Parsed flags values. See the flag definitions below for documentation. - minGoVersion string - maxGoVersion string - cgo bool - writeGoSum []string - skipGOOS []string - skipGOARCH []string - ignoreExtraDiags bool - filterBuiltins bool - filterKeywords bool - errorsOK bool + minGoVersion string // minimum Go runtime version; max should never be needed + minGoCommandVersion string + maxGoCommandVersion string + cgo bool + writeGoSum []string + skipGOOS []string + skipGOARCH []string + ignoreExtraDiags bool + filterBuiltins bool + filterKeywords bool + errorsOK bool } // flagSet returns the flagset used for parsing the special "flags" file in the @@ -586,7 +590,8 @@ type markerTest struct { func (t *markerTest) flagSet() *flag.FlagSet { flags := flag.NewFlagSet(t.name, flag.ContinueOnError) flags.StringVar(&t.minGoVersion, "min_go", "", "if set, the minimum go1.X version required for this test") - flags.StringVar(&t.maxGoVersion, "max_go", "", "if set, the maximum go1.X version required for this test") + flags.StringVar(&t.minGoCommandVersion, "min_go_command", "", "if set, the minimum go1.X go command version required for this test") + flags.StringVar(&t.maxGoCommandVersion, "max_go_command", "", "if set, the maximum go1.X go command version required for this test") flags.BoolVar(&t.cgo, "cgo", false, "if set, requires cgo (both the cgo tool and CGO_ENABLED=1)") flags.Var((*stringListValue)(&t.writeGoSum), "write_sumfile", "if set, write the sumfile for these directories") flags.Var((*stringListValue)(&t.skipGOOS), "skip_goos", "if set, skip this test on these GOOS values") @@ -873,7 +878,7 @@ func newEnv(t *testing.T, cache *cache.Cache, files, proxyFiles map[string][]byt } for _, dir := range writeGoSum { - if err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil { + if _, err := sandbox.RunGoCommand(context.Background(), dir, "list", []string{"-mod=mod", "..."}, []string{"GOWORK=off"}, true); err != nil { t.Fatal(err) } } @@ -911,7 +916,7 @@ type markerTestRun struct { settings map[string]any // Collected information. - // Each @diag/@suggestedfix marker eliminates an entry from diags. + // Each @diag/@quickfix marker eliminates an entry from diags. 
values map[expect.Identifier]any diags map[protocol.Location][]protocol.Diagnostic // diagnostics by position; location end == start @@ -1685,7 +1690,8 @@ func locMarker(mark marker, loc protocol.Location) protocol.Location { return lo // diagMarker implements the @diag marker. It eliminates diagnostics from // the observed set in mark.test. func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) { - if _, ok := removeDiagnostic(mark, loc, re); !ok { + exact := namedArg(mark, "exact", false) + if _, ok := removeDiagnostic(mark, loc, exact, re); !ok { mark.errorf("no diagnostic at %v matches %q", loc, re) } } @@ -1696,12 +1702,13 @@ func diagMarker(mark marker, loc protocol.Location, re *regexp.Regexp) { // from the unmatched set. // // If not found, it returns (protocol.Diagnostic{}, false). -func removeDiagnostic(mark marker, loc protocol.Location, re *regexp.Regexp) (protocol.Diagnostic, bool) { - loc.Range.End = loc.Range.Start // diagnostics ignore end position. - diags := mark.run.diags[loc] +func removeDiagnostic(mark marker, loc protocol.Location, matchEnd bool, re *regexp.Regexp) (protocol.Diagnostic, bool) { + key := loc + key.Range.End = key.Range.Start // diagnostics ignore end position. + diags := mark.run.diags[key] for i, diag := range diags { - if re.MatchString(diag.Message) { - mark.run.diags[loc] = append(diags[:i], diags[i+1:]...) + if re.MatchString(diag.Message) && (!matchEnd || diag.Range.End == loc.Range.End) { + mark.run.diags[key] = append(diags[:i], diags[i+1:]...) return diag, true } } @@ -2026,16 +2033,14 @@ func (mark marker) consumeExtraNotes(name string, f func(marker)) { } } -// suggestedfixMarker implements the @suggestedfix(location, regexp, +// quickfixMarker implements the @quickfix(location, regexp, // kind, golden) marker. It acts like @diag(location, regexp), to set // the expectation of a diagnostic, but then it applies the "quickfix" // code action (which must be unique) suggested by the matched diagnostic. -// -// TODO(adonovan): rename to @quickfix, since that's the LSP term. -func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, golden *Golden) { +func quickfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, golden *Golden) { loc.Range.End = loc.Range.Start // diagnostics ignore end position. // Find and remove the matching diagnostic. - diag, ok := removeDiagnostic(mark, loc, re) + diag, ok := removeDiagnostic(mark, loc, false, re) if !ok { mark.errorf("no diagnostic at %v matches %q", loc, re) return @@ -2044,7 +2049,7 @@ func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, g // Apply the fix it suggests. changed, err := codeAction(mark.run.env, loc.URI, diag.Range, "quickfix", &diag) if err != nil { - mark.errorf("suggestedfix failed: %v. (Use @suggestedfixerr for expected errors.)", err) + mark.errorf("quickfix failed: %v. (Use @quickfixerr for expected errors.)", err) return } @@ -2052,10 +2057,10 @@ func suggestedfixMarker(mark marker, loc protocol.Location, re *regexp.Regexp, g checkDiffs(mark, changed, golden) } -func suggestedfixErrMarker(mark marker, loc protocol.Location, re *regexp.Regexp, wantErr stringMatcher) { +func quickfixErrMarker(mark marker, loc protocol.Location, re *regexp.Regexp, wantErr stringMatcher) { loc.Range.End = loc.Range.Start // diagnostics ignore end position. // Find and remove the matching diagnostic. 
- diag, ok := removeDiagnostic(mark, loc, re) + diag, ok := removeDiagnostic(mark, loc, false, re) if !ok { mark.errorf("no diagnostic at %v matches %q", loc, re) return @@ -2167,7 +2172,7 @@ func codeActionChanges(env *integration.Env, uri protocol.DocumentURI, rng proto // // The client makes an ExecuteCommand RPC to the server, // which dispatches it to the ApplyFix handler. - // ApplyFix dispatches to the "stub_methods" suggestedfix hook (the meat). + // ApplyFix dispatches to the "stub_methods" fixer (the meat). // The server then makes an ApplyEdit RPC to the client, // whose WorkspaceEditFunc hook temporarily gathers the edits // instead of applying them. diff --git a/gopls/internal/test/marker/testdata/codeaction/addtest.txt b/gopls/internal/test/marker/testdata/codeaction/addtest.txt new file mode 100644 index 00000000000..5d669ec7d01 --- /dev/null +++ b/gopls/internal/test/marker/testdata/codeaction/addtest.txt @@ -0,0 +1,612 @@ +This test checks the behavior of the 'add test for FUNC' code action. + +-- flags -- +-ignore_extra_diags + +-- go.mod -- +module golang.org/lsptests/addtest + +go 1.18 + +-- settings.json -- +{ + "addTestSourceCodeAction": true +} + +-- withcopyright/copyright.go -- +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +// Package main is for lsp test. +package main + +func Foo(in string) string {return in} //@codeactionedit("Foo", "source.addTest", with_copyright) + +-- @with_copyright/withcopyright/copyright_test.go -- +@@ -0,0 +1,24 @@ ++// Copyright 2020 The Go Authors. All rights reserved. ++// Use of this source code is governed by a BSD-style ++// license that can be found in the LICENSE file. ++ ++package main_test ++ ++func TestFoo(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := main.Foo(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("Foo() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- withoutcopyright/copyright.go -- +//go:build go1.23 + +// Package copyright is for lsp test. +package copyright + +func Foo(in string) string {return in} //@codeactionedit("Foo", "source.addTest", without_copyright) + +-- @without_copyright/withoutcopyright/copyright_test.go -- +@@ -0,0 +1,20 @@ ++package copyright_test ++ ++func TestFoo(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := copyright.Foo(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. 
++ if true { ++ t.Errorf("Foo() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- missingtestfile/missingtestfile.go -- +package main + +func ExportedFunction(in string) string {return in} //@codeactionedit("ExportedFunction", "source.addTest", missing_test_file_exported_function) + +type Bar struct {} + +func (*Bar) ExportedMethod(in string) string {return in} //@codeactionedit("ExportedMethod", "source.addTest", missing_test_file_exported_recv_exported_method) + +-- @missing_test_file_exported_function/missingtestfile/missingtestfile_test.go -- +@@ -0,0 +1,20 @@ ++package main_test ++ ++func TestExportedFunction(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := main.ExportedFunction(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedFunction() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @missing_test_file_exported_recv_exported_method/missingtestfile/missingtestfile_test.go -- +@@ -0,0 +1,20 @@ ++package main_test ++ ++func TestBar_ExportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- xpackagetestfile/xpackagetestfile.go -- +package main + +func ExportedFunction(in string) string {return in} //@codeactionedit("ExportedFunction", "source.addTest", xpackage_exported_function) +func unexportedFunction(in string) string {return in} //@codeactionedit("unexportedFunction", "source.addTest", xpackage_unexported_function) + +type Bar struct {} + +func (*Bar) ExportedMethod(in string) string {return in} //@codeactionedit("ExportedMethod", "source.addTest", xpackage_exported_recv_exported_method) +func (*Bar) unexportedMethod(in string) string {return in} //@codeactionedit("unexportedMethod", "source.addTest", xpackage_exported_recv_unexported_method) + +type foo struct {} + +func (*foo) ExportedMethod(in string) string {return in} //@codeactionedit("ExportedMethod", "source.addTest", xpackage_unexported_recv_exported_method) +func (*foo) unexportedMethod(in string) string {return in} //@codeactionedit("unexportedMethod", "source.addTest", xpackage_unexported_recv_unexported_method) + +-- xpackagetestfile/xpackagetestfile_test.go -- +package main + +-- @xpackage_exported_function/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func TestExportedFunction(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedFunction(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedFunction() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @xpackage_unexported_function/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func Test_unexportedFunction(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. 
++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := unexportedFunction(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("unexportedFunction() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @xpackage_exported_recv_exported_method/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func TestBar_ExportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @xpackage_exported_recv_unexported_method/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func TestBar_unexportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := unexportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @xpackage_unexported_recv_exported_method/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func Test_foo_ExportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @xpackage_unexported_recv_unexported_method/xpackagetestfile/xpackagetestfile_test.go -- +@@ -3 +3,18 @@ ++func Test_foo_unexportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := unexportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. 
++ if true { ++ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- aliasreceiver/aliasreceiver.go -- +package main + +type bar struct {} +type middle1 = bar +type middle2 = middle1 +type middle3 = middle2 +type Bar = middle3 + +func (*Bar) ExportedMethod(in string) string {return in} //@codeactionedit("ExportedMethod", "source.addTest", pointer_receiver_exported_method) +func (*Bar) unexportedMethod(in string) string {return in} //@codeactionedit("unexportedMethod", "source.addTest", pointer_receiver_unexported_method) + +type bar2 struct {} +type middle4 = bar2 +type middle5 = middle4 +type middle6 = middle5 +type foo = *middle6 + +func (foo) ExportedMethod(in string) string {return in} //@codeactionedit("ExportedMethod", "source.addTest", alias_receiver_exported_method) +func (foo) unexportedMethod(in string) string {return in} //@codeactionedit("unexportedMethod", "source.addTest", alias_receiver_unexported_method) + +-- aliasreceiver/aliasreceiver_test.go -- +package main + +-- @pointer_receiver_exported_method/aliasreceiver/aliasreceiver_test.go -- +@@ -3 +3,18 @@ ++func TestBar_ExportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @pointer_receiver_unexported_method/aliasreceiver/aliasreceiver_test.go -- +@@ -3 +3,18 @@ ++func TestBar_unexportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := unexportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @alias_receiver_exported_method/aliasreceiver/aliasreceiver_test.go -- +@@ -3 +3,18 @@ ++func Test_foo_ExportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := ExportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("ExportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @alias_receiver_unexported_method/aliasreceiver/aliasreceiver_test.go -- +@@ -3 +3,18 @@ ++func Test_foo_unexportedMethod(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ arg string ++ want string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got := unexportedMethod(tt.arg) ++ // TODO: update the condition below to compare got with tt.want. 
++ if true { ++ t.Errorf("unexportedMethod() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- multiinputoutput/multiinputoutput.go -- +package main + +func Foo(in, in1, in2, in3 string) (out, out1, out2 string) {return in, in, in} //@codeactionedit("Foo", "source.addTest", multi_input_output) + +-- @multi_input_output/multiinputoutput/multiinputoutput_test.go -- +@@ -0,0 +1,34 @@ ++package main_test ++ ++func TestFoo(t *testing.T) { ++ type args struct { ++ in string ++ in2 string ++ in3 string ++ in4 string ++ } ++ tests := []struct { ++ name string // description of this test case ++ args args ++ want string ++ want2 string ++ want3 string ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got, got2, got3 := main.Foo(tt.args.in, tt.args.in2, tt.args.in3, tt.args.in4) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("Foo() = %v, want %v", got, tt.want) ++ } ++ if true { ++ t.Errorf("Foo() = %v, want %v", got2, tt.want2) ++ } ++ if true { ++ t.Errorf("Foo() = %v, want %v", got3, tt.want3) ++ } ++ }) ++ } ++} +-- xpackagerename/xpackagerename.go -- +package main + +import ( + mytime "time" + myast "go/ast" +) + +func Foo(t mytime.Time, a *myast.Node) (mytime.Time, *myast.Node) {return t, a} //@codeactionedit("Foo", "source.addTest", xpackage_rename) + +-- @xpackage_rename/xpackagerename/xpackagerename_test.go -- +@@ -0,0 +1,28 @@ ++package main_test ++ ++func TestFoo(t *testing.T) { ++ type args struct { ++ in mytime.Time ++ in2 *myast.Node ++ } ++ tests := []struct { ++ name string // description of this test case ++ args args ++ want mytime.Time ++ want2 *myast.Node ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got, got2 := main.Foo(tt.args.in, tt.args.in2) ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("Foo() = %v, want %v", got, tt.want) ++ } ++ if true { ++ t.Errorf("Foo() = %v, want %v", got2, tt.want2) ++ } ++ }) ++ } ++} +-- xtestpackagerename/xtestpackagerename.go -- +package main + +import ( + mytime "time" + myast "go/ast" +) + +func Foo(t mytime.Time, a *myast.Node) (mytime.Time, *myast.Node) {return t, a} //@codeactionedit("Foo", "source.addTest", xtest_package_rename) + +-- xtestpackagerename/xtestpackagerename_test.go -- +package main_test + +import ( + yourtime "time" + yourast "go/ast" +) + +var fooTime = yourtime.Time{} +var fooNode = yourast.Node{} + +-- @xtest_package_rename/xtestpackagerename/xtestpackagerename_test.go -- +@@ -11 +11,26 @@ ++func TestFoo(t *testing.T) { ++ type args struct { ++ in yourtime.Time ++ in2 *yourast.Node ++ } ++ tests := []struct { ++ name string // description of this test case ++ args args ++ want yourtime.Time ++ want2 *yourast.Node ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got, got2 := main.Foo(tt.args.in, tt.args.in2) ++ // TODO: update the condition below to compare got with tt.want. 
++ if true { ++ t.Errorf("Foo() = %v, want %v", got, tt.want) ++ } ++ if true { ++ t.Errorf("Foo() = %v, want %v", got2, tt.want2) ++ } ++ }) ++ } ++} +-- returnwitherror/returnwitherror.go -- +package main + +func OnlyErr() error {return nil} //@codeactionedit("OnlyErr", "source.addTest", return_only_error) +func StringErr() (string, error) {return "", nil} //@codeactionedit("StringErr", "source.addTest", return_string_error) +func MultipleStringErr() (string, string, string, error) {return "", "", "", nil} //@codeactionedit("MultipleStringErr", "source.addTest", return_multiple_string_error) + +-- @return_only_error/returnwitherror/returnwitherror_test.go -- +@@ -0,0 +1,24 @@ ++package main_test ++ ++func TestOnlyErr(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ wantErr bool ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ gotErr := main.OnlyErr() ++ if gotErr != nil { ++ if !tt.wantErr { ++ t.Errorf("OnlyErr() failed: %v", gotErr) ++ } ++ return ++ } ++ if tt.wantErr { ++ t.Fatal("OnlyErr() succeeded unexpectedly") ++ } ++ }) ++ } ++} +-- @return_string_error/returnwitherror/returnwitherror_test.go -- +@@ -0,0 +1,29 @@ ++package main_test ++ ++func TestStringErr(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ want string ++ wantErr bool ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got, gotErr := main.StringErr() ++ if gotErr != nil { ++ if !tt.wantErr { ++ t.Errorf("StringErr() failed: %v", gotErr) ++ } ++ return ++ } ++ if tt.wantErr { ++ t.Fatal("StringErr() succeeded unexpectedly") ++ } ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("StringErr() = %v, want %v", got, tt.want) ++ } ++ }) ++ } ++} +-- @return_multiple_string_error/returnwitherror/returnwitherror_test.go -- +@@ -0,0 +1,37 @@ ++package main_test ++ ++func TestMultipleStringErr(t *testing.T) { ++ tests := []struct { ++ name string // description of this test case ++ want string ++ want2 string ++ want3 string ++ wantErr bool ++ }{ ++ // TODO: Add test cases. ++ } ++ for _, tt := range tests { ++ t.Run(tt.name, func(t *testing.T) { ++ got, got2, got3, gotErr := main.MultipleStringErr() ++ if gotErr != nil { ++ if !tt.wantErr { ++ t.Errorf("MultipleStringErr() failed: %v", gotErr) ++ } ++ return ++ } ++ if tt.wantErr { ++ t.Fatal("MultipleStringErr() succeeded unexpectedly") ++ } ++ // TODO: update the condition below to compare got with tt.want. ++ if true { ++ t.Errorf("MultipleStringErr() = %v, want %v", got, tt.want) ++ } ++ if true { ++ t.Errorf("MultipleStringErr() = %v, want %v", got2, tt.want2) ++ } ++ if true { ++ t.Errorf("MultipleStringErr() = %v, want %v", got3, tt.want3) ++ } ++ }) ++ } ++} diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt index 1f96d6b424c..9bad8232231 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_imports.txt @@ -1,9 +1,6 @@ This test checks the behavior of removing a parameter with respect to various import scenarios. 
--- flags -- --min_go=go1.21 - -- go.mod -- module mod.test diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt index 174d9061927..614c4d3147f 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_method.txt @@ -5,9 +5,6 @@ Specifically, check across package boundaries 2. handling of unnamed receivers --- flags -- --min_go=go1.20 - -- go.mod -- module example.com/rm diff --git a/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt index f35662e3dad..3b6ba360d29 100644 --- a/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt +++ b/gopls/internal/test/marker/testdata/codeaction/removeparam_satisfies.txt @@ -8,9 +8,6 @@ that must be modified), in order to produce the desired outcome for our users. Doing so would be more complicated, so for now this test simply records the current behavior. --- flags -- --min_go=go1.20 - -- go.mod -- module example.com/rm diff --git a/gopls/internal/test/marker/testdata/completion/alias.txt b/gopls/internal/test/marker/testdata/completion/alias.txt new file mode 100644 index 00000000000..e4c340e3f1f --- /dev/null +++ b/gopls/internal/test/marker/testdata/completion/alias.txt @@ -0,0 +1,33 @@ +This test checks completion related to aliases. + +-- flags -- +-ignore_extra_diags +-min_go=go1.24 + +-- aliases.go -- +package aliases + +// Copied from the old builtins.go, which has been ported to the new marker tests. +/* string */ //@item(string, "string", "", "type") +/* int */ //@item(int, "int", "", "type") +/* float32 */ //@item(float32, "float32", "", "type") +/* float64 */ //@item(float64, "float64", "", "type") + +type p struct{} + +type s[a int | string] = p + +func _() { + s[]{} //@rank("]", int, float64) +} + +func takesGeneric[a int | string](s[a]) { + "s[a]{}" //@item(tpInScopeLit, "s[a]{}", "", "var") + takesGeneric() //@rank(")", tpInScopeLit),snippet(")", tpInScopeLit, "s[a]{\\}") +} + +type myType int //@item(flType, "myType", "int", "type") + +type myt[T int] myType //@item(aflType, "myt[T]", "int", "type") + +func (my myt) _() {} //@complete(") _", flType, aflType) diff --git a/gopls/internal/test/marker/testdata/completion/append.txt b/gopls/internal/test/marker/testdata/completion/append.txt index 96c09d7d428..89172211314 100644 --- a/gopls/internal/test/marker/testdata/completion/append.txt +++ b/gopls/internal/test/marker/testdata/completion/append.txt @@ -1,10 +1,7 @@ This test checks behavior of completion within append expressions. -It requires go1.23 as the new "structs" package appears as a completion. 
- -- flags -- -ignore_extra_diags --min_go=go1.23 -- go.mod -- module golang.org/lsptests/append diff --git a/gopls/internal/test/marker/testdata/completion/foobarbaz.txt b/gopls/internal/test/marker/testdata/completion/foobarbaz.txt index 1da0a405404..80ba5d1f5ee 100644 --- a/gopls/internal/test/marker/testdata/completion/foobarbaz.txt +++ b/gopls/internal/test/marker/testdata/completion/foobarbaz.txt @@ -3,7 +3,6 @@ This test ports some arbitrary tests from the old marker framework, that were -- flags -- -ignore_extra_diags --min_go=go1.20 -- settings.json -- { diff --git a/gopls/internal/test/marker/testdata/completion/imported-std.txt b/gopls/internal/test/marker/testdata/completion/imported-std.txt index bb17a07d4f8..e93de9563a8 100644 --- a/gopls/internal/test/marker/testdata/completion/imported-std.txt +++ b/gopls/internal/test/marker/testdata/completion/imported-std.txt @@ -13,7 +13,8 @@ behaves correctly in go1.22. (When go1.22 is assured, we can remove the min_go flag but leave the test inputs unchanged.) -- flags -- --ignore_extra_diags -min_go=go1.22 +-ignore_extra_diags +-min_go_command=go1.22 -- go.mod -- module example.com diff --git a/gopls/internal/test/marker/testdata/completion/issue62676.txt b/gopls/internal/test/marker/testdata/completion/issue62676.txt index 8f20c5872c2..af4c3b695ec 100644 --- a/gopls/internal/test/marker/testdata/completion/issue62676.txt +++ b/gopls/internal/test/marker/testdata/completion/issue62676.txt @@ -2,7 +2,6 @@ This test verifies that unimported completion respects the usePlaceholders setti -- flags -- -ignore_extra_diags --min_go=go1.21 -- settings.json -- { diff --git a/gopls/internal/test/marker/testdata/completion/nested_complit.txt b/gopls/internal/test/marker/testdata/completion/nested_complit.txt index 264ae77eab8..14677dfde73 100644 --- a/gopls/internal/test/marker/testdata/completion/nested_complit.txt +++ b/gopls/internal/test/marker/testdata/completion/nested_complit.txt @@ -5,7 +5,6 @@ version for consistency. -- flags -- -ignore_extra_diags --min_go=go1.20 -- nested_complit.go -- package nested_complit diff --git a/gopls/internal/test/marker/testdata/completion/unimported-std.txt b/gopls/internal/test/marker/testdata/completion/unimported-std.txt index 3bedf6bc5bd..0ad655c6a26 100644 --- a/gopls/internal/test/marker/testdata/completion/unimported-std.txt +++ b/gopls/internal/test/marker/testdata/completion/unimported-std.txt @@ -13,7 +13,7 @@ behaves correctly in go1.22. (When go1.22 is assured, we can remove the min_go flag but leave the test inputs unchanged.) 
-- flags -- --ignore_extra_diags -min_go=go1.22 +-ignore_extra_diags -- go.mod -- module example.com diff --git a/gopls/internal/test/marker/testdata/definition/cgo.txt b/gopls/internal/test/marker/testdata/definition/cgo.txt index 777285b242b..0664a7d972d 100644 --- a/gopls/internal/test/marker/testdata/definition/cgo.txt +++ b/gopls/internal/test/marker/testdata/definition/cgo.txt @@ -43,6 +43,8 @@ func _() { func Example() ``` +--- + [`cgo.Example` on pkg.go.dev](https://pkg.go.dev/cgo.test/cgo#Example) -- usecgo/usecgo.go -- package cgoimport @@ -59,4 +61,6 @@ func _() { func cgo.Example() ``` +--- + [`cgo.Example` on pkg.go.dev](https://pkg.go.dev/cgo.test/cgo#Example) diff --git a/gopls/internal/test/marker/testdata/definition/embed.txt b/gopls/internal/test/marker/testdata/definition/embed.txt index 5dc976c8b4d..8ff3e37adb3 100644 --- a/gopls/internal/test/marker/testdata/definition/embed.txt +++ b/gopls/internal/test/marker/testdata/definition/embed.txt @@ -5,7 +5,6 @@ and correct sizes information requires go1.21. -- flags -- -skip_goarch=386,arm --min_go=go1.21 -- go.mod -- module mod.com @@ -116,63 +115,89 @@ var _ = S1{ //@def("S1", S1),hover("S1", "S1", S1) func (a.A) Hi() ``` +--- + [`(a.A).Hi` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A.Hi) -- @F -- ```go field F int ``` +--- + @loc(F, "F") +--- + [`(b.Embed).F` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.F) -- @HGoodbye -- ```go func (a.H) Goodbye() ``` +--- + @loc(HGoodbye, "Goodbye") +--- + [`(a.H).Goodbye` on pkg.go.dev](https://pkg.go.dev/mod.com/a#H.Goodbye) -- @IB -- ```go func (a.I) B() ``` +--- + @loc(IB, "B") +--- + [`(a.I).B` on pkg.go.dev](https://pkg.go.dev/mod.com/a#I.B) -- @JHello -- ```go func (a.J) Hello() ``` +--- + @loc(JHello, "Hello") +--- + [`(a.J).Hello` on pkg.go.dev](https://pkg.go.dev/mod.com/a#J.Hello) -- @M -- ```go func (embed) M() ``` +--- + [`(b.Embed).M` on pkg.go.dev](https://pkg.go.dev/mod.com/b#Embed.M) -- @RField2 -- ```go field Field2 int ``` +--- + @loc(RField2, "Field2") +--- + [`(a.R).Field2` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Field2) -- @RHey -- ```go func (r a.R) Hey() ``` +--- + [`(a.R).Hey` on pkg.go.dev](https://pkg.go.dev/mod.com/a#R.Hey) -- @S1 -- ```go @@ -184,29 +209,41 @@ type S1 struct { } ``` +--- + ```go // Embedded fields: F2 int // through S2 ``` +--- + [`b.S1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1) -- @S1F1 -- ```go field F1 int ``` +--- + @loc(S1F1, "F1") +--- + [`(b.S1).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.F1) -- @S1S2 -- ```go field S2 S2 ``` +--- + @loc(S1S2, "S2"),def("S2", S2),hover("S2", "S2", S2) +--- + [`(b.S1).S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S1.S2) -- @S2 -- ```go @@ -217,43 +254,61 @@ type S2 struct { // size=32 (0x20) } ``` +--- + ```go func (a.A) Hi() ``` +--- + [`b.S2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2) -- @S2F1 -- ```go field F1 string ``` +--- + @loc(S2F1, "F1") +--- + [`(b.S2).F1` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F1) -- @S2F2 -- ```go field F2 int ``` +--- + @loc(S2F2, "F2") +--- + [`(b.S2).F2` on pkg.go.dev](https://pkg.go.dev/mod.com/b#S2.F2) -- @SField -- ```go field Field int ``` +--- + @loc(SField, "Field") +--- + [`(a.S).Field` on pkg.go.dev](https://pkg.go.dev/mod.com/a#S.Field) -- @aA -- ```go type A string // size=16 (0x10) ``` +--- + @loc(AString, "A") @@ -261,12 +316,16 @@ type A string // size=16 (0x10) func (a.A) Hi() ``` +--- + [`a.A` on pkg.go.dev](https://pkg.go.dev/mod.com/a#A) -- @aAlias -- ```go type aAlias = a.A // size=16 (0x10) ``` +--- + 
@loc(aAlias, "aAlias") diff --git a/gopls/internal/test/marker/testdata/definition/import.txt b/gopls/internal/test/marker/testdata/definition/import.txt index 2ae95a8c29b..1ee3a52e742 100644 --- a/gopls/internal/test/marker/testdata/definition/import.txt +++ b/gopls/internal/test/marker/testdata/definition/import.txt @@ -35,19 +35,27 @@ func _() { func DoFoo() ``` +--- + DoFoo does foo. +--- + [`foo.DoFoo` on pkg.go.dev](https://pkg.go.dev/mod.com/foo#DoFoo) -- @FooVar -- ```go var _ Foo ``` +--- + variable of type foo.Foo -- @myFoo -- ```go package myFoo ("mod.com/foo") ``` +--- + [`myFoo` on pkg.go.dev](https://pkg.go.dev/mod.com/foo) diff --git a/gopls/internal/test/marker/testdata/definition/misc.txt b/gopls/internal/test/marker/testdata/definition/misc.txt index 7c6bc27f316..9ddd7775fd9 100644 --- a/gopls/internal/test/marker/testdata/definition/misc.txt +++ b/gopls/internal/test/marker/testdata/definition/misc.txt @@ -126,6 +126,15 @@ func _() { _ = p.Sum() //@def("()", PosSum),hover("()", `Sum`, hoverSum) } -- @aPackage -- +```go +package a +``` + +--- + + - Package path: mod.com + - Module: mod.com + - Language version: go1.16 -- @hoverDeclBlocka -- ```go type a struct { // size=16 (0x10) @@ -133,12 +142,16 @@ type a struct { // size=16 (0x10) } ``` +--- + 1st type declaration block -- @hoverDeclBlockb -- ```go type b struct{} // size=0 ``` +--- + b has a comment -- @hoverDeclBlockc -- ```go @@ -147,12 +160,16 @@ type c struct { // size=16 (0x10) } ``` +--- + c is a struct -- @hoverDeclBlockd -- ```go type d string // size=16 (0x10) ``` +--- + 3rd type declaration block -- @hoverDeclBlocke -- ```go @@ -161,6 +178,8 @@ type e struct { // size=8 } ``` +--- + e has a comment -- @hoverH -- ```go @@ -169,6 +188,8 @@ type H interface { } ``` +--- + [`a.H` on pkg.go.dev](https://pkg.go.dev/mod.com#H) -- @hoverI -- ```go @@ -178,10 +199,14 @@ type I interface { } ``` +--- + ```go func (J) Hello() ``` +--- + [`a.I` on pkg.go.dev](https://pkg.go.dev/mod.com#I) -- @hoverJ -- ```go @@ -190,12 +215,16 @@ type J interface { } ``` +--- + [`a.J` on pkg.go.dev](https://pkg.go.dev/mod.com#J) -- @hoverSum -- ```go func (p *Pos) Sum() int ``` +--- + [`(a.Pos).Sum` on pkg.go.dev](https://pkg.go.dev/mod.com#Pos.Sum) -- @hoverTestA -- ```go @@ -210,24 +239,32 @@ field field string const g untyped int = 1 ``` +--- + When I hover on g, I should see this comment. -- @hoverh -- ```go const h untyped int = 2 ``` +--- + Constant block. When I hover on h, I should see this comment. -- @hoverpx -- ```go field x int ``` +--- + @loc(PosX, "x"),loc(PosY, "y") -- @hoverx -- ```go var x string ``` +--- + x is a variable. -- @hovery -- ```go @@ -238,4 +275,6 @@ var y int var z string ``` +--- + z is a variable too. diff --git a/gopls/internal/test/marker/testdata/definition/standalone.txt b/gopls/internal/test/marker/testdata/definition/standalone.txt index 2612f43d833..6af1149184d 100644 --- a/gopls/internal/test/marker/testdata/definition/standalone.txt +++ b/gopls/internal/test/marker/testdata/definition/standalone.txt @@ -1,9 +1,6 @@ This test checks the behavior of standalone packages, in particular documenting our failure to support test files as standalone packages (golang/go#64233). 
--- flags -- --min_go=go1.20 - -- go.mod -- module golang.org/lsptests/a diff --git a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt index 34488bec417..312a0c57120 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/analyzers.txt @@ -1,12 +1,16 @@ Test of warning diagnostics from various analyzers: copylocks, printf, slog, tests, timeformat, nilness, and cgocall. +-- settings.json -- +{ + "pullDiagnostics": true +} + -- go.mod -- module example.com go 1.12 -- flags -- --min_go=go1.21 -cgo -- bad_test.go -- @@ -14,6 +18,7 @@ package analyzer import ( "fmt" + "log/slog" "sync" "testing" "time" @@ -56,9 +61,16 @@ func _(s struct{x int}) { s.x = 1 //@diag("x", re"unused write to field x") } --- cgocall.go -- -package analyzer +// slog +func _() { + slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) +} +-- cgocall/cgocall.go -- +package cgocall + +// Note: this test must be in a separate package, as the unsafe import +// silences the unusedwrite analyzer. import "unsafe" // void f(void *ptr) {} @@ -68,15 +80,3 @@ import "C" func _(c chan bool) { C.f(unsafe.Pointer(&c)) //@ diag("unsafe", re"passing Go type with embedded pointer to C") } - --- bad_test_go121.go -- -//go:build go1.21 - -package analyzer - -import "log/slog" - -// slog -func _() { - slog.Info("msg", 1) //@diag("1", re`slog.Info arg "1" should be a string or a slog.Attr`) -} diff --git a/gopls/internal/test/marker/testdata/diagnostics/generated.txt b/gopls/internal/test/marker/testdata/diagnostics/generated.txt index ea5886dae03..123602df3c3 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/generated.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/generated.txt @@ -1,5 +1,10 @@ Test of "undeclared" diagnostic in generated code. +-- settings.json -- +{ + "pullDiagnostics": true +} + -- go.mod -- module example.com go 1.12 diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt index cd3ad6e9c63..22cff4315b5 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/issue56943.txt @@ -3,6 +3,11 @@ unexported interface methods in non-workspace packages. Previously, we would fail to produce a diagnostic because we trimmed the AST. See golang/go#56943. +-- settings.json -- +{ + "pullDiagnostics": true +} + -- main.go -- package main diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt b/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt index a2fb57ba0b1..f80857dcb99 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/issue60605.txt @@ -2,9 +2,6 @@ This test verifies that we can export constants with unknown kind. Previously, the exporter would panic while attempting to convert such constants to their target type (float64, in this case). --- flags -- --min_go=go1.20 - -- go.mod -- module mod.txt/p diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt b/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt index 229c99b6890..109ee53aa58 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/issue67360.txt @@ -7,9 +7,6 @@ error to lack a column. 
A bug in the error parser filled in 0, not 1, for the missing information, and this is an invalid value in the 1-based UTF-8 domain, leading to a panic. --- flags -- --min_go=go1.21 - -- foo.go -- //line foo.go:1 package main //@ diag(re"package", re"internal package.*not allowed") diff --git a/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt b/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt new file mode 100644 index 00000000000..6b2751d840b --- /dev/null +++ b/gopls/internal/test/marker/testdata/diagnostics/issue69505.txt @@ -0,0 +1,22 @@ +This test checks that diagnostics ranges computed with the TypeErrorEndPos +heuristic span at least a full token. + +-- go.mod -- +module example.com + +go 1.21 + +-- main.go -- +package main + +import "example.com/foo-bar" //@ diag(re`"[^"]*"`, re`not used`, exact=true) + +func f(int) {} + +func main() { + var x int + _ = x + 1.e+0i //@ diag("1.e+0i", re`truncated`, exact=true) +} + +-- foo-bar/baz.go -- +package foo diff --git a/gopls/internal/test/marker/testdata/diagnostics/range-over-func-67237.txt b/gopls/internal/test/marker/testdata/diagnostics/range-over-func-67237.txt deleted file mode 100644 index e2aa14221e3..00000000000 --- a/gopls/internal/test/marker/testdata/diagnostics/range-over-func-67237.txt +++ /dev/null @@ -1,82 +0,0 @@ - -This test verifies that SSA-based analyzers don't run on packages that -use range-over-func. This is an emergency fix of #67237 (for buildssa) -until we land https://go.dev/cl/555075. - -Similarly, it is an emergency fix of dominikh/go-tools#1494 (for -buildir) until that package is similarly fixed for go1.23. - -Explanation: -- Package p depends on q and r, and analyzers buildssa and buildir - depend on norangeoverfunc. -- Analysis pass norangeoverfunc@q fails, thus norangeoverfunc@p is not - executed; but norangeoverfunc@r is ok -- nilness requires buildssa, which is not facty, so it can run on p and r. -- SA4010 (CheckIneffectiveAppend) requires buildir, which is facty, - so SA4010 can run only on r. - -We don't import any std packages because even "fmt" depends on -range-over-func now (which means that in practice, everything does). 
- --- flags -- --min_go=go1.23 - --- settings.json -- -{ - "staticcheck": true, - "analyses": {"SA4010": true} -} - --- go.mod -- -module example.com - -go 1.23 - --- p/p.go -- -package p // a dependency uses range-over-func, so nilness runs but SA4010 cannot (buildir is facty) - -import ( - _ "example.com/q" - _ "example.com/r" -) - -func f(ptr *int) { - if ptr == nil { - println(*ptr) //@diag(re"[*]ptr", re"nil dereference in load") - } - - var s []int - s = append(s, 1) // no SA4010 finding -} - --- q/q.go -- -package q // uses range-over-func, so no diagnostics from SA4010 - -type iterSeq[T any] func(yield func(T) bool) - -func f(seq iterSeq[int]) { - for x := range seq { - println(x) - } - - var s []int - s = append(s, 1) // no SA4010 finding -} - -func _(ptr *int) { - if ptr == nil { - println(*ptr) //@diag(re"[*]ptr", re"nil dereference in load") - } -} - --- r/r.go -- -package r // does not use range-over-func, so nilness and SA4010 report diagnosticcs - -func f(ptr *int) { - if ptr == nil { - println(*ptr) //@diag(re"[*]ptr", re"nil dereference in load") - } - - var s []int - s = append(s, 1) //@ diag(re`s`, re`s is never used`), diag(re`append`, re`append is never used`) -} diff --git a/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt index 652ddd6b56a..c6a19a77717 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/stdversion.txt @@ -7,7 +7,7 @@ See also go/analysis/passes/stdversion/testdata/test.txtar, which runs the same test in the analysistest framework. -- flags -- --min_go=go1.22 +-min_go_command=go1.22 -- go.mod -- module example.com diff --git a/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt index 686b05c371e..9d6b0de5f6e 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/typeerr.txt @@ -19,10 +19,10 @@ package a func f(x int) { append("") //@diag(re`""`, re"a slice") - x := 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", fix) + x := 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) } -- @fix/typeerr.go -- @@ -6 +6 @@ -- x := 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", fix) -+ x = 123 //@diag(re"x := 123", re"no new variables"), suggestedfix(re"():", re"no new variables", fix) +- x := 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) ++ x = 123 //@diag(re"x := 123", re"no new variables"), quickfix(re"():", re"no new variables", fix) diff --git a/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt b/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt index 86010dc29c8..567d2a9d4ae 100644 --- a/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt +++ b/gopls/internal/test/marker/testdata/diagnostics/useinternal.txt @@ -5,9 +5,6 @@ This list error changed in Go 1.21. See TestValidImportCheck_GoPackagesDriver for a test that no diagnostic is produced when using a GOPACKAGESDRIVER (such as for Bazel). 
--- flags -- --min_go=go1.21 - -- go.mod -- module bad.test diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt index c73390066ae..3ece2f264f6 100644 --- a/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66109.txt @@ -1,8 +1,10 @@ This test exercises the crash in golang/go#66109: a dangling reference due to test variants of a command-line-arguments package. +Depends on go1.22+ go list errors. + -- flags -- --min_go=go1.22 +-min_go_command=go1.22 -- go.mod -- module example.com/tools diff --git a/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt b/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt index d6edcb57a18..db3def0bd7c 100644 --- a/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt +++ b/gopls/internal/test/marker/testdata/fixedbugs/issue66876.txt @@ -4,7 +4,7 @@ when the go.mod go version is set to a 1.22 toolchain version (1.22.x). In golang/go#66876, gopls failed to handle this correctly. -- flags -- --min_go=go1.22 +-min_go_command=go1.22 -- go.mod -- module example.com/loopclosure diff --git a/gopls/internal/test/marker/testdata/format/issue59554.txt b/gopls/internal/test/marker/testdata/format/issue59554.txt index 816c9d1e06f..aa436301102 100644 --- a/gopls/internal/test/marker/testdata/format/issue59554.txt +++ b/gopls/internal/test/marker/testdata/format/issue59554.txt @@ -4,11 +4,6 @@ directives. Note that gofumpt is needed for this test case, as it reformats var decls into short var decls. -Note that gofumpt requires Go 1.20. - --- flags -- --min_go=go1.20 - -- settings.json -- { "formatting.gofumpt": true diff --git a/gopls/internal/test/marker/testdata/hover/comment.txt b/gopls/internal/test/marker/testdata/hover/comment.txt index 479aff6473f..86a268f5981 100644 --- a/gopls/internal/test/marker/testdata/hover/comment.txt +++ b/gopls/internal/test/marker/testdata/hover/comment.txt @@ -1,10 +1,5 @@ This test checks hovering over doc links in comments. -Requires go1.20+ for the unsafe package test. - --- flags -- --min_go=go1.20 - -- go.mod -- module mod.com @@ -54,29 +49,53 @@ func ParseInt(s string, base int, bitSize int) (int64, error) { func Conv(s string) int ``` +--- + \[Conv] converts s to an int. //@hover("Conv", "Conv", Conv) +--- + [`p.Conv` on pkg.go.dev](https://pkg.go.dev/mod.com#Conv) -- @NumberBase -- ```go const NumberBase untyped int = 10 ``` +--- + \[NumberBase] is the base to use for number parsing. //@hover("NumberBase", "NumberBase", NumberBase) +--- + [`p.NumberBase` on pkg.go.dev](https://pkg.go.dev/mod.com#NumberBase) -- @strconv -- +```go +package util +``` + +--- + Package util provides utility functions. -- @strconvParseInt -- ```go func ParseInt(s string, base int, bitSize int) (int64, error) ``` +--- + ParseInt interprets a string s in the given base (0, 2 to 36) and bit size (0 to 64) and returns the corresponding value i. +--- + [`util.ParseInt` on pkg.go.dev](https://pkg.go.dev/mod.com/util#ParseInt) -- @util -- +```go +package util +``` + +--- + Package util provides utility functions. 
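A pattern worth noting across the hover-test hunks above and below this point: the hover golden sections now separate the signature block, the doc comment, and the pkg.go.dev link with `---` horizontal rules, which is why so many hunks consist only of added `+---` lines and blank lines. As a rough sketch of the resulting golden layout (the marker name `@example`, the function `F`, and the package `pkg` are placeholders for illustration, not taken from this patch):

-- @example --
```go
func F() int
```

---

F returns an int.

---

[`pkg.F` on pkg.go.dev](https://pkg.go.dev/example.com/pkg#F)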
diff --git a/gopls/internal/test/marker/testdata/hover/const.txt b/gopls/internal/test/marker/testdata/hover/const.txt index 179ff155357..2a5854ffb2c 100644 --- a/gopls/internal/test/marker/testdata/hover/const.txt +++ b/gopls/internal/test/marker/testdata/hover/const.txt @@ -83,21 +83,29 @@ func _() { const X untyped int = 0 ``` +--- + @hover("X", "X", bX) +--- + [`c.X` on pkg.go.dev](https://pkg.go.dev/mod.com#X) -- @dur -- ```go const dur time.Duration = 15*time.Minute + 10*time.Second + 350*time.Millisecond // 15m10.35s ``` +--- + dur is a constant of type time.Duration. -- @decimalConst -- ```go const decimal untyped int = 153 ``` +--- + no inline comment -- @hexConst -- ```go @@ -148,7 +156,11 @@ const longStr untyped string = "Lorem ipsum dolor sit amet, consectetur adipisci const math.Log2E untyped float = 1 / Ln2 // 1.4427 ``` +--- + Mathematical constants. +--- + [`math.Log2E` on pkg.go.dev](https://pkg.go.dev/math#Log2E) diff --git a/gopls/internal/test/marker/testdata/hover/embed.txt b/gopls/internal/test/marker/testdata/hover/embed.txt index 1dc3fcbfa12..3f4086c2332 100644 --- a/gopls/internal/test/marker/testdata/hover/embed.txt +++ b/gopls/internal/test/marker/testdata/hover/embed.txt @@ -41,6 +41,8 @@ type P struct { } ``` +--- + doc @@ -54,4 +56,6 @@ Three *chan int // through Q.q2 func (P) m() ``` +--- + [`p.P` on pkg.go.dev](https://pkg.go.dev/example.com#P) diff --git a/gopls/internal/test/marker/testdata/hover/generics.txt b/gopls/internal/test/marker/testdata/hover/generics.txt index 50ce49bb33f..50a7c706ee0 100644 --- a/gopls/internal/test/marker/testdata/hover/generics.txt +++ b/gopls/internal/test/marker/testdata/hover/generics.txt @@ -6,7 +6,6 @@ that affected the formatting of constraint interfaces. Its size expectations assume a 64-bit machine. -- flags -- --min_go=go1.20 -skip_goarch=386,arm -- go.mod -- @@ -33,6 +32,8 @@ type empty interface { // size=16 (0x10) } ``` +--- + Hovering over an interface with empty type set must not panic. -- generics.go -- package generics @@ -71,6 +72,8 @@ type parameter T any field Q int64 // size=8 ``` +--- + @hover("Q", "Q", valueQ) -- @ValueT -- ```go @@ -85,9 +88,13 @@ type parameter T any field Q int64 // size=8 ``` +--- + @hover("Q", "Q", ValueQ) +--- + [`(generics.Value).Q` on pkg.go.dev](https://pkg.go.dev/mod.com#Value.Q) -- @Ptparam -- ```go diff --git a/gopls/internal/test/marker/testdata/hover/godef.txt b/gopls/internal/test/marker/testdata/hover/godef.txt index 9b2e7ec2ce3..e7bf4817fd6 100644 --- a/gopls/internal/test/marker/testdata/hover/godef.txt +++ b/gopls/internal/test/marker/testdata/hover/godef.txt @@ -1,17 +1,6 @@ This test was ported from 'godef' in the old marker tests. It tests various hover and definition requests. -Requires go1.19+ for the new go/doc/comment package. - -TODO(adonovan): figure out why this test also fails -without -min_go=go1.20. Or just wait... 
- --- flags -- --min_go=go1.19 - --- flags -- --min_go=go1.20 - -- go.mod -- module godef.test @@ -37,9 +26,13 @@ func TestA2(t *testing.T) field Member string ``` +--- + @loc(Member, "Member") +--- + [`(a.Thing).Member` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing.Member) -- a/d.go -- package a //@hover("a", _, a) @@ -101,15 +94,21 @@ var nextThing NextThing //@hover("NextThing", "NextThing", NextThing), def("Next func Things(val []string) []Thing ``` +--- + [`a.Things` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Things) -- @ther -- ```go var Other Thing ``` +--- + @loc(Other, "Other") +--- + [`a.Other` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Other) -- @a -- -- @ing -- @@ -119,6 +118,8 @@ type Thing struct { } ``` +--- + ```go func (t Thing) Method(i int) string func (t *Thing) Method2(i int, j int) (error, string) @@ -126,6 +127,8 @@ func (t Thing) Method3() func (t *Thing) private() ``` +--- + [`a.Thing` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing) -- @NextThing -- ```go @@ -135,6 +138,8 @@ type NextThing struct { } ``` +--- + ```go // Embedded fields: Member string // through Thing @@ -148,12 +153,16 @@ func (n NextThing) another() string func (t *Thing) private() ``` +--- + [`a.NextThing` on pkg.go.dev](https://pkg.go.dev/godef.test/a#NextThing) -- @eth -- ```go func (t Thing) Method(i int) string ``` +--- + [`(a.Thing).Method` on pkg.go.dev](https://pkg.go.dev/godef.test/a#Thing.Method) -- a/f.go -- // Package a is a package for testing go to definition. @@ -337,36 +346,48 @@ func _() { field x string ``` +--- + X value field -- @hoverStructKeyY -- ```go field y string ``` +--- + Y key field -- @nestedNumber -- ```go field number int64 ``` +--- + nested number -- @nestedString -- ```go field str string ``` +--- + nested string -- @openMethod -- ```go func (interface) open() error ``` +--- + open method comment -- @nestedMap -- ```go field m map[string]float64 ``` +--- + nested map -- b/e.go -- package b diff --git a/gopls/internal/test/marker/testdata/hover/goprivate.txt b/gopls/internal/test/marker/testdata/hover/goprivate.txt index 202b4a11314..a042bee4b7c 100644 --- a/gopls/internal/test/marker/testdata/hover/goprivate.txt +++ b/gopls/internal/test/marker/testdata/hover/goprivate.txt @@ -19,10 +19,14 @@ type L struct{} //@hover("L", "L", L) type L struct{} // size=0 ``` +--- + GOPRIVATE should also match nested packages. -- @T -- ```go type T struct{} // size=0 ``` +--- + T should not be linked, as it is private. diff --git a/gopls/internal/test/marker/testdata/hover/hover.txt b/gopls/internal/test/marker/testdata/hover/hover.txt index 35a07fc9522..d2ae4fde9fa 100644 --- a/gopls/internal/test/marker/testdata/hover/hover.txt +++ b/gopls/internal/test/marker/testdata/hover/hover.txt @@ -1,10 +1,27 @@ -This test demonstrates some features of the new marker test runner. +This test demonstrates some basic features of hover. + +Needs go1.22 for the gotypesalias godebug value. 
+ +-- flags -- +-min_go_command=go1.22 + +-- go.mod -- +module example.com + +go 1.18 -- a.go -- // package comment package aa //@hover("aa", "aa", aa) const abc = 0x2a //@hover("b", "abc", abc),hover(" =", "abc", abc) + +-- a2.go -- + +//go:build go1.21 + +package aa //@hover("aa", "aa", aa2) + -- typeswitch.go -- package aa @@ -15,11 +32,23 @@ func _() { println(x) //@hover("x", "x", xint),hover(")", "x", xint) } } +-- cmd/main.go -- +//go:debug gotypesalias=0 + +// Note that since GODEBUG shows only settings that differ from +// the current toolchain, the output here depends on the toolchain used. +package main //@hover("main", "main", main) + +func main() { +} + -- @abc -- ```go const abc untyped int = 0x2a // 42 ``` +--- + @hover("b", "abc", abc),hover(" =", "abc", abc) -- @x -- ```go @@ -30,4 +59,47 @@ var x interface{} var x int ``` -- @aa -- +```go +package aa +``` + +--- + package comment + + +--- + + - Package path: example.com + - Module: example.com + - Language version: go1.18 +-- @aa2 -- +```go +package aa +``` + +--- + +package comment + + +--- + + - Package path: example.com + - Module: example.com + - Language version (current file): go1.21 +-- @main -- +```go +package main +``` + +--- + +Note that since GODEBUG shows only settings that differ from the current toolchain, the output here depends on the toolchain used. + + +--- + + - Package path: example.com/cmd + - Module: example.com + - Language version: go1.18 diff --git a/gopls/internal/test/marker/testdata/hover/linkable.txt b/gopls/internal/test/marker/testdata/hover/linkable.txt index fefedbceab6..6dc8076523e 100644 --- a/gopls/internal/test/marker/testdata/hover/linkable.txt +++ b/gopls/internal/test/marker/testdata/hover/linkable.txt @@ -60,15 +60,21 @@ func _() { field Embed int64 ``` +--- + [`(p.E).Embed` on pkg.go.dev](https://pkg.go.dev/mod.com#E.Embed) -- @F -- ```go field F int64 // size=8, offset=8 ``` +--- + @hover("F", "F", F) +--- + [`(p.T).F` on pkg.go.dev](https://pkg.go.dev/mod.com#T.F) -- @Local -- ```go @@ -77,6 +83,8 @@ type Local struct { // size=8 } ``` +--- + Local types should not be linkable, even if they are capitalized. @@ -89,6 +97,8 @@ Embed int64 // through E field Nested int64 // size=8, offset=0 ``` +--- + Nested fields should also be linkable. -- @T -- ```go @@ -106,6 +116,8 @@ type T struct { // size=32 (0x20) } ``` +--- + T is in the package scope, and so should be linkable. @@ -119,16 +131,22 @@ func (T) M() func (T) m() ``` +--- + [`p.T` on pkg.go.dev](https://pkg.go.dev/mod.com#T) -- @X -- ```go var X int64 ``` +--- + Local variables should not be linkable, even if they are capitalized. -- @f -- ```go field f int64 // size=8, offset=0 ``` +--- + @hover("f", "f", f) diff --git a/gopls/internal/test/marker/testdata/hover/linkable_generics.txt b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt index 0b7ade7965e..d2457ec6d31 100644 --- a/gopls/internal/test/marker/testdata/hover/linkable_generics.txt +++ b/gopls/internal/test/marker/testdata/hover/linkable_generics.txt @@ -42,9 +42,13 @@ func GF[P any] (p P) { //@hover("GF", "GF", GF) field F P ``` +--- + @hover("F", "F", F),hover("P", "P", FP) +--- + [`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) -- @FP -- ```go @@ -55,6 +59,8 @@ type parameter P any func GF[P any](p P) ``` +--- + [`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) -- @GT -- ```go @@ -63,6 +69,8 @@ type GT[P any] struct { } ``` +--- + Hovering over type parameters should link to documentation. 
TODO(rfindley): should it? We should probably link to the type. @@ -72,6 +80,8 @@ TODO(rfindley): should it? We should probably link to the type. func (GT[P]) M(p P) ``` +--- + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) -- @GTP -- ```go @@ -84,6 +94,8 @@ type GT[P any] struct { } ``` +--- + Hovering over type parameters should link to documentation. TODO(rfindley): should it? We should probably link to the type. @@ -93,12 +105,16 @@ TODO(rfindley): should it? We should probably link to the type. func (GT[P]) M(p P) ``` +--- + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) -- @M -- ```go func (GT[P]) M(p P) ``` +--- + [`(generic.GT).M` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.M) -- @f -- ```go @@ -109,6 +125,8 @@ var f func(p int) func generic.GF(p int) // func[P any](p P) ``` +--- + [`generic.GF` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GF) -- @pP -- ```go @@ -119,15 +137,21 @@ type parameter P any var x generic.GT[int] ``` +--- + @hover("GT", "GT", xGT) -- @xF -- ```go field F int ``` +--- + @hover("F", "F", F),hover("P", "P", FP) +--- + [`(generic.GT).F` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT.F) -- @xGT -- ```go @@ -136,6 +160,8 @@ type GT[P any] struct { } ``` +--- + Hovering over type parameters should link to documentation. TODO(rfindley): should it? We should probably link to the type. @@ -145,4 +171,6 @@ TODO(rfindley): should it? We should probably link to the type. func (generic.GT[P]) M(p P) ``` +--- + [`generic.GT` on pkg.go.dev](https://pkg.go.dev/mod.com/generic#GT) diff --git a/gopls/internal/test/marker/testdata/hover/linkname.txt b/gopls/internal/test/marker/testdata/hover/linkname.txt index 8bb2eeb33cd..6e128a2f215 100644 --- a/gopls/internal/test/marker/testdata/hover/linkname.txt +++ b/gopls/internal/test/marker/testdata/hover/linkname.txt @@ -27,4 +27,6 @@ func bar() string { func bar() string ``` +--- + bar does foo. 
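The other mechanical change that recurs throughout these testdata hunks is the handling of the `-min_go` flag: it is dropped where the old go1.20/go1.21 constraint is evidently no longer needed, and tests that depend on newer toolchain behaviour (go list error formats, godebug directives, the gotypesalias godebug value) switch to `-min_go_command`. A minimal flags section in the new style, mirroring the imported-std and stdversion tests above, would look like the sketch below (the precise semantics of `-min_go_command` are inferred from context here, not spelled out in this patch):

-- flags --
-ignore_extra_diags
-min_go_command=go1.22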
diff --git a/gopls/internal/test/marker/testdata/hover/methods.txt b/gopls/internal/test/marker/testdata/hover/methods.txt index 8af22494f75..142f3ffc97f 100644 --- a/gopls/internal/test/marker/testdata/hover/methods.txt +++ b/gopls/internal/test/marker/testdata/hover/methods.txt @@ -46,26 +46,36 @@ type I interface { } ``` +--- + ```go func (lib.J) C() ``` +--- + [`lib.I` on pkg.go.dev](https://pkg.go.dev/example.com/lib#I) -- @J -- ```go type J interface{ C() } ``` +--- + [`lib.J` on pkg.go.dev](https://pkg.go.dev/example.com/lib#J) -- @S -- ```go type S struct{ I } ``` +--- + ```go func (s lib.S) A() func (lib.J) C() func (s *lib.S) PA() ``` +--- + [`lib.S` on pkg.go.dev](https://pkg.go.dev/example.com/lib#S) diff --git a/gopls/internal/test/marker/testdata/hover/sizeoffset.txt b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt index 62f3b76dd60..54af8cdc6ec 100644 --- a/gopls/internal/test/marker/testdata/hover/sizeoffset.txt +++ b/gopls/internal/test/marker/testdata/hover/sizeoffset.txt @@ -11,7 +11,6 @@ Notes: -- flags -- -skip_goarch=386,arm --min_go=go1.22 -- go.mod -- module example.com @@ -55,6 +54,8 @@ type T struct { // size=48 (0x30) } ``` +--- + [`a.T` on pkg.go.dev](https://pkg.go.dev/example.com#T) -- @wasteful -- ```go @@ -69,49 +70,67 @@ type wasteful struct { // size=48 (0x30) (29% wasted) field a int // size=8, offset=0 ``` +--- + @ hover("a", "a", a) -- @U -- ```go field U U // size=24 (0x18), offset=8 ``` +--- + @ hover("U", "U", U) +--- + [`(a.T).U` on pkg.go.dev](https://pkg.go.dev/example.com#T.U) -- @y -- ```go field y int // size=8, offset=32 (0x20) ``` +--- + @ hover("y", "y", y), hover("z", "z", z) -- @z -- ```go field z int // size=8, offset=40 (0x28) ``` +--- + @ hover("y", "y", y), hover("z", "z", z) -- @p -- ```go field p T ``` +--- + @ hover("p", "p", p) -- @q -- ```go field q int // size=8 ``` +--- + @ hover("q", "q", q) -- @Gint -- ```go field Gint G[int] // size=16 (0x10), offset=0 ``` +--- + @ hover("Gint", "Gint", Gint) -- @Gstring -- ```go field Gstring G[string] // size=24 (0x18), offset=16 (0x10) ``` +--- + @ hover("Gstring", "Gstring", Gstring) diff --git a/gopls/internal/test/marker/testdata/hover/std.txt b/gopls/internal/test/marker/testdata/hover/std.txt index c0db135f6b1..c12f6ce13dd 100644 --- a/gopls/internal/test/marker/testdata/hover/std.txt +++ b/gopls/internal/test/marker/testdata/hover/std.txt @@ -44,40 +44,58 @@ func _() { func (m *sync.Mutex) Lock() ``` +--- + Lock locks m. +--- + [`(sync.Mutex).Lock` on pkg.go.dev](https://pkg.go.dev/sync#Mutex.Lock) -- @hoverName -- ```go func (obj *types.object) Name() string ``` +--- + Name returns the object's (package-local, unqualified) name. +--- + [`(types.TypeName).Name` on pkg.go.dev](https://pkg.go.dev/go/types#TypeName.Name) -- @hoverTypes -- ```go package types ("go/types") ``` +--- + [`types` on pkg.go.dev](https://pkg.go.dev/go/types) -- @hovermake -- ```go func make(t Type, size ...int) Type ``` +--- + The make built-in function allocates and initializes an object of type slice, map, or chan (only). +--- + [`make` on pkg.go.dev](https://pkg.go.dev/builtin#make) -- @hoverstring -- ```go type string string ``` +--- + string is the set of all strings of 8-bit bytes, conventionally but not necessarily representing UTF-8-encoded text. 
+--- + [`string` on pkg.go.dev](https://pkg.go.dev/builtin#string) diff --git a/gopls/internal/test/marker/testdata/hover/structfield.txt b/gopls/internal/test/marker/testdata/hover/structfield.txt index 82115f7908d..6b4897968b6 100644 --- a/gopls/internal/test/marker/testdata/hover/structfield.txt +++ b/gopls/internal/test/marker/testdata/hover/structfield.txt @@ -23,7 +23,11 @@ func DoSomething() Something { field Field int `json:"field"` ``` +--- + Field with a tag +--- + [`(lib.Something).Field` on pkg.go.dev](https://pkg.go.dev/example.com/lib#Something.Field) diff --git a/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt b/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt index 18e98e81acb..df25e6fb190 100644 --- a/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt +++ b/gopls/internal/test/marker/testdata/inlayhints/issue67142.txt @@ -2,7 +2,6 @@ Regression test for golang/go#67142. -- flags -- -ignore_extra_diags --min_go=go1.21 -- settings.json -- { diff --git a/gopls/internal/test/marker/testdata/modfile/godebug.txt b/gopls/internal/test/marker/testdata/modfile/godebug.txt index dbee5faae01..49fab9bda7c 100644 --- a/gopls/internal/test/marker/testdata/modfile/godebug.txt +++ b/gopls/internal/test/marker/testdata/modfile/godebug.txt @@ -2,7 +2,7 @@ This test basic gopls functionality in a workspace with a godebug directive in its modfile. -- flags -- --min_go=go1.23 +-min_go_command=go1.23 -- go.mod -- module example.com/m diff --git a/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt b/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt index 1d06c7cf73c..1b26f607dc1 100644 --- a/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt +++ b/gopls/internal/test/marker/testdata/modfile/godebug_bad.txt @@ -3,7 +3,7 @@ This test checks that we surface the error for unexpected godebug values. TODO(golang/go#67623): the diagnostic should be on the bad godebug value. 
-- flags -- --min_go=go1.23 +-min_go_command=go1.23 -errors_ok -- go.mod -- diff --git a/gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt b/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt similarity index 75% rename from gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt rename to gopls/internal/test/marker/testdata/quickfix/embeddirective.txt index 821eb10ef20..f0915476f7f 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/embeddirective.txt +++ b/gopls/internal/test/marker/testdata/quickfix/embeddirective.txt @@ -10,7 +10,7 @@ import ( "os" ) -//go:embed embed.txt //@suggestedfix("//go:embed", re`must import "embed"`, fix_import) +//go:embed embed.txt //@quickfix("//go:embed", re`must import "embed"`, fix_import) var t string func unused() { diff --git a/gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt b/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt similarity index 53% rename from gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt rename to gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt index b622efdc358..ffb7baa7089 100644 --- a/gopls/internal/test/marker/testdata/codeaction/infertypeargs.txt +++ b/gopls/internal/test/marker/testdata/quickfix/infertypeargs.txt @@ -15,11 +15,11 @@ func app[S interface{ ~[]E }, E interface{}](s S, e E) S { func _() { _ = app[[]int] _ = app[[]int, int] - _ = app[[]int]([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) + _ = app[[]int]([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) _ = app([]int{}, 0) } -- @infer/p.go -- @@ -10 +10 @@ -- _ = app[[]int]([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) -+ _ = app([]int{}, 0) //@suggestedfix("[[]int]", re"unnecessary type arguments", infer) +- _ = app[[]int]([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) ++ _ = app([]int{}, 0) //@quickfix("[[]int]", re"unnecessary type arguments", infer) diff --git a/gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt b/gopls/internal/test/marker/testdata/quickfix/issue65024.txt similarity index 90% rename from gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt rename to gopls/internal/test/marker/testdata/quickfix/issue65024.txt index afdfce9f1cc..c8090b489e6 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/issue65024.txt +++ b/gopls/internal/test/marker/testdata/quickfix/issue65024.txt @@ -22,7 +22,7 @@ import "example.com/a/v2" type B struct{} -var _ a.I = &B{} //@ suggestedfix("&B{}", re"does not implement", out) +var _ a.I = &B{} //@ quickfix("&B{}", re"does not implement", out) // This line makes the diff tidier. @@ -55,7 +55,7 @@ package b type B struct{} -var _ I = &B{} //@ suggestedfix("&B{}", re"does not implement", out2) +var _ I = &B{} //@ quickfix("&B{}", re"does not implement", out2) // This line makes the diff tidier. diff --git a/gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt b/gopls/internal/test/marker/testdata/quickfix/missingfunction.txt similarity index 67% rename from gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt rename to gopls/internal/test/marker/testdata/quickfix/missingfunction.txt index b19095a06f3..a21ccca766f 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/missingfunction.txt +++ b/gopls/internal/test/marker/testdata/quickfix/missingfunction.txt @@ -4,7 +4,7 @@ This test checks the quick fix for undefined functions. 
package missingfunction func channels(s string) { - undefinedChannels(c()) //@suggestedfix("undefinedChannels", re"(undeclared|undefined)", channels) + undefinedChannels(c()) //@quickfix("undefinedChannels", re"(undeclared|undefined)", channels) } func c() (<-chan string, chan string) { @@ -21,7 +21,7 @@ package missingfunction func consecutiveParams() { var s string - undefinedConsecutiveParams(s, s) //@suggestedfix("undefinedConsecutiveParams", re"(undeclared|undefined)", consecutive) + undefinedConsecutiveParams(s, s) //@quickfix("undefinedConsecutiveParams", re"(undeclared|undefined)", consecutive) } -- @consecutive/consecutive.go -- @@ -7 +7,4 @@ @@ -34,7 +34,7 @@ package missingfunction func errorParam() { var err error - undefinedErrorParam(err) //@suggestedfix("undefinedErrorParam", re"(undeclared|undefined)", error) + undefinedErrorParam(err) //@quickfix("undefinedErrorParam", re"(undeclared|undefined)", error) } -- @error/error.go -- @@ -7 +7,4 @@ @@ -48,7 +48,7 @@ package missingfunction type T struct{} func literals() { - undefinedLiterals("hey compiler", T{}, &T{}) //@suggestedfix("undefinedLiterals", re"(undeclared|undefined)", literals) + undefinedLiterals("hey compiler", T{}, &T{}) //@quickfix("undefinedLiterals", re"(undeclared|undefined)", literals) } -- @literals/literals.go -- @@ -8 +8,4 @@ @@ -62,7 +62,7 @@ package missingfunction import "time" func operation() { - undefinedOperation(10 * time.Second) //@suggestedfix("undefinedOperation", re"(undeclared|undefined)", operation) + undefinedOperation(10 * time.Second) //@quickfix("undefinedOperation", re"(undeclared|undefined)", operation) } -- @operation/operation.go -- @@ -8 +8,4 @@ @@ -75,7 +75,7 @@ package missingfunction func selector() { m := map[int]bool{} - undefinedSelector(m[1]) //@suggestedfix("undefinedSelector", re"(undeclared|undefined)", selector) + undefinedSelector(m[1]) //@quickfix("undefinedSelector", re"(undeclared|undefined)", selector) } -- @selector/selector.go -- @@ -7 +7,4 @@ @@ -87,7 +87,7 @@ func selector() { package missingfunction func slice() { - undefinedSlice([]int{1, 2}) //@suggestedfix("undefinedSlice", re"(undeclared|undefined)", slice) + undefinedSlice([]int{1, 2}) //@quickfix("undefinedSlice", re"(undeclared|undefined)", slice) } -- @slice/slice.go -- @@ -6 +6,4 @@ @@ -99,7 +99,7 @@ func slice() { package missingfunction func tuple() { - undefinedTuple(b()) //@suggestedfix("undefinedTuple", re"(undeclared|undefined)", tuple) + undefinedTuple(b()) //@quickfix("undefinedTuple", re"(undeclared|undefined)", tuple) } func b() (string, error) { @@ -117,7 +117,7 @@ package missingfunction func uniqueArguments() { var s string var i int - undefinedUniqueArguments(s, i, s) //@suggestedfix("undefinedUniqueArguments", re"(undeclared|undefined)", unique) + undefinedUniqueArguments(s, i, s) //@quickfix("undefinedUniqueArguments", re"(undeclared|undefined)", unique) } -- @unique/unique_params.go -- @@ -8 +8,4 @@ diff --git a/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt b/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt new file mode 100644 index 00000000000..5b4643778a3 --- /dev/null +++ b/gopls/internal/test/marker/testdata/quickfix/noresultvalues.txt @@ -0,0 +1,18 @@ +This test checks the quick fix for removing extra return values. + +Note: gopls should really discard unnecessary return statements. 
+ +-- noresultvalues.go -- +package typeerrors + +func x() { return nil } //@quickfix("nil", re"too many return", x) + +func y() { return nil, "hello" } //@quickfix("nil", re"too many return", y) +-- @x/noresultvalues.go -- +@@ -3 +3 @@ +-func x() { return nil } //@quickfix("nil", re"too many return", x) ++func x() { return } //@quickfix("nil", re"too many return", x) +-- @y/noresultvalues.go -- +@@ -5 +5 @@ +-func y() { return nil, "hello" } //@quickfix("nil", re"too many return", y) ++func y() { return } //@quickfix("nil", re"too many return", y) diff --git a/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt b/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt new file mode 100644 index 00000000000..44a6ad5b8ad --- /dev/null +++ b/gopls/internal/test/marker/testdata/quickfix/self_assignment.txt @@ -0,0 +1,19 @@ +Test of the suggested fix to remove unnecessary assignments. + +-- a.go -- +package quickfix + +import ( + "log" +) + +func goodbye() { + s := "hiiiiiii" + s = s //@quickfix("s = s", re"self-assignment", fix) + log.Print(s) +} + +-- @fix/a.go -- +@@ -9 +9 @@ +- s = s //@quickfix("s = s", re"self-assignment", fix) ++ //@quickfix("s = s", re"self-assignment", fix) diff --git a/gopls/internal/test/marker/testdata/suggestedfix/stub.txt b/gopls/internal/test/marker/testdata/quickfix/stub.txt similarity index 70% rename from gopls/internal/test/marker/testdata/suggestedfix/stub.txt rename to gopls/internal/test/marker/testdata/quickfix/stub.txt index fc10d8e58ad..6f0a0788679 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/stub.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stub.txt @@ -27,7 +27,7 @@ import "io" // then our implementation must add the import/package selector // in the concrete method if the concrete type is outside of the interface // package -var _ io.ReaderFrom = &readerFrom{} //@suggestedfix("&readerFrom", re"cannot use", readerFrom) +var _ io.ReaderFrom = &readerFrom{} //@quickfix("&readerFrom", re"cannot use", readerFrom) type readerFrom struct{} -- @readerFrom/add_selector.go -- @@ -44,7 +44,7 @@ import "io" func _() { var br io.ByteWriter - br = &byteWriter{} //@suggestedfix("&", re"does not implement", assign) + br = &byteWriter{} //@quickfix("&", re"does not implement", assign) _ = br } @@ -64,7 +64,7 @@ import "io" func _() { var br io.ByteWriter var i int - i, br = 1, &multiByteWriter{} //@suggestedfix("&", re"does not implement", assign_multivars) + i, br = 1, &multiByteWriter{} //@quickfix("&", re"does not implement", assign_multivars) _, _ = i, br } @@ -80,7 +80,7 @@ type multiByteWriter struct{} package stub func main() { - check(&callExpr{}) //@suggestedfix("&", re"does not implement", call_expr) + check(&callExpr{}) //@quickfix("&", re"does not implement", call_expr) } func check(err error) { @@ -105,7 +105,7 @@ import ( "sort" ) -var _ embeddedInterface = (*embeddedConcrete)(nil) //@suggestedfix("(", re"does not implement", embedded) +var _ embeddedInterface = (*embeddedConcrete)(nil) //@quickfix("(", re"does not implement", embedded) type embeddedConcrete struct{} @@ -139,7 +139,7 @@ type embeddedInterface interface { package stub func _() { - var br error = &customErr{} //@suggestedfix("&", re"does not implement", err) + var br error = &customErr{} //@quickfix("&", re"does not implement", err) _ = br } @@ -159,7 +159,7 @@ import ( ) func newCloser() io.Closer { - return closer{} //@suggestedfix("c", re"does not implement", function_return) + return closer{} //@quickfix("c", re"does not implement", 
function_return) } type closer struct{} @@ -177,7 +177,7 @@ import "io" // This file tests that that the stub method generator accounts for concrete // types that have type parameters defined. -var _ io.ReaderFrom = &genReader[string, int]{} //@suggestedfix("&genReader", re"does not implement", generic_receiver) +var _ io.ReaderFrom = &genReader[string, int]{} //@quickfix("&genReader", re"does not implement", generic_receiver) type genReader[T, Y any] struct { T T @@ -205,7 +205,7 @@ import ( var ( _ Reader - _ zlib.Resetter = (*ignoredResetter)(nil) //@suggestedfix("(", re"does not implement", ignored_imports) + _ zlib.Resetter = (*ignoredResetter)(nil) //@quickfix("(", re"does not implement", ignored_imports) ) type ignoredResetter struct{} @@ -223,7 +223,7 @@ type I interface{ error } type C int -var _ I = C(0) //@suggestedfix("C", re"does not implement", issue2606) +var _ I = C(0) //@quickfix("C", re"does not implement", issue2606) -- @issue2606/issue2606.go -- @@ -7 +7,5 @@ +// Error implements I. @@ -240,7 +240,7 @@ import "io" // has multiple values on the same line can still be // analyzed correctly to target the interface implementation // diagnostic. -var one, two, three io.Reader = nil, &multiVar{}, nil //@suggestedfix("&", re"does not implement", multi_var) +var one, two, three io.Reader = nil, &multiVar{}, nil //@quickfix("&", re"does not implement", multi_var) type multiVar struct{} -- @multi_var/multi_var.go -- @@ -256,7 +256,7 @@ package stub import "io" func getReaderFrom() io.ReaderFrom { - return &pointerImpl{} //@suggestedfix("&", re"does not implement", pointer) + return &pointerImpl{} //@quickfix("&", re"does not implement", pointer) } type pointerImpl struct{} @@ -275,7 +275,7 @@ import ( myio "io" ) -var _ zlib.Resetter = &myIO{} //@suggestedfix("&", re"does not implement", renamed_import) +var _ zlib.Resetter = &myIO{} //@quickfix("&", re"does not implement", renamed_import) var _ myio.Reader type myIO struct{} @@ -297,7 +297,7 @@ import ( // method references an import from its own package // that the concrete type does not yet import, and that import happens // to be renamed, then we prefer the renaming of the interface. -var _ other.Interface = &otherInterfaceImpl{} //@suggestedfix("&otherInterfaceImpl", re"does not implement", renamed_import_iface) +var _ other.Interface = &otherInterfaceImpl{} //@quickfix("&otherInterfaceImpl", re"does not implement", renamed_import_iface) type otherInterfaceImpl struct{} -- @renamed_import_iface/renamed_import_iface.go -- @@ -317,7 +317,7 @@ import ( "io" ) -var _ io.Writer = writer{} //@suggestedfix("w", re"does not implement", stdlib) +var _ io.Writer = writer{} //@quickfix("w", re"does not implement", stdlib) type writer struct{} -- @stdlib/stdlib.go -- @@ -327,40 +327,3 @@ type writer struct{} +func (w writer) Write(p []byte) (n int, err error) { + panic("unimplemented") +} --- typedecl_group.go -- -package stub - -// Regression test for Issue #56825: file corrupted by insertion of -// methods after TypeSpec in a parenthesized TypeDecl. - -import "io" - -func newReadCloser() io.ReadCloser { - return rdcloser{} //@suggestedfix("rd", re"does not implement", typedecl_group) -} - -type ( - A int - rdcloser struct{} - B int -) - -func _() { - // Local types can't be stubbed as there's nowhere to put the methods. - // Check that executing the code action causes an error, not file corruption. - // TODO(adonovan): it would be better not to offer the quick fix in this case. 
- type local struct{} - var _ io.ReadCloser = local{} //@suggestedfixerr("local", re"does not implement", "local type \"local\" cannot be stubbed") -} --- @typedecl_group/typedecl_group.go -- -@@ -18 +18,10 @@ -+// Close implements io.ReadCloser. -+func (r rdcloser) Close() error { -+ panic("unimplemented") -+} -+ -+// Read implements io.ReadCloser. -+func (r rdcloser) Read(p []byte) (n int, err error) { -+ panic("unimplemented") -+} -+ diff --git a/gopls/internal/test/marker/testdata/stubmethods/basic.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt similarity index 80% rename from gopls/internal/test/marker/testdata/stubmethods/basic.txt rename to gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt index e4cfb6d05a0..96f992f8aaa 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/basic.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic.txt @@ -10,7 +10,7 @@ package a type C int -var _ error = C(0) //@suggestedfix(re"C.0.", re"missing method Error", stub) +var _ error = C(0) //@quickfix(re"C.0.", re"missing method Error", stub) -- @stub/a/a.go -- @@ -5 +5,5 @@ +// Error implements error. diff --git a/gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt similarity index 86% rename from gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt rename to gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt index 183b7d526eb..502cc40bb74 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/basic_resolve.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/basic_resolve.txt @@ -21,7 +21,7 @@ package a type C int -var _ error = C(0) //@suggestedfix(re"C.0.", re"missing method Error", stub) +var _ error = C(0) //@quickfix(re"C.0.", re"missing method Error", stub) -- @stub/a/a.go -- @@ -5 +5,5 @@ +// Error implements error. diff --git a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt new file mode 100644 index 00000000000..9992bc0bf3d --- /dev/null +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_basic.txt @@ -0,0 +1,68 @@ +This test checks the basic of 'Declare missing method T.f' quick fix. 
+ +-- basic_stub.go -- +package fromcallbasic + +type Basic struct{} + +func basic() { + i := 1 + b := Basic{} + f(b.basic(i)) //@quickfix("basic", re"has no field or method", basic) +} + +func f(i int) string { return "s" } +-- @basic/basic_stub.go -- +@@ -5 +5,4 @@ ++func (b Basic) basic(i int) int { ++ panic("unimplemented") ++} ++ +-- pointer.go -- +package fromcallbasic + +type P struct{} + +func recv_param_pointer() { + p := &P{} + i := 42 + p.pointer(&i) //@quickfix("pointer", re"has no field or method", pointer) +} +-- @pointer/pointer.go -- +@@ -5 +5,4 @@ ++func (p *P) pointer(i *int) { ++ panic("unimplemented") ++} ++ +-- other.go -- +package fromcallbasic + +type TypeDeclInOtherFile struct{} + +-- this.go -- +package fromcallbasic + +func fun() { + i := 1 + t := TypeDeclInOtherFile{} + t.other(i) //@quickfix("other", re"has no field or method", del_other) +} +-- @del_other/other.go -- +@@ -5 +5,3 @@ ++func (t TypeDeclInOtherFile) other(i int) { ++ panic("unimplemented") ++} +-- should_insert_after.go -- +package fromcallbasic + +type HasMethod struct{} + +func (h *HasMethod) m() { + h.should_insert_after() //@quickfix("should_insert_after", re"has no field or method", insert) +} +-- @insert/should_insert_after.go -- +@@ -8 +8,4 @@ ++ ++func (h *HasMethod) should_insert_after() { ++ panic("unimplemented") ++} diff --git a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt new file mode 100644 index 00000000000..bd15803f79c --- /dev/null +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_params.txt @@ -0,0 +1,84 @@ +This test checks the param name and type of the generated missing method based on CallExpr. + +-- basic_stub.go -- +package fromcallparams + +type A struct{} + +func untypedParams() { + a := A{} + a.untyped("s", 42, 4.12, make(map[string]int), []int{1}, [1]int{1}, make(chan string)) //@quickfix("untyped", re"has no field or method", basic) +} +-- @basic/basic_stub.go -- +@@ -5 +5,4 @@ ++func (a A) untyped(s string, i int, f float64, m map[string]int, param5 []int, param6 [1]int, ch chan string) { ++ panic("unimplemented") ++} ++ +-- nonexistent_type.go -- +package fromcallparams + +type B struct{} + +func invalidBasicKindParam() { + b := B{} + b.basicKind(NonExistentType{}) //@quickfix("basicKind", re"has no field or method", nonexistent),diag(re"NonExistentType",re"undefined: NonExistentType") +} +-- @nonexistent/nonexistent_type.go -- +@@ -5 +5,4 @@ ++func (b B) basicKind(param any) { ++ panic("unimplemented") ++} ++ +-- pass_param_by_ident.go -- +package fromcallparams + +type C struct{} + +func passParamByIdent() { + c := C{} + stringVar := "some string" + intVar := 1 + sliceVar := []int{1} + c.ident(stringVar, intVar, sliceVar) //@quickfix("ident", re"has no field or method", ident) +} +-- @ident/pass_param_by_ident.go -- +@@ -5 +5,4 @@ ++func (c C) ident(stringVar string, intVar int, sliceVar []int) { ++ panic("unimplemented") ++} ++ +-- tail_param_name.go -- +package fromcallparams + +type Tail struct{} + +type TypeWithLongName struct{} + +func TailParamName() { + t := Tail{} + t.longName(TypeWithLongName{}) //@quickfix("longName", re"has no field or method", trail) +} +-- @trail/tail_param_name.go -- +@@ -5 +5,4 @@ ++func (t Tail) longName(name TypeWithLongName) { ++ panic("unimplemented") ++} ++ +-- selector_param_name.go -- +package fromcallparams + +import "net/http" + +type Select struct{} + +func selectExpr() { + s := Select{} + 
s.sel(http.ErrNotMultipart) //@quickfix("sel", re"has no field or method", select) +} +-- @select/selector_param_name.go -- +@@ -7 +7,4 @@ ++func (s Select) sel(multipart *http.ProtocolError) { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt new file mode 100644 index 00000000000..ca10f628402 --- /dev/null +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/fromcall_returns.txt @@ -0,0 +1,106 @@ +This test checks the return type of the generated missing method based on CallExpr. + +-- param.go -- +package fromcallreturns + +type A struct{} + +func inferFromParam() { + a := A{} + f(a.as_param()) //@quickfix("as_param", re"has no field or method", infer_param) +} + +func f(i int) {} +-- @infer_param/param.go -- +@@ -5 +5,4 @@ ++func (a A) as_param() int { ++ panic("unimplemented") ++} ++ +-- assign.go -- +package fromcallreturns + +type Assign struct{} + +func inferReturnfromAssign() { + var assign int //@diag("assign",re"not used") + a := Assign{} + assign = a.as_assign() //@quickfix("as_assign", re"has no field or method", infer_assign) +} +-- @infer_assign/assign.go -- +@@ -5 +5,4 @@ ++func (a Assign) as_assign() int { ++ panic("unimplemented") ++} ++ +-- multiple_assign.go -- +package fromcallreturns + +type MultiAssign struct{} + +func inferReturnfromMultipleAssign() { + var assign1 int //@diag("assign1",re"not used") + var assign2 int //@diag("assign2",re"not used") + m := MultiAssign{} + assign1, assign2 = m.multi_assign() //@quickfix("multi_assign", re"has no field or method", infer_multiple_assign) +} +-- @infer_multiple_assign/multiple_assign.go -- +@@ -5 +5,4 @@ ++func (m MultiAssign) multi_assign() (int, int) { ++ panic("unimplemented") ++} ++ +-- multiple_return_in_param.go -- +package fromcallreturns + +type MultiReturn struct{} + +func inferMultipleReturnInParam() { + m := MultiReturn{} + m.param_has_multi_return(multiReturn()) //@quickfix("param_has_multi_return", re"has no field or method", multiple_return) +} + +func multiReturn() (int, int) { + return 1, 1 +} +-- @multiple_return/multiple_return_in_param.go -- +@@ -5 +5,4 @@ ++func (m MultiReturn) param_has_multi_return(i int, param2 int) { ++ panic("unimplemented") ++} ++ +-- error_nodes.go -- +package fromcallreturns + +type E struct{} + +func all_error() { + e := E{} + errorFunc(e.errors(undefined1(), undefined2(), undefined3{})) //@quickfix("errors", re"has no field or method", all_error),diag("undefined1",re"undefined"),diag("undefined2",re"undefined"),diag("undefined3",re"undefined") +} +func errorFunc(u undefined4) {} //@diag("undefined4",re"undefined") +-- @all_error/error_nodes.go -- +@@ -5 +5,4 @@ ++func (e E) errors(param any, param2 any, param3 any) any { ++ panic("unimplemented") ++} ++ +-- paren.go -- +package fromcallreturns + +type Paren struct{} + +func paren() { + p := Paren{} + fn()((p.surroundingParen())) //@quickfix("surroundingParen", re"has no field or method", surrounding_paren) +} + +func fn() func(i int) { + return func(i int) {} +} +-- @surrounding_paren/paren.go -- +@@ -5 +5,4 @@ ++func (p Paren) surroundingParen() int { ++ panic("unimplemented") ++} ++ diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue61693.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt similarity index 84% rename from gopls/internal/test/marker/testdata/stubmethods/issue61693.txt rename to 
gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt index 387b494bc72..f654d12d139 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/issue61693.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61693.txt @@ -15,7 +15,7 @@ func F(err ...error) {} func _() { var x error - F(x, C(0)) //@suggestedfix(re"C.0.", re"missing method Error", stub) + F(x, C(0)) //@quickfix(re"C.0.", re"missing method Error", stub) } -- @stub/main.go -- @@ -5 +5,5 @@ diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue61830.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt similarity index 81% rename from gopls/internal/test/marker/testdata/stubmethods/issue61830.txt rename to gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt index bf5bcc5ca2e..d95abdde4b9 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/issue61830.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue61830.txt @@ -14,7 +14,7 @@ type I interface { type A struct{} -var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", stub) +var _ I = &A{} //@quickfix(re"&A..", re"missing method M", stub) -- @stub/p.go -- @@ -13 +13,5 @@ +// M implements I. diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue64078.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt similarity index 87% rename from gopls/internal/test/marker/testdata/stubmethods/issue64078.txt rename to gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt index 50db6f27cfd..2cbb05d0706 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/issue64078.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64078.txt @@ -16,7 +16,7 @@ type I interface { M4() (aa string) } -var _ I = &A{} //@suggestedfix(re"&A..", re"missing method M", stub) +var _ I = &A{} //@quickfix(re"&A..", re"missing method M", stub) -- @stub/p.go -- @@ -5 +5,15 @@ +// M2 implements I. diff --git a/gopls/internal/test/marker/testdata/stubmethods/issue64114.txt b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt similarity index 88% rename from gopls/internal/test/marker/testdata/stubmethods/issue64114.txt rename to gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt index 35f6db728bb..e7600650371 100644 --- a/gopls/internal/test/marker/testdata/stubmethods/issue64114.txt +++ b/gopls/internal/test/marker/testdata/quickfix/stubmethods/issue64114.txt @@ -5,7 +5,7 @@ package stub // Regression test for issue #64114: code action "implement" is not listed. 
-var _ WriteTest = (*WriteStruct)(nil) //@suggestedfix("(", re"does not implement", issue64114) +var _ WriteTest = (*WriteStruct)(nil) //@quickfix("(", re"does not implement", issue64114) type WriterTwoStruct struct{} diff --git a/gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt b/gopls/internal/test/marker/testdata/quickfix/undeclared.txt similarity index 64% rename from gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt rename to gopls/internal/test/marker/testdata/quickfix/undeclared.txt index 897e9b14952..6b6e47e2765 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/undeclared.txt +++ b/gopls/internal/test/marker/testdata/quickfix/undeclared.txt @@ -9,7 +9,7 @@ go 1.12 package p func a() { - z, _ := 1+y, 11 //@suggestedfix("y", re"(undeclared name|undefined): y", a) + z, _ := 1+y, 11 //@quickfix("y", re"(undeclared name|undefined): y", a) _ = z } @@ -21,7 +21,7 @@ package p func b() { if 100 < 90 { - } else if 100 > n+2 { //@suggestedfix("n", re"(undeclared name|undefined): n", b) + } else if 100 > n+2 { //@quickfix("n", re"(undeclared name|undefined): n", b) } } @@ -32,7 +32,7 @@ func b() { package p func c() { - for i < 200 { //@suggestedfix("i", re"(undeclared name|undefined): i", c) + for i < 200 { //@quickfix("i", re"(undeclared name|undefined): i", c) } r() //@diag("r", re"(undeclared name|undefined): r") } diff --git a/gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt b/gopls/internal/test/marker/testdata/quickfix/undeclaredfunc.txt similarity index 57% rename from gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt rename to gopls/internal/test/marker/testdata/quickfix/undeclaredfunc.txt index d54dcae073f..6a0f7be3870 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/undeclaredfunc.txt +++ b/gopls/internal/test/marker/testdata/quickfix/undeclaredfunc.txt @@ -6,14 +6,14 @@ TODO(adonovan): infer the result variables from the context (int, in this case). 
-- a.go -- package a -func _() int { return f(1, "") } //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) +func _() int { return f(1, "") } //@quickfix(re"f.1", re"unde(fined|clared name): f", x) -- @x/a.go -- @@ -3 +3 @@ --func _() int { return f(1, "") } //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) +-func _() int { return f(1, "") } //@quickfix(re"f.1", re"unde(fined|clared name): f", x) +func _() int { return f(1, "") } @@ -5 +5,4 @@ +func f(i int, s string) { + panic("unimplemented") -+} //@suggestedfix(re"f.1", re"unde(fined|clared name): f", x) ++} //@quickfix(re"f.1", re"unde(fined|clared name): f", x) + diff --git a/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt b/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt similarity index 67% rename from gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt rename to gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt index 8ec46e9ea68..79e068c67f1 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire.txt +++ b/gopls/internal/test/marker/testdata/quickfix/unusedrequire.txt @@ -13,12 +13,12 @@ module mod.com go 1.14 -require example.com v1.0.0 //@suggestedfix("require", re"not used", a) +require example.com v1.0.0 //@quickfix("require", re"not used", a) -- @a/a/go.mod -- @@ -4,3 +4 @@ - --require example.com v1.0.0 //@suggestedfix("require", re"not used", a) +-require example.com v1.0.0 //@quickfix("require", re"not used", a) - -- a/main.go -- package main diff --git a/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt b/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt similarity index 66% rename from gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt rename to gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt index 73b0eb9607f..9c7c81516fb 100644 --- a/gopls/internal/test/marker/testdata/suggestedfix/unusedrequire_gowork.txt +++ b/gopls/internal/test/marker/testdata/quickfix/unusedrequire_gowork.txt @@ -20,12 +20,12 @@ module mod.com/a go 1.14 -require example.com v1.0.0 //@suggestedfix("require", re"not used", a) +require example.com v1.0.0 //@quickfix("require", re"not used", a) -- @a/a/go.mod -- @@ -4,3 +4 @@ - --require example.com v1.0.0 //@suggestedfix("require", re"not used", a) +-require example.com v1.0.0 //@quickfix("require", re"not used", a) - -- a/main.go -- package main @@ -36,12 +36,12 @@ module mod.com/b go 1.14 -require example.com v1.0.0 //@suggestedfix("require", re"not used", b) +require example.com v1.0.0 //@quickfix("require", re"not used", b) -- @b/b/go.mod -- @@ -4,3 +4 @@ - --require example.com v1.0.0 //@suggestedfix("require", re"not used", b) +-require example.com v1.0.0 //@quickfix("require", re"not used", b) - -- b/main.go -- package main diff --git a/gopls/internal/test/marker/testdata/references/issue60369.txt b/gopls/internal/test/marker/testdata/references/issue60369.txt index 0d868de8a15..a6a82b54339 100644 --- a/gopls/internal/test/marker/testdata/references/issue60369.txt +++ b/gopls/internal/test/marker/testdata/references/issue60369.txt @@ -4,9 +4,6 @@ references to the package name p. The bug was fixed in release go1.21 of go/types. 
--- flags -- --min_go=go1.21 - -- go.mod -- module example.com go 1.12 diff --git a/gopls/internal/test/marker/testdata/rename/doclink.txt b/gopls/internal/test/marker/testdata/rename/doclink.txt index bbd9bf1287a..d4e9f96891e 100644 --- a/gopls/internal/test/marker/testdata/rename/doclink.txt +++ b/gopls/internal/test/marker/testdata/rename/doclink.txt @@ -1,8 +1,5 @@ This test checks that doc links are also handled correctly (golang/go#64495). --- flags -- --min_go=go1.21 - -- go.mod -- module example.com diff --git a/gopls/internal/test/marker/testdata/rename/generics.txt b/gopls/internal/test/marker/testdata/rename/generics.txt index 71e56dd9bc4..0f57570a5fb 100644 --- a/gopls/internal/test/marker/testdata/rename/generics.txt +++ b/gopls/internal/test/marker/testdata/rename/generics.txt @@ -9,9 +9,6 @@ Fixed bugs: - golang/go#61635: renaming type parameters did not work when they were capitalized and the package was imported by another package. --- flags -- --min_go=go1.20 - -- go.mod -- module example.com go 1.20 diff --git a/gopls/internal/test/marker/testdata/rename/issue57479.txt b/gopls/internal/test/marker/testdata/rename/issue57479.txt index 78004591398..be597fbbd29 100644 --- a/gopls/internal/test/marker/testdata/rename/issue57479.txt +++ b/gopls/internal/test/marker/testdata/rename/issue57479.txt @@ -3,9 +3,6 @@ referenced by one of the function parameters. See golang/go#57479 --- flags -- --min_go=go1.22 - -- go.mod -- module golang.org/lsptests/rename diff --git a/gopls/internal/test/marker/testdata/rename/issue60752.txt b/gopls/internal/test/marker/testdata/rename/issue60752.txt index eec24b8e9de..d3cb777d3b8 100644 --- a/gopls/internal/test/marker/testdata/rename/issue60752.txt +++ b/gopls/internal/test/marker/testdata/rename/issue60752.txt @@ -6,9 +6,6 @@ behavior of types.Scope for function parameters and results. This is a regression test for issue #60752, a bug in the type checker. --- flags -- --min_go=go1.22 - -- go.mod -- module example.com go 1.18 diff --git a/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt b/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt deleted file mode 100644 index 5847cea15b7..00000000000 --- a/gopls/internal/test/marker/testdata/suggestedfix/noresultvalues.txt +++ /dev/null @@ -1,18 +0,0 @@ -This test checks the quick fix for removing extra return values. - -Note: gopls should really discard unnecessary return statements. - --- noresultvalues.go -- -package typeerrors - -func x() { return nil } //@suggestedfix("nil", re"too many return", x) - -func y() { return nil, "hello" } //@suggestedfix("nil", re"too many return", y) --- @x/noresultvalues.go -- -@@ -3 +3 @@ --func x() { return nil } //@suggestedfix("nil", re"too many return", x) -+func x() { return } //@suggestedfix("nil", re"too many return", x) --- @y/noresultvalues.go -- -@@ -5 +5 @@ --func y() { return nil, "hello" } //@suggestedfix("nil", re"too many return", y) -+func y() { return } //@suggestedfix("nil", re"too many return", y) diff --git a/gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt b/gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt deleted file mode 100644 index 9f3c7ca5618..00000000000 --- a/gopls/internal/test/marker/testdata/suggestedfix/self_assignment.txt +++ /dev/null @@ -1,19 +0,0 @@ -Test of the suggested fix to remove unnecessary assignments. 
- --- a.go -- -package suggestedfix - -import ( - "log" -) - -func goodbye() { - s := "hiiiiiii" - s = s //@suggestedfix("s = s", re"self-assignment", fix) - log.Print(s) -} - --- @fix/a.go -- -@@ -9 +9 @@ -- s = s //@suggestedfix("s = s", re"self-assignment", fix) -+ //@suggestedfix("s = s", re"self-assignment", fix) diff --git a/gopls/internal/test/marker/testdata/token/builtin_constant.txt b/gopls/internal/test/marker/testdata/token/builtin_constant.txt new file mode 100644 index 00000000000..8f0c021b3a9 --- /dev/null +++ b/gopls/internal/test/marker/testdata/token/builtin_constant.txt @@ -0,0 +1,21 @@ +This test checks semanticTokens on builtin constants. +(test for #70219.) + +-- settings.json -- +{ + "semanticTokens": true +} + +-- flags -- +-ignore_extra_diags + +-- default_lib_const.go -- +package p + +func _() { + a, b := false, true //@ token("false", "variable", "readonly defaultLibrary"), token("true", "variable", "readonly defaultLibrary") +} + +const ( + c = iota //@ token("iota", "variable", "readonly defaultLibrary") +) diff --git a/gopls/internal/test/marker/testdata/workfile/godebug.txt b/gopls/internal/test/marker/testdata/workfile/godebug.txt index fb7d7d5df2d..68fd0f2fe4b 100644 --- a/gopls/internal/test/marker/testdata/workfile/godebug.txt +++ b/gopls/internal/test/marker/testdata/workfile/godebug.txt @@ -2,7 +2,7 @@ This test basic gopls functionality in a workspace with a godebug directive in its modfile. -- flags -- --min_go=go1.23 +-min_go_command=go1.23 -- a/go.work -- go 1.23 diff --git a/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt b/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt index 52ad7c07d57..98a0dd250d2 100644 --- a/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt +++ b/gopls/internal/test/marker/testdata/workfile/godebug_bad.txt @@ -3,7 +3,7 @@ This test checks that we surface the error for unexpected godebug values. TODO(golang/go#67623): the diagnostic should be on the bad godebug value. -- flags -- --min_go=go1.23 +-min_go_command=go1.23 -errors_ok -- go.work -- diff --git a/gopls/internal/test/marker/testdata/zeroconfig/nested.txt b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt index e76bb0c6ec0..2b8a22b1389 100644 --- a/gopls/internal/test/marker/testdata/zeroconfig/nested.txt +++ b/gopls/internal/test/marker/testdata/zeroconfig/nested.txt @@ -1,9 +1,6 @@ This test checks that gopls works with nested modules, including multiple nested modules. --- flags -- --min_go=go1.20 - -- main.go -- package main diff --git a/gopls/internal/util/persistent/map.go b/gopls/internal/util/persistent/map.go index b0e49f27d42..5cb556a482b 100644 --- a/gopls/internal/util/persistent/map.go +++ b/gopls/internal/util/persistent/map.go @@ -26,6 +26,9 @@ import ( // `foo(arg1:+n1, arg2:+n2) (ret1:+n3)`. // Each argument is followed by a delta change to its reference counter. // In case if no change is expected, the delta will be `-0`. +// +// TODO(rfindley): add Update(K, func(V, bool) V), as we have several instances +// of the Get--Set pattern that could be optimized. // Map is an associative mapping from keys to values. 
// diff --git a/gopls/internal/util/safetoken/safetoken_test.go b/gopls/internal/util/safetoken/safetoken_test.go index ac3b878c6c4..9926d6d2b57 100644 --- a/gopls/internal/util/safetoken/safetoken_test.go +++ b/gopls/internal/util/safetoken/safetoken_test.go @@ -24,7 +24,7 @@ func TestWorkaroundIssue57490(t *testing.T) { src := `package p; func f() { var x struct` fset := token.NewFileSet() file, _ := parser.ParseFile(fset, "a.go", src, parser.SkipObjectResolution) - tf := fset.File(file.Pos()) + tf := fset.File(file.FileStart) // Add another file to the FileSet. file2, _ := parser.ParseFile(fset, "b.go", "package q", parser.SkipObjectResolution) @@ -116,7 +116,9 @@ func TestGoplsSourceDoesNotCallTokenFileMethods(t *testing.T) { for _, pkg := range pkgs { switch pkg.PkgPath { - case "go/token", "golang.org/x/tools/gopls/internal/util/safetoken": + case "go/token", + "golang.org/x/tools/gopls/internal/util/safetoken", // this package + "golang.org/x/tools/gopls/internal/cache/parsego": // copies go/parser/resolver.go continue // allow calls within these packages } diff --git a/gopls/internal/util/typesutil/typesutil.go b/gopls/internal/util/typesutil/typesutil.go index 6e61c7ed874..11233e80bd2 100644 --- a/gopls/internal/util/typesutil/typesutil.go +++ b/gopls/internal/util/typesutil/typesutil.go @@ -5,6 +5,7 @@ package typesutil import ( + "bytes" "go/ast" "go/types" ) @@ -34,3 +35,22 @@ func FileQualifier(f *ast.File, pkg *types.Package, info *types.Info) types.Qual return p.Name() } } + +// FormatTypeParams turns TypeParamList into its Go representation, such as: +// [T, Y]. Note that it does not print constraints as this is mainly used for +// formatting type params in method receivers. +func FormatTypeParams(tparams *types.TypeParamList) string { + if tparams == nil || tparams.Len() == 0 { + return "" + } + var buf bytes.Buffer + buf.WriteByte('[') + for i := 0; i < tparams.Len(); i++ { + if i > 0 { + buf.WriteString(", ") + } + buf.WriteString(tparams.At(i).Obj().Name()) + } + buf.WriteByte(']') + return buf.String() +} diff --git a/gopls/internal/vulncheck/scan/command.go b/gopls/internal/vulncheck/scan/command.go index 4ef005010c9..1b703a720da 100644 --- a/gopls/internal/vulncheck/scan/command.go +++ b/gopls/internal/vulncheck/scan/command.go @@ -91,7 +91,7 @@ func RunGovulncheck(ctx context.Context, pattern string, snapshot *cache.Snapsho if stderr.Len() > 0 { log.Write(stderr.Bytes()) } - return nil, fmt.Errorf("failed to read govulncheck output: %v", err) + return nil, fmt.Errorf("failed to read govulncheck output: %v: stderr:\n%s", err, stderr) } findings := handler.findings // sort so the findings in the result is deterministic. diff --git a/gopls/internal/vulncheck/vulntest/report.go b/gopls/internal/vulncheck/vulntest/report.go index b67986cf8c2..7dbebca6d6b 100644 --- a/gopls/internal/vulncheck/vulntest/report.go +++ b/gopls/internal/vulncheck/vulntest/report.go @@ -128,30 +128,6 @@ func (v Version) Canonical() string { return strings.TrimPrefix(semver.Canonical(v.V()), "v") } -// Reference type is a reference (link) type. -type ReferenceType string - -const ( - ReferenceTypeAdvisory = ReferenceType("ADVISORY") - ReferenceTypeArticle = ReferenceType("ARTICLE") - ReferenceTypeReport = ReferenceType("REPORT") - ReferenceTypeFix = ReferenceType("FIX") - ReferenceTypePackage = ReferenceType("PACKAGE") - ReferenceTypeEvidence = ReferenceType("EVIDENCE") - ReferenceTypeWeb = ReferenceType("WEB") -) - -// ReferenceTypes is the set of reference types defined in OSV. 
-var ReferenceTypes = []ReferenceType{ - ReferenceTypeAdvisory, - ReferenceTypeArticle, - ReferenceTypeReport, - ReferenceTypeFix, - ReferenceTypePackage, - ReferenceTypeEvidence, - ReferenceTypeWeb, -} - // A Reference is a link to some external resource. // // For ease of typing, References are represented in the YAML as a diff --git a/gopls/release/release.go b/gopls/release/release.go deleted file mode 100644 index 26ce5f7870a..00000000000 --- a/gopls/release/release.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. -// -// Package release checks that the a given version of gopls is ready for -// release. It can also tag and publish the release. -// -// To run: -// -// $ cd $GOPATH/src/golang.org/x/tools/gopls -// $ go run release/release.go -version= -package main - -import ( - "flag" - "fmt" - "log" - "os" - "os/exec" - "path/filepath" - "strings" - - "golang.org/x/mod/modfile" - "golang.org/x/mod/semver" -) - -var versionFlag = flag.String("version", "", "version to tag") - -func main() { - flag.Parse() - - if *versionFlag == "" { - log.Fatalf("must provide -version flag") - } - if !semver.IsValid(*versionFlag) { - log.Fatalf("invalid version %s", *versionFlag) - } - if semver.Major(*versionFlag) != "v0" { - log.Fatalf("expected major version v0, got %s", semver.Major(*versionFlag)) - } - if semver.Build(*versionFlag) != "" { - log.Fatalf("unexpected build suffix: %s", *versionFlag) - } - // Validate that the user is running the program from the gopls module. - wd, err := os.Getwd() - if err != nil { - log.Fatal(err) - } - if filepath.Base(wd) != "gopls" { - log.Fatalf("must run from the gopls module") - } - // Confirm that the versions in the go.mod file are correct. - if err := validateGoModFile(wd); err != nil { - log.Fatal(err) - } - fmt.Println("Validated that the release is ready.") - os.Exit(0) -} - -func validateGoModFile(goplsDir string) error { - filename := filepath.Join(goplsDir, "go.mod") - data, err := os.ReadFile(filename) - if err != nil { - return err - } - gomod, err := modfile.Parse(filename, data, nil) - if err != nil { - return err - } - // Confirm that there is no replace directive in the go.mod file. - if len(gomod.Replace) > 0 { - return fmt.Errorf("expected no replace directives, got %v", len(gomod.Replace)) - } - // Confirm that the version of x/tools in the gopls/go.mod file points to - // the second-to-last commit. (The last commit will be the one to update the - // go.mod file.) - cmd := exec.Command("git", "rev-parse", "@~") - stdout, err := cmd.Output() - if err != nil { - return err - } - hash := string(stdout) - // Find the golang.org/x/tools require line and compare the versions. 
- var version string - for _, req := range gomod.Require { - if req.Mod.Path == "golang.org/x/tools" { - version = req.Mod.Version - break - } - } - if version == "" { - return fmt.Errorf("no require for golang.org/x/tools") - } - split := strings.Split(version, "-") - if len(split) != 3 { - return fmt.Errorf("unexpected pseudoversion format %s", version) - } - last := split[len(split)-1] - if last == "" { - return fmt.Errorf("unexpected pseudoversion format %s", version) - } - if !strings.HasPrefix(hash, last) { - return fmt.Errorf("golang.org/x/tools pseudoversion should be at commit %s, instead got %s", hash, last) - } - return nil -} diff --git a/internal/aliases/aliases_test.go b/internal/aliases/aliases_test.go index 551e9e512f1..f469821141b 100644 --- a/internal/aliases/aliases_test.go +++ b/internal/aliases/aliases_test.go @@ -44,9 +44,10 @@ func TestNewAlias(t *testing.T) { } for _, godebug := range []string{ - // The default gotypesalias value follows the x/tools/go.mod version - // The go.mod is at 1.22 so the default is gotypesalias=0. - "", // Use the default GODEBUG value (off). + // Note: previously there was a test case for "", which asserted on the + // behavior implied by the x/tools go.mod go directive. But that only works + // if x/tools is the main module for the test, which isn't the case when + // run with a go.work file, or from another module (golang/go#70082). "gotypesalias=0", "gotypesalias=1", } { diff --git a/internal/analysisinternal/addimport_test.go b/internal/analysisinternal/addimport_test.go index 31afbb05011..f361bde82f8 100644 --- a/internal/analysisinternal/addimport_test.go +++ b/internal/analysisinternal/addimport_test.go @@ -202,7 +202,7 @@ func _() { if err != nil { t.Log(err) } - pos := fset.File(f.Pos()).Pos(len(before)) + pos := fset.File(f.FileStart).Pos(len(before)) // type-check info := &types.Info{ diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go index 24755b41265..4ccaa210af1 100644 --- a/internal/analysisinternal/analysis.go +++ b/internal/analysisinternal/analysis.go @@ -10,6 +10,7 @@ import ( "bytes" "fmt" "go/ast" + "go/scanner" "go/token" "go/types" "os" @@ -21,12 +22,46 @@ import ( func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { // Get the end position for the type error. - offset, end := fset.PositionFor(start, false).Offset, start - if offset >= len(src) { - return end + file := fset.File(start) + if file == nil { + return start } - if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 { - end = start + token.Pos(width) + if offset := file.PositionFor(start, false).Offset; offset > len(src) { + return start + } else { + src = src[offset:] + } + + // Attempt to find a reasonable end position for the type error. + // + // TODO(rfindley): the heuristic implemented here is unclear. It looks like + // it seeks the end of the primary operand starting at start, but that is not + // quite implemented (for example, given a func literal this heuristic will + // return the range of the func keyword). + // + // We should formalize this heuristic, or deprecate it by finally proposing + // to add end position to all type checker errors. + // + // Nevertheless, ensure that the end position at least spans the current + // token at the cursor (this was golang/go#69505). 
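	// For instance, if start points at the identifier in a line such as
	//     f(undefinedName + 1)
	// (an illustrative input), the intent is that the scan below extends end
	// across the token under the cursor ("undefinedName"), and the IndexAny
	// check that follows may widen it further, up to the next terminator byte.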
+ end := start + { + var s scanner.Scanner + fset := token.NewFileSet() + f := fset.AddFile("", fset.Base(), len(src)) + s.Init(f, src, nil /* no error handler */, scanner.ScanComments) + pos, tok, lit := s.Scan() + if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) { + off := file.Offset(pos) + len(lit) + src = src[off:] + end += token.Pos(off) + } + } + + // Look for bytes that might terminate the current operand. See note above: + // this is imprecise. + if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 { + end += token.Pos(width) } return end } diff --git a/internal/expect/expect.go b/internal/expect/expect.go new file mode 100644 index 00000000000..d977ea4e262 --- /dev/null +++ b/internal/expect/expect.go @@ -0,0 +1,123 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package expect provides support for interpreting structured comments in Go +source code (including go.mod and go.work files) as test expectations. + +This is primarily intended for writing tests of things that process Go source +files, although it does not directly depend on the testing package. + +Collect notes with the Extract or Parse functions, and use the +MatchBefore function to find matches within the lines the comments were on. + +The interpretation of the notes depends on the application. +For example, the test suite for a static checking tool might +use a @diag note to indicate an expected diagnostic: + + fmt.Printf("%s", 1) //@ diag("%s wants a string, got int") + +By contrast, the test suite for a source code navigation tool +might use notes to indicate the positions of features of +interest, the actions to be performed by the test, +and their expected outcomes: + + var x = 1 //@ x_decl + ... + print(x) //@ definition("x", x_decl) + print(x) //@ typeof("x", "int") + +# Note comment syntax + +Note comments always start with the special marker @, which must be the +very first character after the comment opening pair, so //@ or /*@ with no +spaces. + +This is followed by a comma separated list of notes. + +A note always starts with an identifier, which is optionally followed by an +argument list. The argument list is surrounded with parentheses and contains a +comma-separated list of arguments. +The empty parameter list and the missing parameter list are distinguishable if +needed; they result in a nil or an empty list in the Args parameter respectively. + +Arguments may be positional, such as f(value), or named, such as f(name=value). +Positional arguments must appear before named arguments. +Names may not be repeated. + +Argument values may be either identifiers or literals. +The literals supported are the basic value literals, of string, float, integer +true, false or nil. All the literals match the standard go conventions, with +all bases of integers, and both quote and backtick strings. +There is one extra literal type, which is a string literal preceded by the +identifier "re" which is compiled to a regular expression. +*/ +package expect + +import ( + "bytes" + "fmt" + "go/token" + "regexp" +) + +// Note is a parsed note from an expect comment. +// It knows the position of the start of the comment, and the name and +// arguments that make up the note. 
+type Note struct { + Pos token.Pos // The position at which the note identifier appears + Name string // the name associated with the note + Args []any // positional arguments (non-nil if parens were present) + NamedArgs map[string]any // named arguments (or nil if none) +} + +// ReadFile is the type of a function that can provide file contents for a +// given filename. +// This is used in MatchBefore to look up the content of the file in order to +// find the line to match the pattern against. +type ReadFile func(filename string) ([]byte, error) + +// MatchBefore attempts to match a pattern in the line before the supplied pos. +// It uses the FileSet and the ReadFile to work out the contents of the line +// that end is part of, and then matches the pattern against the content of the +// start of that line up to the supplied position. +// The pattern may be either a simple string, []byte or a *regexp.Regexp. +// MatchBefore returns the range of the line that matched the pattern, and +// invalid positions if there was no match, or an error if the line could not be +// found. +func MatchBefore(fset *token.FileSet, readFile ReadFile, end token.Pos, pattern interface{}) (token.Pos, token.Pos, error) { + f := fset.File(end) + content, err := readFile(f.Name()) + if err != nil { + return token.NoPos, token.NoPos, fmt.Errorf("invalid file: %v", err) + } + position := f.Position(end) + startOffset := f.Offset(f.LineStart(position.Line)) + endOffset := f.Offset(end) + line := content[startOffset:endOffset] + matchStart, matchEnd := -1, -1 + switch pattern := pattern.(type) { + case string: + bytePattern := []byte(pattern) + matchStart = bytes.Index(line, bytePattern) + if matchStart >= 0 { + matchEnd = matchStart + len(bytePattern) + } + case []byte: + matchStart = bytes.Index(line, pattern) + if matchStart >= 0 { + matchEnd = matchStart + len(pattern) + } + case *regexp.Regexp: + match := pattern.FindIndex(line) + if len(match) > 0 { + matchStart = match[0] + matchEnd = match[1] + } + } + if matchStart < 0 { + return token.NoPos, token.NoPos, nil + } + return f.Pos(startOffset + matchStart), f.Pos(startOffset + matchEnd), nil +} diff --git a/internal/expect/expect_test.go b/internal/expect/expect_test.go new file mode 100644 index 00000000000..3ad8d1a74fa --- /dev/null +++ b/internal/expect/expect_test.go @@ -0,0 +1,179 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package expect_test + +import ( + "bytes" + "go/token" + "os" + "reflect" + "slices" + "testing" + + "golang.org/x/tools/internal/expect" +) + +func TestMarker(t *testing.T) { + for _, tt := range []struct { + filename string + expectNotes int + expectMarkers map[string]string + expectChecks map[string][]any + // expectChecks holds {"id": values} for each call check(id, values...). + // Any named k=v arguments become a final map[string]any argument. 
+ }{ + { + filename: "testdata/test.go", + expectNotes: 14, + expectMarkers: map[string]string{ + "αSimpleMarker": "α", + "OffsetMarker": "β", + "RegexMarker": "γ", + "εMultiple": "ε", + "ζMarkers": "ζ", + "ηBlockMarker": "η", + "Declared": "η", + "Comment": "ι", + "LineComment": "someFunc", + "NonIdentifier": "+", + "StringMarker": "\"hello\"", + }, + expectChecks: map[string][]any{ + "αSimpleMarker": nil, + "StringAndInt": {"Number %d", int64(12)}, + "Bool": {true}, + "NamedArgs": {int64(1), true, expect.Identifier("a"), map[string]any{ + "b": int64(1), + "c": "3", + "d": true, + }}, + }, + }, + { + filename: "testdata/go.fake.mod", + expectNotes: 2, + expectMarkers: map[string]string{ + "αMarker": "αfake1α", + "βMarker": "require golang.org/modfile v0.0.0", + }, + }, + { + filename: "testdata/go.fake.work", + expectNotes: 2, + expectMarkers: map[string]string{ + "αMarker": "1.23.0", + "βMarker": "αβ", + }, + }, + } { + t.Run(tt.filename, func(t *testing.T) { + content, err := os.ReadFile(tt.filename) + if err != nil { + t.Fatal(err) + } + readFile := func(string) ([]byte, error) { return content, nil } + + markers := make(map[string]token.Pos) + for name, tok := range tt.expectMarkers { + offset := bytes.Index(content, []byte(tok)) + markers[name] = token.Pos(offset + 1) + end := bytes.Index(content[offset:], []byte(tok)) + if end > 0 { + markers[name+"@"] = token.Pos(offset + end + 2) + } + } + + fset := token.NewFileSet() + notes, err := expect.Parse(fset, tt.filename, content) + if err != nil { + t.Fatalf("Failed to extract notes:\n%v", err) + } + if len(notes) != tt.expectNotes { + t.Errorf("Expected %v notes, got %v", tt.expectNotes, len(notes)) + } + for _, n := range notes { + switch { + case n.Args == nil: + // A //@foo note associates the name foo with the position of the + // first match of "foo" on the current line. + checkMarker(t, fset, readFile, markers, n.Pos, n.Name, n.Name) + case n.Name == "mark": + // A //@mark(name, "pattern") note associates the specified name + // with the position on the first match of pattern on the current line. + if len(n.Args) != 2 { + t.Errorf("%v: expected 2 args to mark, got %v", fset.Position(n.Pos), len(n.Args)) + continue + } + ident, ok := n.Args[0].(expect.Identifier) + if !ok { + t.Errorf("%v: got %v (%T), want identifier", fset.Position(n.Pos), n.Args[0], n.Args[0]) + continue + } + checkMarker(t, fset, readFile, markers, n.Pos, string(ident), n.Args[1]) + + case n.Name == "check": + // A //@check(args, ...) note specifies some hypothetical action to + // be taken by the test driver and its expected outcome. + // In this test, the action is to compare the arguments + // against expectChecks. + if len(n.Args) < 1 { + t.Errorf("%v: expected 1 args to check, got %v", fset.Position(n.Pos), len(n.Args)) + continue + } + ident, ok := n.Args[0].(expect.Identifier) + if !ok { + t.Errorf("%v: got %v (%T), want identifier", fset.Position(n.Pos), n.Args[0], n.Args[0]) + continue + } + wantArgs, ok := tt.expectChecks[string(ident)] + if !ok { + t.Errorf("%v: unexpected check %v", fset.Position(n.Pos), ident) + continue + } + gotArgs := n.Args[1:] + if n.NamedArgs != nil { + // Clip to avoid mutating Args' array. 
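					// (slices.Clip reduces cap(gotArgs) to len(gotArgs), so the
					// append below must allocate a new backing array instead of
					// writing into spare capacity shared with n.Args.)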
+ gotArgs = append(slices.Clip(gotArgs), n.NamedArgs) + } + + if len(gotArgs) != len(wantArgs) { + t.Errorf("%v: expected %v args to check, got %v", fset.Position(n.Pos), len(wantArgs), len(gotArgs)) + continue + } + for i := range gotArgs { + if !reflect.DeepEqual(wantArgs[i], gotArgs[i]) { + t.Errorf("%v: arg %d: expected %#v, got %#v", fset.Position(n.Pos), i+1, wantArgs[i], gotArgs[i]) + } + } + default: + t.Errorf("Unexpected note %v at %v", n.Name, fset.Position(n.Pos)) + } + } + }) + } +} + +func checkMarker(t *testing.T, fset *token.FileSet, readFile expect.ReadFile, markers map[string]token.Pos, pos token.Pos, name string, pattern interface{}) { + start, end, err := expect.MatchBefore(fset, readFile, pos, pattern) + if err != nil { + t.Errorf("%v: MatchBefore failed: %v", fset.Position(pos), err) + return + } + if start == token.NoPos { + t.Errorf("%v: Pattern %v did not match", fset.Position(pos), pattern) + return + } + expectStart, ok := markers[name] + if !ok { + t.Errorf("%v: unexpected marker %v", fset.Position(pos), name) + return + } + if start != expectStart { + t.Errorf("%v: Expected %v got %v", fset.Position(pos), fset.Position(expectStart), fset.Position(start)) + } + if expectEnd, ok := markers[name+"@"]; ok && end != expectEnd { + t.Errorf("%v: Expected end %v got %v", fset.Position(pos), fset.Position(expectEnd), fset.Position(end)) + } +} diff --git a/internal/expect/extract.go b/internal/expect/extract.go new file mode 100644 index 00000000000..db6b66aaf21 --- /dev/null +++ b/internal/expect/extract.go @@ -0,0 +1,383 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package expect + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "path/filepath" + "regexp" + "strconv" + "strings" + "text/scanner" + + "golang.org/x/mod/modfile" +) + +const commentStart = "@" +const commentStartLen = len(commentStart) + +// Identifier is the type for an identifier in an Note argument list. +type Identifier string + +// Parse collects all the notes present in a file. +// If content is nil, the filename specified is read and parsed, otherwise the +// content is used and the filename is used for positions and error messages. +// Each comment whose text starts with @ is parsed as a comma-separated +// sequence of notes. +// See the package documentation for details about the syntax of those +// notes. +func Parse(fset *token.FileSet, filename string, content []byte) ([]*Note, error) { + var src interface{} + if content != nil { + src = content + } + switch filepath.Ext(filename) { + case ".go": + // TODO: We should write this in terms of the scanner. + // there are ways you can break the parser such that it will not add all the + // comments to the ast, which may result in files where the tests are silently + // not run. + file, err := parser.ParseFile(fset, filename, src, parser.ParseComments|parser.AllErrors|parser.SkipObjectResolution) + if file == nil { + return nil, err + } + return ExtractGo(fset, file) + case ".mod": + file, err := modfile.Parse(filename, content, nil) + if err != nil { + return nil, err + } + f := fset.AddFile(filename, -1, len(content)) + f.SetLinesForContent(content) + notes, err := extractModWork(fset, file.Syntax.Stmt) + if err != nil { + return nil, err + } + // Since modfile.Parse does not return an *ast, we need to add the offset + // within the file's contents to the file's base relative to the fileset. 
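		// (Note positions produced by parse are byte offsets within content,
		// so if f.Base() were, say, 100 and a note began at offset 25, its
		// adjusted Pos would become 125.)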
+ for _, note := range notes { + note.Pos += token.Pos(f.Base()) + } + return notes, nil + case ".work": + file, err := modfile.ParseWork(filename, content, nil) + if err != nil { + return nil, err + } + f := fset.AddFile(filename, -1, len(content)) + f.SetLinesForContent(content) + notes, err := extractModWork(fset, file.Syntax.Stmt) + if err != nil { + return nil, err + } + // As with go.mod files, we need to compute a synthetic token.Pos. + for _, note := range notes { + note.Pos += token.Pos(f.Base()) + } + return notes, nil + } + return nil, nil +} + +// extractModWork collects all the notes present in a go.mod file or go.work +// file, by way of the shared modfile.Expr statement node. +// +// Each comment whose text starts with @ is parsed as a comma-separated +// sequence of notes. +// See the package documentation for details about the syntax of those +// notes. +// Only allow notes to appear with the following format: "//@mark()" or // @mark() +func extractModWork(fset *token.FileSet, exprs []modfile.Expr) ([]*Note, error) { + var notes []*Note + for _, stmt := range exprs { + comment := stmt.Comment() + if comment == nil { + continue + } + var allComments []modfile.Comment + allComments = append(allComments, comment.Before...) + allComments = append(allComments, comment.Suffix...) + for _, cmt := range allComments { + text, adjust := getAdjustedNote(cmt.Token) + if text == "" { + continue + } + parsed, err := parse(fset, token.Pos(int(cmt.Start.Byte)+adjust), text) + if err != nil { + return nil, err + } + notes = append(notes, parsed...) + } + } + return notes, nil +} + +// ExtractGo collects all the notes present in an AST. +// Each comment whose text starts with @ is parsed as a comma-separated +// sequence of notes. +// See the package documentation for details about the syntax of those +// notes. +func ExtractGo(fset *token.FileSet, file *ast.File) ([]*Note, error) { + var notes []*Note + for _, g := range file.Comments { + for _, c := range g.List { + text, adjust := getAdjustedNote(c.Text) + if text == "" { + continue + } + parsed, err := parse(fset, token.Pos(int(c.Pos())+adjust), text) + if err != nil { + return nil, err + } + notes = append(notes, parsed...) + } + } + return notes, nil +} + +func getAdjustedNote(text string) (string, int) { + if strings.HasPrefix(text, "/*") { + text = strings.TrimSuffix(text, "*/") + } + text = text[2:] // remove "//" or "/*" prefix + + // Allow notes to appear within comments. + // For example: + // "// //@mark()" is valid. + // "// @mark()" is not valid. + // "// /*@mark()*/" is not valid. + var adjust int + if i := strings.Index(text, commentStart); i > 2 { + // Get the text before the commentStart. 
+ pre := text[i-2 : i] + if pre != "//" { + return "", 0 + } + text = text[i:] + adjust = i + } + if !strings.HasPrefix(text, commentStart) { + return "", 0 + } + text = text[commentStartLen:] + return text, commentStartLen + adjust + 1 +} + +const invalidToken rune = 0 + +type tokens struct { + scanner scanner.Scanner + current rune + err error + base token.Pos +} + +func (t *tokens) Init(base token.Pos, text string) *tokens { + t.base = base + t.scanner.Init(strings.NewReader(text)) + t.scanner.Mode = scanner.GoTokens + t.scanner.Whitespace ^= 1 << '\n' // don't skip new lines + t.scanner.Error = func(s *scanner.Scanner, msg string) { + t.Errorf("%v", msg) + } + return t +} + +func (t *tokens) Consume() string { + t.current = invalidToken + return t.scanner.TokenText() +} + +func (t *tokens) Token() rune { + if t.err != nil { + return scanner.EOF + } + if t.current == invalidToken { + t.current = t.scanner.Scan() + } + return t.current +} + +func (t *tokens) Skip(r rune) int { + i := 0 + for t.Token() == '\n' { + t.Consume() + i++ + } + return i +} + +func (t *tokens) TokenString() string { + return scanner.TokenString(t.Token()) +} + +func (t *tokens) Pos() token.Pos { + return t.base + token.Pos(t.scanner.Position.Offset) +} + +func (t *tokens) Errorf(msg string, args ...interface{}) { + if t.err != nil { + return + } + t.err = fmt.Errorf(msg, args...) +} + +func parse(fset *token.FileSet, base token.Pos, text string) ([]*Note, error) { + t := new(tokens).Init(base, text) + notes := parseComment(t) + if t.err != nil { + return nil, fmt.Errorf("%v: %s", fset.Position(t.Pos()), t.err) + } + return notes, nil +} + +func parseComment(t *tokens) []*Note { + var notes []*Note + for { + t.Skip('\n') + switch t.Token() { + case scanner.EOF: + return notes + case scanner.Ident: + notes = append(notes, parseNote(t)) + default: + t.Errorf("unexpected %s parsing comment, expect identifier", t.TokenString()) + return nil + } + switch t.Token() { + case scanner.EOF: + return notes + case ',', '\n': + t.Consume() + default: + t.Errorf("unexpected %s parsing comment, expect separator", t.TokenString()) + return nil + } + } +} + +func parseNote(t *tokens) *Note { + n := &Note{ + Pos: t.Pos(), + Name: t.Consume(), + } + + switch t.Token() { + case ',', '\n', scanner.EOF: + // no argument list present + return n + case '(': + n.Args, n.NamedArgs = parseArgumentList(t) + return n + default: + t.Errorf("unexpected %s parsing note", t.TokenString()) + return nil + } +} + +func parseArgumentList(t *tokens) (args []any, named map[string]any) { + args = []any{} // @name() is represented by a non-nil empty slice. + t.Consume() // '(' + t.Skip('\n') + for t.Token() != ')' { + name, arg := parseArgument(t) + if name != "" { + // f(k=v) + if named == nil { + named = make(map[string]any) + } + if _, dup := named[name]; dup { + t.Errorf("duplicate named argument %q", name) + return nil, nil + } + named[name] = arg + } else { + // f(v) + if named != nil { + t.Errorf("positional argument follows named argument") + return nil, nil + } + args = append(args, arg) + } + if t.Token() != ',' { + break + } + t.Consume() + t.Skip('\n') + } + if t.Token() != ')' { + t.Errorf("unexpected %s parsing argument list", t.TokenString()) + return nil, nil + } + t.Consume() // ')' + return args, named +} + +// parseArgument returns the value of the argument ("f(value)"), +// and its name if named "f(name=value)". 
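// For example (argument values shown are illustrative):
//
//	f(x)        ->  value Identifier("x")
//	f("s", 3)   ->  values "s" and int64(3)
//	f(re"a+")   ->  value *regexp.Regexp compiled from "a+"
//	f(k=true)   ->  name "k", value true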
+func parseArgument(t *tokens) (name string, value any) { +again: + switch t.Token() { + case scanner.Ident: + v := t.Consume() + switch v { + case "true": + value = true + case "false": + value = false + case "nil": + value = nil + case "re": + if t.Token() != scanner.String && t.Token() != scanner.RawString { + t.Errorf("re must be followed by string, got %s", t.TokenString()) + return + } + pattern, _ := strconv.Unquote(t.Consume()) // can't fail + re, err := regexp.Compile(pattern) + if err != nil { + t.Errorf("invalid regular expression %s: %v", pattern, err) + return + } + value = re + default: + // f(name=value)? + if name == "" && t.Token() == '=' { + t.Consume() // '=' + name = v + goto again + } + value = Identifier(v) + } + + case scanner.String, scanner.RawString: + value, _ = strconv.Unquote(t.Consume()) // can't fail + + case scanner.Int: + s := t.Consume() + v, err := strconv.ParseInt(s, 0, 0) + if err != nil { + t.Errorf("cannot convert %v to int: %v", s, err) + } + value = v + + case scanner.Float: + s := t.Consume() + v, err := strconv.ParseFloat(s, 64) + if err != nil { + t.Errorf("cannot convert %v to float: %v", s, err) + } + value = v + + case scanner.Char: + t.Errorf("unexpected char literal %s", t.Consume()) + + default: + t.Errorf("unexpected %s parsing argument", t.TokenString()) + } + return +} diff --git a/internal/expect/testdata/go.fake.mod b/internal/expect/testdata/go.fake.mod new file mode 100644 index 00000000000..ca84fcee9f3 --- /dev/null +++ b/internal/expect/testdata/go.fake.mod @@ -0,0 +1,9 @@ +// This file is named go.fake.mod so it does not define a real module, which +// would make the contents of this directory unavailable to the test when run +// from outside the repository. + +module αfake1α //@mark(αMarker, "αfake1α") + +go 1.14 + +require golang.org/modfile v0.0.0 //@mark(βMarker, "require golang.org/modfile v0.0.0") diff --git a/internal/expect/testdata/go.fake.work b/internal/expect/testdata/go.fake.work new file mode 100644 index 00000000000..f861c54991c --- /dev/null +++ b/internal/expect/testdata/go.fake.work @@ -0,0 +1,7 @@ +// This file is named go.fake.mod so it does not define a real module, which +// would make the contents of this directory unavailable to the test when run +// from outside the repository. + +go 1.23.0 //@mark(αMarker, "1.23.0") + +use ./αβ //@mark(βMarker, "αβ") diff --git a/internal/expect/testdata/test.go b/internal/expect/testdata/test.go new file mode 100644 index 00000000000..808864e7a91 --- /dev/null +++ b/internal/expect/testdata/test.go @@ -0,0 +1,42 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fake1 is used to test the expect package. +package fake1 + +// The greek letters in this file mark points we use for marker tests. +// We use unique markers so we can make the tests stable against changes to +// this file. + +const ( + _ int = iota + αSimpleMarkerα //@αSimpleMarker + offsetββMarker //@mark(OffsetMarker, "β") + regexγMaγrker //@mark(RegexMarker, re`\p{Greek}Ma`) + εMultipleεζMarkersζ //@εMultiple,ζMarkers + ηBlockMarkerη /*@ηBlockMarker*/ +) + +/*Marker ι inside ι a comment*/ //@mark(Comment,"ι inside ") +var x = "hello" //@mark(StringMarker, `"hello"`) + +// someFunc is a function. 
//@mark(LineComment, "someFunc") +func someFunc(a, b int) int { + // The line below must be the first occurrence of the plus operator + return a + b + 1 //@mark(NonIdentifier, re`\+[^\+]*`) +} + +// And some extra checks for interesting action parameters +// Also checks for multi-line expectations +/*@ +check(αSimpleMarker) +check(StringAndInt, + "Number %d", + 12, +) + +check(Bool, true) + +check(NamedArgs, 1, true, a, b=1, c="3", d=true) +*/ diff --git a/internal/gcimporter/gcimporter_test.go b/internal/gcimporter/gcimporter_test.go index 5519fa08a92..7848f27e0c2 100644 --- a/internal/gcimporter/gcimporter_test.go +++ b/internal/gcimporter/gcimporter_test.go @@ -166,6 +166,7 @@ func TestImportTypeparamTests(t *testing.T) { } testenv.NeedsGoBuild(t) // to find stdlib export data in the build cache + testenv.NeedsGOROOTDir(t, "test") // This package only handles gc export data. if runtime.Compiler != "gc" { @@ -1043,3 +1044,71 @@ func testAliases(t *testing.T, f func(*testing.T)) { }) } } + +type importMap map[string]*types.Package + +func (m importMap) Import(path string) (*types.Package, error) { return m[path], nil } + +func TestIssue69912(t *testing.T) { + fset := token.NewFileSet() + + check := func(pkgname, src string, imports importMap) (*types.Package, error) { + f, err := goparser.ParseFile(fset, "a.go", src, 0) + if err != nil { + return nil, err + } + config := &types.Config{ + Importer: imports, + } + return config.Check(pkgname, fset, []*ast.File{f}, nil) + } + + const libSrc = `package lib + +type T int +` + + lib, err := check("lib", libSrc, nil) + if err != nil { + t.Fatalf("Checking lib: %v", err) + } + + // Export it. + var out bytes.Buffer + if err := gcimporter.IExportData(&out, fset, lib); err != nil { + t.Fatalf("export: %v", err) // any failure to export is a bug + } + + // Re-import it. + imports := make(map[string]*types.Package) + _, lib2, err := gcimporter.IImportData(fset, imports, out.Bytes(), "lib") + if err != nil { + t.Fatalf("import: %v", err) // any failure of export+import is a bug. + } + + // Use the resulting package concurrently, via dot-imports. + + const pSrc = `package p + +import . "lib" + +type S struct { + f T +} +` + importer := importMap{ + "lib": lib2, + } + var wg sync.WaitGroup + for range 10 { + wg.Add(1) + go func() { + defer wg.Done() + _, err := check("p", pSrc, importer) + if err != nil { + t.Errorf("check failed: %v", err) + } + }() + } + wg.Wait() +} diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go index 1e19fbed8e7..7dfc31a37d7 100644 --- a/internal/gcimporter/iexport.go +++ b/internal/gcimporter/iexport.go @@ -246,6 +246,26 @@ import ( // IExportShallow encodes "shallow" export data for the specified package. // +// For types, we use "shallow" export data. Historically, the Go +// compiler always produced a summary of the types for a given package +// that included types from other packages that it indirectly +// referenced: "deep" export data. This had the advantage that the +// compiler (and analogous tools such as gopls) need only load one +// file per direct import. However, it meant that the files tended to +// get larger based on the level of the package in the import +// graph. For example, higher-level packages in the kubernetes module +// have over 1MB of "deep" export data, even when they have almost no +// content of their own, merely because they mention a major type that +// references many others. 
In pathological cases the export data was +// 300x larger than the source for a package due to this quadratic +// growth. +// +// "Shallow" export data means that the serialized types describe only +// a single package. If those types mention types from other packages, +// the type checker may need to request additional packages beyond +// just the direct imports. Type information for the entire transitive +// closure of imports is provided (lazily) by the DAG. +// // No promises are made about the encoding other than that it can be decoded by // the same version of IIExportShallow. If you plan to save export data in the // file system, be sure to include a cryptographic digest of the executable in @@ -268,8 +288,8 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) } // IImportShallow decodes "shallow" types.Package data encoded by -// IExportShallow in the same executable. This function cannot import data from -// cmd/compile or gcexportdata.Write. +// [IExportShallow] in the same executable. This function cannot import data +// from cmd/compile or gcexportdata.Write. // // The importer calls getPackages to obtain package symbols for all // packages mentioned in the export data, including the one being diff --git a/internal/gcimporter/iexport_go118_test.go b/internal/gcimporter/iexport_go118_test.go index 005b95b94f3..3ef0f121af8 100644 --- a/internal/gcimporter/iexport_go118_test.go +++ b/internal/gcimporter/iexport_go118_test.go @@ -98,6 +98,7 @@ func testExportSrc(t *testing.T, src []byte) { func TestIndexedImportTypeparamTests(t *testing.T) { testenv.NeedsGoBuild(t) // to find stdlib export data in the build cache + testenv.NeedsGOROOTDir(t, "test") testAliases(t, testIndexedImportTypeparamTests) } diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go index cb6ccdd7929..5707b3784a5 100644 --- a/internal/gcimporter/iexport_test.go +++ b/internal/gcimporter/iexport_test.go @@ -11,12 +11,10 @@ import ( "fmt" "go/ast" "go/constant" - "go/importer" "go/parser" "go/token" "go/types" "math/big" - "os" "path/filepath" "reflect" "strings" @@ -461,76 +459,38 @@ type Chained = C[Named] // B[Named, A[Named]] = B[Named, *Named] = []*Named t.Fatal(err) } - testcases := map[string]func(t *testing.T) *types.Package{ - // Read the result of IExportData through x/tools/internal/gcimporter.IImportData. - "tools": func(t *testing.T) *types.Package { - // export - exportdata, err := iexport(fset1, gcimporter.IExportVersion, pkg1) - if err != nil { - t.Fatal(err) - } + // Read the result of IExportData through x/tools/internal/gcimporter.IImportData. + // export + exportdata, err := iexport(fset1, gcimporter.IExportVersion, pkg1) + if err != nil { + t.Fatal(err) + } - // import - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path()) - if err != nil { - t.Fatalf("IImportData(%s): %v", pkg1.Path(), err) - } - return pkg2 - }, - // Read the result of IExportData through $GOROOT/src/internal/gcimporter.IImportData. - // - // This test fakes creating an old go object file in indexed format. - // This means that it can be loaded by go/importer or go/types. - // This step is not supported, but it does give test coverage for stdlib. - "goroot": func(t *testing.T) *types.Package { - testenv.NeedsGo1Point(t, 24) // requires >= 1.24 go/importer. - - // Write indexed export data file contents. 
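// [Editor's sketch, not part of this patch.] A condensed version of the round
// trip that TestIssue69912 above exercises: type-check a small package,
// serialize it with IExportData, then re-import the bytes with IImportData.
// It assumes the usual imports of these tests (bytes, go/ast, go/parser,
// go/token, go/types) plus this gcimporter package; the source string and the
// path "lib" are made up for illustration.
func exportImportRoundTrip() (*types.Package, error) {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "lib.go", "package lib\ntype T int\n", 0)
	if err != nil {
		return nil, err
	}
	pkg, err := (&types.Config{}).Check("lib", fset, []*ast.File{f}, nil)
	if err != nil {
		return nil, err
	}
	var out bytes.Buffer
	if err := gcimporter.IExportData(&out, fset, pkg); err != nil {
		return nil, err
	}
	imports := make(map[string]*types.Package)
	_, pkg2, err := gcimporter.IImportData(fset, imports, out.Bytes(), "lib")
	return pkg2, err
}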
- // - // TODO(taking): Slightly unclear to what extent this step should be supported by go/importer. - var buf bytes.Buffer - buf.WriteString("go object \n$$B\n") // object file header - if err := gcexportdata.Write(&buf, fset1, pkg1); err != nil { - t.Fatal(err) - } + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path()) + if err != nil { + t.Fatalf("IImportData(%s): %v", pkg1.Path(), err) + } - // Write export data to temporary file - out := t.TempDir() - name := filepath.Join(out, "pkg.out") - if err := os.WriteFile(name+".a", buf.Bytes(), 0644); err != nil { - t.Fatal(err) - } - pkg2, err := importer.Default().Import(name) - if err != nil { - t.Fatal(err) - } - return pkg2 - }, - } - - for name, importer := range testcases { - t.Run(name, func(t *testing.T) { - pkg := importer(t) - for name, want := range map[string]string{ - "A": "type pkg.A[T any] = *T", - "B": "type pkg.B[R any, S *R] = []S", - "C": "type pkg.C[U any] = pkg.B[U, pkg.A[U]]", - "Named": "type pkg.Named int", - "Chained": "type pkg.Chained = pkg.C[pkg.Named]", - } { - obj := pkg.Scope().Lookup(name) - if obj == nil { - t.Errorf("failed to find %q in package %s", name, pkg) - continue - } + pkg := pkg2 + for name, want := range map[string]string{ + "A": "type pkg.A[T any] = *T", + "B": "type pkg.B[R any, S *R] = []S", + "C": "type pkg.C[U any] = pkg.B[U, pkg.A[U]]", + "Named": "type pkg.Named int", + "Chained": "type pkg.Chained = pkg.C[pkg.Named]", + } { + obj := pkg.Scope().Lookup(name) + if obj == nil { + t.Errorf("failed to find %q in package %s", name, pkg) + continue + } - got := strings.ReplaceAll(obj.String(), pkg.Path(), "pkg") - if got != want { - t.Errorf("(%q).String()=%q. wanted %q", name, got, want) - } - } - }) + got := strings.ReplaceAll(obj.String(), pkg.Path(), "pkg") + if got != want { + t.Errorf("(%q).String()=%q. wanted %q", name, got, want) + } } } diff --git a/internal/gcimporter/iimport.go b/internal/gcimporter/iimport.go index 21908a158b4..e260c0e8dbf 100644 --- a/internal/gcimporter/iimport.go +++ b/internal/gcimporter/iimport.go @@ -558,6 +558,14 @@ type importReader struct { prevColumn int64 } +// markBlack is redefined in iimport_go123.go, to work around golang/go#69912. +// +// If TypeNames are not marked black (in the sense of go/types cycle +// detection), they may be mutated when dot-imported. Fix this by punching a +// hole through the type, when compiling with Go 1.23. (The bug has been fixed +// for 1.24, but the fix was not worth back-porting). +var markBlack = func(name *types.TypeName) {} + func (r *importReader) obj(name string) { tag := r.byte() pos := r.pos() @@ -570,6 +578,7 @@ func (r *importReader) obj(name string) { } typ := r.typ() obj := aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ, tparams) + markBlack(obj) // workaround for golang/go#69912 r.declare(obj) case constTag: @@ -590,6 +599,9 @@ func (r *importReader) obj(name string) { // declaration before recursing. obj := types.NewTypeName(pos, r.currPkg, name, nil) named := types.NewNamed(obj, nil, nil) + + markBlack(obj) // workaround for golang/go#69912 + // Declare obj before calling r.tparamList, so the new type name is recognized // if used in the constraint of one of its own typeparams (see #48280). 
r.declare(obj) diff --git a/internal/gcimporter/iimport_go122.go b/internal/gcimporter/iimport_go122.go new file mode 100644 index 00000000000..7586bfaca60 --- /dev/null +++ b/internal/gcimporter/iimport_go122.go @@ -0,0 +1,53 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.22 && !go1.24 + +package gcimporter + +import ( + "go/token" + "go/types" + "unsafe" +) + +// TODO(rfindley): delete this workaround once go1.24 is assured. + +func init() { + // Update markBlack so that it correctly sets the color + // of imported TypeNames. + // + // See the doc comment for markBlack for details. + + type color uint32 + const ( + white color = iota + black + grey + ) + type object struct { + _ *types.Scope + _ token.Pos + _ *types.Package + _ string + _ types.Type + _ uint32 + color_ color + _ token.Pos + } + type typeName struct { + object + } + + // If the size of types.TypeName changes, this will fail to compile. + const delta = int64(unsafe.Sizeof(typeName{})) - int64(unsafe.Sizeof(types.TypeName{})) + var _ [-delta * delta]int + + markBlack = func(obj *types.TypeName) { + type uP = unsafe.Pointer + var ptr *typeName + *(*uP)(uP(&ptr)) = uP(obj) + ptr.color_ = black + } +} diff --git a/internal/imports/fix.go b/internal/imports/fix.go index c15108178ab..5ae576977a2 100644 --- a/internal/imports/fix.go +++ b/internal/imports/fix.go @@ -27,7 +27,6 @@ import ( "unicode" "unicode/utf8" - "golang.org/x/sync/errgroup" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" @@ -91,18 +90,6 @@ type ImportFix struct { Relevance float64 // see pkg } -// An ImportInfo represents a single import statement. -type ImportInfo struct { - ImportPath string // import path, e.g. "crypto/rand". - Name string // import name, e.g. "crand", or "" if none. -} - -// A packageInfo represents what's known about a package. -type packageInfo struct { - name string // real package name, if known. - exports map[string]bool // known exports. -} - // parseOtherFiles parses all the Go files in srcDir except filename, including // test files if filename looks like a test. // @@ -162,8 +149,8 @@ func addGlobals(f *ast.File, globals map[string]bool) { // collectReferences builds a map of selector expressions, from // left hand side (X) to a set of right hand sides (Sel). -func collectReferences(f *ast.File) references { - refs := references{} +func collectReferences(f *ast.File) References { + refs := References{} var visitor visitFn visitor = func(node ast.Node) ast.Visitor { @@ -233,7 +220,7 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { allFound := true for right := range syms { - if !pkgInfo.exports[right] { + if !pkgInfo.Exports[right] { allFound = false break } @@ -246,11 +233,6 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { return nil } -// references is set of references found in a Go file. The first map key is the -// left hand side of a selector expression, the second key is the right hand -// side, and the value should always be true. -type references map[string]map[string]bool - // A pass contains all the inputs and state necessary to fix a file's imports. // It can be modified in some ways during use; see comments below. type pass struct { @@ -258,27 +240,29 @@ type pass struct { fset *token.FileSet // fset used to parse f and its siblings. 
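// [Editor's sketch, not part of this patch.] The iimport_go122.go workaround
// above hinges on a compile-time size assertion: mirror the layout of
// types.TypeName in a local struct and refuse to build if the two sizes ever
// diverge. The same pattern in isolation, with a made-up pair of structs
// (requires only "unsafe"):

type mirrorLayout struct{ a, b uintptr }
type knownLayout struct{ a, b uintptr }

// sizeDelta is 0 while the layouts agree; if they diverge, -sizeDelta*sizeDelta
// is a negative array length and the package stops compiling.
const sizeDelta = int64(unsafe.Sizeof(mirrorLayout{})) - int64(unsafe.Sizeof(knownLayout{}))

var _ [-sizeDelta * sizeDelta]int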
f *ast.File // the file being fixed. srcDir string // the directory containing f. - env *ProcessEnv // the environment to use for go commands, etc. - loadRealPackageNames bool // if true, load package names from disk rather than guessing them. - otherFiles []*ast.File // sibling files. + logf func(string, ...any) + source Source // the environment to use for go commands, etc. + loadRealPackageNames bool // if true, load package names from disk rather than guessing them. + otherFiles []*ast.File // sibling files. + goroot string // Intermediate state, generated by load. existingImports map[string][]*ImportInfo - allRefs references - missingRefs references + allRefs References + missingRefs References // Inputs to fix. These can be augmented between successive fix calls. lastTry bool // indicates that this is the last call and fix should clean up as best it can. candidates []*ImportInfo // candidate imports in priority order. - knownPackages map[string]*packageInfo // information about all known packages. + knownPackages map[string]*PackageInfo // information about all known packages. } // loadPackageNames saves the package names for everything referenced by imports. -func (p *pass) loadPackageNames(imports []*ImportInfo) error { - if p.env.Logf != nil { - p.env.Logf("loading package names for %v packages", len(imports)) +func (p *pass) loadPackageNames(ctx context.Context, imports []*ImportInfo) error { + if p.logf != nil { + p.logf("loading package names for %v packages", len(imports)) defer func() { - p.env.Logf("done loading package names for %v packages", len(imports)) + p.logf("done loading package names for %v packages", len(imports)) }() } var unknown []string @@ -289,20 +273,17 @@ func (p *pass) loadPackageNames(imports []*ImportInfo) error { unknown = append(unknown, imp.ImportPath) } - resolver, err := p.env.GetResolver() - if err != nil { - return err - } - - names, err := resolver.loadPackageNames(unknown, p.srcDir) + names, err := p.source.LoadPackageNames(ctx, p.srcDir, unknown) if err != nil { return err } + // TODO(rfindley): revisit this. Why do we need to store known packages with + // no exports? The inconsistent data is confusing. for path, name := range names { - p.knownPackages[path] = &packageInfo{ - name: name, - exports: map[string]bool{}, + p.knownPackages[path] = &PackageInfo{ + Name: name, + Exports: map[string]bool{}, } } return nil @@ -330,8 +311,8 @@ func (p *pass) importIdentifier(imp *ImportInfo) string { return imp.Name } known := p.knownPackages[imp.ImportPath] - if known != nil && known.name != "" { - return withoutVersion(known.name) + if known != nil && known.Name != "" { + return withoutVersion(known.Name) } return ImportPathToAssumedName(imp.ImportPath) } @@ -339,9 +320,9 @@ func (p *pass) importIdentifier(imp *ImportInfo) string { // load reads in everything necessary to run a pass, and reports whether the // file already has all the imports it needs. It fills in p.missingRefs with the // file's missing symbols, if any, or removes unused imports if not. -func (p *pass) load() ([]*ImportFix, bool) { - p.knownPackages = map[string]*packageInfo{} - p.missingRefs = references{} +func (p *pass) load(ctx context.Context) ([]*ImportFix, bool) { + p.knownPackages = map[string]*PackageInfo{} + p.missingRefs = References{} p.existingImports = map[string][]*ImportInfo{} // Load basic information about the file in question. @@ -364,9 +345,11 @@ func (p *pass) load() ([]*ImportFix, bool) { // f's imports by the identifier they introduce. 
imports := collectImports(p.f) if p.loadRealPackageNames { - err := p.loadPackageNames(append(imports, p.candidates...)) + err := p.loadPackageNames(ctx, append(imports, p.candidates...)) if err != nil { - p.env.logf("loading package names: %v", err) + if p.logf != nil { + p.logf("loading package names: %v", err) + } return nil, false } } @@ -535,9 +518,10 @@ func (p *pass) assumeSiblingImportsValid() { // We have the stdlib in memory; no need to guess. rights = symbolNameSet(m) } - p.addCandidate(imp, &packageInfo{ + // TODO(rfindley): we should set package name here, for consistency. + p.addCandidate(imp, &PackageInfo{ // no name; we already know it. - exports: rights, + Exports: rights, }) } } @@ -546,14 +530,14 @@ func (p *pass) assumeSiblingImportsValid() { // addCandidate adds a candidate import to p, and merges in the information // in pkg. -func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) { +func (p *pass) addCandidate(imp *ImportInfo, pkg *PackageInfo) { p.candidates = append(p.candidates, imp) if existing, ok := p.knownPackages[imp.ImportPath]; ok { - if existing.name == "" { - existing.name = pkg.name + if existing.Name == "" { + existing.Name = pkg.Name } - for export := range pkg.exports { - existing.exports[export] = true + for export := range pkg.Exports { + existing.Exports[export] = true } } else { p.knownPackages[imp.ImportPath] = pkg @@ -581,19 +565,42 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P // getFixes gets the import fixes that need to be made to f in order to fix the imports. // It does not modify the ast. func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { + source, err := NewProcessEnvSource(env, filename, f.Name.Name) + if err != nil { + return nil, err + } + goEnv, err := env.goEnv() + if err != nil { + return nil, err + } + return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source) +} + +func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, goroot string, logf func(string, ...any), source Source) ([]*ImportFix, error) { + // This logic is defensively duplicated from getFixes. abs, err := filepath.Abs(filename) if err != nil { return nil, err } srcDir := filepath.Dir(abs) - env.logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + + if logf != nil { + logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir) + } // First pass: looking only at f, and using the naive algorithm to // derive package names from import paths, see if the file is already // complete. We can't add any imports yet, because we don't know // if missing references are actually package vars. - p := &pass{fset: fset, f: f, srcDir: srcDir, env: env} - if fixes, done := p.load(); done { + p := &pass{ + fset: fset, + f: f, + srcDir: srcDir, + logf: logf, + goroot: goroot, + source: source, + } + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -605,7 +612,7 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st // Second pass: add information from other files in the same package, // like their package vars and imports. 
p.otherFiles = otherFiles - if fixes, done := p.load(); done { + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -618,10 +625,17 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st // Third pass: get real package names where we had previously used // the naive algorithm. - p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} + p = &pass{ + fset: fset, + f: f, + srcDir: srcDir, + logf: logf, + goroot: goroot, + source: p.source, // safe to reuse, as it's just a wrapper around env + } p.loadRealPackageNames = true p.otherFiles = otherFiles - if fixes, done := p.load(); done { + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -835,7 +849,7 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP return true }, dirFound: func(pkg *pkg) bool { - return pkgIsCandidate(filename, references{searchPkg: nil}, pkg) + return pkgIsCandidate(filename, References{searchPkg: nil}, pkg) }, packageNameLoaded: func(pkg *pkg) bool { return pkg.packageName == searchPkg @@ -1086,11 +1100,7 @@ func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) return e.GocmdRunner.Run(ctx, inv) } -func addStdlibCandidates(pass *pass, refs references) error { - goenv, err := pass.env.goEnv() - if err != nil { - return err - } +func addStdlibCandidates(pass *pass, refs References) error { localbase := func(nm string) string { ans := path.Base(nm) if ans[0] == 'v' { @@ -1105,13 +1115,13 @@ func addStdlibCandidates(pass *pass, refs references) error { } add := func(pkg string) { // Prevent self-imports. - if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir { + if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.goroot, "src", pkg) == pass.srcDir { return } exports := symbolNameSet(stdlib.PackageSymbols[pkg]) pass.addCandidate( &ImportInfo{ImportPath: pkg}, - &packageInfo{name: localbase(pkg), exports: exports}) + &PackageInfo{Name: localbase(pkg), Exports: exports}) } for left := range refs { if left == "rand" { @@ -1175,91 +1185,14 @@ type scanCallback struct { exportsLoaded func(pkg *pkg, exports []stdlib.Symbol) } -func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error { +func addExternalCandidates(ctx context.Context, pass *pass, refs References, filename string) error { ctx, done := event.Start(ctx, "imports.addExternalCandidates") defer done() - var mu sync.Mutex - found := make(map[string][]pkgDistance) - callback := &scanCallback{ - rootFound: func(gopathwalk.Root) bool { - return true // We want everything. - }, - dirFound: func(pkg *pkg) bool { - return pkgIsCandidate(filename, refs, pkg) - }, - packageNameLoaded: func(pkg *pkg) bool { - if _, want := refs[pkg.packageName]; !want { - return false - } - if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName { - // The candidate is in the same directory and has the - // same package name. Don't try to import ourselves. - return false - } - if !canUse(filename, pkg.dir) { - return false - } - mu.Lock() - defer mu.Unlock() - found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)}) - return false // We'll do our own loading after we sort. 
- }, - } - resolver, err := pass.env.GetResolver() + results, err := pass.source.ResolveReferences(ctx, filename, refs) if err != nil { return err } - if err = resolver.scan(ctx, callback); err != nil { - return err - } - - // Search for imports matching potential package references. - type result struct { - imp *ImportInfo - pkg *packageInfo - } - results := make([]*result, len(refs)) - - g, ctx := errgroup.WithContext(ctx) - - searcher := symbolSearcher{ - logf: pass.env.logf, - srcDir: pass.srcDir, - xtest: strings.HasSuffix(pass.f.Name.Name, "_test"), - loadExports: resolver.loadExports, - } - - i := 0 - for pkgName, symbols := range refs { - index := i // claim an index in results - i++ - pkgName := pkgName - symbols := symbols - - g.Go(func() error { - found, err := searcher.search(ctx, found[pkgName], pkgName, symbols) - if err != nil { - return err - } - if found == nil { - return nil // No matching package. - } - - imp := &ImportInfo{ - ImportPath: found.importPathShort, - } - pkg := &packageInfo{ - name: pkgName, - exports: symbols, - } - results[index] = &result{imp, pkg} - return nil - }) - } - if err := g.Wait(); err != nil { - return err - } for _, result := range results { if result == nil { @@ -1267,7 +1200,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil } // Don't offer completions that would shadow predeclared // names, such as github.com/coreos/etcd/error. - if types.Universe.Lookup(result.pkg.name) != nil { // predeclared + if types.Universe.Lookup(result.Package.Name) != nil { // predeclared // Ideally we would skip this candidate only // if the predeclared name is actually // referenced by the file, but that's a lot @@ -1276,7 +1209,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil // user before long. continue } - pass.addCandidate(result.imp, result.pkg) + pass.addCandidate(result.Import, result.Package) } return nil } @@ -1801,7 +1734,7 @@ func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols m // filename is the file being formatted. // pkgIdent is the package being searched for, like "client" (if // searching for "client.New") -func pkgIsCandidate(filename string, refs references, pkg *pkg) bool { +func pkgIsCandidate(filename string, refs References, pkg *pkg) bool { // Check "internal" and "vendor" visibility: if !canUse(filename, pkg.dir) { return false diff --git a/internal/imports/fix_test.go b/internal/imports/fix_test.go index 0571c6aa5eb..5409db0217f 100644 --- a/internal/imports/fix_test.go +++ b/internal/imports/fix_test.go @@ -20,8 +20,8 @@ import ( "sync/atomic" "testing" - "golang.org/x/tools/go/packages/packagestest" "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/internal/stdlib" ) @@ -2513,7 +2513,7 @@ func TestPkgIsCandidate(t *testing.T) { } for i, tt := range tests { t.Run(tt.name, func(t *testing.T) { - refs := references{tt.pkgIdent: nil} + refs := References{tt.pkgIdent: nil} got := pkgIsCandidate(tt.filename, refs, tt.pkg) if got != tt.want { t.Errorf("test %d. pkgIsCandidate(%q, %q, %+v) = %v; want %v", diff --git a/internal/imports/imports.go b/internal/imports/imports.go index ff6b59a58a0..2215a12880a 100644 --- a/internal/imports/imports.go +++ b/internal/imports/imports.go @@ -47,7 +47,14 @@ type Options struct { // Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env. 
func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) { fileSet := token.NewFileSet() - file, adjust, err := parse(fileSet, filename, src, opt) + var parserMode parser.Mode + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + file, adjust, err := parse(fileSet, filename, src, parserMode, opt.Fragment) if err != nil { return nil, err } @@ -66,17 +73,19 @@ func Process(filename string, src []byte, opt *Options) (formatted []byte, err e // // Note that filename's directory influences which imports can be chosen, // so it is important that filename be accurate. -func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { +func FixImports(ctx context.Context, filename string, src []byte, goroot string, logf func(string, ...any), source Source) (fixes []*ImportFix, err error) { ctx, done := event.Start(ctx, "imports.FixImports") defer done() fileSet := token.NewFileSet() - file, _, err := parse(fileSet, filename, src, opt) + // TODO(rfindley): these default values for ParseComments and AllErrors were + // extracted from gopls, but are they even needed? + file, _, err := parse(fileSet, filename, src, parser.ParseComments|parser.AllErrors, true) if err != nil { return nil, err } - return getFixes(ctx, fileSet, file, filename, opt.Env) + return getFixesWithSource(ctx, fileSet, file, filename, goroot, logf, source) } // ApplyFixes applies all of the fixes to the file and formats it. extraMode @@ -114,7 +123,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e // formatted file, and returns the postpocessed result. func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) { mergeImports(file) - sortImports(opt.LocalPrefix, fset.File(file.Pos()), file) + sortImports(opt.LocalPrefix, fset.File(file.FileStart), file) var spacesBefore []string // import paths we need spaces before for _, impSection := range astutil.Imports(fset, file) { // Within each block of contiguous imports, see if any @@ -164,13 +173,9 @@ func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(ori // parse parses src, which was read from filename, // as a Go source file or statement list. -func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { - var parserMode parser.Mode // legacy ast.Object resolution is required here - if opt.Comments { - parserMode |= parser.ParseComments - } - if opt.AllErrors { - parserMode |= parser.AllErrors +func parse(fset *token.FileSet, filename string, src []byte, parserMode parser.Mode, fragment bool) (*ast.File, func(orig, src []byte) []byte, error) { + if parserMode&parser.SkipObjectResolution != 0 { + panic("legacy ast.Object resolution is required") } // Try as whole source file. @@ -181,7 +186,7 @@ func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast // If the error is that the source file didn't begin with a // package line and we accept fragmented input, fall through to // try as a source fragment. Stop and return on any other error. 
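// [Editor's sketch, not part of this patch.] How the reworked FixImports might
// be called from inside this package: it now takes an explicit goroot, an
// optional logf, and a Source rather than *Options. The helper name and the
// assumed package name "main" are made up for illustration.
func fixOneFile(ctx context.Context, env *ProcessEnv, goroot, filename string, src []byte) ([]*ImportFix, error) {
	source, err := NewProcessEnvSource(env, filename, "main")
	if err != nil {
		return nil, err
	}
	return FixImports(ctx, filename, src, goroot, nil /* logf */, source)
}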
- if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + if !fragment || !strings.Contains(err.Error(), "expected 'package'") { return nil, nil, err } diff --git a/internal/imports/source.go b/internal/imports/source.go new file mode 100644 index 00000000000..5d2aeeebc95 --- /dev/null +++ b/internal/imports/source.go @@ -0,0 +1,63 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import "context" + +// These types document the APIs below. +// +// TODO(rfindley): consider making these defined types rather than aliases. +type ( + ImportPath = string + PackageName = string + Symbol = string + + // References is set of References found in a Go file. The first map key is the + // left hand side of a selector expression, the second key is the right hand + // side, and the value should always be true. + References = map[PackageName]map[Symbol]bool +) + +// A Result satisfies a missing import. +// +// The Import field describes the missing import spec, and the Package field +// summarizes the package exports. +type Result struct { + Import *ImportInfo + Package *PackageInfo +} + +// An ImportInfo represents a single import statement. +type ImportInfo struct { + ImportPath string // import path, e.g. "crypto/rand". + Name string // import name, e.g. "crand", or "" if none. +} + +// A PackageInfo represents what's known about a package. +type PackageInfo struct { + Name string // package name in the package declaration, if known + Exports map[string]bool // set of names of known package level sortSymbols +} + +// A Source provides imports to satisfy unresolved references in the file being +// fixed. +type Source interface { + // LoadPackageNames queries PackageName information for the requested import + // paths, when operating from the provided srcDir. + // + // TODO(rfindley): try to refactor to remove this operation. + LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) + + // ResolveReferences asks the Source for the best package name to satisfy + // each of the missing references, in the context of fixing the given + // filename. + // + // Returns a map from package name to a [Result] for that package name that + // provides the required symbols. Keys may be omitted in the map if no + // candidates satisfy all missing references for that package name. It is up + // to each data source to select the best result for each entry in the + // missing map. + ResolveReferences(ctx context.Context, filename string, missing References) (map[PackageName]*Result, error) +} diff --git a/internal/imports/source_env.go b/internal/imports/source_env.go new file mode 100644 index 00000000000..ff9555d2879 --- /dev/null +++ b/internal/imports/source_env.go @@ -0,0 +1,125 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "context" + "path/filepath" + "strings" + "sync" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/internal/gopathwalk" +) + +// ProcessEnvSource implements the [Source] interface using the legacy +// [ProcessEnv] abstraction. 
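// [Editor's sketch, not part of this patch.] A minimal alternative Source that
// answers from fixed in-memory tables, to show the shape of the new interface;
// a real implementation, such as ProcessEnvSource below, consults the go
// command or an index. The type and its fields are made up for illustration.
type fixedSource struct {
	names   map[ImportPath]PackageName // import path -> package name
	results map[PackageName]*Result    // package name -> candidate import
}

func (s *fixedSource) LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) {
	out := make(map[ImportPath]PackageName)
	for _, p := range paths {
		if name, ok := s.names[p]; ok {
			out[p] = name
		}
	}
	return out, nil
}

func (s *fixedSource) ResolveReferences(ctx context.Context, filename string, missing References) (map[PackageName]*Result, error) {
	out := make(map[PackageName]*Result)
	for pkgName := range missing {
		if r, ok := s.results[pkgName]; ok {
			out[pkgName] = r
		}
	}
	return out, nil
}

var _ Source = (*fixedSource)(nil)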
+type ProcessEnvSource struct { + env *ProcessEnv + srcDir string + filename string + pkgName string +} + +// NewProcessEnvSource returns a [ProcessEnvSource] wrapping the given +// env, to be used for fixing imports in the file with name filename in package +// named pkgName. +func NewProcessEnvSource(env *ProcessEnv, filename, pkgName string) (*ProcessEnvSource, error) { + abs, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + srcDir := filepath.Dir(abs) + return &ProcessEnvSource{ + env: env, + srcDir: srcDir, + filename: filename, + pkgName: pkgName, + }, nil +} + +func (s *ProcessEnvSource) LoadPackageNames(ctx context.Context, srcDir string, unknown []string) (map[string]string, error) { + r, err := s.env.GetResolver() + if err != nil { + return nil, err + } + return r.loadPackageNames(unknown, srcDir) +} + +func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename string, refs map[string]map[string]bool) (map[string]*Result, error) { + var mu sync.Mutex + found := make(map[string][]pkgDistance) + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true // We want everything. + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, refs, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + if _, want := refs[pkg.packageName]; !want { + return false + } + if pkg.dir == s.srcDir && s.pkgName == pkg.packageName { + // The candidate is in the same directory and has the + // same package name. Don't try to import ourselves. + return false + } + if !canUse(filename, pkg.dir) { + return false + } + mu.Lock() + defer mu.Unlock() + found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(s.srcDir, pkg.dir)}) + return false // We'll do our own loading after we sort. + }, + } + resolver, err := s.env.GetResolver() + if err != nil { + return nil, err + } + if err := resolver.scan(ctx, callback); err != nil { + return nil, err + } + + g, ctx := errgroup.WithContext(ctx) + + searcher := symbolSearcher{ + logf: s.env.logf, + srcDir: s.srcDir, + xtest: strings.HasSuffix(s.pkgName, "_test"), + loadExports: resolver.loadExports, + } + + var resultMu sync.Mutex + results := make(map[string]*Result, len(refs)) + for pkgName, symbols := range refs { + g.Go(func() error { + found, err := searcher.search(ctx, found[pkgName], pkgName, symbols) + if err != nil { + return err + } + if found == nil { + return nil // No matching package. + } + + imp := &ImportInfo{ + ImportPath: found.importPathShort, + } + pkg := &PackageInfo{ + Name: pkgName, + Exports: symbols, + } + resultMu.Lock() + results[pkgName] = &Result{Import: imp, Package: pkg} + resultMu.Unlock() + return nil + }) + } + if err := g.Wait(); err != nil { + return nil, err + } + return results, nil +} diff --git a/internal/jsonrpc2/handler.go b/internal/jsonrpc2/handler.go index 418bd68045b..27cb108922a 100644 --- a/internal/jsonrpc2/handler.go +++ b/internal/jsonrpc2/handler.go @@ -27,8 +27,8 @@ func MethodNotFound(ctx context.Context, reply Replier, req Request) error { return reply(ctx, nil, fmt.Errorf("%w: %q", ErrMethodNotFound, req.Method())) } -// MustReplyHandler creates a Handler that panics if the wrapped handler does -// not call Reply for every request that it is passed. +// MustReplyHandler is a middleware that creates a Handler that panics if the +// wrapped handler does not call Reply for every request that it is passed. 
func MustReplyHandler(handler Handler) Handler { return func(ctx context.Context, reply Replier, req Request) error { called := false @@ -78,8 +78,8 @@ func CancelHandler(handler Handler) (Handler, func(id ID)) { } } -// AsyncHandler returns a handler that processes each request goes in its own -// goroutine. +// AsyncHandler is a middleware that returns a handler that processes each +// request goes in its own goroutine. // The handler returns immediately, without the request being processed. // Each request then waits for the previous request to finish before it starts. // This allows the stream to unblock at the cost of unbounded goroutines @@ -90,13 +90,14 @@ func AsyncHandler(handler Handler) Handler { return func(ctx context.Context, reply Replier, req Request) error { waitForPrevious := nextRequest nextRequest = make(chan struct{}) - unlockNext := nextRequest + releaser := &releaser{ch: nextRequest} innerReply := reply reply = func(ctx context.Context, result interface{}, err error) error { - close(unlockNext) + releaser.release(true) return innerReply(ctx, result, err) } _, queueDone := event.Start(ctx, "queued") + ctx = context.WithValue(ctx, asyncKey, releaser) go func() { <-waitForPrevious queueDone() @@ -107,3 +108,46 @@ func AsyncHandler(handler Handler) Handler { return nil } } + +// Async, when used with the [AsyncHandler] middleware, indicates that the +// current jsonrpc2 request may be handled asynchronously to subsequent +// requests. +// +// When not used with an AsyncHandler, Async is a no-op. +// +// Async must be called at most once on each request's context (and its +// descendants). +func Async(ctx context.Context) { + if r, ok := ctx.Value(asyncKey).(*releaser); ok { + r.release(false) + } +} + +type asyncKeyType struct{} + +var asyncKey = asyncKeyType{} + +// A releaser implements concurrency safe 'releasing' of async requests. (A +// request is released when it is allowed to run concurrent with other +// requests, via a call to [Async].) +type releaser struct { + mu sync.Mutex + ch chan struct{} + released bool +} + +// release closes the associated channel. If soft is set, multiple calls to +// release are allowed. 
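// [Editor's sketch, not part of this patch.] How a handler might opt in to
// concurrency under the AsyncHandler middleware: it still replies through
// reply, but calling Async releases the ordering constraint early so later
// requests can start. The method name "search/slow" and the string result are
// made up for illustration.
func newExampleHandler() Handler {
	return AsyncHandler(func(ctx context.Context, reply Replier, req Request) error {
		if req.Method() == "search/slow" {
			Async(ctx) // let subsequent requests proceed before this one finishes
		}
		return reply(ctx, "done", nil)
	})
}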
+func (r *releaser) release(soft bool) { + r.mu.Lock() + defer r.mu.Unlock() + + if r.released { + if !soft { + panic("jsonrpc2.Async called multiple times") + } + } else { + close(r.ch) + r.released = true + } +} diff --git a/internal/modindex/dir_test.go b/internal/modindex/dir_test.go index 862d111ea42..cbdf194ddb4 100644 --- a/internal/modindex/dir_test.go +++ b/internal/modindex/dir_test.go @@ -8,6 +8,8 @@ import ( "os" "path/filepath" "testing" + + "github.com/google/go-cmp/cmp" ) type id struct { @@ -37,7 +39,7 @@ var idtests = []id{ "cloud.google.com/go@v0.94.0/compute/metadata", }, }, - { //m test bizarre characters in directory name + { // test bizarre characters in directory name importPath: "bad,guy.com/go", best: 0, dirs: []string{"bad,guy.com/go@v0.1.0"}, @@ -51,23 +53,132 @@ func testModCache(t *testing.T) string { return dir } +// add a trivial package to the test module cache +func addPkg(cachedir, dir string) error { + if err := os.MkdirAll(filepath.Join(cachedir, dir), 0755); err != nil { + return err + } + return os.WriteFile(filepath.Join(cachedir, dir, "foo.go"), + []byte("package foo\nfunc Foo() {}"), 0644) +} + +// update, where new stuff is semantically better than old stuff +func TestIncremental(t *testing.T) { + dir := testModCache(t) + // build old index + for _, it := range idtests { + for i, d := range it.dirs { + if it.best == i { + continue // wait for second pass + } + if err := addPkg(dir, d); err != nil { + t.Fatal(err) + } + } + } + if err := Create(dir); err != nil { + t.Fatal(err) + } + // add new stuff to the module cache + for _, it := range idtests { + for i, d := range it.dirs { + if it.best != i { + continue // only add the new stuff + } + if err := addPkg(dir, d); err != nil { + t.Fatal(err) + } + } + } + if ok, err := Update(dir); err != nil { + t.Fatal(err) + } else if !ok { + t.Error("failed to write updated index") + } + index2, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + // build a fresh index + if err := Create(dir); err != nil { + t.Fatal(err) + } + index1, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + // they should be the same except maybe for the time + index1.Changed = index2.Changed + if diff := cmp.Diff(index1, index2); diff != "" { + t.Errorf("mismatching indexes (-updated +cleared):\n%s", diff) + } +} + +// update, where new stuff is semantically worse than some old stuff +func TestIncrementalNope(t *testing.T) { + dir := testModCache(t) + // build old index + for _, it := range idtests { + for i, d := range it.dirs { + if i == 0 { + continue // wait for second pass + } + if err := addPkg(dir, d); err != nil { + t.Fatal(err) + } + } + } + if err := Create(dir); err != nil { + t.Fatal(err) + } + // add new stuff to the module cache + for _, it := range idtests { + for i, d := range it.dirs { + if i > 0 { + break // only add the new one + } + if err := addPkg(dir, d); err != nil { + t.Fatal(err) + } + } + } + if ok, err := Update(dir); err != nil { + t.Fatal(err) + } else if !ok { + t.Error("failed to write updated index") + } + index2, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + // build a fresh index + if err := Create(dir); err != nil { + t.Fatal(err) + } + index1, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + // they should be the same except maybe for the time + index1.Changed = index2.Changed + if diff := cmp.Diff(index1, index2); diff != "" { + t.Errorf("mismatching indexes (-updated +cleared):\n%s", diff) + } +} + +// choose the semantically-latest version, with 
a single symbol func TestDirsSinglePath(t *testing.T) { for _, itest := range idtests { t.Run(itest.importPath, func(t *testing.T) { - // create a new fake GOMODCACHE + // create a new test GOMODCACHE dir := testModCache(t) for _, d := range itest.dirs { - if err := os.MkdirAll(filepath.Join(dir, d), 0755); err != nil { - t.Fatal(err) - } - // gopathwalk wants to see .go files - err := os.WriteFile(filepath.Join(dir, d, "main.go"), []byte("package main\nfunc main() {}"), 0600) - if err != nil { + if err := addPkg(dir, d); err != nil { t.Fatal(err) } } // build and check the index - if err := IndexModCache(dir, false); err != nil { + if err := Create(dir); err != nil { t.Fatal(err) } ix, err := ReadIndex(dir) @@ -83,6 +194,13 @@ func TestDirsSinglePath(t *testing.T) { if ix.Entries[0].Dir != Relpath(itest.dirs[itest.best]) { t.Fatalf("got dir %s, wanted %s", ix.Entries[0].Dir, itest.dirs[itest.best]) } + nms := ix.Entries[0].Names + if len(nms) != 1 { + t.Fatalf("got %d names, expected 1", len(nms)) + } + if nms[0] != "Foo F 0" { + t.Fatalf("got %q, expected Foo F 0", nms[0]) + } }) } } diff --git a/internal/modindex/directories.go b/internal/modindex/directories.go index b8aab3b736e..1e1a02f239b 100644 --- a/internal/modindex/directories.go +++ b/internal/modindex/directories.go @@ -23,6 +23,7 @@ type directory struct { path Relpath importPath string version string // semantic version + syms []symbol } // filterDirs groups the directories by import path, @@ -48,7 +49,7 @@ func byImportPath(dirs []Relpath) (map[string][]*directory, error) { return ans, nil } -// sort the directories by semantic version, lates first +// sort the directories by semantic version, latest first func semanticSort(v []*directory) { slices.SortFunc(v, func(l, r *directory) int { if n := semver.Compare(l.version, r.version); n != 0 { @@ -109,10 +110,7 @@ func (r *region) addDir(rt gopathwalk.Root, dir string) { } func (r *region) skipDir(_ gopathwalk.Root, dir string) bool { - // The cache directory is alreday ignored in gopathwalk - if filepath.Base(dir) == "vendor" { - return true - } + // The cache directory is already ignored in gopathwalk\ if filepath.Base(dir) == "internal" { return true } diff --git a/internal/modindex/gomodindex/cmd.go b/internal/modindex/gomodindex/cmd.go new file mode 100644 index 00000000000..06314826422 --- /dev/null +++ b/internal/modindex/gomodindex/cmd.go @@ -0,0 +1,148 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// A command for building and maintaing the module cache +// a.out +// The commands are 'create' which builds a new index, +// 'update', which attempts to update an existing index, +// 'query', which looks up things in the index. +// 'clean', which remove obsolete index files. +// If the command is invoked with no arguments, it defaults to 'create'. +package main + +import ( + "bytes" + "flag" + "fmt" + "log" + "os" + "os/exec" + "path/filepath" + "strings" + "time" + + "golang.org/x/tools/internal/modindex" +) + +var verbose = flag.Int("v", 0, "how much information to print") + +type cmd struct { + name string + f func(string) + doc string +} + +var cmds = []cmd{ + {"create", index, "create a clean index of GOMODCACHE"}, + {"update", update, "if there is an existing index of GOMODCACHE, update it. 
Otherise create one."}, + {"clean", clean, "removed unreferenced indexes more than an hour old"}, + {"query", query, "not yet implemented"}, +} + +func goEnv(s string) string { + out, err := exec.Command("go", "env", s).Output() + if err != nil { + return "" + } + out = bytes.TrimSpace(out) + return string(out) +} + +func main() { + flag.Parse() + log.SetFlags(log.Lshortfile) + cachedir := goEnv("GOMODCACHE") + if cachedir == "" { + log.Fatal("can't find GOMODCACHE") + } + if flag.NArg() == 0 { + index(cachedir) + return + } + for _, c := range cmds { + if flag.Arg(0) == c.name { + c.f(cachedir) + return + } + } + flag.Usage() +} + +func init() { + var sb strings.Builder + fmt.Fprintf(&sb, "usage:\n") + for _, c := range cmds { + fmt.Fprintf(&sb, "'%s': %s\n", c.name, c.doc) + } + msg := sb.String() + flag.Usage = func() { + fmt.Fprint(os.Stderr, msg) + } +} + +func index(dir string) { + modindex.Create(dir) +} + +func update(dir string) { + modindex.Update(dir) +} + +func query(dir string) { + panic("implement") +} +func clean(_ string) { + des, err := modindex.IndexDir() + if err != nil { + log.Fatal(err) + } + // look at the files starting with 'index' + // the current ones of each version are pointed to by + // index-name-%d files. Any others more than an hour old + // are deleted. + dis, err := os.ReadDir(des) + if err != nil { + log.Fatal(err) + } + cutoff := time.Now().Add(-time.Hour) + var inames []string // older files named index* + curnames := make(map[string]bool) // current versions of index (different CurrentVersion) + for _, de := range dis { + if !strings.HasPrefix(de.Name(), "index") { + continue + } + if strings.HasPrefix(de.Name(), "index-name-") { + buf, err := os.ReadFile(filepath.Join(des, de.Name())) + if err != nil { + log.Print(err) + continue + } + curnames[string(buf)] = true + if *verbose > 1 { + log.Printf("latest index is %s", string(buf)) + } + } + info, err := de.Info() + if err != nil { + log.Print(err) + continue + } + if info.ModTime().Before(cutoff) && !strings.HasPrefix(de.Name(), "index-name-") { + // add to the list of files to be removed. 
index-name-%d files are never removed + inames = append(inames, de.Name()) + if *verbose > 0 { + log.Printf("%s:%s", de.Name(), cutoff.Sub(info.ModTime())) + } + } + } + for _, nm := range inames { + if curnames[nm] { + continue + } + err := os.Remove(filepath.Join(des, nm)) + if err != nil && *verbose > 0 { + log.Printf("%s not removed (%v)", nm, err) + } + } +} diff --git a/internal/modindex/index.go b/internal/modindex/index.go index eed8e41c21a..c2443db408a 100644 --- a/internal/modindex/index.go +++ b/internal/modindex/index.go @@ -145,7 +145,7 @@ func readIndexFrom(cd Abspath, bx io.Reader) (*Index, error) { } // TODO(pjw): need to check that this is the expected cachedir // so the tag should be passed in to this function - ans.Changed, err = time.Parse(time.DateTime, b.Text()) + ans.Changed, err = time.ParseInLocation(time.DateTime, b.Text(), time.Local) if err != nil { return nil, err } @@ -207,11 +207,13 @@ func writeIndex(cachedir Abspath, ix *Index) error { } func writeIndexToFile(x *Index, fd *os.File) error { + cnt := 0 w := bufio.NewWriter(fd) fmt.Fprintf(w, "%d\n", x.Version) fmt.Fprintf(w, "%s\n", x.Cachedir) - // TODO(pjw): round the time down - fmt.Fprintf(w, "%s\n", x.Changed.Format(time.DateTime)) + // round the time down + tm := x.Changed.Add(-time.Second / 2) + fmt.Fprintf(w, "%s\n", tm.Format(time.DateTime)) for _, e := range x.Entries { if e.ImportPath == "" { continue // shouldn't happen @@ -227,11 +229,13 @@ func writeIndexToFile(x *Index, fd *os.File) error { } for _, x := range e.Names { fmt.Fprintf(w, "%s\n", x) + cnt++ } } if err := w.Flush(); err != nil { return err } + log.Printf("%d Entries %d names", len(x.Entries), cnt) return nil } diff --git a/internal/modindex/lookup.go b/internal/modindex/lookup.go new file mode 100644 index 00000000000..29d4e3d7a39 --- /dev/null +++ b/internal/modindex/lookup.go @@ -0,0 +1,145 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modindex + +import ( + "slices" + "strconv" + "strings" +) + +type Candidate struct { + PkgName string + Name string + Dir string + ImportPath string + Type LexType + // information for Funcs + Results int16 // how many results + Sig []Field // arg names and types +} + +type Field struct { + Arg, Type string +} + +type LexType int8 + +const ( + Const LexType = iota + Var + Type + Func +) + +// Lookup finds all the symbols in the index with the given PkgName and name. +// If prefix is true, it finds all of these with name as a prefix. 
+func (ix *Index) Lookup(pkg, name string, prefix bool) []Candidate { + loc, ok := slices.BinarySearchFunc(ix.Entries, pkg, func(e Entry, pkg string) int { + return strings.Compare(e.PkgName, pkg) + }) + if !ok { + return nil // didn't find the package + } + var ans []Candidate + // loc is the first entry for this package name, but there may be severeal + for i := loc; i < len(ix.Entries); i++ { + e := ix.Entries[i] + if e.PkgName != pkg { + break // end of sorted package names + } + nloc, ok := slices.BinarySearchFunc(e.Names, name, func(s string, name string) int { + if strings.HasPrefix(s, name) { + return 0 + } + if s < name { + return -1 + } + return 1 + }) + if !ok { + continue // didn't find the name, nor any symbols with name as a prefix + } + for j := nloc; j < len(e.Names); j++ { + nstr := e.Names[j] + // benchmarks show this makes a difference when there are a lot of Possibilities + flds := fastSplit(nstr) + if !(flds[0] == name || prefix && strings.HasPrefix(flds[0], name)) { + // past range of matching Names + break + } + if len(flds) < 2 { + continue // should never happen + } + px := Candidate{ + PkgName: pkg, + Name: flds[0], + Dir: string(e.Dir), + ImportPath: e.ImportPath, + Type: asLexType(flds[1][0]), + } + if flds[1] == "F" { + n, err := strconv.Atoi(flds[2]) + if err != nil { + continue // should never happen + } + px.Results = int16(n) + if len(flds) >= 4 { + sig := strings.Split(flds[3], " ") + for i := 0; i < len(sig); i++ { + // $ cannot otherwise occur. removing the spaces + // almost works, but for chan struct{}, e.g. + sig[i] = strings.Replace(sig[i], "$", " ", -1) + } + px.Sig = toFields(sig) + } + } + ans = append(ans, px) + } + } + return ans +} + +func toFields(sig []string) []Field { + ans := make([]Field, len(sig)/2) + for i := 0; i < len(ans); i++ { + ans[i] = Field{Arg: sig[2*i], Type: sig[2*i+1]} + } + return ans +} + +// benchmarks show this is measurably better than strings.Split +func fastSplit(x string) []string { + ans := make([]string, 0, 4) + nxt := 0 + start := 0 + for i := 0; i < len(x); i++ { + if x[i] != ' ' { + continue + } + ans = append(ans, x[start:i]) + nxt++ + start = i + 1 + if nxt >= 3 { + break + } + } + ans = append(ans, x[start:]) + return ans +} + +func asLexType(c byte) LexType { + switch c { + case 'C': + return Const + case 'V': + return Var + case 'T': + return Type + case 'F': + return Func + } + return -1 +} diff --git a/internal/modindex/lookup_test.go b/internal/modindex/lookup_test.go new file mode 100644 index 00000000000..6a663554d73 --- /dev/null +++ b/internal/modindex/lookup_test.go @@ -0,0 +1,130 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
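// [Editor's sketch, not part of this patch.] What one Names entry in the index
// looks like and how Lookup above takes it apart: the fields are the symbol
// name, its kind (C, V, T, or F), and, for funcs, the result count followed by
// alternating argument names and types, with spaces inside a type stored as
// '$'. The entry below is made up to match func FooF(n int, ch chan struct{}) error;
// it assumes only the fmt and strings packages.
func decodeOneName() {
	name := "FooF F 1 n int ch chan$struct{}"
	flds := strings.SplitN(name, " ", 4) // Lookup uses the equivalent fastSplit
	fmt.Println(flds[0])                 // "FooF"
	fmt.Println(flds[1])                 // "F": a func
	fmt.Println(flds[2])                 // "1": one result
	for _, f := range strings.Split(flds[3], " ") {
		fmt.Println(strings.ReplaceAll(f, "$", " ")) // "n", "int", "ch", "chan struct{}"
	}
}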
+ +package modindex + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strings" + "testing" +) + +type tdata struct { + fname string + pkg string + items []titem +} + +type titem struct { + code string + result result +} + +var thedata = tdata{ + fname: "cloud.google.com/go/longrunning@v0.4.1/foo.go", + pkg: "foo", + items: []titem{ + // these need to be in alphabetical order by symbol + {"func Foo() {}", result{"Foo", Func, 0, nil}}, + {"const FooC = 23", result{"FooC", Const, 0, nil}}, + {"func FooF(int, float) error {return nil}", result{"FooF", Func, 1, + []Field{{"_", "int"}, {"_", "float"}}}}, + {"type FooT struct{}", result{"FooT", Type, 0, nil}}, + {"var FooV int", result{"FooV", Var, 0, nil}}, + {"func Ⱋoox(x int) {}", result{"Ⱋoox", Func, 0, []Field{{"x", "int"}}}}, + }, +} + +type result struct { + name string + typ LexType + result int + sig []Field +} + +func okresult(r result, p Candidate) bool { + if r.name != p.Name || r.typ != p.Type || r.result != int(p.Results) { + return false + } + if len(r.sig) != len(p.Sig) { + return false + } + for i := 0; i < len(r.sig); i++ { + if r.sig[i] != p.Sig[i] { + return false + } + } + return true +} + +func TestLookup(t *testing.T) { + log.SetFlags(log.Lshortfile) + dir := testModCache(t) + wrtData(t, dir, thedata) + if _, err := indexModCache(dir, true); err != nil { + t.Fatal(err) + } + ix, err := ReadIndex(dir) + if err != nil { + t.Fatal(err) + } + if len(ix.Entries) != 1 { + t.Fatalf("got %d Entries, expected 1", len(ix.Entries)) + } + // get all the symbols + p := ix.Lookup("foo", "", true) + if len(p) != len(thedata.items) { + // we should have gotten them all + t.Errorf("got %d possibilities for pkg foo, expected %d", len(p), len(thedata.items)) + } + for i, r := range thedata.items { + if !okresult(r.result, p[i]) { + t.Errorf("got %#v, expected %#v", p[i], r.result) + } + } + // look for the Foo... and check that each is a Foo... + p = ix.Lookup("foo", "Foo", true) + if len(p) != 5 { + t.Errorf("got %d possibilities for foo.Foo*, expected 5", len(p)) + } + for _, r := range p { + if !strings.HasPrefix(r.Name, "Foo") { + t.Errorf("got %s, expected Foo...", r.Name) + } + } + // fail to find something + p = ix.Lookup("foo", "FooVal", false) + if len(p) != 0 { + t.Errorf("got %d possibilities for foo.FooVal, expected 0", len(p)) + } + // find an exact match + p = ix.Lookup("foo", "Foo", false) + if len(p) != 1 { + t.Errorf("got %d possibilities for foo.Foo, expected 1", len(p)) + } + // "Foo" is the first test datum + if !okresult(thedata.items[0].result, p[0]) { + t.Errorf("got %#v, expected %#v", p[0], thedata.items[0].result) + } +} + +func wrtData(t *testing.T, dir string, data tdata) { + t.Helper() + locname := filepath.FromSlash(data.fname) + if err := os.MkdirAll(filepath.Join(dir, filepath.Dir(locname)), 0755); err != nil { + t.Fatal(err) + } + fd, err := os.Create(filepath.Join(dir, locname)) + if err != nil { + t.Fatal(err) + } + defer fd.Close() + fd.WriteString(fmt.Sprintf("package %s\n", data.pkg)) + for _, item := range data.items { + fd.WriteString(item.code + "\n") + } +} diff --git a/internal/modindex/modindex.go b/internal/modindex/modindex.go index b6bfec43f98..6d0b5f09d94 100644 --- a/internal/modindex/modindex.go +++ b/internal/modindex/modindex.go @@ -13,7 +13,6 @@ package modindex import ( - "log" "path/filepath" "slices" "strings" @@ -22,36 +21,51 @@ import ( "golang.org/x/mod/semver" ) -// Modindex writes an index current as of when it is called. 
+// Create always creates a new index for the go module cache that is in cachedir. +func Create(cachedir string) error { + _, err := indexModCache(cachedir, true) + return err +} + +// Update the index for the go module cache that is in cachedir, +// If there is no existing index it will build one. +// If there are changed directories since the last index, it will +// write a new one and return true. Otherwise it returns false. +func Update(cachedir string) (bool, error) { + return indexModCache(cachedir, false) +} + +// indexModCache writes an index current as of when it is called. // If clear is true the index is constructed from all of GOMODCACHE // otherwise the index is constructed from the last previous index -// and the updates to the cache. -func IndexModCache(cachedir string, clear bool) error { +// and the updates to the cache. It returns true if it wrote an index, +// false otherwise. +func indexModCache(cachedir string, clear bool) (bool, error) { cachedir, err := filepath.Abs(cachedir) if err != nil { - return err + return false, err } cd := Abspath(cachedir) future := time.Now().Add(24 * time.Hour) // safely in the future - err = modindexTimed(future, cd, clear) + ok, err := modindexTimed(future, cd, clear) if err != nil { - return err + return false, err } - return nil + return ok, nil } // modindexTimed writes an index current as of onlyBefore. // If clear is true the index is constructed from all of GOMODCACHE // otherwise the index is constructed from the last previous index // and all the updates to the cache before onlyBefore. -// (this is useful for testing; perhaps it should not be exported) -func modindexTimed(onlyBefore time.Time, cachedir Abspath, clear bool) error { +// It returns true if it wrote a new index, false if it wrote nothing. +func modindexTimed(onlyBefore time.Time, cachedir Abspath, clear bool) (bool, error) { var curIndex *Index if !clear { var err error curIndex, err = ReadIndex(string(cachedir)) if clear && err != nil { - return err + return false, err } // TODO(pjw): check that most of those directorie still exist } @@ -64,12 +78,16 @@ func modindexTimed(onlyBefore time.Time, cachedir Abspath, clear bool) error { cfg.onlyAfter = curIndex.Changed } if err := cfg.buildIndex(); err != nil { - return err + return false, err + } + if len(cfg.newIndex.Entries) == 0 { + // no changes, don't write a new index + return false, nil } if err := cfg.writeIndex(); err != nil { - return err + return false, err } - return nil + return true, nil } type work struct { @@ -87,52 +105,50 @@ func (w *work) buildIndex() error { // so set it now. w.newIndex = &Index{Changed: time.Now(), Cachedir: w.cacheDir} dirs := findDirs(string(w.cacheDir), w.onlyAfter, w.onlyBefore) + if len(dirs) == 0 { + return nil + } newdirs, err := byImportPath(dirs) if err != nil { return err } - log.Printf("%d dirs, %d ips", len(dirs), len(newdirs)) // for each import path it might occur only in newdirs, // only in w.oldIndex, or in both. 
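// [Editor's sketch, not part of this patch.] How a caller inside x/tools might
// drive the new exported entry points: refresh the index for a module cache
// directory and query it. The package name "foo" and prefix "Foo" echo the
// tests above; error handling is abbreviated.
func refreshAndQuery(cachedir string) error {
	if _, err := Update(cachedir); err != nil { // writes a new index only if the cache changed
		return err
	}
	ix, err := ReadIndex(cachedir)
	if err != nil {
		return err
	}
	for _, c := range ix.Lookup("foo", "Foo", true) { // symbols in package foo with prefix "Foo"
		fmt.Println(c.ImportPath, c.Name, c.Type)
	}
	return nil
}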
 	// If it occurs in both, use the semantically later one
 	if w.oldIndex != nil {
-		killed := 0
 		for _, e := range w.oldIndex.Entries {
 			found, ok := newdirs[e.ImportPath]
 			if !ok {
-				continue
+				w.newIndex.Entries = append(w.newIndex.Entries, e)
+				continue // use this one, there is no new one
 			}
 			if semver.Compare(found[0].version, e.Version) > 0 {
-				// the new one is better, disable the old one
-				e.ImportPath = ""
-				killed++
+				// use the new one
 			} else {
 				// use the old one, forget the new one
-				delete(newdirs, e.ImportPath)
-			}
-		}
-		log.Printf("%d killed, %d ips", killed, len(newdirs))
-	}
-	// Build the skeleton of the new index using newdirs,
-	// and include the surviving parts of the old index
-	if w.oldIndex != nil {
-		for _, e := range w.oldIndex.Entries {
-			if e.ImportPath != "" {
 				w.newIndex.Entries = append(w.newIndex.Entries, e)
+				delete(newdirs, e.ImportPath)
 			}
 		}
 	}
+	// get symbol information for all the new directories
+	getSymbols(w.cacheDir, newdirs)
+	// assemble the new index entries
 	for k, v := range newdirs {
 		d := v[0]
+		pkg, names := processSyms(d.syms)
+		if pkg == "" {
+			continue // PJW: does this ever happen?
+		}
 		entry := Entry{
+			PkgName:    pkg,
 			Dir:        d.path,
 			ImportPath: k,
 			Version:    d.version,
+			Names:      names,
 		}
 		w.newIndex.Entries = append(w.newIndex.Entries, entry)
 	}
-	// find symbols for the incomplete entries
-	log.Print("not finding any symbols yet")
 	// sort the entries in the new index
 	slices.SortFunc(w.newIndex.Entries, func(l, r Entry) int {
 		if n := strings.Compare(l.PkgName, r.PkgName); n != 0 {
diff --git a/internal/modindex/symbols.go b/internal/modindex/symbols.go
new file mode 100644
index 00000000000..2e285ed996a
--- /dev/null
+++ b/internal/modindex/symbols.go
@@ -0,0 +1,189 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modindex
+
+import (
+	"fmt"
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"go/types"
+	"os"
+	"path/filepath"
+	"slices"
+	"strings"
+
+	"golang.org/x/sync/errgroup"
+)
+
+// The name of a symbol contains information about the symbol:
+// <name> T for types
+// <name> C for consts
+// <name> V for vars
+// and for funcs: <name> F <number of results> (<arg-name> <arg-type>)*
+// any spaces in <arg-type> are replaced by $s so that the fields
+// of the name are space separated
+type symbol struct {
+	pkg  string // name of the symbol's package
+	name string // declared name
+	kind string // T, C, V, or F
+	sig  string // signature information, for F
+}
+
+// find the symbols for the best directories
+func getSymbols(cd Abspath, dirs map[string][]*directory) {
+	var g errgroup.Group
+	g.SetLimit(-1) // maybe throttle this some day
+	for _, vv := range dirs {
+		// throttling some day?
+		d := vv[0]
+		g.Go(func() error {
+			thedir := filepath.Join(string(cd), string(d.path))
+			mode := parser.SkipObjectResolution
+
+			fi, err := os.ReadDir(thedir)
+			if err != nil {
+				return nil // log this someday?
+			}
+			for _, fx := range fi {
+				if !strings.HasSuffix(fx.Name(), ".go") || strings.HasSuffix(fx.Name(), "_test.go") {
+					continue
+				}
+				fname := filepath.Join(thedir, fx.Name())
+				tr, err := parser.ParseFile(token.NewFileSet(), fname, nil, mode)
+				if err != nil {
+					continue // ignore errors, someday log them?
+				}
+				d.syms = append(d.syms, getFileExports(tr)...)
+			}
+			return nil
+		})
+	}
+	g.Wait()
+}
+
+func getFileExports(f *ast.File) []symbol {
+	pkg := f.Name.Name
+	if pkg == "main" {
+		return nil
+	}
+	var ans []symbol
+	// should we look for //go:build ignore?
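+	// For the declarations exercised in TestLookup above, the symbols
+	// collected here are later flattened by processSyms into strings that
+	// should look roughly like:
+	//
+	//	"Foo F 0"
+	//	"FooC C"
+	//	"FooF F 1 _ int _ float"
+	//	"FooT T"
+	//	"FooV V"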
+	for _, decl := range f.Decls {
+		switch decl := decl.(type) {
+		case *ast.FuncDecl:
+			if decl.Recv != nil {
+				// ignore methods, as we are completing package selections
+				continue
+			}
+			name := decl.Name.Name
+			dtype := decl.Type
+			// not looking at dtype.TypeParams. That is, treating
+			// generic functions just like non-generic ones.
+			sig := dtype.Params
+			kind := "F"
+			result := []string{fmt.Sprintf("%d", dtype.Results.NumFields())}
+			for _, x := range sig.List {
+				// This code creates a string representing the type.
+				// TODO(pjw): it may be fragile:
+				// 1. x.Type could be nil, perhaps in ill-formed code
+				// 2. ExprString might someday change incompatibly to
+				//    include struct tags, which can be arbitrary strings
+				if x.Type == nil {
+					// Can this happen without a parse error? (Files with parse
+					// errors are ignored in getSymbols)
+					continue // maybe report this someday
+				}
+				tp := types.ExprString(x.Type)
+				if len(tp) == 0 {
+					// Can this happen?
+					continue // maybe report this someday
+				}
+				// This is only safe if ExprString never returns anything with a $
+				// The only place a $ can occur seems to be in a struct tag, which
+				// can be an arbitrary string literal, and ExprString does not presently
+				// print struct tags. So for this to happen the type of a formal parameter
+				// has to be an explicit struct, e.g. foo(x struct{a int "$"}) and ExprString
+				// would have to show the struct tag. Even testing for this case seems
+				// a waste of effort, but let's not ignore such pathologies
+				if strings.Contains(tp, "$") {
+					continue
+				}
+				tp = strings.Replace(tp, " ", "$", -1)
+				if len(x.Names) == 0 {
+					result = append(result, "_")
+					result = append(result, tp)
+				} else {
+					for _, y := range x.Names {
+						result = append(result, y.Name)
+						result = append(result, tp)
+					}
+				}
+			}
+			sigs := strings.Join(result, " ")
+			if s := newsym(pkg, name, kind, sigs); s != nil {
+				ans = append(ans, *s)
+			}
+		case *ast.GenDecl:
+			switch decl.Tok {
+			case token.CONST, token.VAR:
+				tp := "V"
+				if decl.Tok == token.CONST {
+					tp = "C"
+				}
+				for _, sp := range decl.Specs {
+					for _, x := range sp.(*ast.ValueSpec).Names {
+						if s := newsym(pkg, x.Name, tp, ""); s != nil {
+							ans = append(ans, *s)
+						}
+					}
+				}
+			case token.TYPE:
+				for _, sp := range decl.Specs {
+					if s := newsym(pkg, sp.(*ast.TypeSpec).Name.Name, "T", ""); s != nil {
+						ans = append(ans, *s)
+					}
+				}
+			}
+		}
+	}
+	return ans
+}
+
+func newsym(pkg, name, kind, sig string) *symbol {
+	if len(name) == 0 || !ast.IsExported(name) {
+		return nil
+	}
+	sym := symbol{pkg: pkg, name: name, kind: kind, sig: sig}
+	return &sym
+}
+
+// return the package name and the encoded name strings for the symbols.
+// if there are multiple packages, choose one arbitrarily
+// the returned slice is sorted lexicographically
+func processSyms(syms []symbol) (string, []string) {
+	if len(syms) == 0 {
+		return "", nil
+	}
+	slices.SortFunc(syms, func(l, r symbol) int {
+		return strings.Compare(l.name, r.name)
+	})
+	pkg := syms[0].pkg
+	var names []string
+	for _, s := range syms {
+		var nx string
+		if s.pkg == pkg {
+			if s.sig != "" {
+				nx = fmt.Sprintf("%s %s %s", s.name, s.kind, s.sig)
+			} else {
+				nx = fmt.Sprintf("%s %s", s.name, s.kind)
+			}
+			names = append(names, nx)
+		} else {
+			continue // PJW: do we want to keep track of these?
+ } + } + return pkg, names +} diff --git a/internal/packagestest/expect.go b/internal/packagestest/expect.go new file mode 100644 index 00000000000..053d8e8a9db --- /dev/null +++ b/internal/packagestest/expect.go @@ -0,0 +1,468 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packagestest + +import ( + "fmt" + "go/token" + "os" + "path/filepath" + "reflect" + "regexp" + "strings" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/expect" +) + +const ( + markMethod = "mark" + eofIdentifier = "EOF" +) + +// Expect invokes the supplied methods for all expectation notes found in +// the exported source files. +// +// All exported go source files are parsed to collect the expectation +// notes. +// See the documentation for expect.Parse for how the notes are collected +// and parsed. +// +// The methods are supplied as a map of name to function, and those functions +// will be matched against the expectations by name. +// Notes with no matching function will be skipped, and functions with no +// matching notes will not be invoked. +// If there are no registered markers yet, a special pass will be run first +// which adds any markers declared with @mark(Name, pattern) or @name. These +// call the Mark method to add the marker to the global set. +// You can register the "mark" method to override these in your own call to +// Expect. The bound Mark function is usable directly in your method map, so +// +// exported.Expect(map[string]interface{}{"mark": exported.Mark}) +// +// replicates the built in behavior. +// +// # Method invocation +// +// When invoking a method the expressions in the parameter list need to be +// converted to values to be passed to the method. +// There are a very limited set of types the arguments are allowed to be. +// +// expect.Note : passed the Note instance being evaluated. +// string : can be supplied either a string literal or an identifier. +// int : can only be supplied an integer literal. +// *regexp.Regexp : can only be supplied a regular expression literal +// token.Pos : has a file position calculated as described below. +// token.Position : has a file position calculated as described below. +// expect.Range: has a start and end position as described below. +// interface{} : will be passed any value +// +// # Position calculation +// +// There is some extra handling when a parameter is being coerced into a +// token.Pos, token.Position or Range type argument. +// +// If the parameter is an identifier, it will be treated as the name of an +// marker to look up (as if markers were global variables). +// +// If it is a string or regular expression, then it will be passed to +// expect.MatchBefore to look up a match in the line at which it was declared. +// +// It is safe to call this repeatedly with different method sets, but it is +// not safe to call it concurrently. 
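+//
+// For example, a test can register a handler for @check notes along these
+// lines (a sketch based on expect_test.go below):
+//
+//	err := exported.Expect(map[string]interface{}{
+//		"check": func(src, target token.Position) {
+//			// assert something about the two positions
+//		},
+//	})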
+func (e *Exported) Expect(methods map[string]interface{}) error { + if err := e.getNotes(); err != nil { + return err + } + if err := e.getMarkers(); err != nil { + return err + } + var err error + ms := make(map[string]method, len(methods)) + for name, f := range methods { + mi := method{f: reflect.ValueOf(f)} + mi.converters = make([]converter, mi.f.Type().NumIn()) + for i := 0; i < len(mi.converters); i++ { + mi.converters[i], err = e.buildConverter(mi.f.Type().In(i)) + if err != nil { + return fmt.Errorf("invalid method %v: %v", name, err) + } + } + ms[name] = mi + } + for _, n := range e.notes { + if n.Args == nil { + // simple identifier form, convert to a call to mark + n = &expect.Note{ + Pos: n.Pos, + Name: markMethod, + Args: []interface{}{n.Name, n.Name}, + } + } + mi, ok := ms[n.Name] + if !ok { + continue + } + params := make([]reflect.Value, len(mi.converters)) + args := n.Args + for i, convert := range mi.converters { + params[i], args, err = convert(n, args) + if err != nil { + return fmt.Errorf("%v: %v", e.ExpectFileSet.Position(n.Pos), err) + } + } + if len(args) > 0 { + return fmt.Errorf("%v: unwanted args got %+v extra", e.ExpectFileSet.Position(n.Pos), args) + } + //TODO: catch the error returned from the method + mi.f.Call(params) + } + return nil +} + +// A Range represents an interval within a source file in go/token notation. +type Range struct { + TokFile *token.File // non-nil + Start, End token.Pos // both valid and within range of TokFile +} + +// Mark adds a new marker to the known set. +func (e *Exported) Mark(name string, r Range) { + if e.markers == nil { + e.markers = make(map[string]Range) + } + e.markers[name] = r +} + +func (e *Exported) getNotes() error { + if e.notes != nil { + return nil + } + notes := []*expect.Note{} + var dirs []string + for _, module := range e.written { + for _, filename := range module { + dirs = append(dirs, filepath.Dir(filename)) + } + } + for filename := range e.Config.Overlay { + dirs = append(dirs, filepath.Dir(filename)) + } + pkgs, err := packages.Load(e.Config, dirs...) + if err != nil { + return fmt.Errorf("unable to load packages for directories %s: %v", dirs, err) + } + seen := make(map[token.Position]struct{}) + for _, pkg := range pkgs { + for _, filename := range pkg.GoFiles { + content, err := e.FileContents(filename) + if err != nil { + return err + } + l, err := expect.Parse(e.ExpectFileSet, filename, content) + if err != nil { + return fmt.Errorf("failed to extract expectations: %v", err) + } + for _, note := range l { + pos := e.ExpectFileSet.Position(note.Pos) + if _, ok := seen[pos]; ok { + continue + } + notes = append(notes, note) + seen[pos] = struct{}{} + } + } + } + if _, ok := e.written[e.primary]; !ok { + e.notes = notes + return nil + } + // Check go.mod markers regardless of mode, we need to do this so that our marker count + // matches the counts in the summary.txt.golden file for the test directory. + if gomod, found := e.written[e.primary]["go.mod"]; found { + // If we are in Modules mode, then we need to check the contents of the go.mod.temp. + if e.Exporter == Modules { + gomod += ".temp" + } + l, err := goModMarkers(e, gomod) + if err != nil { + return fmt.Errorf("failed to extract expectations for go.mod: %v", err) + } + notes = append(notes, l...) + } + e.notes = notes + return nil +} + +func goModMarkers(e *Exported, gomod string) ([]*expect.Note, error) { + if _, err := os.Stat(gomod); os.IsNotExist(err) { + // If there is no go.mod file, we want to be able to continue. 
+ return nil, nil + } + content, err := e.FileContents(gomod) + if err != nil { + return nil, err + } + if e.Exporter == GOPATH { + return expect.Parse(e.ExpectFileSet, gomod, content) + } + gomod = strings.TrimSuffix(gomod, ".temp") + // If we are in Modules mode, copy the original contents file back into go.mod + if err := os.WriteFile(gomod, content, 0644); err != nil { + return nil, nil + } + return expect.Parse(e.ExpectFileSet, gomod, content) +} + +func (e *Exported) getMarkers() error { + if e.markers != nil { + return nil + } + // set markers early so that we don't call getMarkers again from Expect + e.markers = make(map[string]Range) + return e.Expect(map[string]interface{}{ + markMethod: e.Mark, + }) +} + +var ( + noteType = reflect.TypeOf((*expect.Note)(nil)) + identifierType = reflect.TypeOf(expect.Identifier("")) + posType = reflect.TypeOf(token.Pos(0)) + positionType = reflect.TypeOf(token.Position{}) + rangeType = reflect.TypeOf(Range{}) + fsetType = reflect.TypeOf((*token.FileSet)(nil)) + regexType = reflect.TypeOf((*regexp.Regexp)(nil)) + exportedType = reflect.TypeOf((*Exported)(nil)) +) + +// converter converts from a marker's argument parsed from the comment to +// reflect values passed to the method during Invoke. +// It takes the args remaining, and returns the args it did not consume. +// This allows a converter to consume 0 args for well known types, or multiple +// args for compound types. +type converter func(*expect.Note, []interface{}) (reflect.Value, []interface{}, error) + +// method is used to track information about Invoke methods that is expensive to +// calculate so that we can work it out once rather than per marker. +type method struct { + f reflect.Value // the reflect value of the passed in method + converters []converter // the parameter converters for the method +} + +// buildConverter works out what function should be used to go from an ast expressions to a reflect +// value of the type expected by a method. +// It is called when only the target type is know, it returns converters that are flexible across +// all supported expression types for that target type. 
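+//
+// For example, for a method registered as
+//
+//	"intArg": func(n *expect.Note, i int64) { ... }
+//
+// Expect builds one converter per parameter: the *expect.Note converter
+// consumes no arguments, while the int64 converter consumes a single integer
+// literal from the note's arguments (a sketch; see expect_test.go below).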
+func (e *Exported) buildConverter(pt reflect.Type) (converter, error) { + switch { + case pt == noteType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return reflect.ValueOf(n), args, nil + }, nil + case pt == fsetType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return reflect.ValueOf(e.ExpectFileSet), args, nil + }, nil + case pt == exportedType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + return reflect.ValueOf(e), args, nil + }, nil + case pt == posType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + r, remains, err := e.rangeConverter(n, args) + if err != nil { + return reflect.Value{}, nil, err + } + return reflect.ValueOf(r.Start), remains, nil + }, nil + case pt == positionType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + r, remains, err := e.rangeConverter(n, args) + if err != nil { + return reflect.Value{}, nil, err + } + return reflect.ValueOf(e.ExpectFileSet.Position(r.Start)), remains, nil + }, nil + case pt == rangeType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + r, remains, err := e.rangeConverter(n, args) + if err != nil { + return reflect.Value{}, nil, err + } + return reflect.ValueOf(r), remains, nil + }, nil + case pt == identifierType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + switch arg := arg.(type) { + case expect.Identifier: + return reflect.ValueOf(arg), args, nil + default: + return reflect.Value{}, nil, fmt.Errorf("cannot convert %v to string", arg) + } + }, nil + + case pt == regexType: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + if _, ok := arg.(*regexp.Regexp); !ok { + return reflect.Value{}, nil, fmt.Errorf("cannot convert %v to *regexp.Regexp", arg) + } + return reflect.ValueOf(arg), args, nil + }, nil + + case pt.Kind() == reflect.String: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + switch arg := arg.(type) { + case expect.Identifier: + return reflect.ValueOf(string(arg)), args, nil + case string: + return reflect.ValueOf(arg), args, nil + default: + return reflect.Value{}, nil, fmt.Errorf("cannot convert %v to string", arg) + } + }, nil + case pt.Kind() == reflect.Int64: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + switch arg := arg.(type) { + case int64: + return reflect.ValueOf(arg), args, nil + default: + return reflect.Value{}, nil, fmt.Errorf("cannot convert %v to int", arg) + } + }, nil + case pt.Kind() == reflect.Bool: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + b, ok := arg.(bool) + if !ok { + return reflect.Value{}, nil, 
fmt.Errorf("cannot convert %v to bool", arg) + } + return reflect.ValueOf(b), args, nil + }, nil + case pt.Kind() == reflect.Slice: + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + converter, err := e.buildConverter(pt.Elem()) + if err != nil { + return reflect.Value{}, nil, err + } + result := reflect.MakeSlice(reflect.SliceOf(pt.Elem()), 0, len(args)) + for range args { + value, remains, err := converter(n, args) + if err != nil { + return reflect.Value{}, nil, err + } + result = reflect.Append(result, value) + args = remains + } + return result, args, nil + }, nil + default: + if pt.Kind() == reflect.Interface && pt.NumMethod() == 0 { + return func(n *expect.Note, args []interface{}) (reflect.Value, []interface{}, error) { + if len(args) < 1 { + return reflect.Value{}, nil, fmt.Errorf("missing argument") + } + return reflect.ValueOf(args[0]), args[1:], nil + }, nil + } + return nil, fmt.Errorf("param has unexpected type %v (kind %v)", pt, pt.Kind()) + } +} + +func (e *Exported) rangeConverter(n *expect.Note, args []interface{}) (Range, []interface{}, error) { + tokFile := e.ExpectFileSet.File(n.Pos) + if len(args) < 1 { + return Range{}, nil, fmt.Errorf("missing argument") + } + arg := args[0] + args = args[1:] + switch arg := arg.(type) { + case expect.Identifier: + // handle the special identifiers + switch arg { + case eofIdentifier: + // end of file identifier + eof := tokFile.Pos(tokFile.Size()) + return newRange(tokFile, eof, eof), args, nil + default: + // look up an marker by name + mark, ok := e.markers[string(arg)] + if !ok { + return Range{}, nil, fmt.Errorf("cannot find marker %v", arg) + } + return mark, args, nil + } + case string: + start, end, err := expect.MatchBefore(e.ExpectFileSet, e.FileContents, n.Pos, arg) + if err != nil { + return Range{}, nil, err + } + if !start.IsValid() { + return Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg) + } + return newRange(tokFile, start, end), args, nil + case *regexp.Regexp: + start, end, err := expect.MatchBefore(e.ExpectFileSet, e.FileContents, n.Pos, arg) + if err != nil { + return Range{}, nil, err + } + if !start.IsValid() { + return Range{}, nil, fmt.Errorf("%v: pattern %s did not match", e.ExpectFileSet.Position(n.Pos), arg) + } + return newRange(tokFile, start, end), args, nil + default: + return Range{}, nil, fmt.Errorf("cannot convert %v to pos", arg) + } +} + +// newRange creates a new Range from a token.File and two valid positions within it. +func newRange(file *token.File, start, end token.Pos) Range { + fileBase := file.Base() + fileEnd := fileBase + file.Size() + if !start.IsValid() { + panic("invalid start token.Pos") + } + if !end.IsValid() { + panic("invalid end token.Pos") + } + if int(start) < fileBase || int(start) > fileEnd { + panic(fmt.Sprintf("invalid start: %d not in [%d, %d]", start, fileBase, fileEnd)) + } + if int(end) < fileBase || int(end) > fileEnd { + panic(fmt.Sprintf("invalid end: %d not in [%d, %d]", end, fileBase, fileEnd)) + } + if start > end { + panic("invalid start: greater than end") + } + return Range{ + TokFile: file, + Start: start, + End: end, + } +} diff --git a/internal/packagestest/expect_test.go b/internal/packagestest/expect_test.go new file mode 100644 index 00000000000..d155f5fe9e2 --- /dev/null +++ b/internal/packagestest/expect_test.go @@ -0,0 +1,71 @@ +// Copyright 2018 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packagestest_test + +import ( + "go/token" + "testing" + + "golang.org/x/tools/internal/expect" + "golang.org/x/tools/internal/packagestest" +) + +func TestExpect(t *testing.T) { + exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{ + Name: "golang.org/fake", + Files: packagestest.MustCopyFileTree("testdata"), + }}) + defer exported.Cleanup() + checkCount := 0 + if err := exported.Expect(map[string]interface{}{ + "check": func(src, target token.Position) { + checkCount++ + }, + "boolArg": func(n *expect.Note, yes, no bool) { + if !yes { + t.Errorf("Expected boolArg first param to be true") + } + if no { + t.Errorf("Expected boolArg second param to be false") + } + }, + "intArg": func(n *expect.Note, i int64) { + if i != 42 { + t.Errorf("Expected intarg to be 42") + } + }, + "stringArg": func(n *expect.Note, name expect.Identifier, value string) { + if string(name) != value { + t.Errorf("Got string arg %v expected %v", value, name) + } + }, + "directNote": func(n *expect.Note) {}, + "range": func(r packagestest.Range) { + if r.Start == token.NoPos || r.Start == 0 { + t.Errorf("Range had no valid starting position") + } + if r.End == token.NoPos || r.End == 0 { + t.Errorf("Range had no valid ending position") + } else if r.End <= r.Start { + t.Errorf("Range ending was not greater than start") + } + }, + "checkEOF": func(n *expect.Note, p token.Pos) { + if p <= n.Pos { + t.Errorf("EOF was before the checkEOF note") + } + }, + }); err != nil { + t.Fatal(err) + } + // We expect to have walked the @check annotations in all .go files, + // including _test.go files (XTest or otherwise). But to have walked the + // non-_test.go files only once. Hence wantCheck = 3 (testdata/test.go) + 1 + // (testdata/test_test.go) + 1 (testdata/x_test.go) + wantCheck := 7 + if wantCheck != checkCount { + t.Fatalf("Expected @check count of %v; got %v", wantCheck, checkCount) + } +} diff --git a/internal/packagestest/export.go b/internal/packagestest/export.go new file mode 100644 index 00000000000..f8d10718c09 --- /dev/null +++ b/internal/packagestest/export.go @@ -0,0 +1,666 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package packagestest creates temporary projects on disk for testing go tools on. + +By changing the exporter used, you can create projects for multiple build +systems from the same description, and run the same tests on them in many +cases. + +# Example + +As an example of packagestest use, consider the following test that runs +the 'go list' command on the specified modules: + + // TestGoList exercises the 'go list' command in module mode and in GOPATH mode. + func TestGoList(t *testing.T) { packagestest.TestAll(t, testGoList) } + func testGoList(t *testing.T, x packagestest.Exporter) { + e := packagestest.Export(t, x, []packagestest.Module{ + { + Name: "gopher.example/repoa", + Files: map[string]interface{}{ + "a/a.go": "package a", + }, + }, + { + Name: "gopher.example/repob", + Files: map[string]interface{}{ + "b/b.go": "package b", + }, + }, + }) + defer e.Cleanup() + + cmd := exec.Command("go", "list", "gopher.example/...") + cmd.Dir = e.Config.Dir + cmd.Env = e.Config.Env + out, err := cmd.Output() + if err != nil { + t.Fatal(err) + } + t.Logf("'go list gopher.example/...' 
with %s mode layout:\n%s", x.Name(), out) + } + +TestGoList uses TestAll to exercise the 'go list' command with all +exporters known to packagestest. Currently, packagestest includes +exporters that produce module mode layouts and GOPATH mode layouts. +Running the test with verbose output will print: + + === RUN TestGoList + === RUN TestGoList/GOPATH + === RUN TestGoList/Modules + --- PASS: TestGoList (0.21s) + --- PASS: TestGoList/GOPATH (0.03s) + main_test.go:36: 'go list gopher.example/...' with GOPATH mode layout: + gopher.example/repoa/a + gopher.example/repob/b + --- PASS: TestGoList/Modules (0.18s) + main_test.go:36: 'go list gopher.example/...' with Modules mode layout: + gopher.example/repoa/a + gopher.example/repob/b +*/ +package packagestest + +import ( + "errors" + "flag" + "fmt" + "go/token" + "io" + "log" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/expect" + "golang.org/x/tools/internal/testenv" +) + +var ( + skipCleanup = flag.Bool("skip-cleanup", false, "Do not delete the temporary export folders") // for debugging +) + +// ErrUnsupported indicates an error due to an operation not supported on the +// current platform. +var ErrUnsupported = errors.New("operation is not supported") + +// Module is a representation of a go module. +type Module struct { + // Name is the base name of the module as it would be in the go.mod file. + Name string + // Files is the set of source files for all packages that make up the module. + // The keys are the file fragment that follows the module name, the value can + // be a string or byte slice, in which case it is the contents of the + // file, otherwise it must be a Writer function. + Files map[string]interface{} + + // Overlay is the set of source file overlays for the module. + // The keys are the file fragment as in the Files configuration. + // The values are the in memory overlay content for the file. + Overlay map[string][]byte +} + +// A Writer is a function that writes out a test file. +// It is provided the name of the file to write, and may return an error if it +// cannot write the file. +// These are used as the content of the Files map in a Module. +type Writer func(filename string) error + +// Exported is returned by the Export function to report the structure that was produced on disk. +type Exported struct { + // Config is a correctly configured packages.Config ready to be passed to packages.Load. + // Exactly what it will contain varies depending on the Exporter being used. + Config *packages.Config + + // Modules is the module description that was used to produce this exported data set. + Modules []Module + + ExpectFileSet *token.FileSet // The file set used when parsing expectations + + Exporter Exporter // the exporter used + temp string // the temporary directory that was exported to + primary string // the first non GOROOT module that was exported + written map[string]map[string]string // the full set of exported files + notes []*expect.Note // The list of expectations extracted from go source files + markers map[string]Range // The set of markers extracted from go source files +} + +// Exporter implementations are responsible for converting from the generic description of some +// test data to a driver specific file layout. +type Exporter interface { + // Name reports the name of the exporter, used in logging and sub-test generation. + Name() string + // Filename reports the system filename for test data source file. 
+ // It is given the base directory, the module the file is part of and the filename fragment to + // work from. + Filename(exported *Exported, module, fragment string) string + // Finalize is called once all files have been written to write any extra data needed and modify + // the Config to match. It is handed the full list of modules that were encountered while writing + // files. + Finalize(exported *Exported) error +} + +// All is the list of known exporters. +// This is used by TestAll to run tests with all the exporters. +var All = []Exporter{GOPATH, Modules} + +// TestAll invokes the testing function once for each exporter registered in +// the All global. +// Each exporter will be run as a sub-test named after the exporter being used. +func TestAll(t *testing.T, f func(*testing.T, Exporter)) { + t.Helper() + for _, e := range All { + e := e // in case f calls t.Parallel + t.Run(e.Name(), func(t *testing.T) { + t.Helper() + f(t, e) + }) + } +} + +// BenchmarkAll invokes the testing function once for each exporter registered in +// the All global. +// Each exporter will be run as a sub-test named after the exporter being used. +func BenchmarkAll(b *testing.B, f func(*testing.B, Exporter)) { + b.Helper() + for _, e := range All { + e := e // in case f calls t.Parallel + b.Run(e.Name(), func(b *testing.B) { + b.Helper() + f(b, e) + }) + } +} + +// Export is called to write out a test directory from within a test function. +// It takes the exporter and the build system agnostic module descriptions, and +// uses them to build a temporary directory. +// It returns an Exported with the results of the export. +// The Exported.Config is prepared for loading from the exported data. +// You must invoke Exported.Cleanup on the returned value to clean up. +// The file deletion in the cleanup can be skipped by setting the skip-cleanup +// flag when invoking the test, allowing the temporary directory to be left for +// debugging tests. +// +// If the Writer for any file within any module returns an error equivalent to +// ErrUnspported, Export skips the test. +func Export(t testing.TB, exporter Exporter, modules []Module) *Exported { + t.Helper() + if exporter == Modules { + testenv.NeedsTool(t, "go") + } + + dirname := strings.Replace(t.Name(), "/", "_", -1) + dirname = strings.Replace(dirname, "#", "_", -1) // duplicate subtests get a #NNN suffix. + temp, err := os.MkdirTemp("", dirname) + if err != nil { + t.Fatal(err) + } + exported := &Exported{ + Config: &packages.Config{ + Dir: temp, + Env: append(os.Environ(), "GOPACKAGESDRIVER=off", "GOROOT="), // Clear GOROOT to work around #32849. + Overlay: make(map[string][]byte), + Tests: true, + Mode: packages.LoadImports, + }, + Modules: modules, + Exporter: exporter, + temp: temp, + primary: modules[0].Name, + written: map[string]map[string]string{}, + ExpectFileSet: token.NewFileSet(), + } + if testing.Verbose() { + exported.Config.Logf = t.Logf + } + defer func() { + if t.Failed() || t.Skipped() { + exported.Cleanup() + } + }() + for _, module := range modules { + // Create all parent directories before individual files. If any file is a + // symlink to a directory, that directory must exist before the symlink is + // created or else it may be created with the wrong type on Windows. + // (See https://golang.org/issue/39183.) 
+ for fragment := range module.Files { + fullpath := exporter.Filename(exported, module.Name, filepath.FromSlash(fragment)) + if err := os.MkdirAll(filepath.Dir(fullpath), 0755); err != nil { + t.Fatal(err) + } + } + + for fragment, value := range module.Files { + fullpath := exporter.Filename(exported, module.Name, filepath.FromSlash(fragment)) + written, ok := exported.written[module.Name] + if !ok { + written = map[string]string{} + exported.written[module.Name] = written + } + written[fragment] = fullpath + switch value := value.(type) { + case Writer: + if err := value(fullpath); err != nil { + if errors.Is(err, ErrUnsupported) { + t.Skip(err) + } + t.Fatal(err) + } + case string: + if err := os.WriteFile(fullpath, []byte(value), 0644); err != nil { + t.Fatal(err) + } + default: + t.Fatalf("Invalid type %T in files, must be string or Writer", value) + } + } + for fragment, value := range module.Overlay { + fullpath := exporter.Filename(exported, module.Name, filepath.FromSlash(fragment)) + exported.Config.Overlay[fullpath] = value + } + } + if err := exporter.Finalize(exported); err != nil { + t.Fatal(err) + } + testenv.NeedsGoPackagesEnv(t, exported.Config.Env) + return exported +} + +// Script returns a Writer that writes out contents to the file and sets the +// executable bit on the created file. +// It is intended for source files that are shell scripts. +func Script(contents string) Writer { + return func(filename string) error { + return os.WriteFile(filename, []byte(contents), 0755) + } +} + +// Link returns a Writer that creates a hard link from the specified source to +// the required file. +// This is used to link testdata files into the generated testing tree. +// +// If hard links to source are not supported on the destination filesystem, the +// returned Writer returns an error for which errors.Is(_, ErrUnsupported) +// returns true. +func Link(source string) Writer { + return func(filename string) error { + linkErr := os.Link(source, filename) + + if linkErr != nil && !builderMustSupportLinks() { + // Probe to figure out whether Link failed because the Link operation + // isn't supported. + if stat, err := openAndStat(source); err == nil { + if err := createEmpty(filename, stat.Mode()); err == nil { + // Successfully opened the source and created the destination, + // but the result is empty and not a hard-link. + return &os.PathError{Op: "Link", Path: filename, Err: ErrUnsupported} + } + } + } + + return linkErr + } +} + +// Symlink returns a Writer that creates a symlink from the specified source to the +// required file. +// This is used to link testdata files into the generated testing tree. +// +// If symlinks to source are not supported on the destination filesystem, the +// returned Writer returns an error for which errors.Is(_, ErrUnsupported) +// returns true. +func Symlink(source string) Writer { + if !strings.HasPrefix(source, ".") { + if absSource, err := filepath.Abs(source); err == nil { + if _, err := os.Stat(source); !os.IsNotExist(err) { + source = absSource + } + } + } + return func(filename string) error { + symlinkErr := os.Symlink(source, filename) + + if symlinkErr != nil && !builderMustSupportLinks() { + // Probe to figure out whether Symlink failed because the Symlink + // operation isn't supported. + fullSource := source + if !filepath.IsAbs(source) { + // Compute the target path relative to the parent of filename, not the + // current working directory. 
+ fullSource = filepath.Join(filename, "..", source) + } + stat, err := openAndStat(fullSource) + mode := os.ModePerm + if err == nil { + mode = stat.Mode() + } else if !errors.Is(err, os.ErrNotExist) { + // We couldn't open the source, but it might exist. We don't expect to be + // able to portably create a symlink to a file we can't see. + return symlinkErr + } + + if err := createEmpty(filename, mode|0644); err == nil { + // Successfully opened the source (or verified that it does not exist) and + // created the destination, but we couldn't create it as a symlink. + // Probably the OS just doesn't support symlinks in this context. + return &os.PathError{Op: "Symlink", Path: filename, Err: ErrUnsupported} + } + } + + return symlinkErr + } +} + +// builderMustSupportLinks reports whether we are running on a Go builder +// that is known to support hard and symbolic links. +func builderMustSupportLinks() bool { + if os.Getenv("GO_BUILDER_NAME") == "" { + // Any OS can be configured to mount an exotic filesystem. + // Don't make assumptions about what users are running. + return false + } + + switch runtime.GOOS { + case "windows", "plan9": + // Some versions of Windows and all versions of plan9 do not support + // symlinks by default. + return false + + default: + // All other platforms should support symlinks by default, and our builders + // should not do anything unusual that would violate that. + return true + } +} + +// openAndStat attempts to open source for reading. +func openAndStat(source string) (os.FileInfo, error) { + src, err := os.Open(source) + if err != nil { + return nil, err + } + stat, err := src.Stat() + src.Close() + if err != nil { + return nil, err + } + return stat, nil +} + +// createEmpty creates an empty file or directory (depending on mode) +// at dst, with the same permissions as mode. +func createEmpty(dst string, mode os.FileMode) error { + if mode.IsDir() { + return os.Mkdir(dst, mode.Perm()) + } + + f, err := os.OpenFile(dst, os.O_WRONLY|os.O_CREATE|os.O_EXCL, mode.Perm()) + if err != nil { + return err + } + if err := f.Close(); err != nil { + os.Remove(dst) // best-effort + return err + } + + return nil +} + +// Copy returns a Writer that copies a file from the specified source to the +// required file. +// This is used to copy testdata files into the generated testing tree. +func Copy(source string) Writer { + return func(filename string) error { + stat, err := os.Stat(source) + if err != nil { + return err + } + if !stat.Mode().IsRegular() { + // cannot copy non-regular files (e.g., directories, + // symlinks, devices, etc.) + return fmt.Errorf("cannot copy non regular file %s", source) + } + return copyFile(filename, source, stat.Mode().Perm()) + } +} + +func copyFile(dest, source string, perm os.FileMode) error { + src, err := os.Open(source) + if err != nil { + return err + } + defer src.Close() + + dst, err := os.OpenFile(dest, os.O_WRONLY|os.O_CREATE|os.O_EXCL, perm) + if err != nil { + return err + } + + _, err = io.Copy(dst, src) + if closeErr := dst.Close(); err == nil { + err = closeErr + } + return err +} + +// GroupFilesByModules attempts to map directories to the modules within each directory. +// This function assumes that the folder is structured in the following way: +// +// dir/ +// primarymod/ +// *.go files +// packages +// go.mod (optional) +// modules/ +// repoa/ +// mod1/ +// *.go files +// packages +// go.mod (optional) +// +// It scans the directory tree anchored at root and adds a Copy writer to the +// map for every file found. 
+// This is to enable the common case in tests where you have a full copy of the +// package in your testdata. +func GroupFilesByModules(root string) ([]Module, error) { + root = filepath.FromSlash(root) + primarymodPath := filepath.Join(root, "primarymod") + + _, err := os.Stat(primarymodPath) + if os.IsNotExist(err) { + return nil, fmt.Errorf("could not find primarymod folder within %s", root) + } + + primarymod := &Module{ + Name: root, + Files: make(map[string]interface{}), + Overlay: make(map[string][]byte), + } + mods := map[string]*Module{ + root: primarymod, + } + modules := []Module{*primarymod} + + if err := filepath.Walk(primarymodPath, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + return nil + } + fragment, err := filepath.Rel(primarymodPath, path) + if err != nil { + return err + } + primarymod.Files[filepath.ToSlash(fragment)] = Copy(path) + return nil + }); err != nil { + return nil, err + } + + modulesPath := filepath.Join(root, "modules") + if _, err := os.Stat(modulesPath); os.IsNotExist(err) { + return modules, nil + } + + var currentRepo, currentModule string + updateCurrentModule := func(dir string) { + if dir == currentModule { + return + } + // Handle the case where we step into a nested directory that is a module + // and then step out into the parent which is also a module. + // Example: + // - repoa + // - moda + // - go.mod + // - v2 + // - go.mod + // - what.go + // - modb + for dir != root { + if mods[dir] != nil { + currentModule = dir + return + } + dir = filepath.Dir(dir) + } + } + + if err := filepath.Walk(modulesPath, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + enclosingDir := filepath.Dir(path) + // If the path is not a directory, then we want to add the path to + // the files map of the currentModule. + if !info.IsDir() { + updateCurrentModule(enclosingDir) + fragment, err := filepath.Rel(currentModule, path) + if err != nil { + return err + } + mods[currentModule].Files[filepath.ToSlash(fragment)] = Copy(path) + return nil + } + // If the path is a directory and it's enclosing folder is equal to + // the modules folder, then the path is a new repo. + if enclosingDir == modulesPath { + currentRepo = path + return nil + } + // If the path is a directory and it's enclosing folder is not the same + // as the current repo and it is not of the form `v1`,`v2`,... + // then the path is a folder/package of the current module. + if enclosingDir != currentRepo && !versionSuffixRE.MatchString(filepath.Base(path)) { + return nil + } + // If the path is a directory and it's enclosing folder is the current repo + // then the path is a new module. + module, err := filepath.Rel(modulesPath, path) + if err != nil { + return err + } + mods[path] = &Module{ + Name: filepath.ToSlash(module), + Files: make(map[string]interface{}), + Overlay: make(map[string][]byte), + } + currentModule = path + modules = append(modules, *mods[path]) + return nil + }); err != nil { + return nil, err + } + return modules, nil +} + +// MustCopyFileTree returns a file set for a module based on a real directory tree. +// It scans the directory tree anchored at root and adds a Copy writer to the +// map for every file found. It skips copying files in nested modules. +// This is to enable the common case in tests where you have a full copy of the +// package in your testdata. +// This will panic if there is any kind of error trying to walk the file tree. 
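+//
+// A typical call, mirroring expect_test.go above, looks something like:
+//
+//	exported := packagestest.Export(t, packagestest.GOPATH, []packagestest.Module{{
+//		Name:  "golang.org/fake",
+//		Files: packagestest.MustCopyFileTree("testdata"),
+//	}})
+//	defer exported.Cleanup()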
+func MustCopyFileTree(root string) map[string]interface{} { + result := map[string]interface{}{} + if err := filepath.Walk(filepath.FromSlash(root), func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() { + // skip nested modules. + if path != root { + if fi, err := os.Stat(filepath.Join(path, "go.mod")); err == nil && !fi.IsDir() { + return filepath.SkipDir + } + } + return nil + } + fragment, err := filepath.Rel(root, path) + if err != nil { + return err + } + result[filepath.ToSlash(fragment)] = Copy(path) + return nil + }); err != nil { + log.Panic(fmt.Sprintf("MustCopyFileTree failed: %v", err)) + } + return result +} + +// Cleanup removes the temporary directory (unless the --skip-cleanup flag was set) +// It is safe to call cleanup multiple times. +func (e *Exported) Cleanup() { + if e.temp == "" { + return + } + if *skipCleanup { + log.Printf("Skipping cleanup of temp dir: %s", e.temp) + return + } + // Make everything read-write so that the Module exporter's module cache can be deleted. + filepath.Walk(e.temp, func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + if info.IsDir() { + os.Chmod(path, 0777) + } + return nil + }) + os.RemoveAll(e.temp) // ignore errors + e.temp = "" +} + +// Temp returns the temporary directory that was generated. +func (e *Exported) Temp() string { + return e.temp +} + +// File returns the full path for the given module and file fragment. +func (e *Exported) File(module, fragment string) string { + if m := e.written[module]; m != nil { + return m[fragment] + } + return "" +} + +// FileContents returns the contents of the specified file. +// It will use the overlay if the file is present, otherwise it will read it +// from disk. +func (e *Exported) FileContents(filename string) ([]byte, error) { + if content, found := e.Config.Overlay[filename]; found { + return content, nil + } + content, err := os.ReadFile(filename) + if err != nil { + return nil, err + } + return content, nil +} diff --git a/internal/packagestest/export_test.go b/internal/packagestest/export_test.go new file mode 100644 index 00000000000..6c074216fbe --- /dev/null +++ b/internal/packagestest/export_test.go @@ -0,0 +1,234 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package packagestest_test + +import ( + "os" + "path/filepath" + "reflect" + "sort" + "testing" + + "golang.org/x/tools/internal/packagestest" +) + +var testdata = []packagestest.Module{{ + Name: "golang.org/fake1", + Files: map[string]interface{}{ + "a.go": packagestest.Symlink("testdata/a.go"), // broken symlink + "b.go": "invalid file contents", + }, + Overlay: map[string][]byte{ + "b.go": []byte("package fake1"), + "c.go": []byte("package fake1"), + }, +}, { + Name: "golang.org/fake2", + Files: map[string]interface{}{ + "other/a.go": "package fake2", + }, +}, { + Name: "golang.org/fake2/v2", + Files: map[string]interface{}{ + "other/a.go": "package fake2", + }, +}, { + Name: "golang.org/fake3@v1.0.0", + Files: map[string]interface{}{ + "other/a.go": "package fake3", + }, +}, { + Name: "golang.org/fake3@v1.1.0", + Files: map[string]interface{}{ + "other/a.go": "package fake3", + }, +}} + +type fileTest struct { + module, fragment, expect string + check func(t *testing.T, exported *packagestest.Exported, filename string) +} + +func checkFiles(t *testing.T, exported *packagestest.Exported, tests []fileTest) { + for _, test := range tests { + expect := filepath.Join(exported.Temp(), filepath.FromSlash(test.expect)) + got := exported.File(test.module, test.fragment) + if got == "" { + t.Errorf("File %v missing from the output", expect) + } else if got != expect { + t.Errorf("Got file %v, expected %v", got, expect) + } + if test.check != nil { + test.check(t, exported, got) + } + } +} + +func checkLink(expect string) func(t *testing.T, exported *packagestest.Exported, filename string) { + expect = filepath.FromSlash(expect) + return func(t *testing.T, exported *packagestest.Exported, filename string) { + if target, err := os.Readlink(filename); err != nil { + t.Errorf("Error checking link %v: %v", filename, err) + } else if target != expect { + t.Errorf("Link %v does not match, got %v expected %v", filename, target, expect) + } + } +} + +func checkContent(expect string) func(t *testing.T, exported *packagestest.Exported, filename string) { + return func(t *testing.T, exported *packagestest.Exported, filename string) { + if content, err := exported.FileContents(filename); err != nil { + t.Errorf("Error reading %v: %v", filename, err) + } else if string(content) != expect { + t.Errorf("Content of %v does not match, got %v expected %v", filename, string(content), expect) + } + } +} + +func TestGroupFilesByModules(t *testing.T) { + for _, tt := range []struct { + testdir string + want []packagestest.Module + }{ + { + testdir: "testdata/groups/one", + want: []packagestest.Module{ + { + Name: "testdata/groups/one", + Files: map[string]interface{}{ + "main.go": true, + }, + }, + { + Name: "example.com/extra", + Files: map[string]interface{}{ + "help.go": true, + }, + }, + }, + }, + { + testdir: "testdata/groups/two", + want: []packagestest.Module{ + { + Name: "testdata/groups/two", + Files: map[string]interface{}{ + "main.go": true, + "expect/yo.go": true, + "expect/yo_test.go": true, + }, + }, + { + Name: "example.com/extra", + Files: map[string]interface{}{ + "yo.go": true, + "geez/help.go": true, + }, + }, + { + Name: "example.com/extra/v2", + Files: map[string]interface{}{ + "me.go": true, + "geez/help.go": true, + }, + }, + { + Name: "example.com/tempmod", + Files: map[string]interface{}{ + "main.go": true, + }, + }, + { + Name: "example.com/what@v1.0.0", + Files: map[string]interface{}{ + "main.go": true, + }, + }, + { + Name: "example.com/what@v1.1.0", + Files: map[string]interface{}{ + 
"main.go": true, + }, + }, + }, + }, + } { + t.Run(tt.testdir, func(t *testing.T) { + got, err := packagestest.GroupFilesByModules(tt.testdir) + if err != nil { + t.Fatalf("could not group files %v", err) + } + if len(got) != len(tt.want) { + t.Fatalf("%s: wanted %d modules but got %d", tt.testdir, len(tt.want), len(got)) + } + for i, w := range tt.want { + g := got[i] + if filepath.FromSlash(g.Name) != filepath.FromSlash(w.Name) { + t.Fatalf("%s: wanted module[%d].Name to be %s but got %s", tt.testdir, i, filepath.FromSlash(w.Name), filepath.FromSlash(g.Name)) + } + for fh := range w.Files { + if _, ok := g.Files[fh]; !ok { + t.Fatalf("%s, module[%d]: wanted %s but could not find", tt.testdir, i, fh) + } + } + for fh := range g.Files { + if _, ok := w.Files[fh]; !ok { + t.Fatalf("%s, module[%d]: found unexpected file %s", tt.testdir, i, fh) + } + } + } + }) + } +} + +func TestMustCopyFiles(t *testing.T) { + // Create the following test directory structure in a temporary directory. + src := map[string]string{ + // copies all files under the specified directory. + "go.mod": "module example.com", + "m.go": "package m", + "a/a.go": "package a", + // contents from a nested module shouldn't be copied. + "nested/go.mod": "module example.com/nested", + "nested/m.go": "package nested", + "nested/b/b.go": "package b", + } + + tmpDir, err := os.MkdirTemp("", t.Name()) + if err != nil { + t.Fatalf("failed to create a temporary directory: %v", err) + } + defer os.RemoveAll(tmpDir) + + for fragment, contents := range src { + fullpath := filepath.Join(tmpDir, filepath.FromSlash(fragment)) + if err := os.MkdirAll(filepath.Dir(fullpath), 0755); err != nil { + t.Fatal(err) + } + if err := os.WriteFile(fullpath, []byte(contents), 0644); err != nil { + t.Fatal(err) + } + } + + copied := packagestest.MustCopyFileTree(tmpDir) + var got []string + for fragment := range copied { + got = append(got, filepath.ToSlash(fragment)) + } + want := []string{"go.mod", "m.go", "a/a.go"} + + sort.Strings(got) + sort.Strings(want) + if !reflect.DeepEqual(got, want) { + t.Errorf("packagestest.MustCopyFileTree = %v, want %v", got, want) + } + + // packagestest.Export is happy. + exported := packagestest.Export(t, packagestest.Modules, []packagestest.Module{{ + Name: "example.com", + Files: packagestest.MustCopyFileTree(tmpDir), + }}) + defer exported.Cleanup() +} diff --git a/internal/packagestest/gopath.go b/internal/packagestest/gopath.go new file mode 100644 index 00000000000..c2e57a1545c --- /dev/null +++ b/internal/packagestest/gopath.go @@ -0,0 +1,77 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packagestest + +import ( + "path" + "path/filepath" +) + +// GOPATH is the exporter that produces GOPATH layouts. +// Each "module" is put in it's own GOPATH entry to help test complex cases. 
+// Given the two files +// +// golang.org/repoa#a/a.go +// golang.org/repob#b/b.go +// +// You would get the directory layout +// +// /sometemporarydirectory +// ├── repoa +// │ └── src +// │ └── golang.org +// │ └── repoa +// │ └── a +// │ └── a.go +// └── repob +// └── src +// └── golang.org +// └── repob +// └── b +// └── b.go +// +// GOPATH would be set to +// +// /sometemporarydirectory/repoa;/sometemporarydirectory/repob +// +// and the working directory would be +// +// /sometemporarydirectory/repoa/src +var GOPATH = gopath{} + +type gopath struct{} + +func (gopath) Name() string { + return "GOPATH" +} + +func (gopath) Filename(exported *Exported, module, fragment string) string { + return filepath.Join(gopathDir(exported, module), "src", module, fragment) +} + +func (gopath) Finalize(exported *Exported) error { + exported.Config.Env = append(exported.Config.Env, "GO111MODULE=off") + gopath := "" + for module := range exported.written { + if gopath != "" { + gopath += string(filepath.ListSeparator) + } + dir := gopathDir(exported, module) + gopath += dir + if module == exported.primary { + exported.Config.Dir = filepath.Join(dir, "src") + } + } + exported.Config.Env = append(exported.Config.Env, "GOPATH="+gopath) + return nil +} + +func gopathDir(exported *Exported, module string) string { + dir := path.Base(module) + if versionSuffixRE.MatchString(dir) { + dir = path.Base(path.Dir(module)) + "_" + dir + } + return filepath.Join(exported.temp, dir) +} diff --git a/internal/packagestest/gopath_test.go b/internal/packagestest/gopath_test.go new file mode 100644 index 00000000000..fa9f7e545eb --- /dev/null +++ b/internal/packagestest/gopath_test.go @@ -0,0 +1,28 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packagestest_test + +import ( + "path/filepath" + "testing" + + "golang.org/x/tools/internal/packagestest" +) + +func TestGOPATHExport(t *testing.T) { + exported := packagestest.Export(t, packagestest.GOPATH, testdata) + defer exported.Cleanup() + // Check that the cfg contains all the right bits + var expectDir = filepath.Join(exported.Temp(), "fake1", "src") + if exported.Config.Dir != expectDir { + t.Errorf("Got working directory %v expected %v", exported.Config.Dir, expectDir) + } + checkFiles(t, exported, []fileTest{ + {"golang.org/fake1", "a.go", "fake1/src/golang.org/fake1/a.go", checkLink("testdata/a.go")}, + {"golang.org/fake1", "b.go", "fake1/src/golang.org/fake1/b.go", checkContent("package fake1")}, + {"golang.org/fake2", "other/a.go", "fake2/src/golang.org/fake2/other/a.go", checkContent("package fake2")}, + {"golang.org/fake2/v2", "other/a.go", "fake2_v2/src/golang.org/fake2/v2/other/a.go", checkContent("package fake2")}, + }) +} diff --git a/internal/packagestest/modules.go b/internal/packagestest/modules.go new file mode 100644 index 00000000000..0c8d3d8fec9 --- /dev/null +++ b/internal/packagestest/modules.go @@ -0,0 +1,223 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packagestest + +import ( + "bytes" + "context" + "fmt" + "os" + "path" + "path/filepath" + "regexp" + "strings" + + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/proxydir" +) + +// Modules is the exporter that produces module layouts. 
+// Each "repository" is put in its own module, and the module file generated +// will have replace directives for all other modules. +// Given the two files +// +// golang.org/repoa#a/a.go +// golang.org/repob#b/b.go +// +// You would get the directory layout +// +// /sometemporarydirectory +// ├── repoa +// │ ├── a +// │ │ └── a.go +// │ └── go.mod +// └── repob +// ├── b +// │ └── b.go +// └── go.mod +// +// and the working directory would be +// +// /sometemporarydirectory/repoa +var Modules = modules{} + +type modules struct{} + +type moduleAtVersion struct { + module string + version string +} + +func (modules) Name() string { + return "Modules" +} + +func (modules) Filename(exported *Exported, module, fragment string) string { + if module == exported.primary { + return filepath.Join(primaryDir(exported), fragment) + } + return filepath.Join(moduleDir(exported, module), fragment) +} + +func (modules) Finalize(exported *Exported) error { + // Write out the primary module. This module can use symlinks and + // other weird stuff, and will be the working dir for the go command. + // It depends on all the other modules. + primaryDir := primaryDir(exported) + if err := os.MkdirAll(primaryDir, 0755); err != nil { + return err + } + exported.Config.Dir = primaryDir + if exported.written[exported.primary] == nil { + exported.written[exported.primary] = make(map[string]string) + } + + // Create a map of modulepath -> {module, version} for modulepaths + // that are of the form `repoa/mod1@v1.1.0`. + versions := make(map[string]moduleAtVersion) + for module := range exported.written { + if splt := strings.Split(module, "@"); len(splt) > 1 { + versions[module] = moduleAtVersion{ + module: splt[0], + version: splt[1], + } + } + } + + // If the primary module already has a go.mod, write the contents to a temp + // go.mod for now and then we will reset it when we are getting all the markers. + if gomod := exported.written[exported.primary]["go.mod"]; gomod != "" { + contents, err := os.ReadFile(gomod) + if err != nil { + return err + } + if err := os.WriteFile(gomod+".temp", contents, 0644); err != nil { + return err + } + } + + exported.written[exported.primary]["go.mod"] = filepath.Join(primaryDir, "go.mod") + var primaryGomod bytes.Buffer + fmt.Fprintf(&primaryGomod, "module %s\nrequire (\n", exported.primary) + for other := range exported.written { + if other == exported.primary { + continue + } + version := moduleVersion(other) + // If other is of the form `repo1/mod1@v1.1.0`, + // then we need to extract the module and the version. + if v, ok := versions[other]; ok { + other = v.module + version = v.version + } + fmt.Fprintf(&primaryGomod, "\t%v %v\n", other, version) + } + fmt.Fprintf(&primaryGomod, ")\n") + if err := os.WriteFile(filepath.Join(primaryDir, "go.mod"), primaryGomod.Bytes(), 0644); err != nil { + return err + } + + // Create the mod cache so we can rename it later, even if we don't need it. + if err := os.MkdirAll(modCache(exported), 0755); err != nil { + return err + } + + // Write out the go.mod files for the other modules. + for module, files := range exported.written { + if module == exported.primary { + continue + } + dir := moduleDir(exported, module) + modfile := filepath.Join(dir, "go.mod") + // If other is of the form `repo1/mod1@v1.1.0`, + // then we need to extract the module name without the version. 
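+		// e.g. a written key of "golang.org/fake3@v1.1.0" (see the fake3
+		// entries in the export_test.go testdata above) maps to module
+		// "golang.org/fake3" at version "v1.1.0".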
+ if v, ok := versions[module]; ok { + module = v.module + } + if err := os.WriteFile(modfile, []byte("module "+module+"\n"), 0644); err != nil { + return err + } + files["go.mod"] = modfile + } + + // Zip up all the secondary modules into the proxy dir. + modProxyDir := filepath.Join(exported.temp, "modproxy") + for module, files := range exported.written { + if module == exported.primary { + continue + } + version := moduleVersion(module) + // If other is of the form `repo1/mod1@v1.1.0`, + // then we need to extract the module and the version. + if v, ok := versions[module]; ok { + module = v.module + version = v.version + } + if err := writeModuleFiles(modProxyDir, module, version, files); err != nil { + return fmt.Errorf("creating module proxy dir for %v: %v", module, err) + } + } + + // Discard the original mod cache dir, which contained the files written + // for us by Export. + if err := os.Rename(modCache(exported), modCache(exported)+".orig"); err != nil { + return err + } + exported.Config.Env = append(exported.Config.Env, + "GO111MODULE=on", + "GOPATH="+filepath.Join(exported.temp, "modcache"), + "GOMODCACHE=", + "GOPROXY="+proxydir.ToURL(modProxyDir), + "GOSUMDB=off", + ) + + // Run go mod download to recreate the mod cache dir with all the extra + // stuff in cache. All the files created by Export should be recreated. + inv := gocommand.Invocation{ + Verb: "mod", + Args: []string{"download", "all"}, + Env: exported.Config.Env, + BuildFlags: exported.Config.BuildFlags, + WorkingDir: exported.Config.Dir, + } + _, err := new(gocommand.Runner).Run(context.Background(), inv) + return err +} + +func writeModuleFiles(rootDir, module, ver string, filePaths map[string]string) error { + fileData := make(map[string][]byte) + for name, path := range filePaths { + contents, err := os.ReadFile(path) + if err != nil { + return err + } + fileData[name] = contents + } + return proxydir.WriteModuleVersion(rootDir, module, ver, fileData) +} + +func modCache(exported *Exported) string { + return filepath.Join(exported.temp, "modcache/pkg/mod") +} + +func primaryDir(exported *Exported) string { + return filepath.Join(exported.temp, path.Base(exported.primary)) +} + +func moduleDir(exported *Exported, module string) string { + if strings.Contains(module, "@") { + return filepath.Join(modCache(exported), module) + } + return filepath.Join(modCache(exported), path.Dir(module), path.Base(module)+"@"+moduleVersion(module)) +} + +var versionSuffixRE = regexp.MustCompile(`v\d+`) + +func moduleVersion(module string) string { + if versionSuffixRE.MatchString(path.Base(module)) { + return path.Base(module) + ".0.0" + } + return "v1.0.0" +} diff --git a/internal/packagestest/modules_test.go b/internal/packagestest/modules_test.go new file mode 100644 index 00000000000..a1beeed7ac3 --- /dev/null +++ b/internal/packagestest/modules_test.go @@ -0,0 +1,32 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package packagestest_test + +import ( + "path/filepath" + "testing" + + "golang.org/x/tools/internal/packagestest" +) + +func TestModulesExport(t *testing.T) { + exported := packagestest.Export(t, packagestest.Modules, testdata) + defer exported.Cleanup() + // Check that the cfg contains all the right bits + var expectDir = filepath.Join(exported.Temp(), "fake1") + if exported.Config.Dir != expectDir { + t.Errorf("Got working directory %v expected %v", exported.Config.Dir, expectDir) + } + checkFiles(t, exported, []fileTest{ + {"golang.org/fake1", "go.mod", "fake1/go.mod", nil}, + {"golang.org/fake1", "a.go", "fake1/a.go", checkLink("testdata/a.go")}, + {"golang.org/fake1", "b.go", "fake1/b.go", checkContent("package fake1")}, + {"golang.org/fake2", "go.mod", "modcache/pkg/mod/golang.org/fake2@v1.0.0/go.mod", nil}, + {"golang.org/fake2", "other/a.go", "modcache/pkg/mod/golang.org/fake2@v1.0.0/other/a.go", checkContent("package fake2")}, + {"golang.org/fake2/v2", "other/a.go", "modcache/pkg/mod/golang.org/fake2/v2@v2.0.0/other/a.go", checkContent("package fake2")}, + {"golang.org/fake3@v1.1.0", "other/a.go", "modcache/pkg/mod/golang.org/fake3@v1.1.0/other/a.go", checkContent("package fake3")}, + {"golang.org/fake3@v1.0.0", "other/a.go", "modcache/pkg/mod/golang.org/fake3@v1.0.0/other/a.go", nil}, + }) +} diff --git a/internal/packagestest/testdata/groups/one/modules/example.com/extra/help.go b/internal/packagestest/testdata/groups/one/modules/example.com/extra/help.go new file mode 100644 index 00000000000..ee032937550 --- /dev/null +++ b/internal/packagestest/testdata/groups/one/modules/example.com/extra/help.go @@ -0,0 +1 @@ +package extra \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/one/primarymod/main.go b/internal/packagestest/testdata/groups/one/primarymod/main.go new file mode 100644 index 00000000000..54fe6e8b326 --- /dev/null +++ b/internal/packagestest/testdata/groups/one/primarymod/main.go @@ -0,0 +1 @@ +package one \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/help.go b/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/help.go new file mode 100644 index 00000000000..930ffdc81fe --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/extra/geez/help.go @@ -0,0 +1 @@ +package example.com/extra/geez \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/help.go b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/help.go new file mode 100644 index 00000000000..930ffdc81fe --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/geez/help.go @@ -0,0 +1 @@ +package example.com/extra/geez \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/me.go b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/me.go new file mode 100644 index 00000000000..6a8c7d31f24 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/extra/v2/me.go @@ -0,0 +1 @@ +package example.com/extra \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/extra/yo.go b/internal/packagestest/testdata/groups/two/modules/example.com/extra/yo.go new file mode 100644 index 00000000000..6a8c7d31f24 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/extra/yo.go @@ -0,0 +1 @@ 
+package example.com/extra \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/main.go b/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/main.go new file mode 100644 index 00000000000..85dbfa7cf31 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/tempmod/main.go @@ -0,0 +1 @@ +package example.com/tempmod \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/main.go b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/main.go new file mode 100644 index 00000000000..4723ee64bb1 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.0.0/main.go @@ -0,0 +1 @@ +package example.com/what \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/main.go b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/main.go new file mode 100644 index 00000000000..4723ee64bb1 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/modules/example.com/what@v1.1.0/main.go @@ -0,0 +1 @@ +package example.com/what \ No newline at end of file diff --git a/internal/packagestest/testdata/groups/two/primarymod/expect/yo.go b/internal/packagestest/testdata/groups/two/primarymod/expect/yo.go new file mode 100644 index 00000000000..bce2d30e094 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/primarymod/expect/yo.go @@ -0,0 +1,3 @@ +package expect + +var X int //@check("X", "X") diff --git a/internal/packagestest/testdata/groups/two/primarymod/expect/yo_test.go b/internal/packagestest/testdata/groups/two/primarymod/expect/yo_test.go new file mode 100644 index 00000000000..a8b06126582 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/primarymod/expect/yo_test.go @@ -0,0 +1,10 @@ +package expect_test + +import ( + "testdata/groups/two/expect" + "testing" +) + +func TestX(t *testing.T) { + _ = expect.X //@check("X", "X") +} diff --git a/internal/packagestest/testdata/groups/two/primarymod/main.go b/internal/packagestest/testdata/groups/two/primarymod/main.go new file mode 100644 index 00000000000..0b263348651 --- /dev/null +++ b/internal/packagestest/testdata/groups/two/primarymod/main.go @@ -0,0 +1 @@ +package two \ No newline at end of file diff --git a/internal/packagestest/testdata/test.go b/internal/packagestest/testdata/test.go new file mode 100644 index 00000000000..13fc12b9fae --- /dev/null +++ b/internal/packagestest/testdata/test.go @@ -0,0 +1,24 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fake1 + +// This is a test file for the behaviors in Exported.Expect. 
+ +type AThing string //@AThing,mark(StringThing, "AThing"),mark(REThing,re`.T.*g`) + +type Match string //@check("Match",re`[[:upper:]]`) + +//@check(AThing, StringThing) +//@check(AThing, REThing) + +//@boolArg(true, false) +//@intArg(42) +//@stringArg(PlainString, "PlainString") +//@stringArg(IdentAsString,IdentAsString) +//@directNote() +//@range(AThing) + +// The following test should remain at the bottom of the file +//@checkEOF(EOF) diff --git a/internal/packagestest/testdata/test_test.go b/internal/packagestest/testdata/test_test.go new file mode 100644 index 00000000000..18b20805f95 --- /dev/null +++ b/internal/packagestest/testdata/test_test.go @@ -0,0 +1,3 @@ +package fake1 + +type ATestType string //@check("ATestType","ATestType") diff --git a/internal/packagestest/testdata/x_test.go b/internal/packagestest/testdata/x_test.go new file mode 100644 index 00000000000..c8c4fa25343 --- /dev/null +++ b/internal/packagestest/testdata/x_test.go @@ -0,0 +1,3 @@ +package fake1_test + +type AnXTestType string //@check("AnXTestType","AnXTestType") diff --git a/internal/refactor/inline/escape.go b/internal/refactor/inline/escape.go index a3f5e555e9f..45cce11a9e2 100644 --- a/internal/refactor/inline/escape.go +++ b/internal/refactor/inline/escape.go @@ -41,10 +41,13 @@ func escape(info *types.Info, root ast.Node, f func(v *types.Var, escapes bool)) // // We must traverse the normal terms and check // whether any of them is an array. + // + // We assume TypeOf returns non-nil. if _, ok := info.TypeOf(e.X).Underlying().(*types.Array); ok { lvalue(e.X, escapes) // &a[i] on array } case *ast.SelectorExpr: + // We assume TypeOf returns non-nil. if _, ok := info.TypeOf(e.X).Underlying().(*types.Struct); ok { lvalue(e.X, escapes) // &s.f on struct } diff --git a/internal/refactor/inline/inline_test.go b/internal/refactor/inline/inline_test.go index a64c806569b..8da5fa98cd3 100644 --- a/internal/refactor/inline/inline_test.go +++ b/internal/refactor/inline/inline_test.go @@ -23,10 +23,10 @@ import ( "unsafe" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/expect" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/diff" + "golang.org/x/tools/internal/expect" "golang.org/x/tools/internal/refactor/inline" "golang.org/x/tools/internal/testenv" "golang.org/x/tools/txtar" @@ -86,7 +86,7 @@ func TestData(t *testing.T) { for _, pkg := range pkgs { for _, file := range pkg.Syntax { // Read file content (for @inline regexp, and inliner). - content, err := os.ReadFile(pkg.Fset.File(file.Pos()).Name()) + content, err := os.ReadFile(pkg.Fset.File(file.FileStart).Name()) if err != nil { t.Error(err) continue @@ -95,7 +95,7 @@ func TestData(t *testing.T) { // Read and process @inline notes. notes, err := expect.ExtractGo(pkg.Fset, file) if err != nil { - t.Errorf("parsing notes in %q: %v", pkg.Fset.File(file.Pos()).Name(), err) + t.Errorf("parsing notes in %q: %v", pkg.Fset.File(file.FileStart).Name(), err) continue } for _, note := range notes { @@ -157,7 +157,7 @@ func doInlineNote(logf func(string, ...any), pkg *packages.Package, file *ast.Fi // Find extent of pattern match within commented line. 
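Both the //@ notes in the testdata above and the @inline notes read by inline_test.go go through internal/expect. A minimal sketch of that extraction pattern (test name and file content invented; only code inside x/tools can import the internal package):

	package example // hypothetical

	import (
		"fmt"
		"go/parser"
		"go/token"
		"testing"

		"golang.org/x/tools/internal/expect"
	)

	func TestListNotes(t *testing.T) {
		const src = "package p\n\nvar X int //@mark(X, \"X\")\n"
		fset := token.NewFileSet()
		// ParseComments is required so the //@ comments reach the extractor.
		f, err := parser.ParseFile(fset, "a.go", src, parser.ParseComments)
		if err != nil {
			t.Fatal(err)
		}
		notes, err := expect.ExtractGo(fset, f) // one Note per //@ comment
		if err != nil {
			t.Fatal(err)
		}
		for _, note := range notes {
			fmt.Printf("%v: %s%v\n", fset.Position(note.Pos), note.Name, note.Args)
		}
	}
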
 	var startPos, endPos token.Pos
 	{
-		tokFile := pkg.Fset.File(file.Pos())
+		tokFile := pkg.Fset.File(file.FileStart)
 		lineStartOffset := int(tokFile.LineStart(posn.Line)) - tokFile.Base()
 		line := content[lineStartOffset:]
 		if i := bytes.IndexByte(line, '\n'); i >= 0 {
diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go
index d4a17ce039a..70c186b13b5 100644
--- a/internal/testenv/testenv.go
+++ b/internal/testenv/testenv.go
@@ -8,6 +8,7 @@ package testenv
 
 import (
 	"bytes"
+	"context"
 	"fmt"
 	"go/build"
 	"os"
@@ -21,6 +22,7 @@ import (
 	"time"
 
 	"golang.org/x/mod/modfile"
+	"golang.org/x/tools/internal/gocommand"
 	"golang.org/x/tools/internal/goroot"
 )
 
@@ -323,6 +325,36 @@ func Go1Point() int {
 	panic("bad release tags")
 }
 
+// NeedsGoCommand1Point skips t if the ambient go command version in the PATH
+// of the current process is older than 1.x.
+//
+// NeedsGoCommand1Point memoizes the result of running the go command, so
+// should be called after all mutations of PATH.
+func NeedsGoCommand1Point(t testing.TB, x int) {
+	NeedsTool(t, "go")
+	go1point, err := goCommand1Point()
+	if err != nil {
+		panic(fmt.Sprintf("unable to determine go version: %v", err))
+	}
+	if go1point < x {
+		t.Helper()
+		t.Skipf("go command is version 1.%d, older than required 1.%d", go1point, x)
+	}
+}
+
+var (
+	goCommand1PointOnce sync.Once
+	goCommand1Point_    int
+	goCommand1PointErr  error
+)
+
+func goCommand1Point() (int, error) {
+	goCommand1PointOnce.Do(func() {
+		goCommand1Point_, goCommand1PointErr = gocommand.GoVersion(context.Background(), gocommand.Invocation{}, new(gocommand.Runner))
+	})
+	return goCommand1Point_, goCommand1PointErr
+}
+
 // NeedsGo1Point skips t if the Go version used to run the test is older than
 // 1.x.
 func NeedsGo1Point(t testing.TB, x int) {
@@ -332,6 +364,23 @@ func NeedsGo1Point(t testing.TB, x int) {
 	}
 }
 
+// SkipAfterGoCommand1Point skips t if the ambient go command version in the PATH of
+// the current process is newer than 1.x.
+//
+// SkipAfterGoCommand1Point memoizes the result of running the go command, so
+// should be called after any mutation of PATH.
+func SkipAfterGoCommand1Point(t testing.TB, x int) {
+	NeedsTool(t, "go")
+	go1point, err := goCommand1Point()
+	if err != nil {
+		panic(fmt.Sprintf("unable to determine go version: %v", err))
+	}
+	if go1point > x {
+		t.Helper()
+		t.Skipf("go command is version 1.%d, newer than maximum 1.%d", go1point, x)
+	}
+}
+
 // SkipAfterGo1Point skips t if the Go version used to run the test is newer than
 // 1.x.
 func SkipAfterGo1Point(t testing.TB, x int) {
@@ -490,3 +539,17 @@ func NeedsGoExperiment(t testing.TB, flag string) {
 		t.Skipf("skipping test: flag %q is not set in GOEXPERIMENT=%q", flag, goexp)
 	}
 }
+
+// NeedsGOROOTDir skips the test if GOROOT/dir does not exist, and GOROOT is a
+// released version of Go (=has a VERSION file). Some GOROOT directories are
+// removed by cmd/distpack.
+//
+// See also golang/go#70081.
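A hypothetical test showing how the two new helpers above are meant to be used; unlike NeedsGo1Point, which looks at the Go version used to run the test, they inspect the go command found in PATH (the NeedsGOROOTDir helper introduced above continues below):

	package example // hypothetical test file inside x/tools

	import (
		"testing"

		"golang.org/x/tools/internal/testenv"
	)

	// TestUsesModernGoCommand is invented; the version bounds are examples only.
	func TestUsesModernGoCommand(t *testing.T) {
		testenv.NeedsGoCommand1Point(t, 24)     // skip if go in PATH is older than 1.24
		testenv.SkipAfterGoCommand1Point(t, 25) // skip if go in PATH is newer than 1.25

		// ... body that depends on go command behavior present only in 1.24 and 1.25 ...
	}
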
+func NeedsGOROOTDir(t *testing.T, dir string) { + gorootTest := filepath.Join(GOROOT(t), dir) + if _, err := os.Stat(gorootTest); os.IsNotExist(err) { + if _, err := os.Stat(filepath.Join(GOROOT(t), "VERSION")); err == nil { + t.Skipf("skipping: GOROOT/%s not present", dir) + } + } +} diff --git a/internal/typeparams/free.go b/internal/typeparams/free.go index 358108268b4..0ade5c2949e 100644 --- a/internal/typeparams/free.go +++ b/internal/typeparams/free.go @@ -6,6 +6,8 @@ package typeparams import ( "go/types" + + "golang.org/x/tools/internal/aliases" ) // Free is a memoization of the set of free type parameters within a @@ -36,6 +38,18 @@ func (w *Free) Has(typ types.Type) (res bool) { break case *types.Alias: + if aliases.TypeParams(t).Len() > aliases.TypeArgs(t).Len() { + return true // This is an uninstantiated Alias. + } + // The expansion of an alias can have free type parameters, + // whether or not the alias itself has type parameters: + // + // func _[K comparable]() { + // type Set = map[K]bool // free(Set) = {K} + // type MapTo[V] = map[K]V // free(Map[foo]) = {V} + // } + // + // So, we must Unalias. return w.Has(types.Unalias(t)) case *types.Array: @@ -96,9 +110,8 @@ func (w *Free) Has(typ types.Type) (res bool) { case *types.Named: args := t.TypeArgs() - // TODO(taking): this does not match go/types/infer.go. Check with rfindley. if params := t.TypeParams(); params.Len() > args.Len() { - return true + return true // this is an uninstantiated named type. } for i, n := 0, args.Len(); i < n; i++ { if w.Has(args.At(i)) { diff --git a/internal/typeparams/free_test.go b/internal/typeparams/free_test.go index b73a8238be3..5ba2779c6ba 100644 --- a/internal/typeparams/free_test.go +++ b/internal/typeparams/free_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//go:debug gotypesalias=1 + package typeparams import ( @@ -10,6 +12,8 @@ import ( "go/token" "go/types" "testing" + + "golang.org/x/tools/internal/testenv" ) func TestFree(t *testing.T) { @@ -71,3 +75,51 @@ func (v *V[T]) Push(x T) { *v = append(*v, x) } } } } + +func TestFree124(t *testing.T) { + testenv.NeedsGo1Point(t, 24) + const source = ` +package P + +func Within[T any]() { + type p[V []T] = int + + type q[V any] = T + + var end int // end provides a position to test at. 
+ _ = end +} +` + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "hello.go", source, 0) + if err != nil { + t.Fatal(err) + } + + var conf types.Config + pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + for _, test := range []struct { + expr string // type expression + want bool // expected value + }{ + {"p", true}, // not an instantiation + {"p[[]T]", false}, // is an instantiation + {"q[int]", true}, + } { + pos := pkg.Scope().Lookup("Within").(*types.Func).Scope().Lookup("end").Pos() + tv, err := types.Eval(fset, pkg, pos, test.expr) + if err != nil { + t.Errorf("Eval(%s) failed: %v", test.expr, err) + } + + if got := new(Free).Has(tv.Type); got != test.want { + t.Logf("Eval(%s) returned the type %s", test.expr, tv.Type) + t.Errorf("isParameterized(%s) = %v, want %v", test.expr, got, test.want) + } + } +} diff --git a/internal/typeparams/genericfeatures/features.go b/internal/typeparams/genericfeatures/features.go index 236d4bb9b56..af7385ff2db 100644 --- a/internal/typeparams/genericfeatures/features.go +++ b/internal/typeparams/genericfeatures/features.go @@ -80,7 +80,7 @@ func ForPackage(inspect *inspector.Inspector, info *types.Info) Features { direct |= GenericFuncDecls } case *ast.InterfaceType: - tv := info.Types[n] + tv := info.Types[n] // may be zero if iface, _ := tv.Type.(*types.Interface); iface != nil && !iface.IsMethodSet() { direct |= EmbeddedTypeSets } diff --git a/internal/typesinternal/types.go b/internal/typesinternal/types.go index 83923286120..df3ea521254 100644 --- a/internal/typesinternal/types.go +++ b/internal/typesinternal/types.go @@ -11,6 +11,8 @@ import ( "go/types" "reflect" "unsafe" + + "golang.org/x/tools/internal/aliases" ) func SetUsesCgo(conf *types.Config) bool { @@ -63,3 +65,57 @@ func NameRelativeTo(pkg *types.Package) types.Qualifier { return other.Name() } } + +// A NamedOrAlias is a [types.Type] that is named (as +// defined by the spec) and capable of bearing type parameters: it +// abstracts aliases ([types.Alias]) and defined types +// ([types.Named]). +// +// Every type declared by an explicit "type" declaration is a +// NamedOrAlias. (Built-in type symbols may additionally +// have type [types.Basic], which is not a NamedOrAlias, +// though the spec regards them as "named".) +// +// NamedOrAlias cannot expose the Origin method, because +// [types.Alias.Origin] and [types.Named.Origin] have different +// (covariant) result types; use [Origin] instead. +type NamedOrAlias interface { + types.Type + Obj() *types.TypeName +} + +// TypeParams is a light shim around t.TypeParams(). +// (go/types.Alias).TypeParams requires >= 1.23. +func TypeParams(t NamedOrAlias) *types.TypeParamList { + switch t := t.(type) { + case *types.Alias: + return aliases.TypeParams(t) + case *types.Named: + return t.TypeParams() + } + return nil +} + +// TypeArgs is a light shim around t.TypeArgs(). +// (go/types.Alias).TypeArgs requires >= 1.23. +func TypeArgs(t NamedOrAlias) *types.TypeList { + switch t := t.(type) { + case *types.Alias: + return aliases.TypeArgs(t) + case *types.Named: + return t.TypeArgs() + } + return nil +} + +// Origin returns the generic type of the Named or Alias type t if it +// is instantiated, otherwise it returns t. 
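A hypothetical helper (not part of the patch) showing how these shims let code inside x/tools handle *types.Named and *types.Alias uniformly; both Len calls below are safe on the nil lists returned for non-generic types:

	package example // hypothetical code inside x/tools

	import (
		"fmt"

		"golang.org/x/tools/internal/typesinternal"
	)

	// describeDeclared works for both *types.Named and *types.Alias.
	func describeDeclared(t typesinternal.NamedOrAlias) string {
		origin := typesinternal.Origin(t) // the generic type if t is an instantiation
		return fmt.Sprintf("%s: %d type parameters, %d type arguments",
			t.Obj().Name(),
			typesinternal.TypeParams(origin).Len(),
			typesinternal.TypeArgs(t).Len())
	}
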
+func Origin(t NamedOrAlias) NamedOrAlias { + switch t := t.(type) { + case *types.Alias: + return aliases.Origin(t) + case *types.Named: + return t.Origin() + } + return t +} diff --git a/internal/versions/types.go b/internal/versions/types.go index f0bb0d15f03..0fc10ce4eb5 100644 --- a/internal/versions/types.go +++ b/internal/versions/types.go @@ -31,8 +31,3 @@ func FileVersion(info *types.Info, file *ast.File) string { // This would act as a max version on what a tool can support. return Future } - -// InitFileVersions initializes info to record Go versions for Go files. -func InitFileVersions(info *types.Info) { - info.FileVersions = make(map[*ast.File]string) -} diff --git a/internal/versions/types_test.go b/internal/versions/types_test.go index 377369ffed7..bf459a5829c 100644 --- a/internal/versions/types_test.go +++ b/internal/versions/types_test.go @@ -11,51 +11,195 @@ import ( "go/parser" "go/token" "go/types" + "strings" "testing" + "golang.org/x/tools/internal/testenv" "golang.org/x/tools/internal/versions" ) +var contents = map[string]string{ + "gobuild122.go": ` +//go:build go1.22 +package p +`, + "gobuild121.go": ` +//go:build go1.21 +package p +`, + "gobuild120.go": ` +//go:build go1.20 +package p +`, + "gobuild119.go": ` +//go:build go1.19 +package p +`, + "noversion.go": ` +package p +`, +} + func Test(t *testing.T) { - var contents = map[string]string{ - "gobuild.go": ` - //go:build go1.23 - package p - `, - "noversion.go": ` - package p - `, + testenv.NeedsGo1Point(t, 23) // TODO(#69749): Allow on 1.22 if a fix for #69749 is submitted. + + for _, item := range []struct { + goversion string + pversion string + tests []fileTest + }{ + { + "", "", []fileTest{ + {"noversion.go", ""}, + {"gobuild119.go", "go1.21"}, + {"gobuild120.go", "go1.21"}, + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + { + "go1.20", "go1.20", []fileTest{ + {"noversion.go", "go1.20"}, + {"gobuild119.go", "go1.21"}, + {"gobuild120.go", "go1.21"}, + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + { + "go1.21", "go1.21", []fileTest{ + {"noversion.go", "go1.21"}, + {"gobuild119.go", "go1.21"}, + {"gobuild120.go", "go1.21"}, + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + { + "go1.22", "go1.22", []fileTest{ + {"noversion.go", "go1.22"}, + {"gobuild119.go", "go1.21"}, + {"gobuild120.go", "go1.21"}, + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + } { + name := fmt.Sprintf("types.Config{GoVersion:%q}", item.goversion) + t.Run(name, func(t *testing.T) { + testFiles(t, item.goversion, item.pversion, item.tests) + }) } - type fileTest struct { - fname string - want string +} + +func TestToolchain122(t *testing.T) { + // TestToolchain122 tests the 1.22 toolchain for the FileVersion it returns. + // These results are at the moment unique to 1.22. So test it with distinct + // expectations. + + // TODO(#69749): Remove requirement if a fix for #69749 is submitted. 
+ if testenv.Go1Point() != 22 { + t.Skip("Expectations are only for 1.22 toolchain") } + for _, item := range []struct { goversion string pversion string tests []fileTest }{ - // {"", "", []fileTest{{"noversion.go", ""}, {"gobuild.go", ""}}}, // TODO(matloob): re-enable this test (with modifications) once CL 607955 has been submitted - {"go1.22", "go1.22", []fileTest{{"noversion.go", "go1.22"}, {"gobuild.go", "go1.23"}}}, + { + "", "", []fileTest{ + {"noversion.go", ""}, + {"gobuild119.go", ""}, // differs + {"gobuild120.go", ""}, // differs + {"gobuild121.go", ""}, // differs + {"gobuild122.go", ""}}, // differs + }, + { + "go1.20", "go1.20", []fileTest{ + {"noversion.go", "go1.20"}, + {"gobuild119.go", "go1.20"}, // differs + {"gobuild120.go", "go1.20"}, // differs + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + { + "go1.21", "go1.21", []fileTest{ + {"noversion.go", "go1.21"}, + {"gobuild119.go", "go1.19"}, // differs + {"gobuild120.go", "go1.20"}, // differs + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, + { + "go1.22", "go1.22", []fileTest{ + {"noversion.go", "go1.22"}, + {"gobuild119.go", "go1.19"}, // differs + {"gobuild120.go", "go1.20"}, // differs + {"gobuild121.go", "go1.21"}, + {"gobuild122.go", "go1.22"}}, + }, } { name := fmt.Sprintf("types.Config{GoVersion:%q}", item.goversion) + t.Run(name, func(t *testing.T) { + testFiles(t, item.goversion, item.pversion, item.tests) + }) + } +} + +type fileTest struct { + fname string + want string +} + +func testFiles(t *testing.T, goversion string, pversion string, tests []fileTest) { + + fset := token.NewFileSet() + files := make([]*ast.File, len(tests)) + for i, test := range tests { + files[i] = parse(t, fset, test.fname, contents[test.fname]) + } + pkg, info, err := typeCheck(fset, files, goversion) + if err != nil { + t.Fatal(err) + } + if got, want := pkg.GoVersion(), pversion; versions.Compare(got, want) != 0 { + t.Errorf("GoVersion()=%q. expected %q", got, want) + } + if got := versions.FileVersion(info, nil); got != "" { + t.Errorf(`FileVersions(nil)=%q. expected ""`, got) + } + for i, test := range tests { + if got, want := versions.FileVersion(info, files[i]), test.want; got != want { + t.Errorf("FileVersions(%s)=%q. expected %q", test.fname, got, want) + } + } +} + +func TestTooNew(t *testing.T) { + testenv.NeedsGo1Point(t, 23) // TODO(#69749): Allow on 1.22 if a fix for #69749 is submitted. + + const contents = ` + //go:build go1.99 + package p + ` + type fileTest struct { + fname string + want string + } + + for _, goversion := range []string{ + "", + "go1.22", + } { + name := fmt.Sprintf("types.Config{GoVersion:%q}", goversion) t.Run(name, func(t *testing.T) { fset := token.NewFileSet() - files := make([]*ast.File, len(item.tests)) - for i, test := range item.tests { - files[i] = parse(t, fset, test.fname, contents[test.fname]) + files := []*ast.File{parse(t, fset, "p.go", contents)} + _, _, err := typeCheck(fset, files, goversion) + if err == nil { + t.Fatal("Expected an error from a using a TooNew file version") } - pkg, info := typeCheck(t, fset, files, item.goversion) - if got, want := pkg.GoVersion(), item.pversion; versions.Compare(got, want) != 0 { - t.Errorf("GoVersion()=%q. expected %q", got, want) - } - if got := versions.FileVersion(info, nil); got != "" { - t.Errorf(`FileVersions(nil)=%q. expected ""`, got) - } - for i, test := range item.tests { - if got, want := versions.FileVersion(info, files[i]), test.want; got != want { - t.Errorf("FileVersions(%s)=%q. 
expected %q", test.fname, got, want) - } + got := err.Error() + want := "file requires newer Go version go1.99" + if !strings.Contains(got, want) { + t.Errorf("Error message %q did not include %q", got, want) } }) } @@ -69,16 +213,14 @@ func parse(t *testing.T, fset *token.FileSet, name, src string) *ast.File { return file } -func typeCheck(t *testing.T, fset *token.FileSet, files []*ast.File, goversion string) (*types.Package, *types.Info) { +func typeCheck(fset *token.FileSet, files []*ast.File, goversion string) (*types.Package, *types.Info, error) { conf := types.Config{ Importer: importer.Default(), GoVersion: goversion, } - info := types.Info{} - versions.InitFileVersions(&info) - pkg, err := conf.Check("", fset, files, &info) - if err != nil { - t.Fatal(err) + info := types.Info{ + FileVersions: make(map[*ast.File]string), } - return pkg, &info + pkg, err := conf.Check("", fset, files, &info) + return pkg, &info, err } diff --git a/internal/versions/versions_test.go b/internal/versions/versions_test.go index 0886f8c80be..2599b8f26e5 100644 --- a/internal/versions/versions_test.go +++ b/internal/versions/versions_test.go @@ -205,8 +205,9 @@ func TestFileVersions(t *testing.T) { {GoVersion: versions.Go1_22}, {}, // GoVersion is unset. } { - info := &types.Info{} - versions.InitFileVersions(info) + info := &types.Info{ + FileVersions: make(map[*ast.File]string), + } _, err = conf.Check("P", fset, []*ast.File{f}, info) if err != nil { diff --git a/refactor/importgraph/graph_test.go b/refactor/importgraph/graph_test.go index 75263839a24..f3378a41e86 100644 --- a/refactor/importgraph/graph_test.go +++ b/refactor/importgraph/graph_test.go @@ -17,7 +17,7 @@ import ( "strings" "testing" - "golang.org/x/tools/go/packages/packagestest" + "golang.org/x/tools/internal/packagestest" "golang.org/x/tools/refactor/importgraph" _ "crypto/hmac" // just for test, below @@ -153,7 +153,7 @@ func TestBuild(t *testing.T) { } if !reverse.Search(this)[this] { printNode("reverse", this) - t.Errorf("irrefexive: reverse.Search(importgraph)[importgraph] not found") + t.Errorf("irreflexive: reverse.Search(importgraph)[importgraph] not found") } // Test Search is transitive. (There is no direct edge to these packages.) 
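The recurring change above replaces the removed versions.InitFileVersions helper with direct initialization of types.Info.FileVersions, which needs nothing from x/tools. A self-contained sketch (file content invented; requires a go1.22 or newer toolchain, where Info.FileVersions exists):

	package main

	import (
		"fmt"
		"go/ast"
		"go/parser"
		"go/token"
		"go/types"
	)

	const src = `//go:build go1.21

	package p
	`

	func main() {
		fset := token.NewFileSet()
		f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
		if err != nil {
			panic(err)
		}
		info := &types.Info{
			// Replaces the removed versions.InitFileVersions(info).
			FileVersions: make(map[*ast.File]string),
		}
		conf := types.Config{GoVersion: "go1.22"}
		if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
			panic(err)
		}
		// The //go:build line lowers this file below the package's go1.22.
		fmt.Println(info.FileVersions[f]) // "go1.21"
	}
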
diff --git a/refactor/rename/mvpkg.go b/refactor/rename/mvpkg.go index 78897112278..2dbe6439f81 100644 --- a/refactor/rename/mvpkg.go +++ b/refactor/rename/mvpkg.go @@ -321,7 +321,7 @@ func (m *mover) move() error { log.Printf("failed to pretty-print syntax tree: %v", err) continue } - tokenFile := m.iprog.Fset.File(f.Pos()) + tokenFile := m.iprog.Fset.File(f.FileStart) writeFile(tokenFile.Name(), buf.Bytes()) } diff --git a/refactor/rename/rename.go b/refactor/rename/rename.go index ae646475692..3e944b2df38 100644 --- a/refactor/rename/rename.go +++ b/refactor/rename/rename.go @@ -490,7 +490,7 @@ func (r *renamer) update() error { var generatedFileNames []string for _, info := range r.packages { for _, f := range info.Files { - tokenFile := r.iprog.Fset.File(f.Pos()) + tokenFile := r.iprog.Fset.File(f.FileStart) if filesToUpdate[tokenFile] && generated(f, tokenFile) { generatedFileNames = append(generatedFileNames, tokenFile.Name()) } @@ -505,7 +505,7 @@ func (r *renamer) update() error { for _, info := range r.packages { first := true for _, f := range info.Files { - tokenFile := r.iprog.Fset.File(f.Pos()) + tokenFile := r.iprog.Fset.File(f.FileStart) if filesToUpdate[tokenFile] { if first { npkgs++ diff --git a/refactor/rename/spec.go b/refactor/rename/spec.go index ab7dbc3a92d..1d8c32c9dc3 100644 --- a/refactor/rename/spec.go +++ b/refactor/rename/spec.go @@ -313,7 +313,7 @@ func findFromObjectsInFile(iprog *loader.Program, spec *spec) ([]types.Object, e // NB: under certain proprietary build systems, a given // filename may appear in multiple packages. for _, f := range info.Files { - thisFile := iprog.Fset.File(f.Pos()) + thisFile := iprog.Fset.File(f.FileStart) if !sameFile(thisFile.Name(), spec.filename) { continue } diff --git a/refactor/satisfy/find_test.go b/refactor/satisfy/find_test.go index daa8b219ef2..cb755601c78 100644 --- a/refactor/satisfy/find_test.go +++ b/refactor/satisfy/find_test.go @@ -15,7 +15,6 @@ import ( "sort" "testing" - "golang.org/x/tools/internal/versions" "golang.org/x/tools/refactor/satisfy" ) @@ -216,15 +215,15 @@ func constraints(t *testing.T, src string) []string { // type-check info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) conf := types.Config{ Importer: importer.Default(), }